1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 	@@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77
78 #ifndef LOAD_EXTEND_OP
79 #define LOAD_EXTEND_OP(M) UNKNOWN
80 #endif
81
82 /* Nonzero if we are folding constants inside an initializer; zero
83 otherwise. */
84 int folding_initializer = 0;
85
86 /* The following constants represent a bit-based encoding of GCC's
87 comparison operators. This encoding simplifies transformations
88 on relational comparison operators, such as AND and OR. */
89 enum comparison_code {
90 COMPCODE_FALSE = 0,
91 COMPCODE_LT = 1,
92 COMPCODE_EQ = 2,
93 COMPCODE_LE = 3,
94 COMPCODE_GT = 4,
95 COMPCODE_LTGT = 5,
96 COMPCODE_GE = 6,
97 COMPCODE_ORD = 7,
98 COMPCODE_UNORD = 8,
99 COMPCODE_UNLT = 9,
100 COMPCODE_UNEQ = 10,
101 COMPCODE_UNLE = 11,
102 COMPCODE_UNGT = 12,
103 COMPCODE_NE = 13,
104 COMPCODE_UNGE = 14,
105 COMPCODE_TRUE = 15
106 };
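
/* Editor's note (illustrative, not part of the original source): the
   encoding packs LT, EQ and GT into bits 0..2 and "unordered" into bit 3,
   so combining comparisons reduces to bit arithmetic, e.g.:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  1 | 2 == 3
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
     (a <= b) && (a >= b)  ~~>  COMPCODE_LE & COMPCODE_GE == 3 & 6 == COMPCODE_EQ
     (a < b) || (a == b)   ~~>  COMPCODE_LT | COMPCODE_EQ  == COMPCODE_LE  */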
107
108 static bool negate_expr_p (tree);
109 static tree negate_expr (tree);
110 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
111 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
112 static enum comparison_code comparison_to_compcode (enum tree_code);
113 static enum tree_code compcode_to_comparison (enum comparison_code);
114 static int operand_equal_for_comparison_p (tree, tree, tree);
115 static int twoval_comparison_p (tree, tree *, tree *, int *);
116 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
117 static tree make_bit_field_ref (location_t, tree, tree,
118 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
122 HOST_WIDE_INT *,
123 machine_mode *, int *, int *, int *,
124 tree *, tree *);
125 static int simple_operand_p (const_tree);
126 static bool simple_operand_p_2 (tree);
127 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
128 static tree range_predecessor (tree);
129 static tree range_successor (tree);
130 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
131 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
132 static tree unextend (tree, int, int, tree);
133 static tree optimize_minmax_comparison (location_t, enum tree_code,
134 tree, tree, tree);
135 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
136 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
137 static tree fold_binary_op_with_conditional_arg (location_t,
138 enum tree_code, tree,
139 tree, tree,
140 tree, tree, int);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147 static tree fold_view_convert_expr (tree, tree);
148 static bool vec_cst_ctor_to_array (tree, tree *);
149
150
151 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
152 Otherwise, return LOC. */
153
154 static location_t
155 expr_location_or (tree t, location_t loc)
156 {
157 location_t tloc = EXPR_LOCATION (t);
158 return tloc == UNKNOWN_LOCATION ? loc : tloc;
159 }
160
161 /* Similar to protected_set_expr_location, but never modify X in place;
162    if the location can and needs to be set, unshare it.  */
163
164 static inline tree
165 protected_set_expr_location_unshare (tree x, location_t loc)
166 {
167 if (CAN_HAVE_LOCATION_P (x)
168 && EXPR_LOCATION (x) != loc
169 && !(TREE_CODE (x) == SAVE_EXPR
170 || TREE_CODE (x) == TARGET_EXPR
171 || TREE_CODE (x) == BIND_EXPR))
172 {
173 x = copy_node (x);
174 SET_EXPR_LOCATION (x, loc);
175 }
176 return x;
177 }
178 \f
179 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
180 division and returns the quotient. Otherwise returns
181 NULL_TREE. */
182
183 tree
184 div_if_zero_remainder (const_tree arg1, const_tree arg2)
185 {
186 widest_int quo;
187
188 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
189 SIGNED, &quo))
190 return wide_int_to_tree (TREE_TYPE (arg1), quo);
191
192 return NULL_TREE;
193 }
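
/* For example (editor's illustration, with INTEGER_CST operands):

     div_if_zero_remainder (12, 4)   ~~>  3
     div_if_zero_remainder (13, 4)   ~~>  NULL_TREE  (remainder is 1)
     div_if_zero_remainder (-12, 4)  ~~>  -3         (the division is SIGNED)  */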
194 \f
195 /* This is nonzero if we should defer warnings about undefined
196 overflow. This facility exists because these warnings are a
197 special case. The code to estimate loop iterations does not want
198 to issue any warnings, since it works with expressions which do not
199 occur in user code. Various bits of cleanup code call fold(), but
200 only use the result if it has certain characteristics (e.g., is a
201 constant); that code only wants to issue a warning if the result is
202 used. */
203
204 static int fold_deferring_overflow_warnings;
205
206 /* If a warning about undefined overflow is deferred, this is the
207 warning. Note that this may cause us to turn two warnings into
208 one, but that is fine since it is sufficient to only give one
209 warning per expression. */
210
211 static const char* fold_deferred_overflow_warning;
212
213 /* If a warning about undefined overflow is deferred, this is the
214 level at which the warning should be emitted. */
215
216 static enum warn_strict_overflow_code fold_deferred_overflow_code;
217
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
220
221 void
222 fold_defer_overflow_warnings (void)
223 {
224 ++fold_deferring_overflow_warnings;
225 }
226
227 /* Stop deferring overflow warnings. If there is a pending warning,
228 and ISSUE is true, then issue the warning if appropriate. STMT is
229 the statement with which the warning should be associated (used for
230 location information); STMT may be NULL. CODE is the level of the
231 warning--a warn_strict_overflow_code value. This function will use
232 the smaller of CODE and the deferred code when deciding whether to
233 issue the warning. CODE may be zero to mean to always use the
234 deferred code. */
235
236 void
237 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
238 {
239 const char *warnmsg;
240 location_t locus;
241
242 gcc_assert (fold_deferring_overflow_warnings > 0);
243 --fold_deferring_overflow_warnings;
244 if (fold_deferring_overflow_warnings > 0)
245 {
246 if (fold_deferred_overflow_warning != NULL
247 && code != 0
248 && code < (int) fold_deferred_overflow_code)
249 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
250 return;
251 }
252
253 warnmsg = fold_deferred_overflow_warning;
254 fold_deferred_overflow_warning = NULL;
255
256 if (!issue || warnmsg == NULL)
257 return;
258
259 if (gimple_no_warning_p (stmt))
260 return;
261
262 /* Use the smallest code level when deciding to issue the
263 warning. */
264 if (code == 0 || code > (int) fold_deferred_overflow_code)
265 code = fold_deferred_overflow_code;
266
267 if (!issue_strict_overflow_warning (code))
268 return;
269
270 if (stmt == NULL)
271 locus = input_location;
272 else
273 locus = gimple_location (stmt);
274 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
275 }
276
277 /* Stop deferring overflow warnings, ignoring any deferred
278 warnings. */
279
280 void
281 fold_undefer_and_ignore_overflow_warnings (void)
282 {
283 fold_undefer_overflow_warnings (false, NULL, 0);
284 }
285
286 /* Whether we are deferring overflow warnings. */
287
288 bool
289 fold_deferring_overflow_warnings_p (void)
290 {
291 return fold_deferring_overflow_warnings > 0;
292 }
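
/* A minimal usage sketch of the deferral facility above (editor's
   illustration; the loop iteration estimators follow this pattern):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   where USED_P (a hypothetical flag) records whether the folded result
   was actually used, so a warning about relying on undefined signed
   overflow is only issued for folds that matter.  */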
293
294 /* This is called when we fold something based on the fact that signed
295 overflow is undefined. */
296
297 static void
298 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
299 {
300 if (fold_deferring_overflow_warnings > 0)
301 {
302 if (fold_deferred_overflow_warning == NULL
303 || wc < fold_deferred_overflow_code)
304 {
305 fold_deferred_overflow_warning = gmsgid;
306 fold_deferred_overflow_code = wc;
307 }
308 }
309 else if (issue_strict_overflow_warning (wc))
310 warning (OPT_Wstrict_overflow, gmsgid);
311 }
312 \f
313 /* Return true if the built-in mathematical function specified by FN
314    is odd, i.e. -f(x) == f(-x).  */
315
316 bool
317 negate_mathfn_p (combined_fn fn)
318 {
319 switch (fn)
320 {
321 CASE_CFN_ASIN:
322 CASE_CFN_ASINH:
323 CASE_CFN_ATAN:
324 CASE_CFN_ATANH:
325 CASE_CFN_CASIN:
326 CASE_CFN_CASINH:
327 CASE_CFN_CATAN:
328 CASE_CFN_CATANH:
329 CASE_CFN_CBRT:
330 CASE_CFN_CPROJ:
331 CASE_CFN_CSIN:
332 CASE_CFN_CSINH:
333 CASE_CFN_CTAN:
334 CASE_CFN_CTANH:
335 CASE_CFN_ERF:
336 CASE_CFN_LLROUND:
337 CASE_CFN_LROUND:
338 CASE_CFN_ROUND:
339 CASE_CFN_SIN:
340 CASE_CFN_SINH:
341 CASE_CFN_TAN:
342 CASE_CFN_TANH:
343 CASE_CFN_TRUNC:
344 return true;
345
346 CASE_CFN_LLRINT:
347 CASE_CFN_LRINT:
348 CASE_CFN_NEARBYINT:
349 CASE_CFN_RINT:
350 return !flag_rounding_math;
351
352 default:
353 break;
354 }
355 return false;
356 }
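
/* For instance (editor's note): sin is odd, since sin(-x) == -sin(x), so a
   negation can be moved into its argument; cos is even and is therefore
   absent from the list above.  The rint family is only odd when the
   rounding mode is symmetric, hence the !flag_rounding_math guard.  */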
357
358 /* Check whether we may negate an integer constant T without causing
359 overflow. */
360
361 bool
362 may_negate_without_overflow_p (const_tree t)
363 {
364 tree type;
365
366 gcc_assert (TREE_CODE (t) == INTEGER_CST);
367
368 type = TREE_TYPE (t);
369 if (TYPE_UNSIGNED (type))
370 return false;
371
372 return !wi::only_sign_bit_p (t);
373 }
374
375 /* Determine whether an expression T can be cheaply negated using
376 the function negate_expr without introducing undefined overflow. */
377
378 static bool
379 negate_expr_p (tree t)
380 {
381 tree type;
382
383 if (t == 0)
384 return false;
385
386 type = TREE_TYPE (t);
387
388 STRIP_SIGN_NOPS (t);
389 switch (TREE_CODE (t))
390 {
391 case INTEGER_CST:
392 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
393 return true;
394
395 /* Check that -CST will not overflow type. */
396 return may_negate_without_overflow_p (t);
397 case BIT_NOT_EXPR:
398 return (INTEGRAL_TYPE_P (type)
399 && TYPE_OVERFLOW_WRAPS (type));
400
401 case FIXED_CST:
402 return true;
403
404 case NEGATE_EXPR:
405 return !TYPE_OVERFLOW_SANITIZED (type);
406
407 case REAL_CST:
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
411
412 case COMPLEX_CST:
413 return negate_expr_p (TREE_REALPART (t))
414 && negate_expr_p (TREE_IMAGPART (t));
415
416 case VECTOR_CST:
417 {
418 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
419 return true;
420
421 int count = TYPE_VECTOR_SUBPARTS (type), i;
422
423 for (i = 0; i < count; i++)
424 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
425 return false;
426
427 return true;
428 }
429
430 case COMPLEX_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0))
432 && negate_expr_p (TREE_OPERAND (t, 1));
433
434 case CONJ_EXPR:
435 return negate_expr_p (TREE_OPERAND (t, 0));
436
437 case PLUS_EXPR:
438 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
439 || HONOR_SIGNED_ZEROS (element_mode (type))
440 || (INTEGRAL_TYPE_P (type)
441 && ! TYPE_OVERFLOW_WRAPS (type)))
442 return false;
443 /* -(A + B) -> (-B) - A. */
444 if (negate_expr_p (TREE_OPERAND (t, 1))
445 && reorder_operands_p (TREE_OPERAND (t, 0),
446 TREE_OPERAND (t, 1)))
447 return true;
448 /* -(A + B) -> (-A) - B. */
449 return negate_expr_p (TREE_OPERAND (t, 0));
450
451 case MINUS_EXPR:
452 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
453 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
454 && !HONOR_SIGNED_ZEROS (element_mode (type))
455 && (! INTEGRAL_TYPE_P (type)
456 || TYPE_OVERFLOW_WRAPS (type))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1));
459
460 case MULT_EXPR:
461 if (TYPE_UNSIGNED (type))
462 break;
463 	/* INT_MIN/n * n doesn't overflow, while after negating one operand
464 	   it does if n is a power of two.  */
465 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
466 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
467 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
468 && ! integer_pow2p (TREE_OPERAND (t, 0)))
469 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
470 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
471 break;
472
473 /* Fall through. */
474
475 case RDIV_EXPR:
476 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
477 return negate_expr_p (TREE_OPERAND (t, 1))
478 || negate_expr_p (TREE_OPERAND (t, 0));
479 break;
480
481 case TRUNC_DIV_EXPR:
482 case ROUND_DIV_EXPR:
483 case EXACT_DIV_EXPR:
484 if (TYPE_UNSIGNED (type))
485 break;
486 if (negate_expr_p (TREE_OPERAND (t, 0)))
487 return true;
488 /* In general we can't negate B in A / B, because if A is INT_MIN and
489 B is 1, we may turn this into INT_MIN / -1 which is undefined
490 and actually traps on some architectures. */
491 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
492 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
493 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
494 && ! integer_onep (TREE_OPERAND (t, 1))))
495 return negate_expr_p (TREE_OPERAND (t, 1));
496 break;
497
498 case NOP_EXPR:
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
501 {
502 tree tem = strip_float_extensions (t);
503 if (tem != t)
504 return negate_expr_p (tem);
505 }
506 break;
507
508 case CALL_EXPR:
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (get_call_combined_fn (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
512 break;
513
514 case RSHIFT_EXPR:
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
517 {
518 tree op1 = TREE_OPERAND (t, 1);
519 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
520 return true;
521 }
522 break;
523
524 default:
525 break;
526 }
527 return false;
528 }
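
/* Examples (editor's illustration, for a signed integer type):

     -(x - y)  ~~>  y - x, and  -(x + 1)  ~~>  (-1) - x, but only when
                    signed overflow wraps (-fwrapv);
     -(x / y)  ~~>  x / -y is avoided in general: if x == INT_MIN and
                    y == 1 it would become INT_MIN / -1, which overflows.  */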
529
530 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
531    simplification is possible.
532 If negate_expr_p would return true for T, NULL_TREE will never be
533 returned. */
534
535 static tree
536 fold_negate_expr (location_t loc, tree t)
537 {
538 tree type = TREE_TYPE (t);
539 tree tem;
540
541 switch (TREE_CODE (t))
542 {
543 /* Convert - (~A) to A + 1. */
544 case BIT_NOT_EXPR:
545 if (INTEGRAL_TYPE_P (type))
546 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
547 build_one_cst (type));
548 break;
549
550 case INTEGER_CST:
551 tem = fold_negate_const (t, type);
552 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
553 || (ANY_INTEGRAL_TYPE_P (type)
554 && !TYPE_OVERFLOW_TRAPS (type)
555 && TYPE_OVERFLOW_WRAPS (type))
556 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
557 return tem;
558 break;
559
560 case REAL_CST:
561 tem = fold_negate_const (t, type);
562 return tem;
563
564 case FIXED_CST:
565 tem = fold_negate_const (t, type);
566 return tem;
567
568 case COMPLEX_CST:
569 {
570 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
571 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
572 if (rpart && ipart)
573 return build_complex (type, rpart, ipart);
574 }
575 break;
576
577 case VECTOR_CST:
578 {
579 int count = TYPE_VECTOR_SUBPARTS (type), i;
580 tree *elts = XALLOCAVEC (tree, count);
581
582 for (i = 0; i < count; i++)
583 {
584 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
585 if (elts[i] == NULL_TREE)
586 return NULL_TREE;
587 }
588
589 return build_vector (type, elts);
590 }
591
592 case COMPLEX_EXPR:
593 if (negate_expr_p (t))
594 return fold_build2_loc (loc, COMPLEX_EXPR, type,
595 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
596 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
597 break;
598
599 case CONJ_EXPR:
600 if (negate_expr_p (t))
601 return fold_build1_loc (loc, CONJ_EXPR, type,
602 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
603 break;
604
605 case NEGATE_EXPR:
606 if (!TYPE_OVERFLOW_SANITIZED (type))
607 return TREE_OPERAND (t, 0);
608 break;
609
610 case PLUS_EXPR:
611 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
612 && !HONOR_SIGNED_ZEROS (element_mode (type)))
613 {
614 /* -(A + B) -> (-B) - A. */
615 if (negate_expr_p (TREE_OPERAND (t, 1))
616 && reorder_operands_p (TREE_OPERAND (t, 0),
617 TREE_OPERAND (t, 1)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 1));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 0));
622 }
623
624 /* -(A + B) -> (-A) - B. */
625 if (negate_expr_p (TREE_OPERAND (t, 0)))
626 {
627 tem = negate_expr (TREE_OPERAND (t, 0));
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 tem, TREE_OPERAND (t, 1));
630 }
631 }
632 break;
633
634 case MINUS_EXPR:
635 /* - (A - B) -> B - A */
636 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
637 && !HONOR_SIGNED_ZEROS (element_mode (type))
638 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
639 return fold_build2_loc (loc, MINUS_EXPR, type,
640 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
641 break;
642
643 case MULT_EXPR:
644 if (TYPE_UNSIGNED (type))
645 break;
646
647 /* Fall through. */
648
649 case RDIV_EXPR:
650 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
651 {
652 tem = TREE_OPERAND (t, 1);
653 if (negate_expr_p (tem))
654 return fold_build2_loc (loc, TREE_CODE (t), type,
655 TREE_OPERAND (t, 0), negate_expr (tem));
656 tem = TREE_OPERAND (t, 0);
657 if (negate_expr_p (tem))
658 return fold_build2_loc (loc, TREE_CODE (t), type,
659 negate_expr (tem), TREE_OPERAND (t, 1));
660 }
661 break;
662
663 case TRUNC_DIV_EXPR:
664 case ROUND_DIV_EXPR:
665 case EXACT_DIV_EXPR:
666 if (TYPE_UNSIGNED (type))
667 break;
668 if (negate_expr_p (TREE_OPERAND (t, 0)))
669 return fold_build2_loc (loc, TREE_CODE (t), type,
670 negate_expr (TREE_OPERAND (t, 0)),
671 TREE_OPERAND (t, 1));
672 /* In general we can't negate B in A / B, because if A is INT_MIN and
673 B is 1, we may turn this into INT_MIN / -1 which is undefined
674 and actually traps on some architectures. */
675 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
676 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
677 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
678 && ! integer_onep (TREE_OPERAND (t, 1))))
679 && negate_expr_p (TREE_OPERAND (t, 1)))
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 TREE_OPERAND (t, 0),
682 negate_expr (TREE_OPERAND (t, 1)));
683 break;
684
685 case NOP_EXPR:
686 /* Convert -((double)float) into (double)(-float). */
687 if (TREE_CODE (type) == REAL_TYPE)
688 {
689 tem = strip_float_extensions (t);
690 if (tem != t && negate_expr_p (tem))
691 return fold_convert_loc (loc, type, negate_expr (tem));
692 }
693 break;
694
695 case CALL_EXPR:
696 /* Negate -f(x) as f(-x). */
697 if (negate_mathfn_p (get_call_combined_fn (t))
698 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
699 {
700 tree fndecl, arg;
701
702 fndecl = get_callee_fndecl (t);
703 arg = negate_expr (CALL_EXPR_ARG (t, 0));
704 return build_call_expr_loc (loc, fndecl, 1, arg);
705 }
706 break;
707
708 case RSHIFT_EXPR:
709 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
710 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
711 {
712 tree op1 = TREE_OPERAND (t, 1);
713 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
714 {
715 tree ntype = TYPE_UNSIGNED (type)
716 ? signed_type_for (type)
717 : unsigned_type_for (type);
718 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
719 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
720 return fold_convert_loc (loc, type, temp);
721 }
722 }
723 break;
724
725 default:
726 break;
727 }
728
729 return NULL_TREE;
730 }
731
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
734 return NULL_TREE. */
735
736 static tree
737 negate_expr (tree t)
738 {
739 tree type, tem;
740 location_t loc;
741
742 if (t == NULL_TREE)
743 return NULL_TREE;
744
745 loc = EXPR_LOCATION (t);
746 type = TREE_TYPE (t);
747 STRIP_SIGN_NOPS (t);
748
749 tem = fold_negate_expr (loc, t);
750 if (!tem)
751 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752 return fold_convert_loc (loc, type, tem);
753 }
754 \f
755 /* Split a tree IN into constant, literal, and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
762
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
766
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead.
769
770 If IN is itself a literal or constant, return it as appropriate.
771
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
774
775 static tree
776 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
777 tree *minus_litp, int negate_p)
778 {
779 tree var = 0;
780
781 *conp = 0;
782 *litp = 0;
783 *minus_litp = 0;
784
785 /* Strip any conversions that don't change the machine mode or signedness. */
786 STRIP_SIGN_NOPS (in);
787
788 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
789 || TREE_CODE (in) == FIXED_CST)
790 *litp = in;
791 else if (TREE_CODE (in) == code
792 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
793 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
794 /* We can associate addition and subtraction together (even
795 though the C standard doesn't say so) for integers because
796 the value is not affected. For reals, the value might be
797 affected, so we can't. */
798 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
799 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
800 {
801 tree op0 = TREE_OPERAND (in, 0);
802 tree op1 = TREE_OPERAND (in, 1);
803 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
804 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
805
806 /* First see if either of the operands is a literal, then a constant. */
807 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
808 || TREE_CODE (op0) == FIXED_CST)
809 *litp = op0, op0 = 0;
810 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
811 || TREE_CODE (op1) == FIXED_CST)
812 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
813
814 if (op0 != 0 && TREE_CONSTANT (op0))
815 *conp = op0, op0 = 0;
816 else if (op1 != 0 && TREE_CONSTANT (op1))
817 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
818
819 /* If we haven't dealt with either operand, this is not a case we can
820 decompose. Otherwise, VAR is either of the ones remaining, if any. */
821 if (op0 != 0 && op1 != 0)
822 var = in;
823 else if (op0 != 0)
824 var = op0;
825 else
826 var = op1, neg_var_p = neg1_p;
827
828 /* Now do any needed negations. */
829 if (neg_litp_p)
830 *minus_litp = *litp, *litp = 0;
831 if (neg_conp_p)
832 *conp = negate_expr (*conp);
833 if (neg_var_p)
834 var = negate_expr (var);
835 }
836 else if (TREE_CODE (in) == BIT_NOT_EXPR
837 && code == PLUS_EXPR)
838 {
839 /* -X - 1 is folded to ~X, undo that here. */
840 *minus_litp = build_one_cst (TREE_TYPE (in));
841 var = negate_expr (TREE_OPERAND (in, 0));
842 }
843 else if (TREE_CONSTANT (in))
844 *conp = in;
845 else
846 var = in;
847
848 if (negate_p)
849 {
850 if (*litp)
851 *minus_litp = *litp, *litp = 0;
852 else if (*minus_litp)
853 *litp = *minus_litp, *minus_litp = 0;
854 *conp = negate_expr (*conp);
855 var = negate_expr (var);
856 }
857
858 return var;
859 }
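
/* A worked example (editor's illustration): splitting IN = x - 3 with
   CODE == PLUS_EXPR and NEGATE_P == 0 yields

     *litp       == NULL_TREE
     *minus_litp == 3           (the literal was subtracted)
     *conp       == NULL_TREE
     return value:  x

   which the caller can then recombine with associate_trees.  */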
860
861 /* Re-associate trees split by the above function. T1 and T2 are
862 either expressions to associate or null. Return the new
863 expression, if any. LOC is the location of the new expression. If
864 we build an operation, do it in TYPE and with CODE. */
865
866 static tree
867 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
868 {
869 if (t1 == 0)
870 return t2;
871 else if (t2 == 0)
872 return t1;
873
874 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
875 try to fold this since we will have infinite recursion. But do
876 deal with any NEGATE_EXPRs. */
877 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
878 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 {
880 if (code == PLUS_EXPR)
881 {
882 if (TREE_CODE (t1) == NEGATE_EXPR)
883 return build2_loc (loc, MINUS_EXPR, type,
884 fold_convert_loc (loc, type, t2),
885 fold_convert_loc (loc, type,
886 TREE_OPERAND (t1, 0)));
887 else if (TREE_CODE (t2) == NEGATE_EXPR)
888 return build2_loc (loc, MINUS_EXPR, type,
889 fold_convert_loc (loc, type, t1),
890 fold_convert_loc (loc, type,
891 TREE_OPERAND (t2, 0)));
892 else if (integer_zerop (t2))
893 return fold_convert_loc (loc, type, t1);
894 }
895 else if (code == MINUS_EXPR)
896 {
897 if (integer_zerop (t2))
898 return fold_convert_loc (loc, type, t1);
899 }
900
901 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
903 }
904
905 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
906 fold_convert_loc (loc, type, t2));
907 }
908 \f
909 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
910 for use in int_const_binop, size_binop and size_diffop. */
911
912 static bool
913 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 {
915 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
916 return false;
917 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
918 return false;
919
920 switch (code)
921 {
922 case LSHIFT_EXPR:
923 case RSHIFT_EXPR:
924 case LROTATE_EXPR:
925 case RROTATE_EXPR:
926 return true;
927
928 default:
929 break;
930 }
931
932 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
933 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
934 && TYPE_MODE (type1) == TYPE_MODE (type2);
935 }
936
937
938 /* Combine two integer constants ARG1 and ARG2 under operation CODE
939 to produce a new constant. Return NULL_TREE if we don't know how
940 to evaluate CODE at compile-time. */
941
942 static tree
943 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
944 int overflowable)
945 {
946 wide_int res;
947 tree t;
948 tree type = TREE_TYPE (arg1);
949 signop sign = TYPE_SIGN (type);
950 bool overflow = false;
951
952 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
953 TYPE_SIGN (TREE_TYPE (parg2)));
954
955 switch (code)
956 {
957 case BIT_IOR_EXPR:
958 res = wi::bit_or (arg1, arg2);
959 break;
960
961 case BIT_XOR_EXPR:
962 res = wi::bit_xor (arg1, arg2);
963 break;
964
965 case BIT_AND_EXPR:
966 res = wi::bit_and (arg1, arg2);
967 break;
968
969 case RSHIFT_EXPR:
970 case LSHIFT_EXPR:
971 if (wi::neg_p (arg2))
972 {
973 arg2 = -arg2;
974 if (code == RSHIFT_EXPR)
975 code = LSHIFT_EXPR;
976 else
977 code = RSHIFT_EXPR;
978 }
979
980 if (code == RSHIFT_EXPR)
981 /* It's unclear from the C standard whether shifts can overflow.
982 The following code ignores overflow; perhaps a C standard
983 interpretation ruling is needed. */
984 res = wi::rshift (arg1, arg2, sign);
985 else
986 res = wi::lshift (arg1, arg2);
987 break;
988
989 case RROTATE_EXPR:
990 case LROTATE_EXPR:
991 if (wi::neg_p (arg2))
992 {
993 arg2 = -arg2;
994 if (code == RROTATE_EXPR)
995 code = LROTATE_EXPR;
996 else
997 code = RROTATE_EXPR;
998 }
999
1000 if (code == RROTATE_EXPR)
1001 res = wi::rrotate (arg1, arg2);
1002 else
1003 res = wi::lrotate (arg1, arg2);
1004 break;
1005
1006 case PLUS_EXPR:
1007 res = wi::add (arg1, arg2, sign, &overflow);
1008 break;
1009
1010 case MINUS_EXPR:
1011 res = wi::sub (arg1, arg2, sign, &overflow);
1012 break;
1013
1014 case MULT_EXPR:
1015 res = wi::mul (arg1, arg2, sign, &overflow);
1016 break;
1017
1018 case MULT_HIGHPART_EXPR:
1019 res = wi::mul_high (arg1, arg2, sign);
1020 break;
1021
1022 case TRUNC_DIV_EXPR:
1023 case EXACT_DIV_EXPR:
1024 if (arg2 == 0)
1025 return NULL_TREE;
1026 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1027 break;
1028
1029 case FLOOR_DIV_EXPR:
1030 if (arg2 == 0)
1031 return NULL_TREE;
1032 res = wi::div_floor (arg1, arg2, sign, &overflow);
1033 break;
1034
1035 case CEIL_DIV_EXPR:
1036 if (arg2 == 0)
1037 return NULL_TREE;
1038 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1039 break;
1040
1041 case ROUND_DIV_EXPR:
1042 if (arg2 == 0)
1043 return NULL_TREE;
1044 res = wi::div_round (arg1, arg2, sign, &overflow);
1045 break;
1046
1047 case TRUNC_MOD_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1051 break;
1052
1053 case FLOOR_MOD_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case CEIL_MOD_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1063 break;
1064
1065 case ROUND_MOD_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::mod_round (arg1, arg2, sign, &overflow);
1069 break;
1070
1071 case MIN_EXPR:
1072 res = wi::min (arg1, arg2, sign);
1073 break;
1074
1075 case MAX_EXPR:
1076 res = wi::max (arg1, arg2, sign);
1077 break;
1078
1079 default:
1080 return NULL_TREE;
1081 }
1082
1083 t = force_fit_type (type, res, overflowable,
1084 (((sign == SIGNED || overflowable == -1)
1085 && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1087
1088 return t;
1089 }
1090
1091 tree
1092 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1093 {
1094 return int_const_binop_1 (code, arg1, arg2, 1);
1095 }
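
/* For example (editor's illustration, 32-bit signed int):

     int_const_binop (PLUS_EXPR, INT_MAX, 1)
       ~~>  INT_MIN with TREE_OVERFLOW set, since OVERFLOWABLE is 1
            and the signed addition wrapped;
     int_const_binop (TRUNC_DIV_EXPR, 7, 0)
       ~~>  NULL_TREE, as division by zero is never evaluated.  */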
1096
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1101
1102 static tree
1103 const_binop (enum tree_code code, tree arg1, tree arg2)
1104 {
1105 /* Sanity check for the recursive cases. */
1106 if (!arg1 || !arg2)
1107 return NULL_TREE;
1108
1109 STRIP_NOPS (arg1);
1110 STRIP_NOPS (arg2);
1111
1112 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1113 {
1114 if (code == POINTER_PLUS_EXPR)
1115 return int_const_binop (PLUS_EXPR,
1116 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1117
1118 return int_const_binop (code, arg1, arg2);
1119 }
1120
1121 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1122 {
1123 machine_mode mode;
1124 REAL_VALUE_TYPE d1;
1125 REAL_VALUE_TYPE d2;
1126 REAL_VALUE_TYPE value;
1127 REAL_VALUE_TYPE result;
1128 bool inexact;
1129 tree t, type;
1130
1131 /* The following codes are handled by real_arithmetic. */
1132 switch (code)
1133 {
1134 case PLUS_EXPR:
1135 case MINUS_EXPR:
1136 case MULT_EXPR:
1137 case RDIV_EXPR:
1138 case MIN_EXPR:
1139 case MAX_EXPR:
1140 break;
1141
1142 default:
1143 return NULL_TREE;
1144 }
1145
1146 d1 = TREE_REAL_CST (arg1);
1147 d2 = TREE_REAL_CST (arg2);
1148
1149 type = TREE_TYPE (arg1);
1150 mode = TYPE_MODE (type);
1151
1152 /* Don't perform operation if we honor signaling NaNs and
1153 either operand is a signaling NaN. */
1154 if (HONOR_SNANS (mode)
1155 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1156 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1157 return NULL_TREE;
1158
1159 /* Don't perform operation if it would raise a division
1160 by zero exception. */
1161 if (code == RDIV_EXPR
1162 && real_equal (&d2, &dconst0)
1163 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1164 return NULL_TREE;
1165
1166 /* If either operand is a NaN, just return it. Otherwise, set up
1167 for floating-point trap; we return an overflow. */
1168 if (REAL_VALUE_ISNAN (d1))
1169 {
1170 	/* Make the resulting NaN value a qNaN when flag_signaling_nans
1171 	   is off.  */
1172 d1.signalling = 0;
1173 t = build_real (type, d1);
1174 return t;
1175 }
1176 else if (REAL_VALUE_ISNAN (d2))
1177 {
1178 	/* Make the resulting NaN value a qNaN when flag_signaling_nans
1179 	   is off.  */
1180 d2.signalling = 0;
1181 t = build_real (type, d2);
1182 return t;
1183 }
1184
1185 inexact = real_arithmetic (&value, code, &d1, &d2);
1186 real_convert (&result, mode, &value);
1187
1188     /* Don't constant fold this floating point operation if
1189        the result has overflowed and flag_trapping_math is set.  */
1190 if (flag_trapping_math
1191 && MODE_HAS_INFINITIES (mode)
1192 && REAL_VALUE_ISINF (result)
1193 && !REAL_VALUE_ISINF (d1)
1194 && !REAL_VALUE_ISINF (d2))
1195 return NULL_TREE;
1196
1197 /* Don't constant fold this floating point operation if the
1198        result may depend upon the run-time rounding mode and
1199 flag_rounding_math is set, or if GCC's software emulation
1200 is unable to accurately represent the result. */
1201 if ((flag_rounding_math
1202 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1203 && (inexact || !real_identical (&result, &value)))
1204 return NULL_TREE;
1205
1206 t = build_real (type, result);
1207
1208 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1209 return t;
1210 }
1211
1212 if (TREE_CODE (arg1) == FIXED_CST)
1213 {
1214 FIXED_VALUE_TYPE f1;
1215 FIXED_VALUE_TYPE f2;
1216 FIXED_VALUE_TYPE result;
1217 tree t, type;
1218 int sat_p;
1219 bool overflow_p;
1220
1221 /* The following codes are handled by fixed_arithmetic. */
1222 switch (code)
1223 {
1224 case PLUS_EXPR:
1225 case MINUS_EXPR:
1226 case MULT_EXPR:
1227 case TRUNC_DIV_EXPR:
1228 if (TREE_CODE (arg2) != FIXED_CST)
1229 return NULL_TREE;
1230 f2 = TREE_FIXED_CST (arg2);
1231 break;
1232
1233 case LSHIFT_EXPR:
1234 case RSHIFT_EXPR:
1235 {
1236 if (TREE_CODE (arg2) != INTEGER_CST)
1237 return NULL_TREE;
1238 wide_int w2 = arg2;
1239 f2.data.high = w2.elt (1);
1240 f2.data.low = w2.elt (0);
1241 f2.mode = SImode;
1242 }
1243 break;
1244
1245 default:
1246 return NULL_TREE;
1247 }
1248
1249 f1 = TREE_FIXED_CST (arg1);
1250 type = TREE_TYPE (arg1);
1251 sat_p = TYPE_SATURATING (type);
1252 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1253 t = build_fixed (type, result);
1254 /* Propagate overflow flags. */
1255 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1256 TREE_OVERFLOW (t) = 1;
1257 return t;
1258 }
1259
1260 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1261 {
1262 tree type = TREE_TYPE (arg1);
1263 tree r1 = TREE_REALPART (arg1);
1264 tree i1 = TREE_IMAGPART (arg1);
1265 tree r2 = TREE_REALPART (arg2);
1266 tree i2 = TREE_IMAGPART (arg2);
1267 tree real, imag;
1268
1269 switch (code)
1270 {
1271 case PLUS_EXPR:
1272 case MINUS_EXPR:
1273 real = const_binop (code, r1, r2);
1274 imag = const_binop (code, i1, i2);
1275 break;
1276
1277 case MULT_EXPR:
1278 if (COMPLEX_FLOAT_TYPE_P (type))
1279 return do_mpc_arg2 (arg1, arg2, type,
1280 /* do_nonfinite= */ folding_initializer,
1281 mpc_mul);
1282
1283 real = const_binop (MINUS_EXPR,
1284 const_binop (MULT_EXPR, r1, r2),
1285 const_binop (MULT_EXPR, i1, i2));
1286 imag = const_binop (PLUS_EXPR,
1287 const_binop (MULT_EXPR, r1, i2),
1288 const_binop (MULT_EXPR, i1, r2));
1289 break;
1290
1291 case RDIV_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_div);
1296 	  /* Fall through.  */
1297 case TRUNC_DIV_EXPR:
1298 case CEIL_DIV_EXPR:
1299 case FLOOR_DIV_EXPR:
1300 case ROUND_DIV_EXPR:
1301 if (flag_complex_method == 0)
1302 {
1303 /* Keep this algorithm in sync with
1304 tree-complex.c:expand_complex_div_straight().
1305
1306 Expand complex division to scalars, straightforward algorithm.
1307 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1308 t = br*br + bi*bi
1309 */
1310 tree magsquared
1311 = const_binop (PLUS_EXPR,
1312 const_binop (MULT_EXPR, r2, r2),
1313 const_binop (MULT_EXPR, i2, i2));
1314 tree t1
1315 = const_binop (PLUS_EXPR,
1316 const_binop (MULT_EXPR, r1, r2),
1317 const_binop (MULT_EXPR, i1, i2));
1318 tree t2
1319 = const_binop (MINUS_EXPR,
1320 const_binop (MULT_EXPR, i1, r2),
1321 const_binop (MULT_EXPR, r1, i2));
1322
1323 real = const_binop (code, t1, magsquared);
1324 imag = const_binop (code, t2, magsquared);
1325 }
1326 else
1327 {
1328 /* Keep this algorithm in sync with
1329 tree-complex.c:expand_complex_div_wide().
1330
1331 Expand complex division to scalars, modified algorithm to minimize
1332 overflow with wide input ranges. */
1333 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1334 fold_abs_const (r2, TREE_TYPE (type)),
1335 fold_abs_const (i2, TREE_TYPE (type)));
1336
1337 if (integer_nonzerop (compare))
1338 {
1339 /* In the TRUE branch, we compute
1340 ratio = br/bi;
1341 div = (br * ratio) + bi;
1342 tr = (ar * ratio) + ai;
1343 ti = (ai * ratio) - ar;
1344 tr = tr / div;
1345 ti = ti / div; */
1346 tree ratio = const_binop (code, r2, i2);
1347 tree div = const_binop (PLUS_EXPR, i2,
1348 const_binop (MULT_EXPR, r2, ratio));
1349 real = const_binop (MULT_EXPR, r1, ratio);
1350 real = const_binop (PLUS_EXPR, real, i1);
1351 real = const_binop (code, real, div);
1352
1353 imag = const_binop (MULT_EXPR, i1, ratio);
1354 imag = const_binop (MINUS_EXPR, imag, r1);
1355 imag = const_binop (code, imag, div);
1356 }
1357 else
1358 {
1359 		  /* In the FALSE branch, we compute
1360 		     ratio = bi/br;
1361 		     div = (bi * ratio) + br;
1362 		     tr = (ai * ratio) + ar;
1363 		     ti = ai - (ar * ratio);
1364 		     tr = tr / div;
1365 		     ti = ti / div;  */
1366 tree ratio = const_binop (code, i2, r2);
1367 tree div = const_binop (PLUS_EXPR, r2,
1368 const_binop (MULT_EXPR, i2, ratio));
1369
1370 real = const_binop (MULT_EXPR, i1, ratio);
1371 real = const_binop (PLUS_EXPR, real, r1);
1372 real = const_binop (code, real, div);
1373
1374 imag = const_binop (MULT_EXPR, r1, ratio);
1375 imag = const_binop (MINUS_EXPR, i1, imag);
1376 imag = const_binop (code, imag, div);
1377 }
1378 }
1379 break;
1380
1381 default:
1382 return NULL_TREE;
1383 }
1384
1385 if (real && imag)
1386 return build_complex (type, real, imag);
1387 }
1388
1389 if (TREE_CODE (arg1) == VECTOR_CST
1390 && TREE_CODE (arg2) == VECTOR_CST)
1391 {
1392 tree type = TREE_TYPE (arg1);
1393 int count = TYPE_VECTOR_SUBPARTS (type), i;
1394 tree *elts = XALLOCAVEC (tree, count);
1395
1396 for (i = 0; i < count; i++)
1397 {
1398 tree elem1 = VECTOR_CST_ELT (arg1, i);
1399 tree elem2 = VECTOR_CST_ELT (arg2, i);
1400
1401 elts[i] = const_binop (code, elem1, elem2);
1402
1403 	  /* It is possible that const_binop cannot handle the given
1404 	     code and returns NULL_TREE.  */
1405 if (elts[i] == NULL_TREE)
1406 return NULL_TREE;
1407 }
1408
1409 return build_vector (type, elts);
1410 }
1411
1412 /* Shifts allow a scalar offset for a vector. */
1413 if (TREE_CODE (arg1) == VECTOR_CST
1414 && TREE_CODE (arg2) == INTEGER_CST)
1415 {
1416 tree type = TREE_TYPE (arg1);
1417 int count = TYPE_VECTOR_SUBPARTS (type), i;
1418 tree *elts = XALLOCAVEC (tree, count);
1419
1420 for (i = 0; i < count; i++)
1421 {
1422 tree elem1 = VECTOR_CST_ELT (arg1, i);
1423
1424 elts[i] = const_binop (code, elem1, arg2);
1425
1426 	  /* It is possible that const_binop cannot handle the given
1427 	     code and returns NULL_TREE.  */
1428 if (elts[i] == NULL_TREE)
1429 return NULL_TREE;
1430 }
1431
1432 return build_vector (type, elts);
1433 }
1434 return NULL_TREE;
1435 }
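
/* A worked instance of the wide-range complex division above (editor's
   sketch): (1 + 2i) / (3 + 4i) has |br| < |bi|, so the TRUE branch runs:

     ratio = 3 / 4                 = 0.75
     div   = 4 + 3 * 0.75          = 6.25
     tr    = (1 * 0.75 + 2) / 6.25 = 0.44
     ti    = (2 * 0.75 - 1) / 6.25 = 0.08

   matching the exact result (1 + 2i)(3 - 4i) / 25 = (11 + 2i) / 25.  */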
1436
1437 /* Overload that adds a TYPE parameter to be able to dispatch
1438 to fold_relational_const. */
1439
1440 tree
1441 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1442 {
1443 if (TREE_CODE_CLASS (code) == tcc_comparison)
1444 return fold_relational_const (code, type, arg1, arg2);
1445
1446 /* ??? Until we make the const_binop worker take the type of the
1447      result as argument, put those cases that need it here.  */
1448 switch (code)
1449 {
1450 case COMPLEX_EXPR:
1451 if ((TREE_CODE (arg1) == REAL_CST
1452 && TREE_CODE (arg2) == REAL_CST)
1453 || (TREE_CODE (arg1) == INTEGER_CST
1454 && TREE_CODE (arg2) == INTEGER_CST))
1455 return build_complex (type, arg1, arg2);
1456 return NULL_TREE;
1457
1458 case VEC_PACK_TRUNC_EXPR:
1459 case VEC_PACK_FIX_TRUNC_EXPR:
1460 {
1461 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1462 tree *elts;
1463
1464 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1465 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1466 if (TREE_CODE (arg1) != VECTOR_CST
1467 || TREE_CODE (arg2) != VECTOR_CST)
1468 return NULL_TREE;
1469
1470 elts = XALLOCAVEC (tree, nelts);
1471 if (!vec_cst_ctor_to_array (arg1, elts)
1472 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1473 return NULL_TREE;
1474
1475 for (i = 0; i < nelts; i++)
1476 {
1477 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1478 ? NOP_EXPR : FIX_TRUNC_EXPR,
1479 TREE_TYPE (type), elts[i]);
1480 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1481 return NULL_TREE;
1482 }
1483
1484 return build_vector (type, elts);
1485 }
1486
1487 case VEC_WIDEN_MULT_LO_EXPR:
1488 case VEC_WIDEN_MULT_HI_EXPR:
1489 case VEC_WIDEN_MULT_EVEN_EXPR:
1490 case VEC_WIDEN_MULT_ODD_EXPR:
1491 {
1492 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1493 unsigned int out, ofs, scale;
1494 tree *elts;
1495
1496 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1497 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1498 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1499 return NULL_TREE;
1500
1501 elts = XALLOCAVEC (tree, nelts * 4);
1502 if (!vec_cst_ctor_to_array (arg1, elts)
1503 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1504 return NULL_TREE;
1505
1506 if (code == VEC_WIDEN_MULT_LO_EXPR)
1507 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1508 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1509 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1510 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1511 scale = 1, ofs = 0;
1512 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1513 scale = 1, ofs = 1;
1514
1515 for (out = 0; out < nelts; out++)
1516 {
1517 unsigned int in1 = (out << scale) + ofs;
1518 unsigned int in2 = in1 + nelts * 2;
1519 tree t1, t2;
1520
1521 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1522 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1523
1524 if (t1 == NULL_TREE || t2 == NULL_TREE)
1525 return NULL_TREE;
1526 elts[out] = const_binop (MULT_EXPR, t1, t2);
1527 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1528 return NULL_TREE;
1529 }
1530
1531 return build_vector (type, elts);
1532 }
1533
1534 default:;
1535 }
1536
1537 if (TREE_CODE_CLASS (code) != tcc_binary)
1538 return NULL_TREE;
1539
1540 /* Make sure type and arg0 have the same saturating flag. */
1541 gcc_checking_assert (TYPE_SATURATING (type)
1542 == TYPE_SATURATING (TREE_TYPE (arg1)));
1543
1544 return const_binop (code, arg1, arg2);
1545 }
1546
1547 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1548    Return zero if computing the constant is not possible.  */
1549
1550 tree
1551 const_unop (enum tree_code code, tree type, tree arg0)
1552 {
1553 /* Don't perform the operation, other than NEGATE and ABS, if
1554 flag_signaling_nans is on and the operand is a signaling NaN. */
1555 if (TREE_CODE (arg0) == REAL_CST
1556 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1557 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1558 && code != NEGATE_EXPR
1559 && code != ABS_EXPR)
1560 return NULL_TREE;
1561
1562 switch (code)
1563 {
1564 CASE_CONVERT:
1565 case FLOAT_EXPR:
1566 case FIX_TRUNC_EXPR:
1567 case FIXED_CONVERT_EXPR:
1568 return fold_convert_const (code, type, arg0);
1569
1570 case ADDR_SPACE_CONVERT_EXPR:
1571 /* If the source address is 0, and the source address space
1572 cannot have a valid object at 0, fold to dest type null. */
1573 if (integer_zerop (arg0)
1574 && !(targetm.addr_space.zero_address_valid
1575 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1576 return fold_convert_const (code, type, arg0);
1577 break;
1578
1579 case VIEW_CONVERT_EXPR:
1580 return fold_view_convert_expr (type, arg0);
1581
1582 case NEGATE_EXPR:
1583 {
1584 /* Can't call fold_negate_const directly here as that doesn't
1585 handle all cases and we might not be able to negate some
1586 constants. */
1587 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1588 if (tem && CONSTANT_CLASS_P (tem))
1589 return tem;
1590 break;
1591 }
1592
1593 case ABS_EXPR:
1594 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1595 return fold_abs_const (arg0, type);
1596 break;
1597
1598 case CONJ_EXPR:
1599 if (TREE_CODE (arg0) == COMPLEX_CST)
1600 {
1601 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1602 TREE_TYPE (type));
1603 return build_complex (type, TREE_REALPART (arg0), ipart);
1604 }
1605 break;
1606
1607 case BIT_NOT_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST)
1609 return fold_not_const (arg0, type);
1610 /* Perform BIT_NOT_EXPR on each element individually. */
1611 else if (TREE_CODE (arg0) == VECTOR_CST)
1612 {
1613 tree *elements;
1614 tree elem;
1615 unsigned count = VECTOR_CST_NELTS (arg0), i;
1616
1617 elements = XALLOCAVEC (tree, count);
1618 for (i = 0; i < count; i++)
1619 {
1620 elem = VECTOR_CST_ELT (arg0, i);
1621 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1622 if (elem == NULL_TREE)
1623 break;
1624 elements[i] = elem;
1625 }
1626 if (i == count)
1627 return build_vector (type, elements);
1628 }
1629 break;
1630
1631 case TRUTH_NOT_EXPR:
1632 if (TREE_CODE (arg0) == INTEGER_CST)
1633 return constant_boolean_node (integer_zerop (arg0), type);
1634 break;
1635
1636 case REALPART_EXPR:
1637 if (TREE_CODE (arg0) == COMPLEX_CST)
1638 return fold_convert (type, TREE_REALPART (arg0));
1639 break;
1640
1641 case IMAGPART_EXPR:
1642 if (TREE_CODE (arg0) == COMPLEX_CST)
1643 return fold_convert (type, TREE_IMAGPART (arg0));
1644 break;
1645
1646 case VEC_UNPACK_LO_EXPR:
1647 case VEC_UNPACK_HI_EXPR:
1648 case VEC_UNPACK_FLOAT_LO_EXPR:
1649 case VEC_UNPACK_FLOAT_HI_EXPR:
1650 {
1651 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1652 tree *elts;
1653 enum tree_code subcode;
1654
1655 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1656 if (TREE_CODE (arg0) != VECTOR_CST)
1657 return NULL_TREE;
1658
1659 elts = XALLOCAVEC (tree, nelts * 2);
1660 if (!vec_cst_ctor_to_array (arg0, elts))
1661 return NULL_TREE;
1662
1663 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1664 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1665 elts += nelts;
1666
1667 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1668 subcode = NOP_EXPR;
1669 else
1670 subcode = FLOAT_EXPR;
1671
1672 for (i = 0; i < nelts; i++)
1673 {
1674 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1675 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1676 return NULL_TREE;
1677 }
1678
1679 return build_vector (type, elts);
1680 }
1681
1682 case REDUC_MIN_EXPR:
1683 case REDUC_MAX_EXPR:
1684 case REDUC_PLUS_EXPR:
1685 {
1686 unsigned int nelts, i;
1687 tree *elts;
1688 enum tree_code subcode;
1689
1690 if (TREE_CODE (arg0) != VECTOR_CST)
1691 return NULL_TREE;
1692 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1693
1694 elts = XALLOCAVEC (tree, nelts);
1695 if (!vec_cst_ctor_to_array (arg0, elts))
1696 return NULL_TREE;
1697
1698 switch (code)
1699 {
1700 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1701 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1702 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1703 default: gcc_unreachable ();
1704 }
1705
1706 for (i = 1; i < nelts; i++)
1707 {
1708 elts[0] = const_binop (subcode, elts[0], elts[i]);
1709 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1710 return NULL_TREE;
1711 }
1712
1713 return elts[0];
1714 }
1715
1716 default:
1717 break;
1718 }
1719
1720 return NULL_TREE;
1721 }
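
/* For example (editor's illustration):

     const_unop (REDUC_PLUS_EXPR, int, {1, 2, 3, 4})  ~~>  10
     const_unop (BIT_NOT_EXPR, int, 0)                ~~>  -1

   each obtained by folding the scalar sub-operations shown above.  */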
1722
1723 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1724 indicates which particular sizetype to create. */
1725
1726 tree
1727 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1728 {
1729 return build_int_cst (sizetype_tab[(int) kind], number);
1730 }
1731 \f
1732 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1733 is a tree code. The type of the result is taken from the operands.
1734    Both must be equivalent integer types, a la int_binop_types_match_p.
1735 If the operands are constant, so is the result. */
1736
1737 tree
1738 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1739 {
1740 tree type = TREE_TYPE (arg0);
1741
1742 if (arg0 == error_mark_node || arg1 == error_mark_node)
1743 return error_mark_node;
1744
1745 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1746 TREE_TYPE (arg1)));
1747
1748 /* Handle the special case of two integer constants faster. */
1749 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1750 {
1751 /* And some specific cases even faster than that. */
1752 if (code == PLUS_EXPR)
1753 {
1754 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1755 return arg1;
1756 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1757 return arg0;
1758 }
1759 else if (code == MINUS_EXPR)
1760 {
1761 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1762 return arg0;
1763 }
1764 else if (code == MULT_EXPR)
1765 {
1766 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1767 return arg1;
1768 }
1769
1770 /* Handle general case of two integer constants. For sizetype
1771 constant calculations we always want to know about overflow,
1772 even in the unsigned case. */
1773 return int_const_binop_1 (code, arg0, arg1, -1);
1774 }
1775
1776 return fold_build2_loc (loc, code, type, arg0, arg1);
1777 }
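
/* For example (editor's illustration):

     size_binop (PLUS_EXPR, size_int (0), size_int (7))  ~~>  7, returned
       directly by the fast path above;
     size_binop (MULT_EXPR, size_int (4), size_int (8))  ~~>  32,

   with overflow always tracked (OVERFLOWABLE == -1) even though sizetype
   is unsigned.  */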
1778
1779 /* Given two values, either both of sizetype or both of bitsizetype,
1780 compute the difference between the two values. Return the value
1781 in signed type corresponding to the type of the operands. */
1782
1783 tree
1784 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1785 {
1786 tree type = TREE_TYPE (arg0);
1787 tree ctype;
1788
1789 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1790 TREE_TYPE (arg1)));
1791
1792 /* If the type is already signed, just do the simple thing. */
1793 if (!TYPE_UNSIGNED (type))
1794 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1795
1796 if (type == sizetype)
1797 ctype = ssizetype;
1798 else if (type == bitsizetype)
1799 ctype = sbitsizetype;
1800 else
1801 ctype = signed_type_for (type);
1802
1803 /* If either operand is not a constant, do the conversions to the signed
1804 type and subtract. The hardware will do the right thing with any
1805 overflow in the subtraction. */
1806 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1807 return size_binop_loc (loc, MINUS_EXPR,
1808 fold_convert_loc (loc, ctype, arg0),
1809 fold_convert_loc (loc, ctype, arg1));
1810
1811 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1812 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1813 overflow) and negate (which can't either). Special-case a result
1814 of zero while we're here. */
1815 if (tree_int_cst_equal (arg0, arg1))
1816 return build_int_cst (ctype, 0);
1817 else if (tree_int_cst_lt (arg1, arg0))
1818 return fold_convert_loc (loc, ctype,
1819 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1820 else
1821 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1822 fold_convert_loc (loc, ctype,
1823 size_binop_loc (loc,
1824 MINUS_EXPR,
1825 arg1, arg0)));
1826 }
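
/* For example (editor's illustration): with sizetype operands 2 and 5,
   size_diffop returns the ssizetype value -3, computed as 0 - (5 - 2) so
   that the unsigned subtraction 2 - 5, which would wrap, is never formed.  */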
1827 \f
1828 /* A subroutine of fold_convert_const handling conversions of an
1829 INTEGER_CST to another integer type. */
1830
1831 static tree
1832 fold_convert_const_int_from_int (tree type, const_tree arg1)
1833 {
1834 /* Given an integer constant, make new constant with new type,
1835 appropriately sign-extended or truncated. Use widest_int
1836    so that any extension is done according to ARG1's type.  */
1837 return force_fit_type (type, wi::to_widest (arg1),
1838 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1839 TREE_OVERFLOW (arg1));
1840 }
1841
1842 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1843 to an integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1847 {
1848 bool overflow = false;
1849 tree t;
1850
1851 /* The following code implements the floating point to integer
1852 conversion rules required by the Java Language Specification,
1853 that IEEE NaNs are mapped to zero and values that overflow
1854 the target precision saturate, i.e. values greater than
1855 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1856 are mapped to INT_MIN. These semantics are allowed by the
1857 C and C++ standards that simply state that the behavior of
1858 FP-to-integer conversion is unspecified upon overflow. */
1859
1860 wide_int val;
1861 REAL_VALUE_TYPE r;
1862 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1863
1864 switch (code)
1865 {
1866 case FIX_TRUNC_EXPR:
1867 real_trunc (&r, VOIDmode, &x);
1868 break;
1869
1870 default:
1871 gcc_unreachable ();
1872 }
1873
1874 /* If R is NaN, return zero and show we have an overflow. */
1875 if (REAL_VALUE_ISNAN (r))
1876 {
1877 overflow = true;
1878 val = wi::zero (TYPE_PRECISION (type));
1879 }
1880
1881 /* See if R is less than the lower bound or greater than the
1882 upper bound. */
1883
1884 if (! overflow)
1885 {
1886 tree lt = TYPE_MIN_VALUE (type);
1887 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1888 if (real_less (&r, &l))
1889 {
1890 overflow = true;
1891 val = lt;
1892 }
1893 }
1894
1895 if (! overflow)
1896 {
1897 tree ut = TYPE_MAX_VALUE (type);
1898 if (ut)
1899 {
1900 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1901 if (real_less (&u, &r))
1902 {
1903 overflow = true;
1904 val = ut;
1905 }
1906 }
1907 }
1908
1909 if (! overflow)
1910 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1911
1912 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1913 return t;
1914 }
1915
1916 /* A subroutine of fold_convert_const handling conversions of a
1917 FIXED_CST to an integer type. */
1918
1919 static tree
1920 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1921 {
1922 tree t;
1923 double_int temp, temp_trunc;
1924 machine_mode mode;
1925
1926 /* Right shift FIXED_CST to temp by fbit. */
1927 temp = TREE_FIXED_CST (arg1).data;
1928 mode = TREE_FIXED_CST (arg1).mode;
1929 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1930 {
1931 temp = temp.rshift (GET_MODE_FBIT (mode),
1932 HOST_BITS_PER_DOUBLE_INT,
1933 SIGNED_FIXED_POINT_MODE_P (mode));
1934
1935 /* Left shift temp to temp_trunc by fbit. */
1936 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1937 HOST_BITS_PER_DOUBLE_INT,
1938 SIGNED_FIXED_POINT_MODE_P (mode));
1939 }
1940 else
1941 {
1942 temp = double_int_zero;
1943 temp_trunc = double_int_zero;
1944 }
1945
1946 /* If FIXED_CST is negative, we need to round the value toward 0.
1947 We do that by adding 1 to temp when any fractional bits are nonzero. */
1948 if (SIGNED_FIXED_POINT_MODE_P (mode)
1949 && temp_trunc.is_negative ()
1950 && TREE_FIXED_CST (arg1).data != temp_trunc)
1951 temp += double_int_one;
1952
1953 /* Given a fixed-point constant, make new constant with new type,
1954 appropriately sign-extended or truncated. */
1955 t = force_fit_type (type, temp, -1,
1956 (temp.is_negative ()
1957 && (TYPE_UNSIGNED (type)
1958 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1959 | TREE_OVERFLOW (arg1));
1960
1961 return t;
1962 }
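
/* For example (an editor's illustration): converting the fixed-point
value -2.5 to an integer type yields -2 rather than -3; the right
shift above computes the floor, -3, and the rounding step adds 1
back because the discarded fractional bits were nonzero. */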
1963
1964 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1965 to another floating-point type. */
1966
1967 static tree
1968 fold_convert_const_real_from_real (tree type, const_tree arg1)
1969 {
1970 REAL_VALUE_TYPE value;
1971 tree t;
1972
1973 /* Don't perform the operation if flag_signaling_nans is on
1974 and the operand is a signaling NaN. */
1975 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1976 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1977 return NULL_TREE;
1978
1979 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1980 t = build_real (type, value);
1981
1982 /* If converting an infinity or NAN to a representation that doesn't
1983 have one, set the overflow bit so that we can produce some kind of
1984 error message at the appropriate point if necessary. It's not the
1985 most user-friendly message, but it's better than nothing. */
1986 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1987 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1988 TREE_OVERFLOW (t) = 1;
1989 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1990 && !MODE_HAS_NANS (TYPE_MODE (type)))
1991 TREE_OVERFLOW (t) = 1;
1992 /* Regular overflow, conversion produced an infinity in a mode that
1993 can't represent them. */
1994 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1995 && REAL_VALUE_ISINF (value)
1996 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1997 TREE_OVERFLOW (t) = 1;
1998 else
1999 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2000 return t;
2001 }
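
/* For example (an editor's illustration): folding (float) 1.0e300,
where 1.0e300 is a REAL_CST of type double, gives +Inf in IEEE
single precision; TREE_OVERFLOW is set only when the target mode
cannot represent the infinity or NaN, as tested above. */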
2002
2003 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2004 to a floating-point type. */
2005
2006 static tree
2007 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2008 {
2009 REAL_VALUE_TYPE value;
2010 tree t;
2011
2012 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2013 t = build_real (type, value);
2014
2015 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2016 return t;
2017 }
2018
2019 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2020 to another fixed-point type. */
2021
2022 static tree
2023 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2024 {
2025 FIXED_VALUE_TYPE value;
2026 tree t;
2027 bool overflow_p;
2028
2029 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2030 TYPE_SATURATING (type));
2031 t = build_fixed (type, value);
2032
2033 /* Propagate overflow flags. */
2034 if (overflow_p | TREE_OVERFLOW (arg1))
2035 TREE_OVERFLOW (t) = 1;
2036 return t;
2037 }
2038
2039 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2040 to a fixed-point type. */
2041
2042 static tree
2043 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2044 {
2045 FIXED_VALUE_TYPE value;
2046 tree t;
2047 bool overflow_p;
2048 double_int di;
2049
2050 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2051
2052 di.low = TREE_INT_CST_ELT (arg1, 0);
2053 if (TREE_INT_CST_NUNITS (arg1) == 1)
2054 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2055 else
2056 di.high = TREE_INT_CST_ELT (arg1, 1);
2057
2058 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2059 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2060 TYPE_SATURATING (type));
2061 t = build_fixed (type, value);
2062
2063 /* Propagate overflow flags. */
2064 if (overflow_p | TREE_OVERFLOW (arg1))
2065 TREE_OVERFLOW (t) = 1;
2066 return t;
2067 }
2068
2069 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2070 to a fixed-point type. */
2071
2072 static tree
2073 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2074 {
2075 FIXED_VALUE_TYPE value;
2076 tree t;
2077 bool overflow_p;
2078
2079 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2080 &TREE_REAL_CST (arg1),
2081 TYPE_SATURATING (type));
2082 t = build_fixed (type, value);
2083
2084 /* Propagate overflow flags. */
2085 if (overflow_p | TREE_OVERFLOW (arg1))
2086 TREE_OVERFLOW (t) = 1;
2087 return t;
2088 }
2089
2090 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2091 type TYPE. If no simplification can be done return NULL_TREE. */
2092
2093 static tree
2094 fold_convert_const (enum tree_code code, tree type, tree arg1)
2095 {
2096 if (TREE_TYPE (arg1) == type)
2097 return arg1;
2098
2099 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2100 || TREE_CODE (type) == OFFSET_TYPE)
2101 {
2102 if (TREE_CODE (arg1) == INTEGER_CST)
2103 return fold_convert_const_int_from_int (type, arg1);
2104 else if (TREE_CODE (arg1) == REAL_CST)
2105 return fold_convert_const_int_from_real (code, type, arg1);
2106 else if (TREE_CODE (arg1) == FIXED_CST)
2107 return fold_convert_const_int_from_fixed (type, arg1);
2108 }
2109 else if (TREE_CODE (type) == REAL_TYPE)
2110 {
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return build_real_from_int_cst (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_real_from_real (type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_real_from_fixed (type, arg1);
2117 }
2118 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2119 {
2120 if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_fixed_from_fixed (type, arg1);
2122 else if (TREE_CODE (arg1) == INTEGER_CST)
2123 return fold_convert_const_fixed_from_int (type, arg1);
2124 else if (TREE_CODE (arg1) == REAL_CST)
2125 return fold_convert_const_fixed_from_real (type, arg1);
2126 }
2127 else if (TREE_CODE (type) == VECTOR_TYPE)
2128 {
2129 if (TREE_CODE (arg1) == VECTOR_CST
2130 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2131 {
2132 int len = TYPE_VECTOR_SUBPARTS (type);
2133 tree elttype = TREE_TYPE (type);
2134 tree *v = XALLOCAVEC (tree, len);
2135 for (int i = 0; i < len; ++i)
2136 {
2137 tree elt = VECTOR_CST_ELT (arg1, i);
2138 tree cvt = fold_convert_const (code, elttype, elt);
2139 if (cvt == NULL_TREE)
2140 return NULL_TREE;
2141 v[i] = cvt;
2142 }
2143 return build_vector (type, v);
2144 }
2145 }
2146 return NULL_TREE;
2147 }
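
/* Dispatch examples (an editor's sketch; SOME_REAL_CST and
SOME_INT_CST are hypothetical operands):

fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, some_real_cst)
-> fold_convert_const_int_from_real
fold_convert_const (NOP_EXPR, long_long_integer_type_node, some_int_cst)
-> fold_convert_const_int_from_int

A NULL_TREE result simply means "no constant fold"; callers such as
fold_convert_loc below then build a conversion tree instead. */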
2148
2149 /* Construct a vector of zero elements of vector type TYPE. */
2150
2151 static tree
2152 build_zero_vector (tree type)
2153 {
2154 tree t;
2155
2156 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2157 return build_vector_from_val (type, t);
2158 }
2159
2160 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2161
2162 bool
2163 fold_convertible_p (const_tree type, const_tree arg)
2164 {
2165 tree orig = TREE_TYPE (arg);
2166
2167 if (type == orig)
2168 return true;
2169
2170 if (TREE_CODE (arg) == ERROR_MARK
2171 || TREE_CODE (type) == ERROR_MARK
2172 || TREE_CODE (orig) == ERROR_MARK)
2173 return false;
2174
2175 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2176 return true;
2177
2178 switch (TREE_CODE (type))
2179 {
2180 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2181 case POINTER_TYPE: case REFERENCE_TYPE:
2182 case OFFSET_TYPE:
2183 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2184 || TREE_CODE (orig) == OFFSET_TYPE)
2185 return true;
2186 return (TREE_CODE (orig) == VECTOR_TYPE
2187 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2188
2189 case REAL_TYPE:
2190 case FIXED_POINT_TYPE:
2191 case COMPLEX_TYPE:
2192 case VECTOR_TYPE:
2193 case VOID_TYPE:
2194 return TREE_CODE (type) == TREE_CODE (orig);
2195
2196 default:
2197 return false;
2198 }
2199 }
2200
2201 /* Convert expression ARG to type TYPE. Used by the middle-end for
2202 simple conversions in preference to calling the front-end's convert. */
2203
2204 tree
2205 fold_convert_loc (location_t loc, tree type, tree arg)
2206 {
2207 tree orig = TREE_TYPE (arg);
2208 tree tem;
2209
2210 if (type == orig)
2211 return arg;
2212
2213 if (TREE_CODE (arg) == ERROR_MARK
2214 || TREE_CODE (type) == ERROR_MARK
2215 || TREE_CODE (orig) == ERROR_MARK)
2216 return error_mark_node;
2217
2218 switch (TREE_CODE (type))
2219 {
2220 case POINTER_TYPE:
2221 case REFERENCE_TYPE:
2222 /* Handle conversions between pointers to different address spaces. */
2223 if (POINTER_TYPE_P (orig)
2224 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2225 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2226 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2227 /* fall through */
2228
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case OFFSET_TYPE:
2231 if (TREE_CODE (arg) == INTEGER_CST)
2232 {
2233 tem = fold_convert_const (NOP_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2235 return tem;
2236 }
2237 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2238 || TREE_CODE (orig) == OFFSET_TYPE)
2239 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2240 if (TREE_CODE (orig) == COMPLEX_TYPE)
2241 return fold_convert_loc (loc, type,
2242 fold_build1_loc (loc, REALPART_EXPR,
2243 TREE_TYPE (orig), arg));
2244 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2245 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2247
2248 case REAL_TYPE:
2249 if (TREE_CODE (arg) == INTEGER_CST)
2250 {
2251 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2252 if (tem != NULL_TREE)
2253 return tem;
2254 }
2255 else if (TREE_CODE (arg) == REAL_CST)
2256 {
2257 tem = fold_convert_const (NOP_EXPR, type, arg);
2258 if (tem != NULL_TREE)
2259 return tem;
2260 }
2261 else if (TREE_CODE (arg) == FIXED_CST)
2262 {
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 return tem;
2266 }
2267
2268 switch (TREE_CODE (orig))
2269 {
2270 case INTEGER_TYPE:
2271 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2272 case POINTER_TYPE: case REFERENCE_TYPE:
2273 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2274
2275 case REAL_TYPE:
2276 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2277
2278 case FIXED_POINT_TYPE:
2279 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2280
2281 case COMPLEX_TYPE:
2282 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2283 return fold_convert_loc (loc, type, tem);
2284
2285 default:
2286 gcc_unreachable ();
2287 }
2288
2289 case FIXED_POINT_TYPE:
2290 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2291 || TREE_CODE (arg) == REAL_CST)
2292 {
2293 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2294 if (tem != NULL_TREE)
2295 goto fold_convert_exit;
2296 }
2297
2298 switch (TREE_CODE (orig))
2299 {
2300 case FIXED_POINT_TYPE:
2301 case INTEGER_TYPE:
2302 case ENUMERAL_TYPE:
2303 case BOOLEAN_TYPE:
2304 case REAL_TYPE:
2305 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2306
2307 case COMPLEX_TYPE:
2308 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2309 return fold_convert_loc (loc, type, tem);
2310
2311 default:
2312 gcc_unreachable ();
2313 }
2314
2315 case COMPLEX_TYPE:
2316 switch (TREE_CODE (orig))
2317 {
2318 case INTEGER_TYPE:
2319 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2320 case POINTER_TYPE: case REFERENCE_TYPE:
2321 case REAL_TYPE:
2322 case FIXED_POINT_TYPE:
2323 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2324 fold_convert_loc (loc, TREE_TYPE (type), arg),
2325 fold_convert_loc (loc, TREE_TYPE (type),
2326 integer_zero_node));
2327 case COMPLEX_TYPE:
2328 {
2329 tree rpart, ipart;
2330
2331 if (TREE_CODE (arg) == COMPLEX_EXPR)
2332 {
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2334 TREE_OPERAND (arg, 0));
2335 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2336 TREE_OPERAND (arg, 1));
2337 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2338 }
2339
2340 arg = save_expr (arg);
2341 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2342 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2343 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2344 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2345 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2346 }
2347
2348 default:
2349 gcc_unreachable ();
2350 }
2351
2352 case VECTOR_TYPE:
2353 if (integer_zerop (arg))
2354 return build_zero_vector (type);
2355 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2356 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2357 || TREE_CODE (orig) == VECTOR_TYPE);
2358 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2359
2360 case VOID_TYPE:
2361 tem = fold_ignored_result (arg);
2362 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2363
2364 default:
2365 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2366 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2367 gcc_unreachable ();
2368 }
2369 fold_convert_exit:
2370 protected_set_expr_location_unshare (tem, loc);
2371 return tem;
2372 }
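
/* Usage sketch (an editor's illustration; EXPR is a placeholder):

tree t = fold_convert_loc (loc, double_type_node, expr);

If EXPR is the INTEGER_CST 3, this folds directly to the REAL_CST
3.0 through fold_convert_const; otherwise it wraps EXPR in the
appropriate FLOAT_EXPR, NOP_EXPR or FIXED_CONVERT_EXPR node. */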
2373 \f
2374 /* Return false if expr can be assumed not to be an lvalue, true
2375 otherwise. */
2376
2377 static bool
2378 maybe_lvalue_p (const_tree x)
2379 {
2380 /* We only need to wrap lvalue tree codes. */
2381 switch (TREE_CODE (x))
2382 {
2383 case VAR_DECL:
2384 case PARM_DECL:
2385 case RESULT_DECL:
2386 case LABEL_DECL:
2387 case FUNCTION_DECL:
2388 case SSA_NAME:
2389
2390 case COMPONENT_REF:
2391 case MEM_REF:
2392 case INDIRECT_REF:
2393 case ARRAY_REF:
2394 case ARRAY_RANGE_REF:
2395 case BIT_FIELD_REF:
2396 case OBJ_TYPE_REF:
2397
2398 case REALPART_EXPR:
2399 case IMAGPART_EXPR:
2400 case PREINCREMENT_EXPR:
2401 case PREDECREMENT_EXPR:
2402 case SAVE_EXPR:
2403 case TRY_CATCH_EXPR:
2404 case WITH_CLEANUP_EXPR:
2405 case COMPOUND_EXPR:
2406 case MODIFY_EXPR:
2407 case TARGET_EXPR:
2408 case COND_EXPR:
2409 case BIND_EXPR:
2410 break;
2411
2412 default:
2413 /* Assume the worst for front-end tree codes. */
2414 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2415 break;
2416 return false;
2417 }
2418
2419 return true;
2420 }
2421
2422 /* Return an expr equal to X but certainly not valid as an lvalue. */
2423
2424 tree
2425 non_lvalue_loc (location_t loc, tree x)
2426 {
2427 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2428 us. */
2429 if (in_gimple_form)
2430 return x;
2431
2432 if (! maybe_lvalue_p (x))
2433 return x;
2434 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2435 }
2436
2437 /* When pedantic, return an expr equal to X but certainly not valid as a
2438 pedantic lvalue. Otherwise, return X. */
2439
2440 static tree
2441 pedantic_non_lvalue_loc (location_t loc, tree x)
2442 {
2443 return protected_set_expr_location_unshare (x, loc);
2444 }
2445 \f
2446 /* Given a tree comparison code, return the code that is the logical inverse.
2447 It is generally not safe to do this for floating-point comparisons, except
2448 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2449 ERROR_MARK in this case. */
2450
2451 enum tree_code
2452 invert_tree_comparison (enum tree_code code, bool honor_nans)
2453 {
2454 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2455 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2456 return ERROR_MARK;
2457
2458 switch (code)
2459 {
2460 case EQ_EXPR:
2461 return NE_EXPR;
2462 case NE_EXPR:
2463 return EQ_EXPR;
2464 case GT_EXPR:
2465 return honor_nans ? UNLE_EXPR : LE_EXPR;
2466 case GE_EXPR:
2467 return honor_nans ? UNLT_EXPR : LT_EXPR;
2468 case LT_EXPR:
2469 return honor_nans ? UNGE_EXPR : GE_EXPR;
2470 case LE_EXPR:
2471 return honor_nans ? UNGT_EXPR : GT_EXPR;
2472 case LTGT_EXPR:
2473 return UNEQ_EXPR;
2474 case UNEQ_EXPR:
2475 return LTGT_EXPR;
2476 case UNGT_EXPR:
2477 return LE_EXPR;
2478 case UNGE_EXPR:
2479 return LT_EXPR;
2480 case UNLT_EXPR:
2481 return GE_EXPR;
2482 case UNLE_EXPR:
2483 return GT_EXPR;
2484 case ORDERED_EXPR:
2485 return UNORDERED_EXPR;
2486 case UNORDERED_EXPR:
2487 return ORDERED_EXPR;
2488 default:
2489 gcc_unreachable ();
2490 }
2491 }
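
/* For example (an editor's note): with flag_trapping_math clear,
invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR, so the
inversion of "a < b" still yields true when either operand is a
NaN; with flag_trapping_math set the same call returns ERROR_MARK,
because "a unge b" would not trap on a NaN while "a < b" does. */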
2492
2493 /* Similar, but return the comparison that results if the operands are
2494 swapped. This is safe for floating-point. */
2495
2496 enum tree_code
2497 swap_tree_comparison (enum tree_code code)
2498 {
2499 switch (code)
2500 {
2501 case EQ_EXPR:
2502 case NE_EXPR:
2503 case ORDERED_EXPR:
2504 case UNORDERED_EXPR:
2505 case LTGT_EXPR:
2506 case UNEQ_EXPR:
2507 return code;
2508 case GT_EXPR:
2509 return LT_EXPR;
2510 case GE_EXPR:
2511 return LE_EXPR;
2512 case LT_EXPR:
2513 return GT_EXPR;
2514 case LE_EXPR:
2515 return GE_EXPR;
2516 case UNGT_EXPR:
2517 return UNLT_EXPR;
2518 case UNGE_EXPR:
2519 return UNLE_EXPR;
2520 case UNLT_EXPR:
2521 return UNGT_EXPR;
2522 case UNLE_EXPR:
2523 return UNGE_EXPR;
2524 default:
2525 gcc_unreachable ();
2526 }
2527 }
2528
2529
2530 /* Convert a comparison tree code from an enum tree_code representation
2531 into a compcode bit-based encoding. This function is the inverse of
2532 compcode_to_comparison. */
2533
2534 static enum comparison_code
2535 comparison_to_compcode (enum tree_code code)
2536 {
2537 switch (code)
2538 {
2539 case LT_EXPR:
2540 return COMPCODE_LT;
2541 case EQ_EXPR:
2542 return COMPCODE_EQ;
2543 case LE_EXPR:
2544 return COMPCODE_LE;
2545 case GT_EXPR:
2546 return COMPCODE_GT;
2547 case NE_EXPR:
2548 return COMPCODE_NE;
2549 case GE_EXPR:
2550 return COMPCODE_GE;
2551 case ORDERED_EXPR:
2552 return COMPCODE_ORD;
2553 case UNORDERED_EXPR:
2554 return COMPCODE_UNORD;
2555 case UNLT_EXPR:
2556 return COMPCODE_UNLT;
2557 case UNEQ_EXPR:
2558 return COMPCODE_UNEQ;
2559 case UNLE_EXPR:
2560 return COMPCODE_UNLE;
2561 case UNGT_EXPR:
2562 return COMPCODE_UNGT;
2563 case LTGT_EXPR:
2564 return COMPCODE_LTGT;
2565 case UNGE_EXPR:
2566 return COMPCODE_UNGE;
2567 default:
2568 gcc_unreachable ();
2569 }
2570 }
2571
2572 /* Convert a compcode bit-based encoding of a comparison operator back
2573 to GCC's enum tree_code representation. This function is the
2574 inverse of comparison_to_compcode. */
2575
2576 static enum tree_code
2577 compcode_to_comparison (enum comparison_code code)
2578 {
2579 switch (code)
2580 {
2581 case COMPCODE_LT:
2582 return LT_EXPR;
2583 case COMPCODE_EQ:
2584 return EQ_EXPR;
2585 case COMPCODE_LE:
2586 return LE_EXPR;
2587 case COMPCODE_GT:
2588 return GT_EXPR;
2589 case COMPCODE_NE:
2590 return NE_EXPR;
2591 case COMPCODE_GE:
2592 return GE_EXPR;
2593 case COMPCODE_ORD:
2594 return ORDERED_EXPR;
2595 case COMPCODE_UNORD:
2596 return UNORDERED_EXPR;
2597 case COMPCODE_UNLT:
2598 return UNLT_EXPR;
2599 case COMPCODE_UNEQ:
2600 return UNEQ_EXPR;
2601 case COMPCODE_UNLE:
2602 return UNLE_EXPR;
2603 case COMPCODE_UNGT:
2604 return UNGT_EXPR;
2605 case COMPCODE_LTGT:
2606 return LTGT_EXPR;
2607 case COMPCODE_UNGE:
2608 return UNGE_EXPR;
2609 default:
2610 gcc_unreachable ();
2611 }
2612 }
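
/* The four bits of the encoding act as "<", "==", ">" and "unordered"
truth columns, so, for instance (an editor's note):

COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)

This is what lets combine_comparisons below implement the AND and OR
of two predicates as bitwise AND and OR of their codes. */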
2613
2614 /* Return a tree for the comparison which is the combination of
2615 doing the AND or OR (depending on CODE) of the two operations LCODE
2616 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2617 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2618 if this makes the transformation invalid. */
2619
2620 tree
2621 combine_comparisons (location_t loc,
2622 enum tree_code code, enum tree_code lcode,
2623 enum tree_code rcode, tree truth_type,
2624 tree ll_arg, tree lr_arg)
2625 {
2626 bool honor_nans = HONOR_NANS (ll_arg);
2627 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2628 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2629 int compcode;
2630
2631 switch (code)
2632 {
2633 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2634 compcode = lcompcode & rcompcode;
2635 break;
2636
2637 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2638 compcode = lcompcode | rcompcode;
2639 break;
2640
2641 default:
2642 return NULL_TREE;
2643 }
2644
2645 if (!honor_nans)
2646 {
2647 /* Eliminate unordered comparisons, as well as LTGT and ORD
2648 which are not used unless the mode has NaNs. */
2649 compcode &= ~COMPCODE_UNORD;
2650 if (compcode == COMPCODE_LTGT)
2651 compcode = COMPCODE_NE;
2652 else if (compcode == COMPCODE_ORD)
2653 compcode = COMPCODE_TRUE;
2654 }
2655 else if (flag_trapping_math)
2656 {
2657 /* Check that the original operation and the optimized ones will trap
2658 under the same condition. */
2659 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2660 && (lcompcode != COMPCODE_EQ)
2661 && (lcompcode != COMPCODE_ORD);
2662 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2663 && (rcompcode != COMPCODE_EQ)
2664 && (rcompcode != COMPCODE_ORD);
2665 bool trap = (compcode & COMPCODE_UNORD) == 0
2666 && (compcode != COMPCODE_EQ)
2667 && (compcode != COMPCODE_ORD);
2668
2669 /* In a short-circuited boolean expression the LHS might be
2670 such that the RHS, if evaluated, will never trap. For
2671 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2672 if neither x nor y is NaN. (This is a mixed blessing: for
2673 example, the expression above will never trap, hence
2674 optimizing it to x < y would be invalid). */
2675 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2676 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2677 rtrap = false;
2678
2679 /* If the comparison was short-circuited, and only the RHS
2680 trapped, we may now generate a spurious trap. */
2681 if (rtrap && !ltrap
2682 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2683 return NULL_TREE;
2684
2685 /* If we changed the conditions that cause a trap, we lose. */
2686 if ((ltrap || rtrap) != trap)
2687 return NULL_TREE;
2688 }
2689
2690 if (compcode == COMPCODE_TRUE)
2691 return constant_boolean_node (true, truth_type);
2692 else if (compcode == COMPCODE_FALSE)
2693 return constant_boolean_node (false, truth_type);
2694 else
2695 {
2696 enum tree_code tcode;
2697
2698 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2699 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2700 }
2701 }
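
/* For example (an editor's illustration, ignoring NaNs): combining
"x < y" and "x == y" with TRUTH_OR_EXPR yields "x <= y", since
COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, while combining "x < y"
and "x > y" with TRUTH_AND_EXPR yields constant false, since
COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE. */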
2702 \f
2703 /* Return nonzero if two operands (typically of the same tree node)
2704 are necessarily equal. FLAGS modifies behavior as follows:
2705
2706 If OEP_ONLY_CONST is set, only return nonzero for constants.
2707 This function tests whether the operands are indistinguishable;
2708 it does not test whether they are equal using C's == operation.
2709 The distinction is important for IEEE floating point, because
2710 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2711 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2712
2713 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2714 even though it may hold multiple values during a function.
2715 This is because a GCC tree node guarantees that nothing else is
2716 executed between the evaluation of its "operands" (which may often
2717 be evaluated in arbitrary order). Hence if the operands themselves
2718 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2719 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2720 unset means assuming isochronic (or instantaneous) tree equivalence.
2721 Unless comparing arbitrary expression trees, such as from different
2722 statements, this flag can usually be left unset.
2723
2724 If OEP_PURE_SAME is set, then pure functions with identical arguments
2725 are considered the same. It is used when the caller has other ways
2726 to ensure that global memory is unchanged in between.
2727
2728 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2729 not values of expressions.
2730
2731 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2732 any operand with side effects. This is unnecessarily conservative in
2733 the case where we know that arg0 and arg1 are in disjoint code paths
2734 (such as in the ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is
2735 used when comparing addresses with the TREE_CONSTANT flag set, so we
2736 know that &var == &var even if var is volatile. */
2737
2738 int
2739 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2740 {
2741 /* If either is ERROR_MARK, they aren't equal. */
2742 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2743 || TREE_TYPE (arg0) == error_mark_node
2744 || TREE_TYPE (arg1) == error_mark_node)
2745 return 0;
2746
2747 /* Similarly, if either does not have a type (like a released SSA name),
2748 they aren't equal. */
2749 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2750 return 0;
2751
2752 /* We cannot consider pointers to different address space equal. */
2753 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2754 && POINTER_TYPE_P (TREE_TYPE (arg1))
2755 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2756 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2757 return 0;
2758
2759 /* Check equality of integer constants before bailing out due to
2760 precision differences. */
2761 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2762 {
2763 /* Address of INTEGER_CST is not defined; check that we did not forget
2764 to drop the OEP_ADDRESS_OF flags. */
2765 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2766 return tree_int_cst_equal (arg0, arg1);
2767 }
2768
2769 if (!(flags & OEP_ADDRESS_OF))
2770 {
2771 /* If both types don't have the same signedness, then we can't consider
2772 them equal. We must check this before the STRIP_NOPS calls
2773 because they may change the signedness of the arguments. As pointers
2774 strictly don't have a signedness, require either two pointers or
2775 two non-pointers as well. */
2776 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2777 || POINTER_TYPE_P (TREE_TYPE (arg0))
2778 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2779 return 0;
2780
2781 /* If both types don't have the same precision, then it is not safe
2782 to strip NOPs. */
2783 if (element_precision (TREE_TYPE (arg0))
2784 != element_precision (TREE_TYPE (arg1)))
2785 return 0;
2786
2787 STRIP_NOPS (arg0);
2788 STRIP_NOPS (arg1);
2789 }
2790 #if 0
2791 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2792 sanity check once the issue is solved. */
2793 else
2794 /* Addresses of conversions and SSA_NAMEs (and many other things)
2795 are not defined. Check that we did not forget to drop the
2796 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2797 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2798 && TREE_CODE (arg0) != SSA_NAME);
2799 #endif
2800
2801 /* In case both args are comparisons but with different comparison
2802 code, try to swap the comparison operands of one arg to produce
2803 a match and compare that variant. */
2804 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2805 && COMPARISON_CLASS_P (arg0)
2806 && COMPARISON_CLASS_P (arg1))
2807 {
2808 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2809
2810 if (TREE_CODE (arg0) == swap_code)
2811 return operand_equal_p (TREE_OPERAND (arg0, 0),
2812 TREE_OPERAND (arg1, 1), flags)
2813 && operand_equal_p (TREE_OPERAND (arg0, 1),
2814 TREE_OPERAND (arg1, 0), flags);
2815 }
2816
2817 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2818 {
2819 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2820 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2821 ;
2822 else if (flags & OEP_ADDRESS_OF)
2823 {
2824 /* If we are interested in comparing addresses, ignore
2825 MEM_REF wrappings of the base that can appear just for
2826 TBAA reasons. */
2827 if (TREE_CODE (arg0) == MEM_REF
2828 && DECL_P (arg1)
2829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2830 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2831 && integer_zerop (TREE_OPERAND (arg0, 1)))
2832 return 1;
2833 else if (TREE_CODE (arg1) == MEM_REF
2834 && DECL_P (arg0)
2835 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2836 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2837 && integer_zerop (TREE_OPERAND (arg1, 1)))
2838 return 1;
2839 return 0;
2840 }
2841 else
2842 return 0;
2843 }
2844
2845 /* When not checking addresses, this is needed for conversions and for
2846 COMPONENT_REF. Might as well play it safe and always test this. */
2847 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2848 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2849 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2850 && !(flags & OEP_ADDRESS_OF)))
2851 return 0;
2852
2853 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2854 We don't care about side effects in that case because the SAVE_EXPR
2855 takes care of that for us. In all other cases, two expressions are
2856 equal if they have no side effects. If we have two identical
2857 expressions with side effects that should be treated the same due
2858 to the only side effects being identical SAVE_EXPR's, that will
2859 be detected in the recursive calls below.
2860 If we are taking an invariant address of two identical objects
2861 they are necessarily equal as well. */
2862 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2863 && (TREE_CODE (arg0) == SAVE_EXPR
2864 || (flags & OEP_MATCH_SIDE_EFFECTS)
2865 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2866 return 1;
2867
2868 /* Next handle constant cases, those for which we can return 1 even
2869 if ONLY_CONST is set. */
2870 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2871 switch (TREE_CODE (arg0))
2872 {
2873 case INTEGER_CST:
2874 return tree_int_cst_equal (arg0, arg1);
2875
2876 case FIXED_CST:
2877 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2878 TREE_FIXED_CST (arg1));
2879
2880 case REAL_CST:
2881 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2882 return 1;
2883
2885 if (!HONOR_SIGNED_ZEROS (arg0))
2886 {
2887 /* If we do not distinguish between signed and unsigned zero,
2888 consider them equal. */
2889 if (real_zerop (arg0) && real_zerop (arg1))
2890 return 1;
2891 }
2892 return 0;
2893
2894 case VECTOR_CST:
2895 {
2896 unsigned i;
2897
2898 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2899 return 0;
2900
2901 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2902 {
2903 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2904 VECTOR_CST_ELT (arg1, i), flags))
2905 return 0;
2906 }
2907 return 1;
2908 }
2909
2910 case COMPLEX_CST:
2911 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2912 flags)
2913 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2914 flags));
2915
2916 case STRING_CST:
2917 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2918 && ! memcmp (TREE_STRING_POINTER (arg0),
2919 TREE_STRING_POINTER (arg1),
2920 TREE_STRING_LENGTH (arg0)));
2921
2922 case ADDR_EXPR:
2923 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2924 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2925 flags | OEP_ADDRESS_OF
2926 | OEP_MATCH_SIDE_EFFECTS);
2927 case CONSTRUCTOR:
2928 /* In GIMPLE empty constructors are allowed in initializers of
2929 aggregates. */
2930 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2931 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2932 default:
2933 break;
2934 }
2935
2936 if (flags & OEP_ONLY_CONST)
2937 return 0;
2938
2939 /* Define macros to test an operand from arg0 and arg1 for equality and a
2940 variant that allows null and views null as being different from any
2941 non-null value. In the latter case, if either is null, they both
2942 must be; otherwise, do the normal comparison. */
2943 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2944 TREE_OPERAND (arg1, N), flags)
2945
2946 #define OP_SAME_WITH_NULL(N) \
2947 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2948 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2949
2950 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2951 {
2952 case tcc_unary:
2953 /* Two conversions are equal only if signedness and modes match. */
2954 switch (TREE_CODE (arg0))
2955 {
2956 CASE_CONVERT:
2957 case FIX_TRUNC_EXPR:
2958 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2959 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2960 return 0;
2961 break;
2962 default:
2963 break;
2964 }
2965
2966 return OP_SAME (0);
2967
2968
2969 case tcc_comparison:
2970 case tcc_binary:
2971 if (OP_SAME (0) && OP_SAME (1))
2972 return 1;
2973
2974 /* For commutative ops, allow the other order. */
2975 return (commutative_tree_code (TREE_CODE (arg0))
2976 && operand_equal_p (TREE_OPERAND (arg0, 0),
2977 TREE_OPERAND (arg1, 1), flags)
2978 && operand_equal_p (TREE_OPERAND (arg0, 1),
2979 TREE_OPERAND (arg1, 0), flags));
2980
2981 case tcc_reference:
2982 /* If either of the pointer (or reference) expressions we are
2983 dereferencing contain a side effect, these cannot be equal,
2984 but their addresses can be. */
2985 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2986 && (TREE_SIDE_EFFECTS (arg0)
2987 || TREE_SIDE_EFFECTS (arg1)))
2988 return 0;
2989
2990 switch (TREE_CODE (arg0))
2991 {
2992 case INDIRECT_REF:
2993 if (!(flags & OEP_ADDRESS_OF)
2994 && (TYPE_ALIGN (TREE_TYPE (arg0))
2995 != TYPE_ALIGN (TREE_TYPE (arg1))))
2996 return 0;
2997 flags &= ~OEP_ADDRESS_OF;
2998 return OP_SAME (0);
2999
3000 case REALPART_EXPR:
3001 case IMAGPART_EXPR:
3002 case VIEW_CONVERT_EXPR:
3003 return OP_SAME (0);
3004
3005 case TARGET_MEM_REF:
3006 case MEM_REF:
3007 if (!(flags & OEP_ADDRESS_OF))
3008 {
3009 /* Require equal access sizes */
3010 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3011 && (!TYPE_SIZE (TREE_TYPE (arg0))
3012 || !TYPE_SIZE (TREE_TYPE (arg1))
3013 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3014 TYPE_SIZE (TREE_TYPE (arg1)),
3015 flags)))
3016 return 0;
3017 /* Verify that accesses are TBAA compatible. */
3018 if (!alias_ptr_types_compatible_p
3019 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3020 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3021 || (MR_DEPENDENCE_CLIQUE (arg0)
3022 != MR_DEPENDENCE_CLIQUE (arg1))
3023 || (MR_DEPENDENCE_BASE (arg0)
3024 != MR_DEPENDENCE_BASE (arg1)))
3025 return 0;
3026 /* Verify that alignment is compatible. */
3027 if (TYPE_ALIGN (TREE_TYPE (arg0))
3028 != TYPE_ALIGN (TREE_TYPE (arg1)))
3029 return 0;
3030 }
3031 flags &= ~OEP_ADDRESS_OF;
3032 return (OP_SAME (0) && OP_SAME (1)
3033 /* TARGET_MEM_REF requires equal extra operands. */
3034 && (TREE_CODE (arg0) != TARGET_MEM_REF
3035 || (OP_SAME_WITH_NULL (2)
3036 && OP_SAME_WITH_NULL (3)
3037 && OP_SAME_WITH_NULL (4))));
3038
3039 case ARRAY_REF:
3040 case ARRAY_RANGE_REF:
3041 /* Operands 2 and 3 may be null.
3042 Compare the array index by value first if it is constant, as the
3043 indexes may have different types but the same value here. */
3044 if (!OP_SAME (0))
3045 return 0;
3046 flags &= ~OEP_ADDRESS_OF;
3047 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3048 TREE_OPERAND (arg1, 1))
3049 || OP_SAME (1))
3050 && OP_SAME_WITH_NULL (2)
3051 && OP_SAME_WITH_NULL (3));
3052
3053 case COMPONENT_REF:
3054 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3055 may be NULL when we're called to compare MEM_EXPRs. */
3056 if (!OP_SAME_WITH_NULL (0)
3057 || !OP_SAME (1))
3058 return 0;
3059 flags &= ~OEP_ADDRESS_OF;
3060 return OP_SAME_WITH_NULL (2);
3061
3062 case BIT_FIELD_REF:
3063 if (!OP_SAME (0))
3064 return 0;
3065 flags &= ~OEP_ADDRESS_OF;
3066 return OP_SAME (1) && OP_SAME (2);
3067
3068 default:
3069 return 0;
3070 }
3071
3072 case tcc_expression:
3073 switch (TREE_CODE (arg0))
3074 {
3075 case ADDR_EXPR:
3076 /* Be sure we pass right ADDRESS_OF flag. */
3077 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3078 return operand_equal_p (TREE_OPERAND (arg0, 0),
3079 TREE_OPERAND (arg1, 0),
3080 flags | OEP_ADDRESS_OF);
3081
3082 case TRUTH_NOT_EXPR:
3083 return OP_SAME (0);
3084
3085 case TRUTH_ANDIF_EXPR:
3086 case TRUTH_ORIF_EXPR:
3087 return OP_SAME (0) && OP_SAME (1);
3088
3089 case FMA_EXPR:
3090 case WIDEN_MULT_PLUS_EXPR:
3091 case WIDEN_MULT_MINUS_EXPR:
3092 if (!OP_SAME (2))
3093 return 0;
3094 /* The multiplication operands are commutative. */
3095 /* FALLTHRU */
3096
3097 case TRUTH_AND_EXPR:
3098 case TRUTH_OR_EXPR:
3099 case TRUTH_XOR_EXPR:
3100 if (OP_SAME (0) && OP_SAME (1))
3101 return 1;
3102
3103 /* Otherwise take into account this is a commutative operation. */
3104 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3105 TREE_OPERAND (arg1, 1), flags)
3106 && operand_equal_p (TREE_OPERAND (arg0, 1),
3107 TREE_OPERAND (arg1, 0), flags));
3108
3109 case COND_EXPR:
3110 case VEC_COND_EXPR:
3111 case DOT_PROD_EXPR:
3112 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3113
3114 default:
3115 return 0;
3116 }
3117
3118 case tcc_vl_exp:
3119 switch (TREE_CODE (arg0))
3120 {
3121 case CALL_EXPR:
3122 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3123 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3124 /* If the two CALL_EXPRs are not both internal calls or both
3125 normal function calls, then they are not equal. */
3126 return 0;
3127 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3128 {
3129 /* If the CALL_EXPRs call different internal functions, then they
3130 are not equal. */
3131 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3132 return 0;
3133 }
3134 else
3135 {
3136 /* If the CALL_EXPRs call different functions, then they are not
3137 equal. */
3138 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3139 flags))
3140 return 0;
3141 }
3142
3143 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3144 {
3145 unsigned int cef = call_expr_flags (arg0);
3146 if (flags & OEP_PURE_SAME)
3147 cef &= ECF_CONST | ECF_PURE;
3148 else
3149 cef &= ECF_CONST;
3150 if (!cef)
3151 return 0;
3152 }
3153
3154 /* Now see if all the arguments are the same. */
3155 {
3156 const_call_expr_arg_iterator iter0, iter1;
3157 const_tree a0, a1;
3158 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3159 a1 = first_const_call_expr_arg (arg1, &iter1);
3160 a0 && a1;
3161 a0 = next_const_call_expr_arg (&iter0),
3162 a1 = next_const_call_expr_arg (&iter1))
3163 if (! operand_equal_p (a0, a1, flags))
3164 return 0;
3165
3166 /* If we get here and both argument lists are exhausted
3167 then the CALL_EXPRs are equal. */
3168 return ! (a0 || a1);
3169 }
3170 default:
3171 return 0;
3172 }
3173
3174 case tcc_declaration:
3175 /* Consider __builtin_sqrt equal to sqrt. */
3176 return (TREE_CODE (arg0) == FUNCTION_DECL
3177 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3178 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3179 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3180
3181 case tcc_exceptional:
3182 if (TREE_CODE (arg0) == CONSTRUCTOR)
3183 {
3184 /* In GIMPLE constructors are used only to build vectors from
3185 elements. Individual elements in the constructor must be
3186 indexed in increasing order and form an initial sequence.
3187
3188 We make no effort to compare constructors in GENERIC
3189 (see sem_variable::equals in ipa-icf, which can do so for
3190 constants). */
3191 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3192 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3193 return 0;
3194
3195 /* Be sure that vectors constructed have the same representation.
3196 We have only tested that element precision and modes match.
3197 Vectors may be BLKmode, so also check that the number of
3198 parts matches. */
3199 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3200 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3201 return 0;
3202
3203 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3204 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3205 unsigned int len = vec_safe_length (v0);
3206
3207 if (len != vec_safe_length (v1))
3208 return 0;
3209
3210 for (unsigned int i = 0; i < len; i++)
3211 {
3212 constructor_elt *c0 = &(*v0)[i];
3213 constructor_elt *c1 = &(*v1)[i];
3214
3215 if (!operand_equal_p (c0->value, c1->value, flags)
3216 /* In GIMPLE the indexes can be either NULL or matching i.
3217 Double check this so we won't get false
3218 positives for GENERIC. */
3219 || (c0->index
3220 && (TREE_CODE (c0->index) != INTEGER_CST
3221 || !compare_tree_int (c0->index, i)))
3222 || (c1->index
3223 && (TREE_CODE (c1->index) != INTEGER_CST
3224 || !compare_tree_int (c1->index, i))))
3225 return 0;
3226 }
3227 return 1;
3228 }
3229 return 0;
3230
3231 default:
3232 return 0;
3233 }
3234
3235 #undef OP_SAME
3236 #undef OP_SAME_WITH_NULL
3237 }
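
/* Usage sketch (an editor's illustration; a and b are placeholder
trees):

if (operand_equal_p (a, b, 0))
... the two trees compute the same value
if (operand_equal_p (a, b, OEP_ADDRESS_OF))
... &a and &b are provably the same address

FLAGS is a bitmask, so combinations such as
OEP_ONLY_CONST | OEP_PURE_SAME are also meaningful. */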
3238 \f
3239 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3240 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3241
3242 When in doubt, return 0. */
3243
3244 static int
3245 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3246 {
3247 int unsignedp1, unsignedpo;
3248 tree primarg0, primarg1, primother;
3249 unsigned int correct_width;
3250
3251 if (operand_equal_p (arg0, arg1, 0))
3252 return 1;
3253
3254 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3255 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3256 return 0;
3257
3258 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3259 and see if the inner values are the same. This removes any
3260 signedness comparison, which doesn't matter here. */
3261 primarg0 = arg0, primarg1 = arg1;
3262 STRIP_NOPS (primarg0);
3263 STRIP_NOPS (primarg1);
3264 if (operand_equal_p (primarg0, primarg1, 0))
3265 return 1;
3266
3267 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3268 actual comparison operand, ARG0.
3269
3270 First throw away any conversions to wider types
3271 already present in the operands. */
3272
3273 primarg1 = get_narrower (arg1, &unsignedp1);
3274 primother = get_narrower (other, &unsignedpo);
3275
3276 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3277 if (unsignedp1 == unsignedpo
3278 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3279 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3280 {
3281 tree type = TREE_TYPE (arg0);
3282
3283 /* Make sure shorter operand is extended the right way
3284 to match the longer operand. */
3285 primarg1 = fold_convert (signed_or_unsigned_type_for
3286 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3287
3288 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3289 return 1;
3290 }
3291
3292 return 0;
3293 }
3294 \f
3295 /* See if ARG is an expression that is either a comparison or is performing
3296 arithmetic on comparisons. The comparisons must only be comparing
3297 two different values, which will be stored in *CVAL1 and *CVAL2; if
3298 they are nonzero it means that some operands have already been found.
3299 No variables may be used anywhere else in the expression except in the
3300 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3301 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3302
3303 If this is true, return 1. Otherwise, return zero. */
3304
3305 static int
3306 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3307 {
3308 enum tree_code code = TREE_CODE (arg);
3309 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3310
3311 /* We can handle some of the tcc_expression cases here. */
3312 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3313 tclass = tcc_unary;
3314 else if (tclass == tcc_expression
3315 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3316 || code == COMPOUND_EXPR))
3317 tclass = tcc_binary;
3318
3319 else if (tclass == tcc_expression && code == SAVE_EXPR
3320 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3321 {
3322 /* If we've already found a CVAL1 or CVAL2, this expression is
3323 too complex to handle. */
3324 if (*cval1 || *cval2)
3325 return 0;
3326
3327 tclass = tcc_unary;
3328 *save_p = 1;
3329 }
3330
3331 switch (tclass)
3332 {
3333 case tcc_unary:
3334 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3335
3336 case tcc_binary:
3337 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3338 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3339 cval1, cval2, save_p));
3340
3341 case tcc_constant:
3342 return 1;
3343
3344 case tcc_expression:
3345 if (code == COND_EXPR)
3346 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3347 cval1, cval2, save_p)
3348 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3349 cval1, cval2, save_p)
3350 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3351 cval1, cval2, save_p));
3352 return 0;
3353
3354 case tcc_comparison:
3355 /* First see if we can handle the first operand, then the second. For
3356 the second operand, we know *CVAL1 can't be zero. It must be that
3357 one side of the comparison is each of the values; test for the
3358 case where this isn't true by failing if the two operands
3359 are the same. */
3360
3361 if (operand_equal_p (TREE_OPERAND (arg, 0),
3362 TREE_OPERAND (arg, 1), 0))
3363 return 0;
3364
3365 if (*cval1 == 0)
3366 *cval1 = TREE_OPERAND (arg, 0);
3367 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3368 ;
3369 else if (*cval2 == 0)
3370 *cval2 = TREE_OPERAND (arg, 0);
3371 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3372 ;
3373 else
3374 return 0;
3375
3376 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3377 ;
3378 else if (*cval2 == 0)
3379 *cval2 = TREE_OPERAND (arg, 1);
3380 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3381 ;
3382 else
3383 return 0;
3384
3385 return 1;
3386
3387 default:
3388 return 0;
3389 }
3390 }
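
/* For example (an editor's illustration): for ARG = "(a < b) & (b >= a)"
this stores a in *CVAL1 and b in *CVAL2 and returns 1, whereas
"(a < b) & (a < c)" fails because a third value, c, appears in the
comparisons. */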
3391 \f
3392 /* ARG is a tree that is known to contain just arithmetic operations and
3393 comparisons. Evaluate the operations in the tree substituting NEW0 for
3394 any occurrence of OLD0 as an operand of a comparison and likewise for
3395 NEW1 and OLD1. */
3396
3397 static tree
3398 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3399 tree old1, tree new1)
3400 {
3401 tree type = TREE_TYPE (arg);
3402 enum tree_code code = TREE_CODE (arg);
3403 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3404
3405 /* We can handle some of the tcc_expression cases here. */
3406 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3407 tclass = tcc_unary;
3408 else if (tclass == tcc_expression
3409 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3410 tclass = tcc_binary;
3411
3412 switch (tclass)
3413 {
3414 case tcc_unary:
3415 return fold_build1_loc (loc, code, type,
3416 eval_subst (loc, TREE_OPERAND (arg, 0),
3417 old0, new0, old1, new1));
3418
3419 case tcc_binary:
3420 return fold_build2_loc (loc, code, type,
3421 eval_subst (loc, TREE_OPERAND (arg, 0),
3422 old0, new0, old1, new1),
3423 eval_subst (loc, TREE_OPERAND (arg, 1),
3424 old0, new0, old1, new1));
3425
3426 case tcc_expression:
3427 switch (code)
3428 {
3429 case SAVE_EXPR:
3430 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3431 old1, new1);
3432
3433 case COMPOUND_EXPR:
3434 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3435 old1, new1);
3436
3437 case COND_EXPR:
3438 return fold_build3_loc (loc, code, type,
3439 eval_subst (loc, TREE_OPERAND (arg, 0),
3440 old0, new0, old1, new1),
3441 eval_subst (loc, TREE_OPERAND (arg, 1),
3442 old0, new0, old1, new1),
3443 eval_subst (loc, TREE_OPERAND (arg, 2),
3444 old0, new0, old1, new1));
3445 default:
3446 break;
3447 }
3448 /* Fall through - ??? */
3449
3450 case tcc_comparison:
3451 {
3452 tree arg0 = TREE_OPERAND (arg, 0);
3453 tree arg1 = TREE_OPERAND (arg, 1);
3454
3455 /* We need to check both for exact equality and tree equality. The
3456 former will be true if the operand has a side-effect. In that
3457 case, we know the operand occurred exactly once. */
3458
3459 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3460 arg0 = new0;
3461 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3462 arg0 = new1;
3463
3464 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3465 arg1 = new0;
3466 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3467 arg1 = new1;
3468
3469 return fold_build2_loc (loc, code, type, arg0, arg1);
3470 }
3471
3472 default:
3473 return arg;
3474 }
3475 }
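
/* For example (an editor's illustration): eval_subst on
"(a < b) & (b <= a)" with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1
rebuilds "(0 < 1) & (1 <= 0)", which the fold_build* calls then
reduce to constant false. */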
3476 \f
3477 /* Return a tree for the case when the result of an expression is RESULT
3478 converted to TYPE and OMITTED was previously an operand of the expression
3479 but is now not needed (e.g., we folded OMITTED * 0).
3480
3481 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3482 the conversion of RESULT to TYPE. */
3483
3484 tree
3485 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3486 {
3487 tree t = fold_convert_loc (loc, type, result);
3488
3489 /* If the resulting operand is an empty statement, just return the omitted
3490 statement cast to void. */
3491 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3492 return build1_loc (loc, NOP_EXPR, void_type_node,
3493 fold_ignored_result (omitted));
3494
3495 if (TREE_SIDE_EFFECTS (omitted))
3496 return build2_loc (loc, COMPOUND_EXPR, type,
3497 fold_ignored_result (omitted), t);
3498
3499 return non_lvalue_loc (loc, t);
3500 }
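
/* For example (an editor's illustration): after folding "f () * 0"
to 0, the call f () must still be evaluated, so omit_one_operand_loc
returns the COMPOUND_EXPR "(f (), 0)"; for a side-effect-free
OMITTED it returns just the converted RESULT. */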
3501
3502 /* Return a tree for the case when the result of an expression is RESULT
3503 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3504 of the expression but are now not needed.
3505
3506 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3507 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3508 evaluated before OMITTED2. Otherwise, if neither has side effects,
3509 just do the conversion of RESULT to TYPE. */
3510
3511 tree
3512 omit_two_operands_loc (location_t loc, tree type, tree result,
3513 tree omitted1, tree omitted2)
3514 {
3515 tree t = fold_convert_loc (loc, type, result);
3516
3517 if (TREE_SIDE_EFFECTS (omitted2))
3518 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3519 if (TREE_SIDE_EFFECTS (omitted1))
3520 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3521
3522 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3523 }
3524
3525 \f
3526 /* Return a simplified tree node for the truth-negation of ARG. This
3527 never alters ARG itself. We assume that ARG is an operation that
3528 returns a truth value (0 or 1).
3529
3530 FIXME: one would think we would fold the result, but it causes
3531 problems with the dominator optimizer. */
3532
3533 static tree
3534 fold_truth_not_expr (location_t loc, tree arg)
3535 {
3536 tree type = TREE_TYPE (arg);
3537 enum tree_code code = TREE_CODE (arg);
3538 location_t loc1, loc2;
3539
3540 /* If this is a comparison, we can simply invert it, except for
3541 floating-point non-equality comparisons, in which case we just
3542 enclose a TRUTH_NOT_EXPR around what we have. */
3543
3544 if (TREE_CODE_CLASS (code) == tcc_comparison)
3545 {
3546 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3547 if (FLOAT_TYPE_P (op_type)
3548 && flag_trapping_math
3549 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3550 && code != NE_EXPR && code != EQ_EXPR)
3551 return NULL_TREE;
3552
3553 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3554 if (code == ERROR_MARK)
3555 return NULL_TREE;
3556
3557 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3558 TREE_OPERAND (arg, 1));
3559 }
3560
3561 switch (code)
3562 {
3563 case INTEGER_CST:
3564 return constant_boolean_node (integer_zerop (arg), type);
3565
3566 case TRUTH_AND_EXPR:
3567 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3568 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3569 return build2_loc (loc, TRUTH_OR_EXPR, type,
3570 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3571 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3572
3573 case TRUTH_OR_EXPR:
3574 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3575 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3576 return build2_loc (loc, TRUTH_AND_EXPR, type,
3577 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3578 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3579
3580 case TRUTH_XOR_EXPR:
3581 /* Here we can invert either operand. We invert the first operand
3582 unless the second operand is a TRUTH_NOT_EXPR in which case our
3583 result is the XOR of the first operand with the inside of the
3584 negation of the second operand. */
3585
3586 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3587 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3588 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3589 else
3590 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3591 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3592 TREE_OPERAND (arg, 1));
3593
3594 case TRUTH_ANDIF_EXPR:
3595 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3596 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3597 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3598 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3599 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3600
3601 case TRUTH_ORIF_EXPR:
3602 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3603 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3604 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3605 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3606 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3607
3608 case TRUTH_NOT_EXPR:
3609 return TREE_OPERAND (arg, 0);
3610
3611 case COND_EXPR:
3612 {
3613 tree arg1 = TREE_OPERAND (arg, 1);
3614 tree arg2 = TREE_OPERAND (arg, 2);
3615
3616 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3617 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3618
3619 /* A COND_EXPR may have a throw as one operand, which
3620 then has void type. Just leave void operands
3621 as they are. */
3622 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3623 VOID_TYPE_P (TREE_TYPE (arg1))
3624 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3625 VOID_TYPE_P (TREE_TYPE (arg2))
3626 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3627 }
3628
3629 case COMPOUND_EXPR:
3630 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3631 return build2_loc (loc, COMPOUND_EXPR, type,
3632 TREE_OPERAND (arg, 0),
3633 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3634
3635 case NON_LVALUE_EXPR:
3636 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3637 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3638
3639 CASE_CONVERT:
3640 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3641 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3642
3643 /* ... fall through ... */
3644
3645 case FLOAT_EXPR:
3646 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3647 return build1_loc (loc, TREE_CODE (arg), type,
3648 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3649
3650 case BIT_AND_EXPR:
3651 if (!integer_onep (TREE_OPERAND (arg, 1)))
3652 return NULL_TREE;
3653 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3654
3655 case SAVE_EXPR:
3656 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3657
3658 case CLEANUP_POINT_EXPR:
3659 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3660 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3661 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3662
3663 default:
3664 return NULL_TREE;
3665 }
3666 }
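
/* For example (an editor's illustration): negating "a && b" produces
"!a || !b" (De Morgan), and negating a floating-point "x < y"
produces "x unge y" when NaNs are honored; with flag_trapping_math
set the floating-point case returns NULL_TREE and the caller wraps
the expression in a TRUTH_NOT_EXPR instead. */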
3667
3668 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3669 assume that ARG is an operation that returns a truth value (0 or 1
3670 for scalars, 0 or -1 for vectors). Return the folded expression if
3671 folding is successful. Otherwise, return NULL_TREE. */
3672
3673 static tree
3674 fold_invert_truthvalue (location_t loc, tree arg)
3675 {
3676 tree type = TREE_TYPE (arg);
3677 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3678 ? BIT_NOT_EXPR
3679 : TRUTH_NOT_EXPR,
3680 type, arg);
3681 }
3682
3683 /* Return a simplified tree node for the truth-negation of ARG. This
3684 never alters ARG itself. We assume that ARG is an operation that
3685 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3686
3687 tree
3688 invert_truthvalue_loc (location_t loc, tree arg)
3689 {
3690 if (TREE_CODE (arg) == ERROR_MARK)
3691 return arg;
3692
3693 tree type = TREE_TYPE (arg);
3694 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3695 ? BIT_NOT_EXPR
3696 : TRUTH_NOT_EXPR,
3697 type, arg);
3698 }
3699
3700 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3701 with code CODE. This optimization is unsafe in exact IEEE arithmetic. */
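/* A sketch of the two rewrites, assuming CODE is PLUS_EXPR:

     (a / x) + (b / x)     ->  (a + b) / x
     (a / 2.0) + (a / 4.0) ->  a * (1/2.0 + 1/4.0) = a * 0.75

   Neither form rounds identically to the original in IEEE arithmetic,
   so this helper is only expected to be reached when unsafe math
   optimizations are enabled.  */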
3702 static tree
3703 distribute_real_division (location_t loc, enum tree_code code, tree type,
3704 tree arg0, tree arg1)
3705 {
3706 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3707 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3708
3709 /* (A / C) +- (B / C) -> (A +- B) / C. */
3710 if (mul0 == mul1
3711 && operand_equal_p (TREE_OPERAND (arg0, 1),
3712 TREE_OPERAND (arg1, 1), 0))
3713 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3714 fold_build2_loc (loc, code, type,
3715 TREE_OPERAND (arg0, 0),
3716 TREE_OPERAND (arg1, 0)),
3717 TREE_OPERAND (arg0, 1));
3718
3719 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3720 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3721 TREE_OPERAND (arg1, 0), 0)
3722 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3723 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3724 {
3725 REAL_VALUE_TYPE r0, r1;
3726 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3727 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3728 if (!mul0)
3729 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3730 if (!mul1)
3731 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3732 real_arithmetic (&r0, code, &r0, &r1);
3733 return fold_build2_loc (loc, MULT_EXPR, type,
3734 TREE_OPERAND (arg0, 0),
3735 build_real (type, r0));
3736 }
3737
3738 return NULL_TREE;
3739 }
3740 \f
3741 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3742 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3743 and uses reverse storage order if REVERSEP is nonzero. */
3744
3745 static tree
3746 make_bit_field_ref (location_t loc, tree inner, tree type,
3747 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3748 int unsignedp, int reversep)
3749 {
3750 tree result, bftype;
3751
3752 if (bitpos == 0 && !reversep)
3753 {
3754 tree size = TYPE_SIZE (TREE_TYPE (inner));
3755 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3756 || POINTER_TYPE_P (TREE_TYPE (inner)))
3757 && tree_fits_shwi_p (size)
3758 && tree_to_shwi (size) == bitsize)
3759 return fold_convert_loc (loc, type, inner);
3760 }
3761
3762 bftype = type;
3763 if (TYPE_PRECISION (bftype) != bitsize
3764 || TYPE_UNSIGNED (bftype) == !unsignedp)
3765 bftype = build_nonstandard_integer_type (bitsize, 0);
3766
3767 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3768 size_int (bitsize), bitsize_int (bitpos));
3769 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3770
3771 if (bftype != type)
3772 result = fold_convert_loc (loc, type, result);
3773
3774 return result;
3775 }
3776
3777 /* Optimize a bit-field compare.
3778
3779 There are two cases: First is a compare against a constant and the
3780 second is a comparison of two items where the fields are at the same
3781 bit position relative to the start of a chunk (byte, halfword, word)
3782 large enough to contain it. In these cases we can avoid the shift
3783 implicit in bitfield extractions.
3784
3785 For constants, we emit a compare of the shifted constant with the
3786 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3787 compared. For two fields at the same position, we do the ANDs with the
3788 similar mask and compare the result of the ANDs.
3789
3790 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3791 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3792 are the left and right operands of the comparison, respectively.
3793
3794 If the optimization described above can be done, we return the resulting
3795 tree. Otherwise we return zero. */
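/* As an illustration of the constant case, for a hypothetical

     struct { unsigned int f : 3; } s;

   a test such as "s.f == 5" can, on a suitable target, be rewritten
   roughly as

     (BIT_FIELD_REF <s, 8, 0> & mask) == ((5 << shift) & mask)

   i.e. a container-sized load, an AND and a compare, avoiding the
   shift implicit in the bit-field extraction.  The container size and
   shift count depend on the available modes and on endianness.  */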
3796
3797 static tree
3798 optimize_bit_field_compare (location_t loc, enum tree_code code,
3799 tree compare_type, tree lhs, tree rhs)
3800 {
3801 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3802 tree type = TREE_TYPE (lhs);
3803 tree unsigned_type;
3804 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3805 machine_mode lmode, rmode, nmode;
3806 int lunsignedp, runsignedp;
3807 int lreversep, rreversep;
3808 int lvolatilep = 0, rvolatilep = 0;
3809 tree linner, rinner = NULL_TREE;
3810 tree mask;
3811 tree offset;
3812
3813 /* Get all the information about the extractions being done. If the bit size
3814 is the same as the size of the underlying object, we aren't doing an
3815 extraction at all and so can do nothing. We also don't want to
3816 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3817 then will no longer be able to replace it. */
3818 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3819 &lunsignedp, &lreversep, &lvolatilep, false);
3820 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3821 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3822 return 0;
3823
3824 if (const_p)
3825 rreversep = lreversep;
3826 else
3827 {
3828 /* If this is not a constant, we can only do something if bit positions,
3829 sizes, signedness and storage order are the same. */
3830 rinner
3831 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3832 &runsignedp, &rreversep, &rvolatilep, false);
3833
3834 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3835 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3836 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3837 return 0;
3838 }
3839
3840 /* See if we can find a mode to refer to this field. We should be able to,
3841 but fail if we can't. */
3842 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3843 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3844 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3845 TYPE_ALIGN (TREE_TYPE (rinner))),
3846 word_mode, false);
3847 if (nmode == VOIDmode)
3848 return 0;
3849
3850 /* Set signed and unsigned types of the precision of this mode for the
3851 shifts below. */
3852 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3853
3854 /* Compute the bit position and size for the new reference and our offset
3855 within it. If the new reference is the same size as the original, we
3856 won't optimize anything, so return zero. */
3857 nbitsize = GET_MODE_BITSIZE (nmode);
3858 nbitpos = lbitpos & ~ (nbitsize - 1);
3859 lbitpos -= nbitpos;
3860 if (nbitsize == lbitsize)
3861 return 0;
3862
3863 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3864 lbitpos = nbitsize - lbitsize - lbitpos;
3865
3866 /* Make the mask to be used against the extracted field. */
3867 mask = build_int_cst_type (unsigned_type, -1);
3868 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3869 mask = const_binop (RSHIFT_EXPR, mask,
3870 size_int (nbitsize - lbitsize - lbitpos));
3871
3872 if (! const_p)
3873 /* If not comparing with constant, just rework the comparison
3874 and return. */
3875 return fold_build2_loc (loc, code, compare_type,
3876 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3877 make_bit_field_ref (loc, linner,
3878 unsigned_type,
3879 nbitsize, nbitpos,
3880 1, lreversep),
3881 mask),
3882 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3883 make_bit_field_ref (loc, rinner,
3884 unsigned_type,
3885 nbitsize, nbitpos,
3886 1, rreversep),
3887 mask));
3888
3889 /* Otherwise, we are handling the constant case. See if the constant is
3890 too big for the field. Warn and fold the comparison to a constant result
3891 (false for EQ_EXPR, true for NE_EXPR) if so. We do this not only for its
3892 own sake, but to avoid having to test for this error case below; if we didn't, we might generate wrong code.
3893
3894 For unsigned fields, the constant shifted right by the field length should
3895 be all zero. For signed fields, the high-order bits should agree with
3896 the sign bit. */
3897
3898 if (lunsignedp)
3899 {
3900 if (wi::lrshift (rhs, lbitsize) != 0)
3901 {
3902 warning (0, "comparison is always %d due to width of bit-field",
3903 code == NE_EXPR);
3904 return constant_boolean_node (code == NE_EXPR, compare_type);
3905 }
3906 }
3907 else
3908 {
3909 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3910 if (tem != 0 && tem != -1)
3911 {
3912 warning (0, "comparison is always %d due to width of bit-field",
3913 code == NE_EXPR);
3914 return constant_boolean_node (code == NE_EXPR, compare_type);
3915 }
3916 }
3917
3918 /* Single-bit compares should always be against zero. */
3919 if (lbitsize == 1 && ! integer_zerop (rhs))
3920 {
3921 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3922 rhs = build_int_cst (type, 0);
3923 }
3924
3925 /* Make a new bitfield reference, shift the constant over the
3926 appropriate number of bits and mask it with the computed mask
3927 (in case this was a signed field). If we changed it, make a new one. */
3928 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3929 lreversep);
3930
3931 rhs = const_binop (BIT_AND_EXPR,
3932 const_binop (LSHIFT_EXPR,
3933 fold_convert_loc (loc, unsigned_type, rhs),
3934 size_int (lbitpos)),
3935 mask);
3936
3937 lhs = build2_loc (loc, code, compare_type,
3938 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3939 return lhs;
3940 }
3941 \f
3942 /* Subroutine for fold_truth_andor_1: decode a field reference.
3943
3944 If EXP is a comparison reference, we return the innermost reference.
3945
3946 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3947 set to the starting bit number.
3948
3949 If the innermost field can be completely contained in a mode-sized
3950 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3951
3952 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3953 otherwise it is not changed.
3954
3955 *PUNSIGNEDP is set to the signedness of the field.
3956
3957 *PREVERSEP is set to the storage order of the field.
3958
3959 *PMASK is set to the mask used. This is either contained in a
3960 BIT_AND_EXPR or derived from the width of the field.
3961
3962 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3963
3964 Return 0 if this is not a component reference or is one that we can't
3965 do anything with. */
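/* As a sketch, for a hypothetical "struct { unsigned b : 4; } x", an
   expression such as (x.b & 6) would decode to the inner reference
   for x.b with *PBITSIZE set to 4, *PAND_MASK set to 6 and *PMASK set
   to the 4-bit field mask ANDed with 6, i.e. 6.  */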
3966
3967 static tree
3968 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3969 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3970 int *punsignedp, int *preversep, int *pvolatilep,
3971 tree *pmask, tree *pand_mask)
3972 {
3973 tree outer_type = 0;
3974 tree and_mask = 0;
3975 tree mask, inner, offset;
3976 tree unsigned_type;
3977 unsigned int precision;
3978
3979 /* All the optimizations using this function assume integer fields.
3980 There are problems with FP fields since the type_for_size call
3981 below can fail for, e.g., XFmode. */
3982 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3983 return 0;
3984
3985 /* We are interested in the bare arrangement of bits, so strip everything
3986 that doesn't affect the machine mode. However, record the type of the
3987 outermost expression if it may matter below. */
3988 if (CONVERT_EXPR_P (exp)
3989 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3990 outer_type = TREE_TYPE (exp);
3991 STRIP_NOPS (exp);
3992
3993 if (TREE_CODE (exp) == BIT_AND_EXPR)
3994 {
3995 and_mask = TREE_OPERAND (exp, 1);
3996 exp = TREE_OPERAND (exp, 0);
3997 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3998 if (TREE_CODE (and_mask) != INTEGER_CST)
3999 return 0;
4000 }
4001
4002 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4003 punsignedp, preversep, pvolatilep, false);
4004 if ((inner == exp && and_mask == 0)
4005 || *pbitsize < 0 || offset != 0
4006 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4007 return 0;
4008
4009 /* If the number of bits in the reference is the same as the bitsize of
4010 the outer type, then the outer type gives the signedness. Otherwise
4011 (in case of a small bitfield) the signedness is unchanged. */
4012 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4013 *punsignedp = TYPE_UNSIGNED (outer_type);
4014
4015 /* Compute the mask to access the bitfield. */
4016 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4017 precision = TYPE_PRECISION (unsigned_type);
4018
4019 mask = build_int_cst_type (unsigned_type, -1);
4020
4021 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4022 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4023
4024 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4025 if (and_mask != 0)
4026 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4027 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4028
4029 *pmask = mask;
4030 *pand_mask = and_mask;
4031 return inner;
4032 }
4033
4034 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4035 bit positions and the type of MASK is signed. */
4036
4037 static int
4038 all_ones_mask_p (const_tree mask, unsigned int size)
4039 {
4040 tree type = TREE_TYPE (mask);
4041 unsigned int precision = TYPE_PRECISION (type);
4042
4043 /* If this function returns true when the type of the mask is
4044 UNSIGNED, then there will be errors. In particular see
4045 gcc.c-torture/execute/990326-1.c. There does not appear to be
4046 any documentation paper trail as to why this is so. But the
4047 pre-wide-int code worked with that restriction and it has been
4048 preserved here. */
4049 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4050 return false;
4051
4052 return wi::mask (size, false, precision) == mask;
4053 }
4054
4055 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4056 represents the sign bit of EXP's type. If EXP represents a sign
4057 or zero extension, also test VAL against the unextended type.
4058 The return value is the (sub)expression whose sign bit is VAL,
4059 or NULL_TREE otherwise. */
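/* For example, if EXP has a 32-bit int type, VAL must be 0x80000000
   for EXP to be returned; and for EXP of the form (int) c with an
   8-bit signed char operand, VAL == 0x80 would return c via the
   recursive call on the narrower type.  */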
4060
4061 tree
4062 sign_bit_p (tree exp, const_tree val)
4063 {
4064 int width;
4065 tree t;
4066
4067 /* Tree EXP must have an integral type. */
4068 t = TREE_TYPE (exp);
4069 if (! INTEGRAL_TYPE_P (t))
4070 return NULL_TREE;
4071
4072 /* Tree VAL must be an integer constant. */
4073 if (TREE_CODE (val) != INTEGER_CST
4074 || TREE_OVERFLOW (val))
4075 return NULL_TREE;
4076
4077 width = TYPE_PRECISION (t);
4078 if (wi::only_sign_bit_p (val, width))
4079 return exp;
4080
4081 /* Handle extension from a narrower type. */
4082 if (TREE_CODE (exp) == NOP_EXPR
4083 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4084 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4085
4086 return NULL_TREE;
4087 }
4088
4089 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4090 to be evaluated unconditionally. */
4091
4092 static int
4093 simple_operand_p (const_tree exp)
4094 {
4095 /* Strip any conversions that don't change the machine mode. */
4096 STRIP_NOPS (exp);
4097
4098 return (CONSTANT_CLASS_P (exp)
4099 || TREE_CODE (exp) == SSA_NAME
4100 || (DECL_P (exp)
4101 && ! TREE_ADDRESSABLE (exp)
4102 && ! TREE_THIS_VOLATILE (exp)
4103 && ! DECL_NONLOCAL (exp)
4104 /* Don't regard global variables as simple. They may be
4105 allocated in ways unknown to the compiler (shared memory,
4106 #pragma weak, etc). */
4107 && ! TREE_PUBLIC (exp)
4108 && ! DECL_EXTERNAL (exp)
4109 /* Weakrefs are not safe to be read, since they can be NULL.
4110 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4111 have DECL_WEAK flag set. */
4112 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4113 /* Loading a static variable is unduly expensive, but global
4114 registers aren't expensive. */
4115 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4116 }
4117
4118 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4119 to be evaluated unconditionally.
4120 In addition to simple_operand_p, we assume that comparisons, conversions,
4121 and logic-not operations are simple, if their operands are simple, too. */
4122
4123 static bool
4124 simple_operand_p_2 (tree exp)
4125 {
4126 enum tree_code code;
4127
4128 if (TREE_SIDE_EFFECTS (exp)
4129 || tree_could_trap_p (exp))
4130 return false;
4131
4132 while (CONVERT_EXPR_P (exp))
4133 exp = TREE_OPERAND (exp, 0);
4134
4135 code = TREE_CODE (exp);
4136
4137 if (TREE_CODE_CLASS (code) == tcc_comparison)
4138 return (simple_operand_p (TREE_OPERAND (exp, 0))
4139 && simple_operand_p (TREE_OPERAND (exp, 1)));
4140
4141 if (code == TRUTH_NOT_EXPR)
4142 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4143
4144 return simple_operand_p (exp);
4145 }
4146
4147 \f
4148 /* The following functions are subroutines to fold_range_test and allow it to
4149 try to change a logical combination of comparisons into a range test.
4150
4151 For example, both
4152 X == 2 || X == 3 || X == 4 || X == 5
4153 and
4154 X >= 2 && X <= 5
4155 are converted to
4156 (unsigned) (X - 2) <= 3
4157
4158 We describe each set of comparisons as being either inside or outside
4159 a range, using a variable named like IN_P, and then describe the
4160 range with a lower and upper bound. If one of the bounds is omitted,
4161 it represents either the highest or lowest value of the type.
4162
4163 In the comments below, we represent a range by two numbers in brackets
4164 preceded by a "+" to designate being inside that range, or a "-" to
4165 designate being outside that range, so the condition can be inverted by
4166 flipping the prefix. An omitted bound is represented by a "-". For
4167 example, "- [-, 10]" means being outside the range starting at the lowest
4168 possible value and ending at 10, in other words, being greater than 10.
4169 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4170 always false.
4171
4172 We set up things so that the missing bounds are handled in a consistent
4173 manner so neither a missing bound nor "true" and "false" need to be
4174 handled using a special case. */
4175
4176 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4177 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4178 and UPPER1_P are nonzero if the respective argument is an upper bound
4179 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4180 must be specified for a comparison. ARG1 will be converted to ARG0's
4181 type if both are specified. */
4182
4183 static tree
4184 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4185 tree arg1, int upper1_p)
4186 {
4187 tree tem;
4188 int result;
4189 int sgn0, sgn1;
4190
4191 /* If neither arg represents infinity, do the normal operation.
4192 Else, if not a comparison, return infinity. Else handle the special
4193 comparison rules. Note that most of the cases below won't occur, but
4194 are handled for consistency. */
4195
4196 if (arg0 != 0 && arg1 != 0)
4197 {
4198 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4199 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4200 STRIP_NOPS (tem);
4201 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4202 }
4203
4204 if (TREE_CODE_CLASS (code) != tcc_comparison)
4205 return 0;
4206
4207 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4208 for neither. In real mathematics we could not assume that open-ended
4209 ranges compare equal, but this is computer arithmetic, where numbers
4210 are finite. We can therefore stand in for any missing bound with a
4211 value Z greater than any representable number, which permits us to
4212 treat unbounded ranges as equal. */
4213 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4214 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4215 switch (code)
4216 {
4217 case EQ_EXPR:
4218 result = sgn0 == sgn1;
4219 break;
4220 case NE_EXPR:
4221 result = sgn0 != sgn1;
4222 break;
4223 case LT_EXPR:
4224 result = sgn0 < sgn1;
4225 break;
4226 case LE_EXPR:
4227 result = sgn0 <= sgn1;
4228 break;
4229 case GT_EXPR:
4230 result = sgn0 > sgn1;
4231 break;
4232 case GE_EXPR:
4233 result = sgn0 >= sgn1;
4234 break;
4235 default:
4236 gcc_unreachable ();
4237 }
4238
4239 return constant_boolean_node (result, type);
4240 }
4241 \f
4242 /* Helper routine for make_range. Perform one step for it; return the
4243 new expression if the loop should continue or NULL_TREE if it should
4244 stop. */
4245
4246 tree
4247 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4248 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4249 bool *strict_overflow_p)
4250 {
4251 tree arg0_type = TREE_TYPE (arg0);
4252 tree n_low, n_high, low = *p_low, high = *p_high;
4253 int in_p = *p_in_p, n_in_p;
4254
4255 switch (code)
4256 {
4257 case TRUTH_NOT_EXPR:
4258 /* We can only do something if the range is testing for zero. */
4259 if (low == NULL_TREE || high == NULL_TREE
4260 || ! integer_zerop (low) || ! integer_zerop (high))
4261 return NULL_TREE;
4262 *p_in_p = ! in_p;
4263 return arg0;
4264
4265 case EQ_EXPR: case NE_EXPR:
4266 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4267 /* We can only do something if the range is testing for zero
4268 and if the second operand is an integer constant. Note that
4269 saying something is "in" the range we make is done by
4270 complementing IN_P, since IN_P is initially set up for the case of
4271 being not equal to zero; "out" is leaving it alone. */
4272 if (low == NULL_TREE || high == NULL_TREE
4273 || ! integer_zerop (low) || ! integer_zerop (high)
4274 || TREE_CODE (arg1) != INTEGER_CST)
4275 return NULL_TREE;
4276
4277 switch (code)
4278 {
4279 case NE_EXPR: /* - [c, c] */
4280 low = high = arg1;
4281 break;
4282 case EQ_EXPR: /* + [c, c] */
4283 in_p = ! in_p, low = high = arg1;
4284 break;
4285 case GT_EXPR: /* - [-, c] */
4286 low = 0, high = arg1;
4287 break;
4288 case GE_EXPR: /* + [c, -] */
4289 in_p = ! in_p, low = arg1, high = 0;
4290 break;
4291 case LT_EXPR: /* - [c, -] */
4292 low = arg1, high = 0;
4293 break;
4294 case LE_EXPR: /* + [-, c] */
4295 in_p = ! in_p, low = 0, high = arg1;
4296 break;
4297 default:
4298 gcc_unreachable ();
4299 }
4300
4301 /* If this is an unsigned comparison, we also know that EXP is
4302 greater than or equal to zero. We base the range tests we make
4303 on that fact, so we record it here so we can parse existing
4304 range tests. We test arg0_type since often the return type
4305 of, e.g. EQ_EXPR, is boolean. */
4306 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4307 {
4308 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4309 in_p, low, high, 1,
4310 build_int_cst (arg0_type, 0),
4311 NULL_TREE))
4312 return NULL_TREE;
4313
4314 in_p = n_in_p, low = n_low, high = n_high;
4315
4316 /* If the high bound is missing, but we have a nonzero low
4317 bound, reverse the range so it goes from zero to the low bound
4318 minus 1. */
4319 if (high == 0 && low && ! integer_zerop (low))
4320 {
4321 in_p = ! in_p;
4322 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4323 build_int_cst (TREE_TYPE (low), 1), 0);
4324 low = build_int_cst (arg0_type, 0);
4325 }
4326 }
4327
4328 *p_low = low;
4329 *p_high = high;
4330 *p_in_p = in_p;
4331 return arg0;
4332
4333 case NEGATE_EXPR:
4334 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4335 low and high are non-NULL, then normalize will DTRT. */
4336 if (!TYPE_UNSIGNED (arg0_type)
4337 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4338 {
4339 if (low == NULL_TREE)
4340 low = TYPE_MIN_VALUE (arg0_type);
4341 if (high == NULL_TREE)
4342 high = TYPE_MAX_VALUE (arg0_type);
4343 }
4344
4345 /* (-x) IN [a,b] -> x in [-b, -a] */
4346 n_low = range_binop (MINUS_EXPR, exp_type,
4347 build_int_cst (exp_type, 0),
4348 0, high, 1);
4349 n_high = range_binop (MINUS_EXPR, exp_type,
4350 build_int_cst (exp_type, 0),
4351 0, low, 0);
4352 if (n_high != 0 && TREE_OVERFLOW (n_high))
4353 return NULL_TREE;
4354 goto normalize;
4355
4356 case BIT_NOT_EXPR:
4357 /* ~ X -> -X - 1 */
4358 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4359 build_int_cst (exp_type, 1));
4360
4361 case PLUS_EXPR:
4362 case MINUS_EXPR:
4363 if (TREE_CODE (arg1) != INTEGER_CST)
4364 return NULL_TREE;
4365
4366 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4367 move a constant to the other side. */
4368 if (!TYPE_UNSIGNED (arg0_type)
4369 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4370 return NULL_TREE;
4371
4372 /* If EXP is signed, any overflow in the computation is undefined,
4373 so we don't worry about it so long as our computations on
4374 the bounds don't overflow. For unsigned, overflow is defined
4375 and this is exactly the right thing. */
4376 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4377 arg0_type, low, 0, arg1, 0);
4378 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4379 arg0_type, high, 1, arg1, 0);
4380 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4381 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4382 return NULL_TREE;
4383
4384 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4385 *strict_overflow_p = true;
4386
4387 normalize:
4388 /* Check for an unsigned range which has wrapped around the maximum
4389 value thus making n_high < n_low, and normalize it. */
4390 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4391 {
4392 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4393 build_int_cst (TREE_TYPE (n_high), 1), 0);
4394 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4395 build_int_cst (TREE_TYPE (n_low), 1), 0);
4396
4397 /* If the range is of the form +/- [ x+1, x ], we won't
4398 be able to normalize it. But then, it represents the
4399 whole range or the empty set, so make it
4400 +/- [ -, - ]. */
4401 if (tree_int_cst_equal (n_low, low)
4402 && tree_int_cst_equal (n_high, high))
4403 low = high = 0;
4404 else
4405 in_p = ! in_p;
4406 }
4407 else
4408 low = n_low, high = n_high;
4409
4410 *p_low = low;
4411 *p_high = high;
4412 *p_in_p = in_p;
4413 return arg0;
4414
4415 CASE_CONVERT:
4416 case NON_LVALUE_EXPR:
4417 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4418 return NULL_TREE;
4419
4420 if (! INTEGRAL_TYPE_P (arg0_type)
4421 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4422 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4423 return NULL_TREE;
4424
4425 n_low = low, n_high = high;
4426
4427 if (n_low != 0)
4428 n_low = fold_convert_loc (loc, arg0_type, n_low);
4429
4430 if (n_high != 0)
4431 n_high = fold_convert_loc (loc, arg0_type, n_high);
4432
4433 /* If we're converting arg0 from an unsigned type to exp's
4434 signed type, we will be doing the comparison as unsigned.
4435 The tests above have already verified that LOW and HIGH
4436 are both positive.
4437
4438 So we have to ensure that we will handle large unsigned
4439 values the same way that the current signed bounds treat
4440 negative values. */
4441
4442 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4443 {
4444 tree high_positive;
4445 tree equiv_type;
4446 /* For fixed-point modes, we need to pass the saturating flag
4447 as the 2nd parameter. */
4448 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4449 equiv_type
4450 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4451 TYPE_SATURATING (arg0_type));
4452 else
4453 equiv_type
4454 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4455
4456 /* A range without an upper bound is, naturally, unbounded.
4457 Since convert would have cropped a very large value, use
4458 the max value for the destination type. */
4459 high_positive
4460 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4461 : TYPE_MAX_VALUE (arg0_type);
4462
4463 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4464 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4465 fold_convert_loc (loc, arg0_type,
4466 high_positive),
4467 build_int_cst (arg0_type, 1));
4468
4469 /* If the low bound is specified, "and" the range with the
4470 range for which the original unsigned value will be
4471 positive. */
4472 if (low != 0)
4473 {
4474 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4475 1, fold_convert_loc (loc, arg0_type,
4476 integer_zero_node),
4477 high_positive))
4478 return NULL_TREE;
4479
4480 in_p = (n_in_p == in_p);
4481 }
4482 else
4483 {
4484 /* Otherwise, "or" the range with the range of the input
4485 that will be interpreted as negative. */
4486 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4487 1, fold_convert_loc (loc, arg0_type,
4488 integer_zero_node),
4489 high_positive))
4490 return NULL_TREE;
4491
4492 in_p = (in_p != n_in_p);
4493 }
4494 }
4495
4496 *p_low = n_low;
4497 *p_high = n_high;
4498 *p_in_p = in_p;
4499 return arg0;
4500
4501 default:
4502 return NULL_TREE;
4503 }
4504 }
4505
4506 /* Given EXP, a logical expression, set the range it is testing into
4507 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4508 actually being tested. *PLOW and *PHIGH will be made of the same
4509 type as the returned expression. If EXP is not a comparison, we
4510 will most likely not be returning a useful value and range. Set
4511 *STRICT_OVERFLOW_P to true if the return value is only valid
4512 because signed overflow is undefined; otherwise, do not change
4513 *STRICT_OVERFLOW_P. */
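/* For instance, for EXP = "x > 5" this returns x with *PIN_P = 0,
   *PLOW = NULL_TREE (no lower bound) and *PHIGH = 5, i.e. "x is
   outside [min, 5]".  */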
4514
4515 tree
4516 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4517 bool *strict_overflow_p)
4518 {
4519 enum tree_code code;
4520 tree arg0, arg1 = NULL_TREE;
4521 tree exp_type, nexp;
4522 int in_p;
4523 tree low, high;
4524 location_t loc = EXPR_LOCATION (exp);
4525
4526 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4527 and see if we can refine the range. Some of the cases below may not
4528 happen, but it doesn't seem worth worrying about this. We keep
4529 iterating for as long as make_range_step can refine the range; once
4530 it returns NULL_TREE, we stop. */
4531
4532 in_p = 0;
4533 low = high = build_int_cst (TREE_TYPE (exp), 0);
4534
4535 while (1)
4536 {
4537 code = TREE_CODE (exp);
4538 exp_type = TREE_TYPE (exp);
4539 arg0 = NULL_TREE;
4540
4541 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4542 {
4543 if (TREE_OPERAND_LENGTH (exp) > 0)
4544 arg0 = TREE_OPERAND (exp, 0);
4545 if (TREE_CODE_CLASS (code) == tcc_binary
4546 || TREE_CODE_CLASS (code) == tcc_comparison
4547 || (TREE_CODE_CLASS (code) == tcc_expression
4548 && TREE_OPERAND_LENGTH (exp) > 1))
4549 arg1 = TREE_OPERAND (exp, 1);
4550 }
4551 if (arg0 == NULL_TREE)
4552 break;
4553
4554 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4555 &high, &in_p, strict_overflow_p);
4556 if (nexp == NULL_TREE)
4557 break;
4558 exp = nexp;
4559 }
4560
4561 /* If EXP is a constant, we can evaluate whether this is true or false. */
4562 if (TREE_CODE (exp) == INTEGER_CST)
4563 {
4564 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4565 exp, 0, low, 0))
4566 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4567 exp, 1, high, 1)));
4568 low = high = 0;
4569 exp = 0;
4570 }
4571
4572 *pin_p = in_p, *plow = low, *phigh = high;
4573 return exp;
4574 }
4575 \f
4576 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4577 type, TYPE, return an expression to test if EXP is in (or out of, depending
4578 on IN_P) the range. Return 0 if the test couldn't be created. */
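/* For example, build_range_check (loc, type, x, 1, 2, 5) on a signed
   integer x ends up, via the wrap-around path at the bottom of the
   function, as "(unsigned) x - 2 <= 3".  */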
4579
4580 tree
4581 build_range_check (location_t loc, tree type, tree exp, int in_p,
4582 tree low, tree high)
4583 {
4584 tree etype = TREE_TYPE (exp), value;
4585
4586 /* Disable this optimization for function pointer expressions
4587 on targets that require function pointer canonicalization. */
4588 if (targetm.have_canonicalize_funcptr_for_compare ()
4589 && TREE_CODE (etype) == POINTER_TYPE
4590 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4591 return NULL_TREE;
4592
4593 if (! in_p)
4594 {
4595 value = build_range_check (loc, type, exp, 1, low, high);
4596 if (value != 0)
4597 return invert_truthvalue_loc (loc, value);
4598
4599 return 0;
4600 }
4601
4602 if (low == 0 && high == 0)
4603 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4604
4605 if (low == 0)
4606 return fold_build2_loc (loc, LE_EXPR, type, exp,
4607 fold_convert_loc (loc, etype, high));
4608
4609 if (high == 0)
4610 return fold_build2_loc (loc, GE_EXPR, type, exp,
4611 fold_convert_loc (loc, etype, low));
4612
4613 if (operand_equal_p (low, high, 0))
4614 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4615 fold_convert_loc (loc, etype, low));
4616
4617 if (integer_zerop (low))
4618 {
4619 if (! TYPE_UNSIGNED (etype))
4620 {
4621 etype = unsigned_type_for (etype);
4622 high = fold_convert_loc (loc, etype, high);
4623 exp = fold_convert_loc (loc, etype, exp);
4624 }
4625 return build_range_check (loc, type, exp, 1, 0, high);
4626 }
4627
4628 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4629 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4630 {
4631 int prec = TYPE_PRECISION (etype);
4632
4633 if (wi::mask (prec - 1, false, prec) == high)
4634 {
4635 if (TYPE_UNSIGNED (etype))
4636 {
4637 tree signed_etype = signed_type_for (etype);
4638 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4639 etype
4640 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4641 else
4642 etype = signed_etype;
4643 exp = fold_convert_loc (loc, etype, exp);
4644 }
4645 return fold_build2_loc (loc, GT_EXPR, type, exp,
4646 build_int_cst (etype, 0));
4647 }
4648 }
4649
4650 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4651 This requires wrap-around arithmetic for the type of the expression.
4652 First make sure that arithmetic in this type is valid, then make sure
4653 that it wraps around. */
4654 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4655 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4656 TYPE_UNSIGNED (etype));
4657
4658 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4659 {
4660 tree utype, minv, maxv;
4661
4662 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4663 for the type in question, as we rely on this here. */
4664 utype = unsigned_type_for (etype);
4665 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4666 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4667 build_int_cst (TREE_TYPE (maxv), 1), 1);
4668 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4669
4670 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4671 minv, 1, maxv, 1)))
4672 etype = utype;
4673 else
4674 return 0;
4675 }
4676
4677 high = fold_convert_loc (loc, etype, high);
4678 low = fold_convert_loc (loc, etype, low);
4679 exp = fold_convert_loc (loc, etype, exp);
4680
4681 value = const_binop (MINUS_EXPR, high, low);
4682
4684 if (POINTER_TYPE_P (etype))
4685 {
4686 if (value != 0 && !TREE_OVERFLOW (value))
4687 {
4688 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4689 return build_range_check (loc, type,
4690 fold_build_pointer_plus_loc (loc, exp, low),
4691 1, build_int_cst (etype, 0), value);
4692 }
4693 return 0;
4694 }
4695
4696 if (value != 0 && !TREE_OVERFLOW (value))
4697 return build_range_check (loc, type,
4698 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4699 1, build_int_cst (etype, 0), value);
4700
4701 return 0;
4702 }
4703 \f
4704 /* Return the predecessor of VAL in its type, handling the infinite case. */
4705
4706 static tree
4707 range_predecessor (tree val)
4708 {
4709 tree type = TREE_TYPE (val);
4710
4711 if (INTEGRAL_TYPE_P (type)
4712 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4713 return 0;
4714 else
4715 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4716 build_int_cst (TREE_TYPE (val), 1), 0);
4717 }
4718
4719 /* Return the successor of VAL in its type, handling the infinite case. */
4720
4721 static tree
4722 range_successor (tree val)
4723 {
4724 tree type = TREE_TYPE (val);
4725
4726 if (INTEGRAL_TYPE_P (type)
4727 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4728 return 0;
4729 else
4730 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4731 build_int_cst (TREE_TYPE (val), 1), 0);
4732 }
4733
4734 /* Given two ranges, see if we can merge them into one. Return 1 if we
4735 can, 0 if we can't. Set the output range into the specified parameters. */
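/* For example, merging + [2, 5] with + [4, 9] (both ranges included,
   as for the AND of two tests) yields the intersection + [4, 5].  */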
4736
4737 bool
4738 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4739 tree high0, int in1_p, tree low1, tree high1)
4740 {
4741 int no_overlap;
4742 int subset;
4743 int temp;
4744 tree tem;
4745 int in_p;
4746 tree low, high;
4747 int lowequal = ((low0 == 0 && low1 == 0)
4748 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4749 low0, 0, low1, 0)));
4750 int highequal = ((high0 == 0 && high1 == 0)
4751 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4752 high0, 1, high1, 1)));
4753
4754 /* Make range 0 be the range that starts first, or ends last if they
4755 start at the same value. Swap them if it isn't. */
4756 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4757 low0, 0, low1, 0))
4758 || (lowequal
4759 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4760 high1, 1, high0, 1))))
4761 {
4762 temp = in0_p, in0_p = in1_p, in1_p = temp;
4763 tem = low0, low0 = low1, low1 = tem;
4764 tem = high0, high0 = high1, high1 = tem;
4765 }
4766
4767 /* Now flag two cases, whether the ranges are disjoint or whether the
4768 second range is totally subsumed in the first. Note that the tests
4769 below are simplified by the ones above. */
4770 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4771 high0, 1, low1, 0));
4772 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4773 high1, 1, high0, 1));
4774
4775 /* We now have four cases, depending on whether we are including or
4776 excluding the two ranges. */
4777 if (in0_p && in1_p)
4778 {
4779 /* If they don't overlap, the result is false. If the second range
4780 is a subset it is the result. Otherwise, the range is from the start
4781 of the second to the end of the first. */
4782 if (no_overlap)
4783 in_p = 0, low = high = 0;
4784 else if (subset)
4785 in_p = 1, low = low1, high = high1;
4786 else
4787 in_p = 1, low = low1, high = high0;
4788 }
4789
4790 else if (in0_p && ! in1_p)
4791 {
4792 /* If they don't overlap, the result is the first range. If they are
4793 equal, the result is false. If the second range is a subset of the
4794 first, and the ranges begin at the same place, we go from just after
4795 the end of the second range to the end of the first. If the second
4796 range is not a subset of the first, or if it is a subset and both
4797 ranges end at the same place, the range starts at the start of the
4798 first range and ends just before the second range.
4799 Otherwise, we can't describe this as a single range. */
4800 if (no_overlap)
4801 in_p = 1, low = low0, high = high0;
4802 else if (lowequal && highequal)
4803 in_p = 0, low = high = 0;
4804 else if (subset && lowequal)
4805 {
4806 low = range_successor (high1);
4807 high = high0;
4808 in_p = 1;
4809 if (low == 0)
4810 {
4811 /* We are in the weird situation where high0 > high1 but
4812 high1 has no successor. Punt. */
4813 return 0;
4814 }
4815 }
4816 else if (! subset || highequal)
4817 {
4818 low = low0;
4819 high = range_predecessor (low1);
4820 in_p = 1;
4821 if (high == 0)
4822 {
4823 /* low0 < low1 but low1 has no predecessor. Punt. */
4824 return 0;
4825 }
4826 }
4827 else
4828 return 0;
4829 }
4830
4831 else if (! in0_p && in1_p)
4832 {
4833 /* If they don't overlap, the result is the second range. If the second
4834 is a subset of the first, the result is false. Otherwise,
4835 the range starts just after the first range and ends at the
4836 end of the second. */
4837 if (no_overlap)
4838 in_p = 1, low = low1, high = high1;
4839 else if (subset || highequal)
4840 in_p = 0, low = high = 0;
4841 else
4842 {
4843 low = range_successor (high0);
4844 high = high1;
4845 in_p = 1;
4846 if (low == 0)
4847 {
4848 /* high1 > high0 but high0 has no successor. Punt. */
4849 return 0;
4850 }
4851 }
4852 }
4853
4854 else
4855 {
4856 /* The case where we are excluding both ranges. Here the complex case
4857 is if they don't overlap. In that case, the only time we have a
4858 range is if they are adjacent. If the second is a subset of the
4859 first, the result is the first. Otherwise, the range to exclude
4860 starts at the beginning of the first range and ends at the end of the
4861 second. */
4862 if (no_overlap)
4863 {
4864 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4865 range_successor (high0),
4866 1, low1, 0)))
4867 in_p = 0, low = low0, high = high1;
4868 else
4869 {
4870 /* Canonicalize - [min, x] into - [-, x]. */
4871 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4872 switch (TREE_CODE (TREE_TYPE (low0)))
4873 {
4874 case ENUMERAL_TYPE:
4875 if (TYPE_PRECISION (TREE_TYPE (low0))
4876 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4877 break;
4878 /* FALLTHROUGH */
4879 case INTEGER_TYPE:
4880 if (tree_int_cst_equal (low0,
4881 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4882 low0 = 0;
4883 break;
4884 case POINTER_TYPE:
4885 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4886 && integer_zerop (low0))
4887 low0 = 0;
4888 break;
4889 default:
4890 break;
4891 }
4892
4893 /* Canonicalize - [x, max] into - [x, -]. */
4894 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4895 switch (TREE_CODE (TREE_TYPE (high1)))
4896 {
4897 case ENUMERAL_TYPE:
4898 if (TYPE_PRECISION (TREE_TYPE (high1))
4899 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4900 break;
4901 /* FALLTHROUGH */
4902 case INTEGER_TYPE:
4903 if (tree_int_cst_equal (high1,
4904 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4905 high1 = 0;
4906 break;
4907 case POINTER_TYPE:
4908 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4909 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4910 high1, 1,
4911 build_int_cst (TREE_TYPE (high1), 1),
4912 1)))
4913 high1 = 0;
4914 break;
4915 default:
4916 break;
4917 }
4918
4919 /* The ranges might also be adjacent across the maximum and
4920 minimum values of the given type. For
4921 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4922 return + [x + 1, y - 1]. */
4923 if (low0 == 0 && high1 == 0)
4924 {
4925 low = range_successor (high0);
4926 high = range_predecessor (low1);
4927 if (low == 0 || high == 0)
4928 return 0;
4929
4930 in_p = 1;
4931 }
4932 else
4933 return 0;
4934 }
4935 }
4936 else if (subset)
4937 in_p = 0, low = low0, high = high0;
4938 else
4939 in_p = 0, low = low0, high = high1;
4940 }
4941
4942 *pin_p = in_p, *plow = low, *phigh = high;
4943 return 1;
4944 }
4945 \f
4946
4947 /* Subroutine of fold, looking inside expressions of the form
4948 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4949 of the COND_EXPR. This function is being used also to optimize
4950 A op B ? C : A, by reversing the comparison first.
4951
4952 Return a folded expression whose code is not a COND_EXPR
4953 anymore, or NULL_TREE if no folding opportunity is found. */
4954
4955 static tree
4956 fold_cond_expr_with_comparison (location_t loc, tree type,
4957 tree arg0, tree arg1, tree arg2)
4958 {
4959 enum tree_code comp_code = TREE_CODE (arg0);
4960 tree arg00 = TREE_OPERAND (arg0, 0);
4961 tree arg01 = TREE_OPERAND (arg0, 1);
4962 tree arg1_type = TREE_TYPE (arg1);
4963 tree tem;
4964
4965 STRIP_NOPS (arg1);
4966 STRIP_NOPS (arg2);
4967
4968 /* If we have A op 0 ? A : -A, consider applying the following
4969 transformations:
4970
4971 A == 0? A : -A same as -A
4972 A != 0? A : -A same as A
4973 A >= 0? A : -A same as abs (A)
4974 A > 0? A : -A same as abs (A)
4975 A <= 0? A : -A same as -abs (A)
4976 A < 0? A : -A same as -abs (A)
4977
4978 None of these transformations work for modes with signed
4979 zeros. If A is +/-0, the first two transformations will
4980 change the sign of the result (from +0 to -0, or vice
4981 versa). The last four will fix the sign of the result,
4982 even though the original expressions could be positive or
4983 negative, depending on the sign of A.
4984
4985 Note that all these transformations are correct if A is
4986 NaN, since the two alternatives (A and -A) are also NaNs. */
4987 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4988 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4989 ? real_zerop (arg01)
4990 : integer_zerop (arg01))
4991 && ((TREE_CODE (arg2) == NEGATE_EXPR
4992 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4993 /* In the case that A is of the form X-Y, '-A' (arg2) may
4994 have already been folded to Y-X, check for that. */
4995 || (TREE_CODE (arg1) == MINUS_EXPR
4996 && TREE_CODE (arg2) == MINUS_EXPR
4997 && operand_equal_p (TREE_OPERAND (arg1, 0),
4998 TREE_OPERAND (arg2, 1), 0)
4999 && operand_equal_p (TREE_OPERAND (arg1, 1),
5000 TREE_OPERAND (arg2, 0), 0))))
5001 switch (comp_code)
5002 {
5003 case EQ_EXPR:
5004 case UNEQ_EXPR:
5005 tem = fold_convert_loc (loc, arg1_type, arg1);
5006 return pedantic_non_lvalue_loc (loc,
5007 fold_convert_loc (loc, type,
5008 negate_expr (tem)));
5009 case NE_EXPR:
5010 case LTGT_EXPR:
5011 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5012 case UNGE_EXPR:
5013 case UNGT_EXPR:
5014 if (flag_trapping_math)
5015 break;
5016 /* Fall through. */
5017 case GE_EXPR:
5018 case GT_EXPR:
5019 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5020 break;
5021 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5022 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5023 case UNLE_EXPR:
5024 case UNLT_EXPR:
5025 if (flag_trapping_math)
5026 break;
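/* Fall through. */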
5027 case LE_EXPR:
5028 case LT_EXPR:
5029 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5030 break;
5031 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5032 return negate_expr (fold_convert_loc (loc, type, tem));
5033 default:
5034 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5035 break;
5036 }
5037
5038 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5039 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5040 both transformations are correct when A is NaN: A != 0
5041 is then true, and A == 0 is false. */
5042
5043 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5044 && integer_zerop (arg01) && integer_zerop (arg2))
5045 {
5046 if (comp_code == NE_EXPR)
5047 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5048 else if (comp_code == EQ_EXPR)
5049 return build_zero_cst (type);
5050 }
5051
5052 /* Try some transformations of A op B ? A : B.
5053
5054 A == B? A : B same as B
5055 A != B? A : B same as A
5056 A >= B? A : B same as max (A, B)
5057 A > B? A : B same as max (B, A)
5058 A <= B? A : B same as min (A, B)
5059 A < B? A : B same as min (B, A)
5060
5061 As above, these transformations don't work in the presence
5062 of signed zeros. For example, if A and B are zeros of
5063 opposite sign, the first two transformations will change
5064 the sign of the result. In the last four, the original
5065 expressions give different results for (A=+0, B=-0) and
5066 (A=-0, B=+0), but the transformed expressions do not.
5067
5068 The first two transformations are correct if either A or B
5069 is a NaN. In the first transformation, the condition will
5070 be false, and B will indeed be chosen. In the case of the
5071 second transformation, the condition A != B will be true,
5072 and A will be chosen.
5073
5074 The conversions to max() and min() are not correct if B is
5075 a number and A is not. The conditions in the original
5076 expressions will be false, so all four give B. The min()
5077 and max() versions would give a NaN instead. */
5078 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5079 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5080 /* Avoid these transformations if the COND_EXPR may be used
5081 as an lvalue in the C++ front-end. PR c++/19199. */
5082 && (in_gimple_form
5083 || VECTOR_TYPE_P (type)
5084 || (! lang_GNU_CXX ()
5085 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5086 || ! maybe_lvalue_p (arg1)
5087 || ! maybe_lvalue_p (arg2)))
5088 {
5089 tree comp_op0 = arg00;
5090 tree comp_op1 = arg01;
5091 tree comp_type = TREE_TYPE (comp_op0);
5092
5093 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5094 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5095 {
5096 comp_type = type;
5097 comp_op0 = arg1;
5098 comp_op1 = arg2;
5099 }
5100
5101 switch (comp_code)
5102 {
5103 case EQ_EXPR:
5104 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5105 case NE_EXPR:
5106 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5107 case LE_EXPR:
5108 case LT_EXPR:
5109 case UNLE_EXPR:
5110 case UNLT_EXPR:
5111 /* In C++ a ?: expression can be an lvalue, so put the
5112 operand which will be used if they are equal first
5113 so that we can convert this back to the
5114 corresponding COND_EXPR. */
5115 if (!HONOR_NANS (arg1))
5116 {
5117 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5118 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5119 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5120 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5121 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5122 comp_op1, comp_op0);
5123 return pedantic_non_lvalue_loc (loc,
5124 fold_convert_loc (loc, type, tem));
5125 }
5126 break;
5127 case GE_EXPR:
5128 case GT_EXPR:
5129 case UNGE_EXPR:
5130 case UNGT_EXPR:
5131 if (!HONOR_NANS (arg1))
5132 {
5133 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5134 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5135 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5136 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5137 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5138 comp_op1, comp_op0);
5139 return pedantic_non_lvalue_loc (loc,
5140 fold_convert_loc (loc, type, tem));
5141 }
5142 break;
5143 case UNEQ_EXPR:
5144 if (!HONOR_NANS (arg1))
5145 return pedantic_non_lvalue_loc (loc,
5146 fold_convert_loc (loc, type, arg2));
5147 break;
5148 case LTGT_EXPR:
5149 if (!HONOR_NANS (arg1))
5150 return pedantic_non_lvalue_loc (loc,
5151 fold_convert_loc (loc, type, arg1));
5152 break;
5153 default:
5154 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5155 break;
5156 }
5157 }
5158
5159 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5160 we might still be able to simplify this. For example,
5161 if C1 is one less or one more than C2, this might have started
5162 out as a MIN or MAX and been transformed by this function.
5163 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5164
5165 if (INTEGRAL_TYPE_P (type)
5166 && TREE_CODE (arg01) == INTEGER_CST
5167 && TREE_CODE (arg2) == INTEGER_CST)
5168 switch (comp_code)
5169 {
5170 case EQ_EXPR:
5171 if (TREE_CODE (arg1) == INTEGER_CST)
5172 break;
5173 /* We can replace A with C1 in this case. */
5174 arg1 = fold_convert_loc (loc, type, arg01);
5175 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5176
5177 case LT_EXPR:
5178 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5179 MIN_EXPR, to preserve the signedness of the comparison. */
5180 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5181 OEP_ONLY_CONST)
5182 && operand_equal_p (arg01,
5183 const_binop (PLUS_EXPR, arg2,
5184 build_int_cst (type, 1)),
5185 OEP_ONLY_CONST))
5186 {
5187 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5188 fold_convert_loc (loc, TREE_TYPE (arg00),
5189 arg2));
5190 return pedantic_non_lvalue_loc (loc,
5191 fold_convert_loc (loc, type, tem));
5192 }
5193 break;
5194
5195 case LE_EXPR:
5196 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5197 as above. */
5198 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5199 OEP_ONLY_CONST)
5200 && operand_equal_p (arg01,
5201 const_binop (MINUS_EXPR, arg2,
5202 build_int_cst (type, 1)),
5203 OEP_ONLY_CONST))
5204 {
5205 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5206 fold_convert_loc (loc, TREE_TYPE (arg00),
5207 arg2));
5208 return pedantic_non_lvalue_loc (loc,
5209 fold_convert_loc (loc, type, tem));
5210 }
5211 break;
5212
5213 case GT_EXPR:
5214 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5215 MAX_EXPR, to preserve the signedness of the comparison. */
5216 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5217 OEP_ONLY_CONST)
5218 && operand_equal_p (arg01,
5219 const_binop (MINUS_EXPR, arg2,
5220 build_int_cst (type, 1)),
5221 OEP_ONLY_CONST))
5222 {
5223 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5224 fold_convert_loc (loc, TREE_TYPE (arg00),
5225 arg2));
5226 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5227 }
5228 break;
5229
5230 case GE_EXPR:
5231 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5232 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5233 OEP_ONLY_CONST)
5234 && operand_equal_p (arg01,
5235 const_binop (PLUS_EXPR, arg2,
5236 build_int_cst (type, 1)),
5237 OEP_ONLY_CONST))
5238 {
5239 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5240 fold_convert_loc (loc, TREE_TYPE (arg00),
5241 arg2));
5242 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5243 }
5244 break;
5245 case NE_EXPR:
5246 break;
5247 default:
5248 gcc_unreachable ();
5249 }
5250
5251 return NULL_TREE;
5252 }
5253
5254
5255 \f
5256 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5257 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5258 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5259 false) >= 2)
5260 #endif
5261
5262 /* OP0 CODE OP1 is some logical combination of boolean tests. See if we
5263 can merge it into some range test of type TYPE. Return the new tree if so. */
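/* For example, "c >= 0 && c <= 9" can be merged into the single range
   test "(unsigned) c <= 9".  When the ranges cannot be merged but both
   sides test the same simple operand, as in "c == 2 || c == 7", the
   short-circuit form may still be rewritten as a non-short-circuit
   TRUTH_OR_EXPR on targets where branches are expensive.  */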
5264
5265 static tree
5266 fold_range_test (location_t loc, enum tree_code code, tree type,
5267 tree op0, tree op1)
5268 {
5269 int or_op = (code == TRUTH_ORIF_EXPR
5270 || code == TRUTH_OR_EXPR);
5271 int in0_p, in1_p, in_p;
5272 tree low0, low1, low, high0, high1, high;
5273 bool strict_overflow_p = false;
5274 tree tem, lhs, rhs;
5275 const char * const warnmsg = G_("assuming signed overflow does not occur "
5276 "when simplifying range test");
5277
5278 if (!INTEGRAL_TYPE_P (type))
5279 return 0;
5280
5281 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5282 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5283
5284 /* If this is an OR operation, invert both sides; we will invert
5285 again at the end. */
5286 if (or_op)
5287 in0_p = ! in0_p, in1_p = ! in1_p;
5288
5289 /* If both expressions are the same, if we can merge the ranges, and we
5290 can build the range test, return it or it inverted. If one of the
5291 ranges is always true or always false, consider it to be the same
5292 expression as the other. */
5293 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5294 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5295 in1_p, low1, high1)
5296 && 0 != (tem = (build_range_check (loc, type,
5297 lhs != 0 ? lhs
5298 : rhs != 0 ? rhs : integer_zero_node,
5299 in_p, low, high))))
5300 {
5301 if (strict_overflow_p)
5302 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5303 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5304 }
5305
5306 /* On machines where the branch cost is expensive, if this is a
5307 short-circuited branch and the underlying object on both sides
5308 is the same, make a non-short-circuit operation. */
5309 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5310 && lhs != 0 && rhs != 0
5311 && (code == TRUTH_ANDIF_EXPR
5312 || code == TRUTH_ORIF_EXPR)
5313 && operand_equal_p (lhs, rhs, 0))
5314 {
5315 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5316 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5317 which cases we can't do this. */
5318 if (simple_operand_p (lhs))
5319 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5320 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5321 type, op0, op1);
5322
5323 else if (!lang_hooks.decls.global_bindings_p ()
5324 && !CONTAINS_PLACEHOLDER_P (lhs))
5325 {
5326 tree common = save_expr (lhs);
5327
5328 if (0 != (lhs = build_range_check (loc, type, common,
5329 or_op ? ! in0_p : in0_p,
5330 low0, high0))
5331 && (0 != (rhs = build_range_check (loc, type, common,
5332 or_op ? ! in1_p : in1_p,
5333 low1, high1))))
5334 {
5335 if (strict_overflow_p)
5336 fold_overflow_warning (warnmsg,
5337 WARN_STRICT_OVERFLOW_COMPARISON);
5338 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5339 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5340 type, lhs, rhs);
5341 }
5342 }
5343 }
5344
5345 return 0;
5346 }
5347 \f
5348 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5349 bit value. Arrange things so the extra bits will be set to zero if and
5350 only if C is sign-extended to its full width. If MASK is nonzero,
5351 it is an INTEGER_CST that should be AND'ed with the extra bits. */
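/* A worked example, assuming a 32-bit mode, P == 4 and no MASK: for
   C == 0xfffffffc (-4 correctly sign-extended from 4 bits), the
   computed extension pattern is 0xfffffff0 and the XOR yields
   0x0000000c, whose extra bits are zero; for the zero-extended
   C == 0x0000000c the XOR instead yields 0xfffffffc, leaving the
   extra bits set.  */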
5352
5353 static tree
5354 unextend (tree c, int p, int unsignedp, tree mask)
5355 {
5356 tree type = TREE_TYPE (c);
5357 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5358 tree temp;
5359
5360 if (p == modesize || unsignedp)
5361 return c;
5362
5363 /* We work by getting just the sign bit into the low-order bit, then
5364 into the high-order bit, then sign-extend. We then XOR that value
5365 with C. */
5366 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5367
5368 /* We must use a signed type in order to get an arithmetic right shift.
5369 However, we must also avoid introducing accidental overflows, so that
5370 a subsequent call to integer_zerop will work. Hence we must
5371 do the type conversion here. At this point, the constant is either
5372 zero or one, and the conversion to a signed type can never overflow.
5373 We could get an overflow if this conversion is done anywhere else. */
5374 if (TYPE_UNSIGNED (type))
5375 temp = fold_convert (signed_type_for (type), temp);
5376
5377 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5378 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5379 if (mask != 0)
5380 temp = const_binop (BIT_AND_EXPR, temp,
5381 fold_convert (TREE_TYPE (c), mask));
5382 /* If necessary, convert the type back to match the type of C. */
5383 if (TYPE_UNSIGNED (type))
5384 temp = fold_convert (type, temp);
5385
5386 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5387 }
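
/* Worked example (illustrative, with MASK zero): in an 8-bit mode with
   P = 4 and C = 0xfa (the 4-bit value 1010 sign-extended to 8 bits),
   the bit at position P - 1 is 1; shifting it to bit 7 gives 0x80, and
   the arithmetic right shift by MODESIZE - P - 1 = 3 spreads it into
   0xf0.  XORing with C yields 0x0a, whose extra bits are zero.  Had C
   not been sign-extended (say C = 0x0a), the same mask would produce
   0xfa, with the extra bits set.  */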
5388 \f
5389 /* For an expression that has the form
5390 (A && B) || ~B
5391 or
5392 (A || B) && ~B,
5393 we can drop one of the inner expressions and simplify to
5394 A || ~B
5395 or
5396 A && ~B
5397 LOC is the location of the resulting expression. OP is the inner
5398 logical operation (the left-hand side in the examples above), while CMPOP
5399 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5400 removing a condition that guards another, as in
5401 (A != NULL && A->...) || A == NULL
5402 which we must not transform. If RHS_ONLY is true, only eliminate the
5403 right-most operand of the inner logical operation. */
5404
5405 static tree
5406 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5407 bool rhs_only)
5408 {
5409 tree type = TREE_TYPE (cmpop);
5410 enum tree_code code = TREE_CODE (cmpop);
5411 enum tree_code truthop_code = TREE_CODE (op);
5412 tree lhs = TREE_OPERAND (op, 0);
5413 tree rhs = TREE_OPERAND (op, 1);
5414 tree orig_lhs = lhs, orig_rhs = rhs;
5415 enum tree_code rhs_code = TREE_CODE (rhs);
5416 enum tree_code lhs_code = TREE_CODE (lhs);
5417 enum tree_code inv_code;
5418
5419 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5420 return NULL_TREE;
5421
5422 if (TREE_CODE_CLASS (code) != tcc_comparison)
5423 return NULL_TREE;
5424
5425 if (rhs_code == truthop_code)
5426 {
5427 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5428 if (newrhs != NULL_TREE)
5429 {
5430 rhs = newrhs;
5431 rhs_code = TREE_CODE (rhs);
5432 }
5433 }
5434 if (lhs_code == truthop_code && !rhs_only)
5435 {
5436 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5437 if (newlhs != NULL_TREE)
5438 {
5439 lhs = newlhs;
5440 lhs_code = TREE_CODE (lhs);
5441 }
5442 }
5443
5444 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5445 if (inv_code == rhs_code
5446 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5447 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5448 return lhs;
5449 if (!rhs_only && inv_code == lhs_code
5450 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5451 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5452 return rhs;
5453 if (rhs != orig_rhs || lhs != orig_lhs)
5454 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5455 lhs, rhs);
5456 return NULL_TREE;
5457 }
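
/* Example of the simplification above (illustrative): with
   OP = (a < b && c != 0) and CMPOP = (a >= b), the inverse of a >= b
   is a < b, which matches the left arm of OP, so
   (a < b && c != 0) || a >= b simplifies to (c != 0) || a >= b.  With
   RHS_ONLY set this match is skipped, since only the right-most arm
   may then be dropped.  */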
5458
5459 /* Find ways of folding logical expressions of LHS and RHS:
5460 Try to merge two comparisons to the same innermost item.
5461 Look for range tests like "ch >= '0' && ch <= '9'".
5462 Look for combinations of simple terms on machines with expensive branches
5463 and evaluate the RHS unconditionally.
5464
5465 For example, if we have p->a == 2 && p->b == 4 and we can make an
5466 object large enough to span both A and B, we can do this with a comparison
5467 against the object ANDed with the a mask.
5468
5469 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5470 operations to do this with one comparison.
5471
5472 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5473 function and the one above.
5474
5475 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5476 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5477
5478 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5479 two operands.
5480
5481 We return the simplified tree or 0 if no optimization is possible. */
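
/* For illustration, given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test p->a == 2 && p->b == 3 can be merged by the function below
   into a single byte load and compare, conceptually

     *(unsigned char *) p == ((3 << 4) | 2)

   on a little-endian target (the bit positions, and hence the merged
   constant, are mirrored on big-endian).  */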
5482
5483 static tree
5484 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5485 tree lhs, tree rhs)
5486 {
5487 /* If this is the "or" of two comparisons, we can do something if
5488 the comparisons are NE_EXPR. If this is the "and", we can do something
5489 if the comparisons are EQ_EXPR. I.e.,
5490 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5491
5492 WANTED_CODE is the comparison code both halves must use. For single bit fields, we can
5493 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5494 comparison for one-bit fields. */
5495
5496 enum tree_code wanted_code;
5497 enum tree_code lcode, rcode;
5498 tree ll_arg, lr_arg, rl_arg, rr_arg;
5499 tree ll_inner, lr_inner, rl_inner, rr_inner;
5500 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5501 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5502 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5503 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5504 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5505 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5506 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5507 machine_mode lnmode, rnmode;
5508 tree ll_mask, lr_mask, rl_mask, rr_mask;
5509 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5510 tree l_const, r_const;
5511 tree lntype, rntype, result;
5512 HOST_WIDE_INT first_bit, end_bit;
5513 int volatilep;
5514
5515 /* Start by getting the comparison codes. Fail if anything is volatile.
5516 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5517 it were surrounded with a NE_EXPR. */
5518
5519 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5520 return 0;
5521
5522 lcode = TREE_CODE (lhs);
5523 rcode = TREE_CODE (rhs);
5524
5525 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5526 {
5527 lhs = build2 (NE_EXPR, truth_type, lhs,
5528 build_int_cst (TREE_TYPE (lhs), 0));
5529 lcode = NE_EXPR;
5530 }
5531
5532 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5533 {
5534 rhs = build2 (NE_EXPR, truth_type, rhs,
5535 build_int_cst (TREE_TYPE (rhs), 0));
5536 rcode = NE_EXPR;
5537 }
5538
5539 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5540 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5541 return 0;
5542
5543 ll_arg = TREE_OPERAND (lhs, 0);
5544 lr_arg = TREE_OPERAND (lhs, 1);
5545 rl_arg = TREE_OPERAND (rhs, 0);
5546 rr_arg = TREE_OPERAND (rhs, 1);
5547
5548 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5549 if (simple_operand_p (ll_arg)
5550 && simple_operand_p (lr_arg))
5551 {
5552 if (operand_equal_p (ll_arg, rl_arg, 0)
5553 && operand_equal_p (lr_arg, rr_arg, 0))
5554 {
5555 result = combine_comparisons (loc, code, lcode, rcode,
5556 truth_type, ll_arg, lr_arg);
5557 if (result)
5558 return result;
5559 }
5560 else if (operand_equal_p (ll_arg, rr_arg, 0)
5561 && operand_equal_p (lr_arg, rl_arg, 0))
5562 {
5563 result = combine_comparisons (loc, code, lcode,
5564 swap_tree_comparison (rcode),
5565 truth_type, ll_arg, lr_arg);
5566 if (result)
5567 return result;
5568 }
5569 }
5570
5571 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5572 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5573
5574 /* If the RHS can be evaluated unconditionally and its operands are
5575 simple, it wins to evaluate the RHS unconditionally on machines
5576 with expensive branches. In this case, this isn't a comparison
5577 that can be merged. */
5578
5579 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5580 false) >= 2
5581 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5582 && simple_operand_p (rl_arg)
5583 && simple_operand_p (rr_arg))
5584 {
5585 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5586 if (code == TRUTH_OR_EXPR
5587 && lcode == NE_EXPR && integer_zerop (lr_arg)
5588 && rcode == NE_EXPR && integer_zerop (rr_arg)
5589 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5590 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5591 return build2_loc (loc, NE_EXPR, truth_type,
5592 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5593 ll_arg, rl_arg),
5594 build_int_cst (TREE_TYPE (ll_arg), 0));
5595
5596 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5597 if (code == TRUTH_AND_EXPR
5598 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5599 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5600 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5601 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5602 return build2_loc (loc, EQ_EXPR, truth_type,
5603 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5604 ll_arg, rl_arg),
5605 build_int_cst (TREE_TYPE (ll_arg), 0));
5606 }
5607
5608 /* See if the comparisons can be merged. Then get all the parameters for
5609 each side. */
5610
5611 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5612 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5613 return 0;
5614
5615 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5616 volatilep = 0;
5617 ll_inner = decode_field_reference (loc, ll_arg,
5618 &ll_bitsize, &ll_bitpos, &ll_mode,
5619 &ll_unsignedp, &ll_reversep, &volatilep,
5620 &ll_mask, &ll_and_mask);
5621 lr_inner = decode_field_reference (loc, lr_arg,
5622 &lr_bitsize, &lr_bitpos, &lr_mode,
5623 &lr_unsignedp, &lr_reversep, &volatilep,
5624 &lr_mask, &lr_and_mask);
5625 rl_inner = decode_field_reference (loc, rl_arg,
5626 &rl_bitsize, &rl_bitpos, &rl_mode,
5627 &rl_unsignedp, &rl_reversep, &volatilep,
5628 &rl_mask, &rl_and_mask);
5629 rr_inner = decode_field_reference (loc, rr_arg,
5630 &rr_bitsize, &rr_bitpos, &rr_mode,
5631 &rr_unsignedp, &rr_reversep, &volatilep,
5632 &rr_mask, &rr_and_mask);
5633
5634 /* The inner operation on the lhs of each comparison must be
5635 the same if we are to be able to do anything.
5636 Then see if we have constants. If not, the same must be true for
5637 the rhs's. */
5638 if (volatilep
5639 || ll_reversep != rl_reversep
5640 || ll_inner == 0 || rl_inner == 0
5641 || ! operand_equal_p (ll_inner, rl_inner, 0))
5642 return 0;
5643
5644 if (TREE_CODE (lr_arg) == INTEGER_CST
5645 && TREE_CODE (rr_arg) == INTEGER_CST)
5646 {
5647 l_const = lr_arg, r_const = rr_arg;
5648 lr_reversep = ll_reversep;
5649 }
5650 else if (lr_reversep != rr_reversep
5651 || lr_inner == 0 || rr_inner == 0
5652 || ! operand_equal_p (lr_inner, rr_inner, 0))
5653 return 0;
5654 else
5655 l_const = r_const = 0;
5656
5657 /* If either comparison code is not correct for our logical operation,
5658 fail. However, we can convert a one-bit comparison against zero into
5659 the opposite comparison against that bit being set in the field. */
5660
5661 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5662 if (lcode != wanted_code)
5663 {
5664 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5665 {
5666 /* Make the left operand unsigned, since we are only interested
5667 in the value of one bit. Otherwise we are doing the wrong
5668 thing below. */
5669 ll_unsignedp = 1;
5670 l_const = ll_mask;
5671 }
5672 else
5673 return 0;
5674 }
5675
5676 /* This is analogous to the code for l_const above. */
5677 if (rcode != wanted_code)
5678 {
5679 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5680 {
5681 rl_unsignedp = 1;
5682 r_const = rl_mask;
5683 }
5684 else
5685 return 0;
5686 }
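
/* For example, when wanted_code is NE_EXPR, a single-bit test such as
   b == 0 (with b a one-bit field, so the mask is a power of two) is
   recast above as the equivalent b != 1, letting both halves share the
   wanted comparison code.  */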
5687
5688 /* See if we can find a mode that contains both fields being compared on
5689 the left. If we can't, fail. Otherwise, update all constants and masks
5690 to be relative to a field of that size. */
5691 first_bit = MIN (ll_bitpos, rl_bitpos);
5692 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5693 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5694 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5695 volatilep);
5696 if (lnmode == VOIDmode)
5697 return 0;
5698
5699 lnbitsize = GET_MODE_BITSIZE (lnmode);
5700 lnbitpos = first_bit & ~ (lnbitsize - 1);
5701 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5702 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5703
5704 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5705 {
5706 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5707 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5708 }
5709
5710 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5711 size_int (xll_bitpos));
5712 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5713 size_int (xrl_bitpos));
5714
5715 if (l_const)
5716 {
5717 l_const = fold_convert_loc (loc, lntype, l_const);
5718 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5719 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5720 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5721 fold_build1_loc (loc, BIT_NOT_EXPR,
5722 lntype, ll_mask))))
5723 {
5724 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5725
5726 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5727 }
5728 }
5729 if (r_const)
5730 {
5731 r_const = fold_convert_loc (loc, lntype, r_const);
5732 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5733 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5734 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5735 fold_build1_loc (loc, BIT_NOT_EXPR,
5736 lntype, rl_mask))))
5737 {
5738 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5739
5740 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5741 }
5742 }
5743
5744 /* If the right sides are not constant, do the same for them. Also,
5745 disallow this optimization if a size or signedness mismatch occurs
5746 between the left and right sides. */
5747 if (l_const == 0)
5748 {
5749 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5750 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5751 /* Make sure the two fields on the right
5752 correspond to the left without being swapped. */
5753 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5754 return 0;
5755
5756 first_bit = MIN (lr_bitpos, rr_bitpos);
5757 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5758 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5759 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5760 volatilep);
5761 if (rnmode == VOIDmode)
5762 return 0;
5763
5764 rnbitsize = GET_MODE_BITSIZE (rnmode);
5765 rnbitpos = first_bit & ~ (rnbitsize - 1);
5766 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5767 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5768
5769 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5770 {
5771 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5772 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5773 }
5774
5775 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5776 rntype, lr_mask),
5777 size_int (xlr_bitpos));
5778 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5779 rntype, rr_mask),
5780 size_int (xrr_bitpos));
5781
5782 /* Make a mask that corresponds to both fields being compared.
5783 Do this for both items being compared. If the operands are the
5784 same size and the bits being compared are in the same position
5785 then we can do this by masking both and comparing the masked
5786 results. */
5787 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5788 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5789 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5790 {
5791 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5792 ll_unsignedp || rl_unsignedp, ll_reversep);
5793 if (! all_ones_mask_p (ll_mask, lnbitsize))
5794 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5795
5796 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5797 lr_unsignedp || rr_unsignedp, lr_reversep);
5798 if (! all_ones_mask_p (lr_mask, rnbitsize))
5799 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5800
5801 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5802 }
5803
5804 /* There is still another way we can do something: If both pairs of
5805 fields being compared are adjacent, we may be able to make a wider
5806 field containing them both.
5807
5808 Note that we still must mask the lhs/rhs expressions. Furthermore,
5809 the mask must be shifted to account for the shift done by
5810 make_bit_field_ref. */
5811 if ((ll_bitsize + ll_bitpos == rl_bitpos
5812 && lr_bitsize + lr_bitpos == rr_bitpos)
5813 || (ll_bitpos == rl_bitpos + rl_bitsize
5814 && lr_bitpos == rr_bitpos + rr_bitsize))
5815 {
5816 tree type;
5817
5818 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5819 ll_bitsize + rl_bitsize,
5820 MIN (ll_bitpos, rl_bitpos),
5821 ll_unsignedp, ll_reversep);
5822 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5823 lr_bitsize + rr_bitsize,
5824 MIN (lr_bitpos, rr_bitpos),
5825 lr_unsignedp, lr_reversep);
5826
5827 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5828 size_int (MIN (xll_bitpos, xrl_bitpos)));
5829 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5830 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5831
5832 /* Convert to the smaller type before masking out unwanted bits. */
5833 type = lntype;
5834 if (lntype != rntype)
5835 {
5836 if (lnbitsize > rnbitsize)
5837 {
5838 lhs = fold_convert_loc (loc, rntype, lhs);
5839 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5840 type = rntype;
5841 }
5842 else if (lnbitsize < rnbitsize)
5843 {
5844 rhs = fold_convert_loc (loc, lntype, rhs);
5845 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5846 type = lntype;
5847 }
5848 }
5849
5850 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5851 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5852
5853 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5854 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5855
5856 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5857 }
5858
5859 return 0;
5860 }
5861
5862 /* Handle the case of comparisons with constants. If there is something in
5863 common between the masks, those bits of the constants must be the same.
5864 If not, the condition is always false. Test for this to avoid generating
5865 incorrect code below. */
5866 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5867 if (! integer_zerop (result)
5868 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5869 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5870 {
5871 if (wanted_code == NE_EXPR)
5872 {
5873 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5874 return constant_boolean_node (true, truth_type);
5875 }
5876 else
5877 {
5878 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5879 return constant_boolean_node (false, truth_type);
5880 }
5881 }
5882
5883 /* Construct the expression we will return. First get the component
5884 reference we will make. Unless the mask is all ones the width of
5885 that field, perform the mask operation. Then compare with the
5886 merged constant. */
5887 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5888 ll_unsignedp || rl_unsignedp, ll_reversep);
5889
5890 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5891 if (! all_ones_mask_p (ll_mask, lnbitsize))
5892 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5893
5894 return build2_loc (loc, wanted_code, truth_type, result,
5895 const_binop (BIT_IOR_EXPR, l_const, r_const));
5896 }
5897 \f
5898 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5899 constant. */
5900
5901 static tree
5902 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5903 tree op0, tree op1)
5904 {
5905 tree arg0 = op0;
5906 enum tree_code op_code;
5907 tree comp_const;
5908 tree minmax_const;
5909 int consts_equal, consts_lt;
5910 tree inner;
5911
5912 STRIP_SIGN_NOPS (arg0);
5913
5914 op_code = TREE_CODE (arg0);
5915 minmax_const = TREE_OPERAND (arg0, 1);
5916 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5917 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5918 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5919 inner = TREE_OPERAND (arg0, 0);
5920
5921 /* If something does not permit us to optimize, return NULL_TREE. */
5922 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5923 || TREE_CODE (comp_const) != INTEGER_CST
5924 || TREE_OVERFLOW (comp_const)
5925 || TREE_CODE (minmax_const) != INTEGER_CST
5926 || TREE_OVERFLOW (minmax_const))
5927 return NULL_TREE;
5928
5929 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5930 and GT_EXPR, doing the rest with recursive calls using logical
5931 simplifications. */
5932 switch (code)
5933 {
5934 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5935 {
5936 tree tem
5937 = optimize_minmax_comparison (loc,
5938 invert_tree_comparison (code, false),
5939 type, op0, op1);
5940 if (tem)
5941 return invert_truthvalue_loc (loc, tem);
5942 return NULL_TREE;
5943 }
5944
5945 case GE_EXPR:
5946 return
5947 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5948 optimize_minmax_comparison
5949 (loc, EQ_EXPR, type, arg0, comp_const),
5950 optimize_minmax_comparison
5951 (loc, GT_EXPR, type, arg0, comp_const));
5952
5953 case EQ_EXPR:
5954 if (op_code == MAX_EXPR && consts_equal)
5955 /* MAX (X, 0) == 0 -> X <= 0 */
5956 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5957
5958 else if (op_code == MAX_EXPR && consts_lt)
5959 /* MAX (X, 0) == 5 -> X == 5 */
5960 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5961
5962 else if (op_code == MAX_EXPR)
5963 /* MAX (X, 0) == -1 -> false */
5964 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5965
5966 else if (consts_equal)
5967 /* MIN (X, 0) == 0 -> X >= 0 */
5968 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5969
5970 else if (consts_lt)
5971 /* MIN (X, 0) == 5 -> false */
5972 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5973
5974 else
5975 /* MIN (X, 0) == -1 -> X == -1 */
5976 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5977
5978 case GT_EXPR:
5979 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5980 /* MAX (X, 0) > 0 -> X > 0
5981 MAX (X, 0) > 5 -> X > 5 */
5982 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5983
5984 else if (op_code == MAX_EXPR)
5985 /* MAX (X, 0) > -1 -> true */
5986 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5987
5988 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5989 /* MIN (X, 0) > 0 -> false
5990 MIN (X, 0) > 5 -> false */
5991 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5992
5993 else
5994 /* MIN (X, 0) > -1 -> X > -1 */
5995 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5996
5997 default:
5998 return NULL_TREE;
5999 }
6000 }
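
/* A sample derivation (illustrative): MAX (X, 2) < 3 is inverted to
   MAX (X, 2) >= 3, which the GE_EXPR case splits into
   MAX (X, 2) == 3 || MAX (X, 2) > 3, i.e. X == 3 || X > 3; inverting
   the result again gives X < 3, which is correct because 2 < 3.  */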
6001 \f
6002 /* T is an integer expression that is being multiplied by, divided by, or
6003 taken modulo a constant C (CODE says which operation and what kind of
6004 divide or modulus). See if we can eliminate that operation by folding
6005 it with other operations already in T. WIDE_TYPE, if non-null, is a type that
6006 should be used for the computation if wider than our type.
6007
6008 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6009 (X * 2) + (Y * 4). We must, however, be assured that either the original
6010 expression would not overflow or that overflow is undefined for the type
6011 in the language in question.
6012
6013 If we return a non-null expression, it is an equivalent form of the
6014 original computation, but need not be in the original type.
6015
6016 We set *STRICT_OVERFLOW_P to true if the return value depends on
6017 signed overflow being undefined. Otherwise we do not change
6018 *STRICT_OVERFLOW_P. */
6019
6020 static tree
6021 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6022 bool *strict_overflow_p)
6023 {
6024 /* To avoid exponential search depth, refuse to allow recursion past
6025 three levels. Beyond that (1) it's highly unlikely that we'll find
6026 something interesting and (2) we've probably processed it before
6027 when we built the inner expression. */
6028
6029 static int depth;
6030 tree ret;
6031
6032 if (depth > 3)
6033 return NULL;
6034
6035 depth++;
6036 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6037 depth--;
6038
6039 return ret;
6040 }
6041
6042 static tree
6043 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6044 bool *strict_overflow_p)
6045 {
6046 tree type = TREE_TYPE (t);
6047 enum tree_code tcode = TREE_CODE (t);
6048 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6049 > GET_MODE_SIZE (TYPE_MODE (type)))
6050 ? wide_type : type);
6051 tree t1, t2;
6052 int same_p = tcode == code;
6053 tree op0 = NULL_TREE, op1 = NULL_TREE;
6054 bool sub_strict_overflow_p;
6055
6056 /* Don't deal with constants of zero here; they confuse the code below. */
6057 if (integer_zerop (c))
6058 return NULL_TREE;
6059
6060 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6061 op0 = TREE_OPERAND (t, 0);
6062
6063 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6064 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6065
6066 /* Note that we need not handle conditional operations here since fold
6067 already handles those cases. So just do arithmetic here. */
6068 switch (tcode)
6069 {
6070 case INTEGER_CST:
6071 /* For a constant, we can always simplify if we are a multiply
6072 or (for divide and modulus) if it is a multiple of our constant. */
6073 if (code == MULT_EXPR
6074 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6075 {
6076 tree tem = const_binop (code, fold_convert (ctype, t),
6077 fold_convert (ctype, c));
6078 /* If the multiplication overflowed to INT_MIN then we lost sign
6079 information on it and a subsequent multiplication might
6080 spuriously overflow. See PR68142. */
6081 if (TREE_OVERFLOW (tem)
6082 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6083 return NULL_TREE;
6084 return tem;
6085 }
6086 break;
6087
6088 CASE_CONVERT: case NON_LVALUE_EXPR:
6089 /* If op0 is an expression ... */
6090 if ((COMPARISON_CLASS_P (op0)
6091 || UNARY_CLASS_P (op0)
6092 || BINARY_CLASS_P (op0)
6093 || VL_EXP_CLASS_P (op0)
6094 || EXPRESSION_CLASS_P (op0))
6095 /* ... and has wrapping overflow, and its type is smaller
6096 than ctype, then we cannot pass through as widening. */
6097 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6098 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6099 && (TYPE_PRECISION (ctype)
6100 > TYPE_PRECISION (TREE_TYPE (op0))))
6101 /* ... or this is a truncation (t is narrower than op0),
6102 then we cannot pass through this narrowing. */
6103 || (TYPE_PRECISION (type)
6104 < TYPE_PRECISION (TREE_TYPE (op0)))
6105 /* ... or signedness changes for division or modulus,
6106 then we cannot pass through this conversion. */
6107 || (code != MULT_EXPR
6108 && (TYPE_UNSIGNED (ctype)
6109 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6110 /* ... or has undefined overflow while the converted to
6111 type has not, we cannot do the operation in the inner type
6112 as that would introduce undefined overflow. */
6113 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6114 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6115 && !TYPE_OVERFLOW_UNDEFINED (type))))
6116 break;
6117
6118 /* Pass the constant down and see if we can make a simplification. If
6119 we can, replace this expression with the inner simplification for
6120 possible later conversion to our or some other type. */
6121 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6122 && TREE_CODE (t2) == INTEGER_CST
6123 && !TREE_OVERFLOW (t2)
6124 && (0 != (t1 = extract_muldiv (op0, t2, code,
6125 code == MULT_EXPR
6126 ? ctype : NULL_TREE,
6127 strict_overflow_p))))
6128 return t1;
6129 break;
6130
6131 case ABS_EXPR:
6132 /* If widening the type changes it from signed to unsigned, then we
6133 must avoid building ABS_EXPR itself as unsigned. */
6134 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6135 {
6136 tree cstype = (*signed_type_for) (ctype);
6137 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6138 != 0)
6139 {
6140 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6141 return fold_convert (ctype, t1);
6142 }
6143 break;
6144 }
6145 /* If the constant is negative, we cannot simplify this. */
6146 if (tree_int_cst_sgn (c) == -1)
6147 break;
6148 /* FALLTHROUGH */
6149 case NEGATE_EXPR:
6150 /* For division and modulus, type can't be unsigned, as e.g.
6151 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6152 For signed types, even with wrapping overflow, this is fine. */
6153 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6154 break;
6155 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6156 != 0)
6157 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6158 break;
6159
6160 case MIN_EXPR: case MAX_EXPR:
6161 /* If widening the type changes the signedness, then we can't perform
6162 this optimization as that changes the result. */
6163 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6164 break;
6165
6166 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6167 sub_strict_overflow_p = false;
6168 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6169 &sub_strict_overflow_p)) != 0
6170 && (t2 = extract_muldiv (op1, c, code, wide_type,
6171 &sub_strict_overflow_p)) != 0)
6172 {
6173 if (tree_int_cst_sgn (c) < 0)
6174 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6175 if (sub_strict_overflow_p)
6176 *strict_overflow_p = true;
6177 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6178 fold_convert (ctype, t2));
6179 }
6180 break;
6181
6182 case LSHIFT_EXPR: case RSHIFT_EXPR:
6183 /* If the second operand is constant, this is a multiplication
6184 or floor division by a power of two, so we can treat it that
6185 way unless the multiplier or divisor overflows. Signed
6186 left-shift overflow is implementation-defined rather than
6187 undefined in C90, so do not convert signed left shift into
6188 multiplication. */
6189 if (TREE_CODE (op1) == INTEGER_CST
6190 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6191 /* const_binop may not detect overflow correctly,
6192 so check for it explicitly here. */
6193 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6194 && 0 != (t1 = fold_convert (ctype,
6195 const_binop (LSHIFT_EXPR,
6196 size_one_node,
6197 op1)))
6198 && !TREE_OVERFLOW (t1))
6199 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6200 ? MULT_EXPR : FLOOR_DIV_EXPR,
6201 ctype,
6202 fold_convert (ctype, op0),
6203 t1),
6204 c, code, wide_type, strict_overflow_p);
6205 break;
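
/* As an illustration, for unsigned X the tree (X << 3) * 4 is
   rewritten above as (X * 8) * 4, and the recursive call then folds
   the constants into X * 32.  A right shift is treated as a
   FLOOR_DIV_EXPR by the corresponding power of two in the same
   way.  */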
6206
6207 case PLUS_EXPR: case MINUS_EXPR:
6208 /* See if we can eliminate the operation on both sides. If we can, we
6209 can return a new PLUS or MINUS. If we can't, the only remaining
6210 cases where we can do anything are if the second operand is a
6211 constant. */
6212 sub_strict_overflow_p = false;
6213 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6214 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6215 if (t1 != 0 && t2 != 0
6216 && (code == MULT_EXPR
6217 /* If not multiplication, we can only do this if both operands
6218 are divisible by c. */
6219 || (multiple_of_p (ctype, op0, c)
6220 && multiple_of_p (ctype, op1, c))))
6221 {
6222 if (sub_strict_overflow_p)
6223 *strict_overflow_p = true;
6224 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6225 fold_convert (ctype, t2));
6226 }
6227
6228 /* If this was a subtraction, negate OP1 and set it to be an addition.
6229 This simplifies the logic below. */
6230 if (tcode == MINUS_EXPR)
6231 {
6232 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6233 /* If OP1 was not easily negatable, the constant may be OP0. */
6234 if (TREE_CODE (op0) == INTEGER_CST)
6235 {
6236 std::swap (op0, op1);
6237 std::swap (t1, t2);
6238 }
6239 }
6240
6241 if (TREE_CODE (op1) != INTEGER_CST)
6242 break;
6243
6244 /* If either OP1 or C are negative, this optimization is not safe for
6245 some of the division and remainder types while for others we need
6246 to change the code. */
6247 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6248 {
6249 if (code == CEIL_DIV_EXPR)
6250 code = FLOOR_DIV_EXPR;
6251 else if (code == FLOOR_DIV_EXPR)
6252 code = CEIL_DIV_EXPR;
6253 else if (code != MULT_EXPR
6254 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6255 break;
6256 }
6257
6258 /* If it's a multiply or a division/modulus operation of a multiple
6259 of our constant, do the operation and verify it doesn't overflow. */
6260 if (code == MULT_EXPR
6261 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6262 {
6263 op1 = const_binop (code, fold_convert (ctype, op1),
6264 fold_convert (ctype, c));
6265 /* We allow the constant to overflow with wrapping semantics. */
6266 if (op1 == 0
6267 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6268 break;
6269 }
6270 else
6271 break;
6272
6273 /* If we have an unsigned type, we cannot widen the operation since it
6274 will change the result if the original computation overflowed. */
6275 if (TYPE_UNSIGNED (ctype) && ctype != type)
6276 break;
6277
6278 /* If we were able to eliminate our operation from the first side,
6279 apply our operation to the second side and reform the PLUS. */
6280 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6281 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6282
6283 /* The last case is if we are a multiply. In that case, we can
6284 apply the distributive law to commute the multiply and addition
6285 if the multiplication of the constants doesn't overflow
6286 and overflow is defined. With undefined overflow
6287 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6288 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6289 return fold_build2 (tcode, ctype,
6290 fold_build2 (code, ctype,
6291 fold_convert (ctype, op0),
6292 fold_convert (ctype, c)),
6293 op1);
6294
6295 break;
6296
6297 case MULT_EXPR:
6298 /* We have a special case here if we are doing something like
6299 (C * 8) % 4 since we know that's zero. */
6300 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6301 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6302 /* If the multiplication can overflow we cannot optimize this. */
6303 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6304 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6305 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6306 {
6307 *strict_overflow_p = true;
6308 return omit_one_operand (type, integer_zero_node, op0);
6309 }
6310
6311 /* ... fall through ... */
6312
6313 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6314 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6315 /* If we can extract our operation from the LHS, do so and return a
6316 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6317 do something only if the second operand is a constant. */
6318 if (same_p
6319 && (t1 = extract_muldiv (op0, c, code, wide_type,
6320 strict_overflow_p)) != 0)
6321 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6322 fold_convert (ctype, op1));
6323 else if (tcode == MULT_EXPR && code == MULT_EXPR
6324 && (t1 = extract_muldiv (op1, c, code, wide_type,
6325 strict_overflow_p)) != 0)
6326 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6327 fold_convert (ctype, t1));
6328 else if (TREE_CODE (op1) != INTEGER_CST)
6329 return 0;
6330
6331 /* If these are the same operation types, we can associate them
6332 assuming no overflow. */
6333 if (tcode == code)
6334 {
6335 bool overflow_p = false;
6336 bool overflow_mul_p;
6337 signop sign = TYPE_SIGN (ctype);
6338 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6339 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6340 if (overflow_mul_p
6341 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6342 overflow_p = true;
6343 if (!overflow_p)
6344 {
6345 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6346 TYPE_SIGN (TREE_TYPE (op1)));
6347 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6348 wide_int_to_tree (ctype, mul));
6349 }
6350 }
6351
6352 /* If these operations "cancel" each other, we have the main
6353 optimizations of this pass, which occur when either constant is a
6354 multiple of the other, in which case we replace this with an
6355 operation of either CODE or TCODE.
6356
6357 If we have an unsigned type, we cannot do this since it will change
6358 the result if the original computation overflowed. */
6359 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6360 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6361 || (tcode == MULT_EXPR
6362 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6363 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6364 && code != MULT_EXPR)))
6365 {
6366 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6367 {
6368 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6369 *strict_overflow_p = true;
6370 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6371 fold_convert (ctype,
6372 const_binop (TRUNC_DIV_EXPR,
6373 op1, c)));
6374 }
6375 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6376 {
6377 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6378 *strict_overflow_p = true;
6379 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6380 fold_convert (ctype,
6381 const_binop (TRUNC_DIV_EXPR,
6382 c, op1)));
6383 }
6384 }
6385 break;
6386
6387 default:
6388 break;
6389 }
6390
6391 return 0;
6392 }
6393 \f
6394 /* Return a node which has the indicated constant VALUE (either 0 or
6395 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6396 and is of the indicated TYPE. */
6397
6398 tree
6399 constant_boolean_node (bool value, tree type)
6400 {
6401 if (type == integer_type_node)
6402 return value ? integer_one_node : integer_zero_node;
6403 else if (type == boolean_type_node)
6404 return value ? boolean_true_node : boolean_false_node;
6405 else if (TREE_CODE (type) == VECTOR_TYPE)
6406 return build_vector_from_val (type,
6407 build_int_cst (TREE_TYPE (type),
6408 value ? -1 : 0));
6409 else
6410 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6411 }
6412
6413
6414 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6415 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6416 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6417 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6418 COND is the first argument to CODE; otherwise (as in the example
6419 given here), it is the second argument. TYPE is the type of the
6420 original expression. Return NULL_TREE if no simplification is
6421 possible. */
6422
6423 static tree
6424 fold_binary_op_with_conditional_arg (location_t loc,
6425 enum tree_code code,
6426 tree type, tree op0, tree op1,
6427 tree cond, tree arg, int cond_first_p)
6428 {
6429 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6430 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6431 tree test, true_value, false_value;
6432 tree lhs = NULL_TREE;
6433 tree rhs = NULL_TREE;
6434 enum tree_code cond_code = COND_EXPR;
6435
6436 if (TREE_CODE (cond) == COND_EXPR
6437 || TREE_CODE (cond) == VEC_COND_EXPR)
6438 {
6439 test = TREE_OPERAND (cond, 0);
6440 true_value = TREE_OPERAND (cond, 1);
6441 false_value = TREE_OPERAND (cond, 2);
6442 /* If this operand is an expression that throws, it does not make
6443 sense to try to perform a logical or arithmetic operation
6444 involving it. */
6445 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6446 lhs = true_value;
6447 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6448 rhs = false_value;
6449 }
6450 else
6451 {
6452 tree testtype = TREE_TYPE (cond);
6453 test = cond;
6454 true_value = constant_boolean_node (true, testtype);
6455 false_value = constant_boolean_node (false, testtype);
6456 }
6457
6458 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6459 cond_code = VEC_COND_EXPR;
6460
6461 /* This transformation is only worthwhile if we don't have to wrap ARG
6462 in a SAVE_EXPR and the operation can be simplified without recursing
6463 on at least one of the branches once it's pushed inside the COND_EXPR. */
6464 if (!TREE_CONSTANT (arg)
6465 && (TREE_SIDE_EFFECTS (arg)
6466 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6467 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6468 return NULL_TREE;
6469
6470 arg = fold_convert_loc (loc, arg_type, arg);
6471 if (lhs == 0)
6472 {
6473 true_value = fold_convert_loc (loc, cond_type, true_value);
6474 if (cond_first_p)
6475 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6476 else
6477 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6478 }
6479 if (rhs == 0)
6480 {
6481 false_value = fold_convert_loc (loc, cond_type, false_value);
6482 if (cond_first_p)
6483 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6484 else
6485 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6486 }
6487
6488 /* Check that we have simplified at least one of the branches. */
6489 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6490 return NULL_TREE;
6491
6492 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6493 }
6494
6495 \f
6496 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6497
6498 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6499 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6500 ADDEND is the same as X.
6501
6502 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6503 and finite. The problematic cases are when X is zero, and its mode
6504 has signed zeros. In the case of rounding towards -infinity,
6505 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6506 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6507
6508 bool
6509 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6510 {
6511 if (!real_zerop (addend))
6512 return false;
6513
6514 /* Don't allow the fold with -fsignaling-nans. */
6515 if (HONOR_SNANS (element_mode (type)))
6516 return false;
6517
6518 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6519 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6520 return true;
6521
6522 /* In a vector or complex, we would need to check the sign of all zeros. */
6523 if (TREE_CODE (addend) != REAL_CST)
6524 return false;
6525
6526 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6527 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6528 negate = !negate;
6529
6530 /* The mode has signed zeros, and we have to honor their sign.
6531 In this situation, there is only one case we can return true for.
6532 X - 0 is the same as X unless rounding towards -infinity is
6533 supported. */
6534 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6535 }
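
/* Concrete cases behind the rules above (illustrative): under
   round-to-nearest, (-0.0) + 0.0 is +0.0, so X + 0.0 cannot be folded
   to X when signed zeros are honored; (-0.0) - 0.0 is -0.0 and
   (+0.0) - 0.0 is +0.0, so X - 0.0 is safe unless the rounding mode
   can flip the sign of a zero, e.g. 0.0 - 0.0 yields -0.0 when
   rounding towards -infinity.  */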
6536
6537 /* Subroutine of fold() that optimizes comparisons of a division by
6538 a nonzero integer constant against an integer constant, i.e.
6539 X/C1 op C2.
6540
6541 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6542 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6543 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6544
6545 The function returns the constant folded tree if a simplification
6546 can be made, and NULL_TREE otherwise. */
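
/* A worked instance (illustrative): for signed X, X / 4 == 3 gives
   PROD = 12, TMP = 3, LO = 12 and HI = 15, so the EQ_EXPR case below
   returns the range check 12 <= X && X <= 15, exactly the set of X
   for which truncating division by 4 yields 3.  */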
6547
6548 static tree
6549 fold_div_compare (location_t loc,
6550 enum tree_code code, tree type, tree arg0, tree arg1)
6551 {
6552 tree prod, tmp, hi, lo;
6553 tree arg00 = TREE_OPERAND (arg0, 0);
6554 tree arg01 = TREE_OPERAND (arg0, 1);
6555 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6556 bool neg_overflow = false;
6557 bool overflow;
6558
6559 /* We have to do this the hard way to detect unsigned overflow.
6560 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6561 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6562 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6563 neg_overflow = false;
6564
6565 if (sign == UNSIGNED)
6566 {
6567 tmp = int_const_binop (MINUS_EXPR, arg01,
6568 build_int_cst (TREE_TYPE (arg01), 1));
6569 lo = prod;
6570
6571 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6572 val = wi::add (prod, tmp, sign, &overflow);
6573 hi = force_fit_type (TREE_TYPE (arg00), val,
6574 -1, overflow | TREE_OVERFLOW (prod));
6575 }
6576 else if (tree_int_cst_sgn (arg01) >= 0)
6577 {
6578 tmp = int_const_binop (MINUS_EXPR, arg01,
6579 build_int_cst (TREE_TYPE (arg01), 1));
6580 switch (tree_int_cst_sgn (arg1))
6581 {
6582 case -1:
6583 neg_overflow = true;
6584 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6585 hi = prod;
6586 break;
6587
6588 case 0:
6589 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6590 hi = tmp;
6591 break;
6592
6593 case 1:
6594 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6595 lo = prod;
6596 break;
6597
6598 default:
6599 gcc_unreachable ();
6600 }
6601 }
6602 else
6603 {
6604 /* A negative divisor reverses the relational operators. */
6605 code = swap_tree_comparison (code);
6606
6607 tmp = int_const_binop (PLUS_EXPR, arg01,
6608 build_int_cst (TREE_TYPE (arg01), 1));
6609 switch (tree_int_cst_sgn (arg1))
6610 {
6611 case -1:
6612 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6613 lo = prod;
6614 break;
6615
6616 case 0:
6617 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6618 lo = tmp;
6619 break;
6620
6621 case 1:
6622 neg_overflow = true;
6623 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6624 hi = prod;
6625 break;
6626
6627 default:
6628 gcc_unreachable ();
6629 }
6630 }
6631
6632 switch (code)
6633 {
6634 case EQ_EXPR:
6635 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6636 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6637 if (TREE_OVERFLOW (hi))
6638 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6639 if (TREE_OVERFLOW (lo))
6640 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6641 return build_range_check (loc, type, arg00, 1, lo, hi);
6642
6643 case NE_EXPR:
6644 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6645 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6646 if (TREE_OVERFLOW (hi))
6647 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6648 if (TREE_OVERFLOW (lo))
6649 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6650 return build_range_check (loc, type, arg00, 0, lo, hi);
6651
6652 case LT_EXPR:
6653 if (TREE_OVERFLOW (lo))
6654 {
6655 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6656 return omit_one_operand_loc (loc, type, tmp, arg00);
6657 }
6658 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6659
6660 case LE_EXPR:
6661 if (TREE_OVERFLOW (hi))
6662 {
6663 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6664 return omit_one_operand_loc (loc, type, tmp, arg00);
6665 }
6666 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6667
6668 case GT_EXPR:
6669 if (TREE_OVERFLOW (hi))
6670 {
6671 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6672 return omit_one_operand_loc (loc, type, tmp, arg00);
6673 }
6674 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6675
6676 case GE_EXPR:
6677 if (TREE_OVERFLOW (lo))
6678 {
6679 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6680 return omit_one_operand_loc (loc, type, tmp, arg00);
6681 }
6682 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6683
6684 default:
6685 break;
6686 }
6687
6688 return NULL_TREE;
6689 }
6690
6691
6692 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6693 equality/inequality test, then return a simplified form of the test
6694 using a sign test. Otherwise return NULL. TYPE is the desired
6695 result type. */
6696
6697 static tree
6698 fold_single_bit_test_into_sign_test (location_t loc,
6699 enum tree_code code, tree arg0, tree arg1,
6700 tree result_type)
6701 {
6702 /* If this is testing a single bit, we can optimize the test. */
6703 if ((code == NE_EXPR || code == EQ_EXPR)
6704 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6705 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6706 {
6707 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6708 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6709 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6710
6711 if (arg00 != NULL_TREE
6712 /* This is only a win if casting to a signed type is cheap,
6713 i.e. when arg00's type is not a partial mode. */
6714 && TYPE_PRECISION (TREE_TYPE (arg00))
6715 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6716 {
6717 tree stype = signed_type_for (TREE_TYPE (arg00));
6718 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6719 result_type,
6720 fold_convert_loc (loc, stype, arg00),
6721 build_int_cst (stype, 0));
6722 }
6723 }
6724
6725 return NULL_TREE;
6726 }
6727
6728 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6729 equality/inequality test, then return a simplified form of
6730 the test using shifts and logical operations. Otherwise return
6731 NULL. TYPE is the desired result type. */
6732
6733 tree
6734 fold_single_bit_test (location_t loc, enum tree_code code,
6735 tree arg0, tree arg1, tree result_type)
6736 {
6737 /* If this is testing a single bit, we can optimize the test. */
6738 if ((code == NE_EXPR || code == EQ_EXPR)
6739 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6740 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6741 {
6742 tree inner = TREE_OPERAND (arg0, 0);
6743 tree type = TREE_TYPE (arg0);
6744 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6745 machine_mode operand_mode = TYPE_MODE (type);
6746 int ops_unsigned;
6747 tree signed_type, unsigned_type, intermediate_type;
6748 tree tem, one;
6749
6750 /* First, see if we can fold the single bit test into a sign-bit
6751 test. */
6752 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6753 result_type);
6754 if (tem)
6755 return tem;
6756
6757 /* Otherwise we have (A & C) != 0 where C is a single bit,
6758 convert that into ((A >> C2) & 1), where C2 = log2(C).
6759 Similarly for (A & C) == 0. */
6760
6761 /* If INNER is a right shift by a constant and it plus BITNUM does
6762 not overflow, adjust BITNUM and INNER. */
6763 if (TREE_CODE (inner) == RSHIFT_EXPR
6764 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6765 && bitnum < TYPE_PRECISION (type)
6766 && wi::ltu_p (TREE_OPERAND (inner, 1),
6767 TYPE_PRECISION (type) - bitnum))
6768 {
6769 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6770 inner = TREE_OPERAND (inner, 0);
6771 }
6772
6773 /* If we are going to be able to omit the AND below, we must do our
6774 operations as unsigned. If we must use the AND, we have a choice.
6775 Normally unsigned is faster, but for some machines signed is. */
6776 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6777 && !flag_syntax_only) ? 0 : 1;
6778
6779 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6780 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6781 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6782 inner = fold_convert_loc (loc, intermediate_type, inner);
6783
6784 if (bitnum != 0)
6785 inner = build2 (RSHIFT_EXPR, intermediate_type,
6786 inner, size_int (bitnum));
6787
6788 one = build_int_cst (intermediate_type, 1);
6789
6790 if (code == EQ_EXPR)
6791 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6792
6793 /* Put the AND last so it can combine with more things. */
6794 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6795
6796 /* Make sure to return the proper type. */
6797 inner = fold_convert_loc (loc, result_type, inner);
6798
6799 return inner;
6800 }
6801 return NULL_TREE;
6802 }
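
/* Example (illustrative): ((x >> 2) & 8) != 0 tests bit 3 of x >> 2,
   i.e. bit 5 of x, so the shift adjustment above sets BITNUM to 5 and
   INNER to x, and the final result is conceptually ((x >> 5) & 1)
   computed in an unsigned intermediate type.  */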
6803
6804 /* Check whether we are allowed to reorder operands arg0 and arg1,
6805 such that the evaluation of arg1 occurs before arg0. */
6806
6807 static bool
6808 reorder_operands_p (const_tree arg0, const_tree arg1)
6809 {
6810 if (! flag_evaluation_order)
6811 return true;
6812 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6813 return true;
6814 return ! TREE_SIDE_EFFECTS (arg0)
6815 && ! TREE_SIDE_EFFECTS (arg1);
6816 }
6817
6818 /* Test whether it is preferable to swap two operands, ARG0 and
6819 ARG1, for example because ARG0 is an integer constant and ARG1
6820 isn't. If REORDER is true, only recommend swapping if we can
6821 evaluate the operands in reverse order. */
6822
6823 bool
6824 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6825 {
6826 if (CONSTANT_CLASS_P (arg1))
6827 return 0;
6828 if (CONSTANT_CLASS_P (arg0))
6829 return 1;
6830
6831 STRIP_NOPS (arg0);
6832 STRIP_NOPS (arg1);
6833
6834 if (TREE_CONSTANT (arg1))
6835 return 0;
6836 if (TREE_CONSTANT (arg0))
6837 return 1;
6838
6839 if (reorder && flag_evaluation_order
6840 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6841 return 0;
6842
6843 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6844 for commutative and comparison operators. Ensuring a canonical
6845 form allows the optimizers to find additional redundancies without
6846 having to explicitly check for both orderings. */
6847 if (TREE_CODE (arg0) == SSA_NAME
6848 && TREE_CODE (arg1) == SSA_NAME
6849 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6850 return 1;
6851
6852 /* Put SSA_NAMEs last. */
6853 if (TREE_CODE (arg1) == SSA_NAME)
6854 return 0;
6855 if (TREE_CODE (arg0) == SSA_NAME)
6856 return 1;
6857
6858 /* Put variables last. */
6859 if (DECL_P (arg1))
6860 return 0;
6861 if (DECL_P (arg0))
6862 return 1;
6863
6864 return 0;
6865 }
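
/* For instance (illustrative), 3 + b is reported as needing a swap, so
   callers canonicalize it to b + 3, while b + 3 is left alone.
   Likewise, of two SSA names the one with the lower version number
   ends up first, giving equivalent expressions a single canonical
   form.  */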
6866
6867
6868 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6869 means A >= Y && A != MAX, but in this case we know that
6870 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6871
6872 static tree
6873 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6874 {
6875 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6876
6877 if (TREE_CODE (bound) == LT_EXPR)
6878 a = TREE_OPERAND (bound, 0);
6879 else if (TREE_CODE (bound) == GT_EXPR)
6880 a = TREE_OPERAND (bound, 1);
6881 else
6882 return NULL_TREE;
6883
6884 typea = TREE_TYPE (a);
6885 if (!INTEGRAL_TYPE_P (typea)
6886 && !POINTER_TYPE_P (typea))
6887 return NULL_TREE;
6888
6889 if (TREE_CODE (ineq) == LT_EXPR)
6890 {
6891 a1 = TREE_OPERAND (ineq, 1);
6892 y = TREE_OPERAND (ineq, 0);
6893 }
6894 else if (TREE_CODE (ineq) == GT_EXPR)
6895 {
6896 a1 = TREE_OPERAND (ineq, 0);
6897 y = TREE_OPERAND (ineq, 1);
6898 }
6899 else
6900 return NULL_TREE;
6901
6902 if (TREE_TYPE (a1) != typea)
6903 return NULL_TREE;
6904
6905 if (POINTER_TYPE_P (typea))
6906 {
6907 /* Convert the pointer types into integer before taking the difference. */
6908 tree ta = fold_convert_loc (loc, ssizetype, a);
6909 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6910 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6911 }
6912 else
6913 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6914
6915 if (!diff || !integer_onep (diff))
6916 return NULL_TREE;
6917
6918 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6919 }
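
/* Pointer example (illustrative): with BOUND = (p < end) and
   INEQ = (p + 1 > q), A is p, A1 is p + 1 and Y is q; the difference
   A1 - A folds to 1, so the function returns p >= q and the whole
   test becomes p < end && p >= q.  */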
6920
6921 /* Fold a sum or difference of at least one multiplication.
6922 Returns the folded tree or NULL if no simplification could be made. */
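
/* Two illustrative folds handled below: i * 4 + j * 4 has identical
   multiplicands and becomes (i + j) * 4, while i * 12 + j * 4 has
   none, but 4 is a power of two dividing 12, so it becomes
   (i * 3 + j) * 4.  */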
6923
6924 static tree
6925 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6926 tree arg0, tree arg1)
6927 {
6928 tree arg00, arg01, arg10, arg11;
6929 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6930
6931 /* (A * C) +- (B * C) -> (A+-B) * C.
6932 (A * C) +- A -> A * (C+-1).
6933 We are most concerned about the case where C is a constant,
6934 but other combinations show up during loop reduction. Since
6935 it is not difficult, try all four possibilities. */
6936
6937 if (TREE_CODE (arg0) == MULT_EXPR)
6938 {
6939 arg00 = TREE_OPERAND (arg0, 0);
6940 arg01 = TREE_OPERAND (arg0, 1);
6941 }
6942 else if (TREE_CODE (arg0) == INTEGER_CST)
6943 {
6944 arg00 = build_one_cst (type);
6945 arg01 = arg0;
6946 }
6947 else
6948 {
6949 /* We cannot generate constant 1 for fract. */
6950 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6951 return NULL_TREE;
6952 arg00 = arg0;
6953 arg01 = build_one_cst (type);
6954 }
6955 if (TREE_CODE (arg1) == MULT_EXPR)
6956 {
6957 arg10 = TREE_OPERAND (arg1, 0);
6958 arg11 = TREE_OPERAND (arg1, 1);
6959 }
6960 else if (TREE_CODE (arg1) == INTEGER_CST)
6961 {
6962 arg10 = build_one_cst (type);
6963 	      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6964 the purpose of this canonicalization. */
6965 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6966 && negate_expr_p (arg1)
6967 && code == PLUS_EXPR)
6968 {
6969 arg11 = negate_expr (arg1);
6970 code = MINUS_EXPR;
6971 }
6972 else
6973 arg11 = arg1;
6974 }
6975 else
6976 {
6977 /* We cannot generate constant 1 for fract. */
6978 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6979 return NULL_TREE;
6980 arg10 = arg1;
6981 arg11 = build_one_cst (type);
6982 }
6983 same = NULL_TREE;
6984
6985 if (operand_equal_p (arg01, arg11, 0))
6986 same = arg01, alt0 = arg00, alt1 = arg10;
6987 else if (operand_equal_p (arg00, arg10, 0))
6988 same = arg00, alt0 = arg01, alt1 = arg11;
6989 else if (operand_equal_p (arg00, arg11, 0))
6990 same = arg00, alt0 = arg01, alt1 = arg10;
6991 else if (operand_equal_p (arg01, arg10, 0))
6992 same = arg01, alt0 = arg00, alt1 = arg11;
6993
6994 /* No identical multiplicands; see if we can find a common
6995 power-of-two factor in non-power-of-two multiplies. This
6996 can help in multi-dimensional array access. */
6997 else if (tree_fits_shwi_p (arg01)
6998 && tree_fits_shwi_p (arg11))
6999 {
7000 HOST_WIDE_INT int01, int11, tmp;
7001 bool swap = false;
7002 tree maybe_same;
7003 int01 = tree_to_shwi (arg01);
7004 int11 = tree_to_shwi (arg11);
7005
7006 /* Move min of absolute values to int11. */
7007 if (absu_hwi (int01) < absu_hwi (int11))
7008 {
7009 tmp = int01, int01 = int11, int11 = tmp;
7010 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7011 maybe_same = arg01;
7012 swap = true;
7013 }
7014 else
7015 maybe_same = arg11;
7016
7017 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7018 /* The remainder should not be a constant, otherwise we
7019 	         end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7020 	         increase the number of multiplications needed.  */
7021 && TREE_CODE (arg10) != INTEGER_CST)
7022 {
7023 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7024 build_int_cst (TREE_TYPE (arg00),
7025 int01 / int11));
7026 alt1 = arg10;
7027 same = maybe_same;
7028 if (swap)
7029 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7030 }
7031 }
7032
7033 if (same)
7034 return fold_build2_loc (loc, MULT_EXPR, type,
7035 fold_build2_loc (loc, code, type,
7036 fold_convert_loc (loc, type, alt0),
7037 fold_convert_loc (loc, type, alt1)),
7038 fold_convert_loc (loc, type, same));
7039
7040 return NULL_TREE;
7041 }
7042
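/* Worked examples (illustrative):

     a * c + b * c   ->  (a + b) * c       identical multiplicands
     i * 4 + i       ->  i * 5             the (A * C) +- A case
     i * 12 + j * 4  ->  (i * 3 + j) * 4   common power-of-two factor

   The last form is the multi-dimensional array access case mentioned
   above: the smaller constant 4 divides the larger one, so the factor
   of 4 is kept on the outside where address computations can reuse
   it.  */
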
7043 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7044 specified by EXPR into the buffer PTR of length LEN bytes.
7045 Return the number of bytes placed in the buffer, or zero
7046 upon failure. */
7047
7048 static int
7049 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7050 {
7051 tree type = TREE_TYPE (expr);
7052 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7053 int byte, offset, word, words;
7054 unsigned char value;
7055
7056 if ((off == -1 && total_bytes > len)
7057 || off >= total_bytes)
7058 return 0;
7059 if (off == -1)
7060 off = 0;
7061 words = total_bytes / UNITS_PER_WORD;
7062
7063 for (byte = 0; byte < total_bytes; byte++)
7064 {
7065 int bitpos = byte * BITS_PER_UNIT;
7066 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7067 number of bytes. */
7068 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7069
7070 if (total_bytes > UNITS_PER_WORD)
7071 {
7072 word = byte / UNITS_PER_WORD;
7073 if (WORDS_BIG_ENDIAN)
7074 word = (words - 1) - word;
7075 offset = word * UNITS_PER_WORD;
7076 if (BYTES_BIG_ENDIAN)
7077 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7078 else
7079 offset += byte % UNITS_PER_WORD;
7080 }
7081 else
7082 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7083 if (offset >= off
7084 && offset - off < len)
7085 ptr[offset - off] = value;
7086 }
7087 return MIN (len, total_bytes - off);
7088 }
7089
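/* Example (illustrative, assuming UNITS_PER_WORD is at least 4):
   encoding the 32-bit INTEGER_CST 0x01020304 stores the bytes
   04 03 02 01 at PTR on a little-endian target and 01 02 03 04 on a
   big-endian one.  With OFF = 2 and LEN = 2 on the little-endian
   target, only the bytes 02 01 are stored and 2 is returned.  */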
7090
7091 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7092 specified by EXPR into the buffer PTR of length LEN bytes.
7093 Return the number of bytes placed in the buffer, or zero
7094 upon failure. */
7095
7096 static int
7097 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7098 {
7099 tree type = TREE_TYPE (expr);
7100 machine_mode mode = TYPE_MODE (type);
7101 int total_bytes = GET_MODE_SIZE (mode);
7102 FIXED_VALUE_TYPE value;
7103 tree i_value, i_type;
7104
7105 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7106 return 0;
7107
7108 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7109
7110 if (NULL_TREE == i_type
7111 	      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7112 return 0;
7113
7114 value = TREE_FIXED_CST (expr);
7115 i_value = double_int_to_tree (i_type, value.data);
7116
7117 return native_encode_int (i_value, ptr, len, off);
7118 }
7119
7120
7121 /* Subroutine of native_encode_expr. Encode the REAL_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7124 upon failure. */
7125
7126 static int
7127 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7128 {
7129 tree type = TREE_TYPE (expr);
7130 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7131 int byte, offset, word, words, bitpos;
7132 unsigned char value;
7133
7134 /* There are always 32 bits in each long, no matter the size of
7135 	     the host's long.  We handle floating point representations with
7136 up to 192 bits. */
7137 long tmp[6];
7138
7139 if ((off == -1 && total_bytes > len)
7140 || off >= total_bytes)
7141 return 0;
7142 if (off == -1)
7143 off = 0;
7144 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7145
7146 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7147
7148 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7149 bitpos += BITS_PER_UNIT)
7150 {
7151 byte = (bitpos / BITS_PER_UNIT) & 3;
7152 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7153
7154 if (UNITS_PER_WORD < 4)
7155 {
7156 word = byte / UNITS_PER_WORD;
7157 if (WORDS_BIG_ENDIAN)
7158 word = (words - 1) - word;
7159 offset = word * UNITS_PER_WORD;
7160 if (BYTES_BIG_ENDIAN)
7161 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7162 else
7163 offset += byte % UNITS_PER_WORD;
7164 }
7165 else
7166 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7167 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7168 if (offset >= off
7169 && offset - off < len)
7170 ptr[offset - off] = value;
7171 }
7172 return MIN (len, total_bytes - off);
7173 }
7174
7175 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7176 specified by EXPR into the buffer PTR of length LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero
7178 upon failure. */
7179
7180 static int
7181 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7182 {
7183 int rsize, isize;
7184 tree part;
7185
7186 part = TREE_REALPART (expr);
7187 rsize = native_encode_expr (part, ptr, len, off);
7188 if (off == -1
7189 && rsize == 0)
7190 return 0;
7191 part = TREE_IMAGPART (expr);
7192 if (off != -1)
7193 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7194 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7195 if (off == -1
7196 && isize != rsize)
7197 return 0;
7198 return rsize + isize;
7199 }
7200
7201
7202 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7203 specified by EXPR into the buffer PTR of length LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero
7205 upon failure. */
7206
7207 static int
7208 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7209 {
7210 unsigned i, count;
7211 int size, offset;
7212 tree itype, elem;
7213
7214 offset = 0;
7215 count = VECTOR_CST_NELTS (expr);
7216 itype = TREE_TYPE (TREE_TYPE (expr));
7217 size = GET_MODE_SIZE (TYPE_MODE (itype));
7218 for (i = 0; i < count; i++)
7219 {
7220 if (off >= size)
7221 {
7222 off -= size;
7223 continue;
7224 }
7225 elem = VECTOR_CST_ELT (expr, i);
7226 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7227 if ((off == -1 && res != size)
7228 || res == 0)
7229 return 0;
7230 offset += res;
7231 if (offset >= len)
7232 return offset;
7233 if (off != -1)
7234 off = 0;
7235 }
7236 return offset;
7237 }
7238
7239
7240 /* Subroutine of native_encode_expr. Encode the STRING_CST
7241 specified by EXPR into the buffer PTR of length LEN bytes.
7242 Return the number of bytes placed in the buffer, or zero
7243 upon failure. */
7244
7245 static int
7246 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7247 {
7248 tree type = TREE_TYPE (expr);
7249 HOST_WIDE_INT total_bytes;
7250
7251 if (TREE_CODE (type) != ARRAY_TYPE
7252 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7253 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7254 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7255 return 0;
7256 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7257 if ((off == -1 && total_bytes > len)
7258 || off >= total_bytes)
7259 return 0;
7260 if (off == -1)
7261 off = 0;
7262 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7263 {
7264 int written = 0;
7265 if (off < TREE_STRING_LENGTH (expr))
7266 {
7267 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7268 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7269 }
7270 memset (ptr + written, 0,
7271 MIN (total_bytes - written, len - written));
7272 }
7273 else
7274 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7275 return MIN (total_bytes - off, len);
7276 }
7277
7278
7279 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7280 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7281 buffer PTR of length LEN bytes. If OFF is not -1 then start
7282 the encoding at byte offset OFF and encode at most LEN bytes.
7283 Return the number of bytes placed in the buffer, or zero upon failure. */
7284
7285 int
7286 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7287 {
7288 	  /* We don't support starting at a negative offset, and -1 is special.  */
7289 if (off < -1)
7290 return 0;
7291
7292 switch (TREE_CODE (expr))
7293 {
7294 case INTEGER_CST:
7295 return native_encode_int (expr, ptr, len, off);
7296
7297 case REAL_CST:
7298 return native_encode_real (expr, ptr, len, off);
7299
7300 case FIXED_CST:
7301 return native_encode_fixed (expr, ptr, len, off);
7302
7303 case COMPLEX_CST:
7304 return native_encode_complex (expr, ptr, len, off);
7305
7306 case VECTOR_CST:
7307 return native_encode_vector (expr, ptr, len, off);
7308
7309 case STRING_CST:
7310 return native_encode_string (expr, ptr, len, off);
7311
7312 default:
7313 return 0;
7314 }
7315 }
7316
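/* Usage sketch (illustrative): callers that need the target byte
   image of a constant do something like

     unsigned char buf[16];
     int len = native_encode_expr (expr, buf, sizeof buf, -1);
     if (len == 0)
       return NULL_TREE;

   A zero return must always be handled: encoding fails for
   unsupported tree codes, for constants larger than LEN bytes when
   OFF is -1, and for out-of-range offsets.  */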
7317
7318 /* Subroutine of native_interpret_expr. Interpret the contents of
7319 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7320 If the buffer cannot be interpreted, return NULL_TREE. */
7321
7322 static tree
7323 native_interpret_int (tree type, const unsigned char *ptr, int len)
7324 {
7325 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7326
7327 if (total_bytes > len
7328 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7329 return NULL_TREE;
7330
7331 wide_int result = wi::from_buffer (ptr, total_bytes);
7332
7333 return wide_int_to_tree (type, result);
7334 }
7335
7336
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7340
7341 static tree
7342 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7343 {
7344 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7345 double_int result;
7346 FIXED_VALUE_TYPE fixed_value;
7347
7348 if (total_bytes > len
7349 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7350 return NULL_TREE;
7351
7352 result = double_int::from_buffer (ptr, total_bytes);
7353 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7354
7355 return build_fixed (type, fixed_value);
7356 }
7357
7358
7359 /* Subroutine of native_interpret_expr. Interpret the contents of
7360 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7361 If the buffer cannot be interpreted, return NULL_TREE. */
7362
7363 static tree
7364 native_interpret_real (tree type, const unsigned char *ptr, int len)
7365 {
7366 machine_mode mode = TYPE_MODE (type);
7367 int total_bytes = GET_MODE_SIZE (mode);
7368 unsigned char value;
7369 /* There are always 32 bits in each long, no matter the size of
7370 	     the host's long.  We handle floating point representations with
7371 up to 192 bits. */
7372 REAL_VALUE_TYPE r;
7373 long tmp[6];
7374
7375 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7376 if (total_bytes > len || total_bytes > 24)
7377 return NULL_TREE;
7378 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7379
7380 memset (tmp, 0, sizeof (tmp));
7381 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7382 bitpos += BITS_PER_UNIT)
7383 {
7384 /* Both OFFSET and BYTE index within a long;
7385 bitpos indexes the whole float. */
7386 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7387 if (UNITS_PER_WORD < 4)
7388 {
7389 int word = byte / UNITS_PER_WORD;
7390 if (WORDS_BIG_ENDIAN)
7391 word = (words - 1) - word;
7392 offset = word * UNITS_PER_WORD;
7393 if (BYTES_BIG_ENDIAN)
7394 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7395 else
7396 offset += byte % UNITS_PER_WORD;
7397 }
7398 else
7399 {
7400 offset = byte;
7401 if (BYTES_BIG_ENDIAN)
7402 {
7403 /* Reverse bytes within each long, or within the entire float
7404 if it's smaller than a long (for HFmode). */
7405 offset = MIN (3, total_bytes - 1) - offset;
7406 gcc_assert (offset >= 0);
7407 }
7408 }
7409 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7410
7411 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7412 }
7413
7414 real_from_target (&r, tmp, mode);
7415 return build_real (type, r);
7416 }
7417
7418
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7422
7423 static tree
7424 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7425 {
7426 tree etype, rpart, ipart;
7427 int size;
7428
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7431 if (size * 2 > len)
7432 return NULL_TREE;
7433 rpart = native_interpret_expr (etype, ptr, size);
7434 if (!rpart)
7435 return NULL_TREE;
7436 ipart = native_interpret_expr (etype, ptr+size, size);
7437 if (!ipart)
7438 return NULL_TREE;
7439 return build_complex (type, rpart, ipart);
7440 }
7441
7442
7443 /* Subroutine of native_interpret_expr. Interpret the contents of
7444 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7445 If the buffer cannot be interpreted, return NULL_TREE. */
7446
7447 static tree
7448 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7449 {
7450 tree etype, elem;
7451 int i, size, count;
7452 tree *elements;
7453
7454 etype = TREE_TYPE (type);
7455 size = GET_MODE_SIZE (TYPE_MODE (etype));
7456 count = TYPE_VECTOR_SUBPARTS (type);
7457 if (size * count > len)
7458 return NULL_TREE;
7459
7460 elements = XALLOCAVEC (tree, count);
7461 for (i = count - 1; i >= 0; i--)
7462 {
7463 elem = native_interpret_expr (etype, ptr+(i*size), size);
7464 if (!elem)
7465 return NULL_TREE;
7466 elements[i] = elem;
7467 }
7468 return build_vector (type, elements);
7469 }
7470
7471
7472 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7473 the buffer PTR of length LEN as a constant of type TYPE. For
7474 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7475 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7476 return NULL_TREE. */
7477
7478 tree
7479 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7480 {
7481 switch (TREE_CODE (type))
7482 {
7483 case INTEGER_TYPE:
7484 case ENUMERAL_TYPE:
7485 case BOOLEAN_TYPE:
7486 case POINTER_TYPE:
7487 case REFERENCE_TYPE:
7488 return native_interpret_int (type, ptr, len);
7489
7490 case REAL_TYPE:
7491 return native_interpret_real (type, ptr, len);
7492
7493 case FIXED_POINT_TYPE:
7494 return native_interpret_fixed (type, ptr, len);
7495
7496 case COMPLEX_TYPE:
7497 return native_interpret_complex (type, ptr, len);
7498
7499 case VECTOR_TYPE:
7500 return native_interpret_vector (type, ptr, len);
7501
7502 default:
7503 return NULL_TREE;
7504 }
7505 }
7506
7507 /* Returns true if we can interpret the contents of a native encoding
7508 as TYPE. */
7509
7510 static bool
7511 can_native_interpret_type_p (tree type)
7512 {
7513 switch (TREE_CODE (type))
7514 {
7515 case INTEGER_TYPE:
7516 case ENUMERAL_TYPE:
7517 case BOOLEAN_TYPE:
7518 case POINTER_TYPE:
7519 case REFERENCE_TYPE:
7520 case FIXED_POINT_TYPE:
7521 case REAL_TYPE:
7522 case COMPLEX_TYPE:
7523 case VECTOR_TYPE:
7524 return true;
7525 default:
7526 return false;
7527 }
7528 }
7529
7530 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7531 TYPE at compile-time. If we're unable to perform the conversion
7532 return NULL_TREE. */
7533
7534 static tree
7535 fold_view_convert_expr (tree type, tree expr)
7536 {
7537 /* We support up to 512-bit values (for V8DFmode). */
7538 unsigned char buffer[64];
7539 int len;
7540
7541 /* Check that the host and target are sane. */
7542 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7543 return NULL_TREE;
7544
7545 len = native_encode_expr (expr, buffer, sizeof (buffer));
7546 if (len == 0)
7547 return NULL_TREE;
7548
7549 return native_interpret_expr (type, buffer, len);
7550 }
7551
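/* Example (illustrative): on a target with 32-bit IEEE single floats,
   folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST as the
   bytes of 0x3f800000 and reinterprets them as an INTEGER_CST,
   yielding 1065353216.  */
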
7552 /* Build an expression for the address of T. Folds away INDIRECT_REF
7553 to avoid confusing the gimplify process. */
7554
7555 tree
7556 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7557 {
7558 /* The size of the object is not relevant when talking about its address. */
7559 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7560 t = TREE_OPERAND (t, 0);
7561
7562 if (TREE_CODE (t) == INDIRECT_REF)
7563 {
7564 t = TREE_OPERAND (t, 0);
7565
7566 if (TREE_TYPE (t) != ptrtype)
7567 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7568 }
7569 else if (TREE_CODE (t) == MEM_REF
7570 && integer_zerop (TREE_OPERAND (t, 1)))
7571 return TREE_OPERAND (t, 0);
7572 else if (TREE_CODE (t) == MEM_REF
7573 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7574 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7575 TREE_OPERAND (t, 0),
7576 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7577 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7578 {
7579 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7580
7581 if (TREE_TYPE (t) != ptrtype)
7582 t = fold_convert_loc (loc, ptrtype, t);
7583 }
7584 else
7585 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7586
7587 return t;
7588 }
7589
7590 /* Build an expression for the address of T. */
7591
7592 tree
7593 build_fold_addr_expr_loc (location_t loc, tree t)
7594 {
7595 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7596
7597 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7598 }
7599
7600 /* Fold a unary expression of code CODE and type TYPE with operand
7601 OP0. Return the folded expression if folding is successful.
7602 Otherwise, return NULL_TREE. */
7603
7604 tree
7605 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7606 {
7607 tree tem;
7608 tree arg0;
7609 enum tree_code_class kind = TREE_CODE_CLASS (code);
7610
7611 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7612 && TREE_CODE_LENGTH (code) == 1);
7613
7614 arg0 = op0;
7615 if (arg0)
7616 {
7617 if (CONVERT_EXPR_CODE_P (code)
7618 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7619 {
7620 /* Don't use STRIP_NOPS, because signedness of argument type
7621 matters. */
7622 STRIP_SIGN_NOPS (arg0);
7623 }
7624 else
7625 {
7626 /* Strip any conversions that don't change the mode. This
7627 is safe for every expression, except for a comparison
7628 expression because its signedness is derived from its
7629 operands.
7630
7631 Note that this is done as an internal manipulation within
7632 the constant folder, in order to find the simplest
7633 representation of the arguments so that their form can be
7634 studied. In any cases, the appropriate type conversions
7635 should be put back in the tree that will get out of the
7636 constant folder. */
7637 STRIP_NOPS (arg0);
7638 }
7639
7640 if (CONSTANT_CLASS_P (arg0))
7641 {
7642 tree tem = const_unop (code, type, arg0);
7643 if (tem)
7644 {
7645 if (TREE_TYPE (tem) != type)
7646 tem = fold_convert_loc (loc, type, tem);
7647 return tem;
7648 }
7649 }
7650 }
7651
7652 tem = generic_simplify (loc, code, type, op0);
7653 if (tem)
7654 return tem;
7655
7656 if (TREE_CODE_CLASS (code) == tcc_unary)
7657 {
7658 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7659 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7660 fold_build1_loc (loc, code, type,
7661 fold_convert_loc (loc, TREE_TYPE (op0),
7662 TREE_OPERAND (arg0, 1))));
7663 else if (TREE_CODE (arg0) == COND_EXPR)
7664 {
7665 tree arg01 = TREE_OPERAND (arg0, 1);
7666 tree arg02 = TREE_OPERAND (arg0, 2);
7667 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7668 arg01 = fold_build1_loc (loc, code, type,
7669 fold_convert_loc (loc,
7670 TREE_TYPE (op0), arg01));
7671 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7672 arg02 = fold_build1_loc (loc, code, type,
7673 fold_convert_loc (loc,
7674 TREE_TYPE (op0), arg02));
7675 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7676 arg01, arg02);
7677
7678 	  /* If this was a conversion, and all we did was to move it
7679 	     inside the COND_EXPR, bring it back out.  But leave it if
7680 it is a conversion from integer to integer and the
7681 result precision is no wider than a word since such a
7682 conversion is cheap and may be optimized away by combine,
7683 while it couldn't if it were outside the COND_EXPR. Then return
7684 so we don't get into an infinite recursion loop taking the
7685 conversion out and then back in. */
7686
7687 if ((CONVERT_EXPR_CODE_P (code)
7688 || code == NON_LVALUE_EXPR)
7689 && TREE_CODE (tem) == COND_EXPR
7690 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7691 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7692 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7693 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7694 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7695 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7696 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7697 && (INTEGRAL_TYPE_P
7698 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7699 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7700 || flag_syntax_only))
7701 tem = build1_loc (loc, code, type,
7702 build3 (COND_EXPR,
7703 TREE_TYPE (TREE_OPERAND
7704 (TREE_OPERAND (tem, 1), 0)),
7705 TREE_OPERAND (tem, 0),
7706 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7707 TREE_OPERAND (TREE_OPERAND (tem, 2),
7708 0)));
7709 return tem;
7710 }
7711 }
7712
7713 switch (code)
7714 {
7715 case NON_LVALUE_EXPR:
7716 if (!maybe_lvalue_p (op0))
7717 return fold_convert_loc (loc, type, op0);
7718 return NULL_TREE;
7719
7720 CASE_CONVERT:
7721 case FLOAT_EXPR:
7722 case FIX_TRUNC_EXPR:
7723 if (COMPARISON_CLASS_P (op0))
7724 {
7725 /* If we have (type) (a CMP b) and type is an integral type, return
7726 	     a new expression involving the new type.  Canonicalize
7727 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7728 non-integral type.
7729 	     Do not fold the result as that would not simplify further; also,
7730 	     folding again results in infinite recursion.  */
7731 if (TREE_CODE (type) == BOOLEAN_TYPE)
7732 return build2_loc (loc, TREE_CODE (op0), type,
7733 TREE_OPERAND (op0, 0),
7734 TREE_OPERAND (op0, 1));
7735 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7736 && TREE_CODE (type) != VECTOR_TYPE)
7737 return build3_loc (loc, COND_EXPR, type, op0,
7738 constant_boolean_node (true, type),
7739 constant_boolean_node (false, type));
7740 }
7741
7742 /* Handle (T *)&A.B.C for A being of type T and B and C
7743 living at offset zero. This occurs frequently in
7744 C++ upcasting and then accessing the base. */
7745 if (TREE_CODE (op0) == ADDR_EXPR
7746 && POINTER_TYPE_P (type)
7747 && handled_component_p (TREE_OPERAND (op0, 0)))
7748 {
7749 HOST_WIDE_INT bitsize, bitpos;
7750 tree offset;
7751 machine_mode mode;
7752 int unsignedp, reversep, volatilep;
7753 tree base
7754 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7755 &offset, &mode, &unsignedp, &reversep,
7756 &volatilep, false);
7757 /* If the reference was to a (constant) zero offset, we can use
7758 the address of the base if it has the same base type
7759 as the result type and the pointer type is unqualified. */
7760 if (! offset && bitpos == 0
7761 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7762 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7763 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7764 return fold_convert_loc (loc, type,
7765 build_fold_addr_expr_loc (loc, base));
7766 }
7767
7768 if (TREE_CODE (op0) == MODIFY_EXPR
7769 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7770 /* Detect assigning a bitfield. */
7771 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7772 && DECL_BIT_FIELD
7773 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7774 {
7775 /* Don't leave an assignment inside a conversion
7776 unless assigning a bitfield. */
7777 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7778 /* First do the assignment, then return converted constant. */
7779 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7780 TREE_NO_WARNING (tem) = 1;
7781 TREE_USED (tem) = 1;
7782 return tem;
7783 }
7784
7785 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7786 	 constant (if x has signed type, the sign bit cannot be set
7787 in c). This folds extension into the BIT_AND_EXPR.
7788 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7789 very likely don't have maximal range for their precision and this
7790 transformation effectively doesn't preserve non-maximal ranges. */
7791 if (TREE_CODE (type) == INTEGER_TYPE
7792 && TREE_CODE (op0) == BIT_AND_EXPR
7793 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7794 {
7795 tree and_expr = op0;
7796 tree and0 = TREE_OPERAND (and_expr, 0);
7797 tree and1 = TREE_OPERAND (and_expr, 1);
7798 int change = 0;
7799
7800 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7801 || (TYPE_PRECISION (type)
7802 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7803 change = 1;
7804 else if (TYPE_PRECISION (TREE_TYPE (and1))
7805 <= HOST_BITS_PER_WIDE_INT
7806 && tree_fits_uhwi_p (and1))
7807 {
7808 unsigned HOST_WIDE_INT cst;
7809
7810 cst = tree_to_uhwi (and1);
7811 cst &= HOST_WIDE_INT_M1U
7812 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7813 change = (cst == 0);
7814 if (change
7815 && !flag_syntax_only
7816 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7817 == ZERO_EXTEND))
7818 {
7819 tree uns = unsigned_type_for (TREE_TYPE (and0));
7820 and0 = fold_convert_loc (loc, uns, and0);
7821 and1 = fold_convert_loc (loc, uns, and1);
7822 }
7823 }
7824 if (change)
7825 {
7826 tem = force_fit_type (type, wi::to_widest (and1), 0,
7827 TREE_OVERFLOW (and1));
7828 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7829 fold_convert_loc (loc, type, and0), tem);
7830 }
7831 }
7832
7833 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7834 cast (T1)X will fold away. We assume that this happens when X itself
7835 is a cast. */
7836 if (POINTER_TYPE_P (type)
7837 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7838 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7839 {
7840 tree arg00 = TREE_OPERAND (arg0, 0);
7841 tree arg01 = TREE_OPERAND (arg0, 1);
7842
7843 return fold_build_pointer_plus_loc
7844 (loc, fold_convert_loc (loc, type, arg00), arg01);
7845 }
7846
7847 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7848 	 of the same precision, and X has an integer type not narrower than
7849 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7850 if (INTEGRAL_TYPE_P (type)
7851 && TREE_CODE (op0) == BIT_NOT_EXPR
7852 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7853 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7854 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7855 {
7856 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7857 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7858 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7859 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7860 fold_convert_loc (loc, type, tem));
7861 }
7862
7863 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7864 type of X and Y (integer types only). */
7865 if (INTEGRAL_TYPE_P (type)
7866 && TREE_CODE (op0) == MULT_EXPR
7867 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7868 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7869 {
7870 /* Be careful not to introduce new overflows. */
7871 tree mult_type;
7872 if (TYPE_OVERFLOW_WRAPS (type))
7873 mult_type = type;
7874 else
7875 mult_type = unsigned_type_for (type);
7876
7877 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7878 {
7879 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7880 fold_convert_loc (loc, mult_type,
7881 TREE_OPERAND (op0, 0)),
7882 fold_convert_loc (loc, mult_type,
7883 TREE_OPERAND (op0, 1)));
7884 return fold_convert_loc (loc, type, tem);
7885 }
7886 }
7887
7888 return NULL_TREE;
7889
7890 case VIEW_CONVERT_EXPR:
7891 if (TREE_CODE (op0) == MEM_REF)
7892 {
7893 tem = fold_build2_loc (loc, MEM_REF, type,
7894 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7895 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7896 return tem;
7897 }
7898
7899 return NULL_TREE;
7900
7901 case NEGATE_EXPR:
7902 tem = fold_negate_expr (loc, arg0);
7903 if (tem)
7904 return fold_convert_loc (loc, type, tem);
7905 return NULL_TREE;
7906
7907 case ABS_EXPR:
7908 /* Convert fabs((double)float) into (double)fabsf(float). */
7909 if (TREE_CODE (arg0) == NOP_EXPR
7910 && TREE_CODE (type) == REAL_TYPE)
7911 {
7912 tree targ0 = strip_float_extensions (arg0);
7913 if (targ0 != arg0)
7914 return fold_convert_loc (loc, type,
7915 fold_build1_loc (loc, ABS_EXPR,
7916 TREE_TYPE (targ0),
7917 targ0));
7918 }
7919 return NULL_TREE;
7920
7921 case BIT_NOT_EXPR:
7922 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7923 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7924 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7925 fold_convert_loc (loc, type,
7926 TREE_OPERAND (arg0, 0)))))
7927 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7928 fold_convert_loc (loc, type,
7929 TREE_OPERAND (arg0, 1)));
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7932 fold_convert_loc (loc, type,
7933 TREE_OPERAND (arg0, 1)))))
7934 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7935 fold_convert_loc (loc, type,
7936 TREE_OPERAND (arg0, 0)), tem);
7937
7938 return NULL_TREE;
7939
7940 case TRUTH_NOT_EXPR:
7941 /* Note that the operand of this must be an int
7942 and its values must be 0 or 1.
7943 ("true" is a fixed value perhaps depending on the language,
7944 but we don't handle values other than 1 correctly yet.) */
7945 tem = fold_truth_not_expr (loc, arg0);
7946 if (!tem)
7947 return NULL_TREE;
7948 return fold_convert_loc (loc, type, tem);
7949
7950 case INDIRECT_REF:
7951 /* Fold *&X to X if X is an lvalue. */
7952 if (TREE_CODE (op0) == ADDR_EXPR)
7953 {
7954 tree op00 = TREE_OPERAND (op0, 0);
7955 if ((TREE_CODE (op00) == VAR_DECL
7956 || TREE_CODE (op00) == PARM_DECL
7957 || TREE_CODE (op00) == RESULT_DECL)
7958 && !TREE_READONLY (op00))
7959 return op00;
7960 }
7961 return NULL_TREE;
7962
7963 default:
7964 return NULL_TREE;
7965 } /* switch (code) */
7966 }
7967
7968
7969 /* If the operation was a conversion do _not_ mark a resulting constant
7970 with TREE_OVERFLOW if the original constant was not. These conversions
7971 have implementation defined behavior and retaining the TREE_OVERFLOW
7972 flag here would confuse later passes such as VRP. */
7973 tree
7974 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7975 tree type, tree op0)
7976 {
7977 tree res = fold_unary_loc (loc, code, type, op0);
7978 if (res
7979 && TREE_CODE (res) == INTEGER_CST
7980 && TREE_CODE (op0) == INTEGER_CST
7981 && CONVERT_EXPR_CODE_P (code))
7982 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7983
7984 return res;
7985 }
7986
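/* Example (illustrative): folding (signed char) 200 yields the
   INTEGER_CST -56 with the usual 8-bit signed char.  The value does
   not fit, but because such narrowing conversions are implementation
   defined rather than undefined, the result deliberately does not
   carry TREE_OVERFLOW when the operand 200 did not.  */
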
7987 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7988 operands OP0 and OP1. LOC is the location of the resulting expression.
7989 	 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
7990 Return the folded expression if folding is successful. Otherwise,
7991 return NULL_TREE. */
7992 static tree
7993 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7994 tree arg0, tree arg1, tree op0, tree op1)
7995 {
7996 tree tem;
7997
7998 /* We only do these simplifications if we are optimizing. */
7999 if (!optimize)
8000 return NULL_TREE;
8001
8002 /* Check for things like (A || B) && (A || C). We can convert this
8003 to A || (B && C). Note that either operator can be any of the four
8004 truth and/or operations and the transformation will still be
8005 valid. Also note that we only care about order for the
8006 ANDIF and ORIF operators. If B contains side effects, this
8007 might change the truth-value of A. */
8008 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8009 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8010 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8011 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8012 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8013 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8014 {
8015 tree a00 = TREE_OPERAND (arg0, 0);
8016 tree a01 = TREE_OPERAND (arg0, 1);
8017 tree a10 = TREE_OPERAND (arg1, 0);
8018 tree a11 = TREE_OPERAND (arg1, 1);
8019 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8020 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8021 && (code == TRUTH_AND_EXPR
8022 || code == TRUTH_OR_EXPR));
8023
8024 if (operand_equal_p (a00, a10, 0))
8025 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8026 fold_build2_loc (loc, code, type, a01, a11));
8027 else if (commutative && operand_equal_p (a00, a11, 0))
8028 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8029 fold_build2_loc (loc, code, type, a01, a10));
8030 else if (commutative && operand_equal_p (a01, a10, 0))
8031 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8032 fold_build2_loc (loc, code, type, a00, a11));
8033
8034 	  /* This case is tricky because we must either have commutative
8035 operators or else A10 must not have side-effects. */
8036
8037 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8038 && operand_equal_p (a01, a11, 0))
8039 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8040 fold_build2_loc (loc, code, type, a00, a10),
8041 a01);
8042 }
8043
8044 /* See if we can build a range comparison. */
8045 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8046 return tem;
8047
8048 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8049 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8050 {
8051 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8052 if (tem)
8053 return fold_build2_loc (loc, code, type, tem, arg1);
8054 }
8055
8056 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8057 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8058 {
8059 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8060 if (tem)
8061 return fold_build2_loc (loc, code, type, arg0, tem);
8062 }
8063
8064 /* Check for the possibility of merging component references. If our
8065 lhs is another similar operation, try to merge its rhs with our
8066 rhs. Then try to merge our lhs and rhs. */
8067 if (TREE_CODE (arg0) == code
8068 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8069 TREE_OPERAND (arg0, 1), arg1)))
8070 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8071
8072 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8073 return tem;
8074
8075 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8076 && (code == TRUTH_AND_EXPR
8077 || code == TRUTH_ANDIF_EXPR
8078 || code == TRUTH_OR_EXPR
8079 || code == TRUTH_ORIF_EXPR))
8080 {
8081 enum tree_code ncode, icode;
8082
8083 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8084 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8085 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8086
8087 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8088 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8089 	 We don't want to pack more than two leaves into a non-IF AND/OR
8090 	 expression.
8091 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8092 	 and isn't equal to IF-CODE, then we don't want to add the
8093 	 right-hand operand.  If the inner right-hand side of the
8094 	 left-hand operand has side-effects, or isn't simple, then we
8095 	 can't add to it, as otherwise we might destroy the if-sequence.  */
8096 if (TREE_CODE (arg0) == icode
8097 && simple_operand_p_2 (arg1)
8098 	  /* Needed for sequence points to handle trapping, and
8099 side-effects. */
8100 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8101 {
8102 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8103 arg1);
8104 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8105 tem);
8106 }
8107 	  /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8108 	     or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8109 else if (TREE_CODE (arg1) == icode
8110 && simple_operand_p_2 (arg0)
8111 	   /* Needed for sequence points to handle trapping, and
8112 side-effects. */
8113 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8114 {
8115 tem = fold_build2_loc (loc, ncode, type,
8116 arg0, TREE_OPERAND (arg1, 0));
8117 return fold_build2_loc (loc, icode, type, tem,
8118 TREE_OPERAND (arg1, 1));
8119 }
8120 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8121 into (A OR B).
8122 	 For sequence point consistency, we need to check for trapping,
8123 and side-effects. */
8124 else if (code == icode && simple_operand_p_2 (arg0)
8125 && simple_operand_p_2 (arg1))
8126 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8127 }
8128
8129 return NULL_TREE;
8130 }
8131
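/* Worked examples (illustrative):

     (a || b) && (a || c)  ->  a || (b && c)
     (a ANDIF b) ANDIF c   ->  a ANDIF (b AND c)

   The second form is the LOGICAL_OP_NON_SHORT_CIRCUIT case: it is
   only done when B and C are simple operands without side effects,
   so dropping the short-circuit between them cannot change
   observable behavior.  */
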
8132 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8133 by changing CODE to reduce the magnitude of constants involved in
8134 ARG0 of the comparison.
8135 Returns a canonicalized comparison tree if a simplification was
8136 possible, otherwise returns NULL_TREE.
8137 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8138 valid if signed overflow is undefined. */
8139
8140 static tree
8141 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8142 tree arg0, tree arg1,
8143 bool *strict_overflow_p)
8144 {
8145 enum tree_code code0 = TREE_CODE (arg0);
8146 tree t, cst0 = NULL_TREE;
8147 int sgn0;
8148
8149 /* Match A +- CST code arg1. We can change this only if overflow
8150 is undefined. */
8151 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8152 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8153 /* In principle pointers also have undefined overflow behavior,
8154 but that causes problems elsewhere. */
8155 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8156 && (code0 == MINUS_EXPR
8157 || code0 == PLUS_EXPR)
8158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8159 return NULL_TREE;
8160
8161 /* Identify the constant in arg0 and its sign. */
8162 cst0 = TREE_OPERAND (arg0, 1);
8163 sgn0 = tree_int_cst_sgn (cst0);
8164
8165 /* Overflowed constants and zero will cause problems. */
8166 if (integer_zerop (cst0)
8167 || TREE_OVERFLOW (cst0))
8168 return NULL_TREE;
8169
8170 /* See if we can reduce the magnitude of the constant in
8171 arg0 by changing the comparison code. */
8172 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8173 if (code == LT_EXPR
8174 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8175 code = LE_EXPR;
8176 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8177 else if (code == GT_EXPR
8178 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8179 code = GE_EXPR;
8180 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8181 else if (code == LE_EXPR
8182 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8183 code = LT_EXPR;
8184 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8185 else if (code == GE_EXPR
8186 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8187 code = GT_EXPR;
8188 else
8189 return NULL_TREE;
8190 *strict_overflow_p = true;
8191
8192 /* Now build the constant reduced in magnitude. But not if that
8193 	 would produce one outside of its type's range.  */
8194 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8195 && ((sgn0 == 1
8196 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8197 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8198 || (sgn0 == -1
8199 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8200 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8201 return NULL_TREE;
8202
8203 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8204 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8205 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8206 t = fold_convert (TREE_TYPE (arg1), t);
8207
8208 return fold_build2_loc (loc, code, type, t, arg1);
8209 }
8210
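/* Worked example (illustrative): for signed a with undefined
   overflow, a - 10 < b is canonicalized to a - 9 <= b: LT_EXPR
   becomes LE_EXPR and the magnitude of the constant shrinks from 10
   to 9.  *STRICT_OVERFLOW_P is set because the step relies on a - 10
   not wrapping.  */
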
8211 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8212 overflow further. Try to decrease the magnitude of constants involved
8213 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8214 and put sole constants at the second argument position.
8215 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8216
8217 static tree
8218 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8219 tree arg0, tree arg1)
8220 {
8221 tree t;
8222 bool strict_overflow_p;
8223 const char * const warnmsg = G_("assuming signed overflow does not occur "
8224 "when reducing constant in comparison");
8225
8226 /* Try canonicalization by simplifying arg0. */
8227 strict_overflow_p = false;
8228 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8229 &strict_overflow_p);
8230 if (t)
8231 {
8232 if (strict_overflow_p)
8233 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8234 return t;
8235 }
8236
8237 /* Try canonicalization by simplifying arg1 using the swapped
8238 comparison. */
8239 code = swap_tree_comparison (code);
8240 strict_overflow_p = false;
8241 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8242 &strict_overflow_p);
8243 if (t && strict_overflow_p)
8244 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8245 return t;
8246 }
8247
8248 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8249 space. This is used to avoid issuing overflow warnings for
8250 	 expressions like &p->x, which cannot wrap.  */
8251
8252 static bool
8253 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8254 {
8255 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8256 return true;
8257
8258 if (bitpos < 0)
8259 return true;
8260
8261 wide_int wi_offset;
8262 int precision = TYPE_PRECISION (TREE_TYPE (base));
8263 if (offset == NULL_TREE)
8264 wi_offset = wi::zero (precision);
8265 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8266 return true;
8267 else
8268 wi_offset = offset;
8269
8270 bool overflow;
8271 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8272 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8273 if (overflow)
8274 return true;
8275
8276 if (!wi::fits_uhwi_p (total))
8277 return true;
8278
8279 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8280 if (size <= 0)
8281 return true;
8282
8283 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8284 array. */
8285 if (TREE_CODE (base) == ADDR_EXPR)
8286 {
8287 HOST_WIDE_INT base_size;
8288
8289 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8290 if (base_size > 0 && size < base_size)
8291 size = base_size;
8292 }
8293
8294 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8295 }
8296
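/* Example (illustrative): for char a[100], BASE = &a, OFFSET =
   NULL_TREE and BITPOS = 80 * BITS_PER_UNIT, the 80-byte displacement
   stays within the 100-byte object, so the function returns false and
   no wraparound warning is issued.  A negative BITPOS or a
   non-constant OFFSET makes it return true conservatively.  */
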
8297 /* Subroutine of fold_binary. This routine performs all of the
8298 transformations that are common to the equality/inequality
8299 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8300 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8301 fold_binary should call fold_binary. Fold a comparison with
8302 tree code CODE and type TYPE with operands OP0 and OP1. Return
8303 the folded comparison or NULL_TREE. */
8304
8305 static tree
8306 fold_comparison (location_t loc, enum tree_code code, tree type,
8307 tree op0, tree op1)
8308 {
8309 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8310 tree arg0, arg1, tem;
8311
8312 arg0 = op0;
8313 arg1 = op1;
8314
8315 STRIP_SIGN_NOPS (arg0);
8316 STRIP_SIGN_NOPS (arg1);
8317
8318 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8319 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8320 && (equality_code
8321 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8322 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8324 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8325 && TREE_CODE (arg1) == INTEGER_CST
8326 && !TREE_OVERFLOW (arg1))
8327 {
8328 const enum tree_code
8329 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8330 tree const1 = TREE_OPERAND (arg0, 1);
8331 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8332 tree variable = TREE_OPERAND (arg0, 0);
8333 tree new_const = int_const_binop (reverse_op, const2, const1);
8334
8335 /* If the constant operation overflowed this can be
8336 simplified as a comparison against INT_MAX/INT_MIN. */
8337 if (TREE_OVERFLOW (new_const)
8338 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8339 {
8340 int const1_sgn = tree_int_cst_sgn (const1);
8341 enum tree_code code2 = code;
8342
8343 /* Get the sign of the constant on the lhs if the
8344 operation were VARIABLE + CONST1. */
8345 if (TREE_CODE (arg0) == MINUS_EXPR)
8346 const1_sgn = -const1_sgn;
8347
8348 /* The sign of the constant determines if we overflowed
8349 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8350 Canonicalize to the INT_MIN overflow by swapping the comparison
8351 if necessary. */
8352 if (const1_sgn == -1)
8353 code2 = swap_tree_comparison (code);
8354
8355 /* We now can look at the canonicalized case
8356 VARIABLE + 1 CODE2 INT_MIN
8357 and decide on the result. */
8358 switch (code2)
8359 {
8360 case EQ_EXPR:
8361 case LT_EXPR:
8362 case LE_EXPR:
8363 return
8364 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8365
8366 case NE_EXPR:
8367 case GE_EXPR:
8368 case GT_EXPR:
8369 return
8370 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8371
8372 default:
8373 gcc_unreachable ();
8374 }
8375 }
8376 else
8377 {
8378 if (!equality_code)
8379 fold_overflow_warning ("assuming signed overflow does not occur "
8380 "when changing X +- C1 cmp C2 to "
8381 "X cmp C2 -+ C1",
8382 WARN_STRICT_OVERFLOW_COMPARISON);
8383 return fold_build2_loc (loc, code, type, variable, new_const);
8384 }
8385 }
8386
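/* Worked examples of the transform above (illustrative), for signed
   int x:

     x + 5 < 3        ->  x < -2
     x + 1 < INT_MIN  ->  false, since computing INT_MIN - 1 overflows
                          and the canonicalized case
                          VARIABLE + 1 LT_EXPR INT_MIN can never hold.

   The first form also emits a -Wstrict-overflow note for ordering
   comparisons, as it assumes x + 5 does not wrap.  */
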
8387 	  /* For comparisons of pointers we can decompose them into a compile-time
8388 comparison of the base objects and the offsets into the object.
8389 This requires at least one operand being an ADDR_EXPR or a
8390 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8391 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8392 && (TREE_CODE (arg0) == ADDR_EXPR
8393 || TREE_CODE (arg1) == ADDR_EXPR
8394 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8395 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8396 {
8397 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8398 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8399 machine_mode mode;
8400 int volatilep, reversep, unsignedp;
8401 bool indirect_base0 = false, indirect_base1 = false;
8402
8403 /* Get base and offset for the access. Strip ADDR_EXPR for
8404 get_inner_reference, but put it back by stripping INDIRECT_REF
8405 off the base object if possible. indirect_baseN will be true
8406 if baseN is not an address but refers to the object itself. */
8407 base0 = arg0;
8408 if (TREE_CODE (arg0) == ADDR_EXPR)
8409 {
8410 base0
8411 = get_inner_reference (TREE_OPERAND (arg0, 0),
8412 &bitsize, &bitpos0, &offset0, &mode,
8413 &unsignedp, &reversep, &volatilep, false);
8414 if (TREE_CODE (base0) == INDIRECT_REF)
8415 base0 = TREE_OPERAND (base0, 0);
8416 else
8417 indirect_base0 = true;
8418 }
8419 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8420 {
8421 base0 = TREE_OPERAND (arg0, 0);
8422 STRIP_SIGN_NOPS (base0);
8423 if (TREE_CODE (base0) == ADDR_EXPR)
8424 {
8425 base0
8426 = get_inner_reference (TREE_OPERAND (base0, 0),
8427 &bitsize, &bitpos0, &offset0, &mode,
8428 &unsignedp, &reversep, &volatilep,
8429 false);
8430 if (TREE_CODE (base0) == INDIRECT_REF)
8431 base0 = TREE_OPERAND (base0, 0);
8432 else
8433 indirect_base0 = true;
8434 }
8435 if (offset0 == NULL_TREE || integer_zerop (offset0))
8436 offset0 = TREE_OPERAND (arg0, 1);
8437 else
8438 offset0 = size_binop (PLUS_EXPR, offset0,
8439 TREE_OPERAND (arg0, 1));
8440 if (TREE_CODE (offset0) == INTEGER_CST)
8441 {
8442 offset_int tem = wi::sext (wi::to_offset (offset0),
8443 TYPE_PRECISION (sizetype));
8444 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8445 tem += bitpos0;
8446 if (wi::fits_shwi_p (tem))
8447 {
8448 bitpos0 = tem.to_shwi ();
8449 offset0 = NULL_TREE;
8450 }
8451 }
8452 }
8453
8454 base1 = arg1;
8455 if (TREE_CODE (arg1) == ADDR_EXPR)
8456 {
8457 base1
8458 = get_inner_reference (TREE_OPERAND (arg1, 0),
8459 &bitsize, &bitpos1, &offset1, &mode,
8460 &unsignedp, &reversep, &volatilep, false);
8461 if (TREE_CODE (base1) == INDIRECT_REF)
8462 base1 = TREE_OPERAND (base1, 0);
8463 else
8464 indirect_base1 = true;
8465 }
8466 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8467 {
8468 base1 = TREE_OPERAND (arg1, 0);
8469 STRIP_SIGN_NOPS (base1);
8470 if (TREE_CODE (base1) == ADDR_EXPR)
8471 {
8472 base1
8473 = get_inner_reference (TREE_OPERAND (base1, 0),
8474 &bitsize, &bitpos1, &offset1, &mode,
8475 &unsignedp, &reversep, &volatilep,
8476 false);
8477 if (TREE_CODE (base1) == INDIRECT_REF)
8478 base1 = TREE_OPERAND (base1, 0);
8479 else
8480 indirect_base1 = true;
8481 }
8482 if (offset1 == NULL_TREE || integer_zerop (offset1))
8483 offset1 = TREE_OPERAND (arg1, 1);
8484 else
8485 offset1 = size_binop (PLUS_EXPR, offset1,
8486 TREE_OPERAND (arg1, 1));
8487 if (TREE_CODE (offset1) == INTEGER_CST)
8488 {
8489 offset_int tem = wi::sext (wi::to_offset (offset1),
8490 TYPE_PRECISION (sizetype));
8491 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8492 tem += bitpos1;
8493 if (wi::fits_shwi_p (tem))
8494 {
8495 bitpos1 = tem.to_shwi ();
8496 offset1 = NULL_TREE;
8497 }
8498 }
8499 }
8500
8501 /* If we have equivalent bases we might be able to simplify. */
8502 if (indirect_base0 == indirect_base1
8503 && operand_equal_p (base0, base1,
8504 indirect_base0 ? OEP_ADDRESS_OF : 0))
8505 {
8506 /* We can fold this expression to a constant if the non-constant
8507 offset parts are equal. */
8508 if ((offset0 == offset1
8509 || (offset0 && offset1
8510 && operand_equal_p (offset0, offset1, 0)))
8511 && (code == EQ_EXPR
8512 || code == NE_EXPR
8513 || (indirect_base0 && DECL_P (base0))
8514 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8515
8516 {
8517 if (!equality_code
8518 && bitpos0 != bitpos1
8519 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8520 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8521 fold_overflow_warning (("assuming pointer wraparound does not "
8522 "occur when comparing P +- C1 with "
8523 "P +- C2"),
8524 WARN_STRICT_OVERFLOW_CONDITIONAL);
8525
8526 switch (code)
8527 {
8528 case EQ_EXPR:
8529 return constant_boolean_node (bitpos0 == bitpos1, type);
8530 case NE_EXPR:
8531 return constant_boolean_node (bitpos0 != bitpos1, type);
8532 case LT_EXPR:
8533 return constant_boolean_node (bitpos0 < bitpos1, type);
8534 case LE_EXPR:
8535 return constant_boolean_node (bitpos0 <= bitpos1, type);
8536 case GE_EXPR:
8537 return constant_boolean_node (bitpos0 >= bitpos1, type);
8538 case GT_EXPR:
8539 return constant_boolean_node (bitpos0 > bitpos1, type);
8540 default:;
8541 }
8542 }
8543 /* We can simplify the comparison to a comparison of the variable
8544 offset parts if the constant offset parts are equal.
8545 Be careful to use signed sizetype here because otherwise we
8546 mess with array offsets in the wrong way. This is possible
8547 	     because pointer arithmetic is restricted to remain within an
8548 object and overflow on pointer differences is undefined as of
8549 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8550 else if (bitpos0 == bitpos1
8551 && (equality_code
8552 || (indirect_base0 && DECL_P (base0))
8553 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8554 {
8555 	      /* By converting to signed sizetype we cover middle-end pointer
8556 	         arithmetic, which operates on unsigned pointer types of
8557 	         sizetype width, and ARRAY_REF offsets, which are properly sign-
8558 	         or zero-extended from their type in case it is narrower than
8559 	         sizetype.  */
8560 if (offset0 == NULL_TREE)
8561 offset0 = build_int_cst (ssizetype, 0);
8562 else
8563 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8564 if (offset1 == NULL_TREE)
8565 offset1 = build_int_cst (ssizetype, 0);
8566 else
8567 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8568
8569 if (!equality_code
8570 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8571 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8572 fold_overflow_warning (("assuming pointer wraparound does not "
8573 "occur when comparing P +- C1 with "
8574 "P +- C2"),
8575 WARN_STRICT_OVERFLOW_COMPARISON);
8576
8577 return fold_build2_loc (loc, code, type, offset0, offset1);
8578 }
8579 }
8580 /* For equal offsets we can simplify to a comparison of the
8581 base addresses. */
8582 else if (bitpos0 == bitpos1
8583 && (indirect_base0
8584 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8585 && (indirect_base1
8586 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8587 && ((offset0 == offset1)
8588 || (offset0 && offset1
8589 && operand_equal_p (offset0, offset1, 0))))
8590 {
8591 if (indirect_base0)
8592 base0 = build_fold_addr_expr_loc (loc, base0);
8593 if (indirect_base1)
8594 base1 = build_fold_addr_expr_loc (loc, base1);
8595 return fold_build2_loc (loc, code, type, base0, base1);
8596 }
8597 }
8598
8599 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8600 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8601 the resulting offset is smaller in absolute value than the
8602 original one and has the same sign. */
8603 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8604 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8605 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8606 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8607 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8608 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8609 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8610 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8611 {
8612 tree const1 = TREE_OPERAND (arg0, 1);
8613 tree const2 = TREE_OPERAND (arg1, 1);
8614 tree variable1 = TREE_OPERAND (arg0, 0);
8615 tree variable2 = TREE_OPERAND (arg1, 0);
8616 tree cst;
8617 const char * const warnmsg = G_("assuming signed overflow does not "
8618 "occur when combining constants around "
8619 "a comparison");
8620
8621 /* Put the constant on the side where it doesn't overflow and is
8622 	 of lower absolute value and of the same sign as before.  */
8623 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8624 ? MINUS_EXPR : PLUS_EXPR,
8625 const2, const1);
8626 if (!TREE_OVERFLOW (cst)
8627 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8628 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8629 {
8630 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8631 return fold_build2_loc (loc, code, type,
8632 variable1,
8633 fold_build2_loc (loc, TREE_CODE (arg1),
8634 TREE_TYPE (arg1),
8635 variable2, cst));
8636 }
8637
8638 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8639 ? MINUS_EXPR : PLUS_EXPR,
8640 const1, const2);
8641 if (!TREE_OVERFLOW (cst)
8642 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8643 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8644 {
8645 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8646 return fold_build2_loc (loc, code, type,
8647 fold_build2_loc (loc, TREE_CODE (arg0),
8648 TREE_TYPE (arg0),
8649 variable1, cst),
8650 variable2);
8651 }
8652 }
8653
8654 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8655 if (tem)
8656 return tem;
8657
8658 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8659 constant, we can simplify it. */
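/* An illustrative sketch with hypothetical operands: because 5 >= 3,
     MIN (x, 5) < 3   may fold to   x < 3
   while MAX (x, 5) < 3 can never hold and may fold to false.  */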
8660 if (TREE_CODE (arg1) == INTEGER_CST
8661 && (TREE_CODE (arg0) == MIN_EXPR
8662 || TREE_CODE (arg0) == MAX_EXPR)
8663 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8664 {
8665 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8666 if (tem)
8667 return tem;
8668 }
8669
8670 /* If we are comparing an expression that just has comparisons
8671 of two integer values, arithmetic expressions of those comparisons,
8672 and constants, we can simplify it. There are only three cases
8673 to check: the two values can either be equal, the first can be
8674 greater, or the second can be greater. Fold the expression for
8675 those three values. Since each value must be 0 or 1, we have
8676 eight possibilities, each of which corresponds to the constant 0
8677 or 1 or one of the six possible comparisons.
8678
8679 This handles common cases like (a > b) == 0 but also handles
8680 expressions like ((x > y) - (y > x)) > 0, which supposedly
8681 occur in macroized code. */
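/* An illustrative sketch with hypothetical x and y: for
     ((x > y) - (y > x)) > 0
   evaluating at the three possible orderings gives
     x > y : (1 - 0) > 0 == 1
     x == y: (0 - 0) > 0 == 0
     x < y : (0 - 1) > 0 == 0
   i.e. the 3-bit mask 4 computed below, which selects GT_EXPR, so
   the whole expression may fold to x > y.  */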
8682
8683 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8684 {
8685 tree cval1 = 0, cval2 = 0;
8686 int save_p = 0;
8687
8688 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8689 /* Don't handle degenerate cases here; they should already
8690 have been handled anyway. */
8691 && cval1 != 0 && cval2 != 0
8692 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8693 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8694 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8695 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8696 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8697 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8698 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8699 {
8700 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8701 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8702
8703 /* We can't just pass T to eval_subst in case cval1 or cval2
8704 was the same as ARG1. */
8705
8706 tree high_result
8707 = fold_build2_loc (loc, code, type,
8708 eval_subst (loc, arg0, cval1, maxval,
8709 cval2, minval),
8710 arg1);
8711 tree equal_result
8712 = fold_build2_loc (loc, code, type,
8713 eval_subst (loc, arg0, cval1, maxval,
8714 cval2, maxval),
8715 arg1);
8716 tree low_result
8717 = fold_build2_loc (loc, code, type,
8718 eval_subst (loc, arg0, cval1, minval,
8719 cval2, maxval),
8720 arg1);
8721
8722 /* All three of these results should be 0 or 1. Confirm they are.
8723 Then use those values to select the proper code to use. */
8724
8725 if (TREE_CODE (high_result) == INTEGER_CST
8726 && TREE_CODE (equal_result) == INTEGER_CST
8727 && TREE_CODE (low_result) == INTEGER_CST)
8728 {
8729 /* Make a 3-bit mask with the high-order bit being the
8730 value for `>', the next for `=', and the low for `<'. */
8731 switch ((integer_onep (high_result) * 4)
8732 + (integer_onep (equal_result) * 2)
8733 + integer_onep (low_result))
8734 {
8735 case 0:
8736 /* Always false. */
8737 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8738 case 1:
8739 code = LT_EXPR;
8740 break;
8741 case 2:
8742 code = EQ_EXPR;
8743 break;
8744 case 3:
8745 code = LE_EXPR;
8746 break;
8747 case 4:
8748 code = GT_EXPR;
8749 break;
8750 case 5:
8751 code = NE_EXPR;
8752 break;
8753 case 6:
8754 code = GE_EXPR;
8755 break;
8756 case 7:
8757 /* Always true. */
8758 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8759 }
8760
8761 if (save_p)
8762 {
8763 tem = save_expr (build2 (code, type, cval1, cval2));
8764 SET_EXPR_LOCATION (tem, loc);
8765 return tem;
8766 }
8767 return fold_build2_loc (loc, code, type, cval1, cval2);
8768 }
8769 }
8770 }
8771
8772 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8773 into a single range test. */
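/* An illustrative sketch with a hypothetical unsigned x:
     x / 4 == 2
   holds exactly for x in [8, 11], so it may fold to a single
   range test equivalent to 8 <= x && x <= 11.  */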
8774 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8775 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8776 && TREE_CODE (arg1) == INTEGER_CST
8777 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8778 && !integer_zerop (TREE_OPERAND (arg0, 1))
8779 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8780 && !TREE_OVERFLOW (arg1))
8781 {
8782 tem = fold_div_compare (loc, code, type, arg0, arg1);
8783 if (tem != NULL_TREE)
8784 return tem;
8785 }
8786
8787 return NULL_TREE;
8788 }
8789
8790
8791 /* Subroutine of fold_binary. Optimize complex multiplications of the
8792 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8793 argument EXPR represents the expression "z" of type TYPE. */
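/* An illustrative sketch with a hypothetical complex value
   z = a + b*i:
     z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b + 0*i
   so the folded result is COMPLEX_EXPR <a*a + b*b, 0>.  */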
8794
8795 static tree
8796 fold_mult_zconjz (location_t loc, tree type, tree expr)
8797 {
8798 tree itype = TREE_TYPE (type);
8799 tree rpart, ipart, tem;
8800
8801 if (TREE_CODE (expr) == COMPLEX_EXPR)
8802 {
8803 rpart = TREE_OPERAND (expr, 0);
8804 ipart = TREE_OPERAND (expr, 1);
8805 }
8806 else if (TREE_CODE (expr) == COMPLEX_CST)
8807 {
8808 rpart = TREE_REALPART (expr);
8809 ipart = TREE_IMAGPART (expr);
8810 }
8811 else
8812 {
8813 expr = save_expr (expr);
8814 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8815 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8816 }
8817
8818 rpart = save_expr (rpart);
8819 ipart = save_expr (ipart);
8820 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8821 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8822 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8823 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8824 build_zero_cst (itype));
8825 }
8826
8827
8828 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8829 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8830
8831 static bool
8832 vec_cst_ctor_to_array (tree arg, tree *elts)
8833 {
8834 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8835
8836 if (TREE_CODE (arg) == VECTOR_CST)
8837 {
8838 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8839 elts[i] = VECTOR_CST_ELT (arg, i);
8840 }
8841 else if (TREE_CODE (arg) == CONSTRUCTOR)
8842 {
8843 constructor_elt *elt;
8844
8845 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8846 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8847 return false;
8848 else
8849 elts[i] = elt->value;
8850 }
8851 else
8852 return false;
8853 for (; i < nelts; i++)
8854 elts[i]
8855 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8856 return true;
8857 }
8858
8859 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8860 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8861 NULL_TREE otherwise. */
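/* An illustrative sketch with hypothetical 4-element vectors:
     arg0 = { 10, 11, 12, 13 }, arg1 = { 20, 21, 22, 23 }
   and sel = { 0, 4, 1, 5 }.  Selector values below 4 index into
   ARG0 and values 4..7 index into ARG1, so the folded result is
     { 10, 20, 11, 21 }.  */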
8862
8863 static tree
8864 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8865 {
8866 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8867 tree *elts;
8868 bool need_ctor = false;
8869
8870 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8871 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8872 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8873 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8874 return NULL_TREE;
8875
8876 elts = XALLOCAVEC (tree, nelts * 3);
8877 if (!vec_cst_ctor_to_array (arg0, elts)
8878 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8879 return NULL_TREE;
8880
8881 for (i = 0; i < nelts; i++)
8882 {
8883 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8884 need_ctor = true;
8885 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8886 }
8887
8888 if (need_ctor)
8889 {
8890 vec<constructor_elt, va_gc> *v;
8891 vec_alloc (v, nelts);
8892 for (i = 0; i < nelts; i++)
8893 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8894 return build_constructor (type, v);
8895 }
8896 else
8897 return build_vector (type, &elts[2 * nelts]);
8898 }
8899
8900 /* Try to fold a pointer difference of type TYPE between two address
8901 expressions of array references AREF0 and AREF1, using location LOC.
8902 Return a simplified expression for the difference or NULL_TREE. */
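/* An illustrative sketch with a hypothetical int array a: in
     &a[i] - &a[j]
   the bases are equal, so base_offset is zero and the difference
   folds to
     (i - j) * sizeof (int).  */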
8903
8904 static tree
8905 fold_addr_of_array_ref_difference (location_t loc, tree type,
8906 tree aref0, tree aref1)
8907 {
8908 tree base0 = TREE_OPERAND (aref0, 0);
8909 tree base1 = TREE_OPERAND (aref1, 0);
8910 tree base_offset = build_int_cst (type, 0);
8911
8912 /* If the bases are array references as well, recurse. If the bases
8913 are pointer indirections compute the difference of the pointers.
8914 If the bases are equal, we are set. */
8915 if ((TREE_CODE (base0) == ARRAY_REF
8916 && TREE_CODE (base1) == ARRAY_REF
8917 && (base_offset
8918 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8919 || (INDIRECT_REF_P (base0)
8920 && INDIRECT_REF_P (base1)
8921 && (base_offset
8922 = fold_binary_loc (loc, MINUS_EXPR, type,
8923 fold_convert (type, TREE_OPERAND (base0, 0)),
8924 fold_convert (type,
8925 TREE_OPERAND (base1, 0)))))
8926 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8927 {
8928 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8929 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8930 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8931 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8932 return fold_build2_loc (loc, PLUS_EXPR, type,
8933 base_offset,
8934 fold_build2_loc (loc, MULT_EXPR, type,
8935 diff, esz));
8936 }
8937 return NULL_TREE;
8938 }
8939
8940 /* If the real or vector real constant CST of type TYPE has an exact
8941 inverse, return it, else return NULL. */
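/* An illustrative sketch with hypothetical doubles: 0.25 has the
   exact inverse 4.0, so a caller may rewrite x / 0.25 as x * 4.0,
   whereas 0.1 has no exactly representable inverse and NULL_TREE
   is returned.  */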
8942
8943 tree
8944 exact_inverse (tree type, tree cst)
8945 {
8946 REAL_VALUE_TYPE r;
8947 tree unit_type, *elts;
8948 machine_mode mode;
8949 unsigned vec_nelts, i;
8950
8951 switch (TREE_CODE (cst))
8952 {
8953 case REAL_CST:
8954 r = TREE_REAL_CST (cst);
8955
8956 if (exact_real_inverse (TYPE_MODE (type), &r))
8957 return build_real (type, r);
8958
8959 return NULL_TREE;
8960
8961 case VECTOR_CST:
8962 vec_nelts = VECTOR_CST_NELTS (cst);
8963 elts = XALLOCAVEC (tree, vec_nelts);
8964 unit_type = TREE_TYPE (type);
8965 mode = TYPE_MODE (unit_type);
8966
8967 for (i = 0; i < vec_nelts; i++)
8968 {
8969 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8970 if (!exact_real_inverse (mode, &r))
8971 return NULL_TREE;
8972 elts[i] = build_real (unit_type, r);
8973 }
8974
8975 return build_vector (type, elts);
8976
8977 default:
8978 return NULL_TREE;
8979 }
8980 }
8981
8982 /* Mask out the tz least significant bits of X of type TYPE where
8983 tz is the number of trailing zeroes in Y. */
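/* An illustrative sketch with hypothetical values: for
   y = 0b1000 (tz == 3) and x = 0b1111, the three least
   significant bits of x are cleared, yielding 0b1000.  */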
8984 static wide_int
8985 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8986 {
8987 int tz = wi::ctz (y);
8988 if (tz > 0)
8989 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8990 return x;
8991 }
8992
8993 /* Return true when T is an address and is known to be nonzero.
8994 For floating point we further ensure that T is not denormal.
8995 Similar logic is present in nonzero_address in rtlanal.h.
8996
8997 If the return value is based on the assumption that signed overflow
8998 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8999 change *STRICT_OVERFLOW_P. */
9000
9001 static bool
9002 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9003 {
9004 tree type = TREE_TYPE (t);
9005 enum tree_code code;
9006
9007 /* Doing something useful for floating point would need more work. */
9008 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9009 return false;
9010
9011 code = TREE_CODE (t);
9012 switch (TREE_CODE_CLASS (code))
9013 {
9014 case tcc_unary:
9015 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9016 strict_overflow_p);
9017 case tcc_binary:
9018 case tcc_comparison:
9019 return tree_binary_nonzero_warnv_p (code, type,
9020 TREE_OPERAND (t, 0),
9021 TREE_OPERAND (t, 1),
9022 strict_overflow_p);
9023 case tcc_constant:
9024 case tcc_declaration:
9025 case tcc_reference:
9026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9027
9028 default:
9029 break;
9030 }
9031
9032 switch (code)
9033 {
9034 case TRUTH_NOT_EXPR:
9035 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9036 strict_overflow_p);
9037
9038 case TRUTH_AND_EXPR:
9039 case TRUTH_OR_EXPR:
9040 case TRUTH_XOR_EXPR:
9041 return tree_binary_nonzero_warnv_p (code, type,
9042 TREE_OPERAND (t, 0),
9043 TREE_OPERAND (t, 1),
9044 strict_overflow_p);
9045
9046 case COND_EXPR:
9047 case CONSTRUCTOR:
9048 case OBJ_TYPE_REF:
9049 case ASSERT_EXPR:
9050 case ADDR_EXPR:
9051 case WITH_SIZE_EXPR:
9052 case SSA_NAME:
9053 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9054
9055 case COMPOUND_EXPR:
9056 case MODIFY_EXPR:
9057 case BIND_EXPR:
9058 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9059 strict_overflow_p);
9060
9061 case SAVE_EXPR:
9062 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9063 strict_overflow_p);
9064
9065 case CALL_EXPR:
9066 {
9067 tree fndecl = get_callee_fndecl (t);
9068 if (!fndecl) return false;
9069 if (flag_delete_null_pointer_checks && !flag_check_new
9070 && DECL_IS_OPERATOR_NEW (fndecl)
9071 && !TREE_NOTHROW (fndecl))
9072 return true;
9073 if (flag_delete_null_pointer_checks
9074 && lookup_attribute ("returns_nonnull",
9075 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9076 return true;
9077 return alloca_call_p (t);
9078 }
9079
9080 default:
9081 break;
9082 }
9083 return false;
9084 }
9085
9086 /* Return true when T is an address and is known to be nonzero.
9087 Handle warnings about undefined signed overflow. */
9088
9089 static bool
9090 tree_expr_nonzero_p (tree t)
9091 {
9092 bool ret, strict_overflow_p;
9093
9094 strict_overflow_p = false;
9095 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9096 if (strict_overflow_p)
9097 fold_overflow_warning (("assuming signed overflow does not occur when "
9098 "determining that expression is always "
9099 "non-zero"),
9100 WARN_STRICT_OVERFLOW_MISC);
9101 return ret;
9102 }
9103
9104 /* Fold a binary expression of code CODE and type TYPE with operands
9105 OP0 and OP1. LOC is the location of the resulting expression.
9106 Return the folded expression if folding is successful. Otherwise,
9107 return NULL_TREE. */
9108
9109 tree
9110 fold_binary_loc (location_t loc,
9111 enum tree_code code, tree type, tree op0, tree op1)
9112 {
9113 enum tree_code_class kind = TREE_CODE_CLASS (code);
9114 tree arg0, arg1, tem;
9115 tree t1 = NULL_TREE;
9116 bool strict_overflow_p;
9117 unsigned int prec;
9118
9119 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9120 && TREE_CODE_LENGTH (code) == 2
9121 && op0 != NULL_TREE
9122 && op1 != NULL_TREE);
9123
9124 arg0 = op0;
9125 arg1 = op1;
9126
9127 /* Strip any conversions that don't change the mode. This is
9128 safe for every expression, except for a comparison expression
9129 because its signedness is derived from its operands. So, in
9130 the latter case, only strip conversions that don't change the
9131 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9132 preserved.
9133
9134 Note that this is done as an internal manipulation within the
9135 constant folder, in order to find the simplest representation
9136 of the arguments so that their form can be studied. In any
9137 case, the appropriate type conversions should be put back in
9138 the tree that will get out of the constant folder. */
9139
9140 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9141 {
9142 STRIP_SIGN_NOPS (arg0);
9143 STRIP_SIGN_NOPS (arg1);
9144 }
9145 else
9146 {
9147 STRIP_NOPS (arg0);
9148 STRIP_NOPS (arg1);
9149 }
9150
9151 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9152 constant but we can't do arithmetic on them. */
9153 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9154 {
9155 tem = const_binop (code, type, arg0, arg1);
9156 if (tem != NULL_TREE)
9157 {
9158 if (TREE_TYPE (tem) != type)
9159 tem = fold_convert_loc (loc, type, tem);
9160 return tem;
9161 }
9162 }
9163
9164 /* If this is a commutative operation, and ARG0 is a constant, move it
9165 to ARG1 to reduce the number of tests below. */
9166 if (commutative_tree_code (code)
9167 && tree_swap_operands_p (arg0, arg1, true))
9168 return fold_build2_loc (loc, code, type, op1, op0);
9169
9170 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9171 to ARG1 to reduce the number of tests below. */
9172 if (kind == tcc_comparison
9173 && tree_swap_operands_p (arg0, arg1, true))
9174 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9175
9176 tem = generic_simplify (loc, code, type, op0, op1);
9177 if (tem)
9178 return tem;
9179
9180 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9181
9182 First check for cases where an arithmetic operation is applied to a
9183 compound, conditional, or comparison operation. Push the arithmetic
9184 operation inside the compound or conditional to see if any folding
9185 can then be done. Convert comparison to conditional for this purpose.
9186 This also optimizes non-constant cases that used to be done in
9187 expand_expr.
9188
9189 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9190 where one of the operands is a truth value and the other is a truth
9191 value or a BIT_AND_EXPR with the constant 1. In that case, the
9192 code below would make the expression more complex. Change it to a
9193 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9194 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9195
9196 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9197 || code == EQ_EXPR || code == NE_EXPR)
9198 && TREE_CODE (type) != VECTOR_TYPE
9199 && ((truth_value_p (TREE_CODE (arg0))
9200 && (truth_value_p (TREE_CODE (arg1))
9201 || (TREE_CODE (arg1) == BIT_AND_EXPR
9202 && integer_onep (TREE_OPERAND (arg1, 1)))))
9203 || (truth_value_p (TREE_CODE (arg1))
9204 && (truth_value_p (TREE_CODE (arg0))
9205 || (TREE_CODE (arg0) == BIT_AND_EXPR
9206 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9207 {
9208 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9209 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9210 : TRUTH_XOR_EXPR,
9211 boolean_type_node,
9212 fold_convert_loc (loc, boolean_type_node, arg0),
9213 fold_convert_loc (loc, boolean_type_node, arg1));
9214
9215 if (code == EQ_EXPR)
9216 tem = invert_truthvalue_loc (loc, tem);
9217
9218 return fold_convert_loc (loc, type, tem);
9219 }
9220
9221 if (TREE_CODE_CLASS (code) == tcc_binary
9222 || TREE_CODE_CLASS (code) == tcc_comparison)
9223 {
9224 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9225 {
9226 tem = fold_build2_loc (loc, code, type,
9227 fold_convert_loc (loc, TREE_TYPE (op0),
9228 TREE_OPERAND (arg0, 1)), op1);
9229 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9230 tem);
9231 }
9232 if (TREE_CODE (arg1) == COMPOUND_EXPR
9233 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9234 {
9235 tem = fold_build2_loc (loc, code, type, op0,
9236 fold_convert_loc (loc, TREE_TYPE (op1),
9237 TREE_OPERAND (arg1, 1)));
9238 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9239 tem);
9240 }
9241
9242 if (TREE_CODE (arg0) == COND_EXPR
9243 || TREE_CODE (arg0) == VEC_COND_EXPR
9244 || COMPARISON_CLASS_P (arg0))
9245 {
9246 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9247 arg0, arg1,
9248 /*cond_first_p=*/1);
9249 if (tem != NULL_TREE)
9250 return tem;
9251 }
9252
9253 if (TREE_CODE (arg1) == COND_EXPR
9254 || TREE_CODE (arg1) == VEC_COND_EXPR
9255 || COMPARISON_CLASS_P (arg1))
9256 {
9257 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9258 arg1, arg0,
9259 /*cond_first_p=*/0);
9260 if (tem != NULL_TREE)
9261 return tem;
9262 }
9263 }
9264
9265 switch (code)
9266 {
9267 case MEM_REF:
9268 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9269 if (TREE_CODE (arg0) == ADDR_EXPR
9270 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9271 {
9272 tree iref = TREE_OPERAND (arg0, 0);
9273 return fold_build2 (MEM_REF, type,
9274 TREE_OPERAND (iref, 0),
9275 int_const_binop (PLUS_EXPR, arg1,
9276 TREE_OPERAND (iref, 1)));
9277 }
9278
9279 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9280 if (TREE_CODE (arg0) == ADDR_EXPR
9281 && handled_component_p (TREE_OPERAND (arg0, 0)))
9282 {
9283 tree base;
9284 HOST_WIDE_INT coffset;
9285 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9286 &coffset);
9287 if (!base)
9288 return NULL_TREE;
9289 return fold_build2 (MEM_REF, type,
9290 build_fold_addr_expr (base),
9291 int_const_binop (PLUS_EXPR, arg1,
9292 size_int (coffset)));
9293 }
9294
9295 return NULL_TREE;
9296
9297 case POINTER_PLUS_EXPR:
9298 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9299 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9300 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9301 return fold_convert_loc (loc, type,
9302 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9303 fold_convert_loc (loc, sizetype,
9304 arg1),
9305 fold_convert_loc (loc, sizetype,
9306 arg0)));
9307
9308 return NULL_TREE;
9309
9310 case PLUS_EXPR:
9311 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9312 {
9313 /* X + (X / CST) * -CST is X % CST. */
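/* An illustrative sketch with a hypothetical signed x: in
     x + (x / 16) * -16
   the two constants sum to zero, so the expression may fold
   to x % 16.  */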
9314 if (TREE_CODE (arg1) == MULT_EXPR
9315 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9316 && operand_equal_p (arg0,
9317 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9318 {
9319 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9320 tree cst1 = TREE_OPERAND (arg1, 1);
9321 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9322 cst1, cst0);
9323 if (sum && integer_zerop (sum))
9324 return fold_convert_loc (loc, type,
9325 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9326 TREE_TYPE (arg0), arg0,
9327 cst0));
9328 }
9329 }
9330
9331 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9332 one. Make sure the type is not saturating and has the signedness of
9333 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9334 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9335 if ((TREE_CODE (arg0) == MULT_EXPR
9336 || TREE_CODE (arg1) == MULT_EXPR)
9337 && !TYPE_SATURATING (type)
9338 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9339 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9340 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9341 {
9342 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9343 if (tem)
9344 return tem;
9345 }
9346
9347 if (! FLOAT_TYPE_P (type))
9348 {
9349 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9350 (plus (plus (mult) (mult)) (foo)) so that we can
9351 take advantage of the factoring cases below. */
9352 if (ANY_INTEGRAL_TYPE_P (type)
9353 && TYPE_OVERFLOW_WRAPS (type)
9354 && (((TREE_CODE (arg0) == PLUS_EXPR
9355 || TREE_CODE (arg0) == MINUS_EXPR)
9356 && TREE_CODE (arg1) == MULT_EXPR)
9357 || ((TREE_CODE (arg1) == PLUS_EXPR
9358 || TREE_CODE (arg1) == MINUS_EXPR)
9359 && TREE_CODE (arg0) == MULT_EXPR)))
9360 {
9361 tree parg0, parg1, parg, marg;
9362 enum tree_code pcode;
9363
9364 if (TREE_CODE (arg1) == MULT_EXPR)
9365 parg = arg0, marg = arg1;
9366 else
9367 parg = arg1, marg = arg0;
9368 pcode = TREE_CODE (parg);
9369 parg0 = TREE_OPERAND (parg, 0);
9370 parg1 = TREE_OPERAND (parg, 1);
9371 STRIP_NOPS (parg0);
9372 STRIP_NOPS (parg1);
9373
9374 if (TREE_CODE (parg0) == MULT_EXPR
9375 && TREE_CODE (parg1) != MULT_EXPR)
9376 return fold_build2_loc (loc, pcode, type,
9377 fold_build2_loc (loc, PLUS_EXPR, type,
9378 fold_convert_loc (loc, type,
9379 parg0),
9380 fold_convert_loc (loc, type,
9381 marg)),
9382 fold_convert_loc (loc, type, parg1));
9383 if (TREE_CODE (parg0) != MULT_EXPR
9384 && TREE_CODE (parg1) == MULT_EXPR)
9385 return
9386 fold_build2_loc (loc, PLUS_EXPR, type,
9387 fold_convert_loc (loc, type, parg0),
9388 fold_build2_loc (loc, pcode, type,
9389 fold_convert_loc (loc, type, marg),
9390 fold_convert_loc (loc, type,
9391 parg1)));
9392 }
9393 }
9394 else
9395 {
9396 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9397 to __complex__ ( x, y ). This is not the same for SNaNs or
9398 if signed zeros are involved. */
9399 if (!HONOR_SNANS (element_mode (arg0))
9400 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9401 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9402 {
9403 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9404 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9405 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9406 bool arg0rz = false, arg0iz = false;
9407 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9408 || (arg0i && (arg0iz = real_zerop (arg0i))))
9409 {
9410 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9411 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9412 if (arg0rz && arg1i && real_zerop (arg1i))
9413 {
9414 tree rp = arg1r ? arg1r
9415 : build1 (REALPART_EXPR, rtype, arg1);
9416 tree ip = arg0i ? arg0i
9417 : build1 (IMAGPART_EXPR, rtype, arg0);
9418 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9419 }
9420 else if (arg0iz && arg1r && real_zerop (arg1r))
9421 {
9422 tree rp = arg0r ? arg0r
9423 : build1 (REALPART_EXPR, rtype, arg0);
9424 tree ip = arg1i ? arg1i
9425 : build1 (IMAGPART_EXPR, rtype, arg1);
9426 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9427 }
9428 }
9429 }
9430
9431 if (flag_unsafe_math_optimizations
9432 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9433 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9434 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9435 return tem;
9436
9437 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9438 We associate floats only if the user has specified
9439 -fassociative-math. */
9440 if (flag_associative_math
9441 && TREE_CODE (arg1) == PLUS_EXPR
9442 && TREE_CODE (arg0) != MULT_EXPR)
9443 {
9444 tree tree10 = TREE_OPERAND (arg1, 0);
9445 tree tree11 = TREE_OPERAND (arg1, 1);
9446 if (TREE_CODE (tree11) == MULT_EXPR
9447 && TREE_CODE (tree10) == MULT_EXPR)
9448 {
9449 tree tree0;
9450 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9451 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9452 }
9453 }
9454 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9455 We associate floats only if the user has specified
9456 -fassociative-math. */
9457 if (flag_associative_math
9458 && TREE_CODE (arg0) == PLUS_EXPR
9459 && TREE_CODE (arg1) != MULT_EXPR)
9460 {
9461 tree tree00 = TREE_OPERAND (arg0, 0);
9462 tree tree01 = TREE_OPERAND (arg0, 1);
9463 if (TREE_CODE (tree01) == MULT_EXPR
9464 && TREE_CODE (tree00) == MULT_EXPR)
9465 {
9466 tree tree0;
9467 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9468 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9469 }
9470 }
9471 }
9472
9473 bit_rotate:
9474 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9475 is a rotate of A by C1 bits. */
9476 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9477 is a rotate of A by B bits. */
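/* An illustrative sketch with a hypothetical 32-bit unsigned a:
     (a << 3) + (a >> 29)
   has shift counts summing to the precision (3 + 29 == 32), so it
   may fold to a rotate of a left by 3; likewise
     (a << b) + (a >> (32 - b))
   may fold to a rotate of a left by b.  */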
9478 {
9479 enum tree_code code0, code1;
9480 tree rtype;
9481 code0 = TREE_CODE (arg0);
9482 code1 = TREE_CODE (arg1);
9483 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9484 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9485 && operand_equal_p (TREE_OPERAND (arg0, 0),
9486 TREE_OPERAND (arg1, 0), 0)
9487 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9488 TYPE_UNSIGNED (rtype))
9489 /* Only create rotates in complete modes. Other cases are not
9490 expanded properly. */
9491 && (element_precision (rtype)
9492 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9493 {
9494 tree tree01, tree11;
9495 enum tree_code code01, code11;
9496
9497 tree01 = TREE_OPERAND (arg0, 1);
9498 tree11 = TREE_OPERAND (arg1, 1);
9499 STRIP_NOPS (tree01);
9500 STRIP_NOPS (tree11);
9501 code01 = TREE_CODE (tree01);
9502 code11 = TREE_CODE (tree11);
9503 if (code01 == INTEGER_CST
9504 && code11 == INTEGER_CST
9505 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9506 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9507 {
9508 tem = build2_loc (loc, LROTATE_EXPR,
9509 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9510 TREE_OPERAND (arg0, 0),
9511 code0 == LSHIFT_EXPR
9512 ? TREE_OPERAND (arg0, 1)
9513 : TREE_OPERAND (arg1, 1));
9514 return fold_convert_loc (loc, type, tem);
9515 }
9516 else if (code11 == MINUS_EXPR)
9517 {
9518 tree tree110, tree111;
9519 tree110 = TREE_OPERAND (tree11, 0);
9520 tree111 = TREE_OPERAND (tree11, 1);
9521 STRIP_NOPS (tree110);
9522 STRIP_NOPS (tree111);
9523 if (TREE_CODE (tree110) == INTEGER_CST
9524 && 0 == compare_tree_int (tree110,
9525 element_precision
9526 (TREE_TYPE (TREE_OPERAND
9527 (arg0, 0))))
9528 && operand_equal_p (tree01, tree111, 0))
9529 return
9530 fold_convert_loc (loc, type,
9531 build2 ((code0 == LSHIFT_EXPR
9532 ? LROTATE_EXPR
9533 : RROTATE_EXPR),
9534 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9535 TREE_OPERAND (arg0, 0),
9536 TREE_OPERAND (arg0, 1)));
9537 }
9538 else if (code01 == MINUS_EXPR)
9539 {
9540 tree tree010, tree011;
9541 tree010 = TREE_OPERAND (tree01, 0);
9542 tree011 = TREE_OPERAND (tree01, 1);
9543 STRIP_NOPS (tree010);
9544 STRIP_NOPS (tree011);
9545 if (TREE_CODE (tree010) == INTEGER_CST
9546 && 0 == compare_tree_int (tree010,
9547 element_precision
9548 (TREE_TYPE (TREE_OPERAND
9549 (arg0, 0))))
9550 && operand_equal_p (tree11, tree011, 0))
9551 return fold_convert_loc
9552 (loc, type,
9553 build2 ((code0 != LSHIFT_EXPR
9554 ? LROTATE_EXPR
9555 : RROTATE_EXPR),
9556 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9557 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9558 }
9559 }
9560 }
9561
9562 associate:
9563 /* In most languages, we can't associate operations on floats through
9564 parentheses. Rather than remember where the parentheses were, we
9565 don't associate floats at all, unless the user has specified
9566 -fassociative-math.
9567 Also, we need to make sure the type is not saturating. */
9568
9569 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9570 && !TYPE_SATURATING (type))
9571 {
9572 tree var0, con0, lit0, minus_lit0;
9573 tree var1, con1, lit1, minus_lit1;
9574 tree atype = type;
9575 bool ok = true;
9576
9577 /* Split both trees into variables, constants, and literals. Then
9578 associate each group together, the constants with literals,
9579 then the result with variables. This increases the chances of
9580 literals being recombined later and of generating relocatable
9581 expressions for the sum of a constant and literal. */
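/* An illustrative sketch with hypothetical unsigned (wrapping)
   x and y: splitting
     (x + 5) + (y + 7)
   yields variables x and y and literals 5 and 7; the literals
   associate first, so the result may become (x + y) + 12.  */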
9582 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9583 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9584 code == MINUS_EXPR);
9585
9586 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9587 if (code == MINUS_EXPR)
9588 code = PLUS_EXPR;
9589
9590 /* With undefined overflow prefer doing association in a type
9591 which wraps on overflow, if that is one of the operand types. */
9592 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9593 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9594 {
9595 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9596 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9597 atype = TREE_TYPE (arg0);
9598 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9599 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9600 atype = TREE_TYPE (arg1);
9601 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9602 }
9603
9604 /* With undefined overflow we can only associate constants with one
9605 variable, and constants whose association doesn't overflow. */
9606 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9607 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9608 {
9609 if (var0 && var1)
9610 {
9611 tree tmp0 = var0;
9612 tree tmp1 = var1;
9613 bool one_neg = false;
9614
9615 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9616 {
9617 tmp0 = TREE_OPERAND (tmp0, 0);
9618 one_neg = !one_neg;
9619 }
9620 if (CONVERT_EXPR_P (tmp0)
9621 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9623 <= TYPE_PRECISION (atype)))
9624 tmp0 = TREE_OPERAND (tmp0, 0);
9625 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9626 {
9627 tmp1 = TREE_OPERAND (tmp1, 0);
9628 one_neg = !one_neg;
9629 }
9630 if (CONVERT_EXPR_P (tmp1)
9631 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9632 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9633 <= TYPE_PRECISION (atype)))
9634 tmp1 = TREE_OPERAND (tmp1, 0);
9635 /* The only case we can still associate with two variables
9636 is if they cancel out. */
9637 if (!one_neg
9638 || !operand_equal_p (tmp0, tmp1, 0))
9639 ok = false;
9640 }
9641 }
9642
9643 /* Only do something if we found more than two objects. Otherwise,
9644 nothing has changed and we risk infinite recursion. */
9645 if (ok
9646 && (2 < ((var0 != 0) + (var1 != 0)
9647 + (con0 != 0) + (con1 != 0)
9648 + (lit0 != 0) + (lit1 != 0)
9649 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9650 {
9651 bool any_overflows = false;
9652 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9653 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9654 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9655 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9656 var0 = associate_trees (loc, var0, var1, code, atype);
9657 con0 = associate_trees (loc, con0, con1, code, atype);
9658 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9659 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9660 code, atype);
9661
9662 /* Preserve the MINUS_EXPR if the negative part of the literal is
9663 greater than the positive part. Otherwise, the multiplicative
9664 folding code (i.e. extract_muldiv) may be fooled when
9665 unsigned constants are subtracted, as in the following
9666 example: ((X*2 + 4) - 8U)/2. */
9667 if (minus_lit0 && lit0)
9668 {
9669 if (TREE_CODE (lit0) == INTEGER_CST
9670 && TREE_CODE (minus_lit0) == INTEGER_CST
9671 && tree_int_cst_lt (lit0, minus_lit0))
9672 {
9673 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9674 MINUS_EXPR, atype);
9675 lit0 = 0;
9676 }
9677 else
9678 {
9679 lit0 = associate_trees (loc, lit0, minus_lit0,
9680 MINUS_EXPR, atype);
9681 minus_lit0 = 0;
9682 }
9683 }
9684
9685 /* Don't introduce overflows through reassociation. */
9686 if (!any_overflows
9687 && ((lit0 && TREE_OVERFLOW_P (lit0))
9688 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9689 return NULL_TREE;
9690
9691 if (minus_lit0)
9692 {
9693 if (con0 == 0)
9694 return
9695 fold_convert_loc (loc, type,
9696 associate_trees (loc, var0, minus_lit0,
9697 MINUS_EXPR, atype));
9698 else
9699 {
9700 con0 = associate_trees (loc, con0, minus_lit0,
9701 MINUS_EXPR, atype);
9702 return
9703 fold_convert_loc (loc, type,
9704 associate_trees (loc, var0, con0,
9705 PLUS_EXPR, atype));
9706 }
9707 }
9708
9709 con0 = associate_trees (loc, con0, lit0, code, atype);
9710 return
9711 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9712 code, atype));
9713 }
9714 }
9715
9716 return NULL_TREE;
9717
9718 case MINUS_EXPR:
9719 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9720 if (TREE_CODE (arg0) == NEGATE_EXPR
9721 && negate_expr_p (op1)
9722 && reorder_operands_p (arg0, arg1))
9723 return fold_build2_loc (loc, MINUS_EXPR, type,
9724 negate_expr (op1),
9725 fold_convert_loc (loc, type,
9726 TREE_OPERAND (arg0, 0)));
9727
9728 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9729 __complex__ ( x, -y ). This is not the same for SNaNs or if
9730 signed zeros are involved. */
9731 if (!HONOR_SNANS (element_mode (arg0))
9732 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9733 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9734 {
9735 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9736 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9737 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9738 bool arg0rz = false, arg0iz = false;
9739 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9740 || (arg0i && (arg0iz = real_zerop (arg0i))))
9741 {
9742 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9743 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9744 if (arg0rz && arg1i && real_zerop (arg1i))
9745 {
9746 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9747 arg1r ? arg1r
9748 : build1 (REALPART_EXPR, rtype, arg1));
9749 tree ip = arg0i ? arg0i
9750 : build1 (IMAGPART_EXPR, rtype, arg0);
9751 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9752 }
9753 else if (arg0iz && arg1r && real_zerop (arg1r))
9754 {
9755 tree rp = arg0r ? arg0r
9756 : build1 (REALPART_EXPR, rtype, arg0);
9757 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9758 arg1i ? arg1i
9759 : build1 (IMAGPART_EXPR, rtype, arg1));
9760 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9761 }
9762 }
9763 }
9764
9765 /* A - B -> A + (-B) if B is easily negatable. */
9766 if (negate_expr_p (op1)
9767 && ! TYPE_OVERFLOW_SANITIZED (type)
9768 && ((FLOAT_TYPE_P (type)
9769 /* Avoid this transformation if B is a positive REAL_CST. */
9770 && (TREE_CODE (op1) != REAL_CST
9771 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9772 || INTEGRAL_TYPE_P (type)))
9773 return fold_build2_loc (loc, PLUS_EXPR, type,
9774 fold_convert_loc (loc, type, arg0),
9775 negate_expr (op1));
9776
9777 /* Fold &a[i] - &a[j] to i-j. */
9778 if (TREE_CODE (arg0) == ADDR_EXPR
9779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9780 && TREE_CODE (arg1) == ADDR_EXPR
9781 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9782 {
9783 tree tem = fold_addr_of_array_ref_difference (loc, type,
9784 TREE_OPERAND (arg0, 0),
9785 TREE_OPERAND (arg1, 0));
9786 if (tem)
9787 return tem;
9788 }
9789
9790 if (FLOAT_TYPE_P (type)
9791 && flag_unsafe_math_optimizations
9792 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9793 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9794 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9795 return tem;
9796
9797 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9798 one. Make sure the type is not saturating and has the signedness of
9799 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9800 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9801 if ((TREE_CODE (arg0) == MULT_EXPR
9802 || TREE_CODE (arg1) == MULT_EXPR)
9803 && !TYPE_SATURATING (type)
9804 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9805 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9806 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9807 {
9808 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9809 if (tem)
9810 return tem;
9811 }
9812
9813 goto associate;
9814
9815 case MULT_EXPR:
9816 if (! FLOAT_TYPE_P (type))
9817 {
9818 /* Transform x * -C into -x * C if x is easily negatable. */
9819 if (TREE_CODE (op1) == INTEGER_CST
9820 && tree_int_cst_sgn (op1) == -1
9821 && negate_expr_p (op0)
9822 && (tem = negate_expr (op1)) != op1
9823 && ! TREE_OVERFLOW (tem))
9824 return fold_build2_loc (loc, MULT_EXPR, type,
9825 fold_convert_loc (loc, type,
9826 negate_expr (op0)), tem);
9827
9828 /* (A + A) * C -> A * 2 * C */
9829 if (TREE_CODE (arg0) == PLUS_EXPR
9830 && TREE_CODE (arg1) == INTEGER_CST
9831 && operand_equal_p (TREE_OPERAND (arg0, 0),
9832 TREE_OPERAND (arg0, 1), 0))
9833 return fold_build2_loc (loc, MULT_EXPR, type,
9834 omit_one_operand_loc (loc, type,
9835 TREE_OPERAND (arg0, 0),
9836 TREE_OPERAND (arg0, 1)),
9837 fold_build2_loc (loc, MULT_EXPR, type,
9838 build_int_cst (type, 2), arg1));
9839
9840 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9841 sign-changing only. */
9842 if (TREE_CODE (arg1) == INTEGER_CST
9843 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9844 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9845 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9846
9847 strict_overflow_p = false;
9848 if (TREE_CODE (arg1) == INTEGER_CST
9849 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9850 &strict_overflow_p)))
9851 {
9852 if (strict_overflow_p)
9853 fold_overflow_warning (("assuming signed overflow does not "
9854 "occur when simplifying "
9855 "multiplication"),
9856 WARN_STRICT_OVERFLOW_MISC);
9857 return fold_convert_loc (loc, type, tem);
9858 }
9859
9860 /* Optimize z * conj(z) for integer complex numbers. */
9861 if (TREE_CODE (arg0) == CONJ_EXPR
9862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9863 return fold_mult_zconjz (loc, type, arg1);
9864 if (TREE_CODE (arg1) == CONJ_EXPR
9865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9866 return fold_mult_zconjz (loc, type, arg0);
9867 }
9868 else
9869 {
9870 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9871 This is not the same for NaNs or if signed zeros are
9872 involved. */
9873 if (!HONOR_NANS (arg0)
9874 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9875 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9876 && TREE_CODE (arg1) == COMPLEX_CST
9877 && real_zerop (TREE_REALPART (arg1)))
9878 {
9879 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9880 if (real_onep (TREE_IMAGPART (arg1)))
9881 return
9882 fold_build2_loc (loc, COMPLEX_EXPR, type,
9883 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9884 rtype, arg0)),
9885 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9886 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9887 return
9888 fold_build2_loc (loc, COMPLEX_EXPR, type,
9889 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9890 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9891 rtype, arg0)));
9892 }
9893
9894 /* Optimize z * conj(z) for floating point complex numbers.
9895 Guarded by flag_unsafe_math_optimizations as non-finite
9896 imaginary components don't produce scalar results. */
9897 if (flag_unsafe_math_optimizations
9898 && TREE_CODE (arg0) == CONJ_EXPR
9899 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9900 return fold_mult_zconjz (loc, type, arg1);
9901 if (flag_unsafe_math_optimizations
9902 && TREE_CODE (arg1) == CONJ_EXPR
9903 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9904 return fold_mult_zconjz (loc, type, arg0);
9905
9906 if (flag_unsafe_math_optimizations)
9907 {
9908
9909 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9910 if (!in_gimple_form
9911 && optimize
9912 && operand_equal_p (arg0, arg1, 0))
9913 {
9914 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9915
9916 if (powfn)
9917 {
9918 tree arg = build_real (type, dconst2);
9919 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9920 }
9921 }
9922 }
9923 }
9924 goto associate;
9925
9926 case BIT_IOR_EXPR:
9927 /* Canonicalize (X & C1) | C2. */
9928 if (TREE_CODE (arg0) == BIT_AND_EXPR
9929 && TREE_CODE (arg1) == INTEGER_CST
9930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9931 {
9932 int width = TYPE_PRECISION (type), w;
9933 wide_int c1 = TREE_OPERAND (arg0, 1);
9934 wide_int c2 = arg1;
9935
9936 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9937 if ((c1 & c2) == c1)
9938 return omit_one_operand_loc (loc, type, arg1,
9939 TREE_OPERAND (arg0, 0));
9940
9941 wide_int msk = wi::mask (width, false,
9942 TYPE_PRECISION (TREE_TYPE (arg1)));
9943
9944 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9945 if (msk.and_not (c1 | c2) == 0)
9946 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9947 TREE_OPERAND (arg0, 0), arg1);
9948
9949 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9950 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9951 mode which allows further optimizations. */
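/* An illustrative sketch with a hypothetical 32-bit x: in
     (x & 0x0f0f) | 0x00ff
   C1 & ~C2 == 0x0f00 and no mode-sized mask applies, so C1 is
   reduced and the expression may become
     (x & 0x0f00) | 0x00ff.  */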
9952 c1 &= msk;
9953 c2 &= msk;
9954 wide_int c3 = c1.and_not (c2);
9955 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9956 {
9957 wide_int mask = wi::mask (w, false,
9958 TYPE_PRECISION (type));
9959 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9960 {
9961 c3 = mask;
9962 break;
9963 }
9964 }
9965
9966 if (c3 != c1)
9967 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9968 fold_build2_loc (loc, BIT_AND_EXPR, type,
9969 TREE_OPERAND (arg0, 0),
9970 wide_int_to_tree (type,
9971 c3)),
9972 arg1);
9973 }
9974
9975 /* See if this can be simplified into a rotate first. If that
9976 is unsuccessful continue in the association code. */
9977 goto bit_rotate;
9978
9979 case BIT_XOR_EXPR:
9980 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9981 if (TREE_CODE (arg0) == BIT_AND_EXPR
9982 && INTEGRAL_TYPE_P (type)
9983 && integer_onep (TREE_OPERAND (arg0, 1))
9984 && integer_onep (arg1))
9985 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9986 build_zero_cst (TREE_TYPE (arg0)));
9987
9988 /* See if this can be simplified into a rotate first. If that
9989 is unsuccessful continue in the association code. */
9990 goto bit_rotate;
9991
9992 case BIT_AND_EXPR:
9993 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9994 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9995 && INTEGRAL_TYPE_P (type)
9996 && integer_onep (TREE_OPERAND (arg0, 1))
9997 && integer_onep (arg1))
9998 {
9999 tree tem2;
10000 tem = TREE_OPERAND (arg0, 0);
10001 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10002 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10003 tem, tem2);
10004 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10005 build_zero_cst (TREE_TYPE (tem)));
10006 }
10007 /* Fold ~X & 1 as (X & 1) == 0. */
10008 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10009 && INTEGRAL_TYPE_P (type)
10010 && integer_onep (arg1))
10011 {
10012 tree tem2;
10013 tem = TREE_OPERAND (arg0, 0);
10014 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10015 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10016 tem, tem2);
10017 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10018 build_zero_cst (TREE_TYPE (tem)));
10019 }
10020 /* Fold !X & 1 as X == 0. */
10021 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10022 && integer_onep (arg1))
10023 {
10024 tem = TREE_OPERAND (arg0, 0);
10025 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10026 build_zero_cst (TREE_TYPE (tem)));
10027 }
10028
10029 /* Fold (X ^ Y) & Y as ~X & Y. */
10030 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10031 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10032 {
10033 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10034 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10035 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10036 fold_convert_loc (loc, type, arg1));
10037 }
10038 /* Fold (X ^ Y) & X as ~Y & X. */
10039 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10040 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10041 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10042 {
10043 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10044 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10045 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10046 fold_convert_loc (loc, type, arg1));
10047 }
10048 /* Fold X & (X ^ Y) as X & ~Y. */
10049 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10050 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10051 {
10052 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10053 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10054 fold_convert_loc (loc, type, arg0),
10055 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10056 }
10057 /* Fold X & (Y ^ X) as ~Y & X. */
10058 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10059 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10060 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10061 {
10062 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10063 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10064 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10065 fold_convert_loc (loc, type, arg0));
10066 }
10067
10068 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10069 multiple of 1 << CST. */
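/* An illustrative sketch with a hypothetical x: in
     (x * 8) & -8
   the product is always a multiple of 8, so the AND clears no
   bits and the expression may fold to x * 8.  */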
10070 if (TREE_CODE (arg1) == INTEGER_CST)
10071 {
10072 wide_int cst1 = arg1;
10073 wide_int ncst1 = -cst1;
10074 if ((cst1 & ncst1) == ncst1
10075 && multiple_of_p (type, arg0,
10076 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10077 return fold_convert_loc (loc, type, arg0);
10078 }
10079
10080 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10081 bits from CST2. */
10082 if (TREE_CODE (arg1) == INTEGER_CST
10083 && TREE_CODE (arg0) == MULT_EXPR
10084 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10085 {
10086 wide_int warg1 = arg1;
10087 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10088
10089 if (masked == 0)
10090 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10091 arg0, arg1);
10092 else if (masked != warg1)
10093 {
10094 /* Avoid the transform if arg1 is a mask of some
10095 mode which allows further optimizations. */
10096 int pop = wi::popcount (warg1);
10097 if (!(pop >= BITS_PER_UNIT
10098 && exact_log2 (pop) != -1
10099 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10100 return fold_build2_loc (loc, code, type, op0,
10101 wide_int_to_tree (type, masked));
10102 }
10103 }
10104
10105 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10106 ((A & N) + B) & M -> (A + B) & M
10107 Similarly if (N & M) == 0,
10108 ((A | N) + B) & M -> (A + B) & M
10109 and for - instead of + (or unary - instead of +)
10110 and/or ^ instead of |.
10111 If B is constant and (B & M) == 0, fold into A & M. */
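/* An illustrative sketch with hypothetical operands and M == 0xff:
     ((a & 0xff) + b) & 0xff   may become   (a + b) & 0xff
   since the bits of a above M can only affect bits above M in the
   sum; similarly ((a | 0x100) + b) & 0xff may drop the | 0x100.  */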
10112 if (TREE_CODE (arg1) == INTEGER_CST)
10113 {
10114 wide_int cst1 = arg1;
10115 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10116 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10117 && (TREE_CODE (arg0) == PLUS_EXPR
10118 || TREE_CODE (arg0) == MINUS_EXPR
10119 || TREE_CODE (arg0) == NEGATE_EXPR)
10120 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10121 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10122 {
10123 tree pmop[2];
10124 int which = 0;
10125 wide_int cst0;
10126
10127 /* Now we know that arg0 is (C + D) or (C - D) or
10128 -C, and arg1 (M) is (1LL << cst) - 1.
10129 Store C into PMOP[0] and D into PMOP[1]. */
10130 pmop[0] = TREE_OPERAND (arg0, 0);
10131 pmop[1] = NULL;
10132 if (TREE_CODE (arg0) != NEGATE_EXPR)
10133 {
10134 pmop[1] = TREE_OPERAND (arg0, 1);
10135 which = 1;
10136 }
10137
10138 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10139 which = -1;
10140
10141 for (; which >= 0; which--)
10142 switch (TREE_CODE (pmop[which]))
10143 {
10144 case BIT_AND_EXPR:
10145 case BIT_IOR_EXPR:
10146 case BIT_XOR_EXPR:
10147 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10148 != INTEGER_CST)
10149 break;
10150 cst0 = TREE_OPERAND (pmop[which], 1);
10151 cst0 &= cst1;
10152 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10153 {
10154 if (cst0 != cst1)
10155 break;
10156 }
10157 else if (cst0 != 0)
10158 break;
10159 /* If C or D is of the form (A & N) where
10160 (N & M) == M, or of the form (A | N) or
10161 (A ^ N) where (N & M) == 0, replace it with A. */
10162 pmop[which] = TREE_OPERAND (pmop[which], 0);
10163 break;
10164 case INTEGER_CST:
10165 /* If C or D is a N where (N & M) == 0, it can be
10166 omitted (assumed 0). */
10167 if ((TREE_CODE (arg0) == PLUS_EXPR
10168 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10169 && (cst1 & pmop[which]) == 0)
10170 pmop[which] = NULL;
10171 break;
10172 default:
10173 break;
10174 }
10175
10176 /* Only build anything new if we optimized one or both arguments
10177 above. */
10178 if (pmop[0] != TREE_OPERAND (arg0, 0)
10179 || (TREE_CODE (arg0) != NEGATE_EXPR
10180 && pmop[1] != TREE_OPERAND (arg0, 1)))
10181 {
10182 tree utype = TREE_TYPE (arg0);
10183 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10184 {
10185 /* Perform the operations in a type that has defined
10186 overflow behavior. */
10187 utype = unsigned_type_for (TREE_TYPE (arg0));
10188 if (pmop[0] != NULL)
10189 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10190 if (pmop[1] != NULL)
10191 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10192 }
10193
10194 if (TREE_CODE (arg0) == NEGATE_EXPR)
10195 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10196 else if (TREE_CODE (arg0) == PLUS_EXPR)
10197 {
10198 if (pmop[0] != NULL && pmop[1] != NULL)
10199 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10200 pmop[0], pmop[1]);
10201 else if (pmop[0] != NULL)
10202 tem = pmop[0];
10203 else if (pmop[1] != NULL)
10204 tem = pmop[1];
10205 else
10206 return build_int_cst (type, 0);
10207 }
10208 else if (pmop[0] == NULL)
10209 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10210 else
10211 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10212 pmop[0], pmop[1]);
10213 /* TEM is now the new binary +, - or unary - replacement. */
10214 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10215 fold_convert_loc (loc, utype, arg1));
10216 return fold_convert_loc (loc, type, tem);
10217 }
10218 }
10219 }
10220
10221 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10222 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10223 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10224 {
10225 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10226
10227 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10228 if (mask == -1)
10229 return
10230 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10231 }
10232
10233 goto associate;
10234
10235 case RDIV_EXPR:
10236 /* Don't touch a floating-point divide by zero unless the mode
10237 of the constant can represent infinity. */
10238 if (TREE_CODE (arg1) == REAL_CST
10239 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10240 && real_zerop (arg1))
10241 return NULL_TREE;
10242
10243 /* (-A) / (-B) -> A / B */
10244 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10245 return fold_build2_loc (loc, RDIV_EXPR, type,
10246 TREE_OPERAND (arg0, 0),
10247 negate_expr (arg1));
10248 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10249 return fold_build2_loc (loc, RDIV_EXPR, type,
10250 negate_expr (arg0),
10251 TREE_OPERAND (arg1, 0));
10252 return NULL_TREE;
10253
10254 case TRUNC_DIV_EXPR:
10255 /* Fall through */
10256
10257 case FLOOR_DIV_EXPR:
10258 /* Simplify A / (B << N) where A and B are positive and B is
10259 a power of 2, to A >> (N + log2(B)). */
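/* An illustrative sketch with hypothetical unsigned a and n:
     a / (4 << n)
   has B == 4, so log2(B) == 2 and the division may fold to
     a >> (n + 2).  */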
10260 strict_overflow_p = false;
10261 if (TREE_CODE (arg1) == LSHIFT_EXPR
10262 && (TYPE_UNSIGNED (type)
10263 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10264 {
10265 tree sval = TREE_OPERAND (arg1, 0);
10266 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10267 {
10268 tree sh_cnt = TREE_OPERAND (arg1, 1);
10269 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10270 wi::exact_log2 (sval));
10271
10272 if (strict_overflow_p)
10273 fold_overflow_warning (("assuming signed overflow does not "
10274 "occur when simplifying A / (B << N)"),
10275 WARN_STRICT_OVERFLOW_MISC);
10276
10277 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10278 sh_cnt, pow2);
10279 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10280 fold_convert_loc (loc, type, arg0), sh_cnt);
10281 }
10282 }
10283
10284 /* Fall through */
10285
10286 case ROUND_DIV_EXPR:
10287 case CEIL_DIV_EXPR:
10288 case EXACT_DIV_EXPR:
10289 if (integer_zerop (arg1))
10290 return NULL_TREE;
10291
10292 /* Convert -A / -B to A / B when the type is signed and overflow is
10293 undefined. */
10294 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10295 && TREE_CODE (arg0) == NEGATE_EXPR
10296 && negate_expr_p (op1))
10297 {
10298 if (INTEGRAL_TYPE_P (type))
10299 fold_overflow_warning (("assuming signed overflow does not occur "
10300 "when distributing negation across "
10301 "division"),
10302 WARN_STRICT_OVERFLOW_MISC);
10303 return fold_build2_loc (loc, code, type,
10304 fold_convert_loc (loc, type,
10305 TREE_OPERAND (arg0, 0)),
10306 negate_expr (op1));
10307 }
10308 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10309 && TREE_CODE (arg1) == NEGATE_EXPR
10310 && negate_expr_p (op0))
10311 {
10312 if (INTEGRAL_TYPE_P (type))
10313 fold_overflow_warning (("assuming signed overflow does not occur "
10314 "when distributing negation across "
10315 "division"),
10316 WARN_STRICT_OVERFLOW_MISC);
10317 return fold_build2_loc (loc, code, type,
10318 negate_expr (op0),
10319 fold_convert_loc (loc, type,
10320 TREE_OPERAND (arg1, 0)));
10321 }
10322
10323 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10324 operation, EXACT_DIV_EXPR.
10325
10326 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10328 At one time others generated faster code, but it's not clear whether
10329 they still do after the last round of changes to the DIV code in expmed.c. */
10329 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10330 && multiple_of_p (type, arg0, arg1))
10331 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10332 fold_convert (type, arg0),
10333 fold_convert (type, arg1));
10334
10335 strict_overflow_p = false;
10336 if (TREE_CODE (arg1) == INTEGER_CST
10337 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10338 &strict_overflow_p)))
10339 {
10340 if (strict_overflow_p)
10341 fold_overflow_warning (("assuming signed overflow does not occur "
10342 "when simplifying division"),
10343 WARN_STRICT_OVERFLOW_MISC);
10344 return fold_convert_loc (loc, type, tem);
10345 }
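/* For instance, (x * 8) / 4 can be rewritten by extract_muldiv as
   x * 2 (an illustrative case; extract_muldiv checks the required
   overflow conditions and reports via *STRICT_OVERFLOW_P when it
   relies on undefined signed overflow). */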
10346
10347 return NULL_TREE;
10348
10349 case CEIL_MOD_EXPR:
10350 case FLOOR_MOD_EXPR:
10351 case ROUND_MOD_EXPR:
10352 case TRUNC_MOD_EXPR:
10353 strict_overflow_p = false;
10354 if (TREE_CODE (arg1) == INTEGER_CST
10355 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10356 &strict_overflow_p)))
10357 {
10358 if (strict_overflow_p)
10359 fold_overflow_warning (("assuming signed overflow does not occur "
10360 "when simplifying modulus"),
10361 WARN_STRICT_OVERFLOW_MISC);
10362 return fold_convert_loc (loc, type, tem);
10363 }
10364
10365 return NULL_TREE;
10366
10367 case LROTATE_EXPR:
10368 case RROTATE_EXPR:
10369 case RSHIFT_EXPR:
10370 case LSHIFT_EXPR:
10371 /* Since a negative shift count is not well-defined,
10372 don't try to compute it in the compiler. */
10373 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10374 return NULL_TREE;
10375
10376 prec = element_precision (type);
10377
10378 /* If we have a rotate of a bit operation with the rotate count and
10379 the second operand of the bit operation both constant,
10380 permute the two operations. */
10381 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10382 && (TREE_CODE (arg0) == BIT_AND_EXPR
10383 || TREE_CODE (arg0) == BIT_IOR_EXPR
10384 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10386 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10387 fold_build2_loc (loc, code, type,
10388 TREE_OPERAND (arg0, 0), arg1),
10389 fold_build2_loc (loc, code, type,
10390 TREE_OPERAND (arg0, 1), arg1));
10391
10392 /* Two consecutive rotates adding up to some integer
10393 multiple of the precision of the type can be ignored. */
10394 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10395 && TREE_CODE (arg0) == RROTATE_EXPR
10396 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10397 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10398 prec) == 0)
10399 return TREE_OPERAND (arg0, 0);
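/* E.g. for a 32-bit type, (x r>> 12) r>> 20 rotates by a full 32
   bits and is simply x (r>> denoting rotate right in this
   hypothetical example). */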
10400
10401 return NULL_TREE;
10402
10403 case MIN_EXPR:
10404 case MAX_EXPR:
10405 goto associate;
10406
10407 case TRUTH_ANDIF_EXPR:
10408 /* Note that the operands of this must be ints
10409 and their values must be 0 or 1.
10410 ("true" is a fixed value perhaps depending on the language.) */
10411 /* If first arg is constant zero, return it. */
10412 if (integer_zerop (arg0))
10413 return fold_convert_loc (loc, type, arg0);
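/* Fall through */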
10414 case TRUTH_AND_EXPR:
10415 /* If either arg is constant true, drop it. */
10416 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10418 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10419 /* Preserve sequence points. */
10420 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10421 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10422 /* If second arg is constant zero, result is zero, but first arg
10423 must be evaluated. */
10424 if (integer_zerop (arg1))
10425 return omit_one_operand_loc (loc, type, arg1, arg0);
10426 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10427 case will be handled here. */
10428 if (integer_zerop (arg0))
10429 return omit_one_operand_loc (loc, type, arg0, arg1);
10430
10431 /* !X && X is always false. */
10432 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10433 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10434 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10435 /* X && !X is always false. */
10436 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10437 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10438 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10439
10440 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10441 means A >= Y && A != MAX, but in this case we know that
10442 A < X <= MAX. */
10443
10444 if (!TREE_SIDE_EFFECTS (arg0)
10445 && !TREE_SIDE_EFFECTS (arg1))
10446 {
10447 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10448 if (tem && !operand_equal_p (tem, arg0, 0))
10449 return fold_build2_loc (loc, code, type, tem, arg1);
10450
10451 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10452 if (tem && !operand_equal_p (tem, arg1, 0))
10453 return fold_build2_loc (loc, code, type, arg0, tem);
10454 }
10455
10456 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10457 != NULL_TREE)
10458 return tem;
10459
10460 return NULL_TREE;
10461
10462 case TRUTH_ORIF_EXPR:
10463 /* Note that the operands of this must be ints
10464 and their values must be 0 or true.
10465 ("true" is a fixed value perhaps depending on the language.) */
10466 /* If first arg is constant true, return it. */
10467 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10468 return fold_convert_loc (loc, type, arg0);
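/* Fall through */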
10469 case TRUTH_OR_EXPR:
10470 /* If either arg is constant zero, drop it. */
10471 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10472 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10473 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10474 /* Preserve sequence points. */
10475 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10476 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10477 /* If second arg is constant true, result is true, but we must
10478 evaluate first arg. */
10479 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10480 return omit_one_operand_loc (loc, type, arg1, arg0);
10481 /* Likewise for first arg, but note this only occurs here for
10482 TRUTH_OR_EXPR. */
10483 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10484 return omit_one_operand_loc (loc, type, arg0, arg1);
10485
10486 /* !X || X is always true. */
10487 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10488 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10489 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10490 /* X || !X is always true. */
10491 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10492 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10493 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10494
10495 /* (X && !Y) || (!X && Y) is X ^ Y */
10496 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10497 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10498 {
10499 tree a0, a1, l0, l1, n0, n1;
10500
10501 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10502 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10503
10504 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10505 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10506
10507 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10508 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10509
10510 if ((operand_equal_p (n0, a0, 0)
10511 && operand_equal_p (n1, a1, 0))
10512 || (operand_equal_p (n0, a1, 0)
10513 && operand_equal_p (n1, a0, 0)))
10514 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10515 }
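/* E.g. (a && !b) || (!a && b) is folded to a ^ b for hypothetical
   truth values a and b. */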
10516
10517 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10518 != NULL_TREE)
10519 return tem;
10520
10521 return NULL_TREE;
10522
10523 case TRUTH_XOR_EXPR:
10524 /* If the second arg is constant zero, drop it. */
10525 if (integer_zerop (arg1))
10526 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10527 /* If the second arg is constant true, this is a logical inversion. */
10528 if (integer_onep (arg1))
10529 {
10530 tem = invert_truthvalue_loc (loc, arg0);
10531 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10532 }
10533 /* Identical arguments cancel to zero. */
10534 if (operand_equal_p (arg0, arg1, 0))
10535 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10536
10537 /* !X ^ X is always true. */
10538 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10539 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10540 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10541
10542 /* X ^ !X is always true. */
10543 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10544 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10545 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10546
10547 return NULL_TREE;
10548
10549 case EQ_EXPR:
10550 case NE_EXPR:
10551 STRIP_NOPS (arg0);
10552 STRIP_NOPS (arg1);
10553
10554 tem = fold_comparison (loc, code, type, op0, op1);
10555 if (tem != NULL_TREE)
10556 return tem;
10557
10558 /* bool_var != 1 becomes !bool_var. */
10559 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10560 && code == NE_EXPR)
10561 return fold_convert_loc (loc, type,
10562 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10563 TREE_TYPE (arg0), arg0));
10564
10565 /* bool_var == 0 becomes !bool_var. */
10566 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10567 && code == EQ_EXPR)
10568 return fold_convert_loc (loc, type,
10569 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10570 TREE_TYPE (arg0), arg0));
10571
10572 /* !exp != 0 becomes !exp */
10573 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10574 && code == NE_EXPR)
10575 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10576
10577 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10578 if ((TREE_CODE (arg0) == PLUS_EXPR
10579 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10580 || TREE_CODE (arg0) == MINUS_EXPR)
10581 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10582 0)),
10583 arg1, 0)
10584 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10585 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10586 {
10587 tree val = TREE_OPERAND (arg0, 1);
10588 val = fold_build2_loc (loc, code, type, val,
10589 build_int_cst (TREE_TYPE (val), 0));
10590 return omit_two_operands_loc (loc, type, val,
10591 TREE_OPERAND (arg0, 0), arg1);
10592 }
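/* E.g. x + y == x becomes y == 0, with x still evaluated for its
   side effects via omit_two_operands_loc (an illustrative integer
   case). */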
10593
10594 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10595 if ((TREE_CODE (arg1) == PLUS_EXPR
10596 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10597 || TREE_CODE (arg1) == MINUS_EXPR)
10598 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10599 0)),
10600 arg0, 0)
10601 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10602 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10603 {
10604 tree val = TREE_OPERAND (arg1, 1);
10605 val = fold_build2_loc (loc, code, type, val,
10606 build_int_cst (TREE_TYPE (val), 0));
10607 return omit_two_operands_loc (loc, type, val,
10608 TREE_OPERAND (arg1, 0), arg0);
10609 }
10610
10611 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10612 if (TREE_CODE (arg0) == MINUS_EXPR
10613 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10614 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10615 1)),
10616 arg1, 0)
10617 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10618 return omit_two_operands_loc (loc, type,
10619 code == NE_EXPR
10620 ? boolean_true_node : boolean_false_node,
10621 TREE_OPERAND (arg0, 1), arg1);
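/* E.g. 1 - x == x is always false for integers: it would require
   2 * x == 1, and 2 * x is always even. Likewise 1 - x != x is
   always true. */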
10622
10623 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10624 if (TREE_CODE (arg1) == MINUS_EXPR
10625 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10626 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10627 1)),
10628 arg0, 0)
10629 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10630 return omit_two_operands_loc (loc, type,
10631 code == NE_EXPR
10632 ? boolean_true_node : boolean_false_node,
10633 TREE_OPERAND (arg1, 1), arg0);
10634
10635 /* If this is an EQ or NE comparison with zero and ARG0 is
10636 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10637 two operations, but the latter can be done in one less insn
10638 on machines that have only two-operand insns or on which a
10639 constant cannot be the first operand. */
10640 if (TREE_CODE (arg0) == BIT_AND_EXPR
10641 && integer_zerop (arg1))
10642 {
10643 tree arg00 = TREE_OPERAND (arg0, 0);
10644 tree arg01 = TREE_OPERAND (arg0, 1);
10645 if (TREE_CODE (arg00) == LSHIFT_EXPR
10646 && integer_onep (TREE_OPERAND (arg00, 0)))
10647 {
10648 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10649 arg01, TREE_OPERAND (arg00, 1));
10650 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10651 build_int_cst (TREE_TYPE (arg0), 1));
10652 return fold_build2_loc (loc, code, type,
10653 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10654 arg1);
10655 }
10656 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10657 && integer_onep (TREE_OPERAND (arg01, 0)))
10658 {
10659 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10660 arg00, TREE_OPERAND (arg01, 1));
10661 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10662 build_int_cst (TREE_TYPE (arg0), 1));
10663 return fold_build2_loc (loc, code, type,
10664 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10665 arg1);
10666 }
10667 }
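/* For example, ((1 << n) & flags) != 0 is rewritten as
   ((flags >> n) & 1) != 0 (hypothetical operands n and flags). */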
10668
10669 /* If this is an NE or EQ comparison of zero against the result of a
10670 signed MOD operation whose second operand is a power of 2, make
10671 the MOD operation unsigned since it is simpler and equivalent. */
10672 if (integer_zerop (arg1)
10673 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10674 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10675 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10676 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10677 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10678 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10679 {
10680 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10681 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10682 fold_convert_loc (loc, newtype,
10683 TREE_OPERAND (arg0, 0)),
10684 fold_convert_loc (loc, newtype,
10685 TREE_OPERAND (arg0, 1)));
10686
10687 return fold_build2_loc (loc, code, type, newmod,
10688 fold_convert_loc (loc, newtype, arg1));
10689 }
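/* E.g. for signed int x, x % 16 == 0 becomes
   (unsigned int) x % 16 == 0; only the low bits determine whether
   the remainder is zero, so the unsigned form is equivalent. */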
10690
10691 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10692 C1 is a valid shift constant, and C2 is a power of two, i.e.
10693 a single bit. */
10694 if (TREE_CODE (arg0) == BIT_AND_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10696 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10697 == INTEGER_CST
10698 && integer_pow2p (TREE_OPERAND (arg0, 1))
10699 && integer_zerop (arg1))
10700 {
10701 tree itype = TREE_TYPE (arg0);
10702 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10703 prec = TYPE_PRECISION (itype);
10704
10705 /* Check for a valid shift count. */
10706 if (wi::ltu_p (arg001, prec))
10707 {
10708 tree arg01 = TREE_OPERAND (arg0, 1);
10709 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10710 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10711 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10712 can be rewritten as (X & (C2 << C1)) != 0. */
10713 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10714 {
10715 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10716 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10717 return fold_build2_loc (loc, code, type, tem,
10718 fold_convert_loc (loc, itype, arg1));
10719 }
10720 /* Otherwise, for signed (arithmetic) shifts,
10721 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10722 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10723 else if (!TYPE_UNSIGNED (itype))
10724 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10725 arg000, build_int_cst (itype, 0));
10726 /* Otherwise, for unsigned (logical) shifts,
10727 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10728 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10729 else
10730 return omit_one_operand_loc (loc, type,
10731 code == EQ_EXPR ? integer_one_node
10732 : integer_zero_node,
10733 arg000);
10734 }
10735 }
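/* Illustrative cases for 32-bit int x:
   ((x >> 4) & 2) != 0 becomes (x & 32) != 0, and
   ((x >> 28) & 16) != 0 reduces to the sign test x < 0. */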
10736
10737 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10738 Similarly for NE_EXPR. */
10739 if (TREE_CODE (arg0) == BIT_AND_EXPR
10740 && TREE_CODE (arg1) == INTEGER_CST
10741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10742 {
10743 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10744 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10745 TREE_OPERAND (arg0, 1));
10746 tree dandnotc
10747 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10748 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10749 notc);
10750 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10751 if (integer_nonzerop (dandnotc))
10752 return omit_one_operand_loc (loc, type, rslt, arg0);
10753 }
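/* E.g. (x & 4) == 3 is always false, since 3 & ~4 is nonzero; it
   folds to 0 while preserving any side effects of x. */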
10754
10755 /* If this is a comparison of a field, we may be able to simplify it. */
10756 if ((TREE_CODE (arg0) == COMPONENT_REF
10757 || TREE_CODE (arg0) == BIT_FIELD_REF)
10758 /* Handle the constant case even without -O
10759 to make sure the warnings are given. */
10760 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10761 {
10762 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10763 if (t1)
10764 return t1;
10765 }
10766
10767 /* Optimize comparisons of strlen vs zero to a compare of the
10768 first character of the string vs zero. To wit,
10769 strlen(ptr) == 0 => *ptr == 0
10770 strlen(ptr) != 0 => *ptr != 0
10771 Other cases should reduce to one of these two (or a constant)
10772 due to the return value of strlen being unsigned. */
10773 if (TREE_CODE (arg0) == CALL_EXPR
10774 && integer_zerop (arg1))
10775 {
10776 tree fndecl = get_callee_fndecl (arg0);
10777
10778 if (fndecl
10779 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10780 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10781 && call_expr_nargs (arg0) == 1
10782 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10783 {
10784 tree iref = build_fold_indirect_ref_loc (loc,
10785 CALL_EXPR_ARG (arg0, 0));
10786 return fold_build2_loc (loc, code, type, iref,
10787 build_int_cst (TREE_TYPE (iref), 0));
10788 }
10789 }
10790
10791 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10792 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10793 if (TREE_CODE (arg0) == RSHIFT_EXPR
10794 && integer_zerop (arg1)
10795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10796 {
10797 tree arg00 = TREE_OPERAND (arg0, 0);
10798 tree arg01 = TREE_OPERAND (arg0, 1);
10799 tree itype = TREE_TYPE (arg00);
10800 if (wi::eq_p (arg01, element_precision (itype) - 1))
10801 {
10802 if (TYPE_UNSIGNED (itype))
10803 {
10804 itype = signed_type_for (itype);
10805 arg00 = fold_convert_loc (loc, itype, arg00);
10806 }
10807 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10808 type, arg00, build_zero_cst (itype));
10809 }
10810 }
10811
10812 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10813 (X & C) == 0 when C is a single bit. */
10814 if (TREE_CODE (arg0) == BIT_AND_EXPR
10815 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10816 && integer_zerop (arg1)
10817 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10818 {
10819 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10820 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10821 TREE_OPERAND (arg0, 1));
10822 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10823 type, tem,
10824 fold_convert_loc (loc, TREE_TYPE (arg0),
10825 arg1));
10826 }
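/* E.g. (~x & 8) == 0 becomes (x & 8) != 0 for a hypothetical
   single-bit mask. */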
10827
10828 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10829 constant C is a power of two, i.e. a single bit. */
10830 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10831 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10832 && integer_zerop (arg1)
10833 && integer_pow2p (TREE_OPERAND (arg0, 1))
10834 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10835 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10836 {
10837 tree arg00 = TREE_OPERAND (arg0, 0);
10838 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10839 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10840 }
10841
10842 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10843 when C is a power of two, i.e. a single bit. */
10844 if (TREE_CODE (arg0) == BIT_AND_EXPR
10845 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10846 && integer_zerop (arg1)
10847 && integer_pow2p (TREE_OPERAND (arg0, 1))
10848 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10849 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10850 {
10851 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10852 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10853 arg000, TREE_OPERAND (arg0, 1));
10854 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10855 tem, build_int_cst (TREE_TYPE (tem), 0));
10856 }
10857
10858 if (integer_zerop (arg1)
10859 && tree_expr_nonzero_p (arg0))
10860 {
10861 tree res = constant_boolean_node (code == NE_EXPR, type);
10862 return omit_one_operand_loc (loc, type, res, arg0);
10863 }
10864
10865 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10866 if (TREE_CODE (arg0) == BIT_AND_EXPR
10867 && TREE_CODE (arg1) == BIT_AND_EXPR)
10868 {
10869 tree arg00 = TREE_OPERAND (arg0, 0);
10870 tree arg01 = TREE_OPERAND (arg0, 1);
10871 tree arg10 = TREE_OPERAND (arg1, 0);
10872 tree arg11 = TREE_OPERAND (arg1, 1);
10873 tree itype = TREE_TYPE (arg0);
10874
10875 if (operand_equal_p (arg01, arg11, 0))
10876 return fold_build2_loc (loc, code, type,
10877 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10878 fold_build2_loc (loc,
10879 BIT_XOR_EXPR, itype,
10880 arg00, arg10),
10881 arg01),
10882 build_zero_cst (itype));
10883
10884 if (operand_equal_p (arg01, arg10, 0))
10885 return fold_build2_loc (loc, code, type,
10886 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10887 fold_build2_loc (loc,
10888 BIT_XOR_EXPR, itype,
10889 arg00, arg11),
10890 arg01),
10891 build_zero_cst (itype));
10892
10893 if (operand_equal_p (arg00, arg11, 0))
10894 return fold_build2_loc (loc, code, type,
10895 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10896 fold_build2_loc (loc,
10897 BIT_XOR_EXPR, itype,
10898 arg01, arg10),
10899 arg00),
10900 build_zero_cst (itype));
10901
10902 if (operand_equal_p (arg00, arg10, 0))
10903 return fold_build2_loc (loc, code, type,
10904 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10905 fold_build2_loc (loc,
10906 BIT_XOR_EXPR, itype,
10907 arg01, arg11),
10908 arg00),
10909 build_zero_cst (itype));
10910 }
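/* E.g. (x & m) == (y & m) becomes ((x ^ y) & m) == 0 for
   hypothetical operands x, y and a shared mask m. */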
10911
10912 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10913 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10914 {
10915 tree arg00 = TREE_OPERAND (arg0, 0);
10916 tree arg01 = TREE_OPERAND (arg0, 1);
10917 tree arg10 = TREE_OPERAND (arg1, 0);
10918 tree arg11 = TREE_OPERAND (arg1, 1);
10919 tree itype = TREE_TYPE (arg0);
10920
10921 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10922 operand_equal_p guarantees no side-effects so we don't need
10923 to use omit_one_operand on Z. */
10924 if (operand_equal_p (arg01, arg11, 0))
10925 return fold_build2_loc (loc, code, type, arg00,
10926 fold_convert_loc (loc, TREE_TYPE (arg00),
10927 arg10));
10928 if (operand_equal_p (arg01, arg10, 0))
10929 return fold_build2_loc (loc, code, type, arg00,
10930 fold_convert_loc (loc, TREE_TYPE (arg00),
10931 arg11));
10932 if (operand_equal_p (arg00, arg11, 0))
10933 return fold_build2_loc (loc, code, type, arg01,
10934 fold_convert_loc (loc, TREE_TYPE (arg01),
10935 arg10));
10936 if (operand_equal_p (arg00, arg10, 0))
10937 return fold_build2_loc (loc, code, type, arg01,
10938 fold_convert_loc (loc, TREE_TYPE (arg01),
10939 arg11));
10940
10941 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10942 if (TREE_CODE (arg01) == INTEGER_CST
10943 && TREE_CODE (arg11) == INTEGER_CST)
10944 {
10945 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10946 fold_convert_loc (loc, itype, arg11));
10947 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10948 return fold_build2_loc (loc, code, type, tem,
10949 fold_convert_loc (loc, itype, arg10));
10950 }
10951 }
10952
10953 /* Attempt to simplify equality/inequality comparisons of complex
10954 values. Only lower the comparison if the result is known or
10955 can be simplified to a single scalar comparison. */
10956 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10957 || TREE_CODE (arg0) == COMPLEX_CST)
10958 && (TREE_CODE (arg1) == COMPLEX_EXPR
10959 || TREE_CODE (arg1) == COMPLEX_CST))
10960 {
10961 tree real0, imag0, real1, imag1;
10962 tree rcond, icond;
10963
10964 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10965 {
10966 real0 = TREE_OPERAND (arg0, 0);
10967 imag0 = TREE_OPERAND (arg0, 1);
10968 }
10969 else
10970 {
10971 real0 = TREE_REALPART (arg0);
10972 imag0 = TREE_IMAGPART (arg0);
10973 }
10974
10975 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10976 {
10977 real1 = TREE_OPERAND (arg1, 0);
10978 imag1 = TREE_OPERAND (arg1, 1);
10979 }
10980 else
10981 {
10982 real1 = TREE_REALPART (arg1);
10983 imag1 = TREE_IMAGPART (arg1);
10984 }
10985
10986 rcond = fold_binary_loc (loc, code, type, real0, real1);
10987 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10988 {
10989 if (integer_zerop (rcond))
10990 {
10991 if (code == EQ_EXPR)
10992 return omit_two_operands_loc (loc, type, boolean_false_node,
10993 imag0, imag1);
10994 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10995 }
10996 else
10997 {
10998 if (code == NE_EXPR)
10999 return omit_two_operands_loc (loc, type, boolean_true_node,
11000 imag0, imag1);
11001 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11002 }
11003 }
11004
11005 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11006 if (icond && TREE_CODE (icond) == INTEGER_CST)
11007 {
11008 if (integer_zerop (icond))
11009 {
11010 if (code == EQ_EXPR)
11011 return omit_two_operands_loc (loc, type, boolean_false_node,
11012 real0, real1);
11013 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11014 }
11015 else
11016 {
11017 if (code == NE_EXPR)
11018 return omit_two_operands_loc (loc, type, boolean_true_node,
11019 real0, real1);
11020 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11021 }
11022 }
11023 }
11024
11025 return NULL_TREE;
11026
11027 case LT_EXPR:
11028 case GT_EXPR:
11029 case LE_EXPR:
11030 case GE_EXPR:
11031 tem = fold_comparison (loc, code, type, op0, op1);
11032 if (tem != NULL_TREE)
11033 return tem;
11034
11035 /* Transform comparisons of the form X +- C CMP X. */
11036 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11037 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11038 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11039 && !HONOR_SNANS (arg0))
11040 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11041 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11042 {
11043 tree arg01 = TREE_OPERAND (arg0, 1);
11044 enum tree_code code0 = TREE_CODE (arg0);
11045 int is_positive;
11046
11047 if (TREE_CODE (arg01) == REAL_CST)
11048 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11049 else
11050 is_positive = tree_int_cst_sgn (arg01);
11051
11052 /* (X - c) > X becomes false. */
11053 if (code == GT_EXPR
11054 && ((code0 == MINUS_EXPR && is_positive >= 0)
11055 || (code0 == PLUS_EXPR && is_positive <= 0)))
11056 {
11057 if (TREE_CODE (arg01) == INTEGER_CST
11058 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11059 fold_overflow_warning (("assuming signed overflow does not "
11060 "occur when assuming that (X - c) > X "
11061 "is always false"),
11062 WARN_STRICT_OVERFLOW_ALL);
11063 return constant_boolean_node (0, type);
11064 }
11065
11066 /* Likewise (X + c) < X becomes false. */
11067 if (code == LT_EXPR
11068 && ((code0 == PLUS_EXPR && is_positive >= 0)
11069 || (code0 == MINUS_EXPR && is_positive <= 0)))
11070 {
11071 if (TREE_CODE (arg01) == INTEGER_CST
11072 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11073 fold_overflow_warning (("assuming signed overflow does not "
11074 "occur when assuming that "
11075 "(X + c) < X is always false"),
11076 WARN_STRICT_OVERFLOW_ALL);
11077 return constant_boolean_node (0, type);
11078 }
11079
11080 /* Convert (X - c) <= X to true. */
11081 if (!HONOR_NANS (arg1)
11082 && code == LE_EXPR
11083 && ((code0 == MINUS_EXPR && is_positive >= 0)
11084 || (code0 == PLUS_EXPR && is_positive <= 0)))
11085 {
11086 if (TREE_CODE (arg01) == INTEGER_CST
11087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11088 fold_overflow_warning (("assuming signed overflow does not "
11089 "occur when assuming that "
11090 "(X - c) <= X is always true"),
11091 WARN_STRICT_OVERFLOW_ALL);
11092 return constant_boolean_node (1, type);
11093 }
11094
11095 /* Convert (X + c) >= X to true. */
11096 if (!HONOR_NANS (arg1)
11097 && code == GE_EXPR
11098 && ((code0 == PLUS_EXPR && is_positive >= 0)
11099 || (code0 == MINUS_EXPR && is_positive <= 0)))
11100 {
11101 if (TREE_CODE (arg01) == INTEGER_CST
11102 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11103 fold_overflow_warning (("assuming signed overflow does not "
11104 "occur when assuming that "
11105 "(X + c) >= X is always true"),
11106 WARN_STRICT_OVERFLOW_ALL);
11107 return constant_boolean_node (1, type);
11108 }
11109
11110 if (TREE_CODE (arg01) == INTEGER_CST)
11111 {
11112 /* Convert X + c > X and X - c < X to true for integers. */
11113 if (code == GT_EXPR
11114 && ((code0 == PLUS_EXPR && is_positive > 0)
11115 || (code0 == MINUS_EXPR && is_positive < 0)))
11116 {
11117 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11118 fold_overflow_warning (("assuming signed overflow does "
11119 "not occur when assuming that "
11120 "(X + c) > X is always true"),
11121 WARN_STRICT_OVERFLOW_ALL);
11122 return constant_boolean_node (1, type);
11123 }
11124
11125 if (code == LT_EXPR
11126 && ((code0 == MINUS_EXPR && is_positive > 0)
11127 || (code0 == PLUS_EXPR && is_positive < 0)))
11128 {
11129 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11130 fold_overflow_warning (("assuming signed overflow does "
11131 "not occur when assuming that "
11132 "(X - c) < X is always true"),
11133 WARN_STRICT_OVERFLOW_ALL);
11134 return constant_boolean_node (1, type);
11135 }
11136
11137 /* Convert X + c <= X and X - c >= X to false for integers. */
11138 if (code == LE_EXPR
11139 && ((code0 == PLUS_EXPR && is_positive > 0)
11140 || (code0 == MINUS_EXPR && is_positive < 0)))
11141 {
11142 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11143 fold_overflow_warning (("assuming signed overflow does "
11144 "not occur when assuming that "
11145 "(X + c) <= X is always false"),
11146 WARN_STRICT_OVERFLOW_ALL);
11147 return constant_boolean_node (0, type);
11148 }
11149
11150 if (code == GE_EXPR
11151 && ((code0 == MINUS_EXPR && is_positive > 0)
11152 || (code0 == PLUS_EXPR && is_positive < 0)))
11153 {
11154 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11155 fold_overflow_warning (("assuming signed overflow does "
11156 "not occur when assuming that "
11157 "(X - c) >= X is always false"),
11158 WARN_STRICT_OVERFLOW_ALL);
11159 return constant_boolean_node (0, type);
11160 }
11161 }
11162 }
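/* E.g. with signed overflow undefined, x + 1 > x folds to true and
   x + 1 <= x folds to false, each after noting the strict-overflow
   assumption (illustrative cases of the transforms above). */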
11163
11164 /* If we are comparing an ABS_EXPR with a constant, we can
11165 convert all the cases into explicit comparisons, but they may
11166 well not be faster than doing the ABS and one comparison.
11167 But ABS (X) <= C is a range comparison, which becomes a subtraction
11168 and a comparison, and is probably faster. */
11169 if (code == LE_EXPR
11170 && TREE_CODE (arg1) == INTEGER_CST
11171 && TREE_CODE (arg0) == ABS_EXPR
11172 && ! TREE_SIDE_EFFECTS (arg0)
11173 && (0 != (tem = negate_expr (arg1)))
11174 && TREE_CODE (tem) == INTEGER_CST
11175 && !TREE_OVERFLOW (tem))
11176 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11177 build2 (GE_EXPR, type,
11178 TREE_OPERAND (arg0, 0), tem),
11179 build2 (LE_EXPR, type,
11180 TREE_OPERAND (arg0, 0), arg1));
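/* For instance, ABS (x) <= 5 becomes x >= -5 && x <= 5 (an
   illustrative case where negating the constant cannot
   overflow). */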
11181
11182 /* Convert ABS_EXPR<x> >= 0 to true. */
11183 strict_overflow_p = false;
11184 if (code == GE_EXPR
11185 && (integer_zerop (arg1)
11186 || (! HONOR_NANS (arg0)
11187 && real_zerop (arg1)))
11188 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11189 {
11190 if (strict_overflow_p)
11191 fold_overflow_warning (("assuming signed overflow does not occur "
11192 "when simplifying comparison of "
11193 "absolute value and zero"),
11194 WARN_STRICT_OVERFLOW_CONDITIONAL);
11195 return omit_one_operand_loc (loc, type,
11196 constant_boolean_node (true, type),
11197 arg0);
11198 }
11199
11200 /* Convert ABS_EXPR<x> < 0 to false. */
11201 strict_overflow_p = false;
11202 if (code == LT_EXPR
11203 && (integer_zerop (arg1) || real_zerop (arg1))
11204 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11205 {
11206 if (strict_overflow_p)
11207 fold_overflow_warning (("assuming signed overflow does not occur "
11208 "when simplifying comparison of "
11209 "absolute value and zero"),
11210 WARN_STRICT_OVERFLOW_CONDITIONAL);
11211 return omit_one_operand_loc (loc, type,
11212 constant_boolean_node (false, type),
11213 arg0);
11214 }
11215
11216 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11217 and similarly for >= into !=. */
11218 if ((code == LT_EXPR || code == GE_EXPR)
11219 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11220 && TREE_CODE (arg1) == LSHIFT_EXPR
11221 && integer_onep (TREE_OPERAND (arg1, 0)))
11222 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11223 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11224 TREE_OPERAND (arg1, 1)),
11225 build_zero_cst (TREE_TYPE (arg0)));
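/* E.g. for unsigned int x, x < (1U << y) becomes (x >> y) == 0
   and x >= (1U << y) becomes (x >> y) != 0 (hypothetical
   operands). */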
11226
11227 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11228 otherwise Y might be >= # of bits in X's type and thus e.g.
11229 (unsigned char) (1 << Y) for Y == 15 might be 0.
11230 If the cast is widening, then 1 << Y should have an unsigned type,
11231 otherwise if Y is the number of bits in the signed shift type minus 1,
11232 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11233 Y == 31 might be 0xffffffff80000000. */
11234 if ((code == LT_EXPR || code == GE_EXPR)
11235 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11236 && CONVERT_EXPR_P (arg1)
11237 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11238 && (element_precision (TREE_TYPE (arg1))
11239 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11240 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11241 || (element_precision (TREE_TYPE (arg1))
11242 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11243 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11244 {
11245 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11246 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11247 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11248 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11249 build_zero_cst (TREE_TYPE (arg0)));
11250 }
11251
11252 return NULL_TREE;
11253
11254 case UNORDERED_EXPR:
11255 case ORDERED_EXPR:
11256 case UNLT_EXPR:
11257 case UNLE_EXPR:
11258 case UNGT_EXPR:
11259 case UNGE_EXPR:
11260 case UNEQ_EXPR:
11261 case LTGT_EXPR:
11262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11263 {
11264 tree targ0 = strip_float_extensions (arg0);
11265 tree targ1 = strip_float_extensions (arg1);
11266 tree newtype = TREE_TYPE (targ0);
11267
11268 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11269 newtype = TREE_TYPE (targ1);
11270
11271 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11272 return fold_build2_loc (loc, code, type,
11273 fold_convert_loc (loc, newtype, targ0),
11274 fold_convert_loc (loc, newtype, targ1));
11275 }
11276
11277 return NULL_TREE;
11278
11279 case COMPOUND_EXPR:
11280 /* When pedantic, a compound expression can be neither an lvalue
11281 nor an integer constant expression. */
11282 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11283 return NULL_TREE;
11284 /* Don't let (0, 0) be a null pointer constant. */
11285 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11286 : fold_convert_loc (loc, type, arg1);
11287 return pedantic_non_lvalue_loc (loc, tem);
11288
11289 case ASSERT_EXPR:
11290 /* An ASSERT_EXPR should never be passed to fold_binary. */
11291 gcc_unreachable ();
11292
11293 default:
11294 return NULL_TREE;
11295 } /* switch (code) */
11296 }
11297
11298 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11299 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11300 of GOTO_EXPR. */
11301
11302 static tree
11303 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11304 {
11305 switch (TREE_CODE (*tp))
11306 {
11307 case LABEL_EXPR:
11308 return *tp;
11309
11310 case GOTO_EXPR:
11311 *walk_subtrees = 0;
11312
11313 /* ... fall through ... */
11314
11315 default:
11316 return NULL_TREE;
11317 }
11318 }
11319
11320 /* Return whether the sub-tree ST contains a label which is accessible from
11321 outside the sub-tree. */
11322
11323 static bool
11324 contains_label_p (tree st)
11325 {
11326 return
11327 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11328 }
11329
11330 /* Fold a ternary expression of code CODE and type TYPE with operands
11331 OP0, OP1, and OP2. Return the folded expression if folding is
11332 successful. Otherwise, return NULL_TREE. */
11333
11334 tree
11335 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11336 tree op0, tree op1, tree op2)
11337 {
11338 tree tem;
11339 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11340 enum tree_code_class kind = TREE_CODE_CLASS (code);
11341
11342 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11343 && TREE_CODE_LENGTH (code) == 3);
11344
11345 /* If this is a commutative operation, and OP0 is a constant, move it
11346 to OP1 to reduce the number of tests below. */
11347 if (commutative_ternary_tree_code (code)
11348 && tree_swap_operands_p (op0, op1, true))
11349 return fold_build3_loc (loc, code, type, op1, op0, op2);
11350
11351 tem = generic_simplify (loc, code, type, op0, op1, op2);
11352 if (tem)
11353 return tem;
11354
11355 /* Strip any conversions that don't change the mode. This is safe
11356 for every expression, except for a comparison expression because
11357 its signedness is derived from its operands. So, in the latter
11358 case, only strip conversions that don't change the signedness.
11359
11360 Note that this is done as an internal manipulation within the
11361 constant folder, in order to find the simplest representation of
11362 the arguments so that their form can be studied. In any case,
11363 the appropriate type conversions should be put back in the tree
11364 that will get out of the constant folder. */
11365 if (op0)
11366 {
11367 arg0 = op0;
11368 STRIP_NOPS (arg0);
11369 }
11370
11371 if (op1)
11372 {
11373 arg1 = op1;
11374 STRIP_NOPS (arg1);
11375 }
11376
11377 if (op2)
11378 {
11379 arg2 = op2;
11380 STRIP_NOPS (arg2);
11381 }
11382
11383 switch (code)
11384 {
11385 case COMPONENT_REF:
11386 if (TREE_CODE (arg0) == CONSTRUCTOR
11387 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11388 {
11389 unsigned HOST_WIDE_INT idx;
11390 tree field, value;
11391 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11392 if (field == arg1)
11393 return value;
11394 }
11395 return NULL_TREE;
11396
11397 case COND_EXPR:
11398 case VEC_COND_EXPR:
11399 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11400 so all simple results must be passed through pedantic_non_lvalue. */
11401 if (TREE_CODE (arg0) == INTEGER_CST)
11402 {
11403 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11404 tem = integer_zerop (arg0) ? op2 : op1;
11405 /* Only optimize constant conditions when the selected branch
11406 has the same type as the COND_EXPR. This avoids optimizing
11407 away "c ? x : throw", where the throw has a void type.
11408 Avoid throwing away the operand that contains a label. */
11409 if ((!TREE_SIDE_EFFECTS (unused_op)
11410 || !contains_label_p (unused_op))
11411 && (! VOID_TYPE_P (TREE_TYPE (tem))
11412 || VOID_TYPE_P (type)))
11413 return pedantic_non_lvalue_loc (loc, tem);
11414 return NULL_TREE;
11415 }
11416 else if (TREE_CODE (arg0) == VECTOR_CST)
11417 {
11418 if ((TREE_CODE (arg1) == VECTOR_CST
11419 || TREE_CODE (arg1) == CONSTRUCTOR)
11420 && (TREE_CODE (arg2) == VECTOR_CST
11421 || TREE_CODE (arg2) == CONSTRUCTOR))
11422 {
11423 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11424 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11425 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11426 for (i = 0; i < nelts; i++)
11427 {
11428 tree val = VECTOR_CST_ELT (arg0, i);
11429 if (integer_all_onesp (val))
11430 sel[i] = i;
11431 else if (integer_zerop (val))
11432 sel[i] = nelts + i;
11433 else /* Currently unreachable. */
11434 return NULL_TREE;
11435 }
11436 tree t = fold_vec_perm (type, arg1, arg2, sel);
11437 if (t != NULL_TREE)
11438 return t;
11439 }
11440 }
11441
11442 /* If we have A op B ? A : C, we may be able to convert this to a
11443 simpler expression, depending on the operation and the values
11444 of B and C. Signed zeros prevent all of these transformations,
11445 for reasons given above each one.
11446
11447 Also try swapping the arguments and inverting the conditional. */
11448 if (COMPARISON_CLASS_P (arg0)
11449 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11450 arg1, TREE_OPERAND (arg0, 1))
11451 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11452 {
11453 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11454 if (tem)
11455 return tem;
11456 }
11457
11458 if (COMPARISON_CLASS_P (arg0)
11459 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11460 op2,
11461 TREE_OPERAND (arg0, 1))
11462 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11463 {
11464 location_t loc0 = expr_location_or (arg0, loc);
11465 tem = fold_invert_truthvalue (loc0, arg0);
11466 if (tem && COMPARISON_CLASS_P (tem))
11467 {
11468 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11469 if (tem)
11470 return tem;
11471 }
11472 }
11473
11474 /* If the second operand is simpler than the third, swap them
11475 since that produces better jump optimization results. */
11476 if (truth_value_p (TREE_CODE (arg0))
11477 && tree_swap_operands_p (op1, op2, false))
11478 {
11479 location_t loc0 = expr_location_or (arg0, loc);
11480 /* See if this can be inverted. If it can't, possibly because
11481 it was a floating-point inequality comparison, don't do
11482 anything. */
11483 tem = fold_invert_truthvalue (loc0, arg0);
11484 if (tem)
11485 return fold_build3_loc (loc, code, type, tem, op2, op1);
11486 }
11487
11488 /* Convert A ? 1 : 0 to simply A. */
11489 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11490 : (integer_onep (op1)
11491 && !VECTOR_TYPE_P (type)))
11492 && integer_zerop (op2)
11493 /* If we try to convert OP0 to our type, the
11494 call to fold will try to move the conversion inside
11495 a COND, which will recurse. In that case, the COND_EXPR
11496 is probably the best choice, so leave it alone. */
11497 && type == TREE_TYPE (arg0))
11498 return pedantic_non_lvalue_loc (loc, arg0);
11499
11500 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11501 over COND_EXPR in cases such as floating point comparisons. */
11502 if (integer_zerop (op1)
11503 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11504 : (integer_onep (op2)
11505 && !VECTOR_TYPE_P (type)))
11506 && truth_value_p (TREE_CODE (arg0)))
11507 return pedantic_non_lvalue_loc (loc,
11508 fold_convert_loc (loc, type,
11509 invert_truthvalue_loc (loc,
11510 arg0)));
11511
11512 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11513 if (TREE_CODE (arg0) == LT_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0, 1))
11515 && integer_zerop (op2)
11516 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11517 {
11518 /* sign_bit_p looks through both zero and sign extensions,
11519 but for this optimization only sign extensions are
11520 usable. */
11521 tree tem2 = TREE_OPERAND (arg0, 0);
11522 while (tem != tem2)
11523 {
11524 if (TREE_CODE (tem2) != NOP_EXPR
11525 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11526 {
11527 tem = NULL_TREE;
11528 break;
11529 }
11530 tem2 = TREE_OPERAND (tem2, 0);
11531 }
11532 /* sign_bit_p only checks ARG1 bits within A's precision.
11533 If <sign bit of A> has wider type than A, bits outside
11534 of A's precision in <sign bit of A> need to be checked.
11535 If they are all 0, this optimization needs to be done
11536 in unsigned A's type; if they are all 1, in signed A's type;
11537 otherwise this can't be done. */
11538 if (tem
11539 && TYPE_PRECISION (TREE_TYPE (tem))
11540 < TYPE_PRECISION (TREE_TYPE (arg1))
11541 && TYPE_PRECISION (TREE_TYPE (tem))
11542 < TYPE_PRECISION (type))
11543 {
11544 int inner_width, outer_width;
11545 tree tem_type;
11546
11547 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11548 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11549 if (outer_width > TYPE_PRECISION (type))
11550 outer_width = TYPE_PRECISION (type);
11551
11552 wide_int mask = wi::shifted_mask
11553 (inner_width, outer_width - inner_width, false,
11554 TYPE_PRECISION (TREE_TYPE (arg1)));
11555
11556 wide_int common = mask & arg1;
11557 if (common == mask)
11558 {
11559 tem_type = signed_type_for (TREE_TYPE (tem));
11560 tem = fold_convert_loc (loc, tem_type, tem);
11561 }
11562 else if (common == 0)
11563 {
11564 tem_type = unsigned_type_for (TREE_TYPE (tem));
11565 tem = fold_convert_loc (loc, tem_type, tem);
11566 }
11567 else
11568 tem = NULL;
11569 }
11570
11571 if (tem)
11572 return
11573 fold_convert_loc (loc, type,
11574 fold_build2_loc (loc, BIT_AND_EXPR,
11575 TREE_TYPE (tem), tem,
11576 fold_convert_loc (loc,
11577 TREE_TYPE (tem),
11578 arg1)));
11579 }
11580
11581 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11582 already handled above. */
11583 if (TREE_CODE (arg0) == BIT_AND_EXPR
11584 && integer_onep (TREE_OPERAND (arg0, 1))
11585 && integer_zerop (op2)
11586 && integer_pow2p (arg1))
11587 {
11588 tree tem = TREE_OPERAND (arg0, 0);
11589 STRIP_NOPS (tem);
11590 if (TREE_CODE (tem) == RSHIFT_EXPR
11591 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11592 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11593 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11594 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11595 TREE_OPERAND (tem, 0), arg1);
11596 }
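/* E.g. ((x >> 3) & 1) ? 8 : 0 is folded to x & 8, since the
   selected constant is exactly the bit being tested (hypothetical
   x). */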
11597
11598 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11599 is probably obsolete because the first operand should be a
11600 truth value (that's why we have the two cases above), but let's
11601 leave it in until we can confirm this for all front-ends. */
11602 if (integer_zerop (op2)
11603 && TREE_CODE (arg0) == NE_EXPR
11604 && integer_zerop (TREE_OPERAND (arg0, 1))
11605 && integer_pow2p (arg1)
11606 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11608 arg1, OEP_ONLY_CONST))
11609 return pedantic_non_lvalue_loc (loc,
11610 fold_convert_loc (loc, type,
11611 TREE_OPERAND (arg0, 0)));
11612
11613 /* Disable the transformations below for vectors, since
11614 fold_binary_op_with_conditional_arg may undo them immediately,
11615 yielding an infinite loop. */
11616 if (code == VEC_COND_EXPR)
11617 return NULL_TREE;
11618
11619 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11620 if (integer_zerop (op2)
11621 && truth_value_p (TREE_CODE (arg0))
11622 && truth_value_p (TREE_CODE (arg1))
11623 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11624 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11625 : TRUTH_ANDIF_EXPR,
11626 type, fold_convert_loc (loc, type, arg0), arg1);
11627
11628 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11629 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11630 && truth_value_p (TREE_CODE (arg0))
11631 && truth_value_p (TREE_CODE (arg1))
11632 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11633 {
11634 location_t loc0 = expr_location_or (arg0, loc);
11635 /* Only perform transformation if ARG0 is easily inverted. */
11636 tem = fold_invert_truthvalue (loc0, arg0);
11637 if (tem)
11638 return fold_build2_loc (loc, code == VEC_COND_EXPR
11639 ? BIT_IOR_EXPR
11640 : TRUTH_ORIF_EXPR,
11641 type, fold_convert_loc (loc, type, tem),
11642 arg1);
11643 }
11644
11645 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11646 if (integer_zerop (arg1)
11647 && truth_value_p (TREE_CODE (arg0))
11648 && truth_value_p (TREE_CODE (op2))
11649 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11650 {
11651 location_t loc0 = expr_location_or (arg0, loc);
11652 /* Only perform transformation if ARG0 is easily inverted. */
11653 tem = fold_invert_truthvalue (loc0, arg0);
11654 if (tem)
11655 return fold_build2_loc (loc, code == VEC_COND_EXPR
11656 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11657 type, fold_convert_loc (loc, type, tem),
11658 op2);
11659 }
11660
11661 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11662 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11663 && truth_value_p (TREE_CODE (arg0))
11664 && truth_value_p (TREE_CODE (op2))
11665 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11666 return fold_build2_loc (loc, code == VEC_COND_EXPR
11667 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11668 type, fold_convert_loc (loc, type, arg0), op2);
11669
11670 return NULL_TREE;
11671
11672 case CALL_EXPR:
11673 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11674 of fold_ternary on them. */
11675 gcc_unreachable ();
11676
11677 case BIT_FIELD_REF:
11678 if ((TREE_CODE (arg0) == VECTOR_CST
11679 || (TREE_CODE (arg0) == CONSTRUCTOR
11680 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11681 && (type == TREE_TYPE (TREE_TYPE (arg0))
11682 || (TREE_CODE (type) == VECTOR_TYPE
11683 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11684 {
11685 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11686 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11687 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11688 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11689
11690 if (n != 0
11691 && (idx % width) == 0
11692 && (n % width) == 0
11693 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11694 {
11695 idx = idx / width;
11696 n = n / width;
11697
11698 if (TREE_CODE (arg0) == VECTOR_CST)
11699 {
11700 if (n == 1)
11701 return VECTOR_CST_ELT (arg0, idx);
11702
11703 tree *vals = XALLOCAVEC (tree, n);
11704 for (unsigned i = 0; i < n; ++i)
11705 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11706 return build_vector (type, vals);
11707 }
11708
11709 /* Constructor elements can be subvectors. */
11710 unsigned HOST_WIDE_INT k = 1;
11711 if (CONSTRUCTOR_NELTS (arg0) != 0)
11712 {
11713 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11714 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11715 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11716 }
11717
11718 /* We keep an exact subset of the constructor elements. */
11719 if ((idx % k) == 0 && (n % k) == 0)
11720 {
11721 if (CONSTRUCTOR_NELTS (arg0) == 0)
11722 return build_constructor (type, NULL);
11723 idx /= k;
11724 n /= k;
11725 if (n == 1)
11726 {
11727 if (idx < CONSTRUCTOR_NELTS (arg0))
11728 return CONSTRUCTOR_ELT (arg0, idx)->value;
11729 return build_zero_cst (type);
11730 }
11731
11732 vec<constructor_elt, va_gc> *vals;
11733 vec_alloc (vals, n);
11734 for (unsigned i = 0;
11735 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11736 ++i)
11737 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11738 CONSTRUCTOR_ELT
11739 (arg0, idx + i)->value);
11740 return build_constructor (type, vals);
11741 }
11742 /* The bitfield references a single constructor element. */
11743 else if (idx + n <= (idx / k + 1) * k)
11744 {
11745 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11746 return build_zero_cst (type);
11747 else if (n == k)
11748 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11749 else
11750 return fold_build3_loc (loc, code, type,
11751 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11752 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11753 }
11754 }
11755 }
11756
11757 /* A bit-field-ref that references the full argument can be stripped. */
11758 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11759 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11760 && integer_zerop (op2))
11761 return fold_convert_loc (loc, type, arg0);
11762
11763 /* On constants we can use native encode/interpret to constant
11764 fold (nearly) all BIT_FIELD_REFs. */
11765 if (CONSTANT_CLASS_P (arg0)
11766 && can_native_interpret_type_p (type)
11767 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11768 /* This limitation should not be necessary; we just need to
11769 round this up to mode size. */
11770 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11771 /* Need bit-shifting of the buffer to relax the following. */
11772 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11773 {
11774 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11775 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11776 unsigned HOST_WIDE_INT clen;
11777 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11778 /* ??? We cannot tell native_encode_expr to start at
11779 some random byte only. So limit ourselves to a reasonable amount
11780 of work. */
11781 if (clen <= 4096)
11782 {
11783 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11784 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11785 if (len > 0
11786 && len * BITS_PER_UNIT >= bitpos + bitsize)
11787 {
11788 tree v = native_interpret_expr (type,
11789 b + bitpos / BITS_PER_UNIT,
11790 bitsize / BITS_PER_UNIT);
11791 if (v)
11792 return v;
11793 }
11794 }
11795 }
11796
11797 return NULL_TREE;
11798
11799 case FMA_EXPR:
11800 /* For integers we can decompose the FMA if possible. */
11801 if (TREE_CODE (arg0) == INTEGER_CST
11802 && TREE_CODE (arg1) == INTEGER_CST)
11803 return fold_build2_loc (loc, PLUS_EXPR, type,
11804 const_binop (MULT_EXPR, arg0, arg1), arg2);
11805 if (integer_zerop (arg2))
11806 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11807
11808 return fold_fma (loc, type, arg0, arg1, arg2);
11809
11810 case VEC_PERM_EXPR:
11811 if (TREE_CODE (arg2) == VECTOR_CST)
11812 {
11813 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11814 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11815 unsigned char *sel2 = sel + nelts;
11816 bool need_mask_canon = false;
11817 bool need_mask_canon2 = false;
11818 bool all_in_vec0 = true;
11819 bool all_in_vec1 = true;
11820 bool maybe_identity = true;
11821 bool single_arg = (op0 == op1);
11822 bool changed = false;
11823
11824 mask2 = 2 * nelts - 1;
11825 mask = single_arg ? (nelts - 1) : mask2;
11826 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11827 for (i = 0; i < nelts; i++)
11828 {
11829 tree val = VECTOR_CST_ELT (arg2, i);
11830 if (TREE_CODE (val) != INTEGER_CST)
11831 return NULL_TREE;
11832
11833 /* Make sure that the perm value is in an acceptable
11834 range. */
11835 wide_int t = val;
11836 need_mask_canon |= wi::gtu_p (t, mask);
11837 need_mask_canon2 |= wi::gtu_p (t, mask2);
11838 sel[i] = t.to_uhwi () & mask;
11839 sel2[i] = t.to_uhwi () & mask2;
11840
11841 if (sel[i] < nelts)
11842 all_in_vec1 = false;
11843 else
11844 all_in_vec0 = false;
11845
11846 if ((sel[i] & (nelts-1)) != i)
11847 maybe_identity = false;
11848 }
11849
11850 if (maybe_identity)
11851 {
11852 if (all_in_vec0)
11853 return op0;
11854 if (all_in_vec1)
11855 return op1;
11856 }
11857
11858 if (all_in_vec0)
11859 op1 = op0;
11860 else if (all_in_vec1)
11861 {
11862 op0 = op1;
11863 for (i = 0; i < nelts; i++)
11864 sel[i] -= nelts;
11865 need_mask_canon = true;
11866 }
11867
11868 if ((TREE_CODE (op0) == VECTOR_CST
11869 || TREE_CODE (op0) == CONSTRUCTOR)
11870 && (TREE_CODE (op1) == VECTOR_CST
11871 || TREE_CODE (op1) == CONSTRUCTOR))
11872 {
11873 tree t = fold_vec_perm (type, op0, op1, sel);
11874 if (t != NULL_TREE)
11875 return t;
11876 }
11877
11878 if (op0 == op1 && !single_arg)
11879 changed = true;
11880
11881 /* Some targets are deficient and fail to expand a single
11882 argument permutation while still allowing an equivalent
11883 2-argument version. */
11884 if (need_mask_canon && arg2 == op2
11885 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11886 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11887 {
11888 need_mask_canon = need_mask_canon2;
11889 sel = sel2;
11890 }
11891
11892 if (need_mask_canon && arg2 == op2)
11893 {
11894 tree *tsel = XALLOCAVEC (tree, nelts);
11895 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11896 for (i = 0; i < nelts; i++)
11897 tsel[i] = build_int_cst (eltype, sel[i]);
11898 op2 = build_vector (TREE_TYPE (arg2), tsel);
11899 changed = true;
11900 }
11901
11902 if (changed)
11903 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11904 }
11905 return NULL_TREE;
11906
11907 default:
11908 return NULL_TREE;
11909 } /* switch (code) */
11910 }
11911
11912 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11913 of an array (or vector). */
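/* A hypothetical example of the lookup below: given the constructor
   { [0 ... 3] = x, [5] = y } for an array with lower bound 0,
   ACCESS_INDEX 2 returns x (covered by the RANGE_EXPR), 5 returns y,
   and 4 returns NULL_TREE because no element covers that index.  */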
11914
11915 tree
11916 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11917 {
11918 tree index_type = NULL_TREE;
11919 offset_int low_bound = 0;
11920
11921 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11922 {
11923 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11924 if (domain_type && TYPE_MIN_VALUE (domain_type))
11925 {
11926 /* Static constructors for variably sized objects make no sense. */
11927 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11928 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11929 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11930 }
11931 }
11932
11933 if (index_type)
11934 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11935 TYPE_SIGN (index_type));
11936
11937 offset_int index = low_bound - 1;
11938 if (index_type)
11939 index = wi::ext (index, TYPE_PRECISION (index_type),
11940 TYPE_SIGN (index_type));
11941
11942 offset_int max_index;
11943 unsigned HOST_WIDE_INT cnt;
11944 tree cfield, cval;
11945
11946 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11947 {
11948 /* An array constructor may set the index explicitly, specify a range,
11949 or leave the index NULL, meaning that it is the next index after
11950 the previous one. */
11951 if (cfield)
11952 {
11953 if (TREE_CODE (cfield) == INTEGER_CST)
11954 max_index = index = wi::to_offset (cfield);
11955 else
11956 {
11957 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11958 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11959 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11960 }
11961 }
11962 else
11963 {
11964 index += 1;
11965 if (index_type)
11966 index = wi::ext (index, TYPE_PRECISION (index_type),
11967 TYPE_SIGN (index_type));
11968 max_index = index;
11969 }
11970
11971 /* Do we have a match? */
11972 if (wi::cmpu (access_index, index) >= 0
11973 && wi::cmpu (access_index, max_index) <= 0)
11974 return cval;
11975 }
11976 return NULL_TREE;
11977 }
11978
11979 /* Perform constant folding and related simplification of EXPR.
11980 The related simplifications include x*1 => x, x*0 => 0, etc.,
11981 and application of the associative law.
11982 NOP_EXPR conversions may be removed freely (as long as we
11983 are careful not to change the type of the overall expression).
11984 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11985 but we can constant-fold them if they have constant operands. */
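/* A minimal usage sketch (hypothetical trees, not from this file):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
			build_int_cst (integer_type_node, 2),
			build_int_cst (integer_type_node, 3));
     tree folded = fold (sum);   => the INTEGER_CST 5

   build2 and build_int_cst are the standard tree constructors.  */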
11986
11987 #ifdef ENABLE_FOLD_CHECKING
11988 # define fold(x) fold_1 (x)
11989 static tree fold_1 (tree);
11990 static
11991 #endif
11992 tree
11993 fold (tree expr)
11994 {
11995 const tree t = expr;
11996 enum tree_code code = TREE_CODE (t);
11997 enum tree_code_class kind = TREE_CODE_CLASS (code);
11998 tree tem;
11999 location_t loc = EXPR_LOCATION (expr);
12000
12001 /* Return right away if a constant. */
12002 if (kind == tcc_constant)
12003 return t;
12004
12005 /* CALL_EXPR-like objects with variable numbers of operands are
12006 treated specially. */
12007 if (kind == tcc_vl_exp)
12008 {
12009 if (code == CALL_EXPR)
12010 {
12011 tem = fold_call_expr (loc, expr, false);
12012 return tem ? tem : expr;
12013 }
12014 return expr;
12015 }
12016
12017 if (IS_EXPR_CODE_CLASS (kind))
12018 {
12019 tree type = TREE_TYPE (t);
12020 tree op0, op1, op2;
12021
12022 switch (TREE_CODE_LENGTH (code))
12023 {
12024 case 1:
12025 op0 = TREE_OPERAND (t, 0);
12026 tem = fold_unary_loc (loc, code, type, op0);
12027 return tem ? tem : expr;
12028 case 2:
12029 op0 = TREE_OPERAND (t, 0);
12030 op1 = TREE_OPERAND (t, 1);
12031 tem = fold_binary_loc (loc, code, type, op0, op1);
12032 return tem ? tem : expr;
12033 case 3:
12034 op0 = TREE_OPERAND (t, 0);
12035 op1 = TREE_OPERAND (t, 1);
12036 op2 = TREE_OPERAND (t, 2);
12037 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12038 return tem ? tem : expr;
12039 default:
12040 break;
12041 }
12042 }
12043
12044 switch (code)
12045 {
12046 case ARRAY_REF:
12047 {
12048 tree op0 = TREE_OPERAND (t, 0);
12049 tree op1 = TREE_OPERAND (t, 1);
12050
12051 if (TREE_CODE (op1) == INTEGER_CST
12052 && TREE_CODE (op0) == CONSTRUCTOR
12053 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12054 {
12055 tree val = get_array_ctor_element_at_index (op0,
12056 wi::to_offset (op1));
12057 if (val)
12058 return val;
12059 }
12060
12061 return t;
12062 }
12063
12064 /* Return a VECTOR_CST if possible. */
12065 case CONSTRUCTOR:
12066 {
12067 tree type = TREE_TYPE (t);
12068 if (TREE_CODE (type) != VECTOR_TYPE)
12069 return t;
12070
12071 unsigned i;
12072 tree val;
12073 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12074 if (! CONSTANT_CLASS_P (val))
12075 return t;
12076
12077 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12078 }
12079
12080 case CONST_DECL:
12081 return fold (DECL_INITIAL (t));
12082
12083 default:
12084 return t;
12085 } /* switch (code) */
12086 }
12087
12088 #ifdef ENABLE_FOLD_CHECKING
12089 #undef fold
12090
12091 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12092 hash_table<nofree_ptr_hash<const tree_node> > *);
12093 static void fold_check_failed (const_tree, const_tree);
12094 void print_fold_checksum (const_tree);
12095
12096 /* When --enable-checking=fold, compute a digest of expr before
12097 and after the actual fold call, to verify that fold did not
12098 accidentally change the original expr. */
12099
12100 tree
12101 fold (tree expr)
12102 {
12103 tree ret;
12104 struct md5_ctx ctx;
12105 unsigned char checksum_before[16], checksum_after[16];
12106 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12107
12108 md5_init_ctx (&ctx);
12109 fold_checksum_tree (expr, &ctx, &ht);
12110 md5_finish_ctx (&ctx, checksum_before);
12111 ht.empty ();
12112
12113 ret = fold_1 (expr);
12114
12115 md5_init_ctx (&ctx);
12116 fold_checksum_tree (expr, &ctx, &ht);
12117 md5_finish_ctx (&ctx, checksum_after);
12118
12119 if (memcmp (checksum_before, checksum_after, 16))
12120 fold_check_failed (expr, ret);
12121
12122 return ret;
12123 }
12124
12125 void
12126 print_fold_checksum (const_tree expr)
12127 {
12128 struct md5_ctx ctx;
12129 unsigned char checksum[16], cnt;
12130 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12131
12132 md5_init_ctx (&ctx);
12133 fold_checksum_tree (expr, &ctx, &ht);
12134 md5_finish_ctx (&ctx, checksum);
12135 for (cnt = 0; cnt < 16; ++cnt)
12136 fprintf (stderr, "%02x", checksum[cnt]);
12137 putc ('\n', stderr);
12138 }
12139
12140 static void
12141 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12142 {
12143 internal_error ("fold check: original tree changed by fold");
12144 }
12145
12146 static void
12147 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12148 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12149 {
12150 const tree_node **slot;
12151 enum tree_code code;
12152 union tree_node buf;
12153 int i, len;
12154
12155 recursive_label:
12156 if (expr == NULL)
12157 return;
12158 slot = ht->find_slot (expr, INSERT);
12159 if (*slot != NULL)
12160 return;
12161 *slot = expr;
12162 code = TREE_CODE (expr);
12163 if (TREE_CODE_CLASS (code) == tcc_declaration
12164 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12165 {
12166 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12167 memcpy ((char *) &buf, expr, tree_size (expr));
12168 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12169 buf.decl_with_vis.symtab_node = NULL;
12170 expr = (tree) &buf;
12171 }
12172 else if (TREE_CODE_CLASS (code) == tcc_type
12173 && (TYPE_POINTER_TO (expr)
12174 || TYPE_REFERENCE_TO (expr)
12175 || TYPE_CACHED_VALUES_P (expr)
12176 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12177 || TYPE_NEXT_VARIANT (expr)))
12178 {
12179 /* Allow these fields to be modified. */
12180 tree tmp;
12181 memcpy ((char *) &buf, expr, tree_size (expr));
12182 expr = tmp = (tree) &buf;
12183 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12184 TYPE_POINTER_TO (tmp) = NULL;
12185 TYPE_REFERENCE_TO (tmp) = NULL;
12186 TYPE_NEXT_VARIANT (tmp) = NULL;
12187 if (TYPE_CACHED_VALUES_P (tmp))
12188 {
12189 TYPE_CACHED_VALUES_P (tmp) = 0;
12190 TYPE_CACHED_VALUES (tmp) = NULL;
12191 }
12192 }
12193 md5_process_bytes (expr, tree_size (expr), ctx);
12194 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12195 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12196 if (TREE_CODE_CLASS (code) != tcc_type
12197 && TREE_CODE_CLASS (code) != tcc_declaration
12198 && code != TREE_LIST
12199 && code != SSA_NAME
12200 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12201 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12202 switch (TREE_CODE_CLASS (code))
12203 {
12204 case tcc_constant:
12205 switch (code)
12206 {
12207 case STRING_CST:
12208 md5_process_bytes (TREE_STRING_POINTER (expr),
12209 TREE_STRING_LENGTH (expr), ctx);
12210 break;
12211 case COMPLEX_CST:
12212 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12213 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12214 break;
12215 case VECTOR_CST:
12216 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12217 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12218 break;
12219 default:
12220 break;
12221 }
12222 break;
12223 case tcc_exceptional:
12224 switch (code)
12225 {
12226 case TREE_LIST:
12227 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12228 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12229 expr = TREE_CHAIN (expr);
12230 goto recursive_label;
12231 break;
12232 case TREE_VEC:
12233 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12234 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12235 break;
12236 default:
12237 break;
12238 }
12239 break;
12240 case tcc_expression:
12241 case tcc_reference:
12242 case tcc_comparison:
12243 case tcc_unary:
12244 case tcc_binary:
12245 case tcc_statement:
12246 case tcc_vl_exp:
12247 len = TREE_OPERAND_LENGTH (expr);
12248 for (i = 0; i < len; ++i)
12249 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12250 break;
12251 case tcc_declaration:
12252 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12253 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12254 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12255 {
12256 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12257 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12258 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12259 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12260 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12261 }
12262
12263 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12264 {
12265 if (TREE_CODE (expr) == FUNCTION_DECL)
12266 {
12267 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12268 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12269 }
12270 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12271 }
12272 break;
12273 case tcc_type:
12274 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12275 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12276 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12277 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12278 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12279 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12280 if (INTEGRAL_TYPE_P (expr)
12281 || SCALAR_FLOAT_TYPE_P (expr))
12282 {
12283 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12284 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12285 }
12286 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12287 if (TREE_CODE (expr) == RECORD_TYPE
12288 || TREE_CODE (expr) == UNION_TYPE
12289 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12290 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12291 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12292 break;
12293 default:
12294 break;
12295 }
12296 }
12297
12298 /* Helper function for outputting the checksum of a tree T. When
12299 debugging with gdb, you can "define mynext" to be "next" followed
12300 by "call debug_fold_checksum (op0)", then just trace down till the
12301 outputs differ. */
12302
12303 DEBUG_FUNCTION void
12304 debug_fold_checksum (const_tree t)
12305 {
12306 int i;
12307 unsigned char checksum[16];
12308 struct md5_ctx ctx;
12309 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12310
12311 md5_init_ctx (&ctx);
12312 fold_checksum_tree (t, &ctx, &ht);
12313 md5_finish_ctx (&ctx, checksum);
12314 ht.empty ();
12315
12316 for (i = 0; i < 16; i++)
12317 fprintf (stderr, "%d ", checksum[i]);
12318
12319 fprintf (stderr, "\n");
12320 }
12321
12322 #endif
12323
12324 /* Fold a unary tree expression with code CODE of type TYPE with an
12325 operand OP0. LOC is the location of the resulting expression.
12326 Return a folded expression if successful. Otherwise, return a tree
12327 expression with code CODE of type TYPE with an operand OP0. */
12328
12329 tree
12330 fold_build1_stat_loc (location_t loc,
12331 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12332 {
12333 tree tem;
12334 #ifdef ENABLE_FOLD_CHECKING
12335 unsigned char checksum_before[16], checksum_after[16];
12336 struct md5_ctx ctx;
12337 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12338
12339 md5_init_ctx (&ctx);
12340 fold_checksum_tree (op0, &ctx, &ht);
12341 md5_finish_ctx (&ctx, checksum_before);
12342 ht.empty ();
12343 #endif
12344
12345 tem = fold_unary_loc (loc, code, type, op0);
12346 if (!tem)
12347 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12348
12349 #ifdef ENABLE_FOLD_CHECKING
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op0, &ctx, &ht);
12352 md5_finish_ctx (&ctx, checksum_after);
12353
12354 if (memcmp (checksum_before, checksum_after, 16))
12355 fold_check_failed (op0, tem);
12356 #endif
12357 return tem;
12358 }
12359
12360 /* Fold a binary tree expression with code CODE of type TYPE with
12361 operands OP0 and OP1. LOC is the location of the resulting
12362 expression. Return a folded expression if successful. Otherwise,
12363 return a tree expression with code CODE of type TYPE with operands
12364 OP0 and OP1. */
12365
12366 tree
12367 fold_build2_stat_loc (location_t loc,
12368 enum tree_code code, tree type, tree op0, tree op1
12369 MEM_STAT_DECL)
12370 {
12371 tree tem;
12372 #ifdef ENABLE_FOLD_CHECKING
12373 unsigned char checksum_before_op0[16],
12374 checksum_before_op1[16],
12375 checksum_after_op0[16],
12376 checksum_after_op1[16];
12377 struct md5_ctx ctx;
12378 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12379
12380 md5_init_ctx (&ctx);
12381 fold_checksum_tree (op0, &ctx, &ht);
12382 md5_finish_ctx (&ctx, checksum_before_op0);
12383 ht.empty ();
12384
12385 md5_init_ctx (&ctx);
12386 fold_checksum_tree (op1, &ctx, &ht);
12387 md5_finish_ctx (&ctx, checksum_before_op1);
12388 ht.empty ();
12389 #endif
12390
12391 tem = fold_binary_loc (loc, code, type, op0, op1);
12392 if (!tem)
12393 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12394
12395 #ifdef ENABLE_FOLD_CHECKING
12396 md5_init_ctx (&ctx);
12397 fold_checksum_tree (op0, &ctx, &ht);
12398 md5_finish_ctx (&ctx, checksum_after_op0);
12399 ht.empty ();
12400
12401 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12402 fold_check_failed (op0, tem);
12403
12404 md5_init_ctx (&ctx);
12405 fold_checksum_tree (op1, &ctx, &ht);
12406 md5_finish_ctx (&ctx, checksum_after_op1);
12407
12408 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12409 fold_check_failed (op1, tem);
12410 #endif
12411 return tem;
12412 }
12413
12414 /* Fold a ternary tree expression with code CODE of type TYPE with
12415 operands OP0, OP1, and OP2. LOC is the location of the resulting
12416 expression. Return a folded expression if successful. Otherwise,
12417 return a tree expression with code CODE of type TYPE with operands OP0, OP1, and OP2. */
12418
12419 tree
12420 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12421 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12422 {
12423 tree tem;
12424 #ifdef ENABLE_FOLD_CHECKING
12425 unsigned char checksum_before_op0[16],
12426 checksum_before_op1[16],
12427 checksum_before_op2[16],
12428 checksum_after_op0[16],
12429 checksum_after_op1[16],
12430 checksum_after_op2[16];
12431 struct md5_ctx ctx;
12432 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12433
12434 md5_init_ctx (&ctx);
12435 fold_checksum_tree (op0, &ctx, &ht);
12436 md5_finish_ctx (&ctx, checksum_before_op0);
12437 ht.empty ();
12438
12439 md5_init_ctx (&ctx);
12440 fold_checksum_tree (op1, &ctx, &ht);
12441 md5_finish_ctx (&ctx, checksum_before_op1);
12442 ht.empty ();
12443
12444 md5_init_ctx (&ctx);
12445 fold_checksum_tree (op2, &ctx, &ht);
12446 md5_finish_ctx (&ctx, checksum_before_op2);
12447 ht.empty ();
12448 #endif
12449
12450 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12451 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12452 if (!tem)
12453 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12454
12455 #ifdef ENABLE_FOLD_CHECKING
12456 md5_init_ctx (&ctx);
12457 fold_checksum_tree (op0, &ctx, &ht);
12458 md5_finish_ctx (&ctx, checksum_after_op0);
12459 ht.empty ();
12460
12461 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12462 fold_check_failed (op0, tem);
12463
12464 md5_init_ctx (&ctx);
12465 fold_checksum_tree (op1, &ctx, &ht);
12466 md5_finish_ctx (&ctx, checksum_after_op1);
12467 ht.empty ();
12468
12469 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12470 fold_check_failed (op1, tem);
12471
12472 md5_init_ctx (&ctx);
12473 fold_checksum_tree (op2, &ctx, &ht);
12474 md5_finish_ctx (&ctx, checksum_after_op2);
12475
12476 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12477 fold_check_failed (op2, tem);
12478 #endif
12479 return tem;
12480 }
12481
12482 /* Fold a CALL_EXPR of type TYPE calling function FN with the NARGS
12483 arguments in ARGARRAY, and a null static chain.
12484 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12485 of type TYPE from the given operands as constructed by build_call_array. */
12486
12487 tree
12488 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12489 int nargs, tree *argarray)
12490 {
12491 tree tem;
12492 #ifdef ENABLE_FOLD_CHECKING
12493 unsigned char checksum_before_fn[16],
12494 checksum_before_arglist[16],
12495 checksum_after_fn[16],
12496 checksum_after_arglist[16];
12497 struct md5_ctx ctx;
12498 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12499 int i;
12500
12501 md5_init_ctx (&ctx);
12502 fold_checksum_tree (fn, &ctx, &ht);
12503 md5_finish_ctx (&ctx, checksum_before_fn);
12504 ht.empty ();
12505
12506 md5_init_ctx (&ctx);
12507 for (i = 0; i < nargs; i++)
12508 fold_checksum_tree (argarray[i], &ctx, &ht);
12509 md5_finish_ctx (&ctx, checksum_before_arglist);
12510 ht.empty ();
12511 #endif
12512
12513 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12514 if (!tem)
12515 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12516
12517 #ifdef ENABLE_FOLD_CHECKING
12518 md5_init_ctx (&ctx);
12519 fold_checksum_tree (fn, &ctx, &ht);
12520 md5_finish_ctx (&ctx, checksum_after_fn);
12521 ht.empty ();
12522
12523 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12524 fold_check_failed (fn, tem);
12525
12526 md5_init_ctx (&ctx);
12527 for (i = 0; i < nargs; i++)
12528 fold_checksum_tree (argarray[i], &ctx, &ht);
12529 md5_finish_ctx (&ctx, checksum_after_arglist);
12530
12531 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12532 fold_check_failed (NULL_TREE, tem);
12533 #endif
12534 return tem;
12535 }
12536
12537 /* Perform constant folding and related simplification of initializer
12538 expressions. These functions behave identically to "fold_buildN" but
12539 ignore potential run-time traps and exceptions that fold must preserve. */
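/* For example, with -frounding-math the division 1.0 / 3.0 is normally
   left unfolded because the run-time rounding mode may differ, but a
   static initializer must be evaluated at compile time, so these
   wrappers temporarily clear the relevant flags around the fold.  */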
12540
12541 #define START_FOLD_INIT \
12542 int saved_signaling_nans = flag_signaling_nans;\
12543 int saved_trapping_math = flag_trapping_math;\
12544 int saved_rounding_math = flag_rounding_math;\
12545 int saved_trapv = flag_trapv;\
12546 int saved_folding_initializer = folding_initializer;\
12547 flag_signaling_nans = 0;\
12548 flag_trapping_math = 0;\
12549 flag_rounding_math = 0;\
12550 flag_trapv = 0;\
12551 folding_initializer = 1;
12552
12553 #define END_FOLD_INIT \
12554 flag_signaling_nans = saved_signaling_nans;\
12555 flag_trapping_math = saved_trapping_math;\
12556 flag_rounding_math = saved_rounding_math;\
12557 flag_trapv = saved_trapv;\
12558 folding_initializer = saved_folding_initializer;
12559
12560 tree
12561 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12562 tree type, tree op)
12563 {
12564 tree result;
12565 START_FOLD_INIT;
12566
12567 result = fold_build1_loc (loc, code, type, op);
12568
12569 END_FOLD_INIT;
12570 return result;
12571 }
12572
12573 tree
12574 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12575 tree type, tree op0, tree op1)
12576 {
12577 tree result;
12578 START_FOLD_INIT;
12579
12580 result = fold_build2_loc (loc, code, type, op0, op1);
12581
12582 END_FOLD_INIT;
12583 return result;
12584 }
12585
12586 tree
12587 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12588 int nargs, tree *argarray)
12589 {
12590 tree result;
12591 START_FOLD_INIT;
12592
12593 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12594
12595 END_FOLD_INIT;
12596 return result;
12597 }
12598
12599 #undef START_FOLD_INIT
12600 #undef END_FOLD_INIT
12601
12602 /* Determine if the first argument is a multiple of the second argument.
12603 Return 0 if it is not, or if we cannot easily determine that it is.
12604
12605 An example of the sort of thing we care about (at this point; this routine
12606 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12607 fold cases do now) is discovering that
12608
12609 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12610
12611 is a multiple of
12612
12613 SAVE_EXPR (J * 8)
12614
12615 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12616
12617 This code also handles discovering that
12618
12619 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12620
12621 is a multiple of 8 so we don't have to worry about dealing with a
12622 possible remainder.
12623
12624 Note that we *look* inside a SAVE_EXPR only to determine how it was
12625 calculated; it is not safe for fold to do much of anything else with the
12626 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12627 at run time. For example, the latter example above *cannot* be implemented
12628 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12629 evaluation time of the original SAVE_EXPR is not necessarily the same at
12630 the time the new expression is evaluated. The only optimization of this
12631 sort that would be valid is changing
12632
12633 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12634
12635 divided by 8 to
12636
12637 SAVE_EXPR (I) * SAVE_EXPR (J)
12638
12639 (where the same SAVE_EXPR (J) is used in the original and the
12640 transformed version). */
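/* A hypothetical use of this predicate:

     tree bottom = build_int_cst (sizetype, 4);
     if (multiple_of_p (sizetype, top, bottom))
       ...   TOP, e.g. the tree for i * 8, is provably divisible by 4

   here the MULT_EXPR case succeeds through its constant operand,
   because 8 is itself a multiple of 4.  */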
12641
12642 int
12643 multiple_of_p (tree type, const_tree top, const_tree bottom)
12644 {
12645 if (operand_equal_p (top, bottom, 0))
12646 return 1;
12647
12648 if (TREE_CODE (type) != INTEGER_TYPE)
12649 return 0;
12650
12651 switch (TREE_CODE (top))
12652 {
12653 case BIT_AND_EXPR:
12654 /* Bitwise and provides a power of two multiple. If the mask is
12655 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12656 if (!integer_pow2p (bottom))
12657 return 0;
12658 /* FALLTHRU */
12659
12660 case MULT_EXPR:
12661 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12662 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12663
12664 case PLUS_EXPR:
12665 case MINUS_EXPR:
12666 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12667 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12668
12669 case LSHIFT_EXPR:
12670 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12671 {
12672 tree op1, t1;
12673
12674 op1 = TREE_OPERAND (top, 1);
12675 /* const_binop may not detect overflow correctly,
12676 so check for it explicitly here. */
12677 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12678 && 0 != (t1 = fold_convert (type,
12679 const_binop (LSHIFT_EXPR,
12680 size_one_node,
12681 op1)))
12682 && !TREE_OVERFLOW (t1))
12683 return multiple_of_p (type, t1, bottom);
12684 }
12685 return 0;
12686
12687 case NOP_EXPR:
12688 /* Can't handle conversions from non-integral or wider integral type. */
12689 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12690 || (TYPE_PRECISION (type)
12691 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12692 return 0;
12693
12694 /* ... fall through ... */
12695
12696 case SAVE_EXPR:
12697 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12698
12699 case COND_EXPR:
12700 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12701 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12702
12703 case INTEGER_CST:
12704 if (TREE_CODE (bottom) != INTEGER_CST
12705 || integer_zerop (bottom)
12706 || (TYPE_UNSIGNED (type)
12707 && (tree_int_cst_sgn (top) < 0
12708 || tree_int_cst_sgn (bottom) < 0)))
12709 return 0;
12710 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12711 SIGNED);
12712
12713 default:
12714 return 0;
12715 }
12716 }
12717
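/* The two macros below deliberately shadow the real function: any
   direct recursive call to tree_expr_nonnegative_warnv_p inside the
   helpers becomes a compile-time error, forcing the use of RECURSE,
   which threads STRICT_OVERFLOW_P through and increments DEPTH so
   that the recursion stays bounded.  */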
12718 #define tree_expr_nonnegative_warnv_p(X, Y) \
12719 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12720
12721 #define RECURSE(X) \
12722 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12723
12724 /* Return true if CODE or TYPE is known to be non-negative. */
12725
12726 static bool
12727 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12728 {
12729 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12730 && truth_value_p (code))
12731 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12732 have a signed:1 type (where the values are -1 and 0). */
12733 return true;
12734 return false;
12735 }
12736
12737 /* Return true if (CODE OP0) is known to be non-negative. If the return
12738 value is based on the assumption that signed overflow is undefined,
12739 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12740 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12741
12742 bool
12743 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12744 bool *strict_overflow_p, int depth)
12745 {
12746 if (TYPE_UNSIGNED (type))
12747 return true;
12748
12749 switch (code)
12750 {
12751 case ABS_EXPR:
12752 /* We can't return true if overflow wraps (e.g. with -fwrapv),
12753 because ABS_EXPR<INT_MIN> == INT_MIN. */
12754 if (!ANY_INTEGRAL_TYPE_P (type))
12755 return true;
12756 if (TYPE_OVERFLOW_UNDEFINED (type))
12757 {
12758 *strict_overflow_p = true;
12759 return true;
12760 }
12761 break;
12762
12763 case NON_LVALUE_EXPR:
12764 case FLOAT_EXPR:
12765 case FIX_TRUNC_EXPR:
12766 return RECURSE (op0);
12767
12768 CASE_CONVERT:
12769 {
12770 tree inner_type = TREE_TYPE (op0);
12771 tree outer_type = type;
12772
12773 if (TREE_CODE (outer_type) == REAL_TYPE)
12774 {
12775 if (TREE_CODE (inner_type) == REAL_TYPE)
12776 return RECURSE (op0);
12777 if (INTEGRAL_TYPE_P (inner_type))
12778 {
12779 if (TYPE_UNSIGNED (inner_type))
12780 return true;
12781 return RECURSE (op0);
12782 }
12783 }
12784 else if (INTEGRAL_TYPE_P (outer_type))
12785 {
12786 if (TREE_CODE (inner_type) == REAL_TYPE)
12787 return RECURSE (op0);
12788 if (INTEGRAL_TYPE_P (inner_type))
12789 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12790 && TYPE_UNSIGNED (inner_type);
12791 }
12792 }
12793 break;
12794
12795 default:
12796 return tree_simple_nonnegative_warnv_p (code, type);
12797 }
12798
12799 /* We don't know the sign of `t', so be conservative and return false. */
12800 return false;
12801 }
12802
12803 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12804 value is based on the assumption that signed overflow is undefined,
12805 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12806 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12807
12808 bool
12809 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12810 tree op1, bool *strict_overflow_p,
12811 int depth)
12812 {
12813 if (TYPE_UNSIGNED (type))
12814 return true;
12815
12816 switch (code)
12817 {
12818 case POINTER_PLUS_EXPR:
12819 case PLUS_EXPR:
12820 if (FLOAT_TYPE_P (type))
12821 return RECURSE (op0) && RECURSE (op1);
12822
12823 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12824 both unsigned and at least 2 bits shorter than the result. */
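	  /* E.g. (int) (unsigned char) a + (int) (unsigned char) b is
	     at most 255 + 255 == 510, which fits in 9 bits, so with a
	     32-bit int the sum can never wrap to a negative value.  */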
12825 if (TREE_CODE (type) == INTEGER_TYPE
12826 && TREE_CODE (op0) == NOP_EXPR
12827 && TREE_CODE (op1) == NOP_EXPR)
12828 {
12829 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12830 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12831 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12832 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12833 {
12834 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12835 TYPE_PRECISION (inner2)) + 1;
12836 return prec < TYPE_PRECISION (type);
12837 }
12838 }
12839 break;
12840
12841 case MULT_EXPR:
12842 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12843 {
12844 /* x * x is always non-negative for floating-point x,
12845 or when signed overflow is undefined. */
12846 if (operand_equal_p (op0, op1, 0)
12847 || (RECURSE (op0) && RECURSE (op1)))
12848 {
12849 if (ANY_INTEGRAL_TYPE_P (type)
12850 && TYPE_OVERFLOW_UNDEFINED (type))
12851 *strict_overflow_p = true;
12852 return true;
12853 }
12854 }
12855
12856 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12857 both unsigned and the sum of their precisions is less than that of the result. */
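	  /* E.g. (int) (unsigned char) a * (int) (unsigned char) b is
	     at most 255 * 255 == 65025, which fits in 16 bits and is
	     therefore comfortably inside a 32-bit int.  */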
12858 if (TREE_CODE (type) == INTEGER_TYPE
12859 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12860 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12861 {
12862 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12863 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12864 : TREE_TYPE (op0);
12865 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12866 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12867 : TREE_TYPE (op1);
12868
12869 bool unsigned0 = TYPE_UNSIGNED (inner0);
12870 bool unsigned1 = TYPE_UNSIGNED (inner1);
12871
12872 if (TREE_CODE (op0) == INTEGER_CST)
12873 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12874
12875 if (TREE_CODE (op1) == INTEGER_CST)
12876 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12877
12878 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12879 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12880 {
12881 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12882 ? tree_int_cst_min_precision (op0, UNSIGNED)
12883 : TYPE_PRECISION (inner0);
12884
12885 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12886 ? tree_int_cst_min_precision (op1, UNSIGNED)
12887 : TYPE_PRECISION (inner1);
12888
12889 return precision0 + precision1 < TYPE_PRECISION (type);
12890 }
12891 }
12892 return false;
12893
12894 case BIT_AND_EXPR:
12895 case MAX_EXPR:
12896 return RECURSE (op0) || RECURSE (op1);
12897
12898 case BIT_IOR_EXPR:
12899 case BIT_XOR_EXPR:
12900 case MIN_EXPR:
12901 case RDIV_EXPR:
12902 case TRUNC_DIV_EXPR:
12903 case CEIL_DIV_EXPR:
12904 case FLOOR_DIV_EXPR:
12905 case ROUND_DIV_EXPR:
12906 return RECURSE (op0) && RECURSE (op1);
12907
12908 case TRUNC_MOD_EXPR:
12909 return RECURSE (op0);
12910
12911 case FLOOR_MOD_EXPR:
12912 return RECURSE (op1);
12913
12914 case CEIL_MOD_EXPR:
12915 case ROUND_MOD_EXPR:
12916 default:
12917 return tree_simple_nonnegative_warnv_p (code, type);
12918 }
12919
12920 /* We don't know the sign of `t', so be conservative and return false. */
12921 return false;
12922 }
12923
12924 /* Return true if T is known to be non-negative. If the return
12925 value is based on the assumption that signed overflow is undefined,
12926 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12927 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12928
12929 bool
12930 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12931 {
12932 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12933 return true;
12934
12935 switch (TREE_CODE (t))
12936 {
12937 case INTEGER_CST:
12938 return tree_int_cst_sgn (t) >= 0;
12939
12940 case REAL_CST:
12941 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12942
12943 case FIXED_CST:
12944 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12945
12946 case COND_EXPR:
12947 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12948
12949 case SSA_NAME:
12950 /* Limit the depth of recursion to avoid quadratic behavior.
12951 This is expected to catch almost all occurrences in practice.
12952 If this code misses important cases that unbounded recursion
12953 would not, passes that need this information could be revised
12954 to provide it through dataflow propagation. */
12955 return (!name_registered_for_update_p (t)
12956 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12957 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12958 strict_overflow_p, depth));
12959
12960 default:
12961 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12962 }
12963 }
12964
12965 /* Return true if T is known to be non-negative. If the return
12966 value is based on the assumption that signed overflow is undefined,
12967 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12968 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12969
12970 bool
12971 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12972 bool *strict_overflow_p, int depth)
12973 {
12974 switch (fn)
12975 {
12976 CASE_CFN_ACOS:
12977 CASE_CFN_ACOSH:
12978 CASE_CFN_CABS:
12979 CASE_CFN_COSH:
12980 CASE_CFN_ERFC:
12981 CASE_CFN_EXP:
12982 CASE_CFN_EXP10:
12983 CASE_CFN_EXP2:
12984 CASE_CFN_FABS:
12985 CASE_CFN_FDIM:
12986 CASE_CFN_HYPOT:
12987 CASE_CFN_POW10:
12988 CASE_CFN_FFS:
12989 CASE_CFN_PARITY:
12990 CASE_CFN_POPCOUNT:
12991 CASE_CFN_CLZ:
12992 CASE_CFN_CLRSB:
12993 case CFN_BUILT_IN_BSWAP32:
12994 case CFN_BUILT_IN_BSWAP64:
12995 /* Always true. */
12996 return true;
12997
12998 CASE_CFN_SQRT:
12999 /* sqrt(-0.0) is -0.0. */
13000 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13001 return true;
13002 return RECURSE (arg0);
13003
13004 CASE_CFN_ASINH:
13005 CASE_CFN_ATAN:
13006 CASE_CFN_ATANH:
13007 CASE_CFN_CBRT:
13008 CASE_CFN_CEIL:
13009 CASE_CFN_ERF:
13010 CASE_CFN_EXPM1:
13011 CASE_CFN_FLOOR:
13012 CASE_CFN_FMOD:
13013 CASE_CFN_FREXP:
13014 CASE_CFN_ICEIL:
13015 CASE_CFN_IFLOOR:
13016 CASE_CFN_IRINT:
13017 CASE_CFN_IROUND:
13018 CASE_CFN_LCEIL:
13019 CASE_CFN_LDEXP:
13020 CASE_CFN_LFLOOR:
13021 CASE_CFN_LLCEIL:
13022 CASE_CFN_LLFLOOR:
13023 CASE_CFN_LLRINT:
13024 CASE_CFN_LLROUND:
13025 CASE_CFN_LRINT:
13026 CASE_CFN_LROUND:
13027 CASE_CFN_MODF:
13028 CASE_CFN_NEARBYINT:
13029 CASE_CFN_RINT:
13030 CASE_CFN_ROUND:
13031 CASE_CFN_SCALB:
13032 CASE_CFN_SCALBLN:
13033 CASE_CFN_SCALBN:
13034 CASE_CFN_SIGNBIT:
13035 CASE_CFN_SIGNIFICAND:
13036 CASE_CFN_SINH:
13037 CASE_CFN_TANH:
13038 CASE_CFN_TRUNC:
13039 /* True if the 1st argument is nonnegative. */
13040 return RECURSE (arg0);
13041
13042 CASE_CFN_FMAX:
13043 /* True if the 1st OR 2nd arguments are nonnegative. */
13044 return RECURSE (arg0) || RECURSE (arg1);
13045
13046 CASE_CFN_FMIN:
13047 /* True if the 1st AND 2nd arguments are nonnegative. */
13048 return RECURSE (arg0) && RECURSE (arg1);
13049
13050 CASE_CFN_COPYSIGN:
13051 /* True if the 2nd argument is nonnegative. */
13052 return RECURSE (arg1);
13053
13054 CASE_CFN_POWI:
13055 /* True if the 1st argument is nonnegative or the second
13056 argument is an even integer. */
13057 if (TREE_CODE (arg1) == INTEGER_CST
13058 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13059 return true;
13060 return RECURSE (arg0);
13061
13062 CASE_CFN_POW:
13063 /* True if the 1st argument is nonnegative or the second
13064 argument is an even integer valued real. */
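      /* E.g. pow (x, 2.0) is known nonnegative for any x because the
	 exponent is the even integer 2; a fractional exponent such as
	 2.5 fails the real_identical test below and we fall back to
	 checking ARG0 itself.  */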
13065 if (TREE_CODE (arg1) == REAL_CST)
13066 {
13067 REAL_VALUE_TYPE c;
13068 HOST_WIDE_INT n;
13069
13070 c = TREE_REAL_CST (arg1);
13071 n = real_to_integer (&c);
13072 if ((n & 1) == 0)
13073 {
13074 REAL_VALUE_TYPE cint;
13075 real_from_integer (&cint, VOIDmode, n, SIGNED);
13076 if (real_identical (&c, &cint))
13077 return true;
13078 }
13079 }
13080 return RECURSE (arg0);
13081
13082 default:
13083 break;
13084 }
13085 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13086 }
13087
13088 /* Return true if T is known to be non-negative. If the return
13089 value is based on the assumption that signed overflow is undefined,
13090 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13091 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13092
13093 static bool
13094 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13095 {
13096 enum tree_code code = TREE_CODE (t);
13097 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13098 return true;
13099
13100 switch (code)
13101 {
13102 case TARGET_EXPR:
13103 {
13104 tree temp = TARGET_EXPR_SLOT (t);
13105 t = TARGET_EXPR_INITIAL (t);
13106
13107 /* If the initializer is non-void, then it's a normal expression
13108 that will be assigned to the slot. */
13109 if (!VOID_TYPE_P (t))
13110 return RECURSE (t);
13111
13112 /* Otherwise, the initializer sets the slot in some way. One common
13113 way is an assignment statement at the end of the initializer. */
13114 while (1)
13115 {
13116 if (TREE_CODE (t) == BIND_EXPR)
13117 t = expr_last (BIND_EXPR_BODY (t));
13118 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13119 || TREE_CODE (t) == TRY_CATCH_EXPR)
13120 t = expr_last (TREE_OPERAND (t, 0));
13121 else if (TREE_CODE (t) == STATEMENT_LIST)
13122 t = expr_last (t);
13123 else
13124 break;
13125 }
13126 if (TREE_CODE (t) == MODIFY_EXPR
13127 && TREE_OPERAND (t, 0) == temp)
13128 return RECURSE (TREE_OPERAND (t, 1));
13129
13130 return false;
13131 }
13132
13133 case CALL_EXPR:
13134 {
13135 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13136 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13137
13138 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13139 get_call_combined_fn (t),
13140 arg0,
13141 arg1,
13142 strict_overflow_p, depth);
13143 }
13144 case COMPOUND_EXPR:
13145 case MODIFY_EXPR:
13146 return RECURSE (TREE_OPERAND (t, 1));
13147
13148 case BIND_EXPR:
13149 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13150
13151 case SAVE_EXPR:
13152 return RECURSE (TREE_OPERAND (t, 0));
13153
13154 default:
13155 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13156 }
13157 }
13158
13159 #undef RECURSE
13160 #undef tree_expr_nonnegative_warnv_p
13161
13162 /* Return true if T is known to be non-negative. If the return
13163 value is based on the assumption that signed overflow is undefined,
13164 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13165 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13166
13167 bool
13168 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13169 {
13170 enum tree_code code;
13171 if (t == error_mark_node)
13172 return false;
13173
13174 code = TREE_CODE (t);
13175 switch (TREE_CODE_CLASS (code))
13176 {
13177 case tcc_binary:
13178 case tcc_comparison:
13179 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13180 TREE_TYPE (t),
13181 TREE_OPERAND (t, 0),
13182 TREE_OPERAND (t, 1),
13183 strict_overflow_p, depth);
13184
13185 case tcc_unary:
13186 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13187 TREE_TYPE (t),
13188 TREE_OPERAND (t, 0),
13189 strict_overflow_p, depth);
13190
13191 case tcc_constant:
13192 case tcc_declaration:
13193 case tcc_reference:
13194 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13195
13196 default:
13197 break;
13198 }
13199
13200 switch (code)
13201 {
13202 case TRUTH_AND_EXPR:
13203 case TRUTH_OR_EXPR:
13204 case TRUTH_XOR_EXPR:
13205 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13206 TREE_TYPE (t),
13207 TREE_OPERAND (t, 0),
13208 TREE_OPERAND (t, 1),
13209 strict_overflow_p, depth);
13210 case TRUTH_NOT_EXPR:
13211 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13212 TREE_TYPE (t),
13213 TREE_OPERAND (t, 0),
13214 strict_overflow_p, depth);
13215
13216 case COND_EXPR:
13217 case CONSTRUCTOR:
13218 case OBJ_TYPE_REF:
13219 case ASSERT_EXPR:
13220 case ADDR_EXPR:
13221 case WITH_SIZE_EXPR:
13222 case SSA_NAME:
13223 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13224
13225 default:
13226 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13227 }
13228 }
13229
13230 /* Return true if `t' is known to be non-negative. Handle warnings
13231 about undefined signed overflow. */
13232
13233 bool
13234 tree_expr_nonnegative_p (tree t)
13235 {
13236 bool ret, strict_overflow_p;
13237
13238 strict_overflow_p = false;
13239 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13240 if (strict_overflow_p)
13241 fold_overflow_warning (("assuming signed overflow does not occur when "
13242 "determining that expression is always "
13243 "non-negative"),
13244 WARN_STRICT_OVERFLOW_MISC);
13245 return ret;
13246 }
13247
13248
13249 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13250 For floating point we further ensure that T is not denormal.
13251 Similar logic is present in nonzero_address_p in rtlanal.c.
13252
13253 If the return value is based on the assumption that signed overflow
13254 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13255 change *STRICT_OVERFLOW_P. */
13256
13257 bool
13258 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13259 bool *strict_overflow_p)
13260 {
13261 switch (code)
13262 {
13263 case ABS_EXPR:
13264 return tree_expr_nonzero_warnv_p (op0,
13265 strict_overflow_p);
13266
13267 case NOP_EXPR:
13268 {
13269 tree inner_type = TREE_TYPE (op0);
13270 tree outer_type = type;
13271
13272 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13273 && tree_expr_nonzero_warnv_p (op0,
13274 strict_overflow_p));
13275 }
13276 break;
13277
13278 case NON_LVALUE_EXPR:
13279 return tree_expr_nonzero_warnv_p (op0,
13280 strict_overflow_p);
13281
13282 default:
13283 break;
13284 }
13285
13286 return false;
13287 }
13288
13289 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13290 For floating point we further ensure that T is not denormal.
13291 Similar logic is present in nonzero_address_p in rtlanal.c.
13292
13293 If the return value is based on the assumption that signed overflow
13294 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13295 change *STRICT_OVERFLOW_P. */
13296
13297 bool
13298 tree_binary_nonzero_warnv_p (enum tree_code code,
13299 tree type,
13300 tree op0,
13301 tree op1, bool *strict_overflow_p)
13302 {
13303 bool sub_strict_overflow_p;
13304 switch (code)
13305 {
13306 case POINTER_PLUS_EXPR:
13307 case PLUS_EXPR:
13308 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13309 {
13310 /* In the presence of negative values it is hard
13311 to say anything definite. */
13312 sub_strict_overflow_p = false;
13313 if (!tree_expr_nonnegative_warnv_p (op0,
13314 &sub_strict_overflow_p)
13315 || !tree_expr_nonnegative_warnv_p (op1,
13316 &sub_strict_overflow_p))
13317 return false;
13318 /* One of the operands must be positive and the other non-negative. */
13319 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13320 overflows, on a two's-complement machine the sum of two
13321 nonnegative numbers can never be zero. */
13322 return (tree_expr_nonzero_warnv_p (op0,
13323 strict_overflow_p)
13324 || tree_expr_nonzero_warnv_p (op1,
13325 strict_overflow_p));
13326 }
13327 break;
13328
13329 case MULT_EXPR:
13330 if (TYPE_OVERFLOW_UNDEFINED (type))
13331 {
13332 if (tree_expr_nonzero_warnv_p (op0,
13333 strict_overflow_p)
13334 && tree_expr_nonzero_warnv_p (op1,
13335 strict_overflow_p))
13336 {
13337 *strict_overflow_p = true;
13338 return true;
13339 }
13340 }
13341 break;
13342
13343 case MIN_EXPR:
13344 sub_strict_overflow_p = false;
13345 if (tree_expr_nonzero_warnv_p (op0,
13346 &sub_strict_overflow_p)
13347 && tree_expr_nonzero_warnv_p (op1,
13348 &sub_strict_overflow_p))
13349 {
13350 if (sub_strict_overflow_p)
13351 *strict_overflow_p = true;
/* The minimum of two nonzero values is itself nonzero.  */
return true;
13352 }
13353 break;
13354
13355 case MAX_EXPR:
13356 sub_strict_overflow_p = false;
13357 if (tree_expr_nonzero_warnv_p (op0,
13358 &sub_strict_overflow_p))
13359 {
13360 if (sub_strict_overflow_p)
13361 *strict_overflow_p = true;
13362
13363 /* When both operands are nonzero, then MAX must be too. */
13364 if (tree_expr_nonzero_warnv_p (op1,
13365 strict_overflow_p))
13366 return true;
13367
13368 /* MAX where operand 0 is positive is positive. */
13369 return tree_expr_nonnegative_warnv_p (op0,
13370 strict_overflow_p);
13371 }
13372 /* MAX where operand 1 is positive is positive. */
13373 else if (tree_expr_nonzero_warnv_p (op1,
13374 &sub_strict_overflow_p)
13375 && tree_expr_nonnegative_warnv_p (op1,
13376 &sub_strict_overflow_p))
13377 {
13378 if (sub_strict_overflow_p)
13379 *strict_overflow_p = true;
13380 return true;
13381 }
13382 break;
13383
13384 case BIT_IOR_EXPR:
13385 return (tree_expr_nonzero_warnv_p (op1,
13386 strict_overflow_p)
13387 || tree_expr_nonzero_warnv_p (op0,
13388 strict_overflow_p));
13389
13390 default:
13391 break;
13392 }
13393
13394 return false;
13395 }
13396
13397 /* Return true when T is an address and is known to be nonzero.
13398 For floating point we further ensure that T is not denormal.
13399 Similar logic is present in nonzero_address_p in rtlanal.c.
13400
13401 If the return value is based on the assumption that signed overflow
13402 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13403 change *STRICT_OVERFLOW_P. */
13404
13405 bool
13406 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13407 {
13408 bool sub_strict_overflow_p;
13409 switch (TREE_CODE (t))
13410 {
13411 case INTEGER_CST:
13412 return !integer_zerop (t);
13413
13414 case ADDR_EXPR:
13415 {
13416 tree base = TREE_OPERAND (t, 0);
13417
13418 if (!DECL_P (base))
13419 base = get_base_address (base);
13420
13421 if (!base)
13422 return false;
13423
13424 /* For objects in the symbol table, check whether we know they are nonzero.
13425 Don't do anything for variables and functions before symtab is built;
13426 it is quite possible that they will be declared weak later. */
13427 if (DECL_P (base) && decl_in_symtab_p (base))
13428 {
13429 struct symtab_node *symbol;
13430
13431 symbol = symtab_node::get_create (base);
13432 if (symbol)
13433 return symbol->nonzero_address ();
13434 else
13435 return false;
13436 }
13437
13438 /* Function-local objects are never NULL. */
13439 if (DECL_P (base)
13440 && (DECL_CONTEXT (base)
13441 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13442 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13443 return true;
13444
13445 /* Constants are never weak. */
13446 if (CONSTANT_CLASS_P (base))
13447 return true;
13448
13449 return false;
13450 }
13451
13452 case COND_EXPR:
13453 sub_strict_overflow_p = false;
13454 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13455 &sub_strict_overflow_p)
13456 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13457 &sub_strict_overflow_p))
13458 {
13459 if (sub_strict_overflow_p)
13460 *strict_overflow_p = true;
13461 return true;
13462 }
13463 break;
13464
13465 default:
13466 break;
13467 }
13468 return false;
13469 }
13470
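/* As above, shadow the real function so that recursive calls must go
   through RECURSE, which increments DEPTH.  */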
13471 #define integer_valued_real_p(X) \
13472 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13473
13474 #define RECURSE(X) \
13475 ((integer_valued_real_p) (X, depth + 1))
13476
13477 /* Return true if the floating point result of (CODE OP0) has an
13478 integer value. We also allow +Inf, -Inf and NaN to be considered
13479 integer values. Return false for signaling NaN.
13480
13481 DEPTH is the current nesting depth of the query. */
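/* E.g. (double) i for integral i is integer valued (FLOAT_EXPR), and
   fabs of an integer-valued operand remains integer valued.  */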
13482
13483 bool
13484 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13485 {
13486 switch (code)
13487 {
13488 case FLOAT_EXPR:
13489 return true;
13490
13491 case ABS_EXPR:
13492 return RECURSE (op0);
13493
13494 CASE_CONVERT:
13495 {
13496 tree type = TREE_TYPE (op0);
13497 if (TREE_CODE (type) == INTEGER_TYPE)
13498 return true;
13499 if (TREE_CODE (type) == REAL_TYPE)
13500 return RECURSE (op0);
13501 break;
13502 }
13503
13504 default:
13505 break;
13506 }
13507 return false;
13508 }
13509
13510 /* Return true if the floating point result of (CODE OP0 OP1) has an
13511 integer value. We also allow +Inf, -Inf and NaN to be considered
13512 integer values. Return false for signaling NaN.
13513
13514 DEPTH is the current nesting depth of the query. */
13515
13516 bool
13517 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13518 {
13519 switch (code)
13520 {
13521 case PLUS_EXPR:
13522 case MINUS_EXPR:
13523 case MULT_EXPR:
13524 case MIN_EXPR:
13525 case MAX_EXPR:
13526 return RECURSE (op0) && RECURSE (op1);
13527
13528 default:
13529 break;
13530 }
13531 return false;
13532 }
13533
13534 /* Return true if the floating point result of calling FN with arguments
13535 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13536 considered integer values. Return false for signaling NaN. If FN
13537 takes fewer than 2 arguments, the remaining ARGn are null.
13538
13539 DEPTH is the current nesting depth of the query. */
13540
13541 bool
13542 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13543 {
13544 switch (fn)
13545 {
13546 CASE_CFN_CEIL:
13547 CASE_CFN_FLOOR:
13548 CASE_CFN_NEARBYINT:
13549 CASE_CFN_RINT:
13550 CASE_CFN_ROUND:
13551 CASE_CFN_TRUNC:
13552 return true;
13553
13554 CASE_CFN_FMIN:
13555 CASE_CFN_FMAX:
13556 return RECURSE (arg0) && RECURSE (arg1);
13557
13558 default:
13559 break;
13560 }
13561 return false;
13562 }
13563
13564 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13565 has an integer value. We also allow +Inf, -Inf and NaN to be
13566 considered integer values. Return false for signaling NaN.
13567
13568 DEPTH is the current nesting depth of the query. */
13569
13570 bool
13571 integer_valued_real_single_p (tree t, int depth)
13572 {
13573 switch (TREE_CODE (t))
13574 {
13575 case REAL_CST:
13576 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13577
13578 case COND_EXPR:
13579 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13580
13581 case SSA_NAME:
13582 /* Limit the depth of recursion to avoid quadratic behavior.
13583 This is expected to catch almost all occurrences in practice.
13584 If this code misses important cases that unbounded recursion
13585 would not, passes that need this information could be revised
13586 to provide it through dataflow propagation. */
13587 return (!name_registered_for_update_p (t)
13588 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13589 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13590 depth));
13591
13592 default:
13593 break;
13594 }
13595 return false;
13596 }
13597
13598 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13599 has an integer value. We also allow +Inf, -Inf and NaN to be
13600 considered integer values. Return false for signaling NaN.
13601
13602 DEPTH is the current nesting depth of the query. */
13603
13604 static bool
13605 integer_valued_real_invalid_p (tree t, int depth)
13606 {
13607 switch (TREE_CODE (t))
13608 {
13609 case COMPOUND_EXPR:
13610 case MODIFY_EXPR:
13611 case BIND_EXPR:
13612 return RECURSE (TREE_OPERAND (t, 1));
13613
13614 case SAVE_EXPR:
13615 return RECURSE (TREE_OPERAND (t, 0));
13616
13617 default:
13618 break;
13619 }
13620 return false;
13621 }
13622
13623 #undef RECURSE
13624 #undef integer_valued_real_p
13625
13626 /* Return true if the floating point expression T has an integer value.
13627 We also allow +Inf, -Inf and NaN to be considered integer values.
13628 Return false for signaling NaN.
13629
13630 DEPTH is the current nesting depth of the query. */
13631
13632 bool
13633 integer_valued_real_p (tree t, int depth)
13634 {
13635 if (t == error_mark_node)
13636 return false;
13637
13638 tree_code code = TREE_CODE (t);
13639 switch (TREE_CODE_CLASS (code))
13640 {
13641 case tcc_binary:
13642 case tcc_comparison:
13643 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13644 TREE_OPERAND (t, 1), depth);
13645
13646 case tcc_unary:
13647 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13648
13649 case tcc_constant:
13650 case tcc_declaration:
13651 case tcc_reference:
13652 return integer_valued_real_single_p (t, depth);
13653
13654 default:
13655 break;
13656 }
13657
13658 switch (code)
13659 {
13660 case COND_EXPR:
13661 case SSA_NAME:
13662 return integer_valued_real_single_p (t, depth);
13663
13664 case CALL_EXPR:
13665 {
13666 tree arg0 = (call_expr_nargs (t) > 0
13667 ? CALL_EXPR_ARG (t, 0)
13668 : NULL_TREE);
13669 tree arg1 = (call_expr_nargs (t) > 1
13670 ? CALL_EXPR_ARG (t, 1)
13671 : NULL_TREE);
13672 return integer_valued_real_call_p (get_call_combined_fn (t),
13673 arg0, arg1, depth);
13674 }
13675
13676 default:
13677 return integer_valued_real_invalid_p (t, depth);
13678 }
13679 }
13680
13681 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13682 attempt to fold the expression to a constant without modifying TYPE,
13683 OP0 or OP1.
13684
13685 If the expression could be simplified to a constant, then return
13686 the constant. If the expression would not be simplified to a
13687 constant, then return NULL_TREE. */
13688
13689 tree
13690 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13691 {
13692 tree tem = fold_binary (code, type, op0, op1);
13693 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13694 }
13695
13696 /* Given the components of a unary expression CODE, TYPE and OP0,
13697 attempt to fold the expression to a constant without modifying
13698 TYPE or OP0.
13699
13700 If the expression could be simplified to a constant, then return
13701 the constant. If the expression would not be simplified to a
13702 constant, then return NULL_TREE. */
13703
13704 tree
13705 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13706 {
13707 tree tem = fold_unary (code, type, op0);
13708 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13709 }
13710
13711 /* If EXP represents referencing an element in a constant string
13712 (either via pointer arithmetic or array indexing), return the
13713 tree representing the value accessed, otherwise return NULL. */
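/* For example, "abc"[1] and *("abc" + 2) both read from a constant
   string and fold to the character constants 'b' and 'c'.  */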
13714
13715 tree
13716 fold_read_from_constant_string (tree exp)
13717 {
13718 if ((TREE_CODE (exp) == INDIRECT_REF
13719 || TREE_CODE (exp) == ARRAY_REF)
13720 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13721 {
13722 tree exp1 = TREE_OPERAND (exp, 0);
13723 tree index;
13724 tree string;
13725 location_t loc = EXPR_LOCATION (exp);
13726
13727 if (TREE_CODE (exp) == INDIRECT_REF)
13728 string = string_constant (exp1, &index);
13729 else
13730 {
13731 tree low_bound = array_ref_low_bound (exp);
13732 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13733
13734 /* Optimize the special-case of a zero lower bound.
13735
13736 We convert the low_bound to sizetype to avoid some problems
13737 with constant folding. (E.g. suppose the lower bound is 1,
13738 and its mode is QI. Without the conversion,l (ARRAY
13739 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13740 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13741 if (! integer_zerop (low_bound))
13742 index = size_diffop_loc (loc, index,
13743 fold_convert_loc (loc, sizetype, low_bound));
13744
13745 string = exp1;
13746 }
13747
13748 if (string
13749 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13750 && TREE_CODE (string) == STRING_CST
13751 && TREE_CODE (index) == INTEGER_CST
13752 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13753 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13754 == MODE_INT)
13755 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13756 return build_int_cst_type (TREE_TYPE (exp),
13757 (TREE_STRING_POINTER (string)
13758 [TREE_INT_CST_LOW (index)]));
13759 }
13760 return NULL;
13761 }
13762
13763 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13764 an integer, real, or fixed-point constant.
13765
13766 TYPE is the type of the result. */
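/* E.g. negating the INTEGER_CST INT_MIN of a signed type wraps back to
   INT_MIN; force_fit_type then sets TREE_OVERFLOW on the result.  */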
13767
13768 static tree
13769 fold_negate_const (tree arg0, tree type)
13770 {
13771 tree t = NULL_TREE;
13772
13773 switch (TREE_CODE (arg0))
13774 {
13775 case INTEGER_CST:
13776 {
13777 bool overflow;
13778 wide_int val = wi::neg (arg0, &overflow);
13779 t = force_fit_type (type, val, 1,
13780 (overflow | TREE_OVERFLOW (arg0))
13781 && !TYPE_UNSIGNED (type));
13782 break;
13783 }
13784
13785 case REAL_CST:
13786 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13787 break;
13788
13789 case FIXED_CST:
13790 {
13791 FIXED_VALUE_TYPE f;
13792 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13793 &(TREE_FIXED_CST (arg0)), NULL,
13794 TYPE_SATURATING (type));
13795 t = build_fixed (type, f);
13796 /* Propagate overflow flags. */
13797 if (overflow_p | TREE_OVERFLOW (arg0))
13798 TREE_OVERFLOW (t) = 1;
13799 break;
13800 }
13801
13802 default:
13803 gcc_unreachable ();
13804 }
13805
13806 return t;
13807 }
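
/* Sketch of the signed-overflow case (illustrative only): negating the
   most negative int,

     tree m = TYPE_MIN_VALUE (integer_type_node);
     tree n = fold_negate_const (m, integer_type_node);

   wraps back to the same value, and TREE_OVERFLOW (n) is expected to
   be set, since -INT_MIN is not representable in int.  */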
13808
13809 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13810 an integer constant or real constant.
13811
13812 TYPE is the type of the result. */
13813
13814 tree
13815 fold_abs_const (tree arg0, tree type)
13816 {
13817 tree t = NULL_TREE;
13818
13819 switch (TREE_CODE (arg0))
13820 {
13821 case INTEGER_CST:
13822 {
13823 /* If the value is unsigned or non-negative, then the absolute value
13824 is the same as the ordinary value. */
13825 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13826 t = arg0;
13827
13828 /* If the value is negative, then the absolute value is
13829 its negation. */
13830 else
13831 {
13832 bool overflow;
13833 wide_int val = wi::neg (arg0, &overflow);
13834 t = force_fit_type (type, val, -1,
13835 overflow | TREE_OVERFLOW (arg0));
13836 }
13837 }
13838 break;
13839
13840 case REAL_CST:
13841 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13842 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13843 else
13844 t = arg0;
13845 break;
13846
13847 default:
13848 gcc_unreachable ();
13849 }
13850
13851 return t;
13852 }
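
/* Illustrative sketch (not from the original source):

     tree v = build_int_cst (integer_type_node, -5);
     tree a = fold_abs_const (v, integer_type_node);

   should yield an INTEGER_CST of value 5; as with negation, the
   absolute value of the most negative int overflows and the result
   carries TREE_OVERFLOW.  */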
13853
13854 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13855 constant. TYPE is the type of the result. */
13856
13857 static tree
13858 fold_not_const (const_tree arg0, tree type)
13859 {
13860 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13861
13862 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13863 }
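
/* Illustrative sketch (not from the original source): bitwise-not of
   zero,

     tree z = build_int_cst (integer_type_node, 0);
     tree n = fold_not_const (z, integer_type_node);

   should yield an INTEGER_CST of value -1, i.e. all bits set in the
   precision of int.  */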
13864
13865 /* Given CODE, a relational operator, the target type TYPE, and two
13866 constant operands OP0 and OP1, return the result of the
13867 relational operation. If the result is not a compile time
13868 constant, then return NULL_TREE. */
13869
13870 static tree
13871 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13872 {
13873 int result, invert;
13874
13875 /* From here on, the only cases we handle are when the result is
13876 known to be a constant. */
13877
13878 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13879 {
13880 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13881 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13882
13883 /* Handle the cases where either operand is a NaN. */
13884 if (real_isnan (c0) || real_isnan (c1))
13885 {
13886 switch (code)
13887 {
13888 case EQ_EXPR:
13889 case ORDERED_EXPR:
13890 result = 0;
13891 break;
13892
13893 case NE_EXPR:
13894 case UNORDERED_EXPR:
13895 case UNLT_EXPR:
13896 case UNLE_EXPR:
13897 case UNGT_EXPR:
13898 case UNGE_EXPR:
13899 case UNEQ_EXPR:
13900 result = 1;
13901 break;
13902
13903 case LT_EXPR:
13904 case LE_EXPR:
13905 case GT_EXPR:
13906 case GE_EXPR:
13907 case LTGT_EXPR:
13908 if (flag_trapping_math)
13909 return NULL_TREE;
13910 result = 0;
13911 break;
13912
13913 default:
13914 gcc_unreachable ();
13915 }
13916
13917 return constant_boolean_node (result, type);
13918 }
13919
13920 return constant_boolean_node (real_compare (code, c0, c1), type);
13921 }
13922
13923 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13924 {
13925 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13926 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13927 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13928 }
13929
13930 /* Handle equality/inequality of complex constants. */
13931 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13932 {
13933 tree rcond = fold_relational_const (code, type,
13934 TREE_REALPART (op0),
13935 TREE_REALPART (op1));
13936 tree icond = fold_relational_const (code, type,
13937 TREE_IMAGPART (op0),
13938 TREE_IMAGPART (op1));
13939 if (code == EQ_EXPR)
13940 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13941 else if (code == NE_EXPR)
13942 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13943 else
13944 return NULL_TREE;
13945 }
13946
13947 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13948 {
13949 unsigned count = VECTOR_CST_NELTS (op0);
13950 tree *elts = XALLOCAVEC (tree, count);
13951 gcc_assert (VECTOR_CST_NELTS (op1) == count
13952 && TYPE_VECTOR_SUBPARTS (type) == count);
13953
13954 for (unsigned i = 0; i < count; i++)
13955 {
13956 tree elem_type = TREE_TYPE (type);
13957 tree elem0 = VECTOR_CST_ELT (op0, i);
13958 tree elem1 = VECTOR_CST_ELT (op1, i);
13959
13960 tree tem = fold_relational_const (code, elem_type,
13961 elem0, elem1);
13962
13963 if (tem == NULL_TREE)
13964 return NULL_TREE;
13965
13966 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13967 }
13968
13969 return build_vector (type, elts);
13970 }
13971
13972 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13973
13974 To compute GT, swap the arguments and do LT.
13975 To compute GE, do LT and invert the result.
13976 To compute LE, swap the arguments, do LT and invert the result.
13977 To compute NE, do EQ and invert the result.
13978
13979 Therefore, the code below must handle only EQ and LT. */
13980
13981 if (code == LE_EXPR || code == GT_EXPR)
13982 {
13983 std::swap (op0, op1);
13984 code = swap_tree_comparison (code);
13985 }
13986
13987 /* Note that it is safe to invert for real values here because we
13988 have already handled the one case where it matters. */
13989
13990 invert = 0;
13991 if (code == NE_EXPR || code == GE_EXPR)
13992 {
13993 invert = 1;
13994 code = invert_tree_comparison (code, false);
13995 }
13996
13997 /* Compute a result for LT or EQ if args permit;
13998 otherwise return NULL_TREE. */
13999 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14000 {
14001 if (code == EQ_EXPR)
14002 result = tree_int_cst_equal (op0, op1);
14003 else
14004 result = tree_int_cst_lt (op0, op1);
14005 }
14006 else
14007 return NULL_TREE;
14008
14009 if (invert)
14010 result ^= 1;
14011 return constant_boolean_node (result, type);
14012 }
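
/* Illustrative sketch (not from the original source): comparing two
   INTEGER_CSTs,

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree res = fold_relational_const (LT_EXPR, boolean_type_node,
                                       two, three);

   should yield boolean_true_node.  LT_EXPR with a NaN operand folds
   to false only under -fno-trapping-math; otherwise it returns
   NULL_TREE, as the REAL_CST handling above shows.  */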
14013
14014 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14015 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14016 itself. */
14017
14018 tree
14019 fold_build_cleanup_point_expr (tree type, tree expr)
14020 {
14021 /* If the expression does not have side effects then we don't have to wrap
14022 it with a cleanup point expression. */
14023 if (!TREE_SIDE_EFFECTS (expr))
14024 return expr;
14025
14026 /* If the expression is a RETURN_EXPR, check whether the expression inside
14027 the return, or the right-hand side of the MODIFY_EXPR inside the return,
14028 has side effects. If either has none, we don't need to wrap the
14029 expression in a CLEANUP_POINT_EXPR. Note we don't check the left-hand
14030 side of the MODIFY_EXPR because it should always be the return decl. */
14031 if (TREE_CODE (expr) == RETURN_EXPR)
14032 {
14033 tree op = TREE_OPERAND (expr, 0);
14034 if (!op || !TREE_SIDE_EFFECTS (op))
14035 return expr;
14036 op = TREE_OPERAND (op, 1);
14037 if (!TREE_SIDE_EFFECTS (op))
14038 return expr;
14039 }
14040
14041 return build1 (CLEANUP_POINT_EXPR, type, expr);
14042 }
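
/* Illustrative sketch (not from the original source): wrapping a call
   statement whose temporaries may need cleanups,

     tree wrapped = fold_build_cleanup_point_expr (void_type_node, call);

   yields CLEANUP_POINT_EXPR <call> when CALL has side effects, and
   returns CALL unchanged when it has none.  */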
14043
14044 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14045 of an indirection through OP0, or NULL_TREE if no simplification is
14046 possible. */
14047
14048 tree
14049 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14050 {
14051 tree sub = op0;
14052 tree subtype;
14053
14054 STRIP_NOPS (sub);
14055 subtype = TREE_TYPE (sub);
14056 if (!POINTER_TYPE_P (subtype))
14057 return NULL_TREE;
14058
14059 if (TREE_CODE (sub) == ADDR_EXPR)
14060 {
14061 tree op = TREE_OPERAND (sub, 0);
14062 tree optype = TREE_TYPE (op);
14063 /* *&CONST_DECL -> to the value of the const decl. */
14064 if (TREE_CODE (op) == CONST_DECL)
14065 return DECL_INITIAL (op);
14066 /* *&p => p; make sure to handle *&"str"[cst] here. */
14067 if (type == optype)
14068 {
14069 tree fop = fold_read_from_constant_string (op);
14070 if (fop)
14071 return fop;
14072 else
14073 return op;
14074 }
14075 /* *(foo *)&fooarray => fooarray[0] */
14076 else if (TREE_CODE (optype) == ARRAY_TYPE
14077 && type == TREE_TYPE (optype)
14078 && (!in_gimple_form
14079 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14080 {
14081 tree type_domain = TYPE_DOMAIN (optype);
14082 tree min_val = size_zero_node;
14083 if (type_domain && TYPE_MIN_VALUE (type_domain))
14084 min_val = TYPE_MIN_VALUE (type_domain);
14085 if (in_gimple_form
14086 && TREE_CODE (min_val) != INTEGER_CST)
14087 return NULL_TREE;
14088 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14089 NULL_TREE, NULL_TREE);
14090 }
14091 /* *(foo *)&complexfoo => __real__ complexfoo */
14092 else if (TREE_CODE (optype) == COMPLEX_TYPE
14093 && type == TREE_TYPE (optype))
14094 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14095 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14096 else if (TREE_CODE (optype) == VECTOR_TYPE
14097 && type == TREE_TYPE (optype))
14098 {
14099 tree part_width = TYPE_SIZE (type);
14100 tree index = bitsize_int (0);
14101 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14102 }
14103 }
14104
14105 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14106 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14107 {
14108 tree op00 = TREE_OPERAND (sub, 0);
14109 tree op01 = TREE_OPERAND (sub, 1);
14110
14111 STRIP_NOPS (op00);
14112 if (TREE_CODE (op00) == ADDR_EXPR)
14113 {
14114 tree op00type;
14115 op00 = TREE_OPERAND (op00, 0);
14116 op00type = TREE_TYPE (op00);
14117
14118 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14119 if (TREE_CODE (op00type) == VECTOR_TYPE
14120 && type == TREE_TYPE (op00type))
14121 {
14122 HOST_WIDE_INT offset = tree_to_shwi (op01);
14123 tree part_width = TYPE_SIZE (type);
14124 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
14125 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14126 tree index = bitsize_int (indexi);
14127
14128 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14129 return fold_build3_loc (loc,
14130 BIT_FIELD_REF, type, op00,
14131 part_width, index);
14132
14133 }
14134 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14135 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14136 && type == TREE_TYPE (op00type))
14137 {
14138 tree size = TYPE_SIZE_UNIT (type);
14139 if (tree_int_cst_equal (size, op01))
14140 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14141 }
14142 /* ((foo *)&fooarray)[1] => fooarray[1] */
14143 else if (TREE_CODE (op00type) == ARRAY_TYPE
14144 && type == TREE_TYPE (op00type))
14145 {
14146 tree type_domain = TYPE_DOMAIN (op00type);
14147 tree min_val = size_zero_node;
14148 if (type_domain && TYPE_MIN_VALUE (type_domain))
14149 min_val = TYPE_MIN_VALUE (type_domain);
14150 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14151 TYPE_SIZE_UNIT (type));
14152 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14153 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14154 NULL_TREE, NULL_TREE);
14155 }
14156 }
14157 }
14158
14159 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14160 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14161 && type == TREE_TYPE (TREE_TYPE (subtype))
14162 && (!in_gimple_form
14163 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14164 {
14165 tree type_domain;
14166 tree min_val = size_zero_node;
14167 sub = build_fold_indirect_ref_loc (loc, sub);
14168 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14169 if (type_domain && TYPE_MIN_VALUE (type_domain))
14170 min_val = TYPE_MIN_VALUE (type_domain);
14171 if (in_gimple_form
14172 && TREE_CODE (min_val) != INTEGER_CST)
14173 return NULL_TREE;
14174 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14175 NULL_TREE);
14176 }
14177
14178 return NULL_TREE;
14179 }
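
/* Illustrative summary of the transformations above (hypothetical
   declarations, not from the original source): given

     int fooarray[4];
     _Complex double complexfoo;

   the folder rewrites

     *(int *)&fooarray           => fooarray[0]
     *(double *)&complexfoo      => __real__ complexfoo
     ((double *)&complexfoo)[1]  => __imag__ complexfoo

   and returns NULL_TREE whenever no such simplification applies.  */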
14180
14181 /* Builds an expression for an indirection through T, simplifying some
14182 cases. */
14183
14184 tree
14185 build_fold_indirect_ref_loc (location_t loc, tree t)
14186 {
14187 tree type = TREE_TYPE (TREE_TYPE (t));
14188 tree sub = fold_indirect_ref_1 (loc, type, t);
14189
14190 if (sub)
14191 return sub;
14192
14193 return build1_loc (loc, INDIRECT_REF, type, t);
14194 }
14195
14196 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14197
14198 tree
14199 fold_indirect_ref_loc (location_t loc, tree t)
14200 {
14201 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14202
14203 if (sub)
14204 return sub;
14205 else
14206 return t;
14207 }
14208
14209 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14210 whose result is ignored. The type of the returned tree need not be
14211 the same as the original expression. */
14212
14213 tree
14214 fold_ignored_result (tree t)
14215 {
14216 if (!TREE_SIDE_EFFECTS (t))
14217 return integer_zero_node;
14218
14219 for (;;)
14220 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14221 {
14222 case tcc_unary:
14223 t = TREE_OPERAND (t, 0);
14224 break;
14225
14226 case tcc_binary:
14227 case tcc_comparison:
14228 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14229 t = TREE_OPERAND (t, 0);
14230 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14231 t = TREE_OPERAND (t, 1);
14232 else
14233 return t;
14234 break;
14235
14236 case tcc_expression:
14237 switch (TREE_CODE (t))
14238 {
14239 case COMPOUND_EXPR:
14240 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14241 return t;
14242 t = TREE_OPERAND (t, 0);
14243 break;
14244
14245 case COND_EXPR:
14246 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14247 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14248 return t;
14249 t = TREE_OPERAND (t, 0);
14250 break;
14251
14252 default:
14253 return t;
14254 }
14255 break;
14256
14257 default:
14258 return t;
14259 }
14260 }
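
/* Illustrative sketch (not from the original source): if SUM is the
   GENERIC tree for (x + f ()) and its value is ignored,

     tree stripped = fold_ignored_result (sum);

   strips the side-effect-free operand X and returns the call f (),
   while a tree with no side effects at all folds to
   integer_zero_node.  */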
14261
14262 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14263
14264 tree
14265 round_up_loc (location_t loc, tree value, unsigned int divisor)
14266 {
14267 tree div = NULL_TREE;
14268
14269 if (divisor == 1)
14270 return value;
14271
14272 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14273 have to do anything. Only do this check when VALUE is not a
14274 constant, because for a constant the check is more expensive than
14275 simply doing the rounding. */
14276 if (TREE_CODE (value) != INTEGER_CST)
14277 {
14278 div = build_int_cst (TREE_TYPE (value), divisor);
14279
14280 if (multiple_of_p (TREE_TYPE (value), value, div))
14281 return value;
14282 }
14283
14284 /* If divisor is a power of two, simplify this to bit manipulation. */
14285 if (divisor == (divisor & -divisor))
14286 {
14287 if (TREE_CODE (value) == INTEGER_CST)
14288 {
14289 wide_int val = value;
14290 bool overflow_p;
14291
14292 if ((val & (divisor - 1)) == 0)
14293 return value;
14294
14295 overflow_p = TREE_OVERFLOW (value);
14296 val += divisor - 1;
14297 val &= - (int) divisor;
14298 if (val == 0)
14299 overflow_p = true;
14300
14301 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14302 }
14303 else
14304 {
14305 tree t;
14306
14307 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14308 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14309 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14310 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14311 }
14312 }
14313 else
14314 {
14315 if (!div)
14316 div = build_int_cst (TREE_TYPE (value), divisor);
14317 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14318 value = size_binop_loc (loc, MULT_EXPR, value, div);
14319 }
14320
14321 return value;
14322 }
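
/* Worked example (illustrative): rounding 10 up to a multiple of 8
   takes the power-of-two path above and computes (10 + 7) & -8 = 16:

     tree ten = build_int_cst (sizetype, 10);
     tree r = round_up_loc (UNKNOWN_LOCATION, ten, 8);

   while a non-power-of-two divisor such as 6 yields
   CEIL_DIV_EXPR (10, 6) * 6 = 12.  */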
14323
14324 /* Likewise, but round down. */
14325
14326 tree
14327 round_down_loc (location_t loc, tree value, int divisor)
14328 {
14329 tree div = NULL_TREE;
14330
14331 gcc_assert (divisor > 0);
14332 if (divisor == 1)
14333 return value;
14334
14335 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14336 have to do anything. Only do this check when VALUE is not a
14337 constant, because for a constant the check is more expensive than
14338 simply doing the rounding. */
14339 if (TREE_CODE (value) != INTEGER_CST)
14340 {
14341 div = build_int_cst (TREE_TYPE (value), divisor);
14342
14343 if (multiple_of_p (TREE_TYPE (value), value, div))
14344 return value;
14345 }
14346
14347 /* If divisor is a power of two, simplify this to bit manipulation. */
14348 if (divisor == (divisor & -divisor))
14349 {
14350 tree t;
14351
14352 t = build_int_cst (TREE_TYPE (value), -divisor);
14353 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14354 }
14355 else
14356 {
14357 if (!div)
14358 div = build_int_cst (TREE_TYPE (value), divisor);
14359 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14360 value = size_binop_loc (loc, MULT_EXPR, value, div);
14361 }
14362
14363 return value;
14364 }
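
/* Worked example (illustrative): rounding 10 down to a multiple of 8
   computes 10 & -8 = 8:

     tree ten = build_int_cst (sizetype, 10);
     tree r = round_down_loc (UNKNOWN_LOCATION, ten, 8);

   and a non-power-of-two divisor such as 6 yields
   FLOOR_DIV_EXPR (10, 6) * 6 = 6.  */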
14365
14366 /* Returns the pointer to the base of the object addressed by EXP and
14367 extracts the information about the offset of the access, storing it
14368 in *PBITPOS and *POFFSET. */
14369
14370 static tree
14371 split_address_to_core_and_offset (tree exp,
14372 HOST_WIDE_INT *pbitpos, tree *poffset)
14373 {
14374 tree core;
14375 machine_mode mode;
14376 int unsignedp, reversep, volatilep;
14377 HOST_WIDE_INT bitsize;
14378 location_t loc = EXPR_LOCATION (exp);
14379
14380 if (TREE_CODE (exp) == ADDR_EXPR)
14381 {
14382 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14383 poffset, &mode, &unsignedp, &reversep,
14384 &volatilep, false);
14385 core = build_fold_addr_expr_loc (loc, core);
14386 }
14387 else
14388 {
14389 core = exp;
14390 *pbitpos = 0;
14391 *poffset = NULL_TREE;
14392 }
14393
14394 return core;
14395 }
14396
14397 /* Returns true if the addresses of E1 and E2 differ by a constant,
14398 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14399
14400 bool
14401 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14402 {
14403 tree core1, core2;
14404 HOST_WIDE_INT bitpos1, bitpos2;
14405 tree toffset1, toffset2, tdiff, type;
14406
14407 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14408 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14409
14410 if (bitpos1 % BITS_PER_UNIT != 0
14411 || bitpos2 % BITS_PER_UNIT != 0
14412 || !operand_equal_p (core1, core2, 0))
14413 return false;
14414
14415 if (toffset1 && toffset2)
14416 {
14417 type = TREE_TYPE (toffset1);
14418 if (type != TREE_TYPE (toffset2))
14419 toffset2 = fold_convert (type, toffset2);
14420
14421 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14422 if (!cst_and_fits_in_hwi (tdiff))
14423 return false;
14424
14425 *diff = int_cst_value (tdiff);
14426 }
14427 else if (toffset1 || toffset2)
14428 {
14429 /* If only one of the offsets is non-constant, the difference cannot
14430 be a constant. */
14431 return false;
14432 }
14433 else
14434 *diff = 0;
14435
14436 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14437 return true;
14438 }
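
/* Illustrative sketch (hypothetical trees, not from the original
   source): for E1 = &a[3] and E2 = &a[1], where A is an array of
   4-byte ints, the cores compare equal and

     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (e1, e2, &diff);

   should set OK to true and DIFF to 8.  If either address involves a
   non-constant offset, the function returns false.  */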
14439
14440 /* Return OFF converted to a pointer offset type suitable as an offset
14441 operand for POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14442 tree
14443 convert_to_ptrofftype_loc (location_t loc, tree off)
14444 {
14445 return fold_convert_loc (loc, sizetype, off);
14446 }
14447
14448 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14449 tree
14450 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14451 {
14452 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14453 ptr, convert_to_ptrofftype_loc (loc, off));
14454 }
14455
14456 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14457 tree
14458 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14459 {
14460 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14461 ptr, size_int (off));
14462 }
14463
14464 /* Return a char pointer for a C string if it is a string constant
14465 or the sum of a string constant and an integer constant. */
14466
14467 const char *
14468 c_getstr (tree src)
14469 {
14470 tree offset_node;
14471
14472 src = string_constant (src, &offset_node);
14473 if (src == 0)
14474 return 0;
14475
14476 if (offset_node == 0)
14477 return TREE_STRING_POINTER (src);
14478 else if (!tree_fits_uhwi_p (offset_node)
14479 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14480 return 0;
14481
14482 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14483 }
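
/* Illustrative sketch (not from the original source): for a tree SRC
   representing the address "hello" + 2,

     const char *p = c_getstr (src);

   should return a pointer to "llo"; a non-constant SRC, or an offset
   past the end of the string, yields a null pointer.  */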