1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
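
/* An added illustration (a sketch, not part of the original file): the
   entry points compose so that constant operands fold immediately.
   Assuming sizetype operands:

     tree four = size_int (4);
     tree eight = size_int (8);
     tree sum = size_binop (PLUS_EXPR, four, eight);

   SUM is an INTEGER_CST of type sizetype with value 12, not a
   PLUS_EXPR tree.  */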
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
83
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
87
88 /* The following constants represent a bit-based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
108 };
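
/* An added illustration (not part of the original file): the low three
   bits encode LT, EQ and GT, and bit 3 encodes UNORD, so combining two
   comparisons of the same operands reduces to bitwise operations on
   the codes:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_GE & COMPCODE_LE == COMPCODE_EQ   (6 & 3 == 2)

   which is what makes the AND and OR transformations cheap.  */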
109
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (location_t, tree, tree, enum tree_code,
113 tree *, tree *, tree *, int);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int operand_equal_for_comparison_p (tree, tree, tree);
118 static int twoval_comparison_p (tree, tree *, tree *, int *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree make_bit_field_ref (location_t, tree, tree,
121 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
122 static tree optimize_bit_field_compare (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
125 HOST_WIDE_INT *,
126 machine_mode *, int *, int *, int *,
127 tree *, tree *);
128 static int simple_operand_p (const_tree);
129 static bool simple_operand_p_2 (tree);
130 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
131 static tree range_predecessor (tree);
132 static tree range_successor (tree);
133 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
135 static tree unextend (tree, int, int, tree);
136 static tree optimize_minmax_comparison (location_t, enum tree_code,
137 tree, tree, tree);
138 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
139 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
140 static tree fold_binary_op_with_conditional_arg (location_t,
141 enum tree_code, tree,
142 tree, tree,
143 tree, tree, int);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static bool vec_cst_ctor_to_array (tree, tree *);
152
153
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
156
157 static location_t
158 expr_location_or (tree t, location_t loc)
159 {
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
162 }
163
164 /* Similar to protected_set_expr_location, but never modifies X in place;
165 if the location can and needs to be set, X is unshared first. */
166
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
169 {
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
175 {
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
178 }
179 return x;
180 }
181 \f
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
185
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
188 {
189 widest_int quo;
190
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
194
195 return NULL_TREE;
196 }
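
/* Example (an added illustration): for INTEGER_CSTs A = 12 and B = 4,
   div_if_zero_remainder (a, b) returns an INTEGER_CST 3; for A = 13
   the division is inexact and NULL_TREE is returned.  */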
197 \f
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
206
207 static int fold_deferring_overflow_warnings;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
213
214 static const char* fold_deferred_overflow_warning;
215
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
218
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
220
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
223
224 void
225 fold_defer_overflow_warnings (void)
226 {
227 ++fold_deferring_overflow_warnings;
228 }
229
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
238
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
241 {
242 const char *warnmsg;
243 location_t locus;
244
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
248 {
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
254 }
255
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
258
259 if (!issue || warnmsg == NULL)
260 return;
261
262 if (gimple_no_warning_p (stmt))
263 return;
264
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
269
270 if (!issue_strict_overflow_warning (code))
271 return;
272
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
278 }
279
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
282
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
285 {
286 fold_undefer_overflow_warnings (false, NULL, 0);
287 }
288
289 /* Whether we are deferring overflow warnings. */
290
291 bool
292 fold_deferring_overflow_warnings_p (void)
293 {
294 return fold_deferring_overflow_warnings > 0;
295 }
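
/* An added usage sketch (EXPR and STMT are hypothetical locals, not
   from the original file): callers bracket folding that might warn
   about undefined signed overflow, and only let the warning out if
   the folded result is actually used:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (folded != expr, stmt, 0);

   Nesting works because a counter, not a flag, tracks deferral.  */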
296
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
299
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
302 {
303 if (fold_deferring_overflow_warnings > 0)
304 {
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
307 {
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
310 }
311 }
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
314 }
315 \f
316 /* Return true if the built-in mathematical function specified by FN
317 is odd, i.e. -f(x) == f(-x). */
318
319 bool
320 negate_mathfn_p (combined_fn fn)
321 {
322 switch (fn)
323 {
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_SIN:
343 CASE_CFN_SINH:
344 CASE_CFN_TAN:
345 CASE_CFN_TANH:
346 CASE_CFN_TRUNC:
347 return true;
348
349 CASE_CFN_LLRINT:
350 CASE_CFN_LRINT:
351 CASE_CFN_NEARBYINT:
352 CASE_CFN_RINT:
353 return !flag_rounding_math;
354
355 default:
356 break;
357 }
358 return false;
359 }
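
/* Example (an added illustration): sin is odd, so -sin(x) may become
   sin(-x); rint is treated as odd only when -frounding-math is off,
   hence the !flag_rounding_math guard above.  */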
360
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
363
364 bool
365 may_negate_without_overflow_p (const_tree t)
366 {
367 tree type;
368
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
370
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
374
375 return !wi::only_sign_bit_p (t);
376 }
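
/* Example (an added illustration): for a 32-bit signed type the only
   INTEGER_CST rejected is INT_MIN (-2147483648), whose negation is
   not representable; every other value of the type is accepted.  */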
377
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
380
381 static bool
382 negate_expr_p (tree t)
383 {
384 tree type;
385
386 if (t == 0)
387 return false;
388
389 type = TREE_TYPE (t);
390
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
393 {
394 case INTEGER_CST:
395 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
396 return true;
397
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
403
404 case FIXED_CST:
405 return true;
406
407 case NEGATE_EXPR:
408 return !TYPE_OVERFLOW_SANITIZED (type);
409
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
414
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
418
419 case VECTOR_CST:
420 {
421 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
422 return true;
423
424 int count = TYPE_VECTOR_SUBPARTS (type), i;
425
426 for (i = 0; i < count; i++)
427 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
428 return false;
429
430 return true;
431 }
432
433 case COMPLEX_EXPR:
434 return negate_expr_p (TREE_OPERAND (t, 0))
435 && negate_expr_p (TREE_OPERAND (t, 1));
436
437 case CONJ_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0));
439
440 case PLUS_EXPR:
441 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
442 || HONOR_SIGNED_ZEROS (element_mode (type))
443 || (INTEGRAL_TYPE_P (type)
444 && ! TYPE_OVERFLOW_WRAPS (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && (! INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
462
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (type))
465 break;
466 /* INT_MIN/n * n doesn't overflow, but negating one of its operands
467 does overflow when n is a power of two. */
468 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
469 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
470 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
471 && ! integer_pow2p (TREE_OPERAND (t, 0)))
472 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
473 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
474 break;
475
476 /* Fall through. */
477
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
483
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 if (negate_expr_p (TREE_OPERAND (t, 0)))
490 return true;
491 /* In general we can't negate B in A / B, because if A is INT_MIN and
492 B is 1, we may turn this into INT_MIN / -1 which is undefined
493 and actually traps on some architectures. */
494 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
495 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_onep (TREE_OPERAND (t, 1))))
498 return negate_expr_p (TREE_OPERAND (t, 1));
499 break;
500
501 case NOP_EXPR:
502 /* Negate -((double)float) as (double)(-float). */
503 if (TREE_CODE (type) == REAL_TYPE)
504 {
505 tree tem = strip_float_extensions (t);
506 if (tem != t)
507 return negate_expr_p (tem);
508 }
509 break;
510
511 case CALL_EXPR:
512 /* Negate -f(x) as f(-x). */
513 if (negate_mathfn_p (get_call_combined_fn (t)))
514 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 break;
516
517 case RSHIFT_EXPR:
518 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
519 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
520 {
521 tree op1 = TREE_OPERAND (t, 1);
522 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
523 return true;
524 }
525 break;
526
527 default:
528 break;
529 }
530 return false;
531 }
532
533 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
534 simplification is possible.
535 If negate_expr_p would return true for T, NULL_TREE will never be
536 returned. */
537
538 static tree
539 fold_negate_expr (location_t loc, tree t)
540 {
541 tree type = TREE_TYPE (t);
542 tree tem;
543
544 switch (TREE_CODE (t))
545 {
546 /* Convert - (~A) to A + 1. */
547 case BIT_NOT_EXPR:
548 if (INTEGRAL_TYPE_P (type))
549 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
550 build_one_cst (type));
551 break;
552
553 case INTEGER_CST:
554 tem = fold_negate_const (t, type);
555 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
556 || (ANY_INTEGRAL_TYPE_P (type)
557 && !TYPE_OVERFLOW_TRAPS (type)
558 && TYPE_OVERFLOW_WRAPS (type))
559 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
560 return tem;
561 break;
562
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case FIXED_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
570
571 case COMPLEX_CST:
572 {
573 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
574 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
575 if (rpart && ipart)
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case VECTOR_CST:
581 {
582 int count = TYPE_VECTOR_SUBPARTS (type), i;
583 tree *elts = XALLOCAVEC (tree, count);
584
585 for (i = 0; i < count; i++)
586 {
587 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
588 if (elts[i] == NULL_TREE)
589 return NULL_TREE;
590 }
591
592 return build_vector (type, elts);
593 }
594
595 case COMPLEX_EXPR:
596 if (negate_expr_p (t))
597 return fold_build2_loc (loc, COMPLEX_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
599 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
600 break;
601
602 case CONJ_EXPR:
603 if (negate_expr_p (t))
604 return fold_build1_loc (loc, CONJ_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
606 break;
607
608 case NEGATE_EXPR:
609 if (!TYPE_OVERFLOW_SANITIZED (type))
610 return TREE_OPERAND (t, 0);
611 break;
612
613 case PLUS_EXPR:
614 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
615 && !HONOR_SIGNED_ZEROS (element_mode (type)))
616 {
617 /* -(A + B) -> (-B) - A. */
618 if (negate_expr_p (TREE_OPERAND (t, 1))
619 && reorder_operands_p (TREE_OPERAND (t, 0),
620 TREE_OPERAND (t, 1)))
621 {
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
625 }
626
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
629 {
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
633 }
634 }
635 break;
636
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
640 && !HONOR_SIGNED_ZEROS (element_mode (type))
641 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
642 return fold_build2_loc (loc, MINUS_EXPR, type,
643 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
644 break;
645
646 case MULT_EXPR:
647 if (TYPE_UNSIGNED (type))
648 break;
649
650 /* Fall through. */
651
652 case RDIV_EXPR:
653 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
654 {
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 TREE_OPERAND (t, 0), negate_expr (tem));
659 tem = TREE_OPERAND (t, 0);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 negate_expr (tem), TREE_OPERAND (t, 1));
663 }
664 break;
665
666 case TRUNC_DIV_EXPR:
667 case ROUND_DIV_EXPR:
668 case EXACT_DIV_EXPR:
669 if (TYPE_UNSIGNED (type))
670 break;
671 if (negate_expr_p (TREE_OPERAND (t, 0)))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 negate_expr (TREE_OPERAND (t, 0)),
674 TREE_OPERAND (t, 1));
675 /* In general we can't negate B in A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. */
678 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
679 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
680 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
681 && ! integer_onep (TREE_OPERAND (t, 1))))
682 && negate_expr_p (TREE_OPERAND (t, 1)))
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0),
685 negate_expr (TREE_OPERAND (t, 1)));
686 break;
687
688 case NOP_EXPR:
689 /* Convert -((double)float) into (double)(-float). */
690 if (TREE_CODE (type) == REAL_TYPE)
691 {
692 tem = strip_float_extensions (t);
693 if (tem != t && negate_expr_p (tem))
694 return fold_convert_loc (loc, type, negate_expr (tem));
695 }
696 break;
697
698 case CALL_EXPR:
699 /* Negate -f(x) as f(-x). */
700 if (negate_mathfn_p (get_call_combined_fn (t))
701 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
702 {
703 tree fndecl, arg;
704
705 fndecl = get_callee_fndecl (t);
706 arg = negate_expr (CALL_EXPR_ARG (t, 0));
707 return build_call_expr_loc (loc, fndecl, 1, arg);
708 }
709 break;
710
711 case RSHIFT_EXPR:
712 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
713 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
714 {
715 tree op1 = TREE_OPERAND (t, 1);
716 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
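
/* Examples of folds performed above (an added illustration):

     -(~a)    -> a + 1     (BIT_NOT_EXPR, integral types)
     -(a - b) -> b - a     (MINUS_EXPR, no signed zeros)
     -(a / 3) -> a / -3    (division, constant divisor != 1)

   each applied only under the overflow and FP guards spelled out in
   the corresponding case.  */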
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
736 negated in a simpler way. Also allow T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead. If a variable part is of pointer
772 type, it is negated after converting to TYPE. This prevents us from
773 generating an illegal pointer MINUS expression. LOC is the location of
774 the converted variable part.
775
776 If IN is itself a literal or constant, return it as appropriate.
777
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
780
781 static tree
782 split_tree (location_t loc, tree in, tree type, enum tree_code code,
783 tree *conp, tree *litp, tree *minus_litp, int negate_p)
784 {
785 tree var = 0;
786
787 *conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
790
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
793
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
806 {
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
811
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
819
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
824
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
828 var = in;
829 else if (op0 != 0)
830 var = op0;
831 else
832 var = op1, neg_var_p = neg1_p;
833
834 /* Now do any needed negations. */
835 if (neg_litp_p)
836 *minus_litp = *litp, *litp = 0;
837 if (neg_conp_p)
838 *conp = negate_expr (*conp);
839 if (neg_var_p)
840 {
841 /* Convert to TYPE before negating a pointer type expr. */
842 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
843 var = fold_convert_loc (loc, type, var);
844 var = negate_expr (var);
845 }
846 }
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
849 {
850 /* -X - 1 is folded to ~X, undo that here. */
851 *minus_litp = build_one_cst (TREE_TYPE (in));
852 var = negate_expr (TREE_OPERAND (in, 0));
853 }
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else
857 var = in;
858
859 if (negate_p)
860 {
861 if (*litp)
862 *minus_litp = *litp, *litp = 0;
863 else if (*minus_litp)
864 *litp = *minus_litp, *minus_litp = 0;
865 *conp = negate_expr (*conp);
866 /* Convert to TYPE before negating a pointer type expr. */
867 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
868 var = fold_convert_loc (loc, type, var);
869 var = negate_expr (var);
870 }
871
872 return var;
873 }
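
/* Examples (an added illustration), with CODE = PLUS_EXPR:

     IN = x + 3  ->  *LITP = 3,        variable part x
     IN = x - 3  ->  *MINUS_LITP = 3,  variable part x
     IN = ~x     ->  *MINUS_LITP = 1,  variable part -x

   the last reflecting that -x - 1 was earlier folded to ~x.  */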
874
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
879
880 static tree
881 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
882 {
883 if (t1 == 0)
884 return t2;
885 else if (t2 == 0)
886 return t1;
887
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
892 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
893 {
894 if (code == PLUS_EXPR)
895 {
896 if (TREE_CODE (t1) == NEGATE_EXPR)
897 return build2_loc (loc, MINUS_EXPR, type,
898 fold_convert_loc (loc, type, t2),
899 fold_convert_loc (loc, type,
900 TREE_OPERAND (t1, 0)));
901 else if (TREE_CODE (t2) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t2, 0)));
906 else if (integer_zerop (t2))
907 return fold_convert_loc (loc, type, t1);
908 }
909 else if (code == MINUS_EXPR)
910 {
911 if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
913 }
914
915 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
917 }
918
919 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
921 }
922 \f
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
925
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
928 {
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
933
934 switch (code)
935 {
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
941
942 default:
943 break;
944 }
945
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
949 }
950
951
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
955
956 static tree
957 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
958 int overflowable)
959 {
960 wide_int res;
961 tree t;
962 tree type = TREE_TYPE (arg1);
963 signop sign = TYPE_SIGN (type);
964 bool overflow = false;
965
966 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
967 TYPE_SIGN (TREE_TYPE (parg2)));
968
969 switch (code)
970 {
971 case BIT_IOR_EXPR:
972 res = wi::bit_or (arg1, arg2);
973 break;
974
975 case BIT_XOR_EXPR:
976 res = wi::bit_xor (arg1, arg2);
977 break;
978
979 case BIT_AND_EXPR:
980 res = wi::bit_and (arg1, arg2);
981 break;
982
983 case RSHIFT_EXPR:
984 case LSHIFT_EXPR:
985 if (wi::neg_p (arg2))
986 {
987 arg2 = -arg2;
988 if (code == RSHIFT_EXPR)
989 code = LSHIFT_EXPR;
990 else
991 code = RSHIFT_EXPR;
992 }
993
994 if (code == RSHIFT_EXPR)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res = wi::rshift (arg1, arg2, sign);
999 else
1000 res = wi::lshift (arg1, arg2);
1001 break;
1002
1003 case RROTATE_EXPR:
1004 case LROTATE_EXPR:
1005 if (wi::neg_p (arg2))
1006 {
1007 arg2 = -arg2;
1008 if (code == RROTATE_EXPR)
1009 code = LROTATE_EXPR;
1010 else
1011 code = RROTATE_EXPR;
1012 }
1013
1014 if (code == RROTATE_EXPR)
1015 res = wi::rrotate (arg1, arg2);
1016 else
1017 res = wi::lrotate (arg1, arg2);
1018 break;
1019
1020 case PLUS_EXPR:
1021 res = wi::add (arg1, arg2, sign, &overflow);
1022 break;
1023
1024 case MINUS_EXPR:
1025 res = wi::sub (arg1, arg2, sign, &overflow);
1026 break;
1027
1028 case MULT_EXPR:
1029 res = wi::mul (arg1, arg2, sign, &overflow);
1030 break;
1031
1032 case MULT_HIGHPART_EXPR:
1033 res = wi::mul_high (arg1, arg2, sign);
1034 break;
1035
1036 case TRUNC_DIV_EXPR:
1037 case EXACT_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case FLOOR_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_floor (arg1, arg2, sign, &overflow);
1047 break;
1048
1049 case CEIL_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case ROUND_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_round (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case TRUNC_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case FLOOR_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case CEIL_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case ROUND_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_round (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case MIN_EXPR:
1086 res = wi::min (arg1, arg2, sign);
1087 break;
1088
1089 case MAX_EXPR:
1090 res = wi::max (arg1, arg2, sign);
1091 break;
1092
1093 default:
1094 return NULL_TREE;
1095 }
1096
1097 t = force_fit_type (type, res, overflowable,
1098 (((sign == SIGNED || overflowable == -1)
1099 && overflow)
1100 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1101
1102 return t;
1103 }
1104
1105 tree
1106 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1107 {
1108 return int_const_binop_1 (code, arg1, arg2, 1);
1109 }
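
/* Example (an added illustration):

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 5);
     tree t = int_const_binop (MULT_EXPR, a, b);

   yields an INTEGER_CST of value 35; the division and modulus codes
   instead return NULL_TREE when the divisor is zero.  */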
1110
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1115
1116 static tree
1117 const_binop (enum tree_code code, tree arg1, tree arg2)
1118 {
1119 /* Sanity check for the recursive cases. */
1120 if (!arg1 || !arg2)
1121 return NULL_TREE;
1122
1123 STRIP_NOPS (arg1);
1124 STRIP_NOPS (arg2);
1125
1126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1127 {
1128 if (code == POINTER_PLUS_EXPR)
1129 return int_const_binop (PLUS_EXPR,
1130 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1131
1132 return int_const_binop (code, arg1, arg2);
1133 }
1134
1135 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1136 {
1137 machine_mode mode;
1138 REAL_VALUE_TYPE d1;
1139 REAL_VALUE_TYPE d2;
1140 REAL_VALUE_TYPE value;
1141 REAL_VALUE_TYPE result;
1142 bool inexact;
1143 tree t, type;
1144
1145 /* The following codes are handled by real_arithmetic. */
1146 switch (code)
1147 {
1148 case PLUS_EXPR:
1149 case MINUS_EXPR:
1150 case MULT_EXPR:
1151 case RDIV_EXPR:
1152 case MIN_EXPR:
1153 case MAX_EXPR:
1154 break;
1155
1156 default:
1157 return NULL_TREE;
1158 }
1159
1160 d1 = TREE_REAL_CST (arg1);
1161 d2 = TREE_REAL_CST (arg2);
1162
1163 type = TREE_TYPE (arg1);
1164 mode = TYPE_MODE (type);
1165
1166 /* Don't perform the operation if we honor signaling NaNs and
1167 either operand is a signaling NaN. */
1168 if (HONOR_SNANS (mode)
1169 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1170 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1171 return NULL_TREE;
1172
1173 /* Don't perform the operation if it would raise a division
1174 by zero exception. */
1175 if (code == RDIV_EXPR
1176 && real_equal (&d2, &dconst0)
1177 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1178 return NULL_TREE;
1179
1180 /* If either operand is a NaN, just return it. Otherwise, set up
1181 for floating-point trap; we return an overflow. */
1182 if (REAL_VALUE_ISNAN (d1))
1183 {
1184 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1185 is off. */
1186 d1.signalling = 0;
1187 t = build_real (type, d1);
1188 return t;
1189 }
1190 else if (REAL_VALUE_ISNAN (d2))
1191 {
1192 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1193 is off. */
1194 d2.signalling = 0;
1195 t = build_real (type, d2);
1196 return t;
1197 }
1198
1199 inexact = real_arithmetic (&value, code, &d1, &d2);
1200 real_convert (&result, mode, &value);
1201
1202 /* Don't constant fold this floating point operation if
1203 the result has overflowed and flag_trapping_math. */
1204 if (flag_trapping_math
1205 && MODE_HAS_INFINITIES (mode)
1206 && REAL_VALUE_ISINF (result)
1207 && !REAL_VALUE_ISINF (d1)
1208 && !REAL_VALUE_ISINF (d2))
1209 return NULL_TREE;
1210
1211 /* Don't constant fold this floating point operation if the
1212 result may depend upon the run-time rounding mode and
1213 flag_rounding_math is set, or if GCC's software emulation
1214 is unable to accurately represent the result. */
1215 if ((flag_rounding_math
1216 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1217 && (inexact || !real_identical (&result, &value)))
1218 return NULL_TREE;
1219
1220 t = build_real (type, result);
1221
1222 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1223 return t;
1224 }
1225
1226 if (TREE_CODE (arg1) == FIXED_CST)
1227 {
1228 FIXED_VALUE_TYPE f1;
1229 FIXED_VALUE_TYPE f2;
1230 FIXED_VALUE_TYPE result;
1231 tree t, type;
1232 int sat_p;
1233 bool overflow_p;
1234
1235 /* The following codes are handled by fixed_arithmetic. */
1236 switch (code)
1237 {
1238 case PLUS_EXPR:
1239 case MINUS_EXPR:
1240 case MULT_EXPR:
1241 case TRUNC_DIV_EXPR:
1242 if (TREE_CODE (arg2) != FIXED_CST)
1243 return NULL_TREE;
1244 f2 = TREE_FIXED_CST (arg2);
1245 break;
1246
1247 case LSHIFT_EXPR:
1248 case RSHIFT_EXPR:
1249 {
1250 if (TREE_CODE (arg2) != INTEGER_CST)
1251 return NULL_TREE;
1252 wide_int w2 = arg2;
1253 f2.data.high = w2.elt (1);
1254 f2.data.low = w2.elt (0);
1255 f2.mode = SImode;
1256 }
1257 break;
1258
1259 default:
1260 return NULL_TREE;
1261 }
1262
1263 f1 = TREE_FIXED_CST (arg1);
1264 type = TREE_TYPE (arg1);
1265 sat_p = TYPE_SATURATING (type);
1266 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1267 t = build_fixed (type, result);
1268 /* Propagate overflow flags. */
1269 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1270 TREE_OVERFLOW (t) = 1;
1271 return t;
1272 }
1273
1274 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1275 {
1276 tree type = TREE_TYPE (arg1);
1277 tree r1 = TREE_REALPART (arg1);
1278 tree i1 = TREE_IMAGPART (arg1);
1279 tree r2 = TREE_REALPART (arg2);
1280 tree i2 = TREE_IMAGPART (arg2);
1281 tree real, imag;
1282
1283 switch (code)
1284 {
1285 case PLUS_EXPR:
1286 case MINUS_EXPR:
1287 real = const_binop (code, r1, r2);
1288 imag = const_binop (code, i1, i2);
1289 break;
1290
1291 case MULT_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_mul);
1296
1297 real = const_binop (MINUS_EXPR,
1298 const_binop (MULT_EXPR, r1, r2),
1299 const_binop (MULT_EXPR, i1, i2));
1300 imag = const_binop (PLUS_EXPR,
1301 const_binop (MULT_EXPR, r1, i2),
1302 const_binop (MULT_EXPR, i1, r2));
1303 break;
1304
1305 case RDIV_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_div);
1310 /* Fall through. */
1311 case TRUNC_DIV_EXPR:
1312 case CEIL_DIV_EXPR:
1313 case FLOOR_DIV_EXPR:
1314 case ROUND_DIV_EXPR:
1315 if (flag_complex_method == 0)
1316 {
1317 /* Keep this algorithm in sync with
1318 tree-complex.c:expand_complex_div_straight().
1319
1320 Expand complex division to scalars, straightforward algorithm.
1321 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1322 t = br*br + bi*bi
1323 */
1324 tree magsquared
1325 = const_binop (PLUS_EXPR,
1326 const_binop (MULT_EXPR, r2, r2),
1327 const_binop (MULT_EXPR, i2, i2));
1328 tree t1
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r1, r2),
1331 const_binop (MULT_EXPR, i1, i2));
1332 tree t2
1333 = const_binop (MINUS_EXPR,
1334 const_binop (MULT_EXPR, i1, r2),
1335 const_binop (MULT_EXPR, r1, i2));
1336
1337 real = const_binop (code, t1, magsquared);
1338 imag = const_binop (code, t2, magsquared);
1339 }
1340 else
1341 {
1342 /* Keep this algorithm in sync with
1343 tree-complex.c:expand_complex_div_wide().
1344
1345 Expand complex division to scalars, modified algorithm to minimize
1346 overflow with wide input ranges. */
1347 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1348 fold_abs_const (r2, TREE_TYPE (type)),
1349 fold_abs_const (i2, TREE_TYPE (type)));
1350
1351 if (integer_nonzerop (compare))
1352 {
1353 /* In the TRUE branch, we compute
1354 ratio = br/bi;
1355 div = (br * ratio) + bi;
1356 tr = (ar * ratio) + ai;
1357 ti = (ai * ratio) - ar;
1358 tr = tr / div;
1359 ti = ti / div; */
1360 tree ratio = const_binop (code, r2, i2);
1361 tree div = const_binop (PLUS_EXPR, i2,
1362 const_binop (MULT_EXPR, r2, ratio));
1363 real = const_binop (MULT_EXPR, r1, ratio);
1364 real = const_binop (PLUS_EXPR, real, i1);
1365 real = const_binop (code, real, div);
1366
1367 imag = const_binop (MULT_EXPR, i1, ratio);
1368 imag = const_binop (MINUS_EXPR, imag, r1);
1369 imag = const_binop (code, imag, div);
1370 }
1371 else
1372 {
1373 /* In the FALSE branch, we compute
1374 ratio = bi/br;
1375 div = (bi * ratio) + br;
1376 tr = (ai * ratio) + ar;
1377 ti = ai - (ar * ratio);
1378 tr = tr / div;
1379 ti = ti / div; */
1380 tree ratio = const_binop (code, i2, r2);
1381 tree div = const_binop (PLUS_EXPR, r2,
1382 const_binop (MULT_EXPR, i2, ratio));
1383
1384 real = const_binop (MULT_EXPR, i1, ratio);
1385 real = const_binop (PLUS_EXPR, real, r1);
1386 real = const_binop (code, real, div);
1387
1388 imag = const_binop (MULT_EXPR, r1, ratio);
1389 imag = const_binop (MINUS_EXPR, i1, imag);
1390 imag = const_binop (code, imag, div);
1391 }
1392 }
1393 break;
1394
1395 default:
1396 return NULL_TREE;
1397 }
1398
1399 if (real && imag)
1400 return build_complex (type, real, imag);
1401 }
1402
1403 if (TREE_CODE (arg1) == VECTOR_CST
1404 && TREE_CODE (arg2) == VECTOR_CST)
1405 {
1406 tree type = TREE_TYPE (arg1);
1407 int count = TYPE_VECTOR_SUBPARTS (type), i;
1408 tree *elts = XALLOCAVEC (tree, count);
1409
1410 for (i = 0; i < count; i++)
1411 {
1412 tree elem1 = VECTOR_CST_ELT (arg1, i);
1413 tree elem2 = VECTOR_CST_ELT (arg2, i);
1414
1415 elts[i] = const_binop (code, elem1, elem2);
1416
1417 /* It is possible that const_binop cannot handle the given
1418 code and returns NULL_TREE. */
1419 if (elts[i] == NULL_TREE)
1420 return NULL_TREE;
1421 }
1422
1423 return build_vector (type, elts);
1424 }
1425
1426 /* Shifts allow a scalar offset for a vector. */
1427 if (TREE_CODE (arg1) == VECTOR_CST
1428 && TREE_CODE (arg2) == INTEGER_CST)
1429 {
1430 tree type = TREE_TYPE (arg1);
1431 int count = TYPE_VECTOR_SUBPARTS (type), i;
1432 tree *elts = XALLOCAVEC (tree, count);
1433
1434 for (i = 0; i < count; i++)
1435 {
1436 tree elem1 = VECTOR_CST_ELT (arg1, i);
1437
1438 elts[i] = const_binop (code, elem1, arg2);
1439
1440 /* It is possible that const_binop cannot handle the given
1441 code and returns NULL_TREE. */
1442 if (elts[i] == NULL_TREE)
1443 return NULL_TREE;
1444 }
1445
1446 return build_vector (type, elts);
1447 }
1448 return NULL_TREE;
1449 }
1450
1451 /* Overload that adds a TYPE parameter to be able to dispatch
1452 to fold_relational_const. */
1453
1454 tree
1455 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1456 {
1457 if (TREE_CODE_CLASS (code) == tcc_comparison)
1458 return fold_relational_const (code, type, arg1, arg2);
1459
1460 /* ??? Until we make the const_binop worker take the type of the
1461 result as argument, put those cases that need it here. */
1462 switch (code)
1463 {
1464 case COMPLEX_EXPR:
1465 if ((TREE_CODE (arg1) == REAL_CST
1466 && TREE_CODE (arg2) == REAL_CST)
1467 || (TREE_CODE (arg1) == INTEGER_CST
1468 && TREE_CODE (arg2) == INTEGER_CST))
1469 return build_complex (type, arg1, arg2);
1470 return NULL_TREE;
1471
1472 case VEC_PACK_TRUNC_EXPR:
1473 case VEC_PACK_FIX_TRUNC_EXPR:
1474 {
1475 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1476 tree *elts;
1477
1478 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1479 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1480 if (TREE_CODE (arg1) != VECTOR_CST
1481 || TREE_CODE (arg2) != VECTOR_CST)
1482 return NULL_TREE;
1483
1484 elts = XALLOCAVEC (tree, nelts);
1485 if (!vec_cst_ctor_to_array (arg1, elts)
1486 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1487 return NULL_TREE;
1488
1489 for (i = 0; i < nelts; i++)
1490 {
1491 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1492 ? NOP_EXPR : FIX_TRUNC_EXPR,
1493 TREE_TYPE (type), elts[i]);
1494 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1495 return NULL_TREE;
1496 }
1497
1498 return build_vector (type, elts);
1499 }
1500
1501 case VEC_WIDEN_MULT_LO_EXPR:
1502 case VEC_WIDEN_MULT_HI_EXPR:
1503 case VEC_WIDEN_MULT_EVEN_EXPR:
1504 case VEC_WIDEN_MULT_ODD_EXPR:
1505 {
1506 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1507 unsigned int out, ofs, scale;
1508 tree *elts;
1509
1510 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1511 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1512 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1513 return NULL_TREE;
1514
1515 elts = XALLOCAVEC (tree, nelts * 4);
1516 if (!vec_cst_ctor_to_array (arg1, elts)
1517 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1518 return NULL_TREE;
1519
1520 if (code == VEC_WIDEN_MULT_LO_EXPR)
1521 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1522 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1523 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1524 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1525 scale = 1, ofs = 0;
1526 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1527 scale = 1, ofs = 1;
1528
1529 for (out = 0; out < nelts; out++)
1530 {
1531 unsigned int in1 = (out << scale) + ofs;
1532 unsigned int in2 = in1 + nelts * 2;
1533 tree t1, t2;
1534
1535 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1536 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1537
1538 if (t1 == NULL_TREE || t2 == NULL_TREE)
1539 return NULL_TREE;
1540 elts[out] = const_binop (MULT_EXPR, t1, t2);
1541 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1542 return NULL_TREE;
1543 }
1544
1545 return build_vector (type, elts);
1546 }
1547
1548 default:;
1549 }
1550
1551 if (TREE_CODE_CLASS (code) != tcc_binary)
1552 return NULL_TREE;
1553
1554 /* Make sure TYPE and ARG1 have the same saturating flag. */
1555 gcc_checking_assert (TYPE_SATURATING (type)
1556 == TYPE_SATURATING (TREE_TYPE (arg1)));
1557
1558 return const_binop (code, arg1, arg2);
1559 }
1560
1561 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1562 Return zero if computing the constants is not possible. */
1563
1564 tree
1565 const_unop (enum tree_code code, tree type, tree arg0)
1566 {
1567 /* Don't perform the operation, other than NEGATE and ABS, if
1568 flag_signaling_nans is on and the operand is a signaling NaN. */
1569 if (TREE_CODE (arg0) == REAL_CST
1570 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1571 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1572 && code != NEGATE_EXPR
1573 && code != ABS_EXPR)
1574 return NULL_TREE;
1575
1576 switch (code)
1577 {
1578 CASE_CONVERT:
1579 case FLOAT_EXPR:
1580 case FIX_TRUNC_EXPR:
1581 case FIXED_CONVERT_EXPR:
1582 return fold_convert_const (code, type, arg0);
1583
1584 case ADDR_SPACE_CONVERT_EXPR:
1585 /* If the source address is 0, and the source address space
1586 cannot have a valid object at 0, fold to dest type null. */
1587 if (integer_zerop (arg0)
1588 && !(targetm.addr_space.zero_address_valid
1589 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1590 return fold_convert_const (code, type, arg0);
1591 break;
1592
1593 case VIEW_CONVERT_EXPR:
1594 return fold_view_convert_expr (type, arg0);
1595
1596 case NEGATE_EXPR:
1597 {
1598 /* Can't call fold_negate_const directly here as that doesn't
1599 handle all cases and we might not be able to negate some
1600 constants. */
1601 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1602 if (tem && CONSTANT_CLASS_P (tem))
1603 return tem;
1604 break;
1605 }
1606
1607 case ABS_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1609 return fold_abs_const (arg0, type);
1610 break;
1611
1612 case CONJ_EXPR:
1613 if (TREE_CODE (arg0) == COMPLEX_CST)
1614 {
1615 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1616 TREE_TYPE (type));
1617 return build_complex (type, TREE_REALPART (arg0), ipart);
1618 }
1619 break;
1620
1621 case BIT_NOT_EXPR:
1622 if (TREE_CODE (arg0) == INTEGER_CST)
1623 return fold_not_const (arg0, type);
1624 /* Perform BIT_NOT_EXPR on each element individually. */
1625 else if (TREE_CODE (arg0) == VECTOR_CST)
1626 {
1627 tree *elements;
1628 tree elem;
1629 unsigned count = VECTOR_CST_NELTS (arg0), i;
1630
1631 elements = XALLOCAVEC (tree, count);
1632 for (i = 0; i < count; i++)
1633 {
1634 elem = VECTOR_CST_ELT (arg0, i);
1635 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1636 if (elem == NULL_TREE)
1637 break;
1638 elements[i] = elem;
1639 }
1640 if (i == count)
1641 return build_vector (type, elements);
1642 }
1643 break;
1644
1645 case TRUTH_NOT_EXPR:
1646 if (TREE_CODE (arg0) == INTEGER_CST)
1647 return constant_boolean_node (integer_zerop (arg0), type);
1648 break;
1649
1650 case REALPART_EXPR:
1651 if (TREE_CODE (arg0) == COMPLEX_CST)
1652 return fold_convert (type, TREE_REALPART (arg0));
1653 break;
1654
1655 case IMAGPART_EXPR:
1656 if (TREE_CODE (arg0) == COMPLEX_CST)
1657 return fold_convert (type, TREE_IMAGPART (arg0));
1658 break;
1659
1660 case VEC_UNPACK_LO_EXPR:
1661 case VEC_UNPACK_HI_EXPR:
1662 case VEC_UNPACK_FLOAT_LO_EXPR:
1663 case VEC_UNPACK_FLOAT_HI_EXPR:
1664 {
1665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1666 tree *elts;
1667 enum tree_code subcode;
1668
1669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1670 if (TREE_CODE (arg0) != VECTOR_CST)
1671 return NULL_TREE;
1672
1673 elts = XALLOCAVEC (tree, nelts * 2);
1674 if (!vec_cst_ctor_to_array (arg0, elts))
1675 return NULL_TREE;
1676
1677 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1678 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1679 elts += nelts;
1680
1681 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1682 subcode = NOP_EXPR;
1683 else
1684 subcode = FLOAT_EXPR;
1685
1686 for (i = 0; i < nelts; i++)
1687 {
1688 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1689 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1690 return NULL_TREE;
1691 }
1692
1693 return build_vector (type, elts);
1694 }
1695
1696 case REDUC_MIN_EXPR:
1697 case REDUC_MAX_EXPR:
1698 case REDUC_PLUS_EXPR:
1699 {
1700 unsigned int nelts, i;
1701 tree *elts;
1702 enum tree_code subcode;
1703
1704 if (TREE_CODE (arg0) != VECTOR_CST)
1705 return NULL_TREE;
1706 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1707
1708 elts = XALLOCAVEC (tree, nelts);
1709 if (!vec_cst_ctor_to_array (arg0, elts))
1710 return NULL_TREE;
1711
1712 switch (code)
1713 {
1714 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1715 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1716 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1717 default: gcc_unreachable ();
1718 }
1719
1720 for (i = 1; i < nelts; i++)
1721 {
1722 elts[0] = const_binop (subcode, elts[0], elts[i]);
1723 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1724 return NULL_TREE;
1725 }
1726
1727 return elts[0];
1728 }
1729
1730 default:
1731 break;
1732 }
1733
1734 return NULL_TREE;
1735 }
1736
1737 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1738 indicates which particular sizetype to create. */
1739
1740 tree
1741 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1742 {
1743 return build_int_cst (sizetype_tab[(int) kind], number);
1744 }
1745 \f
1746 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1747 is a tree code. The type of the result is taken from the operands.
1748 Both must be equivalent integer types, ala int_binop_types_match_p.
1749 If the operands are constant, so is the result. */
1750
1751 tree
1752 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1753 {
1754 tree type = TREE_TYPE (arg0);
1755
1756 if (arg0 == error_mark_node || arg1 == error_mark_node)
1757 return error_mark_node;
1758
1759 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1760 TREE_TYPE (arg1)));
1761
1762 /* Handle the special case of two integer constants faster. */
1763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1764 {
1765 /* And some specific cases even faster than that. */
1766 if (code == PLUS_EXPR)
1767 {
1768 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1771 return arg0;
1772 }
1773 else if (code == MINUS_EXPR)
1774 {
1775 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1776 return arg0;
1777 }
1778 else if (code == MULT_EXPR)
1779 {
1780 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1781 return arg1;
1782 }
1783
1784 /* Handle general case of two integer constants. For sizetype
1785 constant calculations we always want to know about overflow,
1786 even in the unsigned case. */
1787 return int_const_binop_1 (code, arg0, arg1, -1);
1788 }
1789
1790 return fold_build2_loc (loc, code, type, arg0, arg1);
1791 }
1792
1793 /* Given two values, either both of sizetype or both of bitsizetype,
1794 compute the difference between the two values. Return the value
1795 in the signed type corresponding to the type of the operands. */
1796
1797 tree
1798 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1799 {
1800 tree type = TREE_TYPE (arg0);
1801 tree ctype;
1802
1803 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1804 TREE_TYPE (arg1)));
1805
1806 /* If the type is already signed, just do the simple thing. */
1807 if (!TYPE_UNSIGNED (type))
1808 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1809
1810 if (type == sizetype)
1811 ctype = ssizetype;
1812 else if (type == bitsizetype)
1813 ctype = sbitsizetype;
1814 else
1815 ctype = signed_type_for (type);
1816
1817 /* If either operand is not a constant, do the conversions to the signed
1818 type and subtract. The hardware will do the right thing with any
1819 overflow in the subtraction. */
1820 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1821 return size_binop_loc (loc, MINUS_EXPR,
1822 fold_convert_loc (loc, ctype, arg0),
1823 fold_convert_loc (loc, ctype, arg1));
1824
1825 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1826 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1827 overflow) and negate (which can't either). Special-case a result
1828 of zero while we're here. */
1829 if (tree_int_cst_equal (arg0, arg1))
1830 return build_int_cst (ctype, 0);
1831 else if (tree_int_cst_lt (arg1, arg0))
1832 return fold_convert_loc (loc, ctype,
1833 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1834 else
1835 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1836 fold_convert_loc (loc, ctype,
1837 size_binop_loc (loc,
1838 MINUS_EXPR,
1839 arg1, arg0)));
1840 }
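
/* Example (an added illustration): with both operands of sizetype,

     size_diffop_loc (loc, size_int (2), size_int (5))

   returns an ssizetype constant -3: the subtraction is done in the
   order that cannot overflow and the result is then negated.  */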
1841 \f
1842 /* A subroutine of fold_convert_const handling conversions of an
1843 INTEGER_CST to another integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_int (tree type, const_tree arg1)
1847 {
1848 /* Given an integer constant, make new constant with new type,
1849 appropriately sign-extended or truncated. Use widest_int
1850 so that any extension is done according to ARG1's type. */
1851 return force_fit_type (type, wi::to_widest (arg1),
1852 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1853 TREE_OVERFLOW (arg1));
1854 }
1855
1856 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1857 to an integer type. */
1858
1859 static tree
1860 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1861 {
1862 bool overflow = false;
1863 tree t;
1864
1865 /* The following code implements the floating point to integer
1866 conversion rules required by the Java Language Specification:
1867 IEEE NaNs are mapped to zero and values that overflow
1868 the target precision saturate, i.e. values greater than
1869 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1870 are mapped to INT_MIN. These semantics are allowed by the
1871 C and C++ standards that simply state that the behavior of
1872 FP-to-integer conversion is unspecified upon overflow. */
1873
1874 wide_int val;
1875 REAL_VALUE_TYPE r;
1876 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1877
1878 switch (code)
1879 {
1880 case FIX_TRUNC_EXPR:
1881 real_trunc (&r, VOIDmode, &x);
1882 break;
1883
1884 default:
1885 gcc_unreachable ();
1886 }
1887
1888 /* If R is NaN, return zero and show we have an overflow. */
1889 if (REAL_VALUE_ISNAN (r))
1890 {
1891 overflow = true;
1892 val = wi::zero (TYPE_PRECISION (type));
1893 }
1894
1895 /* See if R is less than the lower bound or greater than the
1896 upper bound. */
1897
1898 if (! overflow)
1899 {
1900 tree lt = TYPE_MIN_VALUE (type);
1901 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1902 if (real_less (&r, &l))
1903 {
1904 overflow = true;
1905 val = lt;
1906 }
1907 }
1908
1909 if (! overflow)
1910 {
1911 tree ut = TYPE_MAX_VALUE (type);
1912 if (ut)
1913 {
1914 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1915 if (real_less (&u, &r))
1916 {
1917 overflow = true;
1918 val = ut;
1919 }
1920 }
1921 }
1922
1923 if (! overflow)
1924 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1925
1926 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1927 return t;
1928 }
1929
1930 /* A subroutine of fold_convert_const handling conversions of a
1931 FIXED_CST to an integer type. */
1932
1933 static tree
1934 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1935 {
1936 tree t;
1937 double_int temp, temp_trunc;
1938 machine_mode mode;
1939
1940 /* Right shift FIXED_CST to temp by fbit. */
1941 temp = TREE_FIXED_CST (arg1).data;
1942 mode = TREE_FIXED_CST (arg1).mode;
1943 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1944 {
1945 temp = temp.rshift (GET_MODE_FBIT (mode),
1946 HOST_BITS_PER_DOUBLE_INT,
1947 SIGNED_FIXED_POINT_MODE_P (mode));
1948
1949 /* Left shift temp to temp_trunc by fbit. */
1950 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1951 HOST_BITS_PER_DOUBLE_INT,
1952 SIGNED_FIXED_POINT_MODE_P (mode));
1953 }
1954 else
1955 {
1956 temp = double_int_zero;
1957 temp_trunc = double_int_zero;
1958 }
1959
1960 /* If FIXED_CST is negative, we need to round the value toward 0: if the
1961 discarded fractional bits are not zero, add 1 to TEMP. */
1962 if (SIGNED_FIXED_POINT_MODE_P (mode)
1963 && temp_trunc.is_negative ()
1964 && TREE_FIXED_CST (arg1).data != temp_trunc)
1965 temp += double_int_one;
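  /* Worked example (illustrative only): for a signed fixed-point value
     of -2.5, the arithmetic right shift above gives TEMP = -3 (it
     rounds toward negative infinity); shifting back gives TEMP_TRUNC
     = -3.0, which differs from the original -2.5, so the increment
     just above corrects TEMP to the truncated value -2. */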
1966
1967 /* Given a fixed-point constant, make new constant with new type,
1968 appropriately sign-extended or truncated. */
1969 t = force_fit_type (type, temp, -1,
1970 (temp.is_negative ()
1971 && (TYPE_UNSIGNED (type)
1972 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1973 | TREE_OVERFLOW (arg1));
1974
1975 return t;
1976 }
1977
1978 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1979 to another floating point type. */
1980
1981 static tree
1982 fold_convert_const_real_from_real (tree type, const_tree arg1)
1983 {
1984 REAL_VALUE_TYPE value;
1985 tree t;
1986
1987 /* Don't perform the operation if flag_signaling_nans is on
1988 and the operand is a signaling NaN. */
1989 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1990 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1991 return NULL_TREE;
1992
1993 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1994 t = build_real (type, value);
1995
1996 /* If converting an infinity or NAN to a representation that doesn't
1997 have one, set the overflow bit so that we can produce some kind of
1998 error message at the appropriate point if necessary. It's not the
1999 most user-friendly message, but it's better than nothing. */
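  /* Illustrative cases for the checks below (not exhaustive): converting
     a double infinity to a type whose mode has no infinities sets the
     flag directly, while converting a finite value that is too large
     for such a type makes real_convert produce an infinity, which the
     third check recognizes as a genuine overflow because the operand
     itself was finite. */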
2000 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2001 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2002 TREE_OVERFLOW (t) = 1;
2003 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2004 && !MODE_HAS_NANS (TYPE_MODE (type)))
2005 TREE_OVERFLOW (t) = 1;
2006 /* Regular overflow, conversion produced an infinity in a mode that
2007 can't represent infinities. */
2008 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2009 && REAL_VALUE_ISINF (value)
2010 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2011 TREE_OVERFLOW (t) = 1;
2012 else
2013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 return t;
2015 }
2016
2017 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2018 to a floating point type. */
2019
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2022 {
2023 REAL_VALUE_TYPE value;
2024 tree t;
2025
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2028
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
2032
2033 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to another fixed-point type. */
2035
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2038 {
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2042
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2046
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2051 }
2052
2053 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2054 to a fixed-point type. */
2055
2056 static tree
2057 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2058 {
2059 FIXED_VALUE_TYPE value;
2060 tree t;
2061 bool overflow_p;
2062 double_int di;
2063
2064 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2065
2066 di.low = TREE_INT_CST_ELT (arg1, 0);
2067 if (TREE_INT_CST_NUNITS (arg1) == 1)
2068 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2069 else
2070 di.high = TREE_INT_CST_ELT (arg1, 1);
2071
2072 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2073 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2074 TYPE_SATURATING (type));
2075 t = build_fixed (type, value);
2076
2077 /* Propagate overflow flags. */
2078 if (overflow_p | TREE_OVERFLOW (arg1))
2079 TREE_OVERFLOW (t) = 1;
2080 return t;
2081 }
2082
2083 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2084 to a fixed-point type. */
2085
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2088 {
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2092
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
2104 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2105 type TYPE. If no simplification can be done return NULL_TREE. */
2106
2107 static tree
2108 fold_convert_const (enum tree_code code, tree type, tree arg1)
2109 {
2110 if (TREE_TYPE (arg1) == type)
2111 return arg1;
2112
2113 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2114 || TREE_CODE (type) == OFFSET_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return fold_convert_const_int_from_int (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_int_from_real (code, type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_int_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == REAL_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == INTEGER_CST)
2126 return build_real_from_int_cst (type, arg1);
2127 else if (TREE_CODE (arg1) == REAL_CST)
2128 return fold_convert_const_real_from_real (type, arg1);
2129 else if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_real_from_fixed (type, arg1);
2131 }
2132 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2133 {
2134 if (TREE_CODE (arg1) == FIXED_CST)
2135 return fold_convert_const_fixed_from_fixed (type, arg1);
2136 else if (TREE_CODE (arg1) == INTEGER_CST)
2137 return fold_convert_const_fixed_from_int (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_fixed_from_real (type, arg1);
2140 }
2141 else if (TREE_CODE (type) == VECTOR_TYPE)
2142 {
2143 if (TREE_CODE (arg1) == VECTOR_CST
2144 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2145 {
2146 int len = TYPE_VECTOR_SUBPARTS (type);
2147 tree elttype = TREE_TYPE (type);
2148 tree *v = XALLOCAVEC (tree, len);
2149 for (int i = 0; i < len; ++i)
2150 {
2151 tree elt = VECTOR_CST_ELT (arg1, i);
2152 tree cvt = fold_convert_const (code, elttype, elt);
2153 if (cvt == NULL_TREE)
2154 return NULL_TREE;
2155 v[i] = cvt;
2156 }
2157 return build_vector (type, v);
2158 }
2159 }
2160 return NULL_TREE;
2161 }
2162
2163 /* Construct a vector of zero elements of vector type TYPE. */
2164
2165 static tree
2166 build_zero_vector (tree type)
2167 {
2168 tree t;
2169
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2172 }
2173
2174 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2175
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180
2181 if (type == orig)
2182 return true;
2183
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2188
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2191
2192 switch (TREE_CODE (type))
2193 {
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2199
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2206
2207 default:
2208 return false;
2209 }
2210 }
2211
2212 /* Convert expression ARG to type TYPE. Used by the middle-end for
2213 simple conversions in preference to calling the front-end's convert. */
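/* For example (an illustrative sketch, not code from this file):

     tree c = fold_convert_loc (loc, complex_double_type_node,
                                integer_one_node);

   goes through the COMPLEX_TYPE case below, converting the integer to
   the component type and pairing it with a zero imaginary part, here
   folding all the way to the complex constant 1.0 + 0.0i; converting
   an INTEGER_CST to another integer type instead folds to a constant
   immediately via fold_convert_const. */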
2214
2215 tree
2216 fold_convert_loc (location_t loc, tree type, tree arg)
2217 {
2218 tree orig = TREE_TYPE (arg);
2219 tree tem;
2220
2221 if (type == orig)
2222 return arg;
2223
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
2227 return error_mark_node;
2228
2229 switch (TREE_CODE (type))
2230 {
2231 case POINTER_TYPE:
2232 case REFERENCE_TYPE:
2233 /* Handle conversions between pointers to different address spaces. */
2234 if (POINTER_TYPE_P (orig)
2235 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2236 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2237 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2238 /* fall through */
2239
2240 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2241 case OFFSET_TYPE:
2242 if (TREE_CODE (arg) == INTEGER_CST)
2243 {
2244 tem = fold_convert_const (NOP_EXPR, type, arg);
2245 if (tem != NULL_TREE)
2246 return tem;
2247 }
2248 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2249 || TREE_CODE (orig) == OFFSET_TYPE)
2250 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2251 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 return fold_convert_loc (loc, type,
2253 fold_build1_loc (loc, REALPART_EXPR,
2254 TREE_TYPE (orig), arg));
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2258
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2261 {
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2265 }
2266 else if (TREE_CODE (arg) == REAL_CST)
2267 {
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2271 }
2272 else if (TREE_CODE (arg) == FIXED_CST)
2273 {
2274 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2277 }
2278
2279 switch (TREE_CODE (orig))
2280 {
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2285
2286 case REAL_TYPE:
2287 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2288
2289 case FIXED_POINT_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2291
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2295
2296 default:
2297 gcc_unreachable ();
2298 }
2299
2300 case FIXED_POINT_TYPE:
2301 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2302 || TREE_CODE (arg) == REAL_CST)
2303 {
2304 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2305 if (tem != NULL_TREE)
2306 goto fold_convert_exit;
2307 }
2308
2309 switch (TREE_CODE (orig))
2310 {
2311 case FIXED_POINT_TYPE:
2312 case INTEGER_TYPE:
2313 case ENUMERAL_TYPE:
2314 case BOOLEAN_TYPE:
2315 case REAL_TYPE:
2316 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2317
2318 case COMPLEX_TYPE:
2319 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 return fold_convert_loc (loc, type, tem);
2321
2322 default:
2323 gcc_unreachable ();
2324 }
2325
2326 case COMPLEX_TYPE:
2327 switch (TREE_CODE (orig))
2328 {
2329 case INTEGER_TYPE:
2330 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case REAL_TYPE:
2333 case FIXED_POINT_TYPE:
2334 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2335 fold_convert_loc (loc, TREE_TYPE (type), arg),
2336 fold_convert_loc (loc, TREE_TYPE (type),
2337 integer_zero_node));
2338 case COMPLEX_TYPE:
2339 {
2340 tree rpart, ipart;
2341
2342 if (TREE_CODE (arg) == COMPLEX_EXPR)
2343 {
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2345 TREE_OPERAND (arg, 0));
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2347 TREE_OPERAND (arg, 1));
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2349 }
2350
2351 arg = save_expr (arg);
2352 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2353 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2357 }
2358
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 case VECTOR_TYPE:
2364 if (integer_zerop (arg))
2365 return build_zero_vector (type);
2366 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2367 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2368 || TREE_CODE (orig) == VECTOR_TYPE);
2369 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2370
2371 case VOID_TYPE:
2372 tem = fold_ignored_result (arg);
2373 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2374
2375 default:
2376 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2377 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2378 gcc_unreachable ();
2379 }
2380 fold_convert_exit:
2381 protected_set_expr_location_unshare (tem, loc);
2382 return tem;
2383 }
2384 \f
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2387
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2390 {
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2393 {
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2400
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2408
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2422
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2428 }
2429
2430 return true;
2431 }
2432
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2434
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2437 {
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2442
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2446 }
2447
2448 /* When pedantic, return an expr equal to X but certainly not valid as a
2449 pedantic lvalue. Otherwise, return X. */
2450
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2453 {
2454 return protected_set_expr_location_unshare (x, loc);
2455 }
2456 \f
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
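/* For example (illustrative only): when NaNs are honored, the inverse
   of x < y is x unge y rather than x >= y, because both LT_EXPR and
   GE_EXPR are false on unordered operands; and when both NaNs and
   trapping math are in effect we refuse to invert LT_EXPR at all,
   since the unordered variant would drop the invalid-operand
   exception, hence the ERROR_MARK. */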
2461
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2464 {
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2468
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2501 }
2502 }
2503
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2506
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2509 {
2510 switch (code)
2511 {
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2544
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2547 {
2548 switch (code)
2549 {
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
2582
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2586
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2589 {
2590 switch (code)
2591 {
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2622 }
2623 }
2624
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
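/* Worked example (illustrative only): combining x < y || x == y maps
   LT_EXPR to COMPCODE_LT (1) and EQ_EXPR to COMPCODE_EQ (2); their
   bitwise OR is 3 == COMPCODE_LE, so the result folds to x <= y.
   Likewise x <= y && x >= y gives 3 & 6 == 2 == COMPCODE_EQ. */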
2630
2631 tree
2632 combine_comparisons (location_t loc,
2633 enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2636 {
2637 bool honor_nans = HONOR_NANS (ll_arg);
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 int compcode;
2641
2642 switch (code)
2643 {
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2646 break;
2647
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2650 break;
2651
2652 default:
2653 return NULL_TREE;
2654 }
2655
2656 if (!honor_nans)
2657 {
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2665 }
2666 else if (flag_trapping_math)
2667 {
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2679
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2688 rtrap = false;
2689
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2692 if (rtrap && !ltrap
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2694 return NULL_TREE;
2695
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
2698 return NULL_TREE;
2699 }
2700
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2705 else
2706 {
2707 enum tree_code tcode;
2708
2709 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2710 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2711 }
2712 }
2713 \f
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2716
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2723
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2734
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2738
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2741
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743 any operand with side effects. This is unnecessarily conservative in the
2744 case where we know that arg0 and arg1 are in disjoint code paths (such as
2745 in the ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
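/* For instance (illustrative only): the REAL_CSTs -0.0 and 0.0 are not
   operand_equal_p when signed zeros are honored, even though they
   compare equal with ==; and two structurally identical calls f (x)
   only compare equal when f is const (or pure, given OEP_PURE_SAME),
   since any weaker guarantee would let the second call observe
   changed memory. */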
2748
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2751 {
2752 /* If either is ERROR_MARK, they aren't equal. */
2753 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2754 || TREE_TYPE (arg0) == error_mark_node
2755 || TREE_TYPE (arg1) == error_mark_node)
2756 return 0;
2757
2758 /* Similarly, if either does not have a type (like a released SSA name),
2759 they aren't equal. */
2760 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2761 return 0;
2762
2763 /* We cannot consider pointers to different address spaces equal. */
2764 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2765 && POINTER_TYPE_P (TREE_TYPE (arg1))
2766 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2767 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2768 return 0;
2769
2770 /* Check equality of integer constants before bailing out due to
2771 precision differences. */
2772 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2773 {
2774 /* Address of INTEGER_CST is not defined; check that we did not forget
2775 to drop the OEP_ADDRESS_OF flags. */
2776 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2777 return tree_int_cst_equal (arg0, arg1);
2778 }
2779
2780 if (!(flags & OEP_ADDRESS_OF))
2781 {
2782 /* If both types don't have the same signedness, then we can't consider
2783 them equal. We must check this before the STRIP_NOPS calls
2784 because they may change the signedness of the arguments. As pointers
2785 strictly don't have a signedness, require either two pointers or
2786 two non-pointers as well. */
2787 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2788 || POINTER_TYPE_P (TREE_TYPE (arg0))
2789 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2790 return 0;
2791
2792 /* If both types don't have the same precision, then it is not safe
2793 to strip NOPs. */
2794 if (element_precision (TREE_TYPE (arg0))
2795 != element_precision (TREE_TYPE (arg1)))
2796 return 0;
2797
2798 STRIP_NOPS (arg0);
2799 STRIP_NOPS (arg1);
2800 }
2801 #if 0
2802 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2803 sanity check once the issue is solved. */
2804 else
2805 /* Addresses of conversions and SSA_NAMEs (and many other things)
2806 are not defined. Check that we did not forget to drop the
2807 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2808 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2809 && TREE_CODE (arg0) != SSA_NAME);
2810 #endif
2811
2812 /* In case both args are comparisons but with different comparison
2813 code, try to swap the comparison operands of one arg to produce
2814 a match and compare that variant. */
2815 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2816 && COMPARISON_CLASS_P (arg0)
2817 && COMPARISON_CLASS_P (arg1))
2818 {
2819 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2820
2821 if (TREE_CODE (arg0) == swap_code)
2822 return operand_equal_p (TREE_OPERAND (arg0, 0),
2823 TREE_OPERAND (arg1, 1), flags)
2824 && operand_equal_p (TREE_OPERAND (arg0, 1),
2825 TREE_OPERAND (arg1, 0), flags);
2826 }
2827
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2829 {
2830 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2831 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2832 ;
2833 else if (flags & OEP_ADDRESS_OF)
2834 {
2835 /* If we are interested in comparing addresses ignore
2836 MEM_REF wrappings of the base that can appear just for
2837 TBAA reasons. */
2838 if (TREE_CODE (arg0) == MEM_REF
2839 && DECL_P (arg1)
2840 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2841 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2842 && integer_zerop (TREE_OPERAND (arg0, 1)))
2843 return 1;
2844 else if (TREE_CODE (arg1) == MEM_REF
2845 && DECL_P (arg0)
2846 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2847 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2848 && integer_zerop (TREE_OPERAND (arg1, 1)))
2849 return 1;
2850 return 0;
2851 }
2852 else
2853 return 0;
2854 }
2855
2856 /* When not checking addresses, this is needed for conversions and for
2857 COMPONENT_REF. Might as well play it safe and always test this. */
2858 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2859 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2860 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2861 && !(flags & OEP_ADDRESS_OF)))
2862 return 0;
2863
2864 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2865 We don't care about side effects in that case because the SAVE_EXPR
2866 takes care of that for us. In all other cases, two expressions are
2867 equal if they have no side effects. If we have two identical
2868 expressions with side effects that should be treated the same due
2869 to the only side effects being identical SAVE_EXPR's, that will
2870 be detected in the recursive calls below.
2871 If we are taking an invariant address of two identical objects
2872 they are necessarily equal as well. */
2873 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2874 && (TREE_CODE (arg0) == SAVE_EXPR
2875 || (flags & OEP_MATCH_SIDE_EFFECTS)
2876 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2877 return 1;
2878
2879 /* Next handle constant cases, those for which we can return 1 even
2880 if ONLY_CONST is set. */
2881 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2882 switch (TREE_CODE (arg0))
2883 {
2884 case INTEGER_CST:
2885 return tree_int_cst_equal (arg0, arg1);
2886
2887 case FIXED_CST:
2888 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2889 TREE_FIXED_CST (arg1));
2890
2891 case REAL_CST:
2892 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2893 return 1;
2894
2895
2896 if (!HONOR_SIGNED_ZEROS (arg0))
2897 {
2898 /* If we do not distinguish between signed and unsigned zero,
2899 consider them equal. */
2900 if (real_zerop (arg0) && real_zerop (arg1))
2901 return 1;
2902 }
2903 return 0;
2904
2905 case VECTOR_CST:
2906 {
2907 unsigned i;
2908
2909 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2910 return 0;
2911
2912 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2913 {
2914 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2915 VECTOR_CST_ELT (arg1, i), flags))
2916 return 0;
2917 }
2918 return 1;
2919 }
2920
2921 case COMPLEX_CST:
2922 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2923 flags)
2924 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2925 flags));
2926
2927 case STRING_CST:
2928 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2929 && ! memcmp (TREE_STRING_POINTER (arg0),
2930 TREE_STRING_POINTER (arg1),
2931 TREE_STRING_LENGTH (arg0)));
2932
2933 case ADDR_EXPR:
2934 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2935 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2936 flags | OEP_ADDRESS_OF
2937 | OEP_MATCH_SIDE_EFFECTS);
2938 case CONSTRUCTOR:
2939 /* In GIMPLE empty constructors are allowed in initializers of
2940 aggregates. */
2941 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2942 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2943 default:
2944 break;
2945 }
2946
2947 if (flags & OEP_ONLY_CONST)
2948 return 0;
2949
2950 /* Define macros to test an operand from arg0 and arg1 for equality and a
2951 variant that allows null and views null as being different from any
2952 non-null value. In the latter case, if either is null, then both
2953 must be; otherwise, do the normal comparison. */
2954 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2955 TREE_OPERAND (arg1, N), flags)
2956
2957 #define OP_SAME_WITH_NULL(N) \
2958 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2959 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2960
2961 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2962 {
2963 case tcc_unary:
2964 /* Two conversions are equal only if signedness and modes match. */
2965 switch (TREE_CODE (arg0))
2966 {
2967 CASE_CONVERT:
2968 case FIX_TRUNC_EXPR:
2969 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2970 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2971 return 0;
2972 break;
2973 default:
2974 break;
2975 }
2976
2977 return OP_SAME (0);
2978
2979
2980 case tcc_comparison:
2981 case tcc_binary:
2982 if (OP_SAME (0) && OP_SAME (1))
2983 return 1;
2984
2985 /* For commutative ops, allow the other order. */
2986 return (commutative_tree_code (TREE_CODE (arg0))
2987 && operand_equal_p (TREE_OPERAND (arg0, 0),
2988 TREE_OPERAND (arg1, 1), flags)
2989 && operand_equal_p (TREE_OPERAND (arg0, 1),
2990 TREE_OPERAND (arg1, 0), flags));
2991
2992 case tcc_reference:
2993 /* If either of the pointer (or reference) expressions we are
2994 dereferencing contain a side effect, these cannot be equal,
2995 but their addresses can be. */
2996 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2997 && (TREE_SIDE_EFFECTS (arg0)
2998 || TREE_SIDE_EFFECTS (arg1)))
2999 return 0;
3000
3001 switch (TREE_CODE (arg0))
3002 {
3003 case INDIRECT_REF:
3004 if (!(flags & OEP_ADDRESS_OF)
3005 && (TYPE_ALIGN (TREE_TYPE (arg0))
3006 != TYPE_ALIGN (TREE_TYPE (arg1))))
3007 return 0;
3008 flags &= ~OEP_ADDRESS_OF;
3009 return OP_SAME (0);
3010
3011 case REALPART_EXPR:
3012 case IMAGPART_EXPR:
3013 case VIEW_CONVERT_EXPR:
3014 return OP_SAME (0);
3015
3016 case TARGET_MEM_REF:
3017 case MEM_REF:
3018 if (!(flags & OEP_ADDRESS_OF))
3019 {
3020 /* Require equal access sizes. */
3021 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3022 && (!TYPE_SIZE (TREE_TYPE (arg0))
3023 || !TYPE_SIZE (TREE_TYPE (arg1))
3024 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3025 TYPE_SIZE (TREE_TYPE (arg1)),
3026 flags)))
3027 return 0;
3028 /* Verify that accesses are TBAA compatible. */
3029 if (!alias_ptr_types_compatible_p
3030 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3031 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3032 || (MR_DEPENDENCE_CLIQUE (arg0)
3033 != MR_DEPENDENCE_CLIQUE (arg1))
3034 || (MR_DEPENDENCE_BASE (arg0)
3035 != MR_DEPENDENCE_BASE (arg1)))
3036 return 0;
3037 /* Verify that alignment is compatible. */
3038 if (TYPE_ALIGN (TREE_TYPE (arg0))
3039 != TYPE_ALIGN (TREE_TYPE (arg1)))
3040 return 0;
3041 }
3042 flags &= ~OEP_ADDRESS_OF;
3043 return (OP_SAME (0) && OP_SAME (1)
3044 /* TARGET_MEM_REFs require equal extra operands. */
3045 && (TREE_CODE (arg0) != TARGET_MEM_REF
3046 || (OP_SAME_WITH_NULL (2)
3047 && OP_SAME_WITH_NULL (3)
3048 && OP_SAME_WITH_NULL (4))));
3049
3050 case ARRAY_REF:
3051 case ARRAY_RANGE_REF:
3052 /* Operands 2 and 3 may be null.
3053 Compare the array index by value first if it is constant, since the
3054 indexes may have different types but the same value here. */
3055 if (!OP_SAME (0))
3056 return 0;
3057 flags &= ~OEP_ADDRESS_OF;
3058 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3059 TREE_OPERAND (arg1, 1))
3060 || OP_SAME (1))
3061 && OP_SAME_WITH_NULL (2)
3062 && OP_SAME_WITH_NULL (3));
3063
3064 case COMPONENT_REF:
3065 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3066 may be NULL when we're called to compare MEM_EXPRs. */
3067 if (!OP_SAME_WITH_NULL (0)
3068 || !OP_SAME (1))
3069 return 0;
3070 flags &= ~OEP_ADDRESS_OF;
3071 return OP_SAME_WITH_NULL (2);
3072
3073 case BIT_FIELD_REF:
3074 if (!OP_SAME (0))
3075 return 0;
3076 flags &= ~OEP_ADDRESS_OF;
3077 return OP_SAME (1) && OP_SAME (2);
3078
3079 default:
3080 return 0;
3081 }
3082
3083 case tcc_expression:
3084 switch (TREE_CODE (arg0))
3085 {
3086 case ADDR_EXPR:
3087 /* Be sure we pass the right ADDRESS_OF flag. */
3088 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3089 return operand_equal_p (TREE_OPERAND (arg0, 0),
3090 TREE_OPERAND (arg1, 0),
3091 flags | OEP_ADDRESS_OF);
3092
3093 case TRUTH_NOT_EXPR:
3094 return OP_SAME (0);
3095
3096 case TRUTH_ANDIF_EXPR:
3097 case TRUTH_ORIF_EXPR:
3098 return OP_SAME (0) && OP_SAME (1);
3099
3100 case FMA_EXPR:
3101 case WIDEN_MULT_PLUS_EXPR:
3102 case WIDEN_MULT_MINUS_EXPR:
3103 if (!OP_SAME (2))
3104 return 0;
3105 /* The multiplication operands are commutative. */
3106 /* FALLTHRU */
3107
3108 case TRUTH_AND_EXPR:
3109 case TRUTH_OR_EXPR:
3110 case TRUTH_XOR_EXPR:
3111 if (OP_SAME (0) && OP_SAME (1))
3112 return 1;
3113
3114 /* Otherwise take into account this is a commutative operation. */
3115 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3116 TREE_OPERAND (arg1, 1), flags)
3117 && operand_equal_p (TREE_OPERAND (arg0, 1),
3118 TREE_OPERAND (arg1, 0), flags));
3119
3120 case COND_EXPR:
3121 if (! OP_SAME (1) || ! OP_SAME (2))
3122 return 0;
3123 flags &= ~OEP_ADDRESS_OF;
3124 return OP_SAME (0);
3125
3126 case VEC_COND_EXPR:
3127 case DOT_PROD_EXPR:
3128 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3129
3130 default:
3131 return 0;
3132 }
3133
3134 case tcc_vl_exp:
3135 switch (TREE_CODE (arg0))
3136 {
3137 case CALL_EXPR:
3138 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3139 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3140 /* If the two CALL_EXPRs are not both internal calls or both normal
3141 function calls, then they are not equal. */
3142 return 0;
3143 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3144 {
3145 /* If the CALL_EXPRs call different internal functions, then they
3146 are not equal. */
3147 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3148 return 0;
3149 }
3150 else
3151 {
3152 /* If the CALL_EXPRs call different functions, then they are not
3153 equal. */
3154 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3155 flags))
3156 return 0;
3157 }
3158
3159 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3160 {
3161 unsigned int cef = call_expr_flags (arg0);
3162 if (flags & OEP_PURE_SAME)
3163 cef &= ECF_CONST | ECF_PURE;
3164 else
3165 cef &= ECF_CONST;
3166 if (!cef)
3167 return 0;
3168 }
3169
3170 /* Now see if all the arguments are the same. */
3171 {
3172 const_call_expr_arg_iterator iter0, iter1;
3173 const_tree a0, a1;
3174 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3175 a1 = first_const_call_expr_arg (arg1, &iter1);
3176 a0 && a1;
3177 a0 = next_const_call_expr_arg (&iter0),
3178 a1 = next_const_call_expr_arg (&iter1))
3179 if (! operand_equal_p (a0, a1, flags))
3180 return 0;
3181
3182 /* If we get here and both argument lists are exhausted
3183 then the CALL_EXPRs are equal. */
3184 return ! (a0 || a1);
3185 }
3186 default:
3187 return 0;
3188 }
3189
3190 case tcc_declaration:
3191 /* Consider __builtin_sqrt equal to sqrt. */
3192 return (TREE_CODE (arg0) == FUNCTION_DECL
3193 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3194 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3195 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3196
3197 case tcc_exceptional:
3198 if (TREE_CODE (arg0) == CONSTRUCTOR)
3199 {
3200 /* In GIMPLE constructors are used only to build vectors from
3201 elements. Individual elements in the constructor must be
3202 indexed in increasing order and form an initial sequence.
3203
3204 We make no effort to compare constructors in GENERIC.
3205 (See sem_variable::equals in ipa-icf, which can do so for
3206 constants.) */
3207 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3208 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3209 return 0;
3210
3211 /* Be sure that the vectors constructed have the same representation.
3212 So far we have only tested that element precisions and modes match.
3213 Vectors may be BLKmode, so also check that the numbers of
3214 parts match. */
3215 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3216 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3217 return 0;
3218
3219 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3220 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3221 unsigned int len = vec_safe_length (v0);
3222
3223 if (len != vec_safe_length (v1))
3224 return 0;
3225
3226 for (unsigned int i = 0; i < len; i++)
3227 {
3228 constructor_elt *c0 = &(*v0)[i];
3229 constructor_elt *c1 = &(*v1)[i];
3230
3231 if (!operand_equal_p (c0->value, c1->value, flags)
3232 /* In GIMPLE the indexes can be either NULL or matching i.
3233 Double check this so we won't get false
3234 positives for GENERIC. */
3235 || (c0->index
3236 && (TREE_CODE (c0->index) != INTEGER_CST
3237 || compare_tree_int (c0->index, i)))
3238 || (c1->index
3239 && (TREE_CODE (c1->index) != INTEGER_CST
3240 || compare_tree_int (c1->index, i))))
3241 return 0;
3242 }
3243 return 1;
3244 }
3245 return 0;
3246
3247 default:
3248 return 0;
3249 }
3250
3251 #undef OP_SAME
3252 #undef OP_SAME_WITH_NULL
3253 }
3254 \f
3255 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3256 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3257
3258 When in doubt, return 0. */
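/* For example (illustrative only): if ARG1 is (int) c for a char
   variable c and OTHER is the constant 10, shorten_compare may have
   narrowed the comparison so that ARG0 is the plain c; this routine
   still recognizes ARG0 and ARG1 as the same comparison operand. */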
3259
3260 static int
3261 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3262 {
3263 int unsignedp1, unsignedpo;
3264 tree primarg0, primarg1, primother;
3265 unsigned int correct_width;
3266
3267 if (operand_equal_p (arg0, arg1, 0))
3268 return 1;
3269
3270 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3271 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3272 return 0;
3273
3274 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3275 and see if the inner values are the same. This removes any
3276 signedness comparison, which doesn't matter here. */
3277 primarg0 = arg0, primarg1 = arg1;
3278 STRIP_NOPS (primarg0);
3279 STRIP_NOPS (primarg1);
3280 if (operand_equal_p (primarg0, primarg1, 0))
3281 return 1;
3282
3283 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3284 actual comparison operand, ARG0.
3285
3286 First throw away any conversions to wider types
3287 already present in the operands. */
3288
3289 primarg1 = get_narrower (arg1, &unsignedp1);
3290 primother = get_narrower (other, &unsignedpo);
3291
3292 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3293 if (unsignedp1 == unsignedpo
3294 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3295 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3296 {
3297 tree type = TREE_TYPE (arg0);
3298
3299 /* Make sure shorter operand is extended the right way
3300 to match the longer operand. */
3301 primarg1 = fold_convert (signed_or_unsigned_type_for
3302 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3303
3304 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3305 return 1;
3306 }
3307
3308 return 0;
3309 }
3310 \f
3311 /* See if ARG is an expression that is either a comparison or is performing
3312 arithmetic on comparisons. The comparisons must only be comparing
3313 two different values, which will be stored in *CVAL1 and *CVAL2; if
3314 they are nonzero it means that some operands have already been found.
3315 No variables may be used anywhere else in the expression except in the
3316 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3317 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3318
3319 If this is true, return 1. Otherwise, return zero. */
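/* For example (illustrative only): for ARG of the form a < b || a == b
   the walk below records *CVAL1 = a and *CVAL2 = b and returns 1,
   whereas a < b || c == d fails because a third distinct value shows
   up. */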
3320
3321 static int
3322 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3323 {
3324 enum tree_code code = TREE_CODE (arg);
3325 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3326
3327 /* We can handle some of the tcc_expression cases here. */
3328 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3329 tclass = tcc_unary;
3330 else if (tclass == tcc_expression
3331 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3332 || code == COMPOUND_EXPR))
3333 tclass = tcc_binary;
3334
3335 else if (tclass == tcc_expression && code == SAVE_EXPR
3336 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3337 {
3338 /* If we've already found a CVAL1 or CVAL2, this expression is
3339 too complex to handle. */
3340 if (*cval1 || *cval2)
3341 return 0;
3342
3343 tclass = tcc_unary;
3344 *save_p = 1;
3345 }
3346
3347 switch (tclass)
3348 {
3349 case tcc_unary:
3350 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3351
3352 case tcc_binary:
3353 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3354 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3355 cval1, cval2, save_p));
3356
3357 case tcc_constant:
3358 return 1;
3359
3360 case tcc_expression:
3361 if (code == COND_EXPR)
3362 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3363 cval1, cval2, save_p)
3364 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3365 cval1, cval2, save_p)
3366 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3367 cval1, cval2, save_p));
3368 return 0;
3369
3370 case tcc_comparison:
3371 /* First see if we can handle the first operand, then the second. For
3372 the second operand, we know *CVAL1 can't be zero. It must be that
3373 one side of the comparison is each of the values; test for the
3374 case where this isn't true by failing if the two operands
3375 are the same. */
3376
3377 if (operand_equal_p (TREE_OPERAND (arg, 0),
3378 TREE_OPERAND (arg, 1), 0))
3379 return 0;
3380
3381 if (*cval1 == 0)
3382 *cval1 = TREE_OPERAND (arg, 0);
3383 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3384 ;
3385 else if (*cval2 == 0)
3386 *cval2 = TREE_OPERAND (arg, 0);
3387 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3388 ;
3389 else
3390 return 0;
3391
3392 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3393 ;
3394 else if (*cval2 == 0)
3395 *cval2 = TREE_OPERAND (arg, 1);
3396 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3397 ;
3398 else
3399 return 0;
3400
3401 return 1;
3402
3403 default:
3404 return 0;
3405 }
3406 }
3407 \f
3408 /* ARG is a tree that is known to contain just arithmetic operations and
3409 comparisons. Evaluate the operations in the tree substituting NEW0 for
3410 any occurrence of OLD0 as an operand of a comparison and likewise for
3411 NEW1 and OLD1. */
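/* For example (illustrative only): applying eval_subst to
   a < b ? a == b : a > b with OLD0 = a, NEW0 = x, OLD1 = b, NEW1 = y
   rebuilds the expression as x < y ? x == y : x > y (modulo any
   further folding done by fold_build3_loc). */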
3412
3413 static tree
3414 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3415 tree old1, tree new1)
3416 {
3417 tree type = TREE_TYPE (arg);
3418 enum tree_code code = TREE_CODE (arg);
3419 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3420
3421 /* We can handle some of the tcc_expression cases here. */
3422 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3423 tclass = tcc_unary;
3424 else if (tclass == tcc_expression
3425 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3426 tclass = tcc_binary;
3427
3428 switch (tclass)
3429 {
3430 case tcc_unary:
3431 return fold_build1_loc (loc, code, type,
3432 eval_subst (loc, TREE_OPERAND (arg, 0),
3433 old0, new0, old1, new1));
3434
3435 case tcc_binary:
3436 return fold_build2_loc (loc, code, type,
3437 eval_subst (loc, TREE_OPERAND (arg, 0),
3438 old0, new0, old1, new1),
3439 eval_subst (loc, TREE_OPERAND (arg, 1),
3440 old0, new0, old1, new1));
3441
3442 case tcc_expression:
3443 switch (code)
3444 {
3445 case SAVE_EXPR:
3446 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3447 old1, new1);
3448
3449 case COMPOUND_EXPR:
3450 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3451 old1, new1);
3452
3453 case COND_EXPR:
3454 return fold_build3_loc (loc, code, type,
3455 eval_subst (loc, TREE_OPERAND (arg, 0),
3456 old0, new0, old1, new1),
3457 eval_subst (loc, TREE_OPERAND (arg, 1),
3458 old0, new0, old1, new1),
3459 eval_subst (loc, TREE_OPERAND (arg, 2),
3460 old0, new0, old1, new1));
3461 default:
3462 break;
3463 }
3464 /* Fall through - ??? */
3465
3466 case tcc_comparison:
3467 {
3468 tree arg0 = TREE_OPERAND (arg, 0);
3469 tree arg1 = TREE_OPERAND (arg, 1);
3470
3471 /* We need to check both for exact equality and tree equality. The
3472 former will be true if the operand has a side-effect. In that
3473 case, we know the operand occurred exactly once. */
3474
3475 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3476 arg0 = new0;
3477 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3478 arg0 = new1;
3479
3480 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3481 arg1 = new0;
3482 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3483 arg1 = new1;
3484
3485 return fold_build2_loc (loc, code, type, arg0, arg1);
3486 }
3487
3488 default:
3489 return arg;
3490 }
3491 }
3492 \f
3493 /* Return a tree for the case when the result of an expression is RESULT
3494 converted to TYPE and OMITTED was previously an operand of the expression
3495 but is now not needed (e.g., we folded OMITTED * 0).
3496
3497 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3498 the conversion of RESULT to TYPE. */
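/* For example (illustrative only): when f () * 0 is folded to 0 and
   f () has side effects, the result is the COMPOUND_EXPR (f (), 0),
   so the call is still evaluated; without side effects the fold
   simply returns 0 converted to TYPE. */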
3499
3500 tree
3501 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3502 {
3503 tree t = fold_convert_loc (loc, type, result);
3504
3505 /* If the resulting operand is an empty statement, just return the omitted
3506 statement cast to void. */
3507 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3508 return build1_loc (loc, NOP_EXPR, void_type_node,
3509 fold_ignored_result (omitted));
3510
3511 if (TREE_SIDE_EFFECTS (omitted))
3512 return build2_loc (loc, COMPOUND_EXPR, type,
3513 fold_ignored_result (omitted), t);
3514
3515 return non_lvalue_loc (loc, t);
3516 }
3517
3518 /* Return a tree for the case when the result of an expression is RESULT
3519 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3520 of the expression but are now not needed.
3521
3522 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3523 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3524 evaluated before OMITTED2. Otherwise, if neither has side effects,
3525 just do the conversion of RESULT to TYPE. */
3526
3527 tree
3528 omit_two_operands_loc (location_t loc, tree type, tree result,
3529 tree omitted1, tree omitted2)
3530 {
3531 tree t = fold_convert_loc (loc, type, result);
3532
3533 if (TREE_SIDE_EFFECTS (omitted2))
3534 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3535 if (TREE_SIDE_EFFECTS (omitted1))
3536 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3537
3538 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3539 }
3540
3541 \f
3542 /* Return a simplified tree node for the truth-negation of ARG. This
3543 never alters ARG itself. We assume that ARG is an operation that
3544 returns a truth value (0 or 1).
3545
3546 FIXME: one would think we would fold the result, but it causes
3547 problems with the dominator optimizer. */
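/* For example (illustrative only): !(a && b) becomes !a || !b through
   the TRUTH_AND/ANDIF cases below, and !(x < y) on integer operands
   becomes x >= y; for floating-point operands with -ftrapping-math we
   return NULL_TREE instead, as inverting could change trapping
   behavior. */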
3548
3549 static tree
3550 fold_truth_not_expr (location_t loc, tree arg)
3551 {
3552 tree type = TREE_TYPE (arg);
3553 enum tree_code code = TREE_CODE (arg);
3554 location_t loc1, loc2;
3555
3556 /* If this is a comparison, we can simply invert it, except for
3557 floating-point non-equality comparisons, in which case we just
3558 enclose a TRUTH_NOT_EXPR around what we have. */
3559
3560 if (TREE_CODE_CLASS (code) == tcc_comparison)
3561 {
3562 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3563 if (FLOAT_TYPE_P (op_type)
3564 && flag_trapping_math
3565 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3566 && code != NE_EXPR && code != EQ_EXPR)
3567 return NULL_TREE;
3568
3569 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3570 if (code == ERROR_MARK)
3571 return NULL_TREE;
3572
3573 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3574 TREE_OPERAND (arg, 1));
3575 }
3576
3577 switch (code)
3578 {
3579 case INTEGER_CST:
3580 return constant_boolean_node (integer_zerop (arg), type);
3581
3582 case TRUTH_AND_EXPR:
3583 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3584 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3585 return build2_loc (loc, TRUTH_OR_EXPR, type,
3586 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3587 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3588
3589 case TRUTH_OR_EXPR:
3590 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3591 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3592 return build2_loc (loc, TRUTH_AND_EXPR, type,
3593 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3594 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3595
3596 case TRUTH_XOR_EXPR:
3597 /* Here we can invert either operand. We invert the first operand
3598 unless the second operand is a TRUTH_NOT_EXPR in which case our
3599 result is the XOR of the first operand with the inside of the
3600 negation of the second operand. */
3601
3602 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3603 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3604 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3605 else
3606 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3607 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3608 TREE_OPERAND (arg, 1));
3609
3610 case TRUTH_ANDIF_EXPR:
3611 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3612 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3613 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3614 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3615 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3616
3617 case TRUTH_ORIF_EXPR:
3618 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3619 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3620 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3621 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3622 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3623
3624 case TRUTH_NOT_EXPR:
3625 return TREE_OPERAND (arg, 0);
3626
3627 case COND_EXPR:
3628 {
3629 tree arg1 = TREE_OPERAND (arg, 1);
3630 tree arg2 = TREE_OPERAND (arg, 2);
3631
3632 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3633 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3634
3635 /* A COND_EXPR may have a throw as one operand, which
3636 then has void type. Just leave void operands
3637 as they are. */
3638 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3639 VOID_TYPE_P (TREE_TYPE (arg1))
3640 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3641 VOID_TYPE_P (TREE_TYPE (arg2))
3642 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3643 }
3644
3645 case COMPOUND_EXPR:
3646 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3647 return build2_loc (loc, COMPOUND_EXPR, type,
3648 TREE_OPERAND (arg, 0),
3649 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3650
3651 case NON_LVALUE_EXPR:
3652 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3653 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3654
3655 CASE_CONVERT:
3656 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3657 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3658
3659 /* ... fall through ... */
3660
3661 case FLOAT_EXPR:
3662 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3663 return build1_loc (loc, TREE_CODE (arg), type,
3664 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3665
3666 case BIT_AND_EXPR:
3667 if (!integer_onep (TREE_OPERAND (arg, 1)))
3668 return NULL_TREE;
3669 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3670
3671 case SAVE_EXPR:
3672 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3673
3674 case CLEANUP_POINT_EXPR:
3675 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3676 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3677 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3678
3679 default:
3680 return NULL_TREE;
3681 }
3682 }
3683
3684 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3685 assume that ARG is an operation that returns a truth value (0 or 1
3686 for scalars, 0 or -1 for vectors). Return the folded expression if
3687 folding is successful. Otherwise, return NULL_TREE. */
3688
3689 static tree
3690 fold_invert_truthvalue (location_t loc, tree arg)
3691 {
3692 tree type = TREE_TYPE (arg);
3693 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3694 ? BIT_NOT_EXPR
3695 : TRUTH_NOT_EXPR,
3696 type, arg);
3697 }
3698
3699 /* Return a simplified tree node for the truth-negation of ARG. This
3700 never alters ARG itself. We assume that ARG is an operation that
3701 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3702
3703 tree
3704 invert_truthvalue_loc (location_t loc, tree arg)
3705 {
3706 if (TREE_CODE (arg) == ERROR_MARK)
3707 return arg;
3708
3709 tree type = TREE_TYPE (arg);
3710 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3711 ? BIT_NOT_EXPR
3712 : TRUTH_NOT_EXPR,
3713 type, arg);
3714 }
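
/* Illustrative sketch, not part of GCC: the truth-value conventions the
   negation helpers above rely on.  Scalars use 0/1, so the negation is a
   logical NOT (TRUTH_NOT_EXPR); vector lanes use 0/-1 (all-zeros or
   all-ones), so the negation is a bitwise NOT (BIT_NOT_EXPR).  The
   hypothetical helpers below mirror this at the C level.  */
static int example_scalar_truth_not (int b)                /* B is 0 or 1.  */
{
  return !b;                                  /* Flips 0 <-> 1.  */
}
static long long example_vector_lane_not (long long lane)  /* LANE is 0 or -1.  */
{
  return ~lane;                               /* ~0 == -1 and ~(-1) == 0.  */
}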
3715
3716 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3717 with code CODE. This optimization is unsafe: it can change floating-point rounding, so it is only suitable when unsafe math optimizations are enabled. */
3718 static tree
3719 distribute_real_division (location_t loc, enum tree_code code, tree type,
3720 tree arg0, tree arg1)
3721 {
3722 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3723 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3724
3725 /* (A / C) +- (B / C) -> (A +- B) / C. */
3726 if (mul0 == mul1
3727 && operand_equal_p (TREE_OPERAND (arg0, 1),
3728 TREE_OPERAND (arg1, 1), 0))
3729 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3730 fold_build2_loc (loc, code, type,
3731 TREE_OPERAND (arg0, 0),
3732 TREE_OPERAND (arg1, 0)),
3733 TREE_OPERAND (arg0, 1));
3734
3735 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3736 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3737 TREE_OPERAND (arg1, 0), 0)
3738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3739 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3740 {
3741 REAL_VALUE_TYPE r0, r1;
3742 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3743 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3744 if (!mul0)
3745 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3746 if (!mul1)
3747 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3748 real_arithmetic (&r0, code, &r0, &r1);
3749 return fold_build2_loc (loc, MULT_EXPR, type,
3750 TREE_OPERAND (arg0, 0),
3751 build_real (type, r0));
3752 }
3753
3754 return NULL_TREE;
3755 }
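
/* Illustrative sketch, not part of GCC: the two shapes related by the
   folding above.  They agree in exact arithmetic but can round differently
   in floating point, which is why the transformation is classified as
   unsafe.  */
static double example_sum_of_divisions (double a, double b, double c)
{
  return a / c + b / c;          /* (A / C) + (B / C): two divisions.  */
}
static double example_distributed_division (double a, double b, double c)
{
  return (a + b) / c;            /* (A + B) / C: one division.  */
}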
3756 \f
3757 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3758 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3759 and uses reverse storage order if REVERSEP is nonzero. */
3760
3761 static tree
3762 make_bit_field_ref (location_t loc, tree inner, tree type,
3763 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3764 int unsignedp, int reversep)
3765 {
3766 tree result, bftype;
3767
3768 if (bitpos == 0 && !reversep)
3769 {
3770 tree size = TYPE_SIZE (TREE_TYPE (inner));
3771 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3772 || POINTER_TYPE_P (TREE_TYPE (inner)))
3773 && tree_fits_shwi_p (size)
3774 && tree_to_shwi (size) == bitsize)
3775 return fold_convert_loc (loc, type, inner);
3776 }
3777
3778 bftype = type;
3779 if (TYPE_PRECISION (bftype) != bitsize
3780 || TYPE_UNSIGNED (bftype) == !unsignedp)
3781 bftype = build_nonstandard_integer_type (bitsize, 0);
3782
3783 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3784 size_int (bitsize), bitsize_int (bitpos));
3785 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3786
3787 if (bftype != type)
3788 result = fold_convert_loc (loc, type, result);
3789
3790 return result;
3791 }
3792
3793 /* Optimize a bit-field compare.
3794
3795 There are two cases: First is a compare against a constant and the
3796 second is a comparison of two items where the fields are at the same
3797 bit position relative to the start of a chunk (byte, halfword, word)
3798 large enough to contain it. In these cases we can avoid the shift
3799 implicit in bitfield extractions.
3800
3801 For constants, we emit a compare of the shifted constant with the
3802 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3803 compared. For two fields at the same position, we do the ANDs with the
3804 similar mask and compare the result of the ANDs.
3805
3806 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3807 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3808 are the left and right operands of the comparison, respectively.
3809
3810 If the optimization described above can be done, we return the resulting
3811 tree. Otherwise we return zero. */
3812
3813 static tree
3814 optimize_bit_field_compare (location_t loc, enum tree_code code,
3815 tree compare_type, tree lhs, tree rhs)
3816 {
3817 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3818 tree type = TREE_TYPE (lhs);
3819 tree unsigned_type;
3820 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3821 machine_mode lmode, rmode, nmode;
3822 int lunsignedp, runsignedp;
3823 int lreversep, rreversep;
3824 int lvolatilep = 0, rvolatilep = 0;
3825 tree linner, rinner = NULL_TREE;
3826 tree mask;
3827 tree offset;
3828
3829 /* Get all the information about the extractions being done. If the bit size
3830 is the same as the size of the underlying object, we aren't doing an
3831 extraction at all and so can do nothing. We also don't want to
3832 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3833 then will no longer be able to replace it. */
3834 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3835 &lunsignedp, &lreversep, &lvolatilep, false);
3836 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3837 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3838 return 0;
3839
3840 if (const_p)
3841 rreversep = lreversep;
3842 else
3843 {
3844 /* If this is not a constant, we can only do something if bit positions,
3845 sizes, signedness and storage order are the same. */
3846 rinner
3847 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3848 &runsignedp, &rreversep, &rvolatilep, false);
3849
3850 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3851 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3852 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3853 return 0;
3854 }
3855
3856 /* See if we can find a mode to refer to this field. We should be able to,
3857 but fail if we can't. */
3858 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3859 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3860 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3861 TYPE_ALIGN (TREE_TYPE (rinner))),
3862 word_mode, false);
3863 if (nmode == VOIDmode)
3864 return 0;
3865
3866 /* Set signed and unsigned types of the precision of this mode for the
3867 shifts below. */
3868 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3869
3870 /* Compute the bit position and size for the new reference and our offset
3871 within it. If the new reference is the same size as the original, we
3872 won't optimize anything, so return zero. */
3873 nbitsize = GET_MODE_BITSIZE (nmode);
3874 nbitpos = lbitpos & ~ (nbitsize - 1);
3875 lbitpos -= nbitpos;
3876 if (nbitsize == lbitsize)
3877 return 0;
3878
3879 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3880 lbitpos = nbitsize - lbitsize - lbitpos;
3881
3882 /* Make the mask to be used against the extracted field. */
3883 mask = build_int_cst_type (unsigned_type, -1);
3884 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3885 mask = const_binop (RSHIFT_EXPR, mask,
3886 size_int (nbitsize - lbitsize - lbitpos));
3887
3888 if (! const_p)
3889 /* If not comparing with constant, just rework the comparison
3890 and return. */
3891 return fold_build2_loc (loc, code, compare_type,
3892 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3893 make_bit_field_ref (loc, linner,
3894 unsigned_type,
3895 nbitsize, nbitpos,
3896 1, lreversep),
3897 mask),
3898 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3899 make_bit_field_ref (loc, rinner,
3900 unsigned_type,
3901 nbitsize, nbitpos,
3902 1, rreversep),
3903 mask));
3904
3905 /* Otherwise, we are handling the constant case. See if the constant is too
3906 big for the field. If so, warn and return a constant truth value (false for EQ_EXPR, true for NE_EXPR). We do
3907 this not only for its own sake, but to avoid having to test for this
3908 error case below. If we didn't, we might generate wrong code.
3909
3910 For unsigned fields, the constant shifted right by the field length should
3911 be all zero. For signed fields, the high-order bits should agree with
3912 the sign bit. */
3913
3914 if (lunsignedp)
3915 {
3916 if (wi::lrshift (rhs, lbitsize) != 0)
3917 {
3918 warning (0, "comparison is always %d due to width of bit-field",
3919 code == NE_EXPR);
3920 return constant_boolean_node (code == NE_EXPR, compare_type);
3921 }
3922 }
3923 else
3924 {
3925 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3926 if (tem != 0 && tem != -1)
3927 {
3928 warning (0, "comparison is always %d due to width of bit-field",
3929 code == NE_EXPR);
3930 return constant_boolean_node (code == NE_EXPR, compare_type);
3931 }
3932 }
3933
3934 /* Single-bit compares should always be against zero. */
3935 if (lbitsize == 1 && ! integer_zerop (rhs))
3936 {
3937 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3938 rhs = build_int_cst (type, 0);
3939 }
3940
3941 /* Make a new bitfield reference, shift the constant over the
3942 appropriate number of bits, and mask it with the computed mask
3943 (in case this was a signed field). */
3944 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3945 lreversep);
3946
3947 rhs = const_binop (BIT_AND_EXPR,
3948 const_binop (LSHIFT_EXPR,
3949 fold_convert_loc (loc, unsigned_type, rhs),
3950 size_int (lbitpos)),
3951 mask);
3952
3953 lhs = build2_loc (loc, code, compare_type,
3954 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3955 return lhs;
3956 }
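
/* Illustrative sketch, not part of GCC: the effect of the optimization
   above on a hypothetical bit-field.  The layout and bit positions below
   are only indicative (shown for a typical little-endian allocation).
   The folded form masks the containing word and compares against the
   constant shifted into place, avoiding the extract-and-shift sequence
   implicit in reading the bit-field.  */
struct example_s { unsigned pad : 5; unsigned f : 3; unsigned rest : 24; };
static int example_unfolded (struct example_s s)
{
  return s.f == 3;                          /* Extract field, then compare.  */
}
static int example_folded_shape (unsigned word)
{
  return (word & (7u << 5)) == (3u << 5);   /* Mask word, compare shifted
                                               constant; no extraction.  */
}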
3957 \f
3958 /* Subroutine for fold_truth_andor_1: decode a field reference.
3959
3960 If EXP is a comparison reference, we return the innermost reference.
3961
3962 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3963 set to the starting bit number.
3964
3965 If the innermost field can be completely contained in a mode-sized
3966 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3967
3968 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3969 otherwise it is not changed.
3970
3971 *PUNSIGNEDP is set to the signedness of the field.
3972
3973 *PREVERSEP is set to the storage order of the field.
3974
3975 *PMASK is set to the mask used. This is either contained in a
3976 BIT_AND_EXPR or derived from the width of the field.
3977
3978 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3979
3980 Return 0 if this is not a component reference or is one that we can't
3981 do anything with. */
3982
3983 static tree
3984 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3985 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3986 int *punsignedp, int *preversep, int *pvolatilep,
3987 tree *pmask, tree *pand_mask)
3988 {
3989 tree outer_type = 0;
3990 tree and_mask = 0;
3991 tree mask, inner, offset;
3992 tree unsigned_type;
3993 unsigned int precision;
3994
3995 /* All the optimizations using this function assume integer fields.
3996 There are problems with FP fields since the type_for_size call
3997 below can fail for, e.g., XFmode. */
3998 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3999 return 0;
4000
4001 /* We are interested in the bare arrangement of bits, so strip everything
4002 that doesn't affect the machine mode. However, record the type of the
4003 outermost expression if it may matter below. */
4004 if (CONVERT_EXPR_P (exp)
4005 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4006 outer_type = TREE_TYPE (exp);
4007 STRIP_NOPS (exp);
4008
4009 if (TREE_CODE (exp) == BIT_AND_EXPR)
4010 {
4011 and_mask = TREE_OPERAND (exp, 1);
4012 exp = TREE_OPERAND (exp, 0);
4013 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4014 if (TREE_CODE (and_mask) != INTEGER_CST)
4015 return 0;
4016 }
4017
4018 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4019 punsignedp, preversep, pvolatilep, false);
4020 if ((inner == exp && and_mask == 0)
4021 || *pbitsize < 0 || offset != 0
4022 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4023 return 0;
4024
4025 /* If the number of bits in the reference is the same as the bitsize of
4026 the outer type, then the outer type gives the signedness. Otherwise
4027 (in case of a small bitfield) the signedness is unchanged. */
4028 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4029 *punsignedp = TYPE_UNSIGNED (outer_type);
4030
4031 /* Compute the mask to access the bitfield. */
4032 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4033 precision = TYPE_PRECISION (unsigned_type);
4034
4035 mask = build_int_cst_type (unsigned_type, -1);
4036
4037 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4038 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4039
4040 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4041 if (and_mask != 0)
4042 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4043 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4044
4045 *pmask = mask;
4046 *pand_mask = and_mask;
4047 return inner;
4048 }
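
/* Illustrative sketch, not part of GCC: the left-then-right shift used
   above to build a mask of *PBITSIZE low-order ones, here for an assumed
   32-bit precision and 1 <= BITSIZE <= 32.  */
static unsigned int example_low_order_ones (unsigned int bitsize)
{
  unsigned int mask = ~0u;       /* All ones.  */
  mask <<= 32 - bitsize;         /* Keep only BITSIZE bits, at the top...  */
  mask >>= 32 - bitsize;         /* ...then bring them back down.  */
  return mask;                   /* E.g. bitsize 3 gives 0x7.  */
}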
4049
4050 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4051 bit positions and MASK is SIGNED. */
4052
4053 static int
4054 all_ones_mask_p (const_tree mask, unsigned int size)
4055 {
4056 tree type = TREE_TYPE (mask);
4057 unsigned int precision = TYPE_PRECISION (type);
4058
4059 /* If this function returns true when the type of the mask is
4060 UNSIGNED, then there will be errors. In particular see
4061 gcc.c-torture/execute/990326-1.c. There does not appear to be
4062 any documentation paper trail as to why this is so. But the pre-
4063 wide-int code worked with that restriction and it has been preserved
4064 here. */
4065 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4066 return false;
4067
4068 return wi::mask (size, false, precision) == mask;
4069 }
4070
4071 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4072 represents the sign bit of EXP's type. If EXP represents a sign
4073 or zero extension, also test VAL against the unextended type.
4074 The return value is the (sub)expression whose sign bit is VAL,
4075 or NULL_TREE otherwise. */
4076
4077 tree
4078 sign_bit_p (tree exp, const_tree val)
4079 {
4080 int width;
4081 tree t;
4082
4083 /* Tree EXP must have an integral type. */
4084 t = TREE_TYPE (exp);
4085 if (! INTEGRAL_TYPE_P (t))
4086 return NULL_TREE;
4087
4088 /* Tree VAL must be an integer constant. */
4089 if (TREE_CODE (val) != INTEGER_CST
4090 || TREE_OVERFLOW (val))
4091 return NULL_TREE;
4092
4093 width = TYPE_PRECISION (t);
4094 if (wi::only_sign_bit_p (val, width))
4095 return exp;
4096
4097 /* Handle extension from a narrower type. */
4098 if (TREE_CODE (exp) == NOP_EXPR
4099 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4100 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4101
4102 return NULL_TREE;
4103 }
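
/* Illustrative sketch, not part of GCC: for a 32-bit type, the only value
   that satisfies the wi::only_sign_bit_p test above is the one with just
   the high-order bit set.  */
static int example_is_sign_bit_32 (unsigned int val)
{
  return val == 1u << 31;        /* 0x80000000.  */
}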
4104
4105 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4106 to be evaluated unconditionally. */
4107
4108 static int
4109 simple_operand_p (const_tree exp)
4110 {
4111 /* Strip any conversions that don't change the machine mode. */
4112 STRIP_NOPS (exp);
4113
4114 return (CONSTANT_CLASS_P (exp)
4115 || TREE_CODE (exp) == SSA_NAME
4116 || (DECL_P (exp)
4117 && ! TREE_ADDRESSABLE (exp)
4118 && ! TREE_THIS_VOLATILE (exp)
4119 && ! DECL_NONLOCAL (exp)
4120 /* Don't regard global variables as simple. They may be
4121 allocated in ways unknown to the compiler (shared memory,
4122 #pragma weak, etc). */
4123 && ! TREE_PUBLIC (exp)
4124 && ! DECL_EXTERNAL (exp)
4125 /* Weakrefs are not safe to be read, since they can be NULL.
4126 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4127 have DECL_WEAK flag set. */
4128 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4129 /* Loading a static variable is unduly expensive, but global
4130 registers aren't expensive. */
4131 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4132 }
4133
4134 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4135 to be evaluated unconditionally.
4136 In addition to simple_operand_p, we assume that comparisons, conversions,
4137 and logic-not operations are simple, if their operands are simple, too. */
4138
4139 static bool
4140 simple_operand_p_2 (tree exp)
4141 {
4142 enum tree_code code;
4143
4144 if (TREE_SIDE_EFFECTS (exp)
4145 || tree_could_trap_p (exp))
4146 return false;
4147
4148 while (CONVERT_EXPR_P (exp))
4149 exp = TREE_OPERAND (exp, 0);
4150
4151 code = TREE_CODE (exp);
4152
4153 if (TREE_CODE_CLASS (code) == tcc_comparison)
4154 return (simple_operand_p (TREE_OPERAND (exp, 0))
4155 && simple_operand_p (TREE_OPERAND (exp, 1)));
4156
4157 if (code == TRUTH_NOT_EXPR)
4158 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4159
4160 return simple_operand_p (exp);
4161 }
4162
4163 \f
4164 /* The following functions are subroutines to fold_range_test and allow it to
4165 try to change a logical combination of comparisons into a range test.
4166
4167 For example, both
4168 X == 2 || X == 3 || X == 4 || X == 5
4169 and
4170 X >= 2 && X <= 5
4171 are converted to
4172 (unsigned) (X - 2) <= 3
4173
4174 We describe each set of comparisons as being either inside or outside
4175 a range, using a variable named like IN_P, and then describe the
4176 range with a lower and upper bound. If one of the bounds is omitted,
4177 it represents either the highest or lowest value of the type.
4178
4179 In the comments below, we represent a range by two numbers in brackets
4180 preceded by a "+" to designate being inside that range, or a "-" to
4181 designate being outside that range, so the condition can be inverted by
4182 flipping the prefix. An omitted bound is represented by a "-". For
4183 example, "- [-, 10]" means being outside the range starting at the lowest
4184 possible value and ending at 10, in other words, being greater than 10.
4185 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4186 always false.
4187
4188 We set up things so that the missing bounds are handled in a consistent
4189 manner so neither a missing bound nor "true" and "false" need to be
4190 handled using a special case. */
4191
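/* Illustrative sketch, not part of GCC: the equivalence quoted above.
   Both predicates accept exactly the values 2, 3, 4 and 5; the second is
   the shape the range-test machinery builds.  */
static int example_chained_comparisons (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}
static int example_single_range_check (int x)
{
  return (unsigned) (x - 2) <= 3;   /* A negative X - 2 wraps to a huge value.  */
}
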
4192 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4193 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4194 and UPPER1_P are nonzero if the respective argument is an upper bound
4195 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4196 must be specified for a comparison. ARG1 will be converted to ARG0's
4197 type if both are specified. */
4198
4199 static tree
4200 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4201 tree arg1, int upper1_p)
4202 {
4203 tree tem;
4204 int result;
4205 int sgn0, sgn1;
4206
4207 /* If neither arg represents infinity, do the normal operation.
4208 Else, if not a comparison, return infinity. Else handle the special
4209 comparison rules. Note that most of the cases below won't occur, but
4210 are handled for consistency. */
4211
4212 if (arg0 != 0 && arg1 != 0)
4213 {
4214 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4215 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4216 STRIP_NOPS (tem);
4217 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4218 }
4219
4220 if (TREE_CODE_CLASS (code) != tcc_comparison)
4221 return 0;
4222
4223 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4224 for neither. In real maths, we cannot assume open ended ranges are
4225 the same. But, this is computer arithmetic, where numbers are finite.
4226 We can therefore model a missing bound as a value Z lying beyond every
4227 representable number: above them all for an upper bound, below them all
4228 for a lower bound. This permits us to treat unbounded ranges as equal. */
4229 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4230 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4231 switch (code)
4232 {
4233 case EQ_EXPR:
4234 result = sgn0 == sgn1;
4235 break;
4236 case NE_EXPR:
4237 result = sgn0 != sgn1;
4238 break;
4239 case LT_EXPR:
4240 result = sgn0 < sgn1;
4241 break;
4242 case LE_EXPR:
4243 result = sgn0 <= sgn1;
4244 break;
4245 case GT_EXPR:
4246 result = sgn0 > sgn1;
4247 break;
4248 case GE_EXPR:
4249 result = sgn0 >= sgn1;
4250 break;
4251 default:
4252 gcc_unreachable ();
4253 }
4254
4255 return constant_boolean_node (result, type);
4256 }
4257 \f
4258 /* Helper routine for make_range. Perform one step for it, return
4259 new expression if the loop should continue or NULL_TREE if it should
4260 stop. */
4261
4262 tree
4263 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4264 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4265 bool *strict_overflow_p)
4266 {
4267 tree arg0_type = TREE_TYPE (arg0);
4268 tree n_low, n_high, low = *p_low, high = *p_high;
4269 int in_p = *p_in_p, n_in_p;
4270
4271 switch (code)
4272 {
4273 case TRUTH_NOT_EXPR:
4274 /* We can only do something if the range is testing for zero. */
4275 if (low == NULL_TREE || high == NULL_TREE
4276 || ! integer_zerop (low) || ! integer_zerop (high))
4277 return NULL_TREE;
4278 *p_in_p = ! in_p;
4279 return arg0;
4280
4281 case EQ_EXPR: case NE_EXPR:
4282 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4283 /* We can only do something if the range is testing for zero
4284 and if the second operand is an integer constant. Note that
4285 saying something is "in" the range we make is done by
4286 complementing IN_P since it will set in the initial case of
4287 being not equal to zero; "out" is leaving it alone. */
4288 if (low == NULL_TREE || high == NULL_TREE
4289 || ! integer_zerop (low) || ! integer_zerop (high)
4290 || TREE_CODE (arg1) != INTEGER_CST)
4291 return NULL_TREE;
4292
4293 switch (code)
4294 {
4295 case NE_EXPR: /* - [c, c] */
4296 low = high = arg1;
4297 break;
4298 case EQ_EXPR: /* + [c, c] */
4299 in_p = ! in_p, low = high = arg1;
4300 break;
4301 case GT_EXPR: /* - [-, c] */
4302 low = 0, high = arg1;
4303 break;
4304 case GE_EXPR: /* + [c, -] */
4305 in_p = ! in_p, low = arg1, high = 0;
4306 break;
4307 case LT_EXPR: /* - [c, -] */
4308 low = arg1, high = 0;
4309 break;
4310 case LE_EXPR: /* + [-, c] */
4311 in_p = ! in_p, low = 0, high = arg1;
4312 break;
4313 default:
4314 gcc_unreachable ();
4315 }
4316
4317 /* If this is an unsigned comparison, we also know that EXP is
4318 greater than or equal to zero. We base the range tests we make
4319 on that fact, so we record it here so we can parse existing
4320 range tests. We test arg0_type since often the return type
4321 of, e.g. EQ_EXPR, is boolean. */
4322 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4323 {
4324 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4325 in_p, low, high, 1,
4326 build_int_cst (arg0_type, 0),
4327 NULL_TREE))
4328 return NULL_TREE;
4329
4330 in_p = n_in_p, low = n_low, high = n_high;
4331
4332 /* If the high bound is missing, but we have a nonzero low
4333 bound, reverse the range so it goes from zero to the low bound
4334 minus 1. */
4335 if (high == 0 && low && ! integer_zerop (low))
4336 {
4337 in_p = ! in_p;
4338 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4339 build_int_cst (TREE_TYPE (low), 1), 0);
4340 low = build_int_cst (arg0_type, 0);
4341 }
4342 }
4343
4344 *p_low = low;
4345 *p_high = high;
4346 *p_in_p = in_p;
4347 return arg0;
4348
4349 case NEGATE_EXPR:
4350 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4351 low and high are non-NULL, then normalize will DTRT. */
4352 if (!TYPE_UNSIGNED (arg0_type)
4353 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4354 {
4355 if (low == NULL_TREE)
4356 low = TYPE_MIN_VALUE (arg0_type);
4357 if (high == NULL_TREE)
4358 high = TYPE_MAX_VALUE (arg0_type);
4359 }
4360
4361 /* (-x) IN [a,b] -> x in [-b, -a] */
4362 n_low = range_binop (MINUS_EXPR, exp_type,
4363 build_int_cst (exp_type, 0),
4364 0, high, 1);
4365 n_high = range_binop (MINUS_EXPR, exp_type,
4366 build_int_cst (exp_type, 0),
4367 0, low, 0);
4368 if (n_high != 0 && TREE_OVERFLOW (n_high))
4369 return NULL_TREE;
4370 goto normalize;
4371
4372 case BIT_NOT_EXPR:
4373 /* ~ X -> -X - 1 */
4374 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4375 build_int_cst (exp_type, 1));
4376
4377 case PLUS_EXPR:
4378 case MINUS_EXPR:
4379 if (TREE_CODE (arg1) != INTEGER_CST)
4380 return NULL_TREE;
4381
4382 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4383 move a constant to the other side. */
4384 if (!TYPE_UNSIGNED (arg0_type)
4385 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4386 return NULL_TREE;
4387
4388 /* If EXP is signed, any overflow in the computation is undefined,
4389 so we don't worry about it so long as our computations on
4390 the bounds don't overflow. For unsigned, overflow is defined
4391 and this is exactly the right thing. */
4392 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4393 arg0_type, low, 0, arg1, 0);
4394 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4395 arg0_type, high, 1, arg1, 0);
4396 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4397 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4398 return NULL_TREE;
4399
4400 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4401 *strict_overflow_p = true;
4402
4403 normalize:
4404 /* Check for an unsigned range which has wrapped around the maximum
4405 value thus making n_high < n_low, and normalize it. */
4406 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4407 {
4408 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4409 build_int_cst (TREE_TYPE (n_high), 1), 0);
4410 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4411 build_int_cst (TREE_TYPE (n_low), 1), 0);
4412
4413 /* If the range is of the form +/- [ x+1, x ], we won't
4414 be able to normalize it. But then, it represents the
4415 whole range or the empty set, so make it
4416 +/- [ -, - ]. */
4417 if (tree_int_cst_equal (n_low, low)
4418 && tree_int_cst_equal (n_high, high))
4419 low = high = 0;
4420 else
4421 in_p = ! in_p;
4422 }
4423 else
4424 low = n_low, high = n_high;
4425
4426 *p_low = low;
4427 *p_high = high;
4428 *p_in_p = in_p;
4429 return arg0;
4430
4431 CASE_CONVERT:
4432 case NON_LVALUE_EXPR:
4433 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4434 return NULL_TREE;
4435
4436 if (! INTEGRAL_TYPE_P (arg0_type)
4437 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4438 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4439 return NULL_TREE;
4440
4441 n_low = low, n_high = high;
4442
4443 if (n_low != 0)
4444 n_low = fold_convert_loc (loc, arg0_type, n_low);
4445
4446 if (n_high != 0)
4447 n_high = fold_convert_loc (loc, arg0_type, n_high);
4448
4449 /* If we're converting arg0 from an unsigned type to exp's signed
4450 type, we will be doing the comparison as unsigned.
4451 The tests above have already verified that LOW and HIGH
4452 are both positive.
4453
4454 So we have to ensure that we will handle large unsigned
4455 values the same way that the current signed bounds treat
4456 negative values. */
4457
4458 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4459 {
4460 tree high_positive;
4461 tree equiv_type;
4462 /* For fixed-point modes, we need to pass the saturating flag
4463 as the 2nd parameter. */
4464 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4465 equiv_type
4466 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4467 TYPE_SATURATING (arg0_type));
4468 else
4469 equiv_type
4470 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4471
4472 /* A range without an upper bound is, naturally, unbounded.
4473 Since convert would have cropped a very large value, use
4474 the max value for the destination type. */
4475 high_positive
4476 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4477 : TYPE_MAX_VALUE (arg0_type);
4478
4479 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4480 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4481 fold_convert_loc (loc, arg0_type,
4482 high_positive),
4483 build_int_cst (arg0_type, 1));
4484
4485 /* If the low bound is specified, "and" the range with the
4486 range for which the original unsigned value will be
4487 positive. */
4488 if (low != 0)
4489 {
4490 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4491 1, fold_convert_loc (loc, arg0_type,
4492 integer_zero_node),
4493 high_positive))
4494 return NULL_TREE;
4495
4496 in_p = (n_in_p == in_p);
4497 }
4498 else
4499 {
4500 /* Otherwise, "or" the range with the range of the input
4501 that will be interpreted as negative. */
4502 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4503 1, fold_convert_loc (loc, arg0_type,
4504 integer_zero_node),
4505 high_positive))
4506 return NULL_TREE;
4507
4508 in_p = (in_p != n_in_p);
4509 }
4510 }
4511
4512 *p_low = n_low;
4513 *p_high = n_high;
4514 *p_in_p = in_p;
4515 return arg0;
4516
4517 default:
4518 return NULL_TREE;
4519 }
4520 }
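
/* Illustrative sketch, not part of GCC: the PLUS_EXPR step together with
   the normalize: wrap-around handling above, in concrete terms.  For an
   unsigned char X, (unsigned char) (X + 10) < 20 gives the wrapped bounds
   [246, 9] after subtracting the constant from [0, 19]; normalization
   flips it into "X outside [10, 245]".  Both predicates accept exactly
   the same values.  */
static int example_wrapped_bounds (unsigned char x)
{
  return (unsigned char) (x + 10) < 20;
}
static int example_normalized_bounds (unsigned char x)
{
  return ! (x >= 10 && x <= 245);
}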
4521
4522 /* Given EXP, a logical expression, set the range it is testing into
4523 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4524 actually being tested. *PLOW and *PHIGH will be made of the same
4525 type as the returned expression. If EXP is not a comparison, we
4526 will most likely not be returning a useful value and range. Set
4527 *STRICT_OVERFLOW_P to true if the return value is only valid
4528 because signed overflow is undefined; otherwise, do not change
4529 *STRICT_OVERFLOW_P. */
4530
4531 tree
4532 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4533 bool *strict_overflow_p)
4534 {
4535 enum tree_code code;
4536 tree arg0, arg1 = NULL_TREE;
4537 tree exp_type, nexp;
4538 int in_p;
4539 tree low, high;
4540 location_t loc = EXPR_LOCATION (exp);
4541
4542 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4543 and see if we can refine the range. Some of the cases below may not
4544 happen, but it doesn't seem worth worrying about this. We iterate as
4545 long as make_range_step is able to refine the range; once it returns
4546 NULL_TREE we stop and use whatever range we have accumulated. */
4547
4548 in_p = 0;
4549 low = high = build_int_cst (TREE_TYPE (exp), 0);
4550
4551 while (1)
4552 {
4553 code = TREE_CODE (exp);
4554 exp_type = TREE_TYPE (exp);
4555 arg0 = NULL_TREE;
4556
4557 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4558 {
4559 if (TREE_OPERAND_LENGTH (exp) > 0)
4560 arg0 = TREE_OPERAND (exp, 0);
4561 if (TREE_CODE_CLASS (code) == tcc_binary
4562 || TREE_CODE_CLASS (code) == tcc_comparison
4563 || (TREE_CODE_CLASS (code) == tcc_expression
4564 && TREE_OPERAND_LENGTH (exp) > 1))
4565 arg1 = TREE_OPERAND (exp, 1);
4566 }
4567 if (arg0 == NULL_TREE)
4568 break;
4569
4570 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4571 &high, &in_p, strict_overflow_p);
4572 if (nexp == NULL_TREE)
4573 break;
4574 exp = nexp;
4575 }
4576
4577 /* If EXP is a constant, we can evaluate whether this is true or false. */
4578 if (TREE_CODE (exp) == INTEGER_CST)
4579 {
4580 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4581 exp, 0, low, 0))
4582 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4583 exp, 1, high, 1)));
4584 low = high = 0;
4585 exp = 0;
4586 }
4587
4588 *pin_p = in_p, *plow = low, *phigh = high;
4589 return exp;
4590 }
4591 \f
4592 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4593 type, TYPE, return an expression to test if EXP is in (or out of, depending
4594 on IN_P) the range. Return 0 if the test couldn't be created. */
4595
4596 tree
4597 build_range_check (location_t loc, tree type, tree exp, int in_p,
4598 tree low, tree high)
4599 {
4600 tree etype = TREE_TYPE (exp), value;
4601
4602 /* Disable this optimization for function pointer expressions
4603 on targets that require function pointer canonicalization. */
4604 if (targetm.have_canonicalize_funcptr_for_compare ()
4605 && TREE_CODE (etype) == POINTER_TYPE
4606 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4607 return NULL_TREE;
4608
4609 if (! in_p)
4610 {
4611 value = build_range_check (loc, type, exp, 1, low, high);
4612 if (value != 0)
4613 return invert_truthvalue_loc (loc, value);
4614
4615 return 0;
4616 }
4617
4618 if (low == 0 && high == 0)
4619 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4620
4621 if (low == 0)
4622 return fold_build2_loc (loc, LE_EXPR, type, exp,
4623 fold_convert_loc (loc, etype, high));
4624
4625 if (high == 0)
4626 return fold_build2_loc (loc, GE_EXPR, type, exp,
4627 fold_convert_loc (loc, etype, low));
4628
4629 if (operand_equal_p (low, high, 0))
4630 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4631 fold_convert_loc (loc, etype, low));
4632
4633 if (integer_zerop (low))
4634 {
4635 if (! TYPE_UNSIGNED (etype))
4636 {
4637 etype = unsigned_type_for (etype);
4638 high = fold_convert_loc (loc, etype, high);
4639 exp = fold_convert_loc (loc, etype, exp);
4640 }
4641 return build_range_check (loc, type, exp, 1, 0, high);
4642 }
4643
4644 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4645 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4646 {
4647 int prec = TYPE_PRECISION (etype);
4648
4649 if (wi::mask (prec - 1, false, prec) == high)
4650 {
4651 if (TYPE_UNSIGNED (etype))
4652 {
4653 tree signed_etype = signed_type_for (etype);
4654 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4655 etype
4656 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4657 else
4658 etype = signed_etype;
4659 exp = fold_convert_loc (loc, etype, exp);
4660 }
4661 return fold_build2_loc (loc, GT_EXPR, type, exp,
4662 build_int_cst (etype, 0));
4663 }
4664 }
4665
4666 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4667 This requires wrap-around arithmetic for the type of the expression.
4668 First make sure that arithmetic in this type is valid, then make sure
4669 that it wraps around. */
4670 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4671 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4672 TYPE_UNSIGNED (etype));
4673
4674 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4675 {
4676 tree utype, minv, maxv;
4677
4678 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4679 for the type in question, as we rely on this here. */
4680 utype = unsigned_type_for (etype);
4681 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4682 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4683 build_int_cst (TREE_TYPE (maxv), 1), 1);
4684 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4685
4686 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4687 minv, 1, maxv, 1)))
4688 etype = utype;
4689 else
4690 return 0;
4691 }
4692
4693 high = fold_convert_loc (loc, etype, high);
4694 low = fold_convert_loc (loc, etype, low);
4695 exp = fold_convert_loc (loc, etype, exp);
4696
4697 value = const_binop (MINUS_EXPR, high, low);
4698
4699
4700 if (POINTER_TYPE_P (etype))
4701 {
4702 if (value != 0 && !TREE_OVERFLOW (value))
4703 {
4704 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4705 return build_range_check (loc, type,
4706 fold_build_pointer_plus_loc (loc, exp, low),
4707 1, build_int_cst (etype, 0), value);
4708 }
4709 return 0;
4710 }
4711
4712 if (value != 0 && !TREE_OVERFLOW (value))
4713 return build_range_check (loc, type,
4714 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4715 1, build_int_cst (etype, 0), value);
4716
4717 return 0;
4718 }
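
/* Illustrative sketch, not part of GCC: the (c>=1) && (c<=127) special
   case handled above.  On the usual two's-complement targets,
   reinterpreting the unsigned char as signed turns the two compares into
   a single sign test.  */
static int example_two_compares (unsigned char c)
{
  return c >= 1 && c <= 127;
}
static int example_sign_test (unsigned char c)
{
  return (signed char) c > 0;    /* 0 fails; 128..255 become negative.  */
}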
4719 \f
4720 /* Return the predecessor of VAL in its type, handling the infinite case. */
4721
4722 static tree
4723 range_predecessor (tree val)
4724 {
4725 tree type = TREE_TYPE (val);
4726
4727 if (INTEGRAL_TYPE_P (type)
4728 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4729 return 0;
4730 else
4731 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4732 build_int_cst (TREE_TYPE (val), 1), 0);
4733 }
4734
4735 /* Return the successor of VAL in its type, handling the infinite case. */
4736
4737 static tree
4738 range_successor (tree val)
4739 {
4740 tree type = TREE_TYPE (val);
4741
4742 if (INTEGRAL_TYPE_P (type)
4743 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4744 return 0;
4745 else
4746 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4747 build_int_cst (TREE_TYPE (val), 1), 0);
4748 }
4749
4750 /* Given two ranges, see if we can merge them into one. Return 1 if we
4751 can, 0 if we can't. Set the output range into the specified parameters. */
4752
4753 bool
4754 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4755 tree high0, int in1_p, tree low1, tree high1)
4756 {
4757 int no_overlap;
4758 int subset;
4759 int temp;
4760 tree tem;
4761 int in_p;
4762 tree low, high;
4763 int lowequal = ((low0 == 0 && low1 == 0)
4764 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4765 low0, 0, low1, 0)));
4766 int highequal = ((high0 == 0 && high1 == 0)
4767 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4768 high0, 1, high1, 1)));
4769
4770 /* Make range 0 be the range that starts first, or ends last if they
4771 start at the same value. Swap them if it isn't. */
4772 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4773 low0, 0, low1, 0))
4774 || (lowequal
4775 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4776 high1, 1, high0, 1))))
4777 {
4778 temp = in0_p, in0_p = in1_p, in1_p = temp;
4779 tem = low0, low0 = low1, low1 = tem;
4780 tem = high0, high0 = high1, high1 = tem;
4781 }
4782
4783 /* Now flag two cases, whether the ranges are disjoint or whether the
4784 second range is totally subsumed in the first. Note that the tests
4785 below are simplified by the ones above. */
4786 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4787 high0, 1, low1, 0));
4788 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4789 high1, 1, high0, 1));
4790
4791 /* We now have four cases, depending on whether we are including or
4792 excluding the two ranges. */
4793 if (in0_p && in1_p)
4794 {
4795 /* If they don't overlap, the result is false. If the second range
4796 is a subset it is the result. Otherwise, the range is from the start
4797 of the second to the end of the first. */
4798 if (no_overlap)
4799 in_p = 0, low = high = 0;
4800 else if (subset)
4801 in_p = 1, low = low1, high = high1;
4802 else
4803 in_p = 1, low = low1, high = high0;
4804 }
4805
4806 else if (in0_p && ! in1_p)
4807 {
4808 /* If they don't overlap, the result is the first range. If they are
4809 equal, the result is false. If the second range is a subset of the
4810 first, and the ranges begin at the same place, we go from just after
4811 the end of the second range to the end of the first. If the second
4812 range is not a subset of the first, or if it is a subset and both
4813 ranges end at the same place, the range starts at the start of the
4814 first range and ends just before the second range.
4815 Otherwise, we can't describe this as a single range. */
4816 if (no_overlap)
4817 in_p = 1, low = low0, high = high0;
4818 else if (lowequal && highequal)
4819 in_p = 0, low = high = 0;
4820 else if (subset && lowequal)
4821 {
4822 low = range_successor (high1);
4823 high = high0;
4824 in_p = 1;
4825 if (low == 0)
4826 {
4827 /* We are in the weird situation where high0 > high1 but
4828 high1 has no successor. Punt. */
4829 return 0;
4830 }
4831 }
4832 else if (! subset || highequal)
4833 {
4834 low = low0;
4835 high = range_predecessor (low1);
4836 in_p = 1;
4837 if (high == 0)
4838 {
4839 /* low0 < low1 but low1 has no predecessor. Punt. */
4840 return 0;
4841 }
4842 }
4843 else
4844 return 0;
4845 }
4846
4847 else if (! in0_p && in1_p)
4848 {
4849 /* If they don't overlap, the result is the second range. If the second
4850 is a subset of the first, the result is false. Otherwise,
4851 the range starts just after the first range and ends at the
4852 end of the second. */
4853 if (no_overlap)
4854 in_p = 1, low = low1, high = high1;
4855 else if (subset || highequal)
4856 in_p = 0, low = high = 0;
4857 else
4858 {
4859 low = range_successor (high0);
4860 high = high1;
4861 in_p = 1;
4862 if (low == 0)
4863 {
4864 /* high1 > high0 but high0 has no successor. Punt. */
4865 return 0;
4866 }
4867 }
4868 }
4869
4870 else
4871 {
4872 /* The case where we are excluding both ranges. Here the complex case
4873 is if they don't overlap. In that case, the only time we have a
4874 range is if they are adjacent. If the second is a subset of the
4875 first, the result is the first. Otherwise, the range to exclude
4876 starts at the beginning of the first range and ends at the end of the
4877 second. */
4878 if (no_overlap)
4879 {
4880 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4881 range_successor (high0),
4882 1, low1, 0)))
4883 in_p = 0, low = low0, high = high1;
4884 else
4885 {
4886 /* Canonicalize - [min, x] into - [-, x]. */
4887 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4888 switch (TREE_CODE (TREE_TYPE (low0)))
4889 {
4890 case ENUMERAL_TYPE:
4891 if (TYPE_PRECISION (TREE_TYPE (low0))
4892 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4893 break;
4894 /* FALLTHROUGH */
4895 case INTEGER_TYPE:
4896 if (tree_int_cst_equal (low0,
4897 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4898 low0 = 0;
4899 break;
4900 case POINTER_TYPE:
4901 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4902 && integer_zerop (low0))
4903 low0 = 0;
4904 break;
4905 default:
4906 break;
4907 }
4908
4909 /* Canonicalize - [x, max] into - [x, -]. */
4910 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4911 switch (TREE_CODE (TREE_TYPE (high1)))
4912 {
4913 case ENUMERAL_TYPE:
4914 if (TYPE_PRECISION (TREE_TYPE (high1))
4915 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4916 break;
4917 /* FALLTHROUGH */
4918 case INTEGER_TYPE:
4919 if (tree_int_cst_equal (high1,
4920 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4921 high1 = 0;
4922 break;
4923 case POINTER_TYPE:
4924 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4925 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4926 high1, 1,
4927 build_int_cst (TREE_TYPE (high1), 1),
4928 1)))
4929 high1 = 0;
4930 break;
4931 default:
4932 break;
4933 }
4934
4935 /* The ranges might also be adjacent between the maximum and
4936 minimum values of the given type. For
4937 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4938 return + [x + 1, y - 1]. */
4939 if (low0 == 0 && high1 == 0)
4940 {
4941 low = range_successor (high0);
4942 high = range_predecessor (low1);
4943 if (low == 0 || high == 0)
4944 return 0;
4945
4946 in_p = 1;
4947 }
4948 else
4949 return 0;
4950 }
4951 }
4952 else if (subset)
4953 in_p = 0, low = low0, high = high0;
4954 else
4955 in_p = 0, low = low0, high = high1;
4956 }
4957
4958 *pin_p = in_p, *plow = low, *phigh = high;
4959 return 1;
4960 }
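
/* Illustrative sketch, not part of GCC: the in0_p && in1_p case above.
   Intersecting the "in" ranges [2, 10] and [5, 15] yields [5, 10], i.e.
   the start of the second range to the end of the first.  */
static int example_two_range_tests (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 15);
}
static int example_merged_range_test (int x)
{
  return x >= 5 && x <= 10;      /* Same set of accepted values.  */
}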
4961 \f
4962
4963 /* Subroutine of fold, looking inside expressions of the form
4964 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4965 of the COND_EXPR. This function is being used also to optimize
4966 A op B ? C : A, by reversing the comparison first.
4967
4968 Return a folded expression whose code is not a COND_EXPR
4969 anymore, or NULL_TREE if no folding opportunity is found. */
4970
4971 static tree
4972 fold_cond_expr_with_comparison (location_t loc, tree type,
4973 tree arg0, tree arg1, tree arg2)
4974 {
4975 enum tree_code comp_code = TREE_CODE (arg0);
4976 tree arg00 = TREE_OPERAND (arg0, 0);
4977 tree arg01 = TREE_OPERAND (arg0, 1);
4978 tree arg1_type = TREE_TYPE (arg1);
4979 tree tem;
4980
4981 STRIP_NOPS (arg1);
4982 STRIP_NOPS (arg2);
4983
4984 /* If we have A op 0 ? A : -A, consider applying the following
4985 transformations:
4986
4987 A == 0? A : -A same as -A
4988 A != 0? A : -A same as A
4989 A >= 0? A : -A same as abs (A)
4990 A > 0? A : -A same as abs (A)
4991 A <= 0? A : -A same as -abs (A)
4992 A < 0? A : -A same as -abs (A)
4993
4994 None of these transformations work for modes with signed
4995 zeros. If A is +/-0, the first two transformations will
4996 change the sign of the result (from +0 to -0, or vice
4997 versa). The last four will fix the sign of the result,
4998 even though the original expressions could be positive or
4999 negative, depending on the sign of A.
5000
5001 Note that all these transformations are correct if A is
5002 NaN, since the two alternatives (A and -A) are also NaNs. */
5003 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5004 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5005 ? real_zerop (arg01)
5006 : integer_zerop (arg01))
5007 && ((TREE_CODE (arg2) == NEGATE_EXPR
5008 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5009 /* In the case that A is of the form X-Y, '-A' (arg2) may
5010 have already been folded to Y-X, check for that. */
5011 || (TREE_CODE (arg1) == MINUS_EXPR
5012 && TREE_CODE (arg2) == MINUS_EXPR
5013 && operand_equal_p (TREE_OPERAND (arg1, 0),
5014 TREE_OPERAND (arg2, 1), 0)
5015 && operand_equal_p (TREE_OPERAND (arg1, 1),
5016 TREE_OPERAND (arg2, 0), 0))))
5017 switch (comp_code)
5018 {
5019 case EQ_EXPR:
5020 case UNEQ_EXPR:
5021 tem = fold_convert_loc (loc, arg1_type, arg1);
5022 return pedantic_non_lvalue_loc (loc,
5023 fold_convert_loc (loc, type,
5024 negate_expr (tem)));
5025 case NE_EXPR:
5026 case LTGT_EXPR:
5027 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5028 case UNGE_EXPR:
5029 case UNGT_EXPR:
5030 if (flag_trapping_math)
5031 break;
5032 /* Fall through. */
5033 case GE_EXPR:
5034 case GT_EXPR:
5035 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5036 break;
5037 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5038 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5039 case UNLE_EXPR:
5040 case UNLT_EXPR:
5041 if (flag_trapping_math)
5042 break;
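      /* Fall through. */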
5043 case LE_EXPR:
5044 case LT_EXPR:
5045 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5046 break;
5047 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5048 return negate_expr (fold_convert_loc (loc, type, tem));
5049 default:
5050 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5051 break;
5052 }
5053
5054 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5055 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5056 both transformations are correct when A is NaN: A != 0
5057 is then true, and A == 0 is false. */
5058
5059 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5060 && integer_zerop (arg01) && integer_zerop (arg2))
5061 {
5062 if (comp_code == NE_EXPR)
5063 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5064 else if (comp_code == EQ_EXPR)
5065 return build_zero_cst (type);
5066 }
5067
5068 /* Try some transformations of A op B ? A : B.
5069
5070 A == B? A : B same as B
5071 A != B? A : B same as A
5072 A >= B? A : B same as max (A, B)
5073 A > B? A : B same as max (B, A)
5074 A <= B? A : B same as min (A, B)
5075 A < B? A : B same as min (B, A)
5076
5077 As above, these transformations don't work in the presence
5078 of signed zeros. For example, if A and B are zeros of
5079 opposite sign, the first two transformations will change
5080 the sign of the result. In the last four, the original
5081 expressions give different results for (A=+0, B=-0) and
5082 (A=-0, B=+0), but the transformed expressions do not.
5083
5084 The first two transformations are correct if either A or B
5085 is a NaN. In the first transformation, the condition will
5086 be false, and B will indeed be chosen. In the case of the
5087 second transformation, the condition A != B will be true,
5088 and A will be chosen.
5089
5090 The conversions to max() and min() are not correct if B is
5091 a number and A is not. The conditions in the original
5092 expressions will be false, so all four give B. The min()
5093 and max() versions would give a NaN instead. */
5094 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5095 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5096 /* Avoid these transformations if the COND_EXPR may be used
5097 as an lvalue in the C++ front-end. PR c++/19199. */
5098 && (in_gimple_form
5099 || VECTOR_TYPE_P (type)
5100 || (! lang_GNU_CXX ()
5101 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5102 || ! maybe_lvalue_p (arg1)
5103 || ! maybe_lvalue_p (arg2)))
5104 {
5105 tree comp_op0 = arg00;
5106 tree comp_op1 = arg01;
5107 tree comp_type = TREE_TYPE (comp_op0);
5108
5109 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5110 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5111 {
5112 comp_type = type;
5113 comp_op0 = arg1;
5114 comp_op1 = arg2;
5115 }
5116
5117 switch (comp_code)
5118 {
5119 case EQ_EXPR:
5120 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5121 case NE_EXPR:
5122 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5123 case LE_EXPR:
5124 case LT_EXPR:
5125 case UNLE_EXPR:
5126 case UNLT_EXPR:
5127 /* In C++ a ?: expression can be an lvalue, so put the
5128 operand which will be used if they are equal first
5129 so that we can convert this back to the
5130 corresponding COND_EXPR. */
5131 if (!HONOR_NANS (arg1))
5132 {
5133 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5134 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5135 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5136 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5137 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5138 comp_op1, comp_op0);
5139 return pedantic_non_lvalue_loc (loc,
5140 fold_convert_loc (loc, type, tem));
5141 }
5142 break;
5143 case GE_EXPR:
5144 case GT_EXPR:
5145 case UNGE_EXPR:
5146 case UNGT_EXPR:
5147 if (!HONOR_NANS (arg1))
5148 {
5149 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5150 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5151 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5152 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5153 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5154 comp_op1, comp_op0);
5155 return pedantic_non_lvalue_loc (loc,
5156 fold_convert_loc (loc, type, tem));
5157 }
5158 break;
5159 case UNEQ_EXPR:
5160 if (!HONOR_NANS (arg1))
5161 return pedantic_non_lvalue_loc (loc,
5162 fold_convert_loc (loc, type, arg2));
5163 break;
5164 case LTGT_EXPR:
5165 if (!HONOR_NANS (arg1))
5166 return pedantic_non_lvalue_loc (loc,
5167 fold_convert_loc (loc, type, arg1));
5168 break;
5169 default:
5170 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5171 break;
5172 }
5173 }
5174
5175 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5176 we might still be able to simplify this. For example,
5177 if C1 is one less or one more than C2, this might have started
5178 out as a MIN or MAX and been transformed by this function.
5179 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5180
5181 if (INTEGRAL_TYPE_P (type)
5182 && TREE_CODE (arg01) == INTEGER_CST
5183 && TREE_CODE (arg2) == INTEGER_CST)
5184 switch (comp_code)
5185 {
5186 case EQ_EXPR:
5187 if (TREE_CODE (arg1) == INTEGER_CST)
5188 break;
5189 /* We can replace A with C1 in this case. */
5190 arg1 = fold_convert_loc (loc, type, arg01);
5191 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5192
5193 case LT_EXPR:
5194 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5195 MIN_EXPR, to preserve the signedness of the comparison. */
5196 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5197 OEP_ONLY_CONST)
5198 && operand_equal_p (arg01,
5199 const_binop (PLUS_EXPR, arg2,
5200 build_int_cst (type, 1)),
5201 OEP_ONLY_CONST))
5202 {
5203 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5204 fold_convert_loc (loc, TREE_TYPE (arg00),
5205 arg2));
5206 return pedantic_non_lvalue_loc (loc,
5207 fold_convert_loc (loc, type, tem));
5208 }
5209 break;
5210
5211 case LE_EXPR:
5212 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5213 as above. */
5214 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5215 OEP_ONLY_CONST)
5216 && operand_equal_p (arg01,
5217 const_binop (MINUS_EXPR, arg2,
5218 build_int_cst (type, 1)),
5219 OEP_ONLY_CONST))
5220 {
5221 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5222 fold_convert_loc (loc, TREE_TYPE (arg00),
5223 arg2));
5224 return pedantic_non_lvalue_loc (loc,
5225 fold_convert_loc (loc, type, tem));
5226 }
5227 break;
5228
5229 case GT_EXPR:
5230 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5231 MAX_EXPR, to preserve the signedness of the comparison. */
5232 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5233 OEP_ONLY_CONST)
5234 && operand_equal_p (arg01,
5235 const_binop (MINUS_EXPR, arg2,
5236 build_int_cst (type, 1)),
5237 OEP_ONLY_CONST))
5238 {
5239 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5240 fold_convert_loc (loc, TREE_TYPE (arg00),
5241 arg2));
5242 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5243 }
5244 break;
5245
5246 case GE_EXPR:
5247 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5248 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5249 OEP_ONLY_CONST)
5250 && operand_equal_p (arg01,
5251 const_binop (PLUS_EXPR, arg2,
5252 build_int_cst (type, 1)),
5253 OEP_ONLY_CONST))
5254 {
5255 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5256 fold_convert_loc (loc, TREE_TYPE (arg00),
5257 arg2));
5258 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5259 }
5260 break;
5261 case NE_EXPR:
5262 break;
5263 default:
5264 gcc_unreachable ();
5265 }
5266
5267 return NULL_TREE;
5268 }
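
/* Illustrative sketch, not part of GCC: two entries from the
   transformation tables above, written out in C.  They are only applied
   when signed zeros need not be honored (and, for the ordered compares,
   when NaN behavior permits).  */
static double example_abs_shape (double a)
{
  return a >= 0 ? a : -a;        /* A >= 0 ? A : -A  ==>  abs (A).  */
}
static double example_max_shape (double a, double b)
{
  return a > b ? a : b;          /* A > B ? A : B  ==>  max (B, A).  */
}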
5269
5270
5271 \f
5272 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5273 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5274 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5275 false) >= 2)
5276 #endif
5277
5278 /* EXP is some logical combination of boolean tests. See if we can
5279 merge it into some range test. Return the new tree if so. */
5280
5281 static tree
5282 fold_range_test (location_t loc, enum tree_code code, tree type,
5283 tree op0, tree op1)
5284 {
5285 int or_op = (code == TRUTH_ORIF_EXPR
5286 || code == TRUTH_OR_EXPR);
5287 int in0_p, in1_p, in_p;
5288 tree low0, low1, low, high0, high1, high;
5289 bool strict_overflow_p = false;
5290 tree tem, lhs, rhs;
5291 const char * const warnmsg = G_("assuming signed overflow does not occur "
5292 "when simplifying range test");
5293
5294 if (!INTEGRAL_TYPE_P (type))
5295 return 0;
5296
5297 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5298 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5299
5300 /* If this is an OR operation, invert both sides; we will invert
5301 again at the end. */
5302 if (or_op)
5303 in0_p = ! in0_p, in1_p = ! in1_p;
5304
5305 /* If both expressions are the same, if we can merge the ranges, and we
5306 can build the range test, return it or it inverted. If one of the
5307 ranges is always true or always false, consider it to be the same
5308 expression as the other. */
5309 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5310 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5311 in1_p, low1, high1)
5312 && 0 != (tem = (build_range_check (loc, type,
5313 lhs != 0 ? lhs
5314 : rhs != 0 ? rhs : integer_zero_node,
5315 in_p, low, high))))
5316 {
5317 if (strict_overflow_p)
5318 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5319 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5320 }
5321
5322 /* On machines where the branch cost is expensive, if this is a
5323 short-circuited branch and the underlying object on both sides
5324 is the same, make a non-short-circuit operation. */
5325 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5326 && lhs != 0 && rhs != 0
5327 && (code == TRUTH_ANDIF_EXPR
5328 || code == TRUTH_ORIF_EXPR)
5329 && operand_equal_p (lhs, rhs, 0))
5330 {
5331 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5332 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5333 which cases we can't do this. */
5334 if (simple_operand_p (lhs))
5335 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5336 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5337 type, op0, op1);
5338
5339 else if (!lang_hooks.decls.global_bindings_p ()
5340 && !CONTAINS_PLACEHOLDER_P (lhs))
5341 {
5342 tree common = save_expr (lhs);
5343
5344 if (0 != (lhs = build_range_check (loc, type, common,
5345 or_op ? ! in0_p : in0_p,
5346 low0, high0))
5347 && (0 != (rhs = build_range_check (loc, type, common,
5348 or_op ? ! in1_p : in1_p,
5349 low1, high1))))
5350 {
5351 if (strict_overflow_p)
5352 fold_overflow_warning (warnmsg,
5353 WARN_STRICT_OVERFLOW_COMPARISON);
5354 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5355 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5356 type, lhs, rhs);
5357 }
5358 }
5359 }
5360
5361 return 0;
5362 }
5363 \f
5364 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5365 bit value. Arrange things so the extra bits will be set to zero if and
5366 only if C is sign-extended to its full width. If MASK is nonzero,
5367 it is an INTEGER_CST that should be AND'ed with the extra bits. */
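/* For example, assuming an 8-bit mode (so MODESIZE == 8) with P == 4
   and C == 0b1100: the sign bit of the four-bit value is set, the
   shifts below turn it into TEMP == 0xF0, and the final XOR yields
   0xFC, i.e. C sign-extended to the full width. With C == 0b0100 the
   sign bit is clear, TEMP is zero, and C comes back unchanged as 0x04.  */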
5368
5369 static tree
5370 unextend (tree c, int p, int unsignedp, tree mask)
5371 {
5372 tree type = TREE_TYPE (c);
5373 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5374 tree temp;
5375
5376 if (p == modesize || unsignedp)
5377 return c;
5378
5379 /* We work by getting just the sign bit into the low-order bit, then
5380 into the high-order bit, then sign-extend. We then XOR that value
5381 with C. */
5382 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5383
5384 /* We must use a signed type in order to get an arithmetic right shift.
5385 However, we must also avoid introducing accidental overflows, so that
5386 a subsequent call to integer_zerop will work. Hence we must
5387 do the type conversion here. At this point, the constant is either
5388 zero or one, and the conversion to a signed type can never overflow.
5389 We could get an overflow if this conversion is done anywhere else. */
5390 if (TYPE_UNSIGNED (type))
5391 temp = fold_convert (signed_type_for (type), temp);
5392
5393 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5394 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5395 if (mask != 0)
5396 temp = const_binop (BIT_AND_EXPR, temp,
5397 fold_convert (TREE_TYPE (c), mask));
5398 /* If necessary, convert the type back to match the type of C. */
5399 if (TYPE_UNSIGNED (type))
5400 temp = fold_convert (type, temp);
5401
5402 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5403 }
5404 \f
5405 /* For an expression that has the form
5406 (A && B) || ~B
5407 or
5408 (A || B) && ~B,
5409 we can drop one of the inner expressions and simplify to
5410 A || ~B
5411 or
5412 A && ~B
5413 LOC is the location of the resulting expression. OP is the inner
5414 logical operation (the left-hand side in the examples above) and CMPOP
5415 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5416 removing a condition that guards another, as in
5417 (A != NULL && A->...) || A == NULL
5418 which we must not transform. If RHS_ONLY is true, only eliminate the
5419 right-most operand of the inner logical operation. */
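/* For example, with OP == (a > 0 && b > 0) and CMPOP == (a <= 0),
   the inverse of CMPOP matches the left arm of OP, so the caller's
   (a > 0 && b > 0) || a <= 0 can become b > 0 || a <= 0 (assuming
   RHS_ONLY is false, so the left arm may be dropped).  */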
5420
5421 static tree
5422 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5423 bool rhs_only)
5424 {
5425 tree type = TREE_TYPE (cmpop);
5426 enum tree_code code = TREE_CODE (cmpop);
5427 enum tree_code truthop_code = TREE_CODE (op);
5428 tree lhs = TREE_OPERAND (op, 0);
5429 tree rhs = TREE_OPERAND (op, 1);
5430 tree orig_lhs = lhs, orig_rhs = rhs;
5431 enum tree_code rhs_code = TREE_CODE (rhs);
5432 enum tree_code lhs_code = TREE_CODE (lhs);
5433 enum tree_code inv_code;
5434
5435 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5436 return NULL_TREE;
5437
5438 if (TREE_CODE_CLASS (code) != tcc_comparison)
5439 return NULL_TREE;
5440
5441 if (rhs_code == truthop_code)
5442 {
5443 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5444 if (newrhs != NULL_TREE)
5445 {
5446 rhs = newrhs;
5447 rhs_code = TREE_CODE (rhs);
5448 }
5449 }
5450 if (lhs_code == truthop_code && !rhs_only)
5451 {
5452 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5453 if (newlhs != NULL_TREE)
5454 {
5455 lhs = newlhs;
5456 lhs_code = TREE_CODE (lhs);
5457 }
5458 }
5459
5460 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5461 if (inv_code == rhs_code
5462 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5463 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5464 return lhs;
5465 if (!rhs_only && inv_code == lhs_code
5466 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5467 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5468 return rhs;
5469 if (rhs != orig_rhs || lhs != orig_lhs)
5470 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5471 lhs, rhs);
5472 return NULL_TREE;
5473 }
5474
5475 /* Find ways of folding logical expressions of LHS and RHS:
5476 Try to merge two comparisons to the same innermost item.
5477 Look for range tests like "ch >= '0' && ch <= '9'".
5478 Look for combinations of simple terms on machines with expensive branches
5479 and evaluate the RHS unconditionally.
5480
5481 For example, if we have p->a == 2 && p->b == 4 and we can make an
5482 object large enough to span both A and B, we can do this with a comparison
5483 against the object ANDed with the a mask.
5484
5485 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5486 operations to do this with one comparison.
5487
5488 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5489 function and the one above.
5490
5491 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5492 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5493
5494 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5495 two operands.
5496
5497 We return the simplified tree or 0 if no optimization is possible. */
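/* A sketch of the intended effect (exact mask and constant depend on
   endianness and layout): given

     struct s { unsigned a : 8; unsigned b : 8; } *p;

   the test p->a == 2 && p->b == 4 reads both fields from one 16-bit
   word, so it can become a single masked comparison of that word
   against a merged constant such as 0x0402.  */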
5498
5499 static tree
5500 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5501 tree lhs, tree rhs)
5502 {
5503 /* If this is the "or" of two comparisons, we can do something if
5504 the comparisons are NE_EXPR. If this is the "and", we can do something
5505 if the comparisons are EQ_EXPR. I.e.,
5506 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5507
5508 WANTED_CODE is this operation code. For single bit fields, we can
5509 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5510 comparison for one-bit fields. */
5511
5512 enum tree_code wanted_code;
5513 enum tree_code lcode, rcode;
5514 tree ll_arg, lr_arg, rl_arg, rr_arg;
5515 tree ll_inner, lr_inner, rl_inner, rr_inner;
5516 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5517 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5518 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5519 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5520 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5521 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5522 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5523 machine_mode lnmode, rnmode;
5524 tree ll_mask, lr_mask, rl_mask, rr_mask;
5525 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5526 tree l_const, r_const;
5527 tree lntype, rntype, result;
5528 HOST_WIDE_INT first_bit, end_bit;
5529 int volatilep;
5530
5531 /* Start by getting the comparison codes. Fail if anything is volatile.
5532 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5533 it were surrounded with a NE_EXPR. */
5534
5535 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5536 return 0;
5537
5538 lcode = TREE_CODE (lhs);
5539 rcode = TREE_CODE (rhs);
5540
5541 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5542 {
5543 lhs = build2 (NE_EXPR, truth_type, lhs,
5544 build_int_cst (TREE_TYPE (lhs), 0));
5545 lcode = NE_EXPR;
5546 }
5547
5548 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5549 {
5550 rhs = build2 (NE_EXPR, truth_type, rhs,
5551 build_int_cst (TREE_TYPE (rhs), 0));
5552 rcode = NE_EXPR;
5553 }
5554
5555 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5556 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5557 return 0;
5558
5559 ll_arg = TREE_OPERAND (lhs, 0);
5560 lr_arg = TREE_OPERAND (lhs, 1);
5561 rl_arg = TREE_OPERAND (rhs, 0);
5562 rr_arg = TREE_OPERAND (rhs, 1);
5563
5564 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5565 if (simple_operand_p (ll_arg)
5566 && simple_operand_p (lr_arg))
5567 {
5568 if (operand_equal_p (ll_arg, rl_arg, 0)
5569 && operand_equal_p (lr_arg, rr_arg, 0))
5570 {
5571 result = combine_comparisons (loc, code, lcode, rcode,
5572 truth_type, ll_arg, lr_arg);
5573 if (result)
5574 return result;
5575 }
5576 else if (operand_equal_p (ll_arg, rr_arg, 0)
5577 && operand_equal_p (lr_arg, rl_arg, 0))
5578 {
5579 result = combine_comparisons (loc, code, lcode,
5580 swap_tree_comparison (rcode),
5581 truth_type, ll_arg, lr_arg);
5582 if (result)
5583 return result;
5584 }
5585 }
5586
5587 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5588 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5589
5590 /* If the RHS can be evaluated unconditionally and its operands are
5591 simple, it wins to evaluate the RHS unconditionally on machines
5592 with expensive branches. In this case, this isn't a comparison
5593 that can be merged. */
5594
5595 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5596 false) >= 2
5597 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5598 && simple_operand_p (rl_arg)
5599 && simple_operand_p (rr_arg))
5600 {
5601 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5602 if (code == TRUTH_OR_EXPR
5603 && lcode == NE_EXPR && integer_zerop (lr_arg)
5604 && rcode == NE_EXPR && integer_zerop (rr_arg)
5605 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5606 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5607 return build2_loc (loc, NE_EXPR, truth_type,
5608 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5609 ll_arg, rl_arg),
5610 build_int_cst (TREE_TYPE (ll_arg), 0));
5611
5612 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5613 if (code == TRUTH_AND_EXPR
5614 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5615 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5616 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5617 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5618 return build2_loc (loc, EQ_EXPR, truth_type,
5619 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5620 ll_arg, rl_arg),
5621 build_int_cst (TREE_TYPE (ll_arg), 0));
5622 }
5623
5624 /* See if the comparisons can be merged. Then get all the parameters for
5625 each side. */
5626
5627 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5628 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5629 return 0;
5630
5631 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5632 volatilep = 0;
5633 ll_inner = decode_field_reference (loc, ll_arg,
5634 &ll_bitsize, &ll_bitpos, &ll_mode,
5635 &ll_unsignedp, &ll_reversep, &volatilep,
5636 &ll_mask, &ll_and_mask);
5637 lr_inner = decode_field_reference (loc, lr_arg,
5638 &lr_bitsize, &lr_bitpos, &lr_mode,
5639 &lr_unsignedp, &lr_reversep, &volatilep,
5640 &lr_mask, &lr_and_mask);
5641 rl_inner = decode_field_reference (loc, rl_arg,
5642 &rl_bitsize, &rl_bitpos, &rl_mode,
5643 &rl_unsignedp, &rl_reversep, &volatilep,
5644 &rl_mask, &rl_and_mask);
5645 rr_inner = decode_field_reference (loc, rr_arg,
5646 &rr_bitsize, &rr_bitpos, &rr_mode,
5647 &rr_unsignedp, &rr_reversep, &volatilep,
5648 &rr_mask, &rr_and_mask);
5649
5650 /* The inner operation on the lhs of each comparison must be the
5651 same if we are to be able to do anything.
5652 Then see if we have constants. If not, the same must be true for
5653 the rhs's. */
5654 if (volatilep
5655 || ll_reversep != rl_reversep
5656 || ll_inner == 0 || rl_inner == 0
5657 || ! operand_equal_p (ll_inner, rl_inner, 0))
5658 return 0;
5659
5660 if (TREE_CODE (lr_arg) == INTEGER_CST
5661 && TREE_CODE (rr_arg) == INTEGER_CST)
5662 {
5663 l_const = lr_arg, r_const = rr_arg;
5664 lr_reversep = ll_reversep;
5665 }
5666 else if (lr_reversep != rr_reversep
5667 || lr_inner == 0 || rr_inner == 0
5668 || ! operand_equal_p (lr_inner, rr_inner, 0))
5669 return 0;
5670 else
5671 l_const = r_const = 0;
5672
5673 /* If either comparison code is not correct for our logical operation,
5674 fail. However, we can convert a one-bit comparison against zero into
5675 the opposite comparison against that bit being set in the field. */
5676
5677 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5678 if (lcode != wanted_code)
5679 {
5680 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5681 {
5682 /* Make the left operand unsigned, since we are only interested
5683 in the value of one bit. Otherwise we are doing the wrong
5684 thing below. */
5685 ll_unsignedp = 1;
5686 l_const = ll_mask;
5687 }
5688 else
5689 return 0;
5690 }
5691
5692 /* This is analogous to the code for l_const above. */
5693 if (rcode != wanted_code)
5694 {
5695 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5696 {
5697 rl_unsignedp = 1;
5698 r_const = rl_mask;
5699 }
5700 else
5701 return 0;
5702 }
5703
5704 /* See if we can find a mode that contains both fields being compared on
5705 the left. If we can't, fail. Otherwise, update all constants and masks
5706 to be relative to a field of that size. */
5707 first_bit = MIN (ll_bitpos, rl_bitpos);
5708 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5709 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5710 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5711 volatilep);
5712 if (lnmode == VOIDmode)
5713 return 0;
5714
5715 lnbitsize = GET_MODE_BITSIZE (lnmode);
5716 lnbitpos = first_bit & ~ (lnbitsize - 1);
5717 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5718 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5719
5720 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5721 {
5722 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5723 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5724 }
5725
5726 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5727 size_int (xll_bitpos));
5728 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5729 size_int (xrl_bitpos));
5730
5731 if (l_const)
5732 {
5733 l_const = fold_convert_loc (loc, lntype, l_const);
5734 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5735 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5736 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5737 fold_build1_loc (loc, BIT_NOT_EXPR,
5738 lntype, ll_mask))))
5739 {
5740 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5741
5742 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5743 }
5744 }
5745 if (r_const)
5746 {
5747 r_const = fold_convert_loc (loc, lntype, r_const);
5748 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5749 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5750 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5751 fold_build1_loc (loc, BIT_NOT_EXPR,
5752 lntype, rl_mask))))
5753 {
5754 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5755
5756 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5757 }
5758 }
5759
5760 /* If the right sides are not constant, do the same for them. Also,
5761 disallow this optimization if a size or signedness mismatch occurs
5762 between the left and right sides. */
5763 if (l_const == 0)
5764 {
5765 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5766 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5767 /* Make sure the two fields on the right
5768 correspond to the left without being swapped. */
5769 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5770 return 0;
5771
5772 first_bit = MIN (lr_bitpos, rr_bitpos);
5773 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5774 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5775 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5776 volatilep);
5777 if (rnmode == VOIDmode)
5778 return 0;
5779
5780 rnbitsize = GET_MODE_BITSIZE (rnmode);
5781 rnbitpos = first_bit & ~ (rnbitsize - 1);
5782 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5783 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5784
5785 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5786 {
5787 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5788 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5789 }
5790
5791 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5792 rntype, lr_mask),
5793 size_int (xlr_bitpos));
5794 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5795 rntype, rr_mask),
5796 size_int (xrr_bitpos));
5797
5798 /* Make a mask that corresponds to both fields being compared.
5799 Do this for both items being compared. If the operands are the
5800 same size and the bits being compared are in the same position
5801 then we can do this by masking both and comparing the masked
5802 results. */
5803 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5804 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5805 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5806 {
5807 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5808 ll_unsignedp || rl_unsignedp, ll_reversep);
5809 if (! all_ones_mask_p (ll_mask, lnbitsize))
5810 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5811
5812 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5813 lr_unsignedp || rr_unsignedp, lr_reversep);
5814 if (! all_ones_mask_p (lr_mask, rnbitsize))
5815 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5816
5817 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5818 }
5819
5820 /* There is still another way we can do something: If both pairs of
5821 fields being compared are adjacent, we may be able to make a wider
5822 field containing them both.
5823
5824 Note that we still must mask the lhs/rhs expressions. Furthermore,
5825 the mask must be shifted to account for the shift done by
5826 make_bit_field_ref. */
5827 if ((ll_bitsize + ll_bitpos == rl_bitpos
5828 && lr_bitsize + lr_bitpos == rr_bitpos)
5829 || (ll_bitpos == rl_bitpos + rl_bitsize
5830 && lr_bitpos == rr_bitpos + rr_bitsize))
5831 {
5832 tree type;
5833
5834 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5835 ll_bitsize + rl_bitsize,
5836 MIN (ll_bitpos, rl_bitpos),
5837 ll_unsignedp, ll_reversep);
5838 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5839 lr_bitsize + rr_bitsize,
5840 MIN (lr_bitpos, rr_bitpos),
5841 lr_unsignedp, lr_reversep);
5842
5843 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5844 size_int (MIN (xll_bitpos, xrl_bitpos)));
5845 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5846 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5847
5848 /* Convert to the smaller type before masking out unwanted bits. */
5849 type = lntype;
5850 if (lntype != rntype)
5851 {
5852 if (lnbitsize > rnbitsize)
5853 {
5854 lhs = fold_convert_loc (loc, rntype, lhs);
5855 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5856 type = rntype;
5857 }
5858 else if (lnbitsize < rnbitsize)
5859 {
5860 rhs = fold_convert_loc (loc, lntype, rhs);
5861 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5862 type = lntype;
5863 }
5864 }
5865
5866 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5867 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5868
5869 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5870 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5871
5872 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5873 }
5874
5875 return 0;
5876 }
5877
5878 /* Handle the case of comparisons with constants. If there is something in
5879 common between the masks, those bits of the constants must be the same.
5880 If not, the condition is always false. Test for this to avoid generating
5881 incorrect code below. */
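/* For example, p->f == 1 && p->f == 2 uses fully overlapping masks
   with constants that differ on the shared bits, so the conjunction
   folds to 0 (and the "and" warning below is emitted).  */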
5882 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5883 if (! integer_zerop (result)
5884 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5885 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5886 {
5887 if (wanted_code == NE_EXPR)
5888 {
5889 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5890 return constant_boolean_node (true, truth_type);
5891 }
5892 else
5893 {
5894 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5895 return constant_boolean_node (false, truth_type);
5896 }
5897 }
5898
5899 /* Construct the expression we will return. First get the component
5900 reference we will make. Unless the mask is all ones the width of
5901 that field, perform the mask operation. Then compare with the
5902 merged constant. */
5903 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5904 ll_unsignedp || rl_unsignedp, ll_reversep);
5905
5906 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5907 if (! all_ones_mask_p (ll_mask, lnbitsize))
5908 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5909
5910 return build2_loc (loc, wanted_code, truth_type, result,
5911 const_binop (BIT_IOR_EXPR, l_const, r_const));
5912 }
5913 \f
5914 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5915 constant. */
5916
5917 static tree
5918 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5919 tree op0, tree op1)
5920 {
5921 tree arg0 = op0;
5922 enum tree_code op_code;
5923 tree comp_const;
5924 tree minmax_const;
5925 int consts_equal, consts_lt;
5926 tree inner;
5927
5928 STRIP_SIGN_NOPS (arg0);
5929
5930 op_code = TREE_CODE (arg0);
5931 minmax_const = TREE_OPERAND (arg0, 1);
5932 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5933 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5934 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5935 inner = TREE_OPERAND (arg0, 0);
5936
5937 /* If something does not permit us to optimize, return the original tree. */
5938 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5939 || TREE_CODE (comp_const) != INTEGER_CST
5940 || TREE_OVERFLOW (comp_const)
5941 || TREE_CODE (minmax_const) != INTEGER_CST
5942 || TREE_OVERFLOW (minmax_const))
5943 return NULL_TREE;
5944
5945 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5946 and GT_EXPR, doing the rest with recursive calls using logical
5947 simplifications. */
5948 switch (code)
5949 {
5950 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5951 {
5952 tree tem
5953 = optimize_minmax_comparison (loc,
5954 invert_tree_comparison (code, false),
5955 type, op0, op1);
5956 if (tem)
5957 return invert_truthvalue_loc (loc, tem);
5958 return NULL_TREE;
5959 }
5960
5961 case GE_EXPR:
5962 return
5963 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5964 optimize_minmax_comparison
5965 (loc, EQ_EXPR, type, arg0, comp_const),
5966 optimize_minmax_comparison
5967 (loc, GT_EXPR, type, arg0, comp_const));
5968
5969 case EQ_EXPR:
5970 if (op_code == MAX_EXPR && consts_equal)
5971 /* MAX (X, 0) == 0 -> X <= 0 */
5972 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5973
5974 else if (op_code == MAX_EXPR && consts_lt)
5975 /* MAX (X, 0) == 5 -> X == 5 */
5976 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5977
5978 else if (op_code == MAX_EXPR)
5979 /* MAX (X, 0) == -1 -> false */
5980 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5981
5982 else if (consts_equal)
5983 /* MIN (X, 0) == 0 -> X >= 0 */
5984 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5985
5986 else if (consts_lt)
5987 /* MIN (X, 0) == 5 -> false */
5988 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5989
5990 else
5991 /* MIN (X, 0) == -1 -> X == -1 */
5992 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5993
5994 case GT_EXPR:
5995 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5996 /* MAX (X, 0) > 0 -> X > 0
5997 MAX (X, 0) > 5 -> X > 5 */
5998 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5999
6000 else if (op_code == MAX_EXPR)
6001 /* MAX (X, 0) > -1 -> true */
6002 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6003
6004 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6005 /* MIN (X, 0) > 0 -> false
6006 MIN (X, 0) > 5 -> false */
6007 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6008
6009 else
6010 /* MIN (X, 0) > -1 -> X > -1 */
6011 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6012
6013 default:
6014 return NULL_TREE;
6015 }
6016 }
6017 \f
6018 /* T is an integer expression that is being multiplied by, divided by, or
6019 taken modulo (CODE says which, and what kind of divide or modulus) a
6020 constant C. See if we can eliminate that operation by folding it with
6021 other operations already in T. WIDE_TYPE, if non-null, is a type that
6022 should be used for the computation if wider than our type.
6023
6024 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6025 (X * 2) + (Y * 4). We must, however, be assured that either the original
6026 expression would not overflow or that overflow is undefined for the type
6027 in the language in question.
6028
6029 If we return a non-null expression, it is an equivalent form of the
6030 original computation, but need not be in the original type.
6031
6032 We set *STRICT_OVERFLOW_P to true if the return value depends on
6033 signed overflow being undefined. Otherwise we do not change
6034 *STRICT_OVERFLOW_P. */
6035
6036 static tree
6037 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6038 bool *strict_overflow_p)
6039 {
6040 /* To avoid exponential search depth, refuse to allow recursion past
6041 three levels. Beyond that (1) it's highly unlikely that we'll find
6042 something interesting and (2) we've probably processed it before
6043 when we built the inner expression. */
6044
6045 static int depth;
6046 tree ret;
6047
6048 if (depth > 3)
6049 return NULL;
6050
6051 depth++;
6052 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6053 depth--;
6054
6055 return ret;
6056 }
6057
6058 static tree
6059 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6060 bool *strict_overflow_p)
6061 {
6062 tree type = TREE_TYPE (t);
6063 enum tree_code tcode = TREE_CODE (t);
6064 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6065 > GET_MODE_SIZE (TYPE_MODE (type)))
6066 ? wide_type : type);
6067 tree t1, t2;
6068 int same_p = tcode == code;
6069 tree op0 = NULL_TREE, op1 = NULL_TREE;
6070 bool sub_strict_overflow_p;
6071
6072 /* Don't deal with constants of zero here; they confuse the code below. */
6073 if (integer_zerop (c))
6074 return NULL_TREE;
6075
6076 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6077 op0 = TREE_OPERAND (t, 0);
6078
6079 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6080 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6081
6082 /* Note that we need not handle conditional operations here since fold
6083 already handles those cases. So just do arithmetic here. */
6084 switch (tcode)
6085 {
6086 case INTEGER_CST:
6087 /* For a constant, we can always simplify if we are a multiply
6088 or (for divide and modulus) if it is a multiple of our constant. */
6089 if (code == MULT_EXPR
6090 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6091 {
6092 tree tem = const_binop (code, fold_convert (ctype, t),
6093 fold_convert (ctype, c));
6094 /* If the multiplication overflowed to INT_MIN then we lost sign
6095 information on it and a subsequent multiplication might
6096 spuriously overflow. See PR68142. */
6097 if (TREE_OVERFLOW (tem)
6098 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6099 return NULL_TREE;
6100 return tem;
6101 }
6102 break;
6103
6104 CASE_CONVERT: case NON_LVALUE_EXPR:
6105 /* If op0 is an expression ... */
6106 if ((COMPARISON_CLASS_P (op0)
6107 || UNARY_CLASS_P (op0)
6108 || BINARY_CLASS_P (op0)
6109 || VL_EXP_CLASS_P (op0)
6110 || EXPRESSION_CLASS_P (op0))
6111 /* ... and has wrapping overflow, and its type is smaller
6112 than ctype, then we cannot pass through as widening. */
6113 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6114 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6115 && (TYPE_PRECISION (ctype)
6116 > TYPE_PRECISION (TREE_TYPE (op0))))
6117 /* ... or this is a truncation (t is narrower than op0),
6118 then we cannot pass through this narrowing. */
6119 || (TYPE_PRECISION (type)
6120 < TYPE_PRECISION (TREE_TYPE (op0)))
6121 /* ... or signedness changes for division or modulus,
6122 then we cannot pass through this conversion. */
6123 || (code != MULT_EXPR
6124 && (TYPE_UNSIGNED (ctype)
6125 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6126 /* ... or the inner type has undefined overflow while the type
6127 converted to has not, we cannot do the operation in the inner type
6128 as that would introduce undefined overflow. */
6129 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6130 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6131 && !TYPE_OVERFLOW_UNDEFINED (type))))
6132 break;
6133
6134 /* Pass the constant down and see if we can make a simplification. If
6135 we can, replace this expression with the inner simplification for
6136 possible later conversion to our or some other type. */
6137 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6138 && TREE_CODE (t2) == INTEGER_CST
6139 && !TREE_OVERFLOW (t2)
6140 && (0 != (t1 = extract_muldiv (op0, t2, code,
6141 code == MULT_EXPR
6142 ? ctype : NULL_TREE,
6143 strict_overflow_p))))
6144 return t1;
6145 break;
6146
6147 case ABS_EXPR:
6148 /* If widening the type changes it from signed to unsigned, then we
6149 must avoid building ABS_EXPR itself as unsigned. */
6150 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6151 {
6152 tree cstype = (*signed_type_for) (ctype);
6153 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6154 != 0)
6155 {
6156 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6157 return fold_convert (ctype, t1);
6158 }
6159 break;
6160 }
6161 /* If the constant is negative, we cannot simplify this. */
6162 if (tree_int_cst_sgn (c) == -1)
6163 break;
6164 /* FALLTHROUGH */
6165 case NEGATE_EXPR:
6166 /* For division and modulus, type can't be unsigned, as e.g.
6167 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6168 For signed types, even with wrapping overflow, this is fine. */
6169 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6170 break;
6171 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6172 != 0)
6173 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6174 break;
6175
6176 case MIN_EXPR: case MAX_EXPR:
6177 /* If widening the type changes the signedness, then we can't perform
6178 this optimization as that changes the result. */
6179 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6180 break;
6181
6182 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6183 sub_strict_overflow_p = false;
6184 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6185 &sub_strict_overflow_p)) != 0
6186 && (t2 = extract_muldiv (op1, c, code, wide_type,
6187 &sub_strict_overflow_p)) != 0)
6188 {
6189 if (tree_int_cst_sgn (c) < 0)
6190 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6191 if (sub_strict_overflow_p)
6192 *strict_overflow_p = true;
6193 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6194 fold_convert (ctype, t2));
6195 }
6196 break;
6197
6198 case LSHIFT_EXPR: case RSHIFT_EXPR:
6199 /* If the second operand is constant, this is a multiplication
6200 or floor division, by a power of two, so we can treat it that
6201 way unless the multiplier or divisor overflows. Signed
6202 left-shift overflow is implementation-defined rather than
6203 undefined in C90, so do not convert signed left shift into
6204 multiplication. */
6205 if (TREE_CODE (op1) == INTEGER_CST
6206 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6207 /* const_binop may not detect overflow correctly,
6208 so check for it explicitly here. */
6209 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6210 && 0 != (t1 = fold_convert (ctype,
6211 const_binop (LSHIFT_EXPR,
6212 size_one_node,
6213 op1)))
6214 && !TREE_OVERFLOW (t1))
6215 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6216 ? MULT_EXPR : FLOOR_DIV_EXPR,
6217 ctype,
6218 fold_convert (ctype, op0),
6219 t1),
6220 c, code, wide_type, strict_overflow_p);
6221 break;
6222
6223 case PLUS_EXPR: case MINUS_EXPR:
6224 /* See if we can eliminate the operation on both sides. If we can, we
6225 can return a new PLUS or MINUS. If we can't, the only remaining
6226 cases where we can do anything are if the second operand is a
6227 constant. */
6228 sub_strict_overflow_p = false;
6229 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6230 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6231 if (t1 != 0 && t2 != 0
6232 && (code == MULT_EXPR
6233 /* If not multiplication, we can only do this if both operands
6234 are divisible by c. */
6235 || (multiple_of_p (ctype, op0, c)
6236 && multiple_of_p (ctype, op1, c))))
6237 {
6238 if (sub_strict_overflow_p)
6239 *strict_overflow_p = true;
6240 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6241 fold_convert (ctype, t2));
6242 }
6243
6244 /* If this was a subtraction, negate OP1 and set it to be an addition.
6245 This simplifies the logic below. */
6246 if (tcode == MINUS_EXPR)
6247 {
6248 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6249 /* If OP1 was not easily negatable, the constant may be OP0. */
6250 if (TREE_CODE (op0) == INTEGER_CST)
6251 {
6252 std::swap (op0, op1);
6253 std::swap (t1, t2);
6254 }
6255 }
6256
6257 if (TREE_CODE (op1) != INTEGER_CST)
6258 break;
6259
6260 /* If either OP1 or C is negative, this optimization is not safe for
6261 some of the division and remainder types while for others we need
6262 to change the code. */
6263 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6264 {
6265 if (code == CEIL_DIV_EXPR)
6266 code = FLOOR_DIV_EXPR;
6267 else if (code == FLOOR_DIV_EXPR)
6268 code = CEIL_DIV_EXPR;
6269 else if (code != MULT_EXPR
6270 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6271 break;
6272 }
6273
6274 /* If it's a multiply or a division/modulus operation of a multiple
6275 of our constant, do the operation and verify it doesn't overflow. */
6276 if (code == MULT_EXPR
6277 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6278 {
6279 op1 = const_binop (code, fold_convert (ctype, op1),
6280 fold_convert (ctype, c));
6281 /* We allow the constant to overflow with wrapping semantics. */
6282 if (op1 == 0
6283 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6284 break;
6285 }
6286 else
6287 break;
6288
6289 /* If we have an unsigned type, we cannot widen the operation since it
6290 will change the result if the original computation overflowed. */
6291 if (TYPE_UNSIGNED (ctype) && ctype != type)
6292 break;
6293
6294 /* If we were able to eliminate our operation from the first side,
6295 apply our operation to the second side and reform the PLUS. */
6296 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6297 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6298
6299 /* The last case is if we are a multiply. In that case, we can
6300 apply the distributive law to commute the multiply and addition
6301 if the multiplication of the constants doesn't overflow
6302 and overflow is defined. With undefined overflow
6303 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6304 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6305 return fold_build2 (tcode, ctype,
6306 fold_build2 (code, ctype,
6307 fold_convert (ctype, op0),
6308 fold_convert (ctype, c)),
6309 op1);
6310
6311 break;
6312
6313 case MULT_EXPR:
6314 /* We have a special case here if we are doing something like
6315 (C * 8) % 4 since we know that's zero. */
6316 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6317 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6318 /* If the multiplication can overflow we cannot optimize this. */
6319 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6320 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6321 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6322 {
6323 *strict_overflow_p = true;
6324 return omit_one_operand (type, integer_zero_node, op0);
6325 }
6326
6327 /* ... fall through ... */
6328
6329 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6330 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6331 /* If we can extract our operation from the LHS, do so and return a
6332 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6333 do something only if the second operand is a constant. */
6334 if (same_p
6335 && (t1 = extract_muldiv (op0, c, code, wide_type,
6336 strict_overflow_p)) != 0)
6337 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6338 fold_convert (ctype, op1));
6339 else if (tcode == MULT_EXPR && code == MULT_EXPR
6340 && (t1 = extract_muldiv (op1, c, code, wide_type,
6341 strict_overflow_p)) != 0)
6342 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6343 fold_convert (ctype, t1));
6344 else if (TREE_CODE (op1) != INTEGER_CST)
6345 return 0;
6346
6347 /* If these are the same operation types, we can associate them
6348 assuming no overflow. */
6349 if (tcode == code)
6350 {
6351 bool overflow_p = false;
6352 bool overflow_mul_p;
6353 signop sign = TYPE_SIGN (ctype);
6354 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6355 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6356 if (overflow_mul_p
6357 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6358 overflow_p = true;
6359 if (!overflow_p)
6360 {
6361 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6362 TYPE_SIGN (TREE_TYPE (op1)));
6363 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6364 wide_int_to_tree (ctype, mul));
6365 }
6366 }
6367
6368 /* If these operations "cancel" each other, we have the main
6369 optimizations of this pass, which occur when either constant is a
6370 multiple of the other, in which case we replace this with an
6371 operation of either CODE or TCODE.
6372
6373 If we have an unsigned type, we cannot do this since it will change
6374 the result if the original computation overflowed. */
6375 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6376 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6377 || (tcode == MULT_EXPR
6378 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6379 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6380 && code != MULT_EXPR)))
6381 {
6382 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6383 {
6384 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6385 *strict_overflow_p = true;
6386 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6387 fold_convert (ctype,
6388 const_binop (TRUNC_DIV_EXPR,
6389 op1, c)));
6390 }
6391 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6392 {
6393 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6394 *strict_overflow_p = true;
6395 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6396 fold_convert (ctype,
6397 const_binop (TRUNC_DIV_EXPR,
6398 c, op1)));
6399 }
6400 }
6401 break;
6402
6403 default:
6404 break;
6405 }
6406
6407 return 0;
6408 }
6409 \f
6410 /* Return a node which has the indicated constant VALUE (either 0 or
6411 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6412 and is of the indicated TYPE. */
6413
6414 tree
6415 constant_boolean_node (bool value, tree type)
6416 {
6417 if (type == integer_type_node)
6418 return value ? integer_one_node : integer_zero_node;
6419 else if (type == boolean_type_node)
6420 return value ? boolean_true_node : boolean_false_node;
6421 else if (TREE_CODE (type) == VECTOR_TYPE)
6422 return build_vector_from_val (type,
6423 build_int_cst (TREE_TYPE (type),
6424 value ? -1 : 0));
6425 else
6426 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6427 }
6428
6429
6430 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6431 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6432 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6433 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6434 COND is the first argument to CODE; otherwise (as in the example
6435 given here), it is the second argument. TYPE is the type of the
6436 original expression. Return NULL_TREE if no simplification is
6437 possible. */
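/* For instance, with ARG == 1 and COND == (b ? x : 3) under PLUS_EXPR,
   the result is b ? (1 + x) : 4; the checks below admit the transform
   because ARG is constant and one arm (1 + 3) folds to a constant.  */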
6438
6439 static tree
6440 fold_binary_op_with_conditional_arg (location_t loc,
6441 enum tree_code code,
6442 tree type, tree op0, tree op1,
6443 tree cond, tree arg, int cond_first_p)
6444 {
6445 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6446 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6447 tree test, true_value, false_value;
6448 tree lhs = NULL_TREE;
6449 tree rhs = NULL_TREE;
6450 enum tree_code cond_code = COND_EXPR;
6451
6452 if (TREE_CODE (cond) == COND_EXPR
6453 || TREE_CODE (cond) == VEC_COND_EXPR)
6454 {
6455 test = TREE_OPERAND (cond, 0);
6456 true_value = TREE_OPERAND (cond, 1);
6457 false_value = TREE_OPERAND (cond, 2);
6458 /* If this operand throws an exception, then it does not make
6459 sense to try to perform a logical or arithmetic operation
6460 involving it. */
6461 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6462 lhs = true_value;
6463 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6464 rhs = false_value;
6465 }
6466 else if (!(TREE_CODE (type) != VECTOR_TYPE
6467 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6468 {
6469 tree testtype = TREE_TYPE (cond);
6470 test = cond;
6471 true_value = constant_boolean_node (true, testtype);
6472 false_value = constant_boolean_node (false, testtype);
6473 }
6474 else
6475 /* Detect the case of mixing vector and scalar types - bail out. */
6476 return NULL_TREE;
6477
6478 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6479 cond_code = VEC_COND_EXPR;
6480
6481 /* This transformation is only worthwhile if we don't have to wrap ARG
6482 in a SAVE_EXPR and the operation can be simplified without recursing
6483 on at least one of the branches once it is pushed inside the COND_EXPR. */
6484 if (!TREE_CONSTANT (arg)
6485 && (TREE_SIDE_EFFECTS (arg)
6486 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6487 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6488 return NULL_TREE;
6489
6490 arg = fold_convert_loc (loc, arg_type, arg);
6491 if (lhs == 0)
6492 {
6493 true_value = fold_convert_loc (loc, cond_type, true_value);
6494 if (cond_first_p)
6495 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6496 else
6497 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6498 }
6499 if (rhs == 0)
6500 {
6501 false_value = fold_convert_loc (loc, cond_type, false_value);
6502 if (cond_first_p)
6503 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6504 else
6505 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6506 }
6507
6508 /* Check that we have simplified at least one of the branches. */
6509 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6510 return NULL_TREE;
6511
6512 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6513 }
6514
6515 \f
6516 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6517
6518 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6519 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6520 ADDEND is the same as X.
6521
6522 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6523 and finite. The problematic cases are when X is zero, and its mode
6524 has signed zeros. In the case of rounding towards -infinity,
6525 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6526 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6527
6528 bool
6529 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6530 {
6531 if (!real_zerop (addend))
6532 return false;
6533
6534 /* Don't allow the fold with -fsignaling-nans. */
6535 if (HONOR_SNANS (element_mode (type)))
6536 return false;
6537
6538 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6539 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6540 return true;
6541
6542 /* In a vector or complex, we would need to check the sign of all zeros. */
6543 if (TREE_CODE (addend) != REAL_CST)
6544 return false;
6545
6546 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6547 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6548 negate = !negate;
6549
6550 /* The mode has signed zeros, and we have to honor their sign.
6551 In this situation, there is only one case we can return true for.
6552 X - 0 is the same as X unless rounding towards -infinity is
6553 supported. */
6554 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6555 }
6556
6557 /* Subroutine of fold() that optimizes comparisons of a division by
6558 a nonzero integer constant against an integer constant, i.e.
6559 X/C1 op C2.
6560
6561 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6562 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6563 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6564
6565 The function returns the constant folded tree if a simplification
6566 can be made, and NULL_TREE otherwise. */
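/* For example, in the unsigned case X/4 == 3 holds exactly for X in
   [12, 15], so the EQ_EXPR case below produces the range check
   12 <= X && X <= 15, and X/4 > 3 likewise becomes X > 15.  */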
6567
6568 static tree
6569 fold_div_compare (location_t loc,
6570 enum tree_code code, tree type, tree arg0, tree arg1)
6571 {
6572 tree prod, tmp, hi, lo;
6573 tree arg00 = TREE_OPERAND (arg0, 0);
6574 tree arg01 = TREE_OPERAND (arg0, 1);
6575 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6576 bool neg_overflow = false;
6577 bool overflow;
6578
6579 /* We have to do this the hard way to detect unsigned overflow.
6580 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6581 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6582 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6583 neg_overflow = false;
6584
6585 if (sign == UNSIGNED)
6586 {
6587 tmp = int_const_binop (MINUS_EXPR, arg01,
6588 build_int_cst (TREE_TYPE (arg01), 1));
6589 lo = prod;
6590
6591 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6592 val = wi::add (prod, tmp, sign, &overflow);
6593 hi = force_fit_type (TREE_TYPE (arg00), val,
6594 -1, overflow | TREE_OVERFLOW (prod));
6595 }
6596 else if (tree_int_cst_sgn (arg01) >= 0)
6597 {
6598 tmp = int_const_binop (MINUS_EXPR, arg01,
6599 build_int_cst (TREE_TYPE (arg01), 1));
6600 switch (tree_int_cst_sgn (arg1))
6601 {
6602 case -1:
6603 neg_overflow = true;
6604 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6605 hi = prod;
6606 break;
6607
6608 case 0:
6609 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6610 hi = tmp;
6611 break;
6612
6613 case 1:
6614 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6615 lo = prod;
6616 break;
6617
6618 default:
6619 gcc_unreachable ();
6620 }
6621 }
6622 else
6623 {
6624 /* A negative divisor reverses the relational operators. */
6625 code = swap_tree_comparison (code);
6626
6627 tmp = int_const_binop (PLUS_EXPR, arg01,
6628 build_int_cst (TREE_TYPE (arg01), 1));
6629 switch (tree_int_cst_sgn (arg1))
6630 {
6631 case -1:
6632 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6633 lo = prod;
6634 break;
6635
6636 case 0:
6637 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6638 lo = tmp;
6639 break;
6640
6641 case 1:
6642 neg_overflow = true;
6643 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6644 hi = prod;
6645 break;
6646
6647 default:
6648 gcc_unreachable ();
6649 }
6650 }
6651
6652 switch (code)
6653 {
6654 case EQ_EXPR:
6655 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6656 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6657 if (TREE_OVERFLOW (hi))
6658 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6659 if (TREE_OVERFLOW (lo))
6660 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6661 return build_range_check (loc, type, arg00, 1, lo, hi);
6662
6663 case NE_EXPR:
6664 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6665 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6666 if (TREE_OVERFLOW (hi))
6667 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6668 if (TREE_OVERFLOW (lo))
6669 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6670 return build_range_check (loc, type, arg00, 0, lo, hi);
6671
6672 case LT_EXPR:
6673 if (TREE_OVERFLOW (lo))
6674 {
6675 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6676 return omit_one_operand_loc (loc, type, tmp, arg00);
6677 }
6678 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6679
6680 case LE_EXPR:
6681 if (TREE_OVERFLOW (hi))
6682 {
6683 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6684 return omit_one_operand_loc (loc, type, tmp, arg00);
6685 }
6686 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6687
6688 case GT_EXPR:
6689 if (TREE_OVERFLOW (hi))
6690 {
6691 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6692 return omit_one_operand_loc (loc, type, tmp, arg00);
6693 }
6694 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6695
6696 case GE_EXPR:
6697 if (TREE_OVERFLOW (lo))
6698 {
6699 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6700 return omit_one_operand_loc (loc, type, tmp, arg00);
6701 }
6702 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6703
6704 default:
6705 break;
6706 }
6707
6708 return NULL_TREE;
6709 }
6710
6711
6712 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6713 equality/inequality test, then return a simplified form of the test
6714 using a sign test. Otherwise return NULL. TYPE is the desired
6715 result type. */
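/* For example, for a 32-bit unsigned A, (A & 0x80000000) != 0 tests
   exactly the sign bit and can become (int) A < 0, while
   (A & 0x80000000) == 0 becomes (int) A >= 0.  */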
6716
6717 static tree
6718 fold_single_bit_test_into_sign_test (location_t loc,
6719 enum tree_code code, tree arg0, tree arg1,
6720 tree result_type)
6721 {
6722 /* If this is testing a single bit, we can optimize the test. */
6723 if ((code == NE_EXPR || code == EQ_EXPR)
6724 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6725 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6726 {
6727 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6728 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6729 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6730
6731 if (arg00 != NULL_TREE
6732 /* This is only a win if casting to a signed type is cheap,
6733 i.e. when arg00's type is not a partial mode. */
6734 && TYPE_PRECISION (TREE_TYPE (arg00))
6735 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6736 {
6737 tree stype = signed_type_for (TREE_TYPE (arg00));
6738 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6739 result_type,
6740 fold_convert_loc (loc, stype, arg00),
6741 build_int_cst (stype, 0));
6742 }
6743 }
6744
6745 return NULL_TREE;
6746 }
6747
6748 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6749 equality/inequality test, then return a simplified form of
6750 the test using shifts and logical operations. Otherwise return
6751 NULL. TYPE is the desired result type. */
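/* For example, (A & 8) != 0 tests bit 3 and becomes ((A >> 3) & 1);
   for (A & 8) == 0 the shifted value is first XORed with one, giving
   (((A >> 3) ^ 1) & 1).  */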
6752
6753 tree
6754 fold_single_bit_test (location_t loc, enum tree_code code,
6755 tree arg0, tree arg1, tree result_type)
6756 {
6757 /* If this is testing a single bit, we can optimize the test. */
6758 if ((code == NE_EXPR || code == EQ_EXPR)
6759 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6760 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6761 {
6762 tree inner = TREE_OPERAND (arg0, 0);
6763 tree type = TREE_TYPE (arg0);
6764 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6765 machine_mode operand_mode = TYPE_MODE (type);
6766 int ops_unsigned;
6767 tree signed_type, unsigned_type, intermediate_type;
6768 tree tem, one;
6769
6770 /* First, see if we can fold the single bit test into a sign-bit
6771 test. */
6772 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6773 result_type);
6774 if (tem)
6775 return tem;
6776
6777 /* Otherwise we have (A & C) != 0 where C is a single bit,
6778 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6779 Similarly for (A & C) == 0. */
6780
6781 /* If INNER is a right shift by a constant and it plus BITNUM does
6782 not overflow, adjust BITNUM and INNER. */
6783 if (TREE_CODE (inner) == RSHIFT_EXPR
6784 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6785 && bitnum < TYPE_PRECISION (type)
6786 && wi::ltu_p (TREE_OPERAND (inner, 1),
6787 TYPE_PRECISION (type) - bitnum))
6788 {
6789 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6790 inner = TREE_OPERAND (inner, 0);
6791 }
6792
6793 /* If we are going to be able to omit the AND below, we must do our
6794 operations as unsigned. If we must use the AND, we have a choice.
6795 Normally unsigned is faster, but for some machines signed is. */
6796 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6797 && !flag_syntax_only) ? 0 : 1;
6798
6799 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6800 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6801 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6802 inner = fold_convert_loc (loc, intermediate_type, inner);
6803
6804 if (bitnum != 0)
6805 inner = build2 (RSHIFT_EXPR, intermediate_type,
6806 inner, size_int (bitnum));
6807
6808 one = build_int_cst (intermediate_type, 1);
6809
6810 if (code == EQ_EXPR)
6811 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6812
6813 /* Put the AND last so it can combine with more things. */
6814 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6815
6816 /* Make sure to return the proper type. */
6817 inner = fold_convert_loc (loc, result_type, inner);
6818
6819 return inner;
6820 }
6821 return NULL_TREE;
6822 }
6823
6824 /* Check whether we are allowed to reorder operands arg0 and arg1,
6825 such that the evaluation of arg1 occurs before arg0. */
6826
6827 static bool
6828 reorder_operands_p (const_tree arg0, const_tree arg1)
6829 {
6830 if (! flag_evaluation_order)
6831 return true;
6832 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6833 return true;
6834 return ! TREE_SIDE_EFFECTS (arg0)
6835 && ! TREE_SIDE_EFFECTS (arg1);
6836 }
6837
6838 /* Test whether it is preferable to swap two operands, ARG0 and
6839 ARG1, for example because ARG0 is an integer constant and ARG1
6840 isn't. If REORDER is true, only recommend swapping if we can
6841 evaluate the operands in reverse order. */
6842
6843 bool
6844 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6845 {
6846 if (CONSTANT_CLASS_P (arg1))
6847 return 0;
6848 if (CONSTANT_CLASS_P (arg0))
6849 return 1;
6850
6851 STRIP_NOPS (arg0);
6852 STRIP_NOPS (arg1);
6853
6854 if (TREE_CONSTANT (arg1))
6855 return 0;
6856 if (TREE_CONSTANT (arg0))
6857 return 1;
6858
6859 if (reorder && flag_evaluation_order
6860 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6861 return 0;
6862
6863 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6864 for commutative and comparison operators. Ensuring a canonical
6865 form allows the optimizers to find additional redundancies without
6866 having to explicitly check for both orderings. */
6867 if (TREE_CODE (arg0) == SSA_NAME
6868 && TREE_CODE (arg1) == SSA_NAME
6869 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6870 return 1;
6871
6872 /* Put SSA_NAMEs last. */
6873 if (TREE_CODE (arg1) == SSA_NAME)
6874 return 0;
6875 if (TREE_CODE (arg0) == SSA_NAME)
6876 return 1;
6877
6878 /* Put variables last. */
6879 if (DECL_P (arg1))
6880 return 0;
6881 if (DECL_P (arg0))
6882 return 1;
6883
6884 return 0;
6885 }
6886
6887
6888 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6889 means A >= Y && A != MAX, but in this case we know that
6890 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6891
6892 static tree
6893 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6894 {
6895 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6896
6897 if (TREE_CODE (bound) == LT_EXPR)
6898 a = TREE_OPERAND (bound, 0);
6899 else if (TREE_CODE (bound) == GT_EXPR)
6900 a = TREE_OPERAND (bound, 1);
6901 else
6902 return NULL_TREE;
6903
6904 typea = TREE_TYPE (a);
6905 if (!INTEGRAL_TYPE_P (typea)
6906 && !POINTER_TYPE_P (typea))
6907 return NULL_TREE;
6908
6909 if (TREE_CODE (ineq) == LT_EXPR)
6910 {
6911 a1 = TREE_OPERAND (ineq, 1);
6912 y = TREE_OPERAND (ineq, 0);
6913 }
6914 else if (TREE_CODE (ineq) == GT_EXPR)
6915 {
6916 a1 = TREE_OPERAND (ineq, 0);
6917 y = TREE_OPERAND (ineq, 1);
6918 }
6919 else
6920 return NULL_TREE;
6921
6922 if (TREE_TYPE (a1) != typea)
6923 return NULL_TREE;
6924
6925 if (POINTER_TYPE_P (typea))
6926 {
6927 /* Convert the pointers to integers before taking the difference. */
6928 tree ta = fold_convert_loc (loc, ssizetype, a);
6929 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6930 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6931 }
6932 else
6933 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6934
6935 if (!diff || !integer_onep (diff))
6936 return NULL_TREE;
6937
6938 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6939 }
6940
6941 /* Fold a sum or difference of at least one multiplication.
6942 Returns the folded tree or NULL if no simplification could be made. */
6943
6944 static tree
6945 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6946 tree arg0, tree arg1)
6947 {
6948 tree arg00, arg01, arg10, arg11;
6949 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6950
6951 /* (A * C) +- (B * C) -> (A+-B) * C.
6952 (A * C) +- A -> A * (C+-1).
6953 We are most concerned about the case where C is a constant,
6954 but other combinations show up during loop reduction. Since
6955 it is not difficult, try all four possibilities. */
6956
6957 if (TREE_CODE (arg0) == MULT_EXPR)
6958 {
6959 arg00 = TREE_OPERAND (arg0, 0);
6960 arg01 = TREE_OPERAND (arg0, 1);
6961 }
6962 else if (TREE_CODE (arg0) == INTEGER_CST)
6963 {
6964 arg00 = build_one_cst (type);
6965 arg01 = arg0;
6966 }
6967 else
6968 {
6969 /* We cannot generate constant 1 for fract. */
6970 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6971 return NULL_TREE;
6972 arg00 = arg0;
6973 arg01 = build_one_cst (type);
6974 }
6975 if (TREE_CODE (arg1) == MULT_EXPR)
6976 {
6977 arg10 = TREE_OPERAND (arg1, 0);
6978 arg11 = TREE_OPERAND (arg1, 1);
6979 }
6980 else if (TREE_CODE (arg1) == INTEGER_CST)
6981 {
6982 arg10 = build_one_cst (type);
6983 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6984 the purpose of this canonicalization. */
6985 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6986 && negate_expr_p (arg1)
6987 && code == PLUS_EXPR)
6988 {
6989 arg11 = negate_expr (arg1);
6990 code = MINUS_EXPR;
6991 }
6992 else
6993 arg11 = arg1;
6994 }
6995 else
6996 {
6997 /* We cannot generate constant 1 for fract. */
6998 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6999 return NULL_TREE;
7000 arg10 = arg1;
7001 arg11 = build_one_cst (type);
7002 }
7003 same = NULL_TREE;
7004
7005 if (operand_equal_p (arg01, arg11, 0))
7006 same = arg01, alt0 = arg00, alt1 = arg10;
7007 else if (operand_equal_p (arg00, arg10, 0))
7008 same = arg00, alt0 = arg01, alt1 = arg11;
7009 else if (operand_equal_p (arg00, arg11, 0))
7010 same = arg00, alt0 = arg01, alt1 = arg10;
7011 else if (operand_equal_p (arg01, arg10, 0))
7012 same = arg01, alt0 = arg00, alt1 = arg11;
7013
7014 /* No identical multiplicands; see if we can find a common
7015 power-of-two factor in non-power-of-two multiplies. This
7016 can help in multi-dimensional array access. */
7017 else if (tree_fits_shwi_p (arg01)
7018 && tree_fits_shwi_p (arg11))
7019 {
7020 HOST_WIDE_INT int01, int11, tmp;
7021 bool swap = false;
7022 tree maybe_same;
7023 int01 = tree_to_shwi (arg01);
7024 int11 = tree_to_shwi (arg11);
7025
7026 /* Move min of absolute values to int11. */
7027 if (absu_hwi (int01) < absu_hwi (int11))
7028 {
7029 tmp = int01, int01 = int11, int11 = tmp;
7030 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7031 maybe_same = arg01;
7032 swap = true;
7033 }
7034 else
7035 maybe_same = arg11;
7036
7037 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7038 /* The remainder should not be a constant, otherwise we
7039 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7040 increased the number of multiplications necessary. */
7041 && TREE_CODE (arg10) != INTEGER_CST)
7042 {
7043 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7044 build_int_cst (TREE_TYPE (arg00),
7045 int01 / int11));
7046 alt1 = arg10;
7047 same = maybe_same;
7048 if (swap)
7049 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7050 }
7051 }
7052
7053 if (same)
7054 return fold_build2_loc (loc, MULT_EXPR, type,
7055 fold_build2_loc (loc, code, type,
7056 fold_convert_loc (loc, type, alt0),
7057 fold_convert_loc (loc, type, alt1)),
7058 fold_convert_loc (loc, type, same));
7059
7060 return NULL_TREE;
7061 }
7062
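/* Illustrative folds performed here, for hypothetical operands
   a, b and i:

     a * 4 + b * 4   -->  (a + b) * 4   common multiplicand
     a * 4 + a       -->  a * 5         (A * C) +- A -> A * (C+-1)
     i * 12 + i * 4  -->  (i * 3 + i) * 4

   The last case extracts the common power-of-two factor 4 from the
   constants 12 and 4.  */
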
7063 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7066 upon failure. */
7067
7068 static int
7069 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7070 {
7071 tree type = TREE_TYPE (expr);
7072 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7073 int byte, offset, word, words;
7074 unsigned char value;
7075
7076 if ((off == -1 && total_bytes > len)
7077 || off >= total_bytes)
7078 return 0;
7079 if (off == -1)
7080 off = 0;
7081 words = total_bytes / UNITS_PER_WORD;
7082
7083 for (byte = 0; byte < total_bytes; byte++)
7084 {
7085 int bitpos = byte * BITS_PER_UNIT;
7086 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7087 number of bytes. */
7088 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7089
7090 if (total_bytes > UNITS_PER_WORD)
7091 {
7092 word = byte / UNITS_PER_WORD;
7093 if (WORDS_BIG_ENDIAN)
7094 word = (words - 1) - word;
7095 offset = word * UNITS_PER_WORD;
7096 if (BYTES_BIG_ENDIAN)
7097 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7098 else
7099 offset += byte % UNITS_PER_WORD;
7100 }
7101 else
7102 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7103 if (offset >= off
7104 && offset - off < len)
7105 ptr[offset - off] = value;
7106 }
7107 return MIN (len, total_bytes - off);
7108 }
7109
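/* For example (hypothetical): on a little-endian target with 8-bit
   bytes and UNITS_PER_WORD >= 4, encoding the 32-bit INTEGER_CST
   0x01020304 stores

     ptr[0..3] = { 0x04, 0x03, 0x02, 0x01 }

   whereas a big-endian target stores { 0x01, 0x02, 0x03, 0x04 }.  */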
7110
7111 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7112 specified by EXPR into the buffer PTR of length LEN bytes.
7113 Return the number of bytes placed in the buffer, or zero
7114 upon failure. */
7115
7116 static int
7117 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7118 {
7119 tree type = TREE_TYPE (expr);
7120 machine_mode mode = TYPE_MODE (type);
7121 int total_bytes = GET_MODE_SIZE (mode);
7122 FIXED_VALUE_TYPE value;
7123 tree i_value, i_type;
7124
7125 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7126 return 0;
7127
7128 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7129
7130 if (NULL_TREE == i_type
7131 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7132 return 0;
7133
7134 value = TREE_FIXED_CST (expr);
7135 i_value = double_int_to_tree (i_type, value.data);
7136
7137 return native_encode_int (i_value, ptr, len, off);
7138 }
7139
7140
7141 /* Subroutine of native_encode_expr. Encode the REAL_CST
7142 specified by EXPR into the buffer PTR of length LEN bytes.
7143 Return the number of bytes placed in the buffer, or zero
7144 upon failure. */
7145
7146 static int
7147 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7148 {
7149 tree type = TREE_TYPE (expr);
7150 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7151 int byte, offset, word, words, bitpos;
7152 unsigned char value;
7153
7154 /* There are always 32 bits in each long, no matter the size of
7155 the host's long. We handle floating point representations with
7156 up to 192 bits. */
7157 long tmp[6];
7158
7159 if ((off == -1 && total_bytes > len)
7160 || off >= total_bytes)
7161 return 0;
7162 if (off == -1)
7163 off = 0;
7164 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7165
7166 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7167
7168 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7169 bitpos += BITS_PER_UNIT)
7170 {
7171 byte = (bitpos / BITS_PER_UNIT) & 3;
7172 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7173
7174 if (UNITS_PER_WORD < 4)
7175 {
7176 word = byte / UNITS_PER_WORD;
7177 if (WORDS_BIG_ENDIAN)
7178 word = (words - 1) - word;
7179 offset = word * UNITS_PER_WORD;
7180 if (BYTES_BIG_ENDIAN)
7181 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7182 else
7183 offset += byte % UNITS_PER_WORD;
7184 }
7185 else
7186 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7187 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7188 if (offset >= off
7189 && offset - off < len)
7190 ptr[offset - off] = value;
7191 }
7192 return MIN (len, total_bytes - off);
7193 }
7194
7195 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7196 specified by EXPR into the buffer PTR of length LEN bytes.
7197 Return the number of bytes placed in the buffer, or zero
7198 upon failure. */
7199
7200 static int
7201 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7202 {
7203 int rsize, isize;
7204 tree part;
7205
7206 part = TREE_REALPART (expr);
7207 rsize = native_encode_expr (part, ptr, len, off);
7208 if (off == -1
7209 && rsize == 0)
7210 return 0;
7211 part = TREE_IMAGPART (expr);
7212 if (off != -1)
7213 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7214 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7215 if (off == -1
7216 && isize != rsize)
7217 return 0;
7218 return rsize + isize;
7219 }
7220
7221
7222 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7225 upon failure. */
7226
7227 static int
7228 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7229 {
7230 unsigned i, count;
7231 int size, offset;
7232 tree itype, elem;
7233
7234 offset = 0;
7235 count = VECTOR_CST_NELTS (expr);
7236 itype = TREE_TYPE (TREE_TYPE (expr));
7237 size = GET_MODE_SIZE (TYPE_MODE (itype));
7238 for (i = 0; i < count; i++)
7239 {
7240 if (off >= size)
7241 {
7242 off -= size;
7243 continue;
7244 }
7245 elem = VECTOR_CST_ELT (expr, i);
7246 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7247 if ((off == -1 && res != size)
7248 || res == 0)
7249 return 0;
7250 offset += res;
7251 if (offset >= len)
7252 return offset;
7253 if (off != -1)
7254 off = 0;
7255 }
7256 return offset;
7257 }
7258
7259
7260 /* Subroutine of native_encode_expr. Encode the STRING_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7264
7265 static int
7266 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7267 {
7268 tree type = TREE_TYPE (expr);
7269 HOST_WIDE_INT total_bytes;
7270
7271 if (TREE_CODE (type) != ARRAY_TYPE
7272 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7273 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7274 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7275 return 0;
7276 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7277 if ((off == -1 && total_bytes > len)
7278 || off >= total_bytes)
7279 return 0;
7280 if (off == -1)
7281 off = 0;
7282 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7283 {
7284 int written = 0;
7285 if (off < TREE_STRING_LENGTH (expr))
7286 {
7287 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7288 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7289 }
7290 memset (ptr + written, 0,
7291 MIN (total_bytes - written, len - written));
7292 }
7293 else
7294 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7295 return MIN (total_bytes - off, len);
7296 }
7297
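/* For example (hypothetical): encoding the STRING_CST "ab" whose
   type is char[4] yields { 'a', 'b', 0, 0 }; bytes past
   TREE_STRING_LENGTH are zero-filled up to the size of the type.  */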
7298
7299 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7300 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7301 buffer PTR of length LEN bytes. If OFF is not -1 then start
7302 the encoding at byte offset OFF and encode at most LEN bytes.
7303 Return the number of bytes placed in the buffer, or zero upon failure. */
7304
7305 int
7306 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7307 {
7308 /* We don't support starting at a negative offset; -1 is special. */
7309 if (off < -1)
7310 return 0;
7311
7312 switch (TREE_CODE (expr))
7313 {
7314 case INTEGER_CST:
7315 return native_encode_int (expr, ptr, len, off);
7316
7317 case REAL_CST:
7318 return native_encode_real (expr, ptr, len, off);
7319
7320 case FIXED_CST:
7321 return native_encode_fixed (expr, ptr, len, off);
7322
7323 case COMPLEX_CST:
7324 return native_encode_complex (expr, ptr, len, off);
7325
7326 case VECTOR_CST:
7327 return native_encode_vector (expr, ptr, len, off);
7328
7329 case STRING_CST:
7330 return native_encode_string (expr, ptr, len, off);
7331
7332 default:
7333 return 0;
7334 }
7335 }
7336
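/* Usage sketch (hypothetical caller): to extract only the third and
   fourth bytes of a 32-bit constant CST one would write

     unsigned char buf[2];
     int n = native_encode_expr (cst, buf, 2, 2);

   With OFF == -1 the whole value must fit into the buffer; otherwise
   encoding starts at byte offset OFF and writes at most LEN bytes.  */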
7337
7338 /* Subroutine of native_interpret_expr. Interpret the contents of
7339 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7340 If the buffer cannot be interpreted, return NULL_TREE. */
7341
7342 static tree
7343 native_interpret_int (tree type, const unsigned char *ptr, int len)
7344 {
7345 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7346
7347 if (total_bytes > len
7348 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7349 return NULL_TREE;
7350
7351 wide_int result = wi::from_buffer (ptr, total_bytes);
7352
7353 return wide_int_to_tree (type, result);
7354 }
7355
7356
7357 /* Subroutine of native_interpret_expr. Interpret the contents of
7358 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7359 If the buffer cannot be interpreted, return NULL_TREE. */
7360
7361 static tree
7362 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7363 {
7364 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7365 double_int result;
7366 FIXED_VALUE_TYPE fixed_value;
7367
7368 if (total_bytes > len
7369 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7370 return NULL_TREE;
7371
7372 result = double_int::from_buffer (ptr, total_bytes);
7373 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7374
7375 return build_fixed (type, fixed_value);
7376 }
7377
7378
7379 /* Subroutine of native_interpret_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7381 If the buffer cannot be interpreted, return NULL_TREE. */
7382
7383 static tree
7384 native_interpret_real (tree type, const unsigned char *ptr, int len)
7385 {
7386 machine_mode mode = TYPE_MODE (type);
7387 int total_bytes = GET_MODE_SIZE (mode);
7388 unsigned char value;
7389 /* There are always 32 bits in each long, no matter the size of
7390 the host's long. We handle floating point representations with
7391 up to 192 bits. */
7392 REAL_VALUE_TYPE r;
7393 long tmp[6];
7394
7396 if (total_bytes > len || total_bytes > 24)
7397 return NULL_TREE;
7398 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7399
7400 memset (tmp, 0, sizeof (tmp));
7401 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7402 bitpos += BITS_PER_UNIT)
7403 {
7404 /* Both OFFSET and BYTE index within a long;
7405 bitpos indexes the whole float. */
7406 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7407 if (UNITS_PER_WORD < 4)
7408 {
7409 int word = byte / UNITS_PER_WORD;
7410 if (WORDS_BIG_ENDIAN)
7411 word = (words - 1) - word;
7412 offset = word * UNITS_PER_WORD;
7413 if (BYTES_BIG_ENDIAN)
7414 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7415 else
7416 offset += byte % UNITS_PER_WORD;
7417 }
7418 else
7419 {
7420 offset = byte;
7421 if (BYTES_BIG_ENDIAN)
7422 {
7423 /* Reverse bytes within each long, or within the entire float
7424 if it's smaller than a long (for HFmode). */
7425 offset = MIN (3, total_bytes - 1) - offset;
7426 gcc_assert (offset >= 0);
7427 }
7428 }
7429 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7430
7431 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7432 }
7433
7434 real_from_target (&r, tmp, mode);
7435 return build_real (type, r);
7436 }
7437
7438
7439 /* Subroutine of native_interpret_expr. Interpret the contents of
7440 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7441 If the buffer cannot be interpreted, return NULL_TREE. */
7442
7443 static tree
7444 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7445 {
7446 tree etype, rpart, ipart;
7447 int size;
7448
7449 etype = TREE_TYPE (type);
7450 size = GET_MODE_SIZE (TYPE_MODE (etype));
7451 if (size * 2 > len)
7452 return NULL_TREE;
7453 rpart = native_interpret_expr (etype, ptr, size);
7454 if (!rpart)
7455 return NULL_TREE;
7456 ipart = native_interpret_expr (etype, ptr+size, size);
7457 if (!ipart)
7458 return NULL_TREE;
7459 return build_complex (type, rpart, ipart);
7460 }
7461
7462
7463 /* Subroutine of native_interpret_expr. Interpret the contents of
7464 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7465 If the buffer cannot be interpreted, return NULL_TREE. */
7466
7467 static tree
7468 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7469 {
7470 tree etype, elem;
7471 int i, size, count;
7472 tree *elements;
7473
7474 etype = TREE_TYPE (type);
7475 size = GET_MODE_SIZE (TYPE_MODE (etype));
7476 count = TYPE_VECTOR_SUBPARTS (type);
7477 if (size * count > len)
7478 return NULL_TREE;
7479
7480 elements = XALLOCAVEC (tree, count);
7481 for (i = count - 1; i >= 0; i--)
7482 {
7483 elem = native_interpret_expr (etype, ptr+(i*size), size);
7484 if (!elem)
7485 return NULL_TREE;
7486 elements[i] = elem;
7487 }
7488 return build_vector (type, elements);
7489 }
7490
7491
7492 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7493 the buffer PTR of length LEN as a constant of type TYPE. For
7494 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7495 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7496 return NULL_TREE. */
7497
7498 tree
7499 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7500 {
7501 switch (TREE_CODE (type))
7502 {
7503 case INTEGER_TYPE:
7504 case ENUMERAL_TYPE:
7505 case BOOLEAN_TYPE:
7506 case POINTER_TYPE:
7507 case REFERENCE_TYPE:
7508 return native_interpret_int (type, ptr, len);
7509
7510 case REAL_TYPE:
7511 return native_interpret_real (type, ptr, len);
7512
7513 case FIXED_POINT_TYPE:
7514 return native_interpret_fixed (type, ptr, len);
7515
7516 case COMPLEX_TYPE:
7517 return native_interpret_complex (type, ptr, len);
7518
7519 case VECTOR_TYPE:
7520 return native_interpret_vector (type, ptr, len);
7521
7522 default:
7523 return NULL_TREE;
7524 }
7525 }
7526
7527 /* Returns true if we can interpret the contents of a native encoding
7528 as TYPE. */
7529
7530 static bool
7531 can_native_interpret_type_p (tree type)
7532 {
7533 switch (TREE_CODE (type))
7534 {
7535 case INTEGER_TYPE:
7536 case ENUMERAL_TYPE:
7537 case BOOLEAN_TYPE:
7538 case POINTER_TYPE:
7539 case REFERENCE_TYPE:
7540 case FIXED_POINT_TYPE:
7541 case REAL_TYPE:
7542 case COMPLEX_TYPE:
7543 case VECTOR_TYPE:
7544 return true;
7545 default:
7546 return false;
7547 }
7548 }
7549
7550 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7551 TYPE at compile-time. If we're unable to perform the conversion
7552 return NULL_TREE. */
7553
7554 static tree
7555 fold_view_convert_expr (tree type, tree expr)
7556 {
7557 /* We support up to 512-bit values (for V8DFmode). */
7558 unsigned char buffer[64];
7559 int len;
7560
7561 /* Check that the host and target are sane. */
7562 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7563 return NULL_TREE;
7564
7565 len = native_encode_expr (expr, buffer, sizeof (buffer));
7566 if (len == 0)
7567 return NULL_TREE;
7568
7569 return native_interpret_expr (type, buffer, len);
7570 }
7571
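/* For example (hypothetical, IEEE single precision, 8-bit bytes),

     VIEW_CONVERT_EXPR<int>(1.0f)

   folds to the INTEGER_CST 0x3f800000: the float is encoded into its
   target byte image and reinterpreted as an integer.  Both steps use
   the target's byte order, so the result is endian-correct.  */
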
7572 /* Build an expression for the address of T. Folds away INDIRECT_REF
7573 to avoid confusing the gimplify process. */
7574
7575 tree
7576 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7577 {
7578 /* The size of the object is not relevant when talking about its address. */
7579 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7580 t = TREE_OPERAND (t, 0);
7581
7582 if (TREE_CODE (t) == INDIRECT_REF)
7583 {
7584 t = TREE_OPERAND (t, 0);
7585
7586 if (TREE_TYPE (t) != ptrtype)
7587 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7588 }
7589 else if (TREE_CODE (t) == MEM_REF
7590 && integer_zerop (TREE_OPERAND (t, 1)))
7591 return TREE_OPERAND (t, 0);
7592 else if (TREE_CODE (t) == MEM_REF
7593 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7594 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7595 TREE_OPERAND (t, 0),
7596 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7597 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7598 {
7599 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7600
7601 if (TREE_TYPE (t) != ptrtype)
7602 t = fold_convert_loc (loc, ptrtype, t);
7603 }
7604 else
7605 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7606
7607 return t;
7608 }
7609
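/* Examples of the folds above, for a hypothetical pointer P:

     &*P             -->  P    (INDIRECT_REF is stripped)
     &MEM_REF[P, 0]  -->  P    (zero-offset MEM_REF)

   and a MEM_REF off a constant base address folds to a
   POINTER_PLUS_EXPR of the two constants.  */
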
7610 /* Build an expression for the address of T. */
7611
7612 tree
7613 build_fold_addr_expr_loc (location_t loc, tree t)
7614 {
7615 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7616
7617 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7618 }
7619
7620 /* Fold a unary expression of code CODE and type TYPE with operand
7621 OP0. Return the folded expression if folding is successful.
7622 Otherwise, return NULL_TREE. */
7623
7624 tree
7625 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7626 {
7627 tree tem;
7628 tree arg0;
7629 enum tree_code_class kind = TREE_CODE_CLASS (code);
7630
7631 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7632 && TREE_CODE_LENGTH (code) == 1);
7633
7634 arg0 = op0;
7635 if (arg0)
7636 {
7637 if (CONVERT_EXPR_CODE_P (code)
7638 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7639 {
7640 /* Don't use STRIP_NOPS, because signedness of argument type
7641 matters. */
7642 STRIP_SIGN_NOPS (arg0);
7643 }
7644 else
7645 {
7646 /* Strip any conversions that don't change the mode. This
7647 is safe for every expression, except for a comparison
7648 expression because its signedness is derived from its
7649 operands.
7650
7651 Note that this is done as an internal manipulation within
7652 the constant folder, in order to find the simplest
7653 representation of the arguments so that their form can be
7654 studied. In any cases, the appropriate type conversions
7655 should be put back in the tree that will get out of the
7656 constant folder. */
7657 STRIP_NOPS (arg0);
7658 }
7659
7660 if (CONSTANT_CLASS_P (arg0))
7661 {
7662 tree tem = const_unop (code, type, arg0);
7663 if (tem)
7664 {
7665 if (TREE_TYPE (tem) != type)
7666 tem = fold_convert_loc (loc, type, tem);
7667 return tem;
7668 }
7669 }
7670 }
7671
7672 tem = generic_simplify (loc, code, type, op0);
7673 if (tem)
7674 return tem;
7675
7676 if (TREE_CODE_CLASS (code) == tcc_unary)
7677 {
7678 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7679 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7680 fold_build1_loc (loc, code, type,
7681 fold_convert_loc (loc, TREE_TYPE (op0),
7682 TREE_OPERAND (arg0, 1))));
7683 else if (TREE_CODE (arg0) == COND_EXPR)
7684 {
7685 tree arg01 = TREE_OPERAND (arg0, 1);
7686 tree arg02 = TREE_OPERAND (arg0, 2);
7687 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7688 arg01 = fold_build1_loc (loc, code, type,
7689 fold_convert_loc (loc,
7690 TREE_TYPE (op0), arg01));
7691 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7692 arg02 = fold_build1_loc (loc, code, type,
7693 fold_convert_loc (loc,
7694 TREE_TYPE (op0), arg02));
7695 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7696 arg01, arg02);
7697
7698 /* If this was a conversion, and all we did was move it
7699 inside the COND_EXPR, bring it back out. But leave it if
7700 it is a conversion from integer to integer and the
7701 result precision is no wider than a word since such a
7702 conversion is cheap and may be optimized away by combine,
7703 while it couldn't if it were outside the COND_EXPR. Then return
7704 so we don't get into an infinite recursion loop taking the
7705 conversion out and then back in. */
7706
7707 if ((CONVERT_EXPR_CODE_P (code)
7708 || code == NON_LVALUE_EXPR)
7709 && TREE_CODE (tem) == COND_EXPR
7710 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7711 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7712 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7714 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7715 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7716 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7717 && (INTEGRAL_TYPE_P
7718 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7719 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7720 || flag_syntax_only))
7721 tem = build1_loc (loc, code, type,
7722 build3 (COND_EXPR,
7723 TREE_TYPE (TREE_OPERAND
7724 (TREE_OPERAND (tem, 1), 0)),
7725 TREE_OPERAND (tem, 0),
7726 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7727 TREE_OPERAND (TREE_OPERAND (tem, 2),
7728 0)));
7729 return tem;
7730 }
7731 }
7732
7733 switch (code)
7734 {
7735 case NON_LVALUE_EXPR:
7736 if (!maybe_lvalue_p (op0))
7737 return fold_convert_loc (loc, type, op0);
7738 return NULL_TREE;
7739
7740 CASE_CONVERT:
7741 case FLOAT_EXPR:
7742 case FIX_TRUNC_EXPR:
7743 if (COMPARISON_CLASS_P (op0))
7744 {
7745 /* If we have (type) (a CMP b) and type is an integral type, return
7746 new expression involving the new type. Canonicalize
7747 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7748 non-integral type.
7749 Do not fold the result, as that would not simplify further;
7750 folding it again would only recurse. */
7751 if (TREE_CODE (type) == BOOLEAN_TYPE)
7752 return build2_loc (loc, TREE_CODE (op0), type,
7753 TREE_OPERAND (op0, 0),
7754 TREE_OPERAND (op0, 1));
7755 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7756 && TREE_CODE (type) != VECTOR_TYPE)
7757 return build3_loc (loc, COND_EXPR, type, op0,
7758 constant_boolean_node (true, type),
7759 constant_boolean_node (false, type));
7760 }
7761
7762 /* Handle (T *)&A.B.C for A being of type T and B and C
7763 living at offset zero. This occurs frequently in
7764 C++ upcasting and then accessing the base. */
7765 if (TREE_CODE (op0) == ADDR_EXPR
7766 && POINTER_TYPE_P (type)
7767 && handled_component_p (TREE_OPERAND (op0, 0)))
7768 {
7769 HOST_WIDE_INT bitsize, bitpos;
7770 tree offset;
7771 machine_mode mode;
7772 int unsignedp, reversep, volatilep;
7773 tree base
7774 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7775 &offset, &mode, &unsignedp, &reversep,
7776 &volatilep, false);
7777 /* If the reference was to a (constant) zero offset, we can use
7778 the address of the base if it has the same base type
7779 as the result type and the pointer type is unqualified. */
7780 if (! offset && bitpos == 0
7781 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7782 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7783 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7784 return fold_convert_loc (loc, type,
7785 build_fold_addr_expr_loc (loc, base));
7786 }
7787
7788 if (TREE_CODE (op0) == MODIFY_EXPR
7789 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7790 /* Detect assigning a bitfield. */
7791 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7792 && DECL_BIT_FIELD
7793 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7794 {
7795 /* Don't leave an assignment inside a conversion
7796 unless assigning a bitfield. */
7797 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7798 /* First do the assignment, then return converted constant. */
7799 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7800 TREE_NO_WARNING (tem) = 1;
7801 TREE_USED (tem) = 1;
7802 return tem;
7803 }
7804
7805 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7806 constants (if x has signed type, the sign bit cannot be set
7807 in c). This folds extension into the BIT_AND_EXPR.
7808 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7809 very likely don't have maximal range for their precision and this
7810 transformation effectively doesn't preserve non-maximal ranges. */
7811 if (TREE_CODE (type) == INTEGER_TYPE
7812 && TREE_CODE (op0) == BIT_AND_EXPR
7813 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7814 {
7815 tree and_expr = op0;
7816 tree and0 = TREE_OPERAND (and_expr, 0);
7817 tree and1 = TREE_OPERAND (and_expr, 1);
7818 int change = 0;
7819
7820 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7821 || (TYPE_PRECISION (type)
7822 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7823 change = 1;
7824 else if (TYPE_PRECISION (TREE_TYPE (and1))
7825 <= HOST_BITS_PER_WIDE_INT
7826 && tree_fits_uhwi_p (and1))
7827 {
7828 unsigned HOST_WIDE_INT cst;
7829
7830 cst = tree_to_uhwi (and1);
7831 cst &= HOST_WIDE_INT_M1U
7832 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7833 change = (cst == 0);
7834 if (change
7835 && !flag_syntax_only
7836 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7837 == ZERO_EXTEND))
7838 {
7839 tree uns = unsigned_type_for (TREE_TYPE (and0));
7840 and0 = fold_convert_loc (loc, uns, and0);
7841 and1 = fold_convert_loc (loc, uns, and1);
7842 }
7843 }
7844 if (change)
7845 {
7846 tem = force_fit_type (type, wi::to_widest (and1), 0,
7847 TREE_OVERFLOW (and1));
7848 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7849 fold_convert_loc (loc, type, and0), tem);
7850 }
7851 }
7852
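/* For example (hypothetical types): with unsigned short x,

     (unsigned int) (x & 0xff00)  -->  (unsigned int) x & 0xff00

   The mask is interpreted in an unsigned operand type, so widening
   before masking yields the same bits.  */
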
7853 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7854 cast (T1)X will fold away. We assume that this happens when X itself
7855 is a cast. */
7856 if (POINTER_TYPE_P (type)
7857 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7858 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7859 {
7860 tree arg00 = TREE_OPERAND (arg0, 0);
7861 tree arg01 = TREE_OPERAND (arg0, 1);
7862
7863 return fold_build_pointer_plus_loc
7864 (loc, fold_convert_loc (loc, type, arg00), arg01);
7865 }
7866
7867 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7868 of the same precision, and X is an integer type not narrower than
7869 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7870 if (INTEGRAL_TYPE_P (type)
7871 && TREE_CODE (op0) == BIT_NOT_EXPR
7872 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7873 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7874 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7875 {
7876 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7877 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7878 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7879 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7880 fold_convert_loc (loc, type, tem));
7881 }
7882
7883 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7884 type of X and Y (integer types only). */
7885 if (INTEGRAL_TYPE_P (type)
7886 && TREE_CODE (op0) == MULT_EXPR
7887 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7888 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7889 {
7890 /* Be careful not to introduce new overflows. */
7891 tree mult_type;
7892 if (TYPE_OVERFLOW_WRAPS (type))
7893 mult_type = type;
7894 else
7895 mult_type = unsigned_type_for (type);
7896
7897 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7898 {
7899 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7900 fold_convert_loc (loc, mult_type,
7901 TREE_OPERAND (op0, 0)),
7902 fold_convert_loc (loc, mult_type,
7903 TREE_OPERAND (op0, 1)));
7904 return fold_convert_loc (loc, type, tem);
7905 }
7906 }
7907
7908 return NULL_TREE;
7909
7910 case VIEW_CONVERT_EXPR:
7911 if (TREE_CODE (op0) == MEM_REF)
7912 {
7913 tem = fold_build2_loc (loc, MEM_REF, type,
7914 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7915 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7916 return tem;
7917 }
7918
7919 return NULL_TREE;
7920
7921 case NEGATE_EXPR:
7922 tem = fold_negate_expr (loc, arg0);
7923 if (tem)
7924 return fold_convert_loc (loc, type, tem);
7925 return NULL_TREE;
7926
7927 case ABS_EXPR:
7928 /* Convert fabs((double)float) into (double)fabsf(float). */
7929 if (TREE_CODE (arg0) == NOP_EXPR
7930 && TREE_CODE (type) == REAL_TYPE)
7931 {
7932 tree targ0 = strip_float_extensions (arg0);
7933 if (targ0 != arg0)
7934 return fold_convert_loc (loc, type,
7935 fold_build1_loc (loc, ABS_EXPR,
7936 TREE_TYPE (targ0),
7937 targ0));
7938 }
7939 return NULL_TREE;
7940
7941 case BIT_NOT_EXPR:
7942 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7943 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7944 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7945 fold_convert_loc (loc, type,
7946 TREE_OPERAND (arg0, 0)))))
7947 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7948 fold_convert_loc (loc, type,
7949 TREE_OPERAND (arg0, 1)));
7950 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7951 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7952 fold_convert_loc (loc, type,
7953 TREE_OPERAND (arg0, 1)))))
7954 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7955 fold_convert_loc (loc, type,
7956 TREE_OPERAND (arg0, 0)), tem);
7957
7958 return NULL_TREE;
7959
7960 case TRUTH_NOT_EXPR:
7961 /* Note that the operand of this must be an int
7962 and its values must be 0 or 1.
7963 ("true" is a fixed value perhaps depending on the language,
7964 but we don't handle values other than 1 correctly yet.) */
7965 tem = fold_truth_not_expr (loc, arg0);
7966 if (!tem)
7967 return NULL_TREE;
7968 return fold_convert_loc (loc, type, tem);
7969
7970 case INDIRECT_REF:
7971 /* Fold *&X to X if X is an lvalue. */
7972 if (TREE_CODE (op0) == ADDR_EXPR)
7973 {
7974 tree op00 = TREE_OPERAND (op0, 0);
7975 if ((TREE_CODE (op00) == VAR_DECL
7976 || TREE_CODE (op00) == PARM_DECL
7977 || TREE_CODE (op00) == RESULT_DECL)
7978 && !TREE_READONLY (op00))
7979 return op00;
7980 }
7981 return NULL_TREE;
7982
7983 default:
7984 return NULL_TREE;
7985 } /* switch (code) */
7986 }
7987
7988
7989 /* If the operation was a conversion do _not_ mark a resulting constant
7990 with TREE_OVERFLOW if the original constant was not. These conversions
7991 have implementation defined behavior and retaining the TREE_OVERFLOW
7992 flag here would confuse later passes such as VRP. */
7993 tree
7994 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7995 tree type, tree op0)
7996 {
7997 tree res = fold_unary_loc (loc, code, type, op0);
7998 if (res
7999 && TREE_CODE (res) == INTEGER_CST
8000 && TREE_CODE (op0) == INTEGER_CST
8001 && CONVERT_EXPR_CODE_P (code))
8002 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8003
8004 return res;
8005 }
8006
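/* For example (hypothetical, 32-bit int): folding the conversion
   (int) 0x100000000ULL yields the INTEGER_CST 0, and force_fit_type
   would normally mark it with TREE_OVERFLOW; because the original
   constant had no overflow, the flag is cleared again here.  */
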
8007 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8008 operands OP0 and OP1. LOC is the location of the resulting expression.
8009 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8010 Return the folded expression if folding is successful. Otherwise,
8011 return NULL_TREE. */
8012 static tree
8013 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8014 tree arg0, tree arg1, tree op0, tree op1)
8015 {
8016 tree tem;
8017
8018 /* We only do these simplifications if we are optimizing. */
8019 if (!optimize)
8020 return NULL_TREE;
8021
8022 /* Check for things like (A || B) && (A || C). We can convert this
8023 to A || (B && C). Note that either operator can be any of the four
8024 truth and/or operations and the transformation will still be
8025 valid. Also note that we only care about order for the
8026 ANDIF and ORIF operators. If B contains side effects, this
8027 might change the truth-value of A. */
8028 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8029 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8030 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8031 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8032 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8033 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8034 {
8035 tree a00 = TREE_OPERAND (arg0, 0);
8036 tree a01 = TREE_OPERAND (arg0, 1);
8037 tree a10 = TREE_OPERAND (arg1, 0);
8038 tree a11 = TREE_OPERAND (arg1, 1);
8039 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8040 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8041 && (code == TRUTH_AND_EXPR
8042 || code == TRUTH_OR_EXPR));
8043
8044 if (operand_equal_p (a00, a10, 0))
8045 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8046 fold_build2_loc (loc, code, type, a01, a11));
8047 else if (commutative && operand_equal_p (a00, a11, 0))
8048 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8049 fold_build2_loc (loc, code, type, a01, a10));
8050 else if (commutative && operand_equal_p (a01, a10, 0))
8051 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8052 fold_build2_loc (loc, code, type, a00, a11));
8053
8054 /* This case is tricky because we must either have commutative
8055 operators or else A10 must not have side-effects. */
8056
8057 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8058 && operand_equal_p (a01, a11, 0))
8059 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8060 fold_build2_loc (loc, code, type, a00, a10),
8061 a01);
8062 }
8063
8064 /* See if we can build a range comparison. */
8065 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8066 return tem;
8067
8068 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8069 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8070 {
8071 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8072 if (tem)
8073 return fold_build2_loc (loc, code, type, tem, arg1);
8074 }
8075
8076 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8077 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8078 {
8079 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8080 if (tem)
8081 return fold_build2_loc (loc, code, type, arg0, tem);
8082 }
8083
8084 /* Check for the possibility of merging component references. If our
8085 lhs is another similar operation, try to merge its rhs with our
8086 rhs. Then try to merge our lhs and rhs. */
8087 if (TREE_CODE (arg0) == code
8088 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8089 TREE_OPERAND (arg0, 1), arg1)))
8090 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8091
8092 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8093 return tem;
8094
8095 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8096 && (code == TRUTH_AND_EXPR
8097 || code == TRUTH_ANDIF_EXPR
8098 || code == TRUTH_OR_EXPR
8099 || code == TRUTH_ORIF_EXPR))
8100 {
8101 enum tree_code ncode, icode;
8102
8103 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8104 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8105 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8106
8107 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8108 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8109 We don't want to pack more than two leaves into a non-IF AND/OR
8110 expression.
8111 If the tree code of the left-hand operand isn't an AND/OR-IF code
8112 and isn't equal to IF-CODE, then we don't want to add the right-hand
8113 operand. If the inner right-hand side of the left-hand operand has
8114 side-effects, or isn't simple, then we can't add to it, as otherwise
8115 we might destroy the if-sequence. */
8116 if (TREE_CODE (arg0) == icode
8117 && simple_operand_p_2 (arg1)
8118 /* Needed for sequence points to handle traps and
8119 side-effects. */
8120 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8121 {
8122 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8123 arg1);
8124 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8125 tem);
8126 }
8127 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8128 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8129 else if (TREE_CODE (arg1) == icode
8130 && simple_operand_p_2 (arg0)
8131 /* Needed for sequence points to handle traps and
8132 side-effects. */
8133 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8134 {
8135 tem = fold_build2_loc (loc, ncode, type,
8136 arg0, TREE_OPERAND (arg1, 0));
8137 return fold_build2_loc (loc, icode, type, tem,
8138 TREE_OPERAND (arg1, 1));
8139 }
8140 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8141 into (A OR B).
8142 For sequence point consistency, we need to check for trapping,
8143 and side-effects. */
8144 else if (code == icode && simple_operand_p_2 (arg0)
8145 && simple_operand_p_2 (arg1))
8146 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8147 }
8148
8149 return NULL_TREE;
8150 }
8151
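/* Illustrations of the simplifications above, for hypothetical
   operands a, b, c and x with no side effects:

     (a || b) && (a || c)  -->  a || (b && c)
     x >= 10 && x <= 19    -->  (unsigned) x - 10 <= 9

   where the second is the range test built by fold_range_test,
   shown approximately; the exact types come from the operands.  */
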
8152 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8153 by changing CODE to reduce the magnitude of constants involved in
8154 ARG0 of the comparison.
8155 Returns a canonicalized comparison tree if a simplification was
8156 possible, otherwise returns NULL_TREE.
8157 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8158 valid if signed overflow is undefined. */
8159
8160 static tree
8161 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8162 tree arg0, tree arg1,
8163 bool *strict_overflow_p)
8164 {
8165 enum tree_code code0 = TREE_CODE (arg0);
8166 tree t, cst0 = NULL_TREE;
8167 int sgn0;
8168
8169 /* Match A +- CST code arg1. We can change this only if overflow
8170 is undefined. */
8171 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8172 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8173 /* In principle pointers also have undefined overflow behavior,
8174 but that causes problems elsewhere. */
8175 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8176 && (code0 == MINUS_EXPR
8177 || code0 == PLUS_EXPR)
8178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8179 return NULL_TREE;
8180
8181 /* Identify the constant in arg0 and its sign. */
8182 cst0 = TREE_OPERAND (arg0, 1);
8183 sgn0 = tree_int_cst_sgn (cst0);
8184
8185 /* Overflowed constants and zero will cause problems. */
8186 if (integer_zerop (cst0)
8187 || TREE_OVERFLOW (cst0))
8188 return NULL_TREE;
8189
8190 /* See if we can reduce the magnitude of the constant in
8191 arg0 by changing the comparison code. */
8192 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8193 if (code == LT_EXPR
8194 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8195 code = LE_EXPR;
8196 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8197 else if (code == GT_EXPR
8198 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8199 code = GE_EXPR;
8200 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8201 else if (code == LE_EXPR
8202 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8203 code = LT_EXPR;
8204 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8205 else if (code == GE_EXPR
8206 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8207 code = GT_EXPR;
8208 else
8209 return NULL_TREE;
8210 *strict_overflow_p = true;
8211
8212 /* Now build the constant reduced in magnitude. But not if that
8213 would produce one outside of its type's range. */
8214 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8215 && ((sgn0 == 1
8216 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8217 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8218 || (sgn0 == -1
8219 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8220 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8221 return NULL_TREE;
8222
8223 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8224 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8225 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8226 t = fold_convert (TREE_TYPE (arg1), t);
8227
8228 return fold_build2_loc (loc, code, type, t, arg1);
8229 }
8230
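/* For example (hypothetical signed int x with undefined overflow):

     x - 1 < y   -->   x - 0 <= y   -->   x <= y

   The constant shrinks in magnitude from 1 to 0 at the cost of
   turning < into <=; *STRICT_OVERFLOW_P records that the result
   relies on signed overflow being undefined.  */
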
8231 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8232 overflow further. Try to decrease the magnitude of constants involved
8233 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8234 and put sole constants at the second argument position.
8235 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8236
8237 static tree
8238 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8239 tree arg0, tree arg1)
8240 {
8241 tree t;
8242 bool strict_overflow_p;
8243 const char * const warnmsg = G_("assuming signed overflow does not occur "
8244 "when reducing constant in comparison");
8245
8246 /* Try canonicalization by simplifying arg0. */
8247 strict_overflow_p = false;
8248 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8249 &strict_overflow_p);
8250 if (t)
8251 {
8252 if (strict_overflow_p)
8253 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8254 return t;
8255 }
8256
8257 /* Try canonicalization by simplifying arg1 using the swapped
8258 comparison. */
8259 code = swap_tree_comparison (code);
8260 strict_overflow_p = false;
8261 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8262 &strict_overflow_p);
8263 if (t && strict_overflow_p)
8264 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8265 return t;
8266 }
8267
8268 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8269 space. This is used to avoid issuing overflow warnings for
8270 expressions like &p->x which cannot wrap. */
8271
8272 static bool
8273 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8274 {
8275 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8276 return true;
8277
8278 if (bitpos < 0)
8279 return true;
8280
8281 wide_int wi_offset;
8282 int precision = TYPE_PRECISION (TREE_TYPE (base));
8283 if (offset == NULL_TREE)
8284 wi_offset = wi::zero (precision);
8285 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8286 return true;
8287 else
8288 wi_offset = offset;
8289
8290 bool overflow;
8291 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8292 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8293 if (overflow)
8294 return true;
8295
8296 if (!wi::fits_uhwi_p (total))
8297 return true;
8298
8299 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8300 if (size <= 0)
8301 return true;
8302
8303 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8304 array. */
8305 if (TREE_CODE (base) == ADDR_EXPR)
8306 {
8307 HOST_WIDE_INT base_size;
8308
8309 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8310 if (base_size > 0 && size < base_size)
8311 size = base_size;
8312 }
8313
8314 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8315 }
8316
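/* For example, given a hypothetical `struct s { int a; int b; } *p'
   with 32-bit int, the address &p->b has BASE p, no OFFSET and
   BITPOS 32: the byte offset 4 does not exceed the 8-byte object,
   so the address computation cannot wrap.  */
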
8317 /* Subroutine of fold_binary. This routine performs all of the
8318 transformations that are common to the equality/inequality
8319 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8320 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8321 fold_binary should call fold_binary. Fold a comparison with
8322 tree code CODE and type TYPE with operands OP0 and OP1. Return
8323 the folded comparison or NULL_TREE. */
8324
8325 static tree
8326 fold_comparison (location_t loc, enum tree_code code, tree type,
8327 tree op0, tree op1)
8328 {
8329 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8330 tree arg0, arg1, tem;
8331
8332 arg0 = op0;
8333 arg1 = op1;
8334
8335 STRIP_SIGN_NOPS (arg0);
8336 STRIP_SIGN_NOPS (arg1);
8337
8338 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8339 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8340 && (equality_code
8341 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8342 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8343 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8344 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8345 && TREE_CODE (arg1) == INTEGER_CST
8346 && !TREE_OVERFLOW (arg1))
8347 {
8348 const enum tree_code
8349 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8350 tree const1 = TREE_OPERAND (arg0, 1);
8351 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8352 tree variable = TREE_OPERAND (arg0, 0);
8353 tree new_const = int_const_binop (reverse_op, const2, const1);
8354
8355 /* If the constant operation overflowed this can be
8356 simplified as a comparison against INT_MAX/INT_MIN. */
8357 if (TREE_OVERFLOW (new_const)
8358 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8359 {
8360 int const1_sgn = tree_int_cst_sgn (const1);
8361 enum tree_code code2 = code;
8362
8363 /* Get the sign of the constant on the lhs if the
8364 operation were VARIABLE + CONST1. */
8365 if (TREE_CODE (arg0) == MINUS_EXPR)
8366 const1_sgn = -const1_sgn;
8367
8368 /* The sign of the constant determines if we overflowed
8369 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8370 Canonicalize to the INT_MIN overflow by swapping the comparison
8371 if necessary. */
8372 if (const1_sgn == -1)
8373 code2 = swap_tree_comparison (code);
8374
8375 /* We now can look at the canonicalized case
8376 VARIABLE + 1 CODE2 INT_MIN
8377 and decide on the result. */
8378 switch (code2)
8379 {
8380 case EQ_EXPR:
8381 case LT_EXPR:
8382 case LE_EXPR:
8383 return
8384 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8385
8386 case NE_EXPR:
8387 case GE_EXPR:
8388 case GT_EXPR:
8389 return
8390 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8391
8392 default:
8393 gcc_unreachable ();
8394 }
8395 }
8396 else
8397 {
8398 if (!equality_code)
8399 fold_overflow_warning ("assuming signed overflow does not occur "
8400 "when changing X +- C1 cmp C2 to "
8401 "X cmp C2 -+ C1",
8402 WARN_STRICT_OVERFLOW_COMPARISON);
8403 return fold_build2_loc (loc, code, type, variable, new_const);
8404 }
8405 }
8406
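/* Examples of the transformation above, for a hypothetical signed
   int x with undefined overflow:

     x + 10 < 30       -->  x < 20
     x + 1 <= INT_MIN  -->  false

   In the second case C2 -+ C1 overflows, so the comparison decays to
   a constant while preserving any side effects of the variable.  */
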
8407 /* For comparisons of pointers we can decompose it to a compile time
8408 comparison of the base objects and the offsets into the object.
8409 This requires at least one operand being an ADDR_EXPR or a
8410 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8411 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8412 && (TREE_CODE (arg0) == ADDR_EXPR
8413 || TREE_CODE (arg1) == ADDR_EXPR
8414 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8415 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8416 {
8417 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8418 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8419 machine_mode mode;
8420 int volatilep, reversep, unsignedp;
8421 bool indirect_base0 = false, indirect_base1 = false;
8422
8423 /* Get base and offset for the access. Strip ADDR_EXPR for
8424 get_inner_reference, but put it back by stripping INDIRECT_REF
8425 off the base object if possible. indirect_baseN will be true
8426 if baseN is not an address but refers to the object itself. */
8427 base0 = arg0;
8428 if (TREE_CODE (arg0) == ADDR_EXPR)
8429 {
8430 base0
8431 = get_inner_reference (TREE_OPERAND (arg0, 0),
8432 &bitsize, &bitpos0, &offset0, &mode,
8433 &unsignedp, &reversep, &volatilep, false);
8434 if (TREE_CODE (base0) == INDIRECT_REF)
8435 base0 = TREE_OPERAND (base0, 0);
8436 else
8437 indirect_base0 = true;
8438 }
8439 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8440 {
8441 base0 = TREE_OPERAND (arg0, 0);
8442 STRIP_SIGN_NOPS (base0);
8443 if (TREE_CODE (base0) == ADDR_EXPR)
8444 {
8445 base0
8446 = get_inner_reference (TREE_OPERAND (base0, 0),
8447 &bitsize, &bitpos0, &offset0, &mode,
8448 &unsignedp, &reversep, &volatilep,
8449 false);
8450 if (TREE_CODE (base0) == INDIRECT_REF)
8451 base0 = TREE_OPERAND (base0, 0);
8452 else
8453 indirect_base0 = true;
8454 }
8455 if (offset0 == NULL_TREE || integer_zerop (offset0))
8456 offset0 = TREE_OPERAND (arg0, 1);
8457 else
8458 offset0 = size_binop (PLUS_EXPR, offset0,
8459 TREE_OPERAND (arg0, 1));
8460 if (TREE_CODE (offset0) == INTEGER_CST)
8461 {
8462 offset_int tem = wi::sext (wi::to_offset (offset0),
8463 TYPE_PRECISION (sizetype));
8464 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8465 tem += bitpos0;
8466 if (wi::fits_shwi_p (tem))
8467 {
8468 bitpos0 = tem.to_shwi ();
8469 offset0 = NULL_TREE;
8470 }
8471 }
8472 }
8473
8474 base1 = arg1;
8475 if (TREE_CODE (arg1) == ADDR_EXPR)
8476 {
8477 base1
8478 = get_inner_reference (TREE_OPERAND (arg1, 0),
8479 &bitsize, &bitpos1, &offset1, &mode,
8480 &unsignedp, &reversep, &volatilep, false);
8481 if (TREE_CODE (base1) == INDIRECT_REF)
8482 base1 = TREE_OPERAND (base1, 0);
8483 else
8484 indirect_base1 = true;
8485 }
8486 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8487 {
8488 base1 = TREE_OPERAND (arg1, 0);
8489 STRIP_SIGN_NOPS (base1);
8490 if (TREE_CODE (base1) == ADDR_EXPR)
8491 {
8492 base1
8493 = get_inner_reference (TREE_OPERAND (base1, 0),
8494 &bitsize, &bitpos1, &offset1, &mode,
8495 &unsignedp, &reversep, &volatilep,
8496 false);
8497 if (TREE_CODE (base1) == INDIRECT_REF)
8498 base1 = TREE_OPERAND (base1, 0);
8499 else
8500 indirect_base1 = true;
8501 }
8502 if (offset1 == NULL_TREE || integer_zerop (offset1))
8503 offset1 = TREE_OPERAND (arg1, 1);
8504 else
8505 offset1 = size_binop (PLUS_EXPR, offset1,
8506 TREE_OPERAND (arg1, 1));
8507 if (TREE_CODE (offset1) == INTEGER_CST)
8508 {
8509 offset_int tem = wi::sext (wi::to_offset (offset1),
8510 TYPE_PRECISION (sizetype));
8511 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8512 tem += bitpos1;
8513 if (wi::fits_shwi_p (tem))
8514 {
8515 bitpos1 = tem.to_shwi ();
8516 offset1 = NULL_TREE;
8517 }
8518 }
8519 }
8520
8521 /* If we have equivalent bases we might be able to simplify. */
8522 if (indirect_base0 == indirect_base1
8523 && operand_equal_p (base0, base1,
8524 indirect_base0 ? OEP_ADDRESS_OF : 0))
8525 {
8526 /* We can fold this expression to a constant if the non-constant
8527 offset parts are equal. */
8528 if ((offset0 == offset1
8529 || (offset0 && offset1
8530 && operand_equal_p (offset0, offset1, 0)))
8531 && (code == EQ_EXPR
8532 || code == NE_EXPR
8533 || (indirect_base0 && DECL_P (base0))
8534 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8536 {
8537 if (!equality_code
8538 && bitpos0 != bitpos1
8539 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8540 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8541 fold_overflow_warning (("assuming pointer wraparound does not "
8542 "occur when comparing P +- C1 with "
8543 "P +- C2"),
8544 WARN_STRICT_OVERFLOW_CONDITIONAL);
8545
8546 switch (code)
8547 {
8548 case EQ_EXPR:
8549 return constant_boolean_node (bitpos0 == bitpos1, type);
8550 case NE_EXPR:
8551 return constant_boolean_node (bitpos0 != bitpos1, type);
8552 case LT_EXPR:
8553 return constant_boolean_node (bitpos0 < bitpos1, type);
8554 case LE_EXPR:
8555 return constant_boolean_node (bitpos0 <= bitpos1, type);
8556 case GE_EXPR:
8557 return constant_boolean_node (bitpos0 >= bitpos1, type);
8558 case GT_EXPR:
8559 return constant_boolean_node (bitpos0 > bitpos1, type);
8560 default:;
8561 }
8562 }
8563 /* We can simplify the comparison to a comparison of the variable
8564 offset parts if the constant offset parts are equal.
8565 Be careful to use signed sizetype here because otherwise we
8566 mess with array offsets in the wrong way. This is possible
8567 because pointer arithmetic is restricted to remain within an
8568 object and overflow on pointer differences is undefined as of
8569 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8570 else if (bitpos0 == bitpos1
8571 && (equality_code
8572 || (indirect_base0 && DECL_P (base0))
8573 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8574 {
8575 /* By converting to signed sizetype we cover middle-end pointer
8576 arithmetic which operates on unsigned pointer types of size
8577 type size and ARRAY_REF offsets which are properly sign or
8578 zero extended from their type in case it is narrower than
8579 sizetype. */
8580 if (offset0 == NULL_TREE)
8581 offset0 = build_int_cst (ssizetype, 0);
8582 else
8583 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8584 if (offset1 == NULL_TREE)
8585 offset1 = build_int_cst (ssizetype, 0);
8586 else
8587 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8588
8589 if (!equality_code
8590 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8591 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8592 fold_overflow_warning (("assuming pointer wraparound does not "
8593 "occur when comparing P +- C1 with "
8594 "P +- C2"),
8595 WARN_STRICT_OVERFLOW_COMPARISON);
8596
8597 return fold_build2_loc (loc, code, type, offset0, offset1);
8598 }
8599 }
8600 /* For equal offsets we can simplify to a comparison of the
8601 base addresses. */
8602 else if (bitpos0 == bitpos1
8603 && (indirect_base0
8604 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8605 && (indirect_base1
8606 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8607 && ((offset0 == offset1)
8608 || (offset0 && offset1
8609 && operand_equal_p (offset0, offset1, 0))))
8610 {
8611 if (indirect_base0)
8612 base0 = build_fold_addr_expr_loc (loc, base0);
8613 if (indirect_base1)
8614 base1 = build_fold_addr_expr_loc (loc, base1);
8615 return fold_build2_loc (loc, code, type, base0, base1);
8616 }
8617 }
8618
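/* For example, with a hypothetical `int a[4]' and indices i and j:

     &a[1] < &a[3]   -->  true            (same base, constant offsets)
     &a[i] == &a[j]  -->  the variable offsets are compared in
                          ssizetype, which can fold further to i == j.  */
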
8619 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8620 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8621 the resulting offset is smaller in absolute value than the
8622 original one and has the same sign. */
8623 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8624 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8625 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8626 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8627 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8628 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8629 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8630 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8631 {
8632 tree const1 = TREE_OPERAND (arg0, 1);
8633 tree const2 = TREE_OPERAND (arg1, 1);
8634 tree variable1 = TREE_OPERAND (arg0, 0);
8635 tree variable2 = TREE_OPERAND (arg1, 0);
8636 tree cst;
8637 const char * const warnmsg = G_("assuming signed overflow does not "
8638 "occur when combining constants around "
8639 "a comparison");
8640
8641 /* Put the constant on the side where it doesn't overflow and is
8642 	     of lower absolute value than, and of the same sign as, before.  */
8643 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8644 ? MINUS_EXPR : PLUS_EXPR,
8645 const2, const1);
8646 if (!TREE_OVERFLOW (cst)
8647 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8648 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8649 {
8650 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8651 return fold_build2_loc (loc, code, type,
8652 variable1,
8653 fold_build2_loc (loc, TREE_CODE (arg1),
8654 TREE_TYPE (arg1),
8655 variable2, cst));
8656 }
8657
8658 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8659 ? MINUS_EXPR : PLUS_EXPR,
8660 const1, const2);
8661 if (!TREE_OVERFLOW (cst)
8662 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8663 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8664 {
8665 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8666 return fold_build2_loc (loc, code, type,
8667 fold_build2_loc (loc, TREE_CODE (arg0),
8668 TREE_TYPE (arg0),
8669 variable1, cst),
8670 variable2);
8671 }
8672 }
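	  /* Worked example (illustrative): with signed int x, y and undefined
	     overflow, "x + 2 < y + 5" becomes "x < y + 3", since the combined
	     constant 3 has the same sign as 5 and a smaller absolute value,
	     so the rewrite cannot introduce a new overflow.  */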
8673
8674 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8675 if (tem)
8676 return tem;
8677
8678 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8679 constant, we can simplify it. */
8680 if (TREE_CODE (arg1) == INTEGER_CST
8681 && (TREE_CODE (arg0) == MIN_EXPR
8682 || TREE_CODE (arg0) == MAX_EXPR)
8683 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8684 {
8685 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8686 if (tem)
8687 return tem;
8688 }
8689
8690 /* If we are comparing an expression that just has comparisons
8691 of two integer values, arithmetic expressions of those comparisons,
8692 and constants, we can simplify it. There are only three cases
8693 to check: the two values can either be equal, the first can be
8694 greater, or the second can be greater. Fold the expression for
8695 those three values. Since each value must be 0 or 1, we have
8696 eight possibilities, each of which corresponds to the constant 0
8697 or 1 or one of the six possible comparisons.
8698
8699 This handles common cases like (a > b) == 0 but also handles
8700 expressions like ((x > y) - (y > x)) > 0, which supposedly
8701 occur in macroized code. */
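      /* For instance, ((x > y) - (y > x)) > 0 evaluated at the three sample
	 points (x greater, x equal, x less) yields 1, 0, 0, i.e. mask 4
	 below, and is therefore folded to x > y.  */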
8702
8703 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8704 {
8705 tree cval1 = 0, cval2 = 0;
8706 int save_p = 0;
8707
8708 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8709 /* Don't handle degenerate cases here; they should already
8710 have been handled anyway. */
8711 && cval1 != 0 && cval2 != 0
8712 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8713 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8714 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8715 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8716 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8717 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8718 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8719 {
8720 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8721 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8722
8723 /* We can't just pass T to eval_subst in case cval1 or cval2
8724 was the same as ARG1. */
8725
8726 tree high_result
8727 = fold_build2_loc (loc, code, type,
8728 eval_subst (loc, arg0, cval1, maxval,
8729 cval2, minval),
8730 arg1);
8731 tree equal_result
8732 = fold_build2_loc (loc, code, type,
8733 eval_subst (loc, arg0, cval1, maxval,
8734 cval2, maxval),
8735 arg1);
8736 tree low_result
8737 = fold_build2_loc (loc, code, type,
8738 eval_subst (loc, arg0, cval1, minval,
8739 cval2, maxval),
8740 arg1);
8741
8742 /* All three of these results should be 0 or 1. Confirm they are.
8743 Then use those values to select the proper code to use. */
8744
8745 if (TREE_CODE (high_result) == INTEGER_CST
8746 && TREE_CODE (equal_result) == INTEGER_CST
8747 && TREE_CODE (low_result) == INTEGER_CST)
8748 {
8749 /* Make a 3-bit mask with the high-order bit being the
8750 value for `>', the next for '=', and the low for '<'. */
8751 switch ((integer_onep (high_result) * 4)
8752 + (integer_onep (equal_result) * 2)
8753 + integer_onep (low_result))
8754 {
8755 case 0:
8756 /* Always false. */
8757 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8758 case 1:
8759 code = LT_EXPR;
8760 break;
8761 case 2:
8762 code = EQ_EXPR;
8763 break;
8764 case 3:
8765 code = LE_EXPR;
8766 break;
8767 case 4:
8768 code = GT_EXPR;
8769 break;
8770 case 5:
8771 code = NE_EXPR;
8772 break;
8773 case 6:
8774 code = GE_EXPR;
8775 break;
8776 case 7:
8777 /* Always true. */
8778 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8779 }
8780
8781 if (save_p)
8782 {
8783 tem = save_expr (build2 (code, type, cval1, cval2));
8784 SET_EXPR_LOCATION (tem, loc);
8785 return tem;
8786 }
8787 return fold_build2_loc (loc, code, type, cval1, cval2);
8788 }
8789 }
8790 }
8791
8792 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8793 into a single range test. */
8794 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8795 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8796 && TREE_CODE (arg1) == INTEGER_CST
8797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8798 && !integer_zerop (TREE_OPERAND (arg0, 1))
8799 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8800 && !TREE_OVERFLOW (arg1))
8801 {
8802 tem = fold_div_compare (loc, code, type, arg0, arg1);
8803 if (tem != NULL_TREE)
8804 return tem;
8805 }
8806
8807 return NULL_TREE;
8808 }
8809
8810
8811 /* Subroutine of fold_binary. Optimize complex multiplications of the
8812 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8813 argument EXPR represents the expression "z" of type TYPE. */
8814
8815 static tree
8816 fold_mult_zconjz (location_t loc, tree type, tree expr)
8817 {
8818 tree itype = TREE_TYPE (type);
8819 tree rpart, ipart, tem;
8820
8821 if (TREE_CODE (expr) == COMPLEX_EXPR)
8822 {
8823 rpart = TREE_OPERAND (expr, 0);
8824 ipart = TREE_OPERAND (expr, 1);
8825 }
8826 else if (TREE_CODE (expr) == COMPLEX_CST)
8827 {
8828 rpart = TREE_REALPART (expr);
8829 ipart = TREE_IMAGPART (expr);
8830 }
8831 else
8832 {
8833 expr = save_expr (expr);
8834 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8835 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8836 }
8837
8838 rpart = save_expr (rpart);
8839 ipart = save_expr (ipart);
8840 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8841 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8842 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8843 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8844 build_zero_cst (itype));
8845 }
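/* Illustration: for z = a + b*i the function above builds
   COMPLEX_EXPR <a*a + b*b, 0>, with the parts wrapped in SAVE_EXPRs
   so that a and b are evaluated only once.  */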
8846
8847
8848 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8849 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8850
8851 static bool
8852 vec_cst_ctor_to_array (tree arg, tree *elts)
8853 {
8854 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8855
8856 if (TREE_CODE (arg) == VECTOR_CST)
8857 {
8858 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8859 elts[i] = VECTOR_CST_ELT (arg, i);
8860 }
8861 else if (TREE_CODE (arg) == CONSTRUCTOR)
8862 {
8863 constructor_elt *elt;
8864
8865 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8866 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8867 return false;
8868 else
8869 elts[i] = elt->value;
8870 }
8871 else
8872 return false;
8873 for (; i < nelts; i++)
8874 elts[i]
8875 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8876 return true;
8877 }
8878
8879 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8880 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8881 NULL_TREE otherwise. */
8882
8883 static tree
8884 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8885 {
8886 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8887 tree *elts;
8888 bool need_ctor = false;
8889
8890 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8891 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8892 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8893 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8894 return NULL_TREE;
8895
8896 elts = XALLOCAVEC (tree, nelts * 3);
8897 if (!vec_cst_ctor_to_array (arg0, elts)
8898 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8899 return NULL_TREE;
8900
8901 for (i = 0; i < nelts; i++)
8902 {
8903 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8904 need_ctor = true;
8905 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8906 }
8907
8908 if (need_ctor)
8909 {
8910 vec<constructor_elt, va_gc> *v;
8911 vec_alloc (v, nelts);
8912 for (i = 0; i < nelts; i++)
8913 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8914 return build_constructor (type, v);
8915 }
8916 else
8917 return build_vector (type, &elts[2 * nelts]);
8918 }
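/* Example (illustrative): for four-element vectors arg0 = {a,b,c,d},
   arg1 = {e,f,g,h} and sel = {0,4,1,5}, ELTS holds a..h followed by the
   selected {a,e,b,f}; a VECTOR_CST is built unless one of the selected
   elements is non-constant, in which case a CONSTRUCTOR is returned
   instead.  */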
8919
8920 /* Try to fold a pointer difference of type TYPE between two address expressions of
8921 array references AREF0 and AREF1 using location LOC. Return a
8922 simplified expression for the difference or NULL_TREE. */
8923
8924 static tree
8925 fold_addr_of_array_ref_difference (location_t loc, tree type,
8926 tree aref0, tree aref1)
8927 {
8928 tree base0 = TREE_OPERAND (aref0, 0);
8929 tree base1 = TREE_OPERAND (aref1, 0);
8930 tree base_offset = build_int_cst (type, 0);
8931
8932 /* If the bases are array references as well, recurse. If the bases
8933      are pointer indirections, compute the difference of the pointers.
8934 If the bases are equal, we are set. */
8935 if ((TREE_CODE (base0) == ARRAY_REF
8936 && TREE_CODE (base1) == ARRAY_REF
8937 && (base_offset
8938 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8939 || (INDIRECT_REF_P (base0)
8940 && INDIRECT_REF_P (base1)
8941 && (base_offset
8942 = fold_binary_loc (loc, MINUS_EXPR, type,
8943 fold_convert (type, TREE_OPERAND (base0, 0)),
8944 fold_convert (type,
8945 TREE_OPERAND (base1, 0)))))
8946 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8947 {
8948 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8949 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8950 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8951 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8952 return fold_build2_loc (loc, PLUS_EXPR, type,
8953 base_offset,
8954 fold_build2_loc (loc, MULT_EXPR, type,
8955 diff, esz));
8956 }
8957 return NULL_TREE;
8958 }
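/* Illustration: &a[i] - &a[j] yields 0 + (i - j) * sizeof (a[0]), and
   for a two-dimensional array &a[i][k] - &a[j][m] the recursion on the
   outer ARRAY_REFs produces
   (i - j) * sizeof (a[0]) + (k - m) * sizeof (a[0][0]).  */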
8959
8960 /* If the real or vector real constant CST of type TYPE has an exact
8961 inverse, return it, else return NULL. */
8962
8963 tree
8964 exact_inverse (tree type, tree cst)
8965 {
8966 REAL_VALUE_TYPE r;
8967 tree unit_type, *elts;
8968 machine_mode mode;
8969 unsigned vec_nelts, i;
8970
8971 switch (TREE_CODE (cst))
8972 {
8973 case REAL_CST:
8974 r = TREE_REAL_CST (cst);
8975
8976 if (exact_real_inverse (TYPE_MODE (type), &r))
8977 return build_real (type, r);
8978
8979 return NULL_TREE;
8980
8981 case VECTOR_CST:
8982 vec_nelts = VECTOR_CST_NELTS (cst);
8983 elts = XALLOCAVEC (tree, vec_nelts);
8984 unit_type = TREE_TYPE (type);
8985 mode = TYPE_MODE (unit_type);
8986
8987 for (i = 0; i < vec_nelts; i++)
8988 {
8989 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8990 if (!exact_real_inverse (mode, &r))
8991 return NULL_TREE;
8992 elts[i] = build_real (unit_type, r);
8993 }
8994
8995 return build_vector (type, elts);
8996
8997 default:
8998 return NULL_TREE;
8999 }
9000 }
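/* Examples: exact_inverse of 4.0 is 0.25 (exactly representable),
   while 3.0 yields NULL_TREE because 1/3 has no exact binary
   representation; for a VECTOR_CST every element must have an exact
   inverse.  */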
9001
9002 /* Mask out the tz least significant bits of X of type TYPE where
9003 tz is the number of trailing zeroes in Y. */
9004 static wide_int
9005 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9006 {
9007 int tz = wi::ctz (y);
9008 if (tz > 0)
9009 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9010 return x;
9011 }
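/* Example: for y = 8 (three trailing zeroes) and x = 0b10111 the
   result is 0b10000: the low ctz(y) bits of x are cleared.  */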
9012
9013 /* Return true when T is an address and is known to be nonzero.
9014 For floating point we further ensure that T is not denormal.
9015    Similar logic is present in nonzero_address in rtlanal.c.
9016
9017 If the return value is based on the assumption that signed overflow
9018 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9019 change *STRICT_OVERFLOW_P. */
9020
9021 static bool
9022 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9023 {
9024 tree type = TREE_TYPE (t);
9025 enum tree_code code;
9026
9027 /* Doing something useful for floating point would need more work. */
9028 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9029 return false;
9030
9031 code = TREE_CODE (t);
9032 switch (TREE_CODE_CLASS (code))
9033 {
9034 case tcc_unary:
9035 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9036 strict_overflow_p);
9037 case tcc_binary:
9038 case tcc_comparison:
9039 return tree_binary_nonzero_warnv_p (code, type,
9040 TREE_OPERAND (t, 0),
9041 TREE_OPERAND (t, 1),
9042 strict_overflow_p);
9043 case tcc_constant:
9044 case tcc_declaration:
9045 case tcc_reference:
9046 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9047
9048 default:
9049 break;
9050 }
9051
9052 switch (code)
9053 {
9054 case TRUTH_NOT_EXPR:
9055 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9056 strict_overflow_p);
9057
9058 case TRUTH_AND_EXPR:
9059 case TRUTH_OR_EXPR:
9060 case TRUTH_XOR_EXPR:
9061 return tree_binary_nonzero_warnv_p (code, type,
9062 TREE_OPERAND (t, 0),
9063 TREE_OPERAND (t, 1),
9064 strict_overflow_p);
9065
9066 case COND_EXPR:
9067 case CONSTRUCTOR:
9068 case OBJ_TYPE_REF:
9069 case ASSERT_EXPR:
9070 case ADDR_EXPR:
9071 case WITH_SIZE_EXPR:
9072 case SSA_NAME:
9073 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9074
9075 case COMPOUND_EXPR:
9076 case MODIFY_EXPR:
9077 case BIND_EXPR:
9078 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9079 strict_overflow_p);
9080
9081 case SAVE_EXPR:
9082 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9083 strict_overflow_p);
9084
9085 case CALL_EXPR:
9086 {
9087 tree fndecl = get_callee_fndecl (t);
9088 if (!fndecl) return false;
9089 if (flag_delete_null_pointer_checks && !flag_check_new
9090 && DECL_IS_OPERATOR_NEW (fndecl)
9091 && !TREE_NOTHROW (fndecl))
9092 return true;
9093 if (flag_delete_null_pointer_checks
9094 && lookup_attribute ("returns_nonnull",
9095 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9096 return true;
9097 return alloca_call_p (t);
9098 }
9099
9100 default:
9101 break;
9102 }
9103 return false;
9104 }
9105
9106 /* Return true when T is an address and is known to be nonzero.
9107 Handle warnings about undefined signed overflow. */
9108
9109 static bool
9110 tree_expr_nonzero_p (tree t)
9111 {
9112 bool ret, strict_overflow_p;
9113
9114 strict_overflow_p = false;
9115 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9116 if (strict_overflow_p)
9117 fold_overflow_warning (("assuming signed overflow does not occur when "
9118 "determining that expression is always "
9119 "non-zero"),
9120 WARN_STRICT_OVERFLOW_MISC);
9121 return ret;
9122 }
9123
9124 /* Return true if T is known not to be equal to an integer W. */
9125
9126 bool
9127 expr_not_equal_to (tree t, const wide_int &w)
9128 {
9129 wide_int min, max, nz;
9130 value_range_type rtype;
9131 switch (TREE_CODE (t))
9132 {
9133 case INTEGER_CST:
9134 return wi::ne_p (t, w);
9135
9136 case SSA_NAME:
9137 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9138 return false;
9139 rtype = get_range_info (t, &min, &max);
9140 if (rtype == VR_RANGE)
9141 {
9142 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9143 return true;
9144 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9145 return true;
9146 }
9147 else if (rtype == VR_ANTI_RANGE
9148 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9149 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9150 return true;
9151 /* If T has some known zero bits and W has any of those bits set,
9152 then T is known not to be equal to W. */
9153 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9154 TYPE_PRECISION (TREE_TYPE (t))), 0))
9155 return true;
9156 return false;
9157
9158 default:
9159 return false;
9160 }
9161 }
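/* Usage sketch: an SSA_NAME with recorded range [10, 20] is known to
   differ from 5 via the VR_RANGE test, and one whose known-zero bits
   include bit 0 is known to differ from any odd W.  */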
9162
9163 /* Fold a binary expression of code CODE and type TYPE with operands
9164 OP0 and OP1. LOC is the location of the resulting expression.
9165 Return the folded expression if folding is successful. Otherwise,
9166 return NULL_TREE. */
9167
9168 tree
9169 fold_binary_loc (location_t loc,
9170 enum tree_code code, tree type, tree op0, tree op1)
9171 {
9172 enum tree_code_class kind = TREE_CODE_CLASS (code);
9173 tree arg0, arg1, tem;
9174 tree t1 = NULL_TREE;
9175 bool strict_overflow_p;
9176 unsigned int prec;
9177
9178 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9179 && TREE_CODE_LENGTH (code) == 2
9180 && op0 != NULL_TREE
9181 && op1 != NULL_TREE);
9182
9183 arg0 = op0;
9184 arg1 = op1;
9185
9186 /* Strip any conversions that don't change the mode. This is
9187 safe for every expression, except for a comparison expression
9188 because its signedness is derived from its operands. So, in
9189 the latter case, only strip conversions that don't change the
9190 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9191 preserved.
9192
9193 Note that this is done as an internal manipulation within the
9194 constant folder, in order to find the simplest representation
9195 of the arguments so that their form can be studied. In any
9196      case, the appropriate type conversions should be put back in
9197 the tree that will get out of the constant folder. */
9198
9199 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9200 {
9201 STRIP_SIGN_NOPS (arg0);
9202 STRIP_SIGN_NOPS (arg1);
9203 }
9204 else
9205 {
9206 STRIP_NOPS (arg0);
9207 STRIP_NOPS (arg1);
9208 }
9209
9210 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9211 constant but we can't do arithmetic on them. */
9212 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9213 {
9214 tem = const_binop (code, type, arg0, arg1);
9215 if (tem != NULL_TREE)
9216 {
9217 if (TREE_TYPE (tem) != type)
9218 tem = fold_convert_loc (loc, type, tem);
9219 return tem;
9220 }
9221 }
9222
9223 /* If this is a commutative operation, and ARG0 is a constant, move it
9224 to ARG1 to reduce the number of tests below. */
9225 if (commutative_tree_code (code)
9226 && tree_swap_operands_p (arg0, arg1, true))
9227 return fold_build2_loc (loc, code, type, op1, op0);
9228
9229 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9230 to ARG1 to reduce the number of tests below. */
9231 if (kind == tcc_comparison
9232 && tree_swap_operands_p (arg0, arg1, true))
9233 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9234
9235 tem = generic_simplify (loc, code, type, op0, op1);
9236 if (tem)
9237 return tem;
9238
9239 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9240
9241 First check for cases where an arithmetic operation is applied to a
9242 compound, conditional, or comparison operation. Push the arithmetic
9243 operation inside the compound or conditional to see if any folding
9244 can then be done. Convert comparison to conditional for this purpose.
9245      This also optimizes non-constant cases that used to be done in
9246 expand_expr.
9247
9248      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9249      one of the operands is a comparison and the other is a comparison, a
9250 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9251 code below would make the expression more complex. Change it to a
9252 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9253 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9254
9255 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9256 || code == EQ_EXPR || code == NE_EXPR)
9257 && TREE_CODE (type) != VECTOR_TYPE
9258 && ((truth_value_p (TREE_CODE (arg0))
9259 && (truth_value_p (TREE_CODE (arg1))
9260 || (TREE_CODE (arg1) == BIT_AND_EXPR
9261 && integer_onep (TREE_OPERAND (arg1, 1)))))
9262 || (truth_value_p (TREE_CODE (arg1))
9263 && (truth_value_p (TREE_CODE (arg0))
9264 || (TREE_CODE (arg0) == BIT_AND_EXPR
9265 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9266 {
9267 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9268 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9269 : TRUTH_XOR_EXPR,
9270 boolean_type_node,
9271 fold_convert_loc (loc, boolean_type_node, arg0),
9272 fold_convert_loc (loc, boolean_type_node, arg1));
9273
9274 if (code == EQ_EXPR)
9275 tem = invert_truthvalue_loc (loc, tem);
9276
9277 return fold_convert_loc (loc, type, tem);
9278 }
9279
9280 if (TREE_CODE_CLASS (code) == tcc_binary
9281 || TREE_CODE_CLASS (code) == tcc_comparison)
9282 {
9283 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9284 {
9285 tem = fold_build2_loc (loc, code, type,
9286 fold_convert_loc (loc, TREE_TYPE (op0),
9287 TREE_OPERAND (arg0, 1)), op1);
9288 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9289 tem);
9290 }
9291 if (TREE_CODE (arg1) == COMPOUND_EXPR
9292 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9293 {
9294 tem = fold_build2_loc (loc, code, type, op0,
9295 fold_convert_loc (loc, TREE_TYPE (op1),
9296 TREE_OPERAND (arg1, 1)));
9297 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9298 tem);
9299 }
9300
9301 if (TREE_CODE (arg0) == COND_EXPR
9302 || TREE_CODE (arg0) == VEC_COND_EXPR
9303 || COMPARISON_CLASS_P (arg0))
9304 {
9305 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9306 arg0, arg1,
9307 /*cond_first_p=*/1);
9308 if (tem != NULL_TREE)
9309 return tem;
9310 }
9311
9312 if (TREE_CODE (arg1) == COND_EXPR
9313 || TREE_CODE (arg1) == VEC_COND_EXPR
9314 || COMPARISON_CLASS_P (arg1))
9315 {
9316 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9317 arg1, arg0,
9318 /*cond_first_p=*/0);
9319 if (tem != NULL_TREE)
9320 return tem;
9321 }
9322 }
9323
9324 switch (code)
9325 {
9326 case MEM_REF:
9327 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9328 if (TREE_CODE (arg0) == ADDR_EXPR
9329 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9330 {
9331 tree iref = TREE_OPERAND (arg0, 0);
9332 return fold_build2 (MEM_REF, type,
9333 TREE_OPERAND (iref, 0),
9334 int_const_binop (PLUS_EXPR, arg1,
9335 TREE_OPERAND (iref, 1)));
9336 }
9337
9338 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9339 if (TREE_CODE (arg0) == ADDR_EXPR
9340 && handled_component_p (TREE_OPERAND (arg0, 0)))
9341 {
9342 tree base;
9343 HOST_WIDE_INT coffset;
9344 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9345 &coffset);
9346 if (!base)
9347 return NULL_TREE;
9348 return fold_build2 (MEM_REF, type,
9349 build_fold_addr_expr (base),
9350 int_const_binop (PLUS_EXPR, arg1,
9351 size_int (coffset)));
9352 }
9353
9354 return NULL_TREE;
9355
9356 case POINTER_PLUS_EXPR:
9357 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9358 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9359 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9360 return fold_convert_loc (loc, type,
9361 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9362 fold_convert_loc (loc, sizetype,
9363 arg1),
9364 fold_convert_loc (loc, sizetype,
9365 arg0)));
9366
9367 return NULL_TREE;
9368
9369 case PLUS_EXPR:
9370 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9371 {
9372 /* X + (X / CST) * -CST is X % CST. */
9373 if (TREE_CODE (arg1) == MULT_EXPR
9374 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9375 && operand_equal_p (arg0,
9376 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9377 {
9378 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9379 tree cst1 = TREE_OPERAND (arg1, 1);
9380 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9381 cst1, cst0);
9382 if (sum && integer_zerop (sum))
9383 return fold_convert_loc (loc, type,
9384 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9385 TREE_TYPE (arg0), arg0,
9386 cst0));
9387 }
9388 }
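      /* Worked instance of the above: x + (x / 16) * -16 folds to
	 x % 16, because the two constants sum to zero.  */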
9389
9390 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9391 	 one of them being the constant 1.  Make sure the type is not saturating and has the signedness of
9392 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9393 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9394 if ((TREE_CODE (arg0) == MULT_EXPR
9395 || TREE_CODE (arg1) == MULT_EXPR)
9396 && !TYPE_SATURATING (type)
9397 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9398 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9399 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9400 {
9401 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9402 if (tem)
9403 return tem;
9404 }
9405
9406 if (! FLOAT_TYPE_P (type))
9407 {
9408 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9409 (plus (plus (mult) (mult)) (foo)) so that we can
9410 take advantage of the factoring cases below. */
9411 if (ANY_INTEGRAL_TYPE_P (type)
9412 && TYPE_OVERFLOW_WRAPS (type)
9413 && (((TREE_CODE (arg0) == PLUS_EXPR
9414 || TREE_CODE (arg0) == MINUS_EXPR)
9415 && TREE_CODE (arg1) == MULT_EXPR)
9416 || ((TREE_CODE (arg1) == PLUS_EXPR
9417 || TREE_CODE (arg1) == MINUS_EXPR)
9418 && TREE_CODE (arg0) == MULT_EXPR)))
9419 {
9420 tree parg0, parg1, parg, marg;
9421 enum tree_code pcode;
9422
9423 if (TREE_CODE (arg1) == MULT_EXPR)
9424 parg = arg0, marg = arg1;
9425 else
9426 parg = arg1, marg = arg0;
9427 pcode = TREE_CODE (parg);
9428 parg0 = TREE_OPERAND (parg, 0);
9429 parg1 = TREE_OPERAND (parg, 1);
9430 STRIP_NOPS (parg0);
9431 STRIP_NOPS (parg1);
9432
9433 if (TREE_CODE (parg0) == MULT_EXPR
9434 && TREE_CODE (parg1) != MULT_EXPR)
9435 return fold_build2_loc (loc, pcode, type,
9436 fold_build2_loc (loc, PLUS_EXPR, type,
9437 fold_convert_loc (loc, type,
9438 parg0),
9439 fold_convert_loc (loc, type,
9440 marg)),
9441 fold_convert_loc (loc, type, parg1));
9442 if (TREE_CODE (parg0) != MULT_EXPR
9443 && TREE_CODE (parg1) == MULT_EXPR)
9444 return
9445 fold_build2_loc (loc, PLUS_EXPR, type,
9446 fold_convert_loc (loc, type, parg0),
9447 fold_build2_loc (loc, pcode, type,
9448 fold_convert_loc (loc, type, marg),
9449 fold_convert_loc (loc, type,
9450 parg1)));
9451 }
9452 }
9453 else
9454 {
9455 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9456 to __complex__ ( x, y ). This is not the same for SNaNs or
9457 if signed zeros are involved. */
9458 if (!HONOR_SNANS (element_mode (arg0))
9459 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9460 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9461 {
9462 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9463 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9464 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9465 bool arg0rz = false, arg0iz = false;
9466 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9467 || (arg0i && (arg0iz = real_zerop (arg0i))))
9468 {
9469 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9470 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9471 if (arg0rz && arg1i && real_zerop (arg1i))
9472 {
9473 tree rp = arg1r ? arg1r
9474 : build1 (REALPART_EXPR, rtype, arg1);
9475 tree ip = arg0i ? arg0i
9476 : build1 (IMAGPART_EXPR, rtype, arg0);
9477 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9478 }
9479 else if (arg0iz && arg1r && real_zerop (arg1r))
9480 {
9481 tree rp = arg0r ? arg0r
9482 : build1 (REALPART_EXPR, rtype, arg0);
9483 tree ip = arg1i ? arg1i
9484 : build1 (IMAGPART_EXPR, rtype, arg1);
9485 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9486 }
9487 }
9488 }
9489
9490 if (flag_unsafe_math_optimizations
9491 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9492 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9493 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9494 return tem;
9495
9496 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9497 We associate floats only if the user has specified
9498 -fassociative-math. */
9499 if (flag_associative_math
9500 && TREE_CODE (arg1) == PLUS_EXPR
9501 && TREE_CODE (arg0) != MULT_EXPR)
9502 {
9503 tree tree10 = TREE_OPERAND (arg1, 0);
9504 tree tree11 = TREE_OPERAND (arg1, 1);
9505 if (TREE_CODE (tree11) == MULT_EXPR
9506 && TREE_CODE (tree10) == MULT_EXPR)
9507 {
9508 tree tree0;
9509 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9510 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9511 }
9512 }
9513 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9514 We associate floats only if the user has specified
9515 -fassociative-math. */
9516 if (flag_associative_math
9517 && TREE_CODE (arg0) == PLUS_EXPR
9518 && TREE_CODE (arg1) != MULT_EXPR)
9519 {
9520 tree tree00 = TREE_OPERAND (arg0, 0);
9521 tree tree01 = TREE_OPERAND (arg0, 1);
9522 if (TREE_CODE (tree01) == MULT_EXPR
9523 && TREE_CODE (tree00) == MULT_EXPR)
9524 {
9525 tree tree0;
9526 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9527 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9528 }
9529 }
9530 }
9531
9532 bit_rotate:
9533 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9534 is a rotate of A by C1 bits. */
9535 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9536 is a rotate of A by B bits. */
9537 {
9538 enum tree_code code0, code1;
9539 tree rtype;
9540 code0 = TREE_CODE (arg0);
9541 code1 = TREE_CODE (arg1);
9542 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9543 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9544 && operand_equal_p (TREE_OPERAND (arg0, 0),
9545 TREE_OPERAND (arg1, 0), 0)
9546 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9547 TYPE_UNSIGNED (rtype))
9548 /* Only create rotates in complete modes. Other cases are not
9549 expanded properly. */
9550 && (element_precision (rtype)
9551 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9552 {
9553 tree tree01, tree11;
9554 enum tree_code code01, code11;
9555
9556 tree01 = TREE_OPERAND (arg0, 1);
9557 tree11 = TREE_OPERAND (arg1, 1);
9558 STRIP_NOPS (tree01);
9559 STRIP_NOPS (tree11);
9560 code01 = TREE_CODE (tree01);
9561 code11 = TREE_CODE (tree11);
9562 if (code01 == INTEGER_CST
9563 && code11 == INTEGER_CST
9564 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9565 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9566 {
9567 tem = build2_loc (loc, LROTATE_EXPR,
9568 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9569 TREE_OPERAND (arg0, 0),
9570 code0 == LSHIFT_EXPR
9571 ? TREE_OPERAND (arg0, 1)
9572 : TREE_OPERAND (arg1, 1));
9573 return fold_convert_loc (loc, type, tem);
9574 }
9575 else if (code11 == MINUS_EXPR)
9576 {
9577 tree tree110, tree111;
9578 tree110 = TREE_OPERAND (tree11, 0);
9579 tree111 = TREE_OPERAND (tree11, 1);
9580 STRIP_NOPS (tree110);
9581 STRIP_NOPS (tree111);
9582 if (TREE_CODE (tree110) == INTEGER_CST
9583 && 0 == compare_tree_int (tree110,
9584 element_precision
9585 (TREE_TYPE (TREE_OPERAND
9586 (arg0, 0))))
9587 && operand_equal_p (tree01, tree111, 0))
9588 return
9589 fold_convert_loc (loc, type,
9590 build2 ((code0 == LSHIFT_EXPR
9591 ? LROTATE_EXPR
9592 : RROTATE_EXPR),
9593 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9594 TREE_OPERAND (arg0, 0),
9595 TREE_OPERAND (arg0, 1)));
9596 }
9597 else if (code01 == MINUS_EXPR)
9598 {
9599 tree tree010, tree011;
9600 tree010 = TREE_OPERAND (tree01, 0);
9601 tree011 = TREE_OPERAND (tree01, 1);
9602 STRIP_NOPS (tree010);
9603 STRIP_NOPS (tree011);
9604 if (TREE_CODE (tree010) == INTEGER_CST
9605 && 0 == compare_tree_int (tree010,
9606 element_precision
9607 (TREE_TYPE (TREE_OPERAND
9608 (arg0, 0))))
9609 && operand_equal_p (tree11, tree011, 0))
9610 return fold_convert_loc
9611 (loc, type,
9612 build2 ((code0 != LSHIFT_EXPR
9613 ? LROTATE_EXPR
9614 : RROTATE_EXPR),
9615 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9616 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9617 }
9618 }
9619 }
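      /* A minimal sketch of what the rotate recognition above enables
	 (hypothetical standalone function, not part of this file):

	   unsigned int rotl3 (unsigned int a)
	   {
	     return (a << 3) + (a >> 29);
	   }

	 Assuming a 32-bit unsigned int, 3 + 29 equals the precision, so
	 the sum folds to a single LROTATE_EXPR by 3.  */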
9620
9621 associate:
9622      /* In most languages, we can't associate operations on floats through
9623 parentheses. Rather than remember where the parentheses were, we
9624 don't associate floats at all, unless the user has specified
9625 -fassociative-math.
9626 And, we need to make sure type is not saturating. */
9627
9628 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9629 && !TYPE_SATURATING (type))
9630 {
9631 tree var0, con0, lit0, minus_lit0;
9632 tree var1, con1, lit1, minus_lit1;
9633 tree atype = type;
9634 bool ok = true;
9635
9636 /* Split both trees into variables, constants, and literals. Then
9637 associate each group together, the constants with literals,
9638 then the result with variables. This increases the chances of
9639 literals being recombined later and of generating relocatable
9640 expressions for the sum of a constant and literal. */
9641 var0 = split_tree (loc, arg0, type, code,
9642 &con0, &lit0, &minus_lit0, 0);
9643 var1 = split_tree (loc, arg1, type, code,
9644 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9645
9646 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9647 if (code == MINUS_EXPR)
9648 code = PLUS_EXPR;
9649
9650 /* With undefined overflow prefer doing association in a type
9651 which wraps on overflow, if that is one of the operand types. */
9652 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9653 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9654 {
9655 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9656 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9657 atype = TREE_TYPE (arg0);
9658 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9659 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9660 atype = TREE_TYPE (arg1);
9661 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9662 }
9663
9664 /* With undefined overflow we can only associate constants with one
9665 variable, and constants whose association doesn't overflow. */
9666 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9667 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9668 {
9669 if (var0 && var1)
9670 {
9671 tree tmp0 = var0;
9672 tree tmp1 = var1;
9673 bool one_neg = false;
9674
9675 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9676 {
9677 tmp0 = TREE_OPERAND (tmp0, 0);
9678 one_neg = !one_neg;
9679 }
9680 if (CONVERT_EXPR_P (tmp0)
9681 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9682 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9683 <= TYPE_PRECISION (atype)))
9684 tmp0 = TREE_OPERAND (tmp0, 0);
9685 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9686 {
9687 tmp1 = TREE_OPERAND (tmp1, 0);
9688 one_neg = !one_neg;
9689 }
9690 if (CONVERT_EXPR_P (tmp1)
9691 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9692 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9693 <= TYPE_PRECISION (atype)))
9694 tmp1 = TREE_OPERAND (tmp1, 0);
9695 /* The only case we can still associate with two variables
9696 is if they cancel out. */
9697 if (!one_neg
9698 || !operand_equal_p (tmp0, tmp1, 0))
9699 ok = false;
9700 }
9701 }
9702
9703 /* Only do something if we found more than two objects. Otherwise,
9704 nothing has changed and we risk infinite recursion. */
9705 if (ok
9706 && (2 < ((var0 != 0) + (var1 != 0)
9707 + (con0 != 0) + (con1 != 0)
9708 + (lit0 != 0) + (lit1 != 0)
9709 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9710 {
9711 bool any_overflows = false;
9712 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9713 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9714 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9715 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9716 var0 = associate_trees (loc, var0, var1, code, atype);
9717 con0 = associate_trees (loc, con0, con1, code, atype);
9718 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9719 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9720 code, atype);
9721
9722 /* Preserve the MINUS_EXPR if the negative part of the literal is
9723 greater than the positive part. Otherwise, the multiplicative
9724 	     folding code (i.e. extract_muldiv) may be fooled in case
9725 	     unsigned constants are subtracted, as in the following
9726 example: ((X*2 + 4) - 8U)/2. */
9727 if (minus_lit0 && lit0)
9728 {
9729 if (TREE_CODE (lit0) == INTEGER_CST
9730 && TREE_CODE (minus_lit0) == INTEGER_CST
9731 && tree_int_cst_lt (lit0, minus_lit0))
9732 {
9733 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9734 MINUS_EXPR, atype);
9735 lit0 = 0;
9736 }
9737 else
9738 {
9739 lit0 = associate_trees (loc, lit0, minus_lit0,
9740 MINUS_EXPR, atype);
9741 minus_lit0 = 0;
9742 }
9743 }
9744
9745 /* Don't introduce overflows through reassociation. */
9746 if (!any_overflows
9747 && ((lit0 && TREE_OVERFLOW_P (lit0))
9748 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9749 return NULL_TREE;
9750
9751 if (minus_lit0)
9752 {
9753 if (con0 == 0)
9754 return
9755 fold_convert_loc (loc, type,
9756 associate_trees (loc, var0, minus_lit0,
9757 MINUS_EXPR, atype));
9758 else
9759 {
9760 con0 = associate_trees (loc, con0, minus_lit0,
9761 MINUS_EXPR, atype);
9762 return
9763 fold_convert_loc (loc, type,
9764 associate_trees (loc, var0, con0,
9765 PLUS_EXPR, atype));
9766 }
9767 }
9768
9769 con0 = associate_trees (loc, con0, lit0, code, atype);
9770 return
9771 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9772 code, atype));
9773 }
9774 }
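      /* Example of the association above: for signed int x,
	 (x + 1) + 9 splits into var0 = x, lit0 = 1, lit1 = 9 and is
	 recombined as x + 10, while (x + INT_MAX) + 1 is left untouched
	 because recombining the literals would overflow.  */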
9775
9776 return NULL_TREE;
9777
9778 case MINUS_EXPR:
9779 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9780 if (TREE_CODE (arg0) == NEGATE_EXPR
9781 && negate_expr_p (op1)
9782 && reorder_operands_p (arg0, arg1))
9783 return fold_build2_loc (loc, MINUS_EXPR, type,
9784 negate_expr (op1),
9785 fold_convert_loc (loc, type,
9786 TREE_OPERAND (arg0, 0)));
9787
9788 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9789 __complex__ ( x, -y ). This is not the same for SNaNs or if
9790 signed zeros are involved. */
9791 if (!HONOR_SNANS (element_mode (arg0))
9792 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9793 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9794 {
9795 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9796 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9797 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9798 bool arg0rz = false, arg0iz = false;
9799 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9800 || (arg0i && (arg0iz = real_zerop (arg0i))))
9801 {
9802 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9803 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9804 if (arg0rz && arg1i && real_zerop (arg1i))
9805 {
9806 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9807 arg1r ? arg1r
9808 : build1 (REALPART_EXPR, rtype, arg1));
9809 tree ip = arg0i ? arg0i
9810 : build1 (IMAGPART_EXPR, rtype, arg0);
9811 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9812 }
9813 else if (arg0iz && arg1r && real_zerop (arg1r))
9814 {
9815 tree rp = arg0r ? arg0r
9816 : build1 (REALPART_EXPR, rtype, arg0);
9817 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9818 arg1i ? arg1i
9819 : build1 (IMAGPART_EXPR, rtype, arg1));
9820 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9821 }
9822 }
9823 }
9824
9825 /* A - B -> A + (-B) if B is easily negatable. */
9826 if (negate_expr_p (op1)
9827 && ! TYPE_OVERFLOW_SANITIZED (type)
9828 && ((FLOAT_TYPE_P (type)
9829 /* Avoid this transformation if B is a positive REAL_CST. */
9830 && (TREE_CODE (op1) != REAL_CST
9831 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9832 || INTEGRAL_TYPE_P (type)))
9833 return fold_build2_loc (loc, PLUS_EXPR, type,
9834 fold_convert_loc (loc, type, arg0),
9835 negate_expr (op1));
9836
9837 /* Fold &a[i] - &a[j] to i-j. */
9838 if (TREE_CODE (arg0) == ADDR_EXPR
9839 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9840 && TREE_CODE (arg1) == ADDR_EXPR
9841 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9842 {
9843 tree tem = fold_addr_of_array_ref_difference (loc, type,
9844 TREE_OPERAND (arg0, 0),
9845 TREE_OPERAND (arg1, 0));
9846 if (tem)
9847 return tem;
9848 }
9849
9850 if (FLOAT_TYPE_P (type)
9851 && flag_unsafe_math_optimizations
9852 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9853 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9854 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9855 return tem;
9856
9857 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9858 	 one of them being the constant 1.  Make sure the type is not saturating and has the signedness of
9859 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9860 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9861 if ((TREE_CODE (arg0) == MULT_EXPR
9862 || TREE_CODE (arg1) == MULT_EXPR)
9863 && !TYPE_SATURATING (type)
9864 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9865 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9866 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9867 {
9868 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9869 if (tem)
9870 return tem;
9871 }
9872
9873 goto associate;
9874
9875 case MULT_EXPR:
9876 if (! FLOAT_TYPE_P (type))
9877 {
9878 /* Transform x * -C into -x * C if x is easily negatable. */
9879 if (TREE_CODE (op1) == INTEGER_CST
9880 && tree_int_cst_sgn (op1) == -1
9881 && negate_expr_p (op0)
9882 && (tem = negate_expr (op1)) != op1
9883 && ! TREE_OVERFLOW (tem))
9884 return fold_build2_loc (loc, MULT_EXPR, type,
9885 fold_convert_loc (loc, type,
9886 negate_expr (op0)), tem);
9887
9888 /* (A + A) * C -> A * 2 * C */
9889 if (TREE_CODE (arg0) == PLUS_EXPR
9890 && TREE_CODE (arg1) == INTEGER_CST
9891 && operand_equal_p (TREE_OPERAND (arg0, 0),
9892 TREE_OPERAND (arg0, 1), 0))
9893 return fold_build2_loc (loc, MULT_EXPR, type,
9894 omit_one_operand_loc (loc, type,
9895 TREE_OPERAND (arg0, 0),
9896 TREE_OPERAND (arg0, 1)),
9897 fold_build2_loc (loc, MULT_EXPR, type,
9898 build_int_cst (type, 2) , arg1));
9899
9900 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9901 sign-changing only. */
9902 if (TREE_CODE (arg1) == INTEGER_CST
9903 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9904 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9905 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9906
9907 strict_overflow_p = false;
9908 if (TREE_CODE (arg1) == INTEGER_CST
9909 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9910 &strict_overflow_p)))
9911 {
9912 if (strict_overflow_p)
9913 fold_overflow_warning (("assuming signed overflow does not "
9914 "occur when simplifying "
9915 "multiplication"),
9916 WARN_STRICT_OVERFLOW_MISC);
9917 return fold_convert_loc (loc, type, tem);
9918 }
9919
9920 /* Optimize z * conj(z) for integer complex numbers. */
9921 if (TREE_CODE (arg0) == CONJ_EXPR
9922 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9923 return fold_mult_zconjz (loc, type, arg1);
9924 if (TREE_CODE (arg1) == CONJ_EXPR
9925 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9926 return fold_mult_zconjz (loc, type, arg0);
9927 }
9928 else
9929 {
9930 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9931 This is not the same for NaNs or if signed zeros are
9932 involved. */
9933 if (!HONOR_NANS (arg0)
9934 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9935 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9936 && TREE_CODE (arg1) == COMPLEX_CST
9937 && real_zerop (TREE_REALPART (arg1)))
9938 {
9939 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9940 if (real_onep (TREE_IMAGPART (arg1)))
9941 return
9942 fold_build2_loc (loc, COMPLEX_EXPR, type,
9943 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9944 rtype, arg0)),
9945 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9946 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9947 return
9948 fold_build2_loc (loc, COMPLEX_EXPR, type,
9949 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9950 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9951 rtype, arg0)));
9952 }
9953
9954 /* Optimize z * conj(z) for floating point complex numbers.
9955 Guarded by flag_unsafe_math_optimizations as non-finite
9956 imaginary components don't produce scalar results. */
9957 if (flag_unsafe_math_optimizations
9958 && TREE_CODE (arg0) == CONJ_EXPR
9959 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9960 return fold_mult_zconjz (loc, type, arg1);
9961 if (flag_unsafe_math_optimizations
9962 && TREE_CODE (arg1) == CONJ_EXPR
9963 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9964 return fold_mult_zconjz (loc, type, arg0);
9965
9966 if (flag_unsafe_math_optimizations)
9967 {
9968
9969 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9970 if (!in_gimple_form
9971 && optimize
9972 && operand_equal_p (arg0, arg1, 0))
9973 {
9974 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9975
9976 if (powfn)
9977 {
9978 tree arg = build_real (type, dconst2);
9979 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9980 }
9981 }
9982 }
9983 }
9984 goto associate;
9985
9986 case BIT_IOR_EXPR:
9987 /* Canonicalize (X & C1) | C2. */
9988 if (TREE_CODE (arg0) == BIT_AND_EXPR
9989 && TREE_CODE (arg1) == INTEGER_CST
9990 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9991 {
9992 int width = TYPE_PRECISION (type), w;
9993 wide_int c1 = TREE_OPERAND (arg0, 1);
9994 wide_int c2 = arg1;
9995
9996 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9997 if ((c1 & c2) == c1)
9998 return omit_one_operand_loc (loc, type, arg1,
9999 TREE_OPERAND (arg0, 0));
10000
10001 wide_int msk = wi::mask (width, false,
10002 TYPE_PRECISION (TREE_TYPE (arg1)));
10003
10004 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10005 if (msk.and_not (c1 | c2) == 0)
10006 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10007 TREE_OPERAND (arg0, 0), arg1);
10008
10009 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10010 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10011 mode which allows further optimizations. */
10012 c1 &= msk;
10013 c2 &= msk;
10014 wide_int c3 = c1.and_not (c2);
10015 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10016 {
10017 wide_int mask = wi::mask (w, false,
10018 TYPE_PRECISION (type));
10019 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10020 {
10021 c3 = mask;
10022 break;
10023 }
10024 }
10025
10026 if (c3 != c1)
10027 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10028 fold_build2_loc (loc, BIT_AND_EXPR, type,
10029 TREE_OPERAND (arg0, 0),
10030 wide_int_to_tree (type,
10031 c3)),
10032 arg1);
10033 }
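      /* Worked example: for (x & 3) | 6 we have C1 = 3 and C2 = 6;
	 (C1 & C2) != C1 and (C1 | C2) != ~0, so C1 is minimized to
	 C1 & ~C2 = 1 and the result is (x & 1) | 6.  */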
10034
10035 /* See if this can be simplified into a rotate first. If that
10036 is unsuccessful continue in the association code. */
10037 goto bit_rotate;
10038
10039 case BIT_XOR_EXPR:
10040 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10041 if (TREE_CODE (arg0) == BIT_AND_EXPR
10042 && INTEGRAL_TYPE_P (type)
10043 && integer_onep (TREE_OPERAND (arg0, 1))
10044 && integer_onep (arg1))
10045 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10046 build_zero_cst (TREE_TYPE (arg0)));
10047
10048 /* See if this can be simplified into a rotate first. If that
10049 is unsuccessful continue in the association code. */
10050 goto bit_rotate;
10051
10052 case BIT_AND_EXPR:
10053 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10054 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10055 && INTEGRAL_TYPE_P (type)
10056 && integer_onep (TREE_OPERAND (arg0, 1))
10057 && integer_onep (arg1))
10058 {
10059 tree tem2;
10060 tem = TREE_OPERAND (arg0, 0);
10061 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10062 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10063 tem, tem2);
10064 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10065 build_zero_cst (TREE_TYPE (tem)));
10066 }
10067 /* Fold ~X & 1 as (X & 1) == 0. */
10068 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10069 && INTEGRAL_TYPE_P (type)
10070 && integer_onep (arg1))
10071 {
10072 tree tem2;
10073 tem = TREE_OPERAND (arg0, 0);
10074 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10075 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10076 tem, tem2);
10077 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10078 build_zero_cst (TREE_TYPE (tem)));
10079 }
10080 /* Fold !X & 1 as X == 0. */
10081 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10082 && integer_onep (arg1))
10083 {
10084 tem = TREE_OPERAND (arg0, 0);
10085 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10086 build_zero_cst (TREE_TYPE (tem)));
10087 }
10088
10089 /* Fold (X ^ Y) & Y as ~X & Y. */
10090 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10091 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10092 {
10093 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10094 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10095 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10096 fold_convert_loc (loc, type, arg1));
10097 }
10098 /* Fold (X ^ Y) & X as ~Y & X. */
10099 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10100 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10101 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10102 {
10103 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10104 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10105 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10106 fold_convert_loc (loc, type, arg1));
10107 }
10108 /* Fold X & (X ^ Y) as X & ~Y. */
10109 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10111 {
10112 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10113 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10114 fold_convert_loc (loc, type, arg0),
10115 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10116 }
10117 /* Fold X & (Y ^ X) as ~Y & X. */
10118 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10119 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10120 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10121 {
10122 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10123 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10124 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10125 fold_convert_loc (loc, type, arg0));
10126 }
10127
10128 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10129 multiple of 1 << CST. */
10130 if (TREE_CODE (arg1) == INTEGER_CST)
10131 {
10132 wide_int cst1 = arg1;
10133 wide_int ncst1 = -cst1;
10134 if ((cst1 & ncst1) == ncst1
10135 && multiple_of_p (type, arg0,
10136 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10137 return fold_convert_loc (loc, type, arg0);
10138 }
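      /* Example: (x * 12) & -4 folds to x * 12: -4 is -(1 << 2) and 12
	 is a multiple of 4, so the AND cannot clear any bit that could
	 be set in the product.  */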
10139
10140 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10141 bits from CST2. */
10142 if (TREE_CODE (arg1) == INTEGER_CST
10143 && TREE_CODE (arg0) == MULT_EXPR
10144 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10145 {
10146 wide_int warg1 = arg1;
10147 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10148
10149 if (masked == 0)
10150 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10151 arg0, arg1);
10152 else if (masked != warg1)
10153 {
10154 /* Avoid the transform if arg1 is a mask of some
10155 mode which allows further optimizations. */
10156 int pop = wi::popcount (warg1);
10157 if (!(pop >= BITS_PER_UNIT
10158 && exact_log2 (pop) != -1
10159 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10160 return fold_build2_loc (loc, code, type, op0,
10161 wide_int_to_tree (type, masked));
10162 }
10163 }
10164
10165 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10166 ((A & N) + B) & M -> (A + B) & M
10167 Similarly if (N & M) == 0,
10168 ((A | N) + B) & M -> (A + B) & M
10169 and for - instead of + (or unary - instead of +)
10170 and/or ^ instead of |.
10171 If B is constant and (B & M) == 0, fold into A & M. */
10172 if (TREE_CODE (arg1) == INTEGER_CST)
10173 {
10174 wide_int cst1 = arg1;
10175 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10176 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10177 && (TREE_CODE (arg0) == PLUS_EXPR
10178 || TREE_CODE (arg0) == MINUS_EXPR
10179 || TREE_CODE (arg0) == NEGATE_EXPR)
10180 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10181 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10182 {
10183 tree pmop[2];
10184 int which = 0;
10185 wide_int cst0;
10186
10187 /* Now we know that arg0 is (C + D) or (C - D) or
10188 -C and arg1 (M) is == (1LL << cst) - 1.
10189 Store C into PMOP[0] and D into PMOP[1]. */
10190 pmop[0] = TREE_OPERAND (arg0, 0);
10191 pmop[1] = NULL;
10192 if (TREE_CODE (arg0) != NEGATE_EXPR)
10193 {
10194 pmop[1] = TREE_OPERAND (arg0, 1);
10195 which = 1;
10196 }
10197
10198 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10199 which = -1;
10200
10201 for (; which >= 0; which--)
10202 switch (TREE_CODE (pmop[which]))
10203 {
10204 case BIT_AND_EXPR:
10205 case BIT_IOR_EXPR:
10206 case BIT_XOR_EXPR:
10207 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10208 != INTEGER_CST)
10209 break;
10210 cst0 = TREE_OPERAND (pmop[which], 1);
10211 cst0 &= cst1;
10212 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10213 {
10214 if (cst0 != cst1)
10215 break;
10216 }
10217 else if (cst0 != 0)
10218 break;
10219 /* If C or D is of the form (A & N) where
10220 (N & M) == M, or of the form (A | N) or
10221 (A ^ N) where (N & M) == 0, replace it with A. */
10222 pmop[which] = TREE_OPERAND (pmop[which], 0);
10223 break;
10224 case INTEGER_CST:
10225 /* If C or D is a N where (N & M) == 0, it can be
10226 omitted (assumed 0). */
10227 if ((TREE_CODE (arg0) == PLUS_EXPR
10228 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10229 && (cst1 & pmop[which]) == 0)
10230 pmop[which] = NULL;
10231 break;
10232 default:
10233 break;
10234 }
10235
10236 /* Only build anything new if we optimized one or both arguments
10237 above. */
10238 if (pmop[0] != TREE_OPERAND (arg0, 0)
10239 || (TREE_CODE (arg0) != NEGATE_EXPR
10240 && pmop[1] != TREE_OPERAND (arg0, 1)))
10241 {
10242 tree utype = TREE_TYPE (arg0);
10243 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10244 {
10245 /* Perform the operations in a type that has defined
10246 overflow behavior. */
10247 utype = unsigned_type_for (TREE_TYPE (arg0));
10248 if (pmop[0] != NULL)
10249 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10250 if (pmop[1] != NULL)
10251 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10252 }
10253
10254 if (TREE_CODE (arg0) == NEGATE_EXPR)
10255 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10256 else if (TREE_CODE (arg0) == PLUS_EXPR)
10257 {
10258 if (pmop[0] != NULL && pmop[1] != NULL)
10259 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10260 pmop[0], pmop[1]);
10261 else if (pmop[0] != NULL)
10262 tem = pmop[0];
10263 else if (pmop[1] != NULL)
10264 tem = pmop[1];
10265 else
10266 return build_int_cst (type, 0);
10267 }
10268 else if (pmop[0] == NULL)
10269 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10270 else
10271 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10272 pmop[0], pmop[1]);
10273 /* TEM is now the new binary +, - or unary - replacement. */
10274 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10275 fold_convert_loc (loc, utype, arg1));
10276 return fold_convert_loc (loc, type, tem);
10277 }
10278 }
10279 }
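      /* Worked example: ((a & 7) + b) & 7 folds to (a + b) & 7, since
	 a and (a & 7) agree modulo 8 and M = 7 satisfies the mask test
	 (M & (M + 1)) == 0; the arithmetic is first moved to an
	 unsigned type when the original type has undefined overflow.  */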
10280
10281 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10282 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10283 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10284 {
10285 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10286
10287 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10288 if (mask == -1)
10289 return
10290 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10291 }
10292
10293 goto associate;
10294
10295 case RDIV_EXPR:
10296 /* Don't touch a floating-point divide by zero unless the mode
10297 of the constant can represent infinity. */
10298 if (TREE_CODE (arg1) == REAL_CST
10299 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10300 && real_zerop (arg1))
10301 return NULL_TREE;
10302
10303 /* (-A) / (-B) -> A / B */
10304 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10305 return fold_build2_loc (loc, RDIV_EXPR, type,
10306 TREE_OPERAND (arg0, 0),
10307 negate_expr (arg1));
10308 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10309 return fold_build2_loc (loc, RDIV_EXPR, type,
10310 negate_expr (arg0),
10311 TREE_OPERAND (arg1, 0));
10312 return NULL_TREE;
10313
10314 case TRUNC_DIV_EXPR:
10315 /* Fall through */
10316
10317 case FLOOR_DIV_EXPR:
10318 /* Simplify A / (B << N) where A and B are positive and B is
10319 a power of 2, to A >> (N + log2(B)). */
10320 strict_overflow_p = false;
10321 if (TREE_CODE (arg1) == LSHIFT_EXPR
10322 && (TYPE_UNSIGNED (type)
10323 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10324 {
10325 tree sval = TREE_OPERAND (arg1, 0);
10326 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10327 {
10328 tree sh_cnt = TREE_OPERAND (arg1, 1);
10329 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10330 wi::exact_log2 (sval));
10331
10332 if (strict_overflow_p)
10333 fold_overflow_warning (("assuming signed overflow does not "
10334 "occur when simplifying A / (B << N)"),
10335 WARN_STRICT_OVERFLOW_MISC);
10336
10337 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10338 sh_cnt, pow2);
10339 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10340 fold_convert_loc (loc, type, arg0), sh_cnt);
10341 }
10342 }
10343
10344 /* Fall through */
10345
10346 case ROUND_DIV_EXPR:
10347 case CEIL_DIV_EXPR:
10348 case EXACT_DIV_EXPR:
10349 if (integer_zerop (arg1))
10350 return NULL_TREE;
10351
10352 /* Convert -A / -B to A / B when the type is signed and overflow is
10353 undefined. */
10354 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10355 && TREE_CODE (arg0) == NEGATE_EXPR
10356 && negate_expr_p (op1))
10357 {
10358 if (INTEGRAL_TYPE_P (type))
10359 fold_overflow_warning (("assuming signed overflow does not occur "
10360 "when distributing negation across "
10361 "division"),
10362 WARN_STRICT_OVERFLOW_MISC);
10363 return fold_build2_loc (loc, code, type,
10364 fold_convert_loc (loc, type,
10365 TREE_OPERAND (arg0, 0)),
10366 negate_expr (op1));
10367 }
10368 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10369 && TREE_CODE (arg1) == NEGATE_EXPR
10370 && negate_expr_p (op0))
10371 {
10372 if (INTEGRAL_TYPE_P (type))
10373 fold_overflow_warning (("assuming signed overflow does not occur "
10374 "when distributing negation across "
10375 "division"),
10376 WARN_STRICT_OVERFLOW_MISC);
10377 return fold_build2_loc (loc, code, type,
10378 negate_expr (op0),
10379 fold_convert_loc (loc, type,
10380 TREE_OPERAND (arg1, 0)));
10381 }
10382
10383 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10384 operation, EXACT_DIV_EXPR.
10385
10386 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10387 At one time others generated faster code; it's not clear if they do
10388 after the last round of changes to the DIV code in expmed.c. */
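/* E.g. a CEIL_DIV_EXPR of X * 8 by 8 is known to divide exactly,
   so it can use the cheaper EXACT_DIV_EXPR expansion.  */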
10389 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10390 && multiple_of_p (type, arg0, arg1))
10391 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10392 fold_convert (type, arg0),
10393 fold_convert (type, arg1));
10394
10395 strict_overflow_p = false;
10396 if (TREE_CODE (arg1) == INTEGER_CST
10397 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10398 &strict_overflow_p)))
10399 {
10400 if (strict_overflow_p)
10401 fold_overflow_warning (("assuming signed overflow does not occur "
10402 "when simplifying division"),
10403 WARN_STRICT_OVERFLOW_MISC);
10404 return fold_convert_loc (loc, type, tem);
10405 }
10406
10407 return NULL_TREE;
10408
10409 case CEIL_MOD_EXPR:
10410 case FLOOR_MOD_EXPR:
10411 case ROUND_MOD_EXPR:
10412 case TRUNC_MOD_EXPR:
10413 strict_overflow_p = false;
10414 if (TREE_CODE (arg1) == INTEGER_CST
10415 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10416 &strict_overflow_p)))
10417 {
10418 if (strict_overflow_p)
10419 fold_overflow_warning (("assuming signed overflow does not occur "
10420 "when simplifying modulus"),
10421 WARN_STRICT_OVERFLOW_MISC);
10422 return fold_convert_loc (loc, type, tem);
10423 }
10424
10425 return NULL_TREE;
10426
10427 case LROTATE_EXPR:
10428 case RROTATE_EXPR:
10429 case RSHIFT_EXPR:
10430 case LSHIFT_EXPR:
10431 /* Since a negative shift count is not well-defined,
10432 don't try to compute it in the compiler. */
10433 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10434 return NULL_TREE;
10435
10436 prec = element_precision (type);
10437
10438 /* If we have a rotate of a bit operation with the rotate count and
10439 the second operand of the bit operation both constant,
10440 permute the two operations. */
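/* E.g. (X | 0xf0) r>> 4 becomes (X r>> 4) | (0xf0 r>> 4),
   where the rotate of the constant folds at compile time.  */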
10441 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10442 && (TREE_CODE (arg0) == BIT_AND_EXPR
10443 || TREE_CODE (arg0) == BIT_IOR_EXPR
10444 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10445 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10446 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10447 fold_build2_loc (loc, code, type,
10448 TREE_OPERAND (arg0, 0), arg1),
10449 fold_build2_loc (loc, code, type,
10450 TREE_OPERAND (arg0, 1), arg1));
10451
10452 /* Two consecutive rotates adding up to some integer
10453 multiple of the precision of the type can be ignored. */
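/* E.g. on a 32-bit type, (X r>> 8) r>> 24 rotates by 32 bits
   in total and is simply X.  */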
10454 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10455 && TREE_CODE (arg0) == RROTATE_EXPR
10456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10457 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10458 prec) == 0)
10459 return TREE_OPERAND (arg0, 0);
10460
10461 return NULL_TREE;
10462
10463 case MIN_EXPR:
10464 case MAX_EXPR:
10465 goto associate;
10466
10467 case TRUTH_ANDIF_EXPR:
10468 /* Note that the operands of this must be ints
10469 and their values must be 0 or 1.
10470 ("true" is a fixed value perhaps depending on the language.) */
10471 /* If first arg is constant zero, return it. */
10472 if (integer_zerop (arg0))
10473 return fold_convert_loc (loc, type, arg0);
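/* Fall through */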
10474 case TRUTH_AND_EXPR:
10475 /* If either arg is constant true, drop it. */
10476 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10477 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10478 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10479 /* Preserve sequence points. */
10480 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10481 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10482 /* If second arg is constant zero, result is zero, but first arg
10483 must be evaluated. */
10484 if (integer_zerop (arg1))
10485 return omit_one_operand_loc (loc, type, arg1, arg0);
10486 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10487 case will be handled here. */
10488 if (integer_zerop (arg0))
10489 return omit_one_operand_loc (loc, type, arg0, arg1);
10490
10491 /* !X && X is always false. */
10492 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10493 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10494 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10495 /* X && !X is always false. */
10496 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10497 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10498 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10499
10500 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10501 means A >= Y && A != MAX, but in this case we know that
10502 A < X <= MAX. */
10503
10504 if (!TREE_SIDE_EFFECTS (arg0)
10505 && !TREE_SIDE_EFFECTS (arg1))
10506 {
10507 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10508 if (tem && !operand_equal_p (tem, arg0, 0))
10509 return fold_build2_loc (loc, code, type, tem, arg1);
10510
10511 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10512 if (tem && !operand_equal_p (tem, arg1, 0))
10513 return fold_build2_loc (loc, code, type, arg0, tem);
10514 }
10515
10516 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10517 != NULL_TREE)
10518 return tem;
10519
10520 return NULL_TREE;
10521
10522 case TRUTH_ORIF_EXPR:
10523 /* Note that the operands of this must be ints
10524 and their values must be 0 or true.
10525 ("true" is a fixed value perhaps depending on the language.) */
10526 /* If first arg is constant true, return it. */
10527 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10528 return fold_convert_loc (loc, type, arg0);
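/* Fall through */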
10529 case TRUTH_OR_EXPR:
10530 /* If either arg is constant zero, drop it. */
10531 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10532 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10533 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10534 /* Preserve sequence points. */
10535 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10536 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10537 /* If second arg is constant true, result is true, but we must
10538 evaluate first arg. */
10539 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10540 return omit_one_operand_loc (loc, type, arg1, arg0);
10541 /* Likewise for first arg, but note this only occurs here for
10542 TRUTH_OR_EXPR. */
10543 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10544 return omit_one_operand_loc (loc, type, arg0, arg1);
10545
10546 /* !X || X is always true. */
10547 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10548 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10549 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10550 /* X || !X is always true. */
10551 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10552 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10553 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10554
10555 /* (X && !Y) || (!X && Y) is X ^ Y */
10556 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10557 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10558 {
10559 tree a0, a1, l0, l1, n0, n1;
10560
10561 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10562 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10563
10564 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10565 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10566
10567 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10568 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10569
10570 if ((operand_equal_p (n0, a0, 0)
10571 && operand_equal_p (n1, a1, 0))
10572 || (operand_equal_p (n0, a1, 0)
10573 && operand_equal_p (n1, a0, 0)))
10574 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10575 }
10576
10577 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10578 != NULL_TREE)
10579 return tem;
10580
10581 return NULL_TREE;
10582
10583 case TRUTH_XOR_EXPR:
10584 /* If the second arg is constant zero, drop it. */
10585 if (integer_zerop (arg1))
10586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10587 /* If the second arg is constant true, this is a logical inversion. */
10588 if (integer_onep (arg1))
10589 {
10590 tem = invert_truthvalue_loc (loc, arg0);
10591 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10592 }
10593 /* Identical arguments cancel to zero. */
10594 if (operand_equal_p (arg0, arg1, 0))
10595 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10596
10597 /* !X ^ X is always true. */
10598 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10599 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10600 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10601
10602 /* X ^ !X is always true. */
10603 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10604 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10605 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10606
10607 return NULL_TREE;
10608
10609 case EQ_EXPR:
10610 case NE_EXPR:
10611 STRIP_NOPS (arg0);
10612 STRIP_NOPS (arg1);
10613
10614 tem = fold_comparison (loc, code, type, op0, op1);
10615 if (tem != NULL_TREE)
10616 return tem;
10617
10618 /* bool_var != 1 becomes !bool_var. */
10619 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10620 && code == NE_EXPR)
10621 return fold_convert_loc (loc, type,
10622 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10623 TREE_TYPE (arg0), arg0));
10624
10625 /* bool_var == 0 becomes !bool_var. */
10626 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10627 && code == EQ_EXPR)
10628 return fold_convert_loc (loc, type,
10629 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10630 TREE_TYPE (arg0), arg0));
10631
10632 /* !exp != 0 becomes !exp */
10633 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10634 && code == NE_EXPR)
10635 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10636
10637 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
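/* E.g. (X + Y) == X becomes Y == 0; X is still evaluated for
   its side effects via omit_two_operands.  */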
10638 if ((TREE_CODE (arg0) == PLUS_EXPR
10639 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10640 || TREE_CODE (arg0) == MINUS_EXPR)
10641 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10642 0)),
10643 arg1, 0)
10644 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10645 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10646 {
10647 tree val = TREE_OPERAND (arg0, 1);
10648 val = fold_build2_loc (loc, code, type, val,
10649 build_int_cst (TREE_TYPE (val), 0));
10650 return omit_two_operands_loc (loc, type, val,
10651 TREE_OPERAND (arg0, 0), arg1);
10652 }
10653
10654 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10655 if ((TREE_CODE (arg1) == PLUS_EXPR
10656 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10657 || TREE_CODE (arg1) == MINUS_EXPR)
10658 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10659 0)),
10660 arg0, 0)
10661 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10662 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10663 {
10664 tree val = TREE_OPERAND (arg1, 1);
10665 val = fold_build2_loc (loc, code, type, val,
10666 build_int_cst (TREE_TYPE (val), 0));
10667 return omit_two_operands_loc (loc, type, val,
10668 TREE_OPERAND (arg1, 0), arg0);
10669 }
10670
10671 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
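/* For odd C, C - X == X would require 2 * X == C, which has no
   solution since 2 * X is always even; so the result is known.  */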
10672 if (TREE_CODE (arg0) == MINUS_EXPR
10673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10674 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10675 1)),
10676 arg1, 0)
10677 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10678 return omit_two_operands_loc (loc, type,
10679 code == NE_EXPR
10680 ? boolean_true_node : boolean_false_node,
10681 TREE_OPERAND (arg0, 1), arg1);
10682
10683 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10684 if (TREE_CODE (arg1) == MINUS_EXPR
10685 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10686 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10687 1)),
10688 arg0, 0)
10689 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10690 return omit_two_operands_loc (loc, type,
10691 code == NE_EXPR
10692 ? boolean_true_node : boolean_false_node,
10693 TREE_OPERAND (arg1, 1), arg0);
10694
10695 /* If this is an EQ or NE comparison with zero and ARG0 is
10696 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10697 two operations, but the latter can be done in one less insn
10698 on machines that have only two-operand insns or on which a
10699 constant cannot be the first operand. */
10700 if (TREE_CODE (arg0) == BIT_AND_EXPR
10701 && integer_zerop (arg1))
10702 {
10703 tree arg00 = TREE_OPERAND (arg0, 0);
10704 tree arg01 = TREE_OPERAND (arg0, 1);
10705 if (TREE_CODE (arg00) == LSHIFT_EXPR
10706 && integer_onep (TREE_OPERAND (arg00, 0)))
10707 {
10708 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10709 arg01, TREE_OPERAND (arg00, 1));
10710 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10711 build_int_cst (TREE_TYPE (arg0), 1));
10712 return fold_build2_loc (loc, code, type,
10713 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10714 arg1);
10715 }
10716 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10717 && integer_onep (TREE_OPERAND (arg01, 0)))
10718 {
10719 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10720 arg00, TREE_OPERAND (arg01, 1));
10721 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10722 build_int_cst (TREE_TYPE (arg0), 1));
10723 return fold_build2_loc (loc, code, type,
10724 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10725 arg1);
10726 }
10727 }
10728
10729 /* If this is an NE or EQ comparison of zero against the result of a
10730 signed MOD operation whose second operand is a power of 2, make
10731 the MOD operation unsigned since it is simpler and equivalent. */
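/* E.g. (X % 4) == 0 for signed X becomes ((unsigned) X % 4) == 0,
   which can be expanded as a simple mask test.  */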
10732 if (integer_zerop (arg1)
10733 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10734 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10735 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10736 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10737 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10738 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10739 {
10740 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10741 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10742 fold_convert_loc (loc, newtype,
10743 TREE_OPERAND (arg0, 0)),
10744 fold_convert_loc (loc, newtype,
10745 TREE_OPERAND (arg0, 1)));
10746
10747 return fold_build2_loc (loc, code, type, newmod,
10748 fold_convert_loc (loc, newtype, arg1));
10749 }
10750
10751 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10752 C1 is a valid shift constant, and C2 is a power of two, i.e.
10753 a single bit. */
10754 if (TREE_CODE (arg0) == BIT_AND_EXPR
10755 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10756 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10757 == INTEGER_CST
10758 && integer_pow2p (TREE_OPERAND (arg0, 1))
10759 && integer_zerop (arg1))
10760 {
10761 tree itype = TREE_TYPE (arg0);
10762 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10763 prec = TYPE_PRECISION (itype);
10764
10765 /* Check for a valid shift count. */
10766 if (wi::ltu_p (arg001, prec))
10767 {
10768 tree arg01 = TREE_OPERAND (arg0, 1);
10769 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10770 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10771 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10772 can be rewritten as (X & (C2 << C1)) != 0. */
10773 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10774 {
10775 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10776 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10777 return fold_build2_loc (loc, code, type, tem,
10778 fold_convert_loc (loc, itype, arg1));
10779 }
10780 /* Otherwise, for signed (arithmetic) shifts,
10781 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10782 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10783 else if (!TYPE_UNSIGNED (itype))
10784 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10785 arg000, build_int_cst (itype, 0));
10786 /* Otherwise, for unsigned (logical) shifts,
10787 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10788 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10789 else
10790 return omit_one_operand_loc (loc, type,
10791 code == EQ_EXPR ? integer_one_node
10792 : integer_zero_node,
10793 arg000);
10794 }
10795 }
10796
10797 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10798 Similarly for NE_EXPR. */
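/* E.g. (X & 3) == 4 is always false, because 4 has a bit set
   outside the mask 3.  */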
10799 if (TREE_CODE (arg0) == BIT_AND_EXPR
10800 && TREE_CODE (arg1) == INTEGER_CST
10801 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10802 {
10803 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10804 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10805 TREE_OPERAND (arg0, 1));
10806 tree dandnotc
10807 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10808 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10809 notc);
10810 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10811 if (integer_nonzerop (dandnotc))
10812 return omit_one_operand_loc (loc, type, rslt, arg0);
10813 }
10814
10815 /* If this is a comparison of a field, we may be able to simplify it. */
10816 if ((TREE_CODE (arg0) == COMPONENT_REF
10817 || TREE_CODE (arg0) == BIT_FIELD_REF)
10818 /* Handle the constant case even without -O
10819 to make sure the warnings are given. */
10820 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10821 {
10822 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10823 if (t1)
10824 return t1;
10825 }
10826
10827 /* Optimize comparisons of strlen vs zero to a compare of the
10828 first character of the string vs zero. To wit,
10829 strlen(ptr) == 0 => *ptr == 0
10830 strlen(ptr) != 0 => *ptr != 0
10831 Other cases should reduce to one of these two (or a constant)
10832 due to the return value of strlen being unsigned. */
10833 if (TREE_CODE (arg0) == CALL_EXPR
10834 && integer_zerop (arg1))
10835 {
10836 tree fndecl = get_callee_fndecl (arg0);
10837
10838 if (fndecl
10839 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10840 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10841 && call_expr_nargs (arg0) == 1
10842 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10843 {
10844 tree iref = build_fold_indirect_ref_loc (loc,
10845 CALL_EXPR_ARG (arg0, 0));
10846 return fold_build2_loc (loc, code, type, iref,
10847 build_int_cst (TREE_TYPE (iref), 0));
10848 }
10849 }
10850
10851 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10852 of X. Similarly fold (X >> C) == 0 into X >= 0. */
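/* E.g. for a 32-bit X, (X >> 31) != 0 becomes X < 0.  */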
10853 if (TREE_CODE (arg0) == RSHIFT_EXPR
10854 && integer_zerop (arg1)
10855 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10856 {
10857 tree arg00 = TREE_OPERAND (arg0, 0);
10858 tree arg01 = TREE_OPERAND (arg0, 1);
10859 tree itype = TREE_TYPE (arg00);
10860 if (wi::eq_p (arg01, element_precision (itype) - 1))
10861 {
10862 if (TYPE_UNSIGNED (itype))
10863 {
10864 itype = signed_type_for (itype);
10865 arg00 = fold_convert_loc (loc, itype, arg00);
10866 }
10867 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10868 type, arg00, build_zero_cst (itype));
10869 }
10870 }
10871
10872 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10873 (X & C) == 0 when C is a single bit. */
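/* E.g. (~X & 8) == 0 becomes (X & 8) != 0: bit 3 is clear in ~X
   exactly when it is set in X.  */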
10874 if (TREE_CODE (arg0) == BIT_AND_EXPR
10875 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10876 && integer_zerop (arg1)
10877 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10878 {
10879 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10880 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10881 TREE_OPERAND (arg0, 1));
10882 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10883 type, tem,
10884 fold_convert_loc (loc, TREE_TYPE (arg0),
10885 arg1));
10886 }
10887
10888 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10889 constant C is a power of two, i.e. a single bit. */
10890 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10891 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10892 && integer_zerop (arg1)
10893 && integer_pow2p (TREE_OPERAND (arg0, 1))
10894 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10895 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10896 {
10897 tree arg00 = TREE_OPERAND (arg0, 0);
10898 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10899 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10900 }
10901
10902 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10903 when C is a power of two, i.e. a single bit. */
10904 if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10906 && integer_zerop (arg1)
10907 && integer_pow2p (TREE_OPERAND (arg0, 1))
10908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10909 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10910 {
10911 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10912 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10913 arg000, TREE_OPERAND (arg0, 1));
10914 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10915 tem, build_int_cst (TREE_TYPE (tem), 0));
10916 }
10917
10918 if (integer_zerop (arg1)
10919 && tree_expr_nonzero_p (arg0))
10920 {
10921 tree res = constant_boolean_node (code == NE_EXPR, type);
10922 return omit_one_operand_loc (loc, type, res, arg0);
10923 }
10924
10925 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
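/* E.g. (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0.  */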
10926 if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 && TREE_CODE (arg1) == BIT_AND_EXPR)
10928 {
10929 tree arg00 = TREE_OPERAND (arg0, 0);
10930 tree arg01 = TREE_OPERAND (arg0, 1);
10931 tree arg10 = TREE_OPERAND (arg1, 0);
10932 tree arg11 = TREE_OPERAND (arg1, 1);
10933 tree itype = TREE_TYPE (arg0);
10934
10935 if (operand_equal_p (arg01, arg11, 0))
10936 return fold_build2_loc (loc, code, type,
10937 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10938 fold_build2_loc (loc,
10939 BIT_XOR_EXPR, itype,
10940 arg00, arg10),
10941 arg01),
10942 build_zero_cst (itype));
10943
10944 if (operand_equal_p (arg01, arg10, 0))
10945 return fold_build2_loc (loc, code, type,
10946 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10947 fold_build2_loc (loc,
10948 BIT_XOR_EXPR, itype,
10949 arg00, arg11),
10950 arg01),
10951 build_zero_cst (itype));
10952
10953 if (operand_equal_p (arg00, arg11, 0))
10954 return fold_build2_loc (loc, code, type,
10955 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10956 fold_build2_loc (loc,
10957 BIT_XOR_EXPR, itype,
10958 arg01, arg10),
10959 arg00),
10960 build_zero_cst (itype));
10961
10962 if (operand_equal_p (arg00, arg10, 0))
10963 return fold_build2_loc (loc, code, type,
10964 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10965 fold_build2_loc (loc,
10966 BIT_XOR_EXPR, itype,
10967 arg01, arg11),
10968 arg00),
10969 build_zero_cst (itype));
10970 }
10971
10972 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10973 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10974 {
10975 tree arg00 = TREE_OPERAND (arg0, 0);
10976 tree arg01 = TREE_OPERAND (arg0, 1);
10977 tree arg10 = TREE_OPERAND (arg1, 0);
10978 tree arg11 = TREE_OPERAND (arg1, 1);
10979 tree itype = TREE_TYPE (arg0);
10980
10981 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10982 operand_equal_p guarantees no side-effects so we don't need
10983 to use omit_one_operand on Z. */
10984 if (operand_equal_p (arg01, arg11, 0))
10985 return fold_build2_loc (loc, code, type, arg00,
10986 fold_convert_loc (loc, TREE_TYPE (arg00),
10987 arg10));
10988 if (operand_equal_p (arg01, arg10, 0))
10989 return fold_build2_loc (loc, code, type, arg00,
10990 fold_convert_loc (loc, TREE_TYPE (arg00),
10991 arg11));
10992 if (operand_equal_p (arg00, arg11, 0))
10993 return fold_build2_loc (loc, code, type, arg01,
10994 fold_convert_loc (loc, TREE_TYPE (arg01),
10995 arg10));
10996 if (operand_equal_p (arg00, arg10, 0))
10997 return fold_build2_loc (loc, code, type, arg01,
10998 fold_convert_loc (loc, TREE_TYPE (arg01),
10999 arg11));
11000
11001 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11002 if (TREE_CODE (arg01) == INTEGER_CST
11003 && TREE_CODE (arg11) == INTEGER_CST)
11004 {
11005 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11006 fold_convert_loc (loc, itype, arg11));
11007 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11008 return fold_build2_loc (loc, code, type, tem,
11009 fold_convert_loc (loc, itype, arg10));
11010 }
11011 }
11012
11013 /* Attempt to simplify equality/inequality comparisons of complex
11014 values. Only lower the comparison if the result is known or
11015 can be simplified to a single scalar comparison. */
11016 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11017 || TREE_CODE (arg0) == COMPLEX_CST)
11018 && (TREE_CODE (arg1) == COMPLEX_EXPR
11019 || TREE_CODE (arg1) == COMPLEX_CST))
11020 {
11021 tree real0, imag0, real1, imag1;
11022 tree rcond, icond;
11023
11024 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11025 {
11026 real0 = TREE_OPERAND (arg0, 0);
11027 imag0 = TREE_OPERAND (arg0, 1);
11028 }
11029 else
11030 {
11031 real0 = TREE_REALPART (arg0);
11032 imag0 = TREE_IMAGPART (arg0);
11033 }
11034
11035 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11036 {
11037 real1 = TREE_OPERAND (arg1, 0);
11038 imag1 = TREE_OPERAND (arg1, 1);
11039 }
11040 else
11041 {
11042 real1 = TREE_REALPART (arg1);
11043 imag1 = TREE_IMAGPART (arg1);
11044 }
11045
11046 rcond = fold_binary_loc (loc, code, type, real0, real1);
11047 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11048 {
11049 if (integer_zerop (rcond))
11050 {
11051 if (code == EQ_EXPR)
11052 return omit_two_operands_loc (loc, type, boolean_false_node,
11053 imag0, imag1);
11054 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11055 }
11056 else
11057 {
11058 if (code == NE_EXPR)
11059 return omit_two_operands_loc (loc, type, boolean_true_node,
11060 imag0, imag1);
11061 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11062 }
11063 }
11064
11065 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11066 if (icond && TREE_CODE (icond) == INTEGER_CST)
11067 {
11068 if (integer_zerop (icond))
11069 {
11070 if (code == EQ_EXPR)
11071 return omit_two_operands_loc (loc, type, boolean_false_node,
11072 real0, real1);
11073 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11074 }
11075 else
11076 {
11077 if (code == NE_EXPR)
11078 return omit_two_operands_loc (loc, type, boolean_true_node,
11079 real0, real1);
11080 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11081 }
11082 }
11083 }
11084
11085 return NULL_TREE;
11086
11087 case LT_EXPR:
11088 case GT_EXPR:
11089 case LE_EXPR:
11090 case GE_EXPR:
11091 tem = fold_comparison (loc, code, type, op0, op1);
11092 if (tem != NULL_TREE)
11093 return tem;
11094
11095 /* Transform comparisons of the form X +- C CMP X. */
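/* E.g. when signed overflow is undefined, X - 1 > X folds to
   false and X + 1 > X folds to true.  */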
11096 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11098 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11099 && !HONOR_SNANS (arg0))
11100 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11101 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11102 {
11103 tree arg01 = TREE_OPERAND (arg0, 1);
11104 enum tree_code code0 = TREE_CODE (arg0);
11105 int is_positive;
11106
11107 if (TREE_CODE (arg01) == REAL_CST)
11108 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11109 else
11110 is_positive = tree_int_cst_sgn (arg01);
11111
11112 /* (X - c) > X becomes false. */
11113 if (code == GT_EXPR
11114 && ((code0 == MINUS_EXPR && is_positive >= 0)
11115 || (code0 == PLUS_EXPR && is_positive <= 0)))
11116 {
11117 if (TREE_CODE (arg01) == INTEGER_CST
11118 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11119 fold_overflow_warning (("assuming signed overflow does not "
11120 "occur when assuming that (X - c) > X "
11121 "is always false"),
11122 WARN_STRICT_OVERFLOW_ALL);
11123 return constant_boolean_node (0, type);
11124 }
11125
11126 /* Likewise (X + c) < X becomes false. */
11127 if (code == LT_EXPR
11128 && ((code0 == PLUS_EXPR && is_positive >= 0)
11129 || (code0 == MINUS_EXPR && is_positive <= 0)))
11130 {
11131 if (TREE_CODE (arg01) == INTEGER_CST
11132 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11133 fold_overflow_warning (("assuming signed overflow does not "
11134 "occur when assuming that "
11135 "(X + c) < X is always false"),
11136 WARN_STRICT_OVERFLOW_ALL);
11137 return constant_boolean_node (0, type);
11138 }
11139
11140 /* Convert (X - c) <= X to true. */
11141 if (!HONOR_NANS (arg1)
11142 && code == LE_EXPR
11143 && ((code0 == MINUS_EXPR && is_positive >= 0)
11144 || (code0 == PLUS_EXPR && is_positive <= 0)))
11145 {
11146 if (TREE_CODE (arg01) == INTEGER_CST
11147 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11148 fold_overflow_warning (("assuming signed overflow does not "
11149 "occur when assuming that "
11150 "(X - c) <= X is always true"),
11151 WARN_STRICT_OVERFLOW_ALL);
11152 return constant_boolean_node (1, type);
11153 }
11154
11155 /* Convert (X + c) >= X to true. */
11156 if (!HONOR_NANS (arg1)
11157 && code == GE_EXPR
11158 && ((code0 == PLUS_EXPR && is_positive >= 0)
11159 || (code0 == MINUS_EXPR && is_positive <= 0)))
11160 {
11161 if (TREE_CODE (arg01) == INTEGER_CST
11162 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11163 fold_overflow_warning (("assuming signed overflow does not "
11164 "occur when assuming that "
11165 "(X + c) >= X is always true"),
11166 WARN_STRICT_OVERFLOW_ALL);
11167 return constant_boolean_node (1, type);
11168 }
11169
11170 if (TREE_CODE (arg01) == INTEGER_CST)
11171 {
11172 /* Convert X + c > X and X - c < X to true for integers. */
11173 if (code == GT_EXPR
11174 && ((code0 == PLUS_EXPR && is_positive > 0)
11175 || (code0 == MINUS_EXPR && is_positive < 0)))
11176 {
11177 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11178 fold_overflow_warning (("assuming signed overflow does "
11179 "not occur when assuming that "
11180 "(X + c) > X is always true"),
11181 WARN_STRICT_OVERFLOW_ALL);
11182 return constant_boolean_node (1, type);
11183 }
11184
11185 if (code == LT_EXPR
11186 && ((code0 == MINUS_EXPR && is_positive > 0)
11187 || (code0 == PLUS_EXPR && is_positive < 0)))
11188 {
11189 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11190 fold_overflow_warning (("assuming signed overflow does "
11191 "not occur when assuming that "
11192 "(X - c) < X is always true"),
11193 WARN_STRICT_OVERFLOW_ALL);
11194 return constant_boolean_node (1, type);
11195 }
11196
11197 /* Convert X + c <= X and X - c >= X to false for integers. */
11198 if (code == LE_EXPR
11199 && ((code0 == PLUS_EXPR && is_positive > 0)
11200 || (code0 == MINUS_EXPR && is_positive < 0)))
11201 {
11202 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11203 fold_overflow_warning (("assuming signed overflow does "
11204 "not occur when assuming that "
11205 "(X + c) <= X is always false"),
11206 WARN_STRICT_OVERFLOW_ALL);
11207 return constant_boolean_node (0, type);
11208 }
11209
11210 if (code == GE_EXPR
11211 && ((code0 == MINUS_EXPR && is_positive > 0)
11212 || (code0 == PLUS_EXPR && is_positive < 0)))
11213 {
11214 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11215 fold_overflow_warning (("assuming signed overflow does "
11216 "not occur when assuming that "
11217 "(X - c) >= X is always false"),
11218 WARN_STRICT_OVERFLOW_ALL);
11219 return constant_boolean_node (0, type);
11220 }
11221 }
11222 }
11223
11224 /* If we are comparing an ABS_EXPR with a constant, we can
11225 convert all the cases into explicit comparisons, but they may
11226 well not be faster than doing the ABS and one comparison.
11227 But ABS (X) <= C is a range comparison, which becomes a subtraction
11228 and a comparison, and is probably faster. */
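/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5.  */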
11229 if (code == LE_EXPR
11230 && TREE_CODE (arg1) == INTEGER_CST
11231 && TREE_CODE (arg0) == ABS_EXPR
11232 && ! TREE_SIDE_EFFECTS (arg0)
11233 && (0 != (tem = negate_expr (arg1)))
11234 && TREE_CODE (tem) == INTEGER_CST
11235 && !TREE_OVERFLOW (tem))
11236 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11237 build2 (GE_EXPR, type,
11238 TREE_OPERAND (arg0, 0), tem),
11239 build2 (LE_EXPR, type,
11240 TREE_OPERAND (arg0, 0), arg1));
11241
11242 /* Convert ABS_EXPR<x> >= 0 to true. */
11243 strict_overflow_p = false;
11244 if (code == GE_EXPR
11245 && (integer_zerop (arg1)
11246 || (! HONOR_NANS (arg0)
11247 && real_zerop (arg1)))
11248 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11249 {
11250 if (strict_overflow_p)
11251 fold_overflow_warning (("assuming signed overflow does not occur "
11252 "when simplifying comparison of "
11253 "absolute value and zero"),
11254 WARN_STRICT_OVERFLOW_CONDITIONAL);
11255 return omit_one_operand_loc (loc, type,
11256 constant_boolean_node (true, type),
11257 arg0);
11258 }
11259
11260 /* Convert ABS_EXPR<x> < 0 to false. */
11261 strict_overflow_p = false;
11262 if (code == LT_EXPR
11263 && (integer_zerop (arg1) || real_zerop (arg1))
11264 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11265 {
11266 if (strict_overflow_p)
11267 fold_overflow_warning (("assuming signed overflow does not occur "
11268 "when simplifying comparison of "
11269 "absolute value and zero"),
11270 WARN_STRICT_OVERFLOW_CONDITIONAL);
11271 return omit_one_operand_loc (loc, type,
11272 constant_boolean_node (false, type),
11273 arg0);
11274 }
11275
11276 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11277 and similarly for >= into !=. */
11278 if ((code == LT_EXPR || code == GE_EXPR)
11279 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11280 && TREE_CODE (arg1) == LSHIFT_EXPR
11281 && integer_onep (TREE_OPERAND (arg1, 0)))
11282 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11283 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11284 TREE_OPERAND (arg1, 1)),
11285 build_zero_cst (TREE_TYPE (arg0)));
11286
11287 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11288 otherwise Y might be >= # of bits in X's type and thus e.g.
11289 (unsigned char) (1 << Y) for Y == 15 might be 0.
11290 If the cast is widening, then 1 << Y should have unsigned type,
11291 otherwise if Y is the number of bits in the signed shift type minus 1,
11292 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11293 Y == 31 might be 0xffffffff80000000. */
11294 if ((code == LT_EXPR || code == GE_EXPR)
11295 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11296 && CONVERT_EXPR_P (arg1)
11297 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11298 && (element_precision (TREE_TYPE (arg1))
11299 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11300 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11301 || (element_precision (TREE_TYPE (arg1))
11302 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11303 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11304 {
11305 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11306 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11307 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11308 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11309 build_zero_cst (TREE_TYPE (arg0)));
11310 }
11311
11312 return NULL_TREE;
11313
11314 case UNORDERED_EXPR:
11315 case ORDERED_EXPR:
11316 case UNLT_EXPR:
11317 case UNLE_EXPR:
11318 case UNGT_EXPR:
11319 case UNGE_EXPR:
11320 case UNEQ_EXPR:
11321 case LTGT_EXPR:
11322 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11323 {
11324 tree targ0 = strip_float_extensions (arg0);
11325 tree targ1 = strip_float_extensions (arg1);
11326 tree newtype = TREE_TYPE (targ0);
11327
11328 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11329 newtype = TREE_TYPE (targ1);
11330
11331 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11332 return fold_build2_loc (loc, code, type,
11333 fold_convert_loc (loc, newtype, targ0),
11334 fold_convert_loc (loc, newtype, targ1));
11335 }
11336
11337 return NULL_TREE;
11338
11339 case COMPOUND_EXPR:
11340 /* When pedantic, a compound expression can be neither an lvalue
11341 nor an integer constant expression. */
11342 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11343 return NULL_TREE;
11344 /* Don't let (0, 0) be a null pointer constant. */
11345 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11346 : fold_convert_loc (loc, type, arg1);
11347 return pedantic_non_lvalue_loc (loc, tem);
11348
11349 case ASSERT_EXPR:
11350 /* An ASSERT_EXPR should never be passed to fold_binary. */
11351 gcc_unreachable ();
11352
11353 default:
11354 return NULL_TREE;
11355 } /* switch (code) */
11356 }
11357
11358 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11359 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11360 of GOTO_EXPR. */
11361
11362 static tree
11363 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11364 {
11365 switch (TREE_CODE (*tp))
11366 {
11367 case LABEL_EXPR:
11368 return *tp;
11369
11370 case GOTO_EXPR:
11371 *walk_subtrees = 0;
11372
11373 /* ... fall through ... */
11374
11375 default:
11376 return NULL_TREE;
11377 }
11378 }
11379
11380 /* Return whether the sub-tree ST contains a label which is accessible from
11381 outside the sub-tree. */
11382
11383 static bool
11384 contains_label_p (tree st)
11385 {
11386 return
11387 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11388 }
11389
11390 /* Fold a ternary expression of code CODE and type TYPE with operands
11391 OP0, OP1, and OP2. Return the folded expression if folding is
11392 successful. Otherwise, return NULL_TREE. */
11393
11394 tree
11395 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11396 tree op0, tree op1, tree op2)
11397 {
11398 tree tem;
11399 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11400 enum tree_code_class kind = TREE_CODE_CLASS (code);
11401
11402 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11403 && TREE_CODE_LENGTH (code) == 3);
11404
11405 /* If this is a commutative operation, and OP0 is a constant, move it
11406 to OP1 to reduce the number of tests below. */
11407 if (commutative_ternary_tree_code (code)
11408 && tree_swap_operands_p (op0, op1, true))
11409 return fold_build3_loc (loc, code, type, op1, op0, op2);
11410
11411 tem = generic_simplify (loc, code, type, op0, op1, op2);
11412 if (tem)
11413 return tem;
11414
11415 /* Strip any conversions that don't change the mode. This is safe
11416 for every expression, except for a comparison expression because
11417 its signedness is derived from its operands. So, in the latter
11418 case, only strip conversions that don't change the signedness.
11419
11420 Note that this is done as an internal manipulation within the
11421 constant folder, in order to find the simplest representation of
11422 the arguments so that their form can be studied. In any case,
11423 the appropriate type conversions should be put back in the tree
11424 that will get out of the constant folder. */
11425 if (op0)
11426 {
11427 arg0 = op0;
11428 STRIP_NOPS (arg0);
11429 }
11430
11431 if (op1)
11432 {
11433 arg1 = op1;
11434 STRIP_NOPS (arg1);
11435 }
11436
11437 if (op2)
11438 {
11439 arg2 = op2;
11440 STRIP_NOPS (arg2);
11441 }
11442
11443 switch (code)
11444 {
11445 case COMPONENT_REF:
11446 if (TREE_CODE (arg0) == CONSTRUCTOR
11447 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11448 {
11449 unsigned HOST_WIDE_INT idx;
11450 tree field, value;
11451 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11452 if (field == arg1)
11453 return value;
11454 }
11455 return NULL_TREE;
11456
11457 case COND_EXPR:
11458 case VEC_COND_EXPR:
11459 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11460 so all simple results must be passed through pedantic_non_lvalue. */
11461 if (TREE_CODE (arg0) == INTEGER_CST)
11462 {
11463 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11464 tem = integer_zerop (arg0) ? op2 : op1;
11465 /* Only optimize constant conditions when the selected branch
11466 has the same type as the COND_EXPR. This avoids optimizing
11467 away "c ? x : throw", where the throw has a void type.
11468 Also avoid discarding an unused operand that contains a label. */
11469 if ((!TREE_SIDE_EFFECTS (unused_op)
11470 || !contains_label_p (unused_op))
11471 && (! VOID_TYPE_P (TREE_TYPE (tem))
11472 || VOID_TYPE_P (type)))
11473 return pedantic_non_lvalue_loc (loc, tem);
11474 return NULL_TREE;
11475 }
11476 else if (TREE_CODE (arg0) == VECTOR_CST)
11477 {
11478 if ((TREE_CODE (arg1) == VECTOR_CST
11479 || TREE_CODE (arg1) == CONSTRUCTOR)
11480 && (TREE_CODE (arg2) == VECTOR_CST
11481 || TREE_CODE (arg2) == CONSTRUCTOR))
11482 {
11483 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11484 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11485 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11486 for (i = 0; i < nelts; i++)
11487 {
11488 tree val = VECTOR_CST_ELT (arg0, i);
11489 if (integer_all_onesp (val))
11490 sel[i] = i;
11491 else if (integer_zerop (val))
11492 sel[i] = nelts + i;
11493 else /* Currently unreachable. */
11494 return NULL_TREE;
11495 }
11496 tree t = fold_vec_perm (type, arg1, arg2, sel);
11497 if (t != NULL_TREE)
11498 return t;
11499 }
11500 }
11501
11502 /* If we have A op B ? A : C, we may be able to convert this to a
11503 simpler expression, depending on the operation and the values
11504 of B and C. Signed zeros prevent all of these transformations,
11505 for reasons given above each one.
11506
11507 Also try swapping the arguments and inverting the conditional. */
11508 if (COMPARISON_CLASS_P (arg0)
11509 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11510 arg1, TREE_OPERAND (arg0, 1))
11511 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11512 {
11513 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11514 if (tem)
11515 return tem;
11516 }
11517
11518 if (COMPARISON_CLASS_P (arg0)
11519 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11520 op2,
11521 TREE_OPERAND (arg0, 1))
11522 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11523 {
11524 location_t loc0 = expr_location_or (arg0, loc);
11525 tem = fold_invert_truthvalue (loc0, arg0);
11526 if (tem && COMPARISON_CLASS_P (tem))
11527 {
11528 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11529 if (tem)
11530 return tem;
11531 }
11532 }
11533
11534 /* If the second operand is simpler than the third, swap them
11535 since that produces better jump optimization results. */
11536 if (truth_value_p (TREE_CODE (arg0))
11537 && tree_swap_operands_p (op1, op2, false))
11538 {
11539 location_t loc0 = expr_location_or (arg0, loc);
11540 /* See if this can be inverted. If it can't, possibly because
11541 it was a floating-point inequality comparison, don't do
11542 anything. */
11543 tem = fold_invert_truthvalue (loc0, arg0);
11544 if (tem)
11545 return fold_build3_loc (loc, code, type, tem, op2, op1);
11546 }
11547
11548 /* Convert A ? 1 : 0 to simply A. */
11549 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11550 : (integer_onep (op1)
11551 && !VECTOR_TYPE_P (type)))
11552 && integer_zerop (op2)
11553 /* If we try to convert OP0 to our type, the
11554 call to fold will try to move the conversion inside
11555 a COND, which will recurse. In that case, the COND_EXPR
11556 is probably the best choice, so leave it alone. */
11557 && type == TREE_TYPE (arg0))
11558 return pedantic_non_lvalue_loc (loc, arg0);
11559
11560 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11561 over COND_EXPR in cases such as floating point comparisons. */
11562 if (integer_zerop (op1)
11563 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11564 : (integer_onep (op2)
11565 && !VECTOR_TYPE_P (type)))
11566 && truth_value_p (TREE_CODE (arg0)))
11567 return pedantic_non_lvalue_loc (loc,
11568 fold_convert_loc (loc, type,
11569 invert_truthvalue_loc (loc,
11570 arg0)));
11571
11572 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
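/* E.g. for 32-bit int A, A < 0 ? INT_MIN : 0 folds to A & INT_MIN,
   because the sign bit is set exactly when A < 0.  */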
11573 if (TREE_CODE (arg0) == LT_EXPR
11574 && integer_zerop (TREE_OPERAND (arg0, 1))
11575 && integer_zerop (op2)
11576 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11577 {
11578 /* sign_bit_p looks through both zero and sign extensions,
11579 but for this optimization only sign extensions are
11580 usable. */
11581 tree tem2 = TREE_OPERAND (arg0, 0);
11582 while (tem != tem2)
11583 {
11584 if (TREE_CODE (tem2) != NOP_EXPR
11585 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11586 {
11587 tem = NULL_TREE;
11588 break;
11589 }
11590 tem2 = TREE_OPERAND (tem2, 0);
11591 }
11592 /* sign_bit_p only checks ARG1 bits within A's precision.
11593 If <sign bit of A> has wider type than A, bits outside
11594 of A's precision in <sign bit of A> need to be checked.
11595 If they are all 0, this optimization needs to be done
11596 in unsigned A's type; if they are all 1, in signed A's type;
11597 otherwise this can't be done. */
11598 if (tem
11599 && TYPE_PRECISION (TREE_TYPE (tem))
11600 < TYPE_PRECISION (TREE_TYPE (arg1))
11601 && TYPE_PRECISION (TREE_TYPE (tem))
11602 < TYPE_PRECISION (type))
11603 {
11604 int inner_width, outer_width;
11605 tree tem_type;
11606
11607 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11608 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11609 if (outer_width > TYPE_PRECISION (type))
11610 outer_width = TYPE_PRECISION (type);
11611
11612 wide_int mask = wi::shifted_mask
11613 (inner_width, outer_width - inner_width, false,
11614 TYPE_PRECISION (TREE_TYPE (arg1)));
11615
11616 wide_int common = mask & arg1;
11617 if (common == mask)
11618 {
11619 tem_type = signed_type_for (TREE_TYPE (tem));
11620 tem = fold_convert_loc (loc, tem_type, tem);
11621 }
11622 else if (common == 0)
11623 {
11624 tem_type = unsigned_type_for (TREE_TYPE (tem));
11625 tem = fold_convert_loc (loc, tem_type, tem);
11626 }
11627 else
11628 tem = NULL;
11629 }
11630
11631 if (tem)
11632 return
11633 fold_convert_loc (loc, type,
11634 fold_build2_loc (loc, BIT_AND_EXPR,
11635 TREE_TYPE (tem), tem,
11636 fold_convert_loc (loc,
11637 TREE_TYPE (tem),
11638 arg1)));
11639 }
11640
11641 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11642 already handled above. */
11643 if (TREE_CODE (arg0) == BIT_AND_EXPR
11644 && integer_onep (TREE_OPERAND (arg0, 1))
11645 && integer_zerop (op2)
11646 && integer_pow2p (arg1))
11647 {
11648 tree tem = TREE_OPERAND (arg0, 0);
11649 STRIP_NOPS (tem);
11650 if (TREE_CODE (tem) == RSHIFT_EXPR
11651 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11652 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11653 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11654 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11655 TREE_OPERAND (tem, 0), arg1);
11656 }
11657
11658 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11659 is probably obsolete because the first operand should be a
11660 truth value (that's why we have the two cases above), but let's
11661 leave it in until we can confirm this for all front-ends. */
11662 if (integer_zerop (op2)
11663 && TREE_CODE (arg0) == NE_EXPR
11664 && integer_zerop (TREE_OPERAND (arg0, 1))
11665 && integer_pow2p (arg1)
11666 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11668 arg1, OEP_ONLY_CONST))
11669 return pedantic_non_lvalue_loc (loc,
11670 fold_convert_loc (loc, type,
11671 TREE_OPERAND (arg0, 0)));
11672
11673 /* Disable the transformations below for vectors, since
11674 fold_binary_op_with_conditional_arg may undo them immediately,
11675 yielding an infinite loop. */
11676 if (code == VEC_COND_EXPR)
11677 return NULL_TREE;
11678
11679 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11680 if (integer_zerop (op2)
11681 && truth_value_p (TREE_CODE (arg0))
11682 && truth_value_p (TREE_CODE (arg1))
11683 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11684 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11685 : TRUTH_ANDIF_EXPR,
11686 type, fold_convert_loc (loc, type, arg0), arg1);
11687
11688 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11689 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11690 && truth_value_p (TREE_CODE (arg0))
11691 && truth_value_p (TREE_CODE (arg1))
11692 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11693 {
11694 location_t loc0 = expr_location_or (arg0, loc);
11695 /* Only perform transformation if ARG0 is easily inverted. */
11696 tem = fold_invert_truthvalue (loc0, arg0);
11697 if (tem)
11698 return fold_build2_loc (loc, code == VEC_COND_EXPR
11699 ? BIT_IOR_EXPR
11700 : TRUTH_ORIF_EXPR,
11701 type, fold_convert_loc (loc, type, tem),
11702 arg1);
11703 }
11704
11705 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11706 if (integer_zerop (arg1)
11707 && truth_value_p (TREE_CODE (arg0))
11708 && truth_value_p (TREE_CODE (op2))
11709 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11710 {
11711 location_t loc0 = expr_location_or (arg0, loc);
11712 /* Only perform transformation if ARG0 is easily inverted. */
11713 tem = fold_invert_truthvalue (loc0, arg0);
11714 if (tem)
11715 return fold_build2_loc (loc, code == VEC_COND_EXPR
11716 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11717 type, fold_convert_loc (loc, type, tem),
11718 op2);
11719 }
11720
11721 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11722 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11723 && truth_value_p (TREE_CODE (arg0))
11724 && truth_value_p (TREE_CODE (op2))
11725 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11726 return fold_build2_loc (loc, code == VEC_COND_EXPR
11727 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11728 type, fold_convert_loc (loc, type, arg0), op2);
11729
11730 return NULL_TREE;
11731
11732 case CALL_EXPR:
11733 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11734 of fold_ternary on them. */
11735 gcc_unreachable ();
11736
11737 case BIT_FIELD_REF:
11738 if ((TREE_CODE (arg0) == VECTOR_CST
11739 || (TREE_CODE (arg0) == CONSTRUCTOR
11740 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11741 && (type == TREE_TYPE (TREE_TYPE (arg0))
11742 || (TREE_CODE (type) == VECTOR_TYPE
11743 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11744 {
11745 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11746 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11747 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11748 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11749
11750 if (n != 0
11751 && (idx % width) == 0
11752 && (n % width) == 0
11753 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11754 {
11755 idx = idx / width;
11756 n = n / width;
11757
11758 if (TREE_CODE (arg0) == VECTOR_CST)
11759 {
11760 if (n == 1)
11761 return VECTOR_CST_ELT (arg0, idx);
11762
11763 tree *vals = XALLOCAVEC (tree, n);
11764 for (unsigned i = 0; i < n; ++i)
11765 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11766 return build_vector (type, vals);
11767 }
11768
11769 /* Constructor elements can be subvectors. */
11770 unsigned HOST_WIDE_INT k = 1;
11771 if (CONSTRUCTOR_NELTS (arg0) != 0)
11772 {
11773 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11774 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11775 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11776 }
11777
11778 /* We keep an exact subset of the constructor elements. */
11779 if ((idx % k) == 0 && (n % k) == 0)
11780 {
11781 if (CONSTRUCTOR_NELTS (arg0) == 0)
11782 return build_constructor (type, NULL);
11783 idx /= k;
11784 n /= k;
11785 if (n == 1)
11786 {
11787 if (idx < CONSTRUCTOR_NELTS (arg0))
11788 return CONSTRUCTOR_ELT (arg0, idx)->value;
11789 return build_zero_cst (type);
11790 }
11791
11792 vec<constructor_elt, va_gc> *vals;
11793 vec_alloc (vals, n);
11794 for (unsigned i = 0;
11795 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11796 ++i)
11797 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11798 CONSTRUCTOR_ELT
11799 (arg0, idx + i)->value);
11800 return build_constructor (type, vals);
11801 }
11802 /* The bitfield references a single constructor element. */
11803 else if (idx + n <= (idx / k + 1) * k)
11804 {
11805 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11806 return build_zero_cst (type);
11807 else if (n == k)
11808 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11809 else
11810 return fold_build3_loc (loc, code, type,
11811 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11812 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11813 }
11814 }
11815 }
11816
11817 /* A bit-field-ref that referenced the full argument can be stripped. */
11818 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11819 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11820 && integer_zerop (op2))
11821 return fold_convert_loc (loc, type, arg0);
11822
11823 /* On constants we can use native encode/interpret to constant
11824 fold (nearly) all BIT_FIELD_REFs. */
11825 if (CONSTANT_CLASS_P (arg0)
11826 && can_native_interpret_type_p (type)
11827 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11828 /* This limitation should not be necessary; we just need to
11829 round this up to mode size. */
11830 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11831 /* Need bit-shifting of the buffer to relax the following. */
11832 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11833 {
11834 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11835 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11836 unsigned HOST_WIDE_INT clen;
11837 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11838 /* ??? We cannot tell native_encode_expr to start at
11839 some random byte only. So limit us to a reasonable amount
11840 of work. */
11841 if (clen <= 4096)
11842 {
11843 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11844 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11845 if (len > 0
11846 && len * BITS_PER_UNIT >= bitpos + bitsize)
11847 {
11848 tree v = native_interpret_expr (type,
11849 b + bitpos / BITS_PER_UNIT,
11850 bitsize / BITS_PER_UNIT);
11851 if (v)
11852 return v;
11853 }
11854 }
11855 }
11856
11857 return NULL_TREE;
11858
11859 case FMA_EXPR:
11860 /* For integers we can decompose the FMA if possible. */
11861 if (TREE_CODE (arg0) == INTEGER_CST
11862 && TREE_CODE (arg1) == INTEGER_CST)
11863 return fold_build2_loc (loc, PLUS_EXPR, type,
11864 const_binop (MULT_EXPR, arg0, arg1), arg2);
11865 if (integer_zerop (arg2))
11866 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11867
11868 return fold_fma (loc, type, arg0, arg1, arg2);
11869
11870 case VEC_PERM_EXPR:
11871 if (TREE_CODE (arg2) == VECTOR_CST)
11872 {
11873 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11874 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11875 unsigned char *sel2 = sel + nelts;
11876 bool need_mask_canon = false;
11877 bool need_mask_canon2 = false;
11878 bool all_in_vec0 = true;
11879 bool all_in_vec1 = true;
11880 bool maybe_identity = true;
11881 bool single_arg = (op0 == op1);
11882 bool changed = false;
11883
11884 mask2 = 2 * nelts - 1;
11885 mask = single_arg ? (nelts - 1) : mask2;
11886 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11887 for (i = 0; i < nelts; i++)
11888 {
11889 tree val = VECTOR_CST_ELT (arg2, i);
11890 if (TREE_CODE (val) != INTEGER_CST)
11891 return NULL_TREE;
11892
11893 /* Make sure that the perm value is in an acceptable
11894 range. */
11895 wide_int t = val;
11896 need_mask_canon |= wi::gtu_p (t, mask);
11897 need_mask_canon2 |= wi::gtu_p (t, mask2);
11898 sel[i] = t.to_uhwi () & mask;
11899 sel2[i] = t.to_uhwi () & mask2;
11900
11901 if (sel[i] < nelts)
11902 all_in_vec1 = false;
11903 else
11904 all_in_vec0 = false;
11905
11906 if ((sel[i] & (nelts-1)) != i)
11907 maybe_identity = false;
11908 }
11909
11910 if (maybe_identity)
11911 {
11912 if (all_in_vec0)
11913 return op0;
11914 if (all_in_vec1)
11915 return op1;
11916 }
11917
11918 if (all_in_vec0)
11919 op1 = op0;
11920 else if (all_in_vec1)
11921 {
11922 op0 = op1;
11923 for (i = 0; i < nelts; i++)
11924 sel[i] -= nelts;
11925 need_mask_canon = true;
11926 }
11927
11928 if ((TREE_CODE (op0) == VECTOR_CST
11929 || TREE_CODE (op0) == CONSTRUCTOR)
11930 && (TREE_CODE (op1) == VECTOR_CST
11931 || TREE_CODE (op1) == CONSTRUCTOR))
11932 {
11933 tree t = fold_vec_perm (type, op0, op1, sel);
11934 if (t != NULL_TREE)
11935 return t;
11936 }
11937
11938 if (op0 == op1 && !single_arg)
11939 changed = true;
11940
11941 /* Some targets are deficient and fail to expand a
11942 single-argument permutation while still allowing an
11943 equivalent two-argument version. */
11944 if (need_mask_canon && arg2 == op2
11945 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11946 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11947 {
11948 need_mask_canon = need_mask_canon2;
11949 sel = sel2;
11950 }
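	      /* Example of the canonicalization above (a sketch, not from
		 the original sources): for V4SI, a selector element of 5
		 in a single-argument permutation is reduced to 1 by the
		 NELTS-1 mask, while SEL2 keeps the 2*NELTS-1 masked value
		 5; if the target can only expand the two-argument form,
		 SEL2 is used instead.  */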
11951
11952 if (need_mask_canon && arg2 == op2)
11953 {
11954 tree *tsel = XALLOCAVEC (tree, nelts);
11955 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11956 for (i = 0; i < nelts; i++)
11957 tsel[i] = build_int_cst (eltype, sel[i]);
11958 op2 = build_vector (TREE_TYPE (arg2), tsel);
11959 changed = true;
11960 }
11961
11962 if (changed)
11963 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11964 }
11965 return NULL_TREE;
11966
11967 default:
11968 return NULL_TREE;
11969 } /* switch (code) */
11970 }
11971
11972 /* Get the element at index ACCESS_INDEX from CTOR, which must be a
11973 CONSTRUCTOR of an array (or vector). Return NULL_TREE if none. */
11974
11975 tree
11976 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11977 {
11978 tree index_type = NULL_TREE;
11979 offset_int low_bound = 0;
11980
11981 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11982 {
11983 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11984 if (domain_type && TYPE_MIN_VALUE (domain_type))
11985 {
11986 /* Static constructors for variably sized objects make no sense. */
11987 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11988 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11989 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11990 }
11991 }
11992
11993 if (index_type)
11994 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11995 TYPE_SIGN (index_type));
11996
11997 offset_int index = low_bound - 1;
11998 if (index_type)
11999 index = wi::ext (index, TYPE_PRECISION (index_type),
12000 TYPE_SIGN (index_type));
12001
12002 offset_int max_index;
12003 unsigned HOST_WIDE_INT cnt;
12004 tree cfield, cval;
12005
12006 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12007 {
12008 /* An array constructor might explicitly set the index, or specify
12009 a range, or leave the index NULL, meaning that it is the next
12010 index after the previous one. */
12011 if (cfield)
12012 {
12013 if (TREE_CODE (cfield) == INTEGER_CST)
12014 max_index = index = wi::to_offset (cfield);
12015 else
12016 {
12017 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12018 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12019 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12020 }
12021 }
12022 else
12023 {
12024 index += 1;
12025 if (index_type)
12026 index = wi::ext (index, TYPE_PRECISION (index_type),
12027 TYPE_SIGN (index_type));
12028 max_index = index;
12029 }
12030
12031 /* Do we have a match? */
12032 if (wi::cmpu (access_index, index) >= 0
12033 && wi::cmpu (access_index, max_index) <= 0)
12034 return cval;
12035 }
12036 return NULL_TREE;
12037 }
12038
12039 /* Perform constant folding and related simplification of EXPR.
12040 The related simplifications include x*1 => x, x*0 => 0, etc.,
12041 and application of the associative law.
12042 NOP_EXPR conversions may be removed freely (as long as we
12043 are careful not to change the type of the overall expression).
12044 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12045 but we can constant-fold them if they have constant operands. */
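/* Illustrative use of fold (a hedged sketch; X stands for some arbitrary
   non-constant operand):

     tree t = build2 (MULT_EXPR, integer_type_node, x, integer_one_node);
     t = fold (t);

   after which T is simply X, by the x*1 => x rule mentioned above.  */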
12046
12047 #ifdef ENABLE_FOLD_CHECKING
12048 # define fold(x) fold_1 (x)
12049 static tree fold_1 (tree);
12050 static
12051 #endif
12052 tree
12053 fold (tree expr)
12054 {
12055 const tree t = expr;
12056 enum tree_code code = TREE_CODE (t);
12057 enum tree_code_class kind = TREE_CODE_CLASS (code);
12058 tree tem;
12059 location_t loc = EXPR_LOCATION (expr);
12060
12061 /* Return right away if a constant. */
12062 if (kind == tcc_constant)
12063 return t;
12064
12065 /* CALL_EXPR-like objects with variable numbers of operands are
12066 treated specially. */
12067 if (kind == tcc_vl_exp)
12068 {
12069 if (code == CALL_EXPR)
12070 {
12071 tem = fold_call_expr (loc, expr, false);
12072 return tem ? tem : expr;
12073 }
12074 return expr;
12075 }
12076
12077 if (IS_EXPR_CODE_CLASS (kind))
12078 {
12079 tree type = TREE_TYPE (t);
12080 tree op0, op1, op2;
12081
12082 switch (TREE_CODE_LENGTH (code))
12083 {
12084 case 1:
12085 op0 = TREE_OPERAND (t, 0);
12086 tem = fold_unary_loc (loc, code, type, op0);
12087 return tem ? tem : expr;
12088 case 2:
12089 op0 = TREE_OPERAND (t, 0);
12090 op1 = TREE_OPERAND (t, 1);
12091 tem = fold_binary_loc (loc, code, type, op0, op1);
12092 return tem ? tem : expr;
12093 case 3:
12094 op0 = TREE_OPERAND (t, 0);
12095 op1 = TREE_OPERAND (t, 1);
12096 op2 = TREE_OPERAND (t, 2);
12097 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12098 return tem ? tem : expr;
12099 default:
12100 break;
12101 }
12102 }
12103
12104 switch (code)
12105 {
12106 case ARRAY_REF:
12107 {
12108 tree op0 = TREE_OPERAND (t, 0);
12109 tree op1 = TREE_OPERAND (t, 1);
12110
12111 if (TREE_CODE (op1) == INTEGER_CST
12112 && TREE_CODE (op0) == CONSTRUCTOR
12113 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12114 {
12115 tree val = get_array_ctor_element_at_index (op0,
12116 wi::to_offset (op1));
12117 if (val)
12118 return val;
12119 }
12120
12121 return t;
12122 }
12123
12124 /* Return a VECTOR_CST if possible. */
12125 case CONSTRUCTOR:
12126 {
12127 tree type = TREE_TYPE (t);
12128 if (TREE_CODE (type) != VECTOR_TYPE)
12129 return t;
12130
12131 unsigned i;
12132 tree val;
12133 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12134 if (! CONSTANT_CLASS_P (val))
12135 return t;
12136
12137 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12138 }
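    /* For example (an illustrative sketch): a CONSTRUCTOR { 1, 2, 3, 4 }
       of a V4SI type has only constant elements and is folded to a
       VECTOR_CST above; if any element were, say, an SSA_NAME, the
       CONSTRUCTOR would be returned unchanged.  */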
12139
12140 case CONST_DECL:
12141 return fold (DECL_INITIAL (t));
12142
12143 default:
12144 return t;
12145 } /* switch (code) */
12146 }
12147
12148 #ifdef ENABLE_FOLD_CHECKING
12149 #undef fold
12150
12151 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12152 hash_table<nofree_ptr_hash<const tree_node> > *);
12153 static void fold_check_failed (const_tree, const_tree);
12154 void print_fold_checksum (const_tree);
12155
12156 /* When --enable-checking=fold is in effect, compute a digest of EXPR
12157 before and after the actual fold call to verify that fold did not
12158 accidentally change the original expr. */
12159
12160 tree
12161 fold (tree expr)
12162 {
12163 tree ret;
12164 struct md5_ctx ctx;
12165 unsigned char checksum_before[16], checksum_after[16];
12166 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12167
12168 md5_init_ctx (&ctx);
12169 fold_checksum_tree (expr, &ctx, &ht);
12170 md5_finish_ctx (&ctx, checksum_before);
12171 ht.empty ();
12172
12173 ret = fold_1 (expr);
12174
12175 md5_init_ctx (&ctx);
12176 fold_checksum_tree (expr, &ctx, &ht);
12177 md5_finish_ctx (&ctx, checksum_after);
12178
12179 if (memcmp (checksum_before, checksum_after, 16))
12180 fold_check_failed (expr, ret);
12181
12182 return ret;
12183 }
12184
12185 void
12186 print_fold_checksum (const_tree expr)
12187 {
12188 struct md5_ctx ctx;
12189 unsigned char checksum[16], cnt;
12190 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12191
12192 md5_init_ctx (&ctx);
12193 fold_checksum_tree (expr, &ctx, &ht);
12194 md5_finish_ctx (&ctx, checksum);
12195 for (cnt = 0; cnt < 16; ++cnt)
12196 fprintf (stderr, "%02x", checksum[cnt]);
12197 putc ('\n', stderr);
12198 }
12199
12200 static void
12201 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12202 {
12203 internal_error ("fold check: original tree changed by fold");
12204 }
12205
12206 static void
12207 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12208 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12209 {
12210 const tree_node **slot;
12211 enum tree_code code;
12212 union tree_node buf;
12213 int i, len;
12214
12215 recursive_label:
12216 if (expr == NULL)
12217 return;
12218 slot = ht->find_slot (expr, INSERT);
12219 if (*slot != NULL)
12220 return;
12221 *slot = expr;
12222 code = TREE_CODE (expr);
12223 if (TREE_CODE_CLASS (code) == tcc_declaration
12224 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12225 {
12226 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12227 memcpy ((char *) &buf, expr, tree_size (expr));
12228 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12229 buf.decl_with_vis.symtab_node = NULL;
12230 expr = (tree) &buf;
12231 }
12232 else if (TREE_CODE_CLASS (code) == tcc_type
12233 && (TYPE_POINTER_TO (expr)
12234 || TYPE_REFERENCE_TO (expr)
12235 || TYPE_CACHED_VALUES_P (expr)
12236 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12237 || TYPE_NEXT_VARIANT (expr)))
12238 {
12239 /* Allow these fields to be modified. */
12240 tree tmp;
12241 memcpy ((char *) &buf, expr, tree_size (expr));
12242 expr = tmp = (tree) &buf;
12243 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12244 TYPE_POINTER_TO (tmp) = NULL;
12245 TYPE_REFERENCE_TO (tmp) = NULL;
12246 TYPE_NEXT_VARIANT (tmp) = NULL;
12247 if (TYPE_CACHED_VALUES_P (tmp))
12248 {
12249 TYPE_CACHED_VALUES_P (tmp) = 0;
12250 TYPE_CACHED_VALUES (tmp) = NULL;
12251 }
12252 }
12253 md5_process_bytes (expr, tree_size (expr), ctx);
12254 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12255 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12256 if (TREE_CODE_CLASS (code) != tcc_type
12257 && TREE_CODE_CLASS (code) != tcc_declaration
12258 && code != TREE_LIST
12259 && code != SSA_NAME
12260 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12261 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12262 switch (TREE_CODE_CLASS (code))
12263 {
12264 case tcc_constant:
12265 switch (code)
12266 {
12267 case STRING_CST:
12268 md5_process_bytes (TREE_STRING_POINTER (expr),
12269 TREE_STRING_LENGTH (expr), ctx);
12270 break;
12271 case COMPLEX_CST:
12272 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12273 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12274 break;
12275 case VECTOR_CST:
12276 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12277 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12278 break;
12279 default:
12280 break;
12281 }
12282 break;
12283 case tcc_exceptional:
12284 switch (code)
12285 {
12286 case TREE_LIST:
12287 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12288 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12289 expr = TREE_CHAIN (expr);
12290 goto recursive_label;
12291 break;
12292 case TREE_VEC:
12293 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12294 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12295 break;
12296 default:
12297 break;
12298 }
12299 break;
12300 case tcc_expression:
12301 case tcc_reference:
12302 case tcc_comparison:
12303 case tcc_unary:
12304 case tcc_binary:
12305 case tcc_statement:
12306 case tcc_vl_exp:
12307 len = TREE_OPERAND_LENGTH (expr);
12308 for (i = 0; i < len; ++i)
12309 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12310 break;
12311 case tcc_declaration:
12312 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12313 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12314 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12315 {
12316 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12317 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12318 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12319 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12320 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12321 }
12322
12323 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12324 {
12325 if (TREE_CODE (expr) == FUNCTION_DECL)
12326 {
12327 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12328 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12329 }
12330 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12331 }
12332 break;
12333 case tcc_type:
12334 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12335 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12336 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12337 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12338 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12339 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12340 if (INTEGRAL_TYPE_P (expr)
12341 || SCALAR_FLOAT_TYPE_P (expr))
12342 {
12343 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12344 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12345 }
12346 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12347 if (TREE_CODE (expr) == RECORD_TYPE
12348 || TREE_CODE (expr) == UNION_TYPE
12349 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12350 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12351 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12352 break;
12353 default:
12354 break;
12355 }
12356 }
12357
12358 /* Helper function for outputting the checksum of a tree T. When
12359 debugging with gdb, you can "define mynext" to be "next" followed
12360 by "call debug_fold_checksum (op0)", then just trace down until the
12361 outputs differ. */
12362
12363 DEBUG_FUNCTION void
12364 debug_fold_checksum (const_tree t)
12365 {
12366 int i;
12367 unsigned char checksum[16];
12368 struct md5_ctx ctx;
12369 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12370
12371 md5_init_ctx (&ctx);
12372 fold_checksum_tree (t, &ctx, &ht);
12373 md5_finish_ctx (&ctx, checksum);
12374 ht.empty ();
12375
12376 for (i = 0; i < 16; i++)
12377 fprintf (stderr, "%d ", checksum[i]);
12378
12379 fprintf (stderr, "\n");
12380 }
12381
12382 #endif
12383
12384 /* Fold a unary tree expression with code CODE of type TYPE with an
12385 operand OP0. LOC is the location of the resulting expression.
12386 Return a folded expression if successful. Otherwise, return a tree
12387 expression with code CODE of type TYPE with an operand OP0. */
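/* For instance (an illustrative sketch):

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, integer_type_node,
				 build_int_cst (integer_type_node, 5));

   folds directly to the INTEGER_CST -5 instead of building a NEGATE_EXPR
   node around the constant.  */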
12388
12389 tree
12390 fold_build1_stat_loc (location_t loc,
12391 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12392 {
12393 tree tem;
12394 #ifdef ENABLE_FOLD_CHECKING
12395 unsigned char checksum_before[16], checksum_after[16];
12396 struct md5_ctx ctx;
12397 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12398
12399 md5_init_ctx (&ctx);
12400 fold_checksum_tree (op0, &ctx, &ht);
12401 md5_finish_ctx (&ctx, checksum_before);
12402 ht.empty ();
12403 #endif
12404
12405 tem = fold_unary_loc (loc, code, type, op0);
12406 if (!tem)
12407 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12408
12409 #ifdef ENABLE_FOLD_CHECKING
12410 md5_init_ctx (&ctx);
12411 fold_checksum_tree (op0, &ctx, &ht);
12412 md5_finish_ctx (&ctx, checksum_after);
12413
12414 if (memcmp (checksum_before, checksum_after, 16))
12415 fold_check_failed (op0, tem);
12416 #endif
12417 return tem;
12418 }
12419
12420 /* Fold a binary tree expression with code CODE of type TYPE with
12421 operands OP0 and OP1. LOC is the location of the resulting
12422 expression. Return a folded expression if successful. Otherwise,
12423 return a tree expression with code CODE of type TYPE with operands
12424 OP0 and OP1. */
12425
12426 tree
12427 fold_build2_stat_loc (location_t loc,
12428 enum tree_code code, tree type, tree op0, tree op1
12429 MEM_STAT_DECL)
12430 {
12431 tree tem;
12432 #ifdef ENABLE_FOLD_CHECKING
12433 unsigned char checksum_before_op0[16],
12434 checksum_before_op1[16],
12435 checksum_after_op0[16],
12436 checksum_after_op1[16];
12437 struct md5_ctx ctx;
12438 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12439
12440 md5_init_ctx (&ctx);
12441 fold_checksum_tree (op0, &ctx, &ht);
12442 md5_finish_ctx (&ctx, checksum_before_op0);
12443 ht.empty ();
12444
12445 md5_init_ctx (&ctx);
12446 fold_checksum_tree (op1, &ctx, &ht);
12447 md5_finish_ctx (&ctx, checksum_before_op1);
12448 ht.empty ();
12449 #endif
12450
12451 tem = fold_binary_loc (loc, code, type, op0, op1);
12452 if (!tem)
12453 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12454
12455 #ifdef ENABLE_FOLD_CHECKING
12456 md5_init_ctx (&ctx);
12457 fold_checksum_tree (op0, &ctx, &ht);
12458 md5_finish_ctx (&ctx, checksum_after_op0);
12459 ht.empty ();
12460
12461 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12462 fold_check_failed (op0, tem);
12463
12464 md5_init_ctx (&ctx);
12465 fold_checksum_tree (op1, &ctx, &ht);
12466 md5_finish_ctx (&ctx, checksum_after_op1);
12467
12468 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12469 fold_check_failed (op1, tem);
12470 #endif
12471 return tem;
12472 }
12473
12474 /* Fold a ternary tree expression with code CODE of type TYPE with
12475 operands OP0, OP1, and OP2. Return a folded expression if
12476 successful. Otherwise, return a tree expression with code CODE of
12477 type TYPE with operands OP0, OP1, and OP2. */
12478
12479 tree
12480 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12481 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12482 {
12483 tree tem;
12484 #ifdef ENABLE_FOLD_CHECKING
12485 unsigned char checksum_before_op0[16],
12486 checksum_before_op1[16],
12487 checksum_before_op2[16],
12488 checksum_after_op0[16],
12489 checksum_after_op1[16],
12490 checksum_after_op2[16];
12491 struct md5_ctx ctx;
12492 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12493
12494 md5_init_ctx (&ctx);
12495 fold_checksum_tree (op0, &ctx, &ht);
12496 md5_finish_ctx (&ctx, checksum_before_op0);
12497 ht.empty ();
12498
12499 md5_init_ctx (&ctx);
12500 fold_checksum_tree (op1, &ctx, &ht);
12501 md5_finish_ctx (&ctx, checksum_before_op1);
12502 ht.empty ();
12503
12504 md5_init_ctx (&ctx);
12505 fold_checksum_tree (op2, &ctx, &ht);
12506 md5_finish_ctx (&ctx, checksum_before_op2);
12507 ht.empty ();
12508 #endif
12509
12510 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12511 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12512 if (!tem)
12513 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12514
12515 #ifdef ENABLE_FOLD_CHECKING
12516 md5_init_ctx (&ctx);
12517 fold_checksum_tree (op0, &ctx, &ht);
12518 md5_finish_ctx (&ctx, checksum_after_op0);
12519 ht.empty ();
12520
12521 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12522 fold_check_failed (op0, tem);
12523
12524 md5_init_ctx (&ctx);
12525 fold_checksum_tree (op1, &ctx, &ht);
12526 md5_finish_ctx (&ctx, checksum_after_op1);
12527 ht.empty ();
12528
12529 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12530 fold_check_failed (op1, tem);
12531
12532 md5_init_ctx (&ctx);
12533 fold_checksum_tree (op2, &ctx, &ht);
12534 md5_finish_ctx (&ctx, checksum_after_op2);
12535
12536 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12537 fold_check_failed (op2, tem);
12538 #endif
12539 return tem;
12540 }
12541
12542 /* Fold a CALL_EXPR of type TYPE with function FN, the NARGS
12543 arguments in ARGARRAY, and a null static chain.
12544 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12545 of type TYPE from the given operands as constructed by build_call_array. */
12546
12547 tree
12548 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12549 int nargs, tree *argarray)
12550 {
12551 tree tem;
12552 #ifdef ENABLE_FOLD_CHECKING
12553 unsigned char checksum_before_fn[16],
12554 checksum_before_arglist[16],
12555 checksum_after_fn[16],
12556 checksum_after_arglist[16];
12557 struct md5_ctx ctx;
12558 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12559 int i;
12560
12561 md5_init_ctx (&ctx);
12562 fold_checksum_tree (fn, &ctx, &ht);
12563 md5_finish_ctx (&ctx, checksum_before_fn);
12564 ht.empty ();
12565
12566 md5_init_ctx (&ctx);
12567 for (i = 0; i < nargs; i++)
12568 fold_checksum_tree (argarray[i], &ctx, &ht);
12569 md5_finish_ctx (&ctx, checksum_before_arglist);
12570 ht.empty ();
12571 #endif
12572
12573 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12574 if (!tem)
12575 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12576
12577 #ifdef ENABLE_FOLD_CHECKING
12578 md5_init_ctx (&ctx);
12579 fold_checksum_tree (fn, &ctx, &ht);
12580 md5_finish_ctx (&ctx, checksum_after_fn);
12581 ht.empty ();
12582
12583 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12584 fold_check_failed (fn, tem);
12585
12586 md5_init_ctx (&ctx);
12587 for (i = 0; i < nargs; i++)
12588 fold_checksum_tree (argarray[i], &ctx, &ht);
12589 md5_finish_ctx (&ctx, checksum_after_arglist);
12590
12591 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12592 fold_check_failed (NULL_TREE, tem);
12593 #endif
12594 return tem;
12595 }
12596
12597 /* Perform constant folding and related simplification when building
12598 initializer expressions. These behave identically to "fold_buildN"
12599 but ignore potential run-time traps and exceptions that fold must preserve. */
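/* For example (a sketch): folding 1.0 / 0.0 through
   fold_build2_initializer_loc may yield +Inf even when -ftrapping-math
   is in effect, because a static initializer cannot trap at run time
   anyway.  */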
12600
12601 #define START_FOLD_INIT \
12602 int saved_signaling_nans = flag_signaling_nans;\
12603 int saved_trapping_math = flag_trapping_math;\
12604 int saved_rounding_math = flag_rounding_math;\
12605 int saved_trapv = flag_trapv;\
12606 int saved_folding_initializer = folding_initializer;\
12607 flag_signaling_nans = 0;\
12608 flag_trapping_math = 0;\
12609 flag_rounding_math = 0;\
12610 flag_trapv = 0;\
12611 folding_initializer = 1;
12612
12613 #define END_FOLD_INIT \
12614 flag_signaling_nans = saved_signaling_nans;\
12615 flag_trapping_math = saved_trapping_math;\
12616 flag_rounding_math = saved_rounding_math;\
12617 flag_trapv = saved_trapv;\
12618 folding_initializer = saved_folding_initializer;
12619
12620 tree
12621 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12622 tree type, tree op)
12623 {
12624 tree result;
12625 START_FOLD_INIT;
12626
12627 result = fold_build1_loc (loc, code, type, op);
12628
12629 END_FOLD_INIT;
12630 return result;
12631 }
12632
12633 tree
12634 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12635 tree type, tree op0, tree op1)
12636 {
12637 tree result;
12638 START_FOLD_INIT;
12639
12640 result = fold_build2_loc (loc, code, type, op0, op1);
12641
12642 END_FOLD_INIT;
12643 return result;
12644 }
12645
12646 tree
12647 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12648 int nargs, tree *argarray)
12649 {
12650 tree result;
12651 START_FOLD_INIT;
12652
12653 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12654
12655 END_FOLD_INIT;
12656 return result;
12657 }
12658
12659 #undef START_FOLD_INIT
12660 #undef END_FOLD_INIT
12661
12662 /* Determine whether the first argument is a multiple of the second argument.
12663 Return 0 if it is not, or if we cannot easily determine it to be one.
12664
12665 An example of the sort of thing we care about (at this point; this routine
12666 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12667 fold cases do now) is discovering that
12668
12669 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12670
12671 is a multiple of
12672
12673 SAVE_EXPR (J * 8)
12674
12675 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12676
12677 This code also handles discovering that
12678
12679 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12680
12681 is a multiple of 8 so we don't have to worry about dealing with a
12682 possible remainder.
12683
12684 Note that we *look* inside a SAVE_EXPR only to determine how it was
12685 calculated; it is not safe for fold to do much of anything else with the
12686 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12687 at run time. For example, the latter example above *cannot* be implemented
12688 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12689 evaluation time of the original SAVE_EXPR is not necessarily the same at
12690 the time the new expression is evaluated. The only optimization of this
12691 sort that would be valid is changing
12692
12693 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12694
12695 divided by 8 to
12696
12697 SAVE_EXPR (I) * SAVE_EXPR (J)
12698
12699 (where the same SAVE_EXPR (J) is used in the original and the
12700 transformed version). */
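/* A small usage sketch (not from the original sources):

     multiple_of_p (sizetype, build_int_cst (sizetype, 24),
		    build_int_cst (sizetype, 8))

   returns 1, while replacing 24 by 20 returns 0; likewise a TOP of the
   form X * 8 is recognized as a multiple of 8 through the MULT_EXPR
   case below.  */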
12701
12702 int
12703 multiple_of_p (tree type, const_tree top, const_tree bottom)
12704 {
12705 if (operand_equal_p (top, bottom, 0))
12706 return 1;
12707
12708 if (TREE_CODE (type) != INTEGER_TYPE)
12709 return 0;
12710
12711 switch (TREE_CODE (top))
12712 {
12713 case BIT_AND_EXPR:
12714 /* Bitwise and provides a power of two multiple. If the mask is
12715 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12716 if (!integer_pow2p (bottom))
12717 return 0;
12718 /* FALLTHRU */
12719
12720 case MULT_EXPR:
12721 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12722 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12723
12724 case PLUS_EXPR:
12725 case MINUS_EXPR:
12726 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12727 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12728
12729 case LSHIFT_EXPR:
12730 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12731 {
12732 tree op1, t1;
12733
12734 op1 = TREE_OPERAND (top, 1);
12735 /* const_binop may not detect overflow correctly,
12736 so check for it explicitly here. */
12737 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12738 && 0 != (t1 = fold_convert (type,
12739 const_binop (LSHIFT_EXPR,
12740 size_one_node,
12741 op1)))
12742 && !TREE_OVERFLOW (t1))
12743 return multiple_of_p (type, t1, bottom);
12744 }
12745 return 0;
12746
12747 case NOP_EXPR:
12748 /* Can't handle conversions from a non-integral or wider integral type. */
12749 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12750 || (TYPE_PRECISION (type)
12751 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12752 return 0;
12753
12754 /* ... fall through ... */
12755
12756 case SAVE_EXPR:
12757 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12758
12759 case COND_EXPR:
12760 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12761 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12762
12763 case INTEGER_CST:
12764 if (TREE_CODE (bottom) != INTEGER_CST
12765 || integer_zerop (bottom)
12766 || (TYPE_UNSIGNED (type)
12767 && (tree_int_cst_sgn (top) < 0
12768 || tree_int_cst_sgn (bottom) < 0)))
12769 return 0;
12770 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12771 SIGNED);
12772
12773 default:
12774 return 0;
12775 }
12776 }
12777
12778 #define tree_expr_nonnegative_warnv_p(X, Y) \
12779 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12780
12781 #define RECURSE(X) \
12782 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
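/* The #define above poisons direct recursive calls: within the helpers
   below one must write RECURSE (op0) rather than
   tree_expr_nonnegative_warnv_p (op0, ...), so that the DEPTH counter
   is threaded through consistently; a direct call would expand to the
   error pragma.  */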
12783
12784 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
12785
12786 static bool
12787 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12788 {
12789 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12790 && truth_value_p (code))
12791 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12792 have a signed:1 type (where the values are -1 and 0). */
12793 return true;
12794 return false;
12795 }
12796
12797 /* Return true if (CODE OP0) is known to be non-negative. If the return
12798 value is based on the assumption that signed overflow is undefined,
12799 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12800 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12801
12802 bool
12803 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12804 bool *strict_overflow_p, int depth)
12805 {
12806 if (TYPE_UNSIGNED (type))
12807 return true;
12808
12809 switch (code)
12810 {
12811 case ABS_EXPR:
12812 /* We can't return true when wrapping overflow is allowed, because
12813 ABS_EXPR<INT_MIN> == INT_MIN. */
12814 if (!ANY_INTEGRAL_TYPE_P (type))
12815 return true;
12816 if (TYPE_OVERFLOW_UNDEFINED (type))
12817 {
12818 *strict_overflow_p = true;
12819 return true;
12820 }
12821 break;
12822
12823 case NON_LVALUE_EXPR:
12824 case FLOAT_EXPR:
12825 case FIX_TRUNC_EXPR:
12826 return RECURSE (op0);
12827
12828 CASE_CONVERT:
12829 {
12830 tree inner_type = TREE_TYPE (op0);
12831 tree outer_type = type;
12832
12833 if (TREE_CODE (outer_type) == REAL_TYPE)
12834 {
12835 if (TREE_CODE (inner_type) == REAL_TYPE)
12836 return RECURSE (op0);
12837 if (INTEGRAL_TYPE_P (inner_type))
12838 {
12839 if (TYPE_UNSIGNED (inner_type))
12840 return true;
12841 return RECURSE (op0);
12842 }
12843 }
12844 else if (INTEGRAL_TYPE_P (outer_type))
12845 {
12846 if (TREE_CODE (inner_type) == REAL_TYPE)
12847 return RECURSE (op0);
12848 if (INTEGRAL_TYPE_P (inner_type))
12849 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12850 && TYPE_UNSIGNED (inner_type);
12851 }
12852 }
12853 break;
12854
12855 default:
12856 return tree_simple_nonnegative_warnv_p (code, type);
12857 }
12858
12859 /* We don't know the sign of `t', so be conservative and return false. */
12860 return false;
12861 }
12862
12863 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12864 value is based on the assumption that signed overflow is undefined,
12865 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12866 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12867
12868 bool
12869 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12870 tree op1, bool *strict_overflow_p,
12871 int depth)
12872 {
12873 if (TYPE_UNSIGNED (type))
12874 return true;
12875
12876 switch (code)
12877 {
12878 case POINTER_PLUS_EXPR:
12879 case PLUS_EXPR:
12880 if (FLOAT_TYPE_P (type))
12881 return RECURSE (op0) && RECURSE (op1);
12882
12883 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12884 both unsigned and at least 2 bits shorter than the result. */
12885 if (TREE_CODE (type) == INTEGER_TYPE
12886 && TREE_CODE (op0) == NOP_EXPR
12887 && TREE_CODE (op1) == NOP_EXPR)
12888 {
12889 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12890 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12891 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12892 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12893 {
12894 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12895 TYPE_PRECISION (inner2)) + 1;
12896 return prec < TYPE_PRECISION (type);
12897 }
12898 }
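      /* Worked instance of the rule above (a sketch): two 8-bit unsigned
	 values zero-extended to a 32-bit signed type are each at most 255,
	 so their sum is at most 510; since 8 + 1 < 32, the sum cannot
	 reach the sign bit and is known non-negative.  */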
12899 break;
12900
12901 case MULT_EXPR:
12902 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12903 {
12904 /* x * x is always non-negative for floating-point x,
12905 or when signed overflow is undefined. */
12906 if (operand_equal_p (op0, op1, 0)
12907 || (RECURSE (op0) && RECURSE (op1)))
12908 {
12909 if (ANY_INTEGRAL_TYPE_P (type)
12910 && TYPE_OVERFLOW_UNDEFINED (type))
12911 *strict_overflow_p = true;
12912 return true;
12913 }
12914 }
12915
12916 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12917 both unsigned and their combined precision is less than that of the result. */
12918 if (TREE_CODE (type) == INTEGER_TYPE
12919 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12920 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12921 {
12922 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12923 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12924 : TREE_TYPE (op0);
12925 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12926 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12927 : TREE_TYPE (op1);
12928
12929 bool unsigned0 = TYPE_UNSIGNED (inner0);
12930 bool unsigned1 = TYPE_UNSIGNED (inner1);
12931
12932 if (TREE_CODE (op0) == INTEGER_CST)
12933 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12934
12935 if (TREE_CODE (op1) == INTEGER_CST)
12936 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12937
12938 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12939 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12940 {
12941 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12942 ? tree_int_cst_min_precision (op0, UNSIGNED)
12943 : TYPE_PRECISION (inner0);
12944
12945 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12946 ? tree_int_cst_min_precision (op1, UNSIGNED)
12947 : TYPE_PRECISION (inner1);
12948
12949 return precision0 + precision1 < TYPE_PRECISION (type);
12950 }
12951 }
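      /* Worked instance (a sketch): multiplying two 8-bit unsigned values
	 zero-extended to a 32-bit signed type yields a product below 2^16,
	 and 8 + 8 < 32 guarantees the sign bit stays clear.  */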
12952 return false;
12953
12954 case BIT_AND_EXPR:
12955 case MAX_EXPR:
12956 return RECURSE (op0) || RECURSE (op1);
12957
12958 case BIT_IOR_EXPR:
12959 case BIT_XOR_EXPR:
12960 case MIN_EXPR:
12961 case RDIV_EXPR:
12962 case TRUNC_DIV_EXPR:
12963 case CEIL_DIV_EXPR:
12964 case FLOOR_DIV_EXPR:
12965 case ROUND_DIV_EXPR:
12966 return RECURSE (op0) && RECURSE (op1);
12967
12968 case TRUNC_MOD_EXPR:
12969 return RECURSE (op0);
12970
12971 case FLOOR_MOD_EXPR:
12972 return RECURSE (op1);
12973
12974 case CEIL_MOD_EXPR:
12975 case ROUND_MOD_EXPR:
12976 default:
12977 return tree_simple_nonnegative_warnv_p (code, type);
12978 }
12979
12980 /* We don't know the sign of `t', so be conservative and return false. */
12981 return false;
12982 }
12983
12984 /* Return true if T is known to be non-negative. If the return
12985 value is based on the assumption that signed overflow is undefined,
12986 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12987 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12988
12989 bool
12990 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12991 {
12992 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12993 return true;
12994
12995 switch (TREE_CODE (t))
12996 {
12997 case INTEGER_CST:
12998 return tree_int_cst_sgn (t) >= 0;
12999
13000 case REAL_CST:
13001 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13002
13003 case FIXED_CST:
13004 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13005
13006 case COND_EXPR:
13007 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13008
13009 case SSA_NAME:
13010 /* Limit the depth of recursion to avoid quadratic behavior.
13011 This is expected to catch almost all occurrences in practice.
13012 If this code misses important cases that unbounded recursion
13013 would not, passes that need this information could be revised
13014 to provide it through dataflow propagation. */
13015 return (!name_registered_for_update_p (t)
13016 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13017 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13018 strict_overflow_p, depth));
13019
13020 default:
13021 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13022 }
13023 }
13024
13025 /* Return true if T is known to be non-negative. If the return
13026 value is based on the assumption that signed overflow is undefined,
13027 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13028 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13029
13030 bool
13031 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13032 bool *strict_overflow_p, int depth)
13033 {
13034 switch (fn)
13035 {
13036 CASE_CFN_ACOS:
13037 CASE_CFN_ACOSH:
13038 CASE_CFN_CABS:
13039 CASE_CFN_COSH:
13040 CASE_CFN_ERFC:
13041 CASE_CFN_EXP:
13042 CASE_CFN_EXP10:
13043 CASE_CFN_EXP2:
13044 CASE_CFN_FABS:
13045 CASE_CFN_FDIM:
13046 CASE_CFN_HYPOT:
13047 CASE_CFN_POW10:
13048 CASE_CFN_FFS:
13049 CASE_CFN_PARITY:
13050 CASE_CFN_POPCOUNT:
13051 CASE_CFN_CLZ:
13052 CASE_CFN_CLRSB:
13053 case CFN_BUILT_IN_BSWAP32:
13054 case CFN_BUILT_IN_BSWAP64:
13055 /* Always true. */
13056 return true;
13057
13058 CASE_CFN_SQRT:
13059 /* sqrt(-0.0) is -0.0. */
13060 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13061 return true;
13062 return RECURSE (arg0);
13063
13064 CASE_CFN_ASINH:
13065 CASE_CFN_ATAN:
13066 CASE_CFN_ATANH:
13067 CASE_CFN_CBRT:
13068 CASE_CFN_CEIL:
13069 CASE_CFN_ERF:
13070 CASE_CFN_EXPM1:
13071 CASE_CFN_FLOOR:
13072 CASE_CFN_FMOD:
13073 CASE_CFN_FREXP:
13074 CASE_CFN_ICEIL:
13075 CASE_CFN_IFLOOR:
13076 CASE_CFN_IRINT:
13077 CASE_CFN_IROUND:
13078 CASE_CFN_LCEIL:
13079 CASE_CFN_LDEXP:
13080 CASE_CFN_LFLOOR:
13081 CASE_CFN_LLCEIL:
13082 CASE_CFN_LLFLOOR:
13083 CASE_CFN_LLRINT:
13084 CASE_CFN_LLROUND:
13085 CASE_CFN_LRINT:
13086 CASE_CFN_LROUND:
13087 CASE_CFN_MODF:
13088 CASE_CFN_NEARBYINT:
13089 CASE_CFN_RINT:
13090 CASE_CFN_ROUND:
13091 CASE_CFN_SCALB:
13092 CASE_CFN_SCALBLN:
13093 CASE_CFN_SCALBN:
13094 CASE_CFN_SIGNBIT:
13095 CASE_CFN_SIGNIFICAND:
13096 CASE_CFN_SINH:
13097 CASE_CFN_TANH:
13098 CASE_CFN_TRUNC:
13099 /* True if the 1st argument is nonnegative. */
13100 return RECURSE (arg0);
13101
13102 CASE_CFN_FMAX:
13103 /* True if either the 1st or the 2nd argument is nonnegative. */
13104 return RECURSE (arg0) || RECURSE (arg1);
13105
13106 CASE_CFN_FMIN:
13107 /* True if both the 1st and the 2nd arguments are nonnegative. */
13108 return RECURSE (arg0) && RECURSE (arg1);
13109
13110 CASE_CFN_COPYSIGN:
13111 /* True if the 2nd argument is nonnegative. */
13112 return RECURSE (arg1);
13113
13114 CASE_CFN_POWI:
13115 /* True if the 1st argument is nonnegative or the second
13116 argument is an even integer. */
13117 if (TREE_CODE (arg1) == INTEGER_CST
13118 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13119 return true;
13120 return RECURSE (arg0);
13121
13122 CASE_CFN_POW:
13123 /* True if the 1st argument is nonnegative or the second
13124 argument is an even integer-valued real. */
13125 if (TREE_CODE (arg1) == REAL_CST)
13126 {
13127 REAL_VALUE_TYPE c;
13128 HOST_WIDE_INT n;
13129
13130 c = TREE_REAL_CST (arg1);
13131 n = real_to_integer (&c);
13132 if ((n & 1) == 0)
13133 {
13134 REAL_VALUE_TYPE cint;
13135 real_from_integer (&cint, VOIDmode, n, SIGNED);
13136 if (real_identical (&c, &cint))
13137 return true;
13138 }
13139 }
13140 return RECURSE (arg0);
13141
13142 default:
13143 break;
13144 }
13145 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13146 }
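/* For example (a sketch): pow (x, 2.0) is recognized as non-negative
   above because 2.0 is an even integer-valued real, while pow (x, 3.0)
   is non-negative only if X itself is.  */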
13147
13148 /* Return true if T is known to be non-negative. If the return
13149 value is based on the assumption that signed overflow is undefined,
13150 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13151 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13152
13153 static bool
13154 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13155 {
13156 enum tree_code code = TREE_CODE (t);
13157 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13158 return true;
13159
13160 switch (code)
13161 {
13162 case TARGET_EXPR:
13163 {
13164 tree temp = TARGET_EXPR_SLOT (t);
13165 t = TARGET_EXPR_INITIAL (t);
13166
13167 /* If the initializer is non-void, then it's a normal expression
13168 that will be assigned to the slot. */
13169 if (!VOID_TYPE_P (t))
13170 return RECURSE (t);
13171
13172 /* Otherwise, the initializer sets the slot in some way. One common
13173 way is an assignment statement at the end of the initializer. */
13174 while (1)
13175 {
13176 if (TREE_CODE (t) == BIND_EXPR)
13177 t = expr_last (BIND_EXPR_BODY (t));
13178 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13179 || TREE_CODE (t) == TRY_CATCH_EXPR)
13180 t = expr_last (TREE_OPERAND (t, 0));
13181 else if (TREE_CODE (t) == STATEMENT_LIST)
13182 t = expr_last (t);
13183 else
13184 break;
13185 }
13186 if (TREE_CODE (t) == MODIFY_EXPR
13187 && TREE_OPERAND (t, 0) == temp)
13188 return RECURSE (TREE_OPERAND (t, 1));
13189
13190 return false;
13191 }
13192
13193 case CALL_EXPR:
13194 {
13195 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13196 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13197
13198 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13199 get_call_combined_fn (t),
13200 arg0,
13201 arg1,
13202 strict_overflow_p, depth);
13203 }
13204 case COMPOUND_EXPR:
13205 case MODIFY_EXPR:
13206 return RECURSE (TREE_OPERAND (t, 1));
13207
13208 case BIND_EXPR:
13209 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13210
13211 case SAVE_EXPR:
13212 return RECURSE (TREE_OPERAND (t, 0));
13213
13214 default:
13215 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13216 }
13217 }
13218
13219 #undef RECURSE
13220 #undef tree_expr_nonnegative_warnv_p
13221
13222 /* Return true if T is known to be non-negative. If the return
13223 value is based on the assumption that signed overflow is undefined,
13224 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13225 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13226
13227 bool
13228 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13229 {
13230 enum tree_code code;
13231 if (t == error_mark_node)
13232 return false;
13233
13234 code = TREE_CODE (t);
13235 switch (TREE_CODE_CLASS (code))
13236 {
13237 case tcc_binary:
13238 case tcc_comparison:
13239 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13240 TREE_TYPE (t),
13241 TREE_OPERAND (t, 0),
13242 TREE_OPERAND (t, 1),
13243 strict_overflow_p, depth);
13244
13245 case tcc_unary:
13246 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13247 TREE_TYPE (t),
13248 TREE_OPERAND (t, 0),
13249 strict_overflow_p, depth);
13250
13251 case tcc_constant:
13252 case tcc_declaration:
13253 case tcc_reference:
13254 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13255
13256 default:
13257 break;
13258 }
13259
13260 switch (code)
13261 {
13262 case TRUTH_AND_EXPR:
13263 case TRUTH_OR_EXPR:
13264 case TRUTH_XOR_EXPR:
13265 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13266 TREE_TYPE (t),
13267 TREE_OPERAND (t, 0),
13268 TREE_OPERAND (t, 1),
13269 strict_overflow_p, depth);
13270 case TRUTH_NOT_EXPR:
13271 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13272 TREE_TYPE (t),
13273 TREE_OPERAND (t, 0),
13274 strict_overflow_p, depth);
13275
13276 case COND_EXPR:
13277 case CONSTRUCTOR:
13278 case OBJ_TYPE_REF:
13279 case ASSERT_EXPR:
13280 case ADDR_EXPR:
13281 case WITH_SIZE_EXPR:
13282 case SSA_NAME:
13283 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13284
13285 default:
13286 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13287 }
13288 }
13289
13290 /* Return true if `t' is known to be non-negative. Handle warnings
13291 about undefined signed overflow. */
13292
13293 bool
13294 tree_expr_nonnegative_p (tree t)
13295 {
13296 bool ret, strict_overflow_p;
13297
13298 strict_overflow_p = false;
13299 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13300 if (strict_overflow_p)
13301 fold_overflow_warning (("assuming signed overflow does not occur when "
13302 "determining that expression is always "
13303 "non-negative"),
13304 WARN_STRICT_OVERFLOW_MISC);
13305 return ret;
13306 }
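/* E.g. (a sketch): with the default undefined-overflow semantics for a
   signed X, tree_expr_nonnegative_p on X * X returns true, and because
   that conclusion assumes signed overflow is undefined, a
   -Wstrict-overflow note may be issued via fold_overflow_warning
   above.  */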
13307
13308
13309 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13310 For floating point we further ensure that the value is not denormal.
13311 Similar logic is present in nonzero_address in rtlanal.c.
13312
13313 If the return value is based on the assumption that signed overflow
13314 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13315 change *STRICT_OVERFLOW_P. */
13316
13317 bool
13318 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13319 bool *strict_overflow_p)
13320 {
13321 switch (code)
13322 {
13323 case ABS_EXPR:
13324 return tree_expr_nonzero_warnv_p (op0,
13325 strict_overflow_p);
13326
13327 case NOP_EXPR:
13328 {
13329 tree inner_type = TREE_TYPE (op0);
13330 tree outer_type = type;
13331
13332 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13333 && tree_expr_nonzero_warnv_p (op0,
13334 strict_overflow_p));
13335 }
13336 break;
13337
13338 case NON_LVALUE_EXPR:
13339 return tree_expr_nonzero_warnv_p (op0,
13340 strict_overflow_p);
13341
13342 default:
13343 break;
13344 }
13345
13346 return false;
13347 }
13348
13349 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13350 For floating point we further ensure that the value is not denormal.
13351 Similar logic is present in nonzero_address in rtlanal.c.
13352
13353 If the return value is based on the assumption that signed overflow
13354 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13355 change *STRICT_OVERFLOW_P. */
13356
13357 bool
13358 tree_binary_nonzero_warnv_p (enum tree_code code,
13359 tree type,
13360 tree op0,
13361 tree op1, bool *strict_overflow_p)
13362 {
13363 bool sub_strict_overflow_p;
13364 switch (code)
13365 {
13366 case POINTER_PLUS_EXPR:
13367 case PLUS_EXPR:
13368 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13369 {
13370 /* In the presence of negative values it is hard
13371 to say anything definite. */
13372 sub_strict_overflow_p = false;
13373 if (!tree_expr_nonnegative_warnv_p (op0,
13374 &sub_strict_overflow_p)
13375 || !tree_expr_nonnegative_warnv_p (op1,
13376 &sub_strict_overflow_p))
13377 return false;
13378 /* One of the operands must be positive and the other non-negative. */
13379 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13380 overflows, on a two's-complement machine the sum of two
13381 nonnegative numbers can never be zero. */
13382 return (tree_expr_nonzero_warnv_p (op0,
13383 strict_overflow_p)
13384 || tree_expr_nonzero_warnv_p (op1,
13385 strict_overflow_p));
13386 }
13387 break;
13388
13389 case MULT_EXPR:
13390 if (TYPE_OVERFLOW_UNDEFINED (type))
13391 {
13392 if (tree_expr_nonzero_warnv_p (op0,
13393 strict_overflow_p)
13394 && tree_expr_nonzero_warnv_p (op1,
13395 strict_overflow_p))
13396 {
13397 *strict_overflow_p = true;
13398 return true;
13399 }
13400 }
13401 break;
13402
13403 case MIN_EXPR:
13404 sub_strict_overflow_p = false;
13405 if (tree_expr_nonzero_warnv_p (op0,
13406 &sub_strict_overflow_p)
13407 && tree_expr_nonzero_warnv_p (op1,
13408 &sub_strict_overflow_p))
13409 {
13410 if (sub_strict_overflow_p)
13411 *strict_overflow_p = true;
13412 }
13413 break;
13414
13415 case MAX_EXPR:
13416 sub_strict_overflow_p = false;
13417 if (tree_expr_nonzero_warnv_p (op0,
13418 &sub_strict_overflow_p))
13419 {
13420 if (sub_strict_overflow_p)
13421 *strict_overflow_p = true;
13422
13423 /* When both operands are nonzero, then MAX must be too. */
13424 if (tree_expr_nonzero_warnv_p (op1,
13425 strict_overflow_p))
13426 return true;
13427
13428 /* MAX where operand 0 is positive is positive. */
13429 return tree_expr_nonnegative_warnv_p (op0,
13430 strict_overflow_p);
13431 }
13432 /* MAX where operand 1 is positive is positive. */
13433 else if (tree_expr_nonzero_warnv_p (op1,
13434 &sub_strict_overflow_p)
13435 && tree_expr_nonnegative_warnv_p (op1,
13436 &sub_strict_overflow_p))
13437 {
13438 if (sub_strict_overflow_p)
13439 *strict_overflow_p = true;
13440 return true;
13441 }
13442 break;
13443
13444 case BIT_IOR_EXPR:
13445 return (tree_expr_nonzero_warnv_p (op1,
13446 strict_overflow_p)
13447 || tree_expr_nonzero_warnv_p (op0,
13448 strict_overflow_p));
13449
13450 default:
13451 break;
13452 }
13453
13454 return false;
13455 }
13456
13457 /* Return true when T is an address and is known to be nonzero.
13458 For floating point we further ensure that T is not denormal.
13459 Similar logic is present in nonzero_address in rtlanal.c.
13460
13461 If the return value is based on the assumption that signed overflow
13462 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13463 change *STRICT_OVERFLOW_P. */
13464
13465 bool
13466 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13467 {
13468 bool sub_strict_overflow_p;
13469 switch (TREE_CODE (t))
13470 {
13471 case INTEGER_CST:
13472 return !integer_zerop (t);
13473
13474 case ADDR_EXPR:
13475 {
13476 tree base = TREE_OPERAND (t, 0);
13477
13478 if (!DECL_P (base))
13479 base = get_base_address (base);
13480
13481 if (!base)
13482 return false;
13483
13484 /* For objects in the symbol table, check whether we know they are nonzero.
13485 Don't do anything for variables and functions before the symtab is built;
13486 it is quite possible that they will be declared weak later. */
13487 if (DECL_P (base) && decl_in_symtab_p (base))
13488 {
13489 struct symtab_node *symbol;
13490
13491 symbol = symtab_node::get_create (base);
13492 if (symbol)
13493 return symbol->nonzero_address ();
13494 else
13495 return false;
13496 }
13497
13498 /* Function-local objects are never NULL. */
13499 if (DECL_P (base)
13500 && (DECL_CONTEXT (base)
13501 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13502 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13503 return true;
13504
13505 /* Constants are never weak. */
13506 if (CONSTANT_CLASS_P (base))
13507 return true;
13508
13509 return false;
13510 }
13511
13512 case COND_EXPR:
13513 sub_strict_overflow_p = false;
13514 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13515 &sub_strict_overflow_p)
13516 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13517 &sub_strict_overflow_p))
13518 {
13519 if (sub_strict_overflow_p)
13520 *strict_overflow_p = true;
13521 return true;
13522 }
13523 break;
13524
13525 default:
13526 break;
13527 }
13528 return false;
13529 }
13530
13531 #define integer_valued_real_p(X) \
13532 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13533
13534 #define RECURSE(X) \
13535 ((integer_valued_real_p) (X, depth + 1))
13536
13537 /* Return true if the floating point result of (CODE OP0) has an
13538 integer value. We also allow +Inf, -Inf and NaN to be considered
13539 integer values. Return false for signaling NaN.
13540
13541 DEPTH is the current nesting depth of the query. */
13542
13543 bool
13544 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13545 {
13546 switch (code)
13547 {
13548 case FLOAT_EXPR:
13549 return true;
13550
13551 case ABS_EXPR:
13552 return RECURSE (op0);
13553
13554 CASE_CONVERT:
13555 {
13556 tree type = TREE_TYPE (op0);
13557 if (TREE_CODE (type) == INTEGER_TYPE)
13558 return true;
13559 if (TREE_CODE (type) == REAL_TYPE)
13560 return RECURSE (op0);
13561 break;
13562 }
13563
13564 default:
13565 break;
13566 }
13567 return false;
13568 }
13569
13570 /* Return true if the floating point result of (CODE OP0 OP1) has an
13571 integer value. We also allow +Inf, -Inf and NaN to be considered
13572 integer values. Return false for signaling NaN.
13573
13574 DEPTH is the current nesting depth of the query. */
13575
13576 bool
13577 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13578 {
13579 switch (code)
13580 {
13581 case PLUS_EXPR:
13582 case MINUS_EXPR:
13583 case MULT_EXPR:
13584 case MIN_EXPR:
13585 case MAX_EXPR:
13586 return RECURSE (op0) && RECURSE (op1);
13587
13588 default:
13589 break;
13590 }
13591 return false;
13592 }
13593
13594 /* Return true if the floating point result of calling FN with arguments
13595 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13596 considered integer values. Return false for signaling NaN. If FN
13597 takes fewer than 2 arguments, the remaining ARGn are null.
13598
13599 DEPTH is the current nesting depth of the query. */
13600
13601 bool
13602 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13603 {
13604 switch (fn)
13605 {
13606 CASE_CFN_CEIL:
13607 CASE_CFN_FLOOR:
13608 CASE_CFN_NEARBYINT:
13609 CASE_CFN_RINT:
13610 CASE_CFN_ROUND:
13611 CASE_CFN_TRUNC:
13612 return true;
13613
13614 CASE_CFN_FMIN:
13615 CASE_CFN_FMAX:
13616 return RECURSE (arg0) && RECURSE (arg1);
13617
13618 default:
13619 break;
13620 }
13621 return false;
13622 }
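/* For example (a sketch): trunc (x) always yields an integer-valued
   real, whereas fmin (x, y) does so only when both arguments are
   themselves integer valued.  */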
13623
13624 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13625 has an integer value. We also allow +Inf, -Inf and NaN to be
13626 considered integer values. Return false for signaling NaN.
13627
13628 DEPTH is the current nesting depth of the query. */
13629
13630 bool
13631 integer_valued_real_single_p (tree t, int depth)
13632 {
13633 switch (TREE_CODE (t))
13634 {
13635 case REAL_CST:
13636 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13637
13638 case COND_EXPR:
13639 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13640
13641 case SSA_NAME:
13642 /* Limit the depth of recursion to avoid quadratic behavior.
13643 This is expected to catch almost all occurrences in practice.
13644 If this code misses important cases that unbounded recursion
13645 would not, passes that need this information could be revised
13646 to provide it through dataflow propagation. */
13647 return (!name_registered_for_update_p (t)
13648 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13649 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13650 depth));
13651
13652 default:
13653 break;
13654 }
13655 return false;
13656 }
13657
13658 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13659 has an integer value. We also allow +Inf, -Inf and NaN to be
13660 considered integer values. Return false for signaling NaN.
13661
13662 DEPTH is the current nesting depth of the query. */
13663
13664 static bool
13665 integer_valued_real_invalid_p (tree t, int depth)
13666 {
13667 switch (TREE_CODE (t))
13668 {
13669 case COMPOUND_EXPR:
13670 case MODIFY_EXPR:
13671 case BIND_EXPR:
13672 return RECURSE (TREE_OPERAND (t, 1));
13673
13674 case SAVE_EXPR:
13675 return RECURSE (TREE_OPERAND (t, 0));
13676
13677 default:
13678 break;
13679 }
13680 return false;
13681 }
13682
13683 #undef RECURSE
13684 #undef integer_valued_real_p
13685
13686 /* Return true if the floating point expression T has an integer value.
13687 We also allow +Inf, -Inf and NaN to be considered integer values.
13688 Return false for signaling NaN.
13689
13690 DEPTH is the current nesting depth of the query. */
13691
13692 bool
13693 integer_valued_real_p (tree t, int depth)
13694 {
13695 if (t == error_mark_node)
13696 return false;
13697
13698 tree_code code = TREE_CODE (t);
13699 switch (TREE_CODE_CLASS (code))
13700 {
13701 case tcc_binary:
13702 case tcc_comparison:
13703 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13704 TREE_OPERAND (t, 1), depth);
13705
13706 case tcc_unary:
13707 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13708
13709 case tcc_constant:
13710 case tcc_declaration:
13711 case tcc_reference:
13712 return integer_valued_real_single_p (t, depth);
13713
13714 default:
13715 break;
13716 }
13717
13718 switch (code)
13719 {
13720 case COND_EXPR:
13721 case SSA_NAME:
13722 return integer_valued_real_single_p (t, depth);
13723
13724 case CALL_EXPR:
13725 {
13726 tree arg0 = (call_expr_nargs (t) > 0
13727 ? CALL_EXPR_ARG (t, 0)
13728 : NULL_TREE);
13729 tree arg1 = (call_expr_nargs (t) > 1
13730 ? CALL_EXPR_ARG (t, 1)
13731 : NULL_TREE);
13732 return integer_valued_real_call_p (get_call_combined_fn (t),
13733 arg0, arg1, depth);
13734 }
13735
13736 default:
13737 return integer_valued_real_invalid_p (t, depth);
13738 }
13739 }
13740
13741 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13742 attempt to fold the expression to a constant without modifying TYPE,
13743 OP0 or OP1.
13744
13745 If the expression could be simplified to a constant, then return
13746 the constant. If the expression would not be simplified to a
13747 constant, then return NULL_TREE. */
13748
13749 tree
13750 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13751 {
13752 tree tem = fold_binary (code, type, op0, op1);
13753 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13754 }
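/* A usage sketch (illustrative only):

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 2),
			      build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, while the same call with a non-constant
   operand yields NULL_TREE.  */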
13755
13756 /* Given the components of a unary expression CODE, TYPE and OP0,
13757 attempt to fold the expression to a constant without modifying
13758 TYPE or OP0.
13759
13760 If the expression could be simplified to a constant, then return
13761 the constant. If the expression would not be simplified to a
13762 constant, then return NULL_TREE. */
13763
13764 tree
13765 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13766 {
13767 tree tem = fold_unary (code, type, op0);
13768 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13769 }
13770
13771 /* If EXP represents referencing an element in a constant string
13772 (either via pointer arithmetic or array indexing), return the
13773 tree representing the value accessed, otherwise return NULL. */
13774
13775 tree
13776 fold_read_from_constant_string (tree exp)
13777 {
13778 if ((TREE_CODE (exp) == INDIRECT_REF
13779 || TREE_CODE (exp) == ARRAY_REF)
13780 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13781 {
13782 tree exp1 = TREE_OPERAND (exp, 0);
13783 tree index;
13784 tree string;
13785 location_t loc = EXPR_LOCATION (exp);
13786
13787 if (TREE_CODE (exp) == INDIRECT_REF)
13788 string = string_constant (exp1, &index);
13789 else
13790 {
13791 tree low_bound = array_ref_low_bound (exp);
13792 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13793
13794 /* Optimize the special case of a zero lower bound.
13795 
13796 We convert the low_bound to sizetype to avoid some problems
13797 with constant folding. (E.g. suppose the lower bound is 1,
13798 and its mode is QI. Without the conversion, (ARRAY
13799 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13800 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13801 if (! integer_zerop (low_bound))
13802 index = size_diffop_loc (loc, index,
13803 fold_convert_loc (loc, sizetype, low_bound));
13804
13805 string = exp1;
13806 }
13807
13808 if (string
13809 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13810 && TREE_CODE (string) == STRING_CST
13811 && TREE_CODE (index) == INTEGER_CST
13812 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13813 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13814 == MODE_INT)
13815 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13816 return build_int_cst_type (TREE_TYPE (exp),
13817 (TREE_STRING_POINTER (string)
13818 [TREE_INT_CST_LOW (index)]));
13819 }
13820 return NULL;
13821 }
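
/* Editorial sketch (not part of GCC): reading "abc"[1] out of a
   string literal.  build_string_literal yields &"abc"[0]; offsetting
   it by one and indirecting gives a reference this routine can fold
   to the character constant 'b'.  */

static tree
example_fold_read_from_constant_string (void)
{
  tree str = build_string_literal (4, "abc");
  tree p = fold_build_pointer_plus_hwi_loc (UNKNOWN_LOCATION, str, 1);
  tree ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (p)), p);
  /* An INTEGER_CST holding 'b'; NULL for a non-constant access.  */
  return fold_read_from_constant_string (ref);
}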
13822
13823 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13824 an integer constant, real, or fixed-point constant.
13825
13826 TYPE is the type of the result. */
13827
13828 static tree
13829 fold_negate_const (tree arg0, tree type)
13830 {
13831 tree t = NULL_TREE;
13832
13833 switch (TREE_CODE (arg0))
13834 {
13835 case INTEGER_CST:
13836 {
13837 bool overflow;
13838 wide_int val = wi::neg (arg0, &overflow);
13839 t = force_fit_type (type, val, 1,
13840 (overflow | TREE_OVERFLOW (arg0))
13841 && !TYPE_UNSIGNED (type));
13842 break;
13843 }
13844
13845 case REAL_CST:
13846 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13847 break;
13848
13849 case FIXED_CST:
13850 {
13851 FIXED_VALUE_TYPE f;
13852 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13853 &(TREE_FIXED_CST (arg0)), NULL,
13854 TYPE_SATURATING (type));
13855 t = build_fixed (type, f);
13856 /* Propagate overflow flags. */
13857 if (overflow_p | TREE_OVERFLOW (arg0))
13858 TREE_OVERFLOW (t) = 1;
13859 break;
13860 }
13861
13862 default:
13863 gcc_unreachable ();
13864 }
13865
13866 return t;
13867 }
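
/* Editorial sketch (not part of GCC): negating the most negative
   value of a signed 8-bit type.  The negation wraps back to -128 and
   the result carries TREE_OVERFLOW, per the force_fit_type call
   above.  */

static tree
example_fold_negate_const (void)
{
  tree m128 = build_int_cst (signed_char_type_node, -128);
  return fold_negate_const (m128, signed_char_type_node);
}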
13868
13869 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13870 an integer constant or real constant.
13871
13872 TYPE is the type of the result. */
13873
13874 tree
13875 fold_abs_const (tree arg0, tree type)
13876 {
13877 tree t = NULL_TREE;
13878
13879 switch (TREE_CODE (arg0))
13880 {
13881 case INTEGER_CST:
13882 {
13883 /* If the value is unsigned or non-negative, then the absolute value
13884 is the same as the ordinary value. */
13885 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13886 t = arg0;
13887
13888 /* If the value is negative, then the absolute value is
13889 its negation. */
13890 else
13891 {
13892 bool overflow;
13893 wide_int val = wi::neg (arg0, &overflow);
13894 t = force_fit_type (type, val, -1,
13895 overflow | TREE_OVERFLOW (arg0));
13896 }
13897 }
13898 break;
13899
13900 case REAL_CST:
13901 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13902 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13903 else
13904 t = arg0;
13905 break;
13906
13907 default:
13908 gcc_unreachable ();
13909 }
13910
13911 return t;
13912 }
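
/* Editorial sketch (not part of GCC): abs (-5) folds to the
   INTEGER_CST 5; a non-negative operand is returned unchanged.  */

static tree
example_fold_abs_const (void)
{
  return fold_abs_const (build_int_cst (integer_type_node, -5),
			 integer_type_node);
}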
13913
13914 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13915 constant. TYPE is the type of the result. */
13916
13917 static tree
13918 fold_not_const (const_tree arg0, tree type)
13919 {
13920 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13921
13922 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13923 }
13924
13925 /* Given CODE, a relational operator, the target type, TYPE and two
13926 constant operands OP0 and OP1, return the result of the
13927 relational operation. If the result is not a compile time
13928 constant, then return NULL_TREE. */
13929
13930 static tree
13931 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13932 {
13933 int result, invert;
13934
13935 /* From here on, the only cases we handle are when the result is
13936 known to be a constant. */
13937
13938 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13939 {
13940 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13941 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13942
13943 /* Handle the cases where either operand is a NaN. */
13944 if (real_isnan (c0) || real_isnan (c1))
13945 {
13946 switch (code)
13947 {
13948 case EQ_EXPR:
13949 case ORDERED_EXPR:
13950 result = 0;
13951 break;
13952
13953 case NE_EXPR:
13954 case UNORDERED_EXPR:
13955 case UNLT_EXPR:
13956 case UNLE_EXPR:
13957 case UNGT_EXPR:
13958 case UNGE_EXPR:
13959 case UNEQ_EXPR:
13960 result = 1;
13961 break;
13962
13963 case LT_EXPR:
13964 case LE_EXPR:
13965 case GT_EXPR:
13966 case GE_EXPR:
13967 case LTGT_EXPR:
13968 if (flag_trapping_math)
13969 return NULL_TREE;
13970 result = 0;
13971 break;
13972
13973 default:
13974 gcc_unreachable ();
13975 }
13976
13977 return constant_boolean_node (result, type);
13978 }
13979
13980 return constant_boolean_node (real_compare (code, c0, c1), type);
13981 }
13982
13983 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13984 {
13985 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13986 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13987 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13988 }
13989
13990 /* Handle equality/inequality of complex constants. */
13991 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13992 {
13993 tree rcond = fold_relational_const (code, type,
13994 TREE_REALPART (op0),
13995 TREE_REALPART (op1));
13996 tree icond = fold_relational_const (code, type,
13997 TREE_IMAGPART (op0),
13998 TREE_IMAGPART (op1));
13999 if (code == EQ_EXPR)
14000 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14001 else if (code == NE_EXPR)
14002 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14003 else
14004 return NULL_TREE;
14005 }
14006
14007 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14008 {
14009 if (!VECTOR_TYPE_P (type))
14010 {
14011 /* Have vector comparison with scalar boolean result. */
14012 bool result = true;
14013 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14014 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14015 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14016 {
14017 tree elem0 = VECTOR_CST_ELT (op0, i);
14018 tree elem1 = VECTOR_CST_ELT (op1, i);
14019 tree tmp = fold_relational_const (code, type, elem0, elem1);
14020 result &= integer_onep (tmp);
14021 }
14022 if (code == NE_EXPR)
14023 result = !result;
14024 return constant_boolean_node (result, type);
14025 }
14026 unsigned count = VECTOR_CST_NELTS (op0);
14027 tree *elts = XALLOCAVEC (tree, count);
14028 gcc_assert (VECTOR_CST_NELTS (op1) == count
14029 && TYPE_VECTOR_SUBPARTS (type) == count);
14030
14031 for (unsigned i = 0; i < count; i++)
14032 {
14033 tree elem_type = TREE_TYPE (type);
14034 tree elem0 = VECTOR_CST_ELT (op0, i);
14035 tree elem1 = VECTOR_CST_ELT (op1, i);
14036
14037 tree tem = fold_relational_const (code, elem_type,
14038 elem0, elem1);
14039
14040 if (tem == NULL_TREE)
14041 return NULL_TREE;
14042
14043 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14044 }
14045
14046 return build_vector (type, elts);
14047 }
14048
14049 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14050
14051 To compute GT, swap the arguments and do LT.
14052 To compute GE, do LT and invert the result.
14053 To compute LE, swap the arguments, do LT and invert the result.
14054 To compute NE, do EQ and invert the result.
14055
14056 Therefore, the code below must handle only EQ and LT. */
14057
14058 if (code == LE_EXPR || code == GT_EXPR)
14059 {
14060 std::swap (op0, op1);
14061 code = swap_tree_comparison (code);
14062 }
14063
14064 /* Note that it is safe to invert for real values here because we
14065 have already handled the one case where it matters. */
14066
14067 invert = 0;
14068 if (code == NE_EXPR || code == GE_EXPR)
14069 {
14070 invert = 1;
14071 code = invert_tree_comparison (code, false);
14072 }
14073
14074 /* Compute a result for LT or EQ if args permit;
14075 otherwise return NULL_TREE. */
14076 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14077 {
14078 if (code == EQ_EXPR)
14079 result = tree_int_cst_equal (op0, op1);
14080 else
14081 result = tree_int_cst_lt (op0, op1);
14082 }
14083 else
14084 return NULL_TREE;
14085
14086 if (invert)
14087 result ^= 1;
14088 return constant_boolean_node (result, type);
14089 }
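
/* Editorial sketch (not part of GCC): 2 < 3 is decided at compile
   time.  Following the scheme above, LT needs no swap or inversion,
   so this reduces directly to tree_int_cst_lt.  */

static tree
example_fold_relational_const (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields boolean_true_node; NULL_TREE for non-constant operands.  */
  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}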
14090
14091 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14092 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14093 itself. */
14094
14095 tree
14096 fold_build_cleanup_point_expr (tree type, tree expr)
14097 {
14098 /* If the expression does not have side effects then we don't have to wrap
14099 it with a cleanup point expression. */
14100 if (!TREE_SIDE_EFFECTS (expr))
14101 return expr;
14102
14103 /* If the expression is a return, check whether the expression inside
14104 the return, or the right-hand side of the modify expression inside the
14105 return, has no side effects. If either has no side effects, we don't
14106 need to wrap the expression in a cleanup point expression. Note we
14107 don't check the left-hand side of the modify: it is always the return decl. */
14108 if (TREE_CODE (expr) == RETURN_EXPR)
14109 {
14110 tree op = TREE_OPERAND (expr, 0);
14111 if (!op || !TREE_SIDE_EFFECTS (op))
14112 return expr;
14113 op = TREE_OPERAND (op, 1);
14114 if (!TREE_SIDE_EFFECTS (op))
14115 return expr;
14116 }
14117
14118 return build1 (CLEANUP_POINT_EXPR, type, expr);
14119 }
14120
14121 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14122 of an indirection through OP0, or NULL_TREE if no simplification is
14123 possible. */
14124
14125 tree
14126 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14127 {
14128 tree sub = op0;
14129 tree subtype;
14130
14131 STRIP_NOPS (sub);
14132 subtype = TREE_TYPE (sub);
14133 if (!POINTER_TYPE_P (subtype))
14134 return NULL_TREE;
14135
14136 if (TREE_CODE (sub) == ADDR_EXPR)
14137 {
14138 tree op = TREE_OPERAND (sub, 0);
14139 tree optype = TREE_TYPE (op);
14140 /* *&CONST_DECL -> to the value of the const decl. */
14141 if (TREE_CODE (op) == CONST_DECL)
14142 return DECL_INITIAL (op);
14143 /* *&p => p; make sure to handle *&"str"[cst] here. */
14144 if (type == optype)
14145 {
14146 tree fop = fold_read_from_constant_string (op);
14147 if (fop)
14148 return fop;
14149 else
14150 return op;
14151 }
14152 /* *(foo *)&fooarray => fooarray[0] */
14153 else if (TREE_CODE (optype) == ARRAY_TYPE
14154 && type == TREE_TYPE (optype)
14155 && (!in_gimple_form
14156 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14157 {
14158 tree type_domain = TYPE_DOMAIN (optype);
14159 tree min_val = size_zero_node;
14160 if (type_domain && TYPE_MIN_VALUE (type_domain))
14161 min_val = TYPE_MIN_VALUE (type_domain);
14162 if (in_gimple_form
14163 && TREE_CODE (min_val) != INTEGER_CST)
14164 return NULL_TREE;
14165 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14166 NULL_TREE, NULL_TREE);
14167 }
14168 /* *(foo *)&complexfoo => __real__ complexfoo */
14169 else if (TREE_CODE (optype) == COMPLEX_TYPE
14170 && type == TREE_TYPE (optype))
14171 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14172 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14173 else if (TREE_CODE (optype) == VECTOR_TYPE
14174 && type == TREE_TYPE (optype))
14175 {
14176 tree part_width = TYPE_SIZE (type);
14177 tree index = bitsize_int (0);
14178 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14179 }
14180 }
14181
14182 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14183 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14184 {
14185 tree op00 = TREE_OPERAND (sub, 0);
14186 tree op01 = TREE_OPERAND (sub, 1);
14187
14188 STRIP_NOPS (op00);
14189 if (TREE_CODE (op00) == ADDR_EXPR)
14190 {
14191 tree op00type;
14192 op00 = TREE_OPERAND (op00, 0);
14193 op00type = TREE_TYPE (op00);
14194
14195 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14196 if (TREE_CODE (op00type) == VECTOR_TYPE
14197 && type == TREE_TYPE (op00type))
14198 {
14199 HOST_WIDE_INT offset = tree_to_shwi (op01);
14200 tree part_width = TYPE_SIZE (type);
14201 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14202 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14203 tree index = bitsize_int (indexi);
14204
14205 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14206 return fold_build3_loc (loc,
14207 BIT_FIELD_REF, type, op00,
14208 part_width, index);
14209
14210 }
14211 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14212 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14213 && type == TREE_TYPE (op00type))
14214 {
14215 tree size = TYPE_SIZE_UNIT (type);
14216 if (tree_int_cst_equal (size, op01))
14217 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14218 }
14219 /* ((foo *)&fooarray)[1] => fooarray[1] */
14220 else if (TREE_CODE (op00type) == ARRAY_TYPE
14221 && type == TREE_TYPE (op00type))
14222 {
14223 tree type_domain = TYPE_DOMAIN (op00type);
14224 tree min_val = size_zero_node;
14225 if (type_domain && TYPE_MIN_VALUE (type_domain))
14226 min_val = TYPE_MIN_VALUE (type_domain);
14227 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14228 TYPE_SIZE_UNIT (type));
14229 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14230 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14231 NULL_TREE, NULL_TREE);
14232 }
14233 }
14234 }
14235
14236 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14237 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14238 && type == TREE_TYPE (TREE_TYPE (subtype))
14239 && (!in_gimple_form
14240 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14241 {
14242 tree type_domain;
14243 tree min_val = size_zero_node;
14244 sub = build_fold_indirect_ref_loc (loc, sub);
14245 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14246 if (type_domain && TYPE_MIN_VALUE (type_domain))
14247 min_val = TYPE_MIN_VALUE (type_domain);
14248 if (in_gimple_form
14249 && TREE_CODE (min_val) != INTEGER_CST)
14250 return NULL_TREE;
14251 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14252 NULL_TREE);
14253 }
14254
14255 return NULL_TREE;
14256 }
14257
14258 /* Builds an expression for an indirection through T, simplifying some
14259 cases. */
14260
14261 tree
14262 build_fold_indirect_ref_loc (location_t loc, tree t)
14263 {
14264 tree type = TREE_TYPE (TREE_TYPE (t));
14265 tree sub = fold_indirect_ref_1 (loc, type, t);
14266
14267 if (sub)
14268 return sub;
14269
14270 return build1_loc (loc, INDIRECT_REF, type, t);
14271 }
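
/* Editorial sketch (not part of GCC): the *(foo *)&fooarray case
   handled by fold_indirect_ref_1 above.  ARR is a hypothetical
   VAR_DECL of type float[4]; the result is the ARRAY_REF arr[0]
   rather than an INDIRECT_REF.  */

static tree
example_build_fold_indirect_ref (tree arr)
{
  tree pf = build_pointer_type (float_type_node);
  tree addr = fold_convert (pf, build_fold_addr_expr (arr));
  return build_fold_indirect_ref_loc (UNKNOWN_LOCATION, addr);
}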
14272
14273 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14274
14275 tree
14276 fold_indirect_ref_loc (location_t loc, tree t)
14277 {
14278 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14279
14280 if (sub)
14281 return sub;
14282 else
14283 return t;
14284 }
14285
14286 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14287 whose result is ignored. The type of the returned tree need not be
14288 the same as the original expression. */
14289
14290 tree
14291 fold_ignored_result (tree t)
14292 {
14293 if (!TREE_SIDE_EFFECTS (t))
14294 return integer_zero_node;
14295
14296 for (;;)
14297 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14298 {
14299 case tcc_unary:
14300 t = TREE_OPERAND (t, 0);
14301 break;
14302
14303 case tcc_binary:
14304 case tcc_comparison:
14305 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14306 t = TREE_OPERAND (t, 0);
14307 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14308 t = TREE_OPERAND (t, 1);
14309 else
14310 return t;
14311 break;
14312
14313 case tcc_expression:
14314 switch (TREE_CODE (t))
14315 {
14316 case COMPOUND_EXPR:
14317 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14318 return t;
14319 t = TREE_OPERAND (t, 0);
14320 break;
14321
14322 case COND_EXPR:
14323 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14324 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14325 return t;
14326 t = TREE_OPERAND (t, 0);
14327 break;
14328
14329 default:
14330 return t;
14331 }
14332 break;
14333
14334 default:
14335 return t;
14336 }
14337 }
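
/* Editorial sketch (not part of GCC): dropping the ignored half of a
   COMPOUND_EXPR.  CALL is a hypothetical expression with side
   effects, X one without; (call, x) in an ignored context reduces to
   just CALL.  */

static tree
example_fold_ignored_result (tree call, tree x)
{
  tree t = build2 (COMPOUND_EXPR, TREE_TYPE (x), call, x);
  return fold_ignored_result (t);  /* == CALL.  */
}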
14338
14339 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14340
14341 tree
14342 round_up_loc (location_t loc, tree value, unsigned int divisor)
14343 {
14344 tree div = NULL_TREE;
14345
14346 if (divisor == 1)
14347 return value;
14348
14349 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14350 have to do anything. Only do this when VALUE is not a constant,
14351 because in that case the check is more expensive than just
14352 doing the rounding. */
14353 if (TREE_CODE (value) != INTEGER_CST)
14354 {
14355 div = build_int_cst (TREE_TYPE (value), divisor);
14356
14357 if (multiple_of_p (TREE_TYPE (value), value, div))
14358 return value;
14359 }
14360
14361 /* If divisor is a power of two, simplify this to bit manipulation. */
14362 if (divisor == (divisor & -divisor))
14363 {
14364 if (TREE_CODE (value) == INTEGER_CST)
14365 {
14366 wide_int val = value;
14367 bool overflow_p;
14368
14369 if ((val & (divisor - 1)) == 0)
14370 return value;
14371
14372 overflow_p = TREE_OVERFLOW (value);
14373 val += divisor - 1;
14374 val &= - (int) divisor;
14375 if (val == 0)
14376 overflow_p = true;
14377
14378 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14379 }
14380 else
14381 {
14382 tree t;
14383
14384 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14385 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14386 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14387 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14388 }
14389 }
14390 else
14391 {
14392 if (!div)
14393 div = build_int_cst (TREE_TYPE (value), divisor);
14394 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14395 value = size_binop_loc (loc, MULT_EXPR, value, div);
14396 }
14397
14398 return value;
14399 }
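
/* Editorial sketch (not part of GCC): rounding the constant 37 up to
   a multiple of 8.  Since 8 is a power of two this takes the
   bit-manipulation path above: (37 + 7) & -8 == 40.  */

static tree
example_round_up (void)
{
  return round_up_loc (UNKNOWN_LOCATION, size_int (37), 8);
}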
14400
14401 /* Likewise, but round down. */
14402
14403 tree
14404 round_down_loc (location_t loc, tree value, int divisor)
14405 {
14406 tree div = NULL_TREE;
14407
14408 gcc_assert (divisor > 0);
14409 if (divisor == 1)
14410 return value;
14411
14412 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14413 have to do anything. Only do this when VALUE is not a constant,
14414 because in that case the check is more expensive than just
14415 doing the rounding. */
14416 if (TREE_CODE (value) != INTEGER_CST)
14417 {
14418 div = build_int_cst (TREE_TYPE (value), divisor);
14419
14420 if (multiple_of_p (TREE_TYPE (value), value, div))
14421 return value;
14422 }
14423
14424 /* If divisor is a power of two, simplify this to bit manipulation. */
14425 if (divisor == (divisor & -divisor))
14426 {
14427 tree t;
14428
14429 t = build_int_cst (TREE_TYPE (value), -divisor);
14430 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14431 }
14432 else
14433 {
14434 if (!div)
14435 div = build_int_cst (TREE_TYPE (value), divisor);
14436 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14437 value = size_binop_loc (loc, MULT_EXPR, value, div);
14438 }
14439
14440 return value;
14441 }
14442
14443 /* Returns the pointer to the base of the object addressed by EXP and
14444 extracts the information about the offset of the access, storing it
14445 to PBITPOS and POFFSET. */
14446
14447 static tree
14448 split_address_to_core_and_offset (tree exp,
14449 HOST_WIDE_INT *pbitpos, tree *poffset)
14450 {
14451 tree core;
14452 machine_mode mode;
14453 int unsignedp, reversep, volatilep;
14454 HOST_WIDE_INT bitsize;
14455 location_t loc = EXPR_LOCATION (exp);
14456
14457 if (TREE_CODE (exp) == ADDR_EXPR)
14458 {
14459 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14460 poffset, &mode, &unsignedp, &reversep,
14461 &volatilep, false);
14462 core = build_fold_addr_expr_loc (loc, core);
14463 }
14464 else
14465 {
14466 core = exp;
14467 *pbitpos = 0;
14468 *poffset = NULL_TREE;
14469 }
14470
14471 return core;
14472 }
14473
14474 /* Returns true if addresses of E1 and E2 differ by a constant, false
14475 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14476
14477 bool
14478 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14479 {
14480 tree core1, core2;
14481 HOST_WIDE_INT bitpos1, bitpos2;
14482 tree toffset1, toffset2, tdiff, type;
14483
14484 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14485 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14486
14487 if (bitpos1 % BITS_PER_UNIT != 0
14488 || bitpos2 % BITS_PER_UNIT != 0
14489 || !operand_equal_p (core1, core2, 0))
14490 return false;
14491
14492 if (toffset1 && toffset2)
14493 {
14494 type = TREE_TYPE (toffset1);
14495 if (type != TREE_TYPE (toffset2))
14496 toffset2 = fold_convert (type, toffset2);
14497
14498 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14499 if (!cst_and_fits_in_hwi (tdiff))
14500 return false;
14501
14502 *diff = int_cst_value (tdiff);
14503 }
14504 else if (toffset1 || toffset2)
14505 {
14506 /* If only one of the offsets is non-constant, the difference cannot
14507 be a constant. */
14508 return false;
14509 }
14510 else
14511 *diff = 0;
14512
14513 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14514 return true;
14515 }
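
/* Editorial sketch (not part of GCC): &arr[3] - &arr[1] for a
   hypothetical VAR_DECL ARR of array-of-char type.  Both addresses
   share the core ARR, so the difference is the constant 2.  */

static bool
example_ptr_difference_const (tree arr, HOST_WIDE_INT *diff)
{
  tree elt = TREE_TYPE (TREE_TYPE (arr));
  tree r3 = build4 (ARRAY_REF, elt, arr, size_int (3),
		    NULL_TREE, NULL_TREE);
  tree r1 = build4 (ARRAY_REF, elt, arr, size_int (1),
		    NULL_TREE, NULL_TREE);
  return ptr_difference_const (build_fold_addr_expr (r3),
			       build_fold_addr_expr (r1), diff);
}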
14516
14517 /* Return OFF converted to a pointer offset type suitable as offset for
14518 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14519 tree
14520 convert_to_ptrofftype_loc (location_t loc, tree off)
14521 {
14522 return fold_convert_loc (loc, sizetype, off);
14523 }
14524
14525 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14526 tree
14527 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14528 {
14529 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14530 ptr, convert_to_ptrofftype_loc (loc, off));
14531 }
14532
14533 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14534 tree
14535 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14536 {
14537 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14538 ptr, size_int (off));
14539 }
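
/* Editorial sketch (not part of GCC): offsetting a hypothetical
   pointer-typed expression PTR by four bytes.  The offset is
   converted to sizetype, as POINTER_PLUS_EXPR requires.  */

static tree
example_pointer_plus (tree ptr)
{
  return fold_build_pointer_plus_hwi_loc (UNKNOWN_LOCATION, ptr, 4);
}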
14540
14541 /* Return a char pointer for a C string if it is a string constant
14542 or sum of string constant and integer constant. */
14543
14544 const char *
14545 c_getstr (tree src)
14546 {
14547 tree offset_node;
14548
14549 src = string_constant (src, &offset_node);
14550 if (src == 0)
14551 return 0;
14552
14553 if (offset_node == 0)
14554 return TREE_STRING_POINTER (src);
14555 else if (!tree_fits_uhwi_p (offset_node)
14556 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14557 return 0;
14558
14559 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14560 }