gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
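
/* Illustrative note (a sketch added for exposition, not part of the
   original file): the LT, EQ and GT outcomes occupy bits 0..2 and UNORD
   bit 3, so combining two comparison codes with a bitwise operator gives
   the code of the combined predicate.  For example:

     (a < b) || (a == b)    =>  COMPCODE_LT | COMPCODE_EQ  ==  COMPCODE_LE
     (a <= b) && (a >= b)   =>  COMPCODE_LE & COMPCODE_GE  ==  COMPCODE_EQ

   This is the property that AND/OR transformations on comparisons rely
   on below.  */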

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
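
/* Usage sketch (added for exposition; build_int_cst and
   integer_type_node are existing GCC interfaces):

     tree twelve = build_int_cst (integer_type_node, 12);
     tree four = build_int_cst (integer_type_node, 4);
     div_if_zero_remainder (twelve, four)   => 3
     div_if_zero_remainder (four, twelve)   => NULL_TREE (4 % 12 != 0)  */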
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
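
/* Deferral protocol sketch (added for exposition, not part of the
   original file); a caller such as the loop iteration estimator brackets
   folding like this, issuing the deferred warning only when the folded
   result is actually used (EXPR and STMT are assumed to exist in the
   caller):

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     bool used = t != NULL_TREE && TREE_CODE (t) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   Passing 0 as CODE means "always use the deferred code", per the
   comment on fold_undefer_overflow_warnings above.  */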

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
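
/* Examples (sketch added for exposition): sin is odd, so -sin (x) can be
   folded to sin (-x); cos is even, so it must not be:

     negate_mathfn_p (BUILT_IN_SIN)   => true
     negate_mathfn_p (BUILT_IN_COS)   => false

   The rint/nearbyint entries above are only treated as odd when
   -frounding-math is off, since rounding toward +Inf or -Inf is not
   symmetric about zero.  */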

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type))
          || (INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && (! INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand makes
         it overflow when n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && ! integer_pow2p (TREE_OPERAND (t, 0)))
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
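
/* Examples (sketch added for exposition), for 32-bit int without
   -fwrapv:

     negate_expr_p (5)        => true, -5 is representable
     negate_expr_p (INT_MIN)  => false, -INT_MIN overflows
     negate_expr_p (-x)       => true, folds back to x

   where 5 and INT_MIN stand for the corresponding INTEGER_CSTs and -x
   for a NEGATE_EXPR.  */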

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
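
/* Worked example (sketch added for exposition): splitting IN = a - 5
   with CODE == PLUS_EXPR and NEGATE_P false gives

     return value   => a   (the variable part)
     *litp          => 0
     *minus_litp    => 5   (the literal was subtracted)
     *conp          => 0

   which lets a caller reassociate (a - 5) + b as (a + b) - 5.  */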

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
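
/* Usage sketch (added for exposition): combining two INTEGER_CSTs, with
   any overflow recorded via TREE_OVERFLOW on the result:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     int_const_binop (PLUS_EXPR, two, three)       => 5
     int_const_binop (TRUNC_DIV_EXPR, three, two)  => 1
     int_const_binop (TRUNC_DIV_EXPR, two, integer_zero_node)
                                                   => NULL_TREE  */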

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
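
/* Usage sketch for the type-taking overload (added for exposition, with
   TWO and THREE the INTEGER_CSTs from the previous sketch):

     const_binop (LT_EXPR, boolean_type_node, two, three)
       => boolean true node, dispatched to fold_relational_const
     const_binop (PLUS_EXPR, integer_type_node, two, three)
       => 5, handled by the worker above  */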

/* Compute CODE ARG0 with resulting type TYPE, ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
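
/* Example (sketch added for exposition): for a constant vector V of a
   hypothetical 4-element integer vector type with elements {1, 2, 3, 4},

     const_unop (REDUC_PLUS_EXPR, elt_type, V)  => 10
     const_unop (BIT_NOT_EXPR, vec_type, V)     => {~1, ~2, ~3, ~4}

   where elt_type and vec_type stand for the element and vector types;
   both cases fold element by element via const_binop/fold_not_const
   above.  */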

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
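
/* Usage sketch (added for exposition): callers normally go through the
   size_binop macro, which supplies UNKNOWN_LOCATION:

     tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (8));  => 12

   Note the -1 overflowable argument above: for sizetype constants,
   overflow is always recorded, even for unsigned types.  */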

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
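
/* Example (sketch added for exposition): sizetype is unsigned, so the
   difference is returned in ssizetype instead:

     size_diffop (size_int (4), size_int (8))  => -4 in ssizetype

   rather than the huge wrapped-around unsigned value.  */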
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification:
     IEEE NaNs are mapped to zero, and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards, which simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (real_less (&u, &r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
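
/* Examples (sketch added for exposition) of the saturating conversion
   implemented above, converting a REAL_CST to 32-bit int:

     3.7    =>  3          truncated toward zero
     NaN    =>  0          TREE_OVERFLOW set on the result
     1e30   =>  INT_MAX    saturated, TREE_OVERFLOW set
     -1e30  =>  INT_MIN    saturated, TREE_OVERFLOW set  */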
1892
1893 /* A subroutine of fold_convert_const handling conversions of a
1894 FIXED_CST to an integer type. */
1895
1896 static tree
1897 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1898 {
1899 tree t;
1900 double_int temp, temp_trunc;
1901 unsigned int mode;
1902
1903 /* Right shift FIXED_CST to temp by fbit. */
1904 temp = TREE_FIXED_CST (arg1).data;
1905 mode = TREE_FIXED_CST (arg1).mode;
1906 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1907 {
1908 temp = temp.rshift (GET_MODE_FBIT (mode),
1909 HOST_BITS_PER_DOUBLE_INT,
1910 SIGNED_FIXED_POINT_MODE_P (mode));
1911
1912 /* Left shift temp to temp_trunc by fbit. */
1913 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1914 HOST_BITS_PER_DOUBLE_INT,
1915 SIGNED_FIXED_POINT_MODE_P (mode));
1916 }
1917 else
1918 {
1919 temp = double_int_zero;
1920 temp_trunc = double_int_zero;
1921 }
1922
1923 /* If FIXED_CST is negative, we need to round the value toward 0:
1924 if any of the fractional bits are nonzero, add 1 to TEMP. */
1925 if (SIGNED_FIXED_POINT_MODE_P (mode)
1926 && temp_trunc.is_negative ()
1927 && TREE_FIXED_CST (arg1).data != temp_trunc)
1928 temp += double_int_one;
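  /* Illustration of the rounding above (values hypothetical): for a
     signed fixed-point constant -3.5, the arithmetic right shift by
     FBIT leaves TEMP == -4; shifting back gives TEMP_TRUNC == -4.0,
     which differs from the original data, so 1 is added and the
     result is -3, i.e. rounded toward zero.  */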
1929
1930 /* Given a fixed-point constant, make new constant with new type,
1931 appropriately sign-extended or truncated. */
1932 t = force_fit_type (type, temp, -1,
1933 (temp.is_negative ()
1934 && (TYPE_UNSIGNED (type)
1935 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1936 | TREE_OVERFLOW (arg1));
1937
1938 return t;
1939 }
1940
1941 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1942 to another floating point type. */
1943
1944 static tree
1945 fold_convert_const_real_from_real (tree type, const_tree arg1)
1946 {
1947 REAL_VALUE_TYPE value;
1948 tree t;
1949
1950 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1951 t = build_real (type, value);
1952
1953 /* If converting an infinity or NAN to a representation that doesn't
1954 have one, set the overflow bit so that we can produce some kind of
1955 error message at the appropriate point if necessary. It's not the
1956 most user-friendly message, but it's better than nothing. */
1957 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1958 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1959 TREE_OVERFLOW (t) = 1;
1960 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1961 && !MODE_HAS_NANS (TYPE_MODE (type)))
1962 TREE_OVERFLOW (t) = 1;
1963 /* Regular overflow, conversion produced an infinity in a mode that
1964 can't represent infinities. */
1965 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1966 && REAL_VALUE_ISINF (value)
1967 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1968 TREE_OVERFLOW (t) = 1;
1969 else
1970 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1971 return t;
1972 }
1973
1974 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1975 to a floating point type. */
1976
1977 static tree
1978 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1979 {
1980 REAL_VALUE_TYPE value;
1981 tree t;
1982
1983 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1984 t = build_real (type, value);
1985
1986 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1987 return t;
1988 }
1989
1990 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1991 to another fixed-point type. */
1992
1993 static tree
1994 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1995 {
1996 FIXED_VALUE_TYPE value;
1997 tree t;
1998 bool overflow_p;
1999
2000 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2001 TYPE_SATURATING (type));
2002 t = build_fixed (type, value);
2003
2004 /* Propagate overflow flags. */
2005 if (overflow_p | TREE_OVERFLOW (arg1))
2006 TREE_OVERFLOW (t) = 1;
2007 return t;
2008 }
2009
2010 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2011 to a fixed-point type. */
2012
2013 static tree
2014 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2015 {
2016 FIXED_VALUE_TYPE value;
2017 tree t;
2018 bool overflow_p;
2019 double_int di;
2020
2021 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2022
2023 di.low = TREE_INT_CST_ELT (arg1, 0);
2024 if (TREE_INT_CST_NUNITS (arg1) == 1)
2025 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2026 else
2027 di.high = TREE_INT_CST_ELT (arg1, 1);
2028
2029 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2030 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2031 TYPE_SATURATING (type));
2032 t = build_fixed (type, value);
2033
2034 /* Propagate overflow flags. */
2035 if (overflow_p | TREE_OVERFLOW (arg1))
2036 TREE_OVERFLOW (t) = 1;
2037 return t;
2038 }
2039
2040 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2041 to a fixed-point type. */
2042
2043 static tree
2044 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2045 {
2046 FIXED_VALUE_TYPE value;
2047 tree t;
2048 bool overflow_p;
2049
2050 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2051 &TREE_REAL_CST (arg1),
2052 TYPE_SATURATING (type));
2053 t = build_fixed (type, value);
2054
2055 /* Propagate overflow flags. */
2056 if (overflow_p | TREE_OVERFLOW (arg1))
2057 TREE_OVERFLOW (t) = 1;
2058 return t;
2059 }
2060
2061 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2062 type TYPE. If no simplification can be done return NULL_TREE. */
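/* For example (constants illustrative), fold_convert_const
   (FIX_TRUNC_EXPR, integer_type_node, <REAL_CST 5.9>) dispatches to
   fold_convert_const_int_from_real and yields the INTEGER_CST 5,
   while an INTEGER_CST converted to a narrower integer type is
   truncated by fold_convert_const_int_from_int.  */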
2063
2064 static tree
2065 fold_convert_const (enum tree_code code, tree type, tree arg1)
2066 {
2067 if (TREE_TYPE (arg1) == type)
2068 return arg1;
2069
2070 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2071 || TREE_CODE (type) == OFFSET_TYPE)
2072 {
2073 if (TREE_CODE (arg1) == INTEGER_CST)
2074 return fold_convert_const_int_from_int (type, arg1);
2075 else if (TREE_CODE (arg1) == REAL_CST)
2076 return fold_convert_const_int_from_real (code, type, arg1);
2077 else if (TREE_CODE (arg1) == FIXED_CST)
2078 return fold_convert_const_int_from_fixed (type, arg1);
2079 }
2080 else if (TREE_CODE (type) == REAL_TYPE)
2081 {
2082 if (TREE_CODE (arg1) == INTEGER_CST)
2083 return build_real_from_int_cst (type, arg1);
2084 else if (TREE_CODE (arg1) == REAL_CST)
2085 return fold_convert_const_real_from_real (type, arg1);
2086 else if (TREE_CODE (arg1) == FIXED_CST)
2087 return fold_convert_const_real_from_fixed (type, arg1);
2088 }
2089 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2090 {
2091 if (TREE_CODE (arg1) == FIXED_CST)
2092 return fold_convert_const_fixed_from_fixed (type, arg1);
2093 else if (TREE_CODE (arg1) == INTEGER_CST)
2094 return fold_convert_const_fixed_from_int (type, arg1);
2095 else if (TREE_CODE (arg1) == REAL_CST)
2096 return fold_convert_const_fixed_from_real (type, arg1);
2097 }
2098 return NULL_TREE;
2099 }
2100
2101 /* Construct a vector of zero elements of vector type TYPE. */
2102
2103 static tree
2104 build_zero_vector (tree type)
2105 {
2106 tree t;
2107
2108 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2109 return build_vector_from_val (type, t);
2110 }
2111
2112 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2113
2114 bool
2115 fold_convertible_p (const_tree type, const_tree arg)
2116 {
2117 tree orig = TREE_TYPE (arg);
2118
2119 if (type == orig)
2120 return true;
2121
2122 if (TREE_CODE (arg) == ERROR_MARK
2123 || TREE_CODE (type) == ERROR_MARK
2124 || TREE_CODE (orig) == ERROR_MARK)
2125 return false;
2126
2127 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2128 return true;
2129
2130 switch (TREE_CODE (type))
2131 {
2132 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2133 case POINTER_TYPE: case REFERENCE_TYPE:
2134 case OFFSET_TYPE:
2135 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2136 || TREE_CODE (orig) == OFFSET_TYPE)
2137 return true;
2138 return (TREE_CODE (orig) == VECTOR_TYPE
2139 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2140
2141 case REAL_TYPE:
2142 case FIXED_POINT_TYPE:
2143 case COMPLEX_TYPE:
2144 case VECTOR_TYPE:
2145 case VOID_TYPE:
2146 return TREE_CODE (type) == TREE_CODE (orig);
2147
2148 default:
2149 return false;
2150 }
2151 }
2152
2153 /* Convert expression ARG to type TYPE. Used by the middle-end for
2154 simple conversions in preference to calling the front-end's convert. */
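/* For example, an INTEGER_CST argument converted to an integral type
   folds immediately through fold_convert_const above, and a
   COMPLEX_TYPE argument converted to a REAL_TYPE target folds to the
   (suitably converted) REALPART_EXPR of the argument.  */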
2155
2156 tree
2157 fold_convert_loc (location_t loc, tree type, tree arg)
2158 {
2159 tree orig = TREE_TYPE (arg);
2160 tree tem;
2161
2162 if (type == orig)
2163 return arg;
2164
2165 if (TREE_CODE (arg) == ERROR_MARK
2166 || TREE_CODE (type) == ERROR_MARK
2167 || TREE_CODE (orig) == ERROR_MARK)
2168 return error_mark_node;
2169
2170 switch (TREE_CODE (type))
2171 {
2172 case POINTER_TYPE:
2173 case REFERENCE_TYPE:
2174 /* Handle conversions between pointers to different address spaces. */
2175 if (POINTER_TYPE_P (orig)
2176 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2177 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2178 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2179 /* fall through */
2180
2181 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2182 case OFFSET_TYPE:
2183 if (TREE_CODE (arg) == INTEGER_CST)
2184 {
2185 tem = fold_convert_const (NOP_EXPR, type, arg);
2186 if (tem != NULL_TREE)
2187 return tem;
2188 }
2189 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2190 || TREE_CODE (orig) == OFFSET_TYPE)
2191 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2192 if (TREE_CODE (orig) == COMPLEX_TYPE)
2193 return fold_convert_loc (loc, type,
2194 fold_build1_loc (loc, REALPART_EXPR,
2195 TREE_TYPE (orig), arg));
2196 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2197 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2198 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2199
2200 case REAL_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2202 {
2203 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2205 return tem;
2206 }
2207 else if (TREE_CODE (arg) == REAL_CST)
2208 {
2209 tem = fold_convert_const (NOP_EXPR, type, arg);
2210 if (tem != NULL_TREE)
2211 return tem;
2212 }
2213 else if (TREE_CODE (arg) == FIXED_CST)
2214 {
2215 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2216 if (tem != NULL_TREE)
2217 return tem;
2218 }
2219
2220 switch (TREE_CODE (orig))
2221 {
2222 case INTEGER_TYPE:
2223 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2224 case POINTER_TYPE: case REFERENCE_TYPE:
2225 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2226
2227 case REAL_TYPE:
2228 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2229
2230 case FIXED_POINT_TYPE:
2231 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2232
2233 case COMPLEX_TYPE:
2234 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2235 return fold_convert_loc (loc, type, tem);
2236
2237 default:
2238 gcc_unreachable ();
2239 }
2240
2241 case FIXED_POINT_TYPE:
2242 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2243 || TREE_CODE (arg) == REAL_CST)
2244 {
2245 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2246 if (tem != NULL_TREE)
2247 goto fold_convert_exit;
2248 }
2249
2250 switch (TREE_CODE (orig))
2251 {
2252 case FIXED_POINT_TYPE:
2253 case INTEGER_TYPE:
2254 case ENUMERAL_TYPE:
2255 case BOOLEAN_TYPE:
2256 case REAL_TYPE:
2257 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2258
2259 case COMPLEX_TYPE:
2260 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2261 return fold_convert_loc (loc, type, tem);
2262
2263 default:
2264 gcc_unreachable ();
2265 }
2266
2267 case COMPLEX_TYPE:
2268 switch (TREE_CODE (orig))
2269 {
2270 case INTEGER_TYPE:
2271 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2272 case POINTER_TYPE: case REFERENCE_TYPE:
2273 case REAL_TYPE:
2274 case FIXED_POINT_TYPE:
2275 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2276 fold_convert_loc (loc, TREE_TYPE (type), arg),
2277 fold_convert_loc (loc, TREE_TYPE (type),
2278 integer_zero_node));
2279 case COMPLEX_TYPE:
2280 {
2281 tree rpart, ipart;
2282
2283 if (TREE_CODE (arg) == COMPLEX_EXPR)
2284 {
2285 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2286 TREE_OPERAND (arg, 0));
2287 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2288 TREE_OPERAND (arg, 1));
2289 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2290 }
2291
2292 arg = save_expr (arg);
2293 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2295 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2296 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2297 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2298 }
2299
2300 default:
2301 gcc_unreachable ();
2302 }
2303
2304 case VECTOR_TYPE:
2305 if (integer_zerop (arg))
2306 return build_zero_vector (type);
2307 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2308 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2309 || TREE_CODE (orig) == VECTOR_TYPE);
2310 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2311
2312 case VOID_TYPE:
2313 tem = fold_ignored_result (arg);
2314 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2315
2316 default:
2317 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2318 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2319 gcc_unreachable ();
2320 }
2321 fold_convert_exit:
2322 protected_set_expr_location_unshare (tem, loc);
2323 return tem;
2324 }
2325 \f
2326 /* Return false if expr can be assumed not to be an lvalue, true
2327 otherwise. */
2328
2329 static bool
2330 maybe_lvalue_p (const_tree x)
2331 {
2332 /* We only need to wrap lvalue tree codes. */
2333 switch (TREE_CODE (x))
2334 {
2335 case VAR_DECL:
2336 case PARM_DECL:
2337 case RESULT_DECL:
2338 case LABEL_DECL:
2339 case FUNCTION_DECL:
2340 case SSA_NAME:
2341
2342 case COMPONENT_REF:
2343 case MEM_REF:
2344 case INDIRECT_REF:
2345 case ARRAY_REF:
2346 case ARRAY_RANGE_REF:
2347 case BIT_FIELD_REF:
2348 case OBJ_TYPE_REF:
2349
2350 case REALPART_EXPR:
2351 case IMAGPART_EXPR:
2352 case PREINCREMENT_EXPR:
2353 case PREDECREMENT_EXPR:
2354 case SAVE_EXPR:
2355 case TRY_CATCH_EXPR:
2356 case WITH_CLEANUP_EXPR:
2357 case COMPOUND_EXPR:
2358 case MODIFY_EXPR:
2359 case TARGET_EXPR:
2360 case COND_EXPR:
2361 case BIND_EXPR:
2362 break;
2363
2364 default:
2365 /* Assume the worst for front-end tree codes. */
2366 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2367 break;
2368 return false;
2369 }
2370
2371 return true;
2372 }
2373
2374 /* Return an expr equal to X but certainly not valid as an lvalue. */
2375
2376 tree
2377 non_lvalue_loc (location_t loc, tree x)
2378 {
2379 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2380 us. */
2381 if (in_gimple_form)
2382 return x;
2383
2384 if (! maybe_lvalue_p (x))
2385 return x;
2386 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2387 }
2388
2389 /* When pedantic, return an expr equal to X but certainly not valid as a
2390 pedantic lvalue. Otherwise, return X. */
2391
2392 static tree
2393 pedantic_non_lvalue_loc (location_t loc, tree x)
2394 {
2395 return protected_set_expr_location_unshare (x, loc);
2396 }
2397 \f
2398 /* Given a tree comparison code, return the code that is the logical inverse.
2399 It is generally not safe to do this for floating-point comparisons, except
2400 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2401 ERROR_MARK in this case. */
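/* For example, invert_tree_comparison (LT_EXPR, true) yields
   UNGE_EXPR, which is also true when either operand is a NaN; with
   honor_nans false the plain GE_EXPR is returned instead, and under
   flag_trapping_math a floating-point LT_EXPR cannot be inverted at
   all, so ERROR_MARK is returned.  */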
2402
2403 enum tree_code
2404 invert_tree_comparison (enum tree_code code, bool honor_nans)
2405 {
2406 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2407 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2408 return ERROR_MARK;
2409
2410 switch (code)
2411 {
2412 case EQ_EXPR:
2413 return NE_EXPR;
2414 case NE_EXPR:
2415 return EQ_EXPR;
2416 case GT_EXPR:
2417 return honor_nans ? UNLE_EXPR : LE_EXPR;
2418 case GE_EXPR:
2419 return honor_nans ? UNLT_EXPR : LT_EXPR;
2420 case LT_EXPR:
2421 return honor_nans ? UNGE_EXPR : GE_EXPR;
2422 case LE_EXPR:
2423 return honor_nans ? UNGT_EXPR : GT_EXPR;
2424 case LTGT_EXPR:
2425 return UNEQ_EXPR;
2426 case UNEQ_EXPR:
2427 return LTGT_EXPR;
2428 case UNGT_EXPR:
2429 return LE_EXPR;
2430 case UNGE_EXPR:
2431 return LT_EXPR;
2432 case UNLT_EXPR:
2433 return GE_EXPR;
2434 case UNLE_EXPR:
2435 return GT_EXPR;
2436 case ORDERED_EXPR:
2437 return UNORDERED_EXPR;
2438 case UNORDERED_EXPR:
2439 return ORDERED_EXPR;
2440 default:
2441 gcc_unreachable ();
2442 }
2443 }
2444
2445 /* Similar, but return the comparison that results if the operands are
2446 swapped. This is safe for floating-point. */
2447
2448 enum tree_code
2449 swap_tree_comparison (enum tree_code code)
2450 {
2451 switch (code)
2452 {
2453 case EQ_EXPR:
2454 case NE_EXPR:
2455 case ORDERED_EXPR:
2456 case UNORDERED_EXPR:
2457 case LTGT_EXPR:
2458 case UNEQ_EXPR:
2459 return code;
2460 case GT_EXPR:
2461 return LT_EXPR;
2462 case GE_EXPR:
2463 return LE_EXPR;
2464 case LT_EXPR:
2465 return GT_EXPR;
2466 case LE_EXPR:
2467 return GE_EXPR;
2468 case UNGT_EXPR:
2469 return UNLT_EXPR;
2470 case UNGE_EXPR:
2471 return UNLE_EXPR;
2472 case UNLT_EXPR:
2473 return UNGT_EXPR;
2474 case UNLE_EXPR:
2475 return UNGE_EXPR;
2476 default:
2477 gcc_unreachable ();
2478 }
2479 }
2480
2481
2482 /* Convert a comparison tree code from an enum tree_code representation
2483 into a compcode bit-based encoding. This function is the inverse of
2484 compcode_to_comparison. */
2485
2486 static enum comparison_code
2487 comparison_to_compcode (enum tree_code code)
2488 {
2489 switch (code)
2490 {
2491 case LT_EXPR:
2492 return COMPCODE_LT;
2493 case EQ_EXPR:
2494 return COMPCODE_EQ;
2495 case LE_EXPR:
2496 return COMPCODE_LE;
2497 case GT_EXPR:
2498 return COMPCODE_GT;
2499 case NE_EXPR:
2500 return COMPCODE_NE;
2501 case GE_EXPR:
2502 return COMPCODE_GE;
2503 case ORDERED_EXPR:
2504 return COMPCODE_ORD;
2505 case UNORDERED_EXPR:
2506 return COMPCODE_UNORD;
2507 case UNLT_EXPR:
2508 return COMPCODE_UNLT;
2509 case UNEQ_EXPR:
2510 return COMPCODE_UNEQ;
2511 case UNLE_EXPR:
2512 return COMPCODE_UNLE;
2513 case UNGT_EXPR:
2514 return COMPCODE_UNGT;
2515 case LTGT_EXPR:
2516 return COMPCODE_LTGT;
2517 case UNGE_EXPR:
2518 return COMPCODE_UNGE;
2519 default:
2520 gcc_unreachable ();
2521 }
2522 }
2523
2524 /* Convert a compcode bit-based encoding of a comparison operator back
2525 to GCC's enum tree_code representation. This function is the
2526 inverse of comparison_to_compcode. */
2527
2528 static enum tree_code
2529 compcode_to_comparison (enum comparison_code code)
2530 {
2531 switch (code)
2532 {
2533 case COMPCODE_LT:
2534 return LT_EXPR;
2535 case COMPCODE_EQ:
2536 return EQ_EXPR;
2537 case COMPCODE_LE:
2538 return LE_EXPR;
2539 case COMPCODE_GT:
2540 return GT_EXPR;
2541 case COMPCODE_NE:
2542 return NE_EXPR;
2543 case COMPCODE_GE:
2544 return GE_EXPR;
2545 case COMPCODE_ORD:
2546 return ORDERED_EXPR;
2547 case COMPCODE_UNORD:
2548 return UNORDERED_EXPR;
2549 case COMPCODE_UNLT:
2550 return UNLT_EXPR;
2551 case COMPCODE_UNEQ:
2552 return UNEQ_EXPR;
2553 case COMPCODE_UNLE:
2554 return UNLE_EXPR;
2555 case COMPCODE_UNGT:
2556 return UNGT_EXPR;
2557 case COMPCODE_LTGT:
2558 return LTGT_EXPR;
2559 case COMPCODE_UNGE:
2560 return UNGE_EXPR;
2561 default:
2562 gcc_unreachable ();
2563 }
2564 }
2565
2566 /* Return a tree for the comparison which is the combination of
2567 doing the AND or OR (depending on CODE) of the two operations LCODE
2568 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2569 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2570 if this makes the transformation invalid. */
2571
2572 tree
2573 combine_comparisons (location_t loc,
2574 enum tree_code code, enum tree_code lcode,
2575 enum tree_code rcode, tree truth_type,
2576 tree ll_arg, tree lr_arg)
2577 {
2578 bool honor_nans = HONOR_NANS (ll_arg);
2579 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2580 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2581 int compcode;
2582
2583 switch (code)
2584 {
2585 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2586 compcode = lcompcode & rcompcode;
2587 break;
2588
2589 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2590 compcode = lcompcode | rcompcode;
2591 break;
2592
2593 default:
2594 return NULL_TREE;
2595 }
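  /* A worked example of the bit-based encoding: for
     (x < y) || (x == y), lcompcode is COMPCODE_LT and rcompcode is
     COMPCODE_EQ, so compcode becomes COMPCODE_LE and the result below
     is x <= y; with TRUTH_AND_EXPR the same operands combine to
     COMPCODE_FALSE, which folds to a constant false node.  */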
2596
2597 if (!honor_nans)
2598 {
2599 /* Eliminate unordered comparisons, as well as LTGT and ORD
2600 which are not used unless the mode has NaNs. */
2601 compcode &= ~COMPCODE_UNORD;
2602 if (compcode == COMPCODE_LTGT)
2603 compcode = COMPCODE_NE;
2604 else if (compcode == COMPCODE_ORD)
2605 compcode = COMPCODE_TRUE;
2606 }
2607 else if (flag_trapping_math)
2608 {
2609 /* Check that the original operation and the optimized ones will trap
2610 under the same condition. */
2611 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2612 && (lcompcode != COMPCODE_EQ)
2613 && (lcompcode != COMPCODE_ORD);
2614 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2615 && (rcompcode != COMPCODE_EQ)
2616 && (rcompcode != COMPCODE_ORD);
2617 bool trap = (compcode & COMPCODE_UNORD) == 0
2618 && (compcode != COMPCODE_EQ)
2619 && (compcode != COMPCODE_ORD);
2620
2621 /* In a short-circuited boolean expression the LHS might be
2622 such that the RHS, if evaluated, will never trap. For
2623 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2624 if neither x nor y is NaN. (This is a mixed blessing: for
2625 example, the expression above will never trap, hence
2626 optimizing it to x < y would be invalid). */
2627 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2628 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2629 rtrap = false;
2630
2631 /* If the comparison was short-circuited, and only the RHS
2632 trapped, we may now generate a spurious trap. */
2633 if (rtrap && !ltrap
2634 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2635 return NULL_TREE;
2636
2637 /* If we changed the conditions that cause a trap, we lose. */
2638 if ((ltrap || rtrap) != trap)
2639 return NULL_TREE;
2640 }
2641
2642 if (compcode == COMPCODE_TRUE)
2643 return constant_boolean_node (true, truth_type);
2644 else if (compcode == COMPCODE_FALSE)
2645 return constant_boolean_node (false, truth_type);
2646 else
2647 {
2648 enum tree_code tcode;
2649
2650 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2651 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2652 }
2653 }
2654 \f
2655 /* Return nonzero if two operands (typically of the same tree node)
2656 are necessarily equal. FLAGS modifies behavior as follows:
2657
2658 If OEP_ONLY_CONST is set, only return nonzero for constants.
2659 This function tests whether the operands are indistinguishable;
2660 it does not test whether they are equal using C's == operation.
2661 The distinction is important for IEEE floating point, because
2662 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2663 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2664
2665 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2666 even though it may hold multiple values during a function.
2667 This is because a GCC tree node guarantees that nothing else is
2668 executed between the evaluation of its "operands" (which may often
2669 be evaluated in arbitrary order). Hence if the operands themselves
2670 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2671 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2672 unset means assuming isochronic (or instantaneous) tree equivalence.
2673 Unless comparing arbitrary expression trees, such as from different
2674 statements, this flag can usually be left unset.
2675
2676 If OEP_PURE_SAME is set, then pure functions with identical arguments
2677 are considered the same. It is used when the caller has other ways
2678 to ensure that global memory is unchanged in between.
2679
2680 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2681 not values of expressions.
2682
2683 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2684 any operand with side effects. This is unnecessarily conservative in the
2685 case we know that arg0 and arg1 are in disjoint code paths (such as in
2686 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2687 addresses with TREE_CONSTANT flag set so we know that &var == &var
2688 even if var is volatile. */
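/* For instance, two PLUS_EXPR trees whose operands are merely swapped
   compare equal because PLUS_EXPR is commutative (see the tcc_binary
   case below), while two textually identical CALL_EXPRs to a function
   that is neither ECF_CONST nor ECF_PURE compare unequal.  */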
2689
2690 int
2691 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2692 {
2693 /* If either is ERROR_MARK, they aren't equal. */
2694 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2695 || TREE_TYPE (arg0) == error_mark_node
2696 || TREE_TYPE (arg1) == error_mark_node)
2697 return 0;
2698
2699 /* Similar, if either does not have a type (like a released SSA name),
2700 they aren't equal. */
2701 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2702 return 0;
2703
2704 /* We cannot consider pointers to different address space equal. */
2705 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2706 && POINTER_TYPE_P (TREE_TYPE (arg1))
2707 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2708 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2709 return 0;
2710
2711 /* Check equality of integer constants before bailing out due to
2712 precision differences. */
2713 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2714 {
2715 /* Address of INTEGER_CST is not defined; check that we did not forget
2716 to drop the OEP_ADDRESS_OF flag. */
2717 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2718 return tree_int_cst_equal (arg0, arg1);
2719 }
2720
2721 if (!(flags & OEP_ADDRESS_OF))
2722 {
2723 /* If both types don't have the same signedness, then we can't consider
2724 them equal. We must check this before the STRIP_NOPS calls
2725 because they may change the signedness of the arguments. As pointers
2726 strictly don't have a signedness, require either two pointers or
2727 two non-pointers as well. */
2728 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2729 || POINTER_TYPE_P (TREE_TYPE (arg0))
2730 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2731 return 0;
2732
2733 /* If both types don't have the same precision, then it is not safe
2734 to strip NOPs. */
2735 if (element_precision (TREE_TYPE (arg0))
2736 != element_precision (TREE_TYPE (arg1)))
2737 return 0;
2738
2739 STRIP_NOPS (arg0);
2740 STRIP_NOPS (arg1);
2741 }
2742 #if 0
2743 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2744 sanity check once the issue is solved. */
2745 else
2746 /* Addresses of conversions and SSA_NAMEs (and many other things)
2747 are not defined. Check that we did not forget to drop the
2748 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2749 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2750 && TREE_CODE (arg0) != SSA_NAME);
2751 #endif
2752
2753 /* In case both args are comparisons but with different comparison
2754 code, try to swap the comparison operands of one arg to produce
2755 a match and compare that variant. */
2756 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2757 && COMPARISON_CLASS_P (arg0)
2758 && COMPARISON_CLASS_P (arg1))
2759 {
2760 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2761
2762 if (TREE_CODE (arg0) == swap_code)
2763 return operand_equal_p (TREE_OPERAND (arg0, 0),
2764 TREE_OPERAND (arg1, 1), flags)
2765 && operand_equal_p (TREE_OPERAND (arg0, 1),
2766 TREE_OPERAND (arg1, 0), flags);
2767 }
2768
2769 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2770 {
2771 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2772 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2773 ;
2774 else if (flags & OEP_ADDRESS_OF)
2775 {
2776 /* If we are interested in comparing addresses, ignore
2777 MEM_REF wrappings of the base that can appear just for
2778 TBAA reasons. */
2779 if (TREE_CODE (arg0) == MEM_REF
2780 && DECL_P (arg1)
2781 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2782 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2783 && integer_zerop (TREE_OPERAND (arg0, 1)))
2784 return 1;
2785 else if (TREE_CODE (arg1) == MEM_REF
2786 && DECL_P (arg0)
2787 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2788 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2789 && integer_zerop (TREE_OPERAND (arg1, 1)))
2790 return 1;
2791 return 0;
2792 }
2793 else
2794 return 0;
2795 }
2796
2797 /* When not checking addresses, this is needed for conversions and for
2798 COMPONENT_REF. Might as well play it safe and always test this. */
2799 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2800 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2801 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2802 && !(flags & OEP_ADDRESS_OF)))
2803 return 0;
2804
2805 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2806 We don't care about side effects in that case because the SAVE_EXPR
2807 takes care of that for us. In all other cases, two expressions are
2808 equal if they have no side effects. If we have two identical
2809 expressions with side effects that should be treated the same due
2810 to the only side effects being identical SAVE_EXPR's, that will
2811 be detected in the recursive calls below.
2812 If we are taking an invariant address of two identical objects
2813 they are necessarily equal as well. */
2814 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2815 && (TREE_CODE (arg0) == SAVE_EXPR
2816 || (flags & OEP_MATCH_SIDE_EFFECTS)
2817 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2818 return 1;
2819
2820 /* Next handle constant cases, those for which we can return 1 even
2821 if ONLY_CONST is set. */
2822 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2823 switch (TREE_CODE (arg0))
2824 {
2825 case INTEGER_CST:
2826 return tree_int_cst_equal (arg0, arg1);
2827
2828 case FIXED_CST:
2829 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2830 TREE_FIXED_CST (arg1));
2831
2832 case REAL_CST:
2833 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2834 return 1;
2835
2836
2837 if (!HONOR_SIGNED_ZEROS (arg0))
2838 {
2839 /* If we do not distinguish between signed and unsigned zero,
2840 consider them equal. */
2841 if (real_zerop (arg0) && real_zerop (arg1))
2842 return 1;
2843 }
2844 return 0;
2845
2846 case VECTOR_CST:
2847 {
2848 unsigned i;
2849
2850 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2851 return 0;
2852
2853 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2854 {
2855 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2856 VECTOR_CST_ELT (arg1, i), flags))
2857 return 0;
2858 }
2859 return 1;
2860 }
2861
2862 case COMPLEX_CST:
2863 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2864 flags)
2865 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2866 flags));
2867
2868 case STRING_CST:
2869 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2870 && ! memcmp (TREE_STRING_POINTER (arg0),
2871 TREE_STRING_POINTER (arg1),
2872 TREE_STRING_LENGTH (arg0)));
2873
2874 case ADDR_EXPR:
2875 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2876 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2877 flags | OEP_ADDRESS_OF
2878 | OEP_MATCH_SIDE_EFFECTS);
2879 case CONSTRUCTOR:
2880 /* In GIMPLE empty constructors are allowed in initializers of
2881 aggregates. */
2882 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2883 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2884 default:
2885 break;
2886 }
2887
2888 if (flags & OEP_ONLY_CONST)
2889 return 0;
2890
2891 /* Define macros to test an operand from arg0 and arg1 for equality and a
2892 variant that allows null and views null as being different from any
2893 non-null value. In the latter case, if either is null, then both
2894 must be; otherwise, do the normal comparison. */
2895 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2896 TREE_OPERAND (arg1, N), flags)
2897
2898 #define OP_SAME_WITH_NULL(N) \
2899 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2900 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2901
2902 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2903 {
2904 case tcc_unary:
2905 /* Two conversions are equal only if signedness and modes match. */
2906 switch (TREE_CODE (arg0))
2907 {
2908 CASE_CONVERT:
2909 case FIX_TRUNC_EXPR:
2910 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2911 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2912 return 0;
2913 break;
2914 default:
2915 break;
2916 }
2917
2918 return OP_SAME (0);
2919
2920
2921 case tcc_comparison:
2922 case tcc_binary:
2923 if (OP_SAME (0) && OP_SAME (1))
2924 return 1;
2925
2926 /* For commutative ops, allow the other order. */
2927 return (commutative_tree_code (TREE_CODE (arg0))
2928 && operand_equal_p (TREE_OPERAND (arg0, 0),
2929 TREE_OPERAND (arg1, 1), flags)
2930 && operand_equal_p (TREE_OPERAND (arg0, 1),
2931 TREE_OPERAND (arg1, 0), flags));
2932
2933 case tcc_reference:
2934 /* If either of the pointer (or reference) expressions we are
2935 dereferencing contains a side effect, these cannot be equal,
2936 but their addresses can be. */
2937 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2938 && (TREE_SIDE_EFFECTS (arg0)
2939 || TREE_SIDE_EFFECTS (arg1)))
2940 return 0;
2941
2942 switch (TREE_CODE (arg0))
2943 {
2944 case INDIRECT_REF:
2945 if (!(flags & OEP_ADDRESS_OF)
2946 && (TYPE_ALIGN (TREE_TYPE (arg0))
2947 != TYPE_ALIGN (TREE_TYPE (arg1))))
2948 return 0;
2949 flags &= ~OEP_ADDRESS_OF;
2950 return OP_SAME (0);
2951
2952 case REALPART_EXPR:
2953 case IMAGPART_EXPR:
2954 case VIEW_CONVERT_EXPR:
2955 return OP_SAME (0);
2956
2957 case TARGET_MEM_REF:
2958 case MEM_REF:
2959 if (!(flags & OEP_ADDRESS_OF))
2960 {
2961 /* Require equal access sizes. */
2962 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2963 && (!TYPE_SIZE (TREE_TYPE (arg0))
2964 || !TYPE_SIZE (TREE_TYPE (arg1))
2965 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2966 TYPE_SIZE (TREE_TYPE (arg1)),
2967 flags)))
2968 return 0;
2969 /* Verify that accesses are TBAA compatible. */
2970 if (flag_strict_aliasing
2971 && (!alias_ptr_types_compatible_p
2972 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2973 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2974 || (MR_DEPENDENCE_CLIQUE (arg0)
2975 != MR_DEPENDENCE_CLIQUE (arg1))
2976 || (MR_DEPENDENCE_BASE (arg0)
2977 != MR_DEPENDENCE_BASE (arg1))))
2978 return 0;
2979 /* Verify that alignment is compatible. */
2980 if (TYPE_ALIGN (TREE_TYPE (arg0))
2981 != TYPE_ALIGN (TREE_TYPE (arg1)))
2982 return 0;
2983 }
2984 flags &= ~OEP_ADDRESS_OF;
2985 return (OP_SAME (0) && OP_SAME (1)
2986 /* TARGET_MEM_REFs require equal extra operands. */
2987 && (TREE_CODE (arg0) != TARGET_MEM_REF
2988 || (OP_SAME_WITH_NULL (2)
2989 && OP_SAME_WITH_NULL (3)
2990 && OP_SAME_WITH_NULL (4))));
2991
2992 case ARRAY_REF:
2993 case ARRAY_RANGE_REF:
2994 /* Operands 2 and 3 may be null.
2995 Compare the array index by value first when it is constant, as
2996 the indexes may have different types but the same value here. */
2997 if (!OP_SAME (0))
2998 return 0;
2999 flags &= ~OEP_ADDRESS_OF;
3000 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3001 TREE_OPERAND (arg1, 1))
3002 || OP_SAME (1))
3003 && OP_SAME_WITH_NULL (2)
3004 && OP_SAME_WITH_NULL (3));
3005
3006 case COMPONENT_REF:
3007 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3008 may be NULL when we're called to compare MEM_EXPRs. */
3009 if (!OP_SAME_WITH_NULL (0)
3010 || !OP_SAME (1))
3011 return 0;
3012 flags &= ~OEP_ADDRESS_OF;
3013 return OP_SAME_WITH_NULL (2);
3014
3015 case BIT_FIELD_REF:
3016 if (!OP_SAME (0))
3017 return 0;
3018 flags &= ~OEP_ADDRESS_OF;
3019 return OP_SAME (1) && OP_SAME (2);
3020
3021 default:
3022 return 0;
3023 }
3024
3025 case tcc_expression:
3026 switch (TREE_CODE (arg0))
3027 {
3028 case ADDR_EXPR:
3029 /* Be sure we pass the right ADDRESS_OF flag. */
3030 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3031 return operand_equal_p (TREE_OPERAND (arg0, 0),
3032 TREE_OPERAND (arg1, 0),
3033 flags | OEP_ADDRESS_OF);
3034
3035 case TRUTH_NOT_EXPR:
3036 return OP_SAME (0);
3037
3038 case TRUTH_ANDIF_EXPR:
3039 case TRUTH_ORIF_EXPR:
3040 return OP_SAME (0) && OP_SAME (1);
3041
3042 case FMA_EXPR:
3043 case WIDEN_MULT_PLUS_EXPR:
3044 case WIDEN_MULT_MINUS_EXPR:
3045 if (!OP_SAME (2))
3046 return 0;
3047 /* The multiplication operands are commutative. */
3048 /* FALLTHRU */
3049
3050 case TRUTH_AND_EXPR:
3051 case TRUTH_OR_EXPR:
3052 case TRUTH_XOR_EXPR:
3053 if (OP_SAME (0) && OP_SAME (1))
3054 return 1;
3055
3056 /* Otherwise take into account this is a commutative operation. */
3057 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3058 TREE_OPERAND (arg1, 1), flags)
3059 && operand_equal_p (TREE_OPERAND (arg0, 1),
3060 TREE_OPERAND (arg1, 0), flags));
3061
3062 case COND_EXPR:
3063 case VEC_COND_EXPR:
3064 case DOT_PROD_EXPR:
3065 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3066
3067 default:
3068 return 0;
3069 }
3070
3071 case tcc_vl_exp:
3072 switch (TREE_CODE (arg0))
3073 {
3074 case CALL_EXPR:
3075 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3076 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3077 /* If one CALL_EXPR is internal and the other is a normal function
3078 call, then they are not equal. */
3079 return 0;
3080 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3081 {
3082 /* If the CALL_EXPRs call different internal functions, then they
3083 are not equal. */
3084 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3085 return 0;
3086 }
3087 else
3088 {
3089 /* If the CALL_EXPRs call different functions, then they are not
3090 equal. */
3091 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3092 flags))
3093 return 0;
3094 }
3095
3096 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3097 {
3098 unsigned int cef = call_expr_flags (arg0);
3099 if (flags & OEP_PURE_SAME)
3100 cef &= ECF_CONST | ECF_PURE;
3101 else
3102 cef &= ECF_CONST;
3103 if (!cef)
3104 return 0;
3105 }
3106
3107 /* Now see if all the arguments are the same. */
3108 {
3109 const_call_expr_arg_iterator iter0, iter1;
3110 const_tree a0, a1;
3111 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3112 a1 = first_const_call_expr_arg (arg1, &iter1);
3113 a0 && a1;
3114 a0 = next_const_call_expr_arg (&iter0),
3115 a1 = next_const_call_expr_arg (&iter1))
3116 if (! operand_equal_p (a0, a1, flags))
3117 return 0;
3118
3119 /* If we get here and both argument lists are exhausted
3120 then the CALL_EXPRs are equal. */
3121 return ! (a0 || a1);
3122 }
3123 default:
3124 return 0;
3125 }
3126
3127 case tcc_declaration:
3128 /* Consider __builtin_sqrt equal to sqrt. */
3129 return (TREE_CODE (arg0) == FUNCTION_DECL
3130 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3131 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3132 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3133
3134 case tcc_exceptional:
3135 if (TREE_CODE (arg0) == CONSTRUCTOR)
3136 {
3137 /* In GIMPLE constructors are used only to build vectors from
3138 elements. Individual elements in the constructor must be
3139 indexed in increasing order and form an initial sequence.
3140
3141 We make no effort to compare constructors in GENERIC.
3142 (see sem_variable::equals in ipa-icf which can do so for
3143 constants). */
3144 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3145 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3146 return 0;
3147
3148 /* Be sure that vectors constructed have the same representation.
3149 So far we have only checked that element precision and modes
3150 match. Vectors may be BLKmode, so also check that the number
3151 of parts matches. */
3152 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3153 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3154 return 0;
3155
3156 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3157 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3158 unsigned int len = vec_safe_length (v0);
3159
3160 if (len != vec_safe_length (v1))
3161 return 0;
3162
3163 for (unsigned int i = 0; i < len; i++)
3164 {
3165 constructor_elt *c0 = &(*v0)[i];
3166 constructor_elt *c1 = &(*v1)[i];
3167
3168 if (!operand_equal_p (c0->value, c1->value, flags)
3169 /* In GIMPLE the indexes can be either NULL or matching i.
3170 Double check this so we won't get false
3171 positives for GENERIC. */
3172 || (c0->index
3173 && (TREE_CODE (c0->index) != INTEGER_CST
3174 || !compare_tree_int (c0->index, i)))
3175 || (c1->index
3176 && (TREE_CODE (c1->index) != INTEGER_CST
3177 || !compare_tree_int (c1->index, i))))
3178 return 0;
3179 }
3180 return 1;
3181 }
3182 return 0;
3183
3184 default:
3185 return 0;
3186 }
3187
3188 #undef OP_SAME
3189 #undef OP_SAME_WITH_NULL
3190 }
3191 \f
3192 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3193 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3194
3195 When in doubt, return 0. */
3196
3197 static int
3198 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3199 {
3200 int unsignedp1, unsignedpo;
3201 tree primarg0, primarg1, primother;
3202 unsigned int correct_width;
3203
3204 if (operand_equal_p (arg0, arg1, 0))
3205 return 1;
3206
3207 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3208 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3209 return 0;
3210
3211 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3212 and see if the inner values are the same. This removes any
3213 signedness comparison, which doesn't matter here. */
3214 primarg0 = arg0, primarg1 = arg1;
3215 STRIP_NOPS (primarg0);
3216 STRIP_NOPS (primarg1);
3217 if (operand_equal_p (primarg0, primarg1, 0))
3218 return 1;
3219
3220 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3221 actual comparison operand, ARG0.
3222
3223 First throw away any conversions to wider types
3224 already present in the operands. */
3225
3226 primarg1 = get_narrower (arg1, &unsignedp1);
3227 primother = get_narrower (other, &unsignedpo);
3228
3229 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3230 if (unsignedp1 == unsignedpo
3231 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3232 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3233 {
3234 tree type = TREE_TYPE (arg0);
3235
3236 /* Make sure shorter operand is extended the right way
3237 to match the longer operand. */
3238 primarg1 = fold_convert (signed_or_unsigned_type_for
3239 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3240
3241 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3242 return 1;
3243 }
3244
3245 return 0;
3246 }
3247 \f
3248 /* See if ARG is an expression that is either a comparison or is performing
3249 arithmetic on comparisons. The comparisons must only be comparing
3250 two different values, which will be stored in *CVAL1 and *CVAL2; if
3251 they are nonzero it means that some operands have already been found.
3252 No variables may be used anywhere else in the expression except in the
3253 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3254 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3255
3256 If this is true, return 1. Otherwise, return zero. */
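/* For example, for (a < b) && (b > a) the walk below records
   *CVAL1 = a and *CVAL2 = b and returns 1, whereas
   (a < b) && (c < d) fails as soon as a third distinct value is
   encountered.  */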
3257
3258 static int
3259 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3260 {
3261 enum tree_code code = TREE_CODE (arg);
3262 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3263
3264 /* We can handle some of the tcc_expression cases here. */
3265 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3266 tclass = tcc_unary;
3267 else if (tclass == tcc_expression
3268 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3269 || code == COMPOUND_EXPR))
3270 tclass = tcc_binary;
3271
3272 else if (tclass == tcc_expression && code == SAVE_EXPR
3273 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3274 {
3275 /* If we've already found a CVAL1 or CVAL2, this expression is
3276 too complex to handle. */
3277 if (*cval1 || *cval2)
3278 return 0;
3279
3280 tclass = tcc_unary;
3281 *save_p = 1;
3282 }
3283
3284 switch (tclass)
3285 {
3286 case tcc_unary:
3287 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3288
3289 case tcc_binary:
3290 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3291 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3292 cval1, cval2, save_p));
3293
3294 case tcc_constant:
3295 return 1;
3296
3297 case tcc_expression:
3298 if (code == COND_EXPR)
3299 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3300 cval1, cval2, save_p)
3301 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3302 cval1, cval2, save_p)
3303 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3304 cval1, cval2, save_p));
3305 return 0;
3306
3307 case tcc_comparison:
3308 /* First see if we can handle the first operand, then the second. For
3309 the second operand, we know *CVAL1 can't be zero. Each of the two
3310 values must appear on one side of the comparison; test for the
3311 case where this isn't true by failing if the two operands
3312 are the same. */
3313
3314 if (operand_equal_p (TREE_OPERAND (arg, 0),
3315 TREE_OPERAND (arg, 1), 0))
3316 return 0;
3317
3318 if (*cval1 == 0)
3319 *cval1 = TREE_OPERAND (arg, 0);
3320 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3321 ;
3322 else if (*cval2 == 0)
3323 *cval2 = TREE_OPERAND (arg, 0);
3324 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3325 ;
3326 else
3327 return 0;
3328
3329 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3330 ;
3331 else if (*cval2 == 0)
3332 *cval2 = TREE_OPERAND (arg, 1);
3333 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3334 ;
3335 else
3336 return 0;
3337
3338 return 1;
3339
3340 default:
3341 return 0;
3342 }
3343 }
3344 \f
3345 /* ARG is a tree that is known to contain just arithmetic operations and
3346 comparisons. Evaluate the operations in the tree substituting NEW0 for
3347 any occurrence of OLD0 as an operand of a comparison and likewise for
3348 NEW1 and OLD1. */
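/* For instance (operands illustrative), applying eval_subst to
   a < b ? x : y with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1 rebuilds
   it as 0 < 1 ? x : y, which the fold_build3_loc call below can
   reduce further.  */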
3349
3350 static tree
3351 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3352 tree old1, tree new1)
3353 {
3354 tree type = TREE_TYPE (arg);
3355 enum tree_code code = TREE_CODE (arg);
3356 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3357
3358 /* We can handle some of the tcc_expression cases here. */
3359 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3360 tclass = tcc_unary;
3361 else if (tclass == tcc_expression
3362 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3363 tclass = tcc_binary;
3364
3365 switch (tclass)
3366 {
3367 case tcc_unary:
3368 return fold_build1_loc (loc, code, type,
3369 eval_subst (loc, TREE_OPERAND (arg, 0),
3370 old0, new0, old1, new1));
3371
3372 case tcc_binary:
3373 return fold_build2_loc (loc, code, type,
3374 eval_subst (loc, TREE_OPERAND (arg, 0),
3375 old0, new0, old1, new1),
3376 eval_subst (loc, TREE_OPERAND (arg, 1),
3377 old0, new0, old1, new1));
3378
3379 case tcc_expression:
3380 switch (code)
3381 {
3382 case SAVE_EXPR:
3383 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3384 old1, new1);
3385
3386 case COMPOUND_EXPR:
3387 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3388 old1, new1);
3389
3390 case COND_EXPR:
3391 return fold_build3_loc (loc, code, type,
3392 eval_subst (loc, TREE_OPERAND (arg, 0),
3393 old0, new0, old1, new1),
3394 eval_subst (loc, TREE_OPERAND (arg, 1),
3395 old0, new0, old1, new1),
3396 eval_subst (loc, TREE_OPERAND (arg, 2),
3397 old0, new0, old1, new1));
3398 default:
3399 break;
3400 }
3401 /* Fall through - ??? */
3402
3403 case tcc_comparison:
3404 {
3405 tree arg0 = TREE_OPERAND (arg, 0);
3406 tree arg1 = TREE_OPERAND (arg, 1);
3407
3408 /* We need to check both for exact equality and tree equality. The
3409 former will be true if the operand has a side-effect. In that
3410 case, we know the operand occurred exactly once. */
3411
3412 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3413 arg0 = new0;
3414 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3415 arg0 = new1;
3416
3417 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3418 arg1 = new0;
3419 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3420 arg1 = new1;
3421
3422 return fold_build2_loc (loc, code, type, arg0, arg1);
3423 }
3424
3425 default:
3426 return arg;
3427 }
3428 }
3429 \f
3430 /* Return a tree for the case when the result of an expression is RESULT
3431 converted to TYPE and OMITTED was previously an operand of the expression
3432 but is now not needed (e.g., we folded OMITTED * 0).
3433
3434 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3435 the conversion of RESULT to TYPE. */
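/* For example, when x * 0 is folded and x is a call with side
   effects, omit_one_operand_loc (loc, type, integer_zero_node, x)
   yields the COMPOUND_EXPR (x, 0) so that the call is still
   evaluated.  */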
3436
3437 tree
3438 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3439 {
3440 tree t = fold_convert_loc (loc, type, result);
3441
3442 /* If the resulting operand is an empty statement, just return the omitted
3443 statement cast to void. */
3444 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3445 return build1_loc (loc, NOP_EXPR, void_type_node,
3446 fold_ignored_result (omitted));
3447
3448 if (TREE_SIDE_EFFECTS (omitted))
3449 return build2_loc (loc, COMPOUND_EXPR, type,
3450 fold_ignored_result (omitted), t);
3451
3452 return non_lvalue_loc (loc, t);
3453 }
3454
3455 /* Return a tree for the case when the result of an expression is RESULT
3456 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3457 of the expression but are now not needed.
3458
3459 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3460 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3461 evaluated before OMITTED2. Otherwise, if neither has side effects,
3462 just do the conversion of RESULT to TYPE. */
3463
3464 tree
3465 omit_two_operands_loc (location_t loc, tree type, tree result,
3466 tree omitted1, tree omitted2)
3467 {
3468 tree t = fold_convert_loc (loc, type, result);
3469
3470 if (TREE_SIDE_EFFECTS (omitted2))
3471 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3472 if (TREE_SIDE_EFFECTS (omitted1))
3473 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3474
3475 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3476 }
3477
3478 \f
3479 /* Return a simplified tree node for the truth-negation of ARG. This
3480 never alters ARG itself. We assume that ARG is an operation that
3481 returns a truth value (0 or 1).
3482
3483 FIXME: one would think we would fold the result, but it causes
3484 problems with the dominator optimizer. */
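/* For example, !(a && b) becomes !a || !b by De Morgan's laws, and
   !(x < y) becomes x >= y for integral operands; for floating-point
   operands under flag_trapping_math, inequalities other than
   equality and (un)ordered tests are not inverted and NULL_TREE is
   returned, so the caller keeps a TRUTH_NOT_EXPR.  */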
3485
3486 static tree
3487 fold_truth_not_expr (location_t loc, tree arg)
3488 {
3489 tree type = TREE_TYPE (arg);
3490 enum tree_code code = TREE_CODE (arg);
3491 location_t loc1, loc2;
3492
3493 /* If this is a comparison, we can simply invert it, except for
3494 floating-point non-equality comparisons, in which case we just
3495 enclose a TRUTH_NOT_EXPR around what we have. */
3496
3497 if (TREE_CODE_CLASS (code) == tcc_comparison)
3498 {
3499 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3500 if (FLOAT_TYPE_P (op_type)
3501 && flag_trapping_math
3502 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3503 && code != NE_EXPR && code != EQ_EXPR)
3504 return NULL_TREE;
3505
3506 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3507 if (code == ERROR_MARK)
3508 return NULL_TREE;
3509
3510 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3511 TREE_OPERAND (arg, 1));
3512 }
3513
3514 switch (code)
3515 {
3516 case INTEGER_CST:
3517 return constant_boolean_node (integer_zerop (arg), type);
3518
3519 case TRUTH_AND_EXPR:
3520 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3521 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3522 return build2_loc (loc, TRUTH_OR_EXPR, type,
3523 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3524 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3525
3526 case TRUTH_OR_EXPR:
3527 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3528 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3529 return build2_loc (loc, TRUTH_AND_EXPR, type,
3530 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3531 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3532
3533 case TRUTH_XOR_EXPR:
3534 /* Here we can invert either operand. We invert the first operand
3535 unless the second operand is a TRUTH_NOT_EXPR in which case our
3536 result is the XOR of the first operand with the inside of the
3537 negation of the second operand. */
3538
3539 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3540 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3541 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3542 else
3543 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3544 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3545 TREE_OPERAND (arg, 1));
3546
3547 case TRUTH_ANDIF_EXPR:
3548 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3549 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3550 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3551 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3552 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3553
3554 case TRUTH_ORIF_EXPR:
3555 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3556 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3557 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3558 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3559 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3560
3561 case TRUTH_NOT_EXPR:
3562 return TREE_OPERAND (arg, 0);
3563
3564 case COND_EXPR:
3565 {
3566 tree arg1 = TREE_OPERAND (arg, 1);
3567 tree arg2 = TREE_OPERAND (arg, 2);
3568
3569 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3570 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3571
3572 /* A COND_EXPR may have a throw as one operand, which
3573 then has void type. Just leave void operands
3574 as they are. */
3575 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3576 VOID_TYPE_P (TREE_TYPE (arg1))
3577 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3578 VOID_TYPE_P (TREE_TYPE (arg2))
3579 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3580 }
3581
3582 case COMPOUND_EXPR:
3583 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3584 return build2_loc (loc, COMPOUND_EXPR, type,
3585 TREE_OPERAND (arg, 0),
3586 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3587
3588 case NON_LVALUE_EXPR:
3589 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3590 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3591
3592 CASE_CONVERT:
3593 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3594 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3595
3596 /* ... fall through ... */
3597
3598 case FLOAT_EXPR:
3599 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3600 return build1_loc (loc, TREE_CODE (arg), type,
3601 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3602
3603 case BIT_AND_EXPR:
3604 if (!integer_onep (TREE_OPERAND (arg, 1)))
3605 return NULL_TREE;
3606 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3607
3608 case SAVE_EXPR:
3609 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3610
3611 case CLEANUP_POINT_EXPR:
3612 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3613 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3614 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3615
3616 default:
3617 return NULL_TREE;
3618 }
3619 }
3620
3621 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3622 assume that ARG is an operation that returns a truth value (0 or 1
3623 for scalars, 0 or -1 for vectors). Return the folded expression if
3624 folding is successful. Otherwise, return NULL_TREE. */
3625
3626 static tree
3627 fold_invert_truthvalue (location_t loc, tree arg)
3628 {
3629 tree type = TREE_TYPE (arg);
3630 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3631 ? BIT_NOT_EXPR
3632 : TRUTH_NOT_EXPR,
3633 type, arg);
3634 }
3635
3636 /* Return a simplified tree node for the truth-negation of ARG. This
3637 never alters ARG itself. We assume that ARG is an operation that
3638 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3639
3640 tree
3641 invert_truthvalue_loc (location_t loc, tree arg)
3642 {
3643 if (TREE_CODE (arg) == ERROR_MARK)
3644 return arg;
3645
3646 tree type = TREE_TYPE (arg);
3647 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3648 ? BIT_NOT_EXPR
3649 : TRUTH_NOT_EXPR,
3650 type, arg);
3651 }
3652
3653 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3654 with code CODE. This optimization is unsafe. */
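/* For example (constants illustrative), x/3.0 + y/3.0 becomes
   (x + y)/3.0 and x/2.0 + x/4.0 becomes x * 0.75; both rewrites can
   change rounding and exception behavior, which is why the comment
   above flags the optimization as unsafe.  */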
3655 static tree
3656 distribute_real_division (location_t loc, enum tree_code code, tree type,
3657 tree arg0, tree arg1)
3658 {
3659 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3660 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3661
3662 /* (A / C) +- (B / C) -> (A +- B) / C. */
3663 if (mul0 == mul1
3664 && operand_equal_p (TREE_OPERAND (arg0, 1),
3665 TREE_OPERAND (arg1, 1), 0))
3666 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3667 fold_build2_loc (loc, code, type,
3668 TREE_OPERAND (arg0, 0),
3669 TREE_OPERAND (arg1, 0)),
3670 TREE_OPERAND (arg0, 1));
3671
3672 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3673 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3674 TREE_OPERAND (arg1, 0), 0)
3675 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3676 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3677 {
3678 REAL_VALUE_TYPE r0, r1;
3679 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3680 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3681 if (!mul0)
3682 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3683 if (!mul1)
3684 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3685 real_arithmetic (&r0, code, &r0, &r1);
3686 return fold_build2_loc (loc, MULT_EXPR, type,
3687 TREE_OPERAND (arg0, 0),
3688 build_real (type, r0));
3689 }
3690
3691 return NULL_TREE;
3692 }
3693 \f
3694 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3695 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3696 and uses reverse storage order if REVERSEP is nonzero. */
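/* E.g. BITSIZE 8 and BITPOS 16 yield BIT_FIELD_REF <INNER, 8, 16>,
   converted to TYPE if necessary; a reference that covers all of an
   integral or pointer INNER at position 0 degenerates to a plain
   conversion of INNER.  */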
3697
3698 static tree
3699 make_bit_field_ref (location_t loc, tree inner, tree type,
3700 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3701 int unsignedp, int reversep)
3702 {
3703 tree result, bftype;
3704
3705 if (bitpos == 0 && !reversep)
3706 {
3707 tree size = TYPE_SIZE (TREE_TYPE (inner));
3708 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3709 || POINTER_TYPE_P (TREE_TYPE (inner)))
3710 && tree_fits_shwi_p (size)
3711 && tree_to_shwi (size) == bitsize)
3712 return fold_convert_loc (loc, type, inner);
3713 }
3714
3715 bftype = type;
3716 if (TYPE_PRECISION (bftype) != bitsize
3717 || TYPE_UNSIGNED (bftype) == !unsignedp)
3718 bftype = build_nonstandard_integer_type (bitsize, 0);
3719
3720 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3721 size_int (bitsize), bitsize_int (bitpos));
3722 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3723
3724 if (bftype != type)
3725 result = fold_convert_loc (loc, type, result);
3726
3727 return result;
3728 }
3729
3730 /* Optimize a bit-field compare.
3731
3732 There are two cases: First is a compare against a constant and the
3733 second is a comparison of two items where the fields are at the same
3734 bit position relative to the start of a chunk (byte, halfword, word)
3735    large enough to contain them.  In these cases we can avoid the shift
3736 implicit in bitfield extractions.
3737
3738 For constants, we emit a compare of the shifted constant with the
3739 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3740 compared. For two fields at the same position, we do the ANDs with the
3741 similar mask and compare the result of the ANDs.
3742
3743 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3744 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3745 are the left and right operands of the comparison, respectively.
3746
3747 If the optimization described above can be done, we return the resulting
3748 tree. Otherwise we return zero. */
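/* For instance, given

     struct S { unsigned int a : 3; };

   the test s.a == 5 is rewritten to load a chunk containing the field,
   AND it with a mask selecting those three bits, and compare the result
   with 5 shifted into the field's position, avoiding the extraction
   shift; s.a == t.a likewise masks both chunks and compares the ANDs.  */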
3749
3750 static tree
3751 optimize_bit_field_compare (location_t loc, enum tree_code code,
3752 tree compare_type, tree lhs, tree rhs)
3753 {
3754 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3755 tree type = TREE_TYPE (lhs);
3756 tree unsigned_type;
3757 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3758 machine_mode lmode, rmode, nmode;
3759 int lunsignedp, runsignedp;
3760 int lreversep, rreversep;
3761 int lvolatilep = 0, rvolatilep = 0;
3762 tree linner, rinner = NULL_TREE;
3763 tree mask;
3764 tree offset;
3765
3766 /* Get all the information about the extractions being done. If the bit size
3767    is the same as the size of the underlying object, we aren't doing an
3768 extraction at all and so can do nothing. We also don't want to
3769 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3770 then will no longer be able to replace it. */
3771 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3772 &lunsignedp, &lreversep, &lvolatilep, false);
3773 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3774 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3775 return 0;
3776
3777 if (const_p)
3778 rreversep = lreversep;
3779 else
3780 {
3781 /* If this is not a constant, we can only do something if bit positions,
3782 sizes, signedness and storage order are the same. */
3783 rinner
3784 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3785 &runsignedp, &rreversep, &rvolatilep, false);
3786
3787 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3788 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3789 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3790 return 0;
3791 }
3792
3793 /* See if we can find a mode to refer to this field. We should be able to,
3794 but fail if we can't. */
3795 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3796 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3797 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3798 TYPE_ALIGN (TREE_TYPE (rinner))),
3799 word_mode, false);
3800 if (nmode == VOIDmode)
3801 return 0;
3802
3803   /* Set an unsigned type of the precision of this mode for the
3804      shifts below.  */
3805 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3806
3807 /* Compute the bit position and size for the new reference and our offset
3808 within it. If the new reference is the same size as the original, we
3809 won't optimize anything, so return zero. */
3810 nbitsize = GET_MODE_BITSIZE (nmode);
3811 nbitpos = lbitpos & ~ (nbitsize - 1);
3812 lbitpos -= nbitpos;
3813 if (nbitsize == lbitsize)
3814 return 0;
3815
3816 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3817 lbitpos = nbitsize - lbitsize - lbitpos;
3818
3819 /* Make the mask to be used against the extracted field. */
3820 mask = build_int_cst_type (unsigned_type, -1);
3821 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3822 mask = const_binop (RSHIFT_EXPR, mask,
3823 size_int (nbitsize - lbitsize - lbitpos));
3824
3825 if (! const_p)
3826 /* If not comparing with constant, just rework the comparison
3827 and return. */
3828 return fold_build2_loc (loc, code, compare_type,
3829 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3830 make_bit_field_ref (loc, linner,
3831 unsigned_type,
3832 nbitsize, nbitpos,
3833 1, lreversep),
3834 mask),
3835 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3836 make_bit_field_ref (loc, rinner,
3837 unsigned_type,
3838 nbitsize, nbitpos,
3839 1, rreversep),
3840 mask));
3841
3842   /* Otherwise, we are handling the constant case.  See if the constant is too
3843      big for the field.  Warn and fold to a constant (false for EQ_EXPR, true
3844      for NE_EXPR) if so.  We do this not only for its own sake, but to avoid
3845      having to test for this error case below, else we might generate wrong code.
3846
3847 For unsigned fields, the constant shifted right by the field length should
3848 be all zero. For signed fields, the high-order bits should agree with
3849 the sign bit. */
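  /* E.g. a 3-bit unsigned field only holds 0..7, so a comparison against
     9 warns and folds to a constant; a 3-bit signed field holds -4..3,
     so all bits of the constant above the sign bit must agree with it.  */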
3850
3851 if (lunsignedp)
3852 {
3853 if (wi::lrshift (rhs, lbitsize) != 0)
3854 {
3855 warning (0, "comparison is always %d due to width of bit-field",
3856 code == NE_EXPR);
3857 return constant_boolean_node (code == NE_EXPR, compare_type);
3858 }
3859 }
3860 else
3861 {
3862 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3863 if (tem != 0 && tem != -1)
3864 {
3865 warning (0, "comparison is always %d due to width of bit-field",
3866 code == NE_EXPR);
3867 return constant_boolean_node (code == NE_EXPR, compare_type);
3868 }
3869 }
3870
3871 /* Single-bit compares should always be against zero. */
3872 if (lbitsize == 1 && ! integer_zerop (rhs))
3873 {
3874 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3875 rhs = build_int_cst (type, 0);
3876 }
3877
3878   /* Make a new bitfield reference, shift the constant over the
3879      appropriate number of bits, and mask it with the computed mask
3880      (in case this was a signed field).  */
3881 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3882 lreversep);
3883
3884 rhs = const_binop (BIT_AND_EXPR,
3885 const_binop (LSHIFT_EXPR,
3886 fold_convert_loc (loc, unsigned_type, rhs),
3887 size_int (lbitpos)),
3888 mask);
3889
3890 lhs = build2_loc (loc, code, compare_type,
3891 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3892 return lhs;
3893 }
3894 \f
3895 /* Subroutine for fold_truth_andor_1: decode a field reference.
3896
3897 If EXP is a comparison reference, we return the innermost reference.
3898
3899 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3900 set to the starting bit number.
3901
3902 If the innermost field can be completely contained in a mode-sized
3903 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3904
3905    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3906 otherwise it is not changed.
3907
3908 *PUNSIGNEDP is set to the signedness of the field.
3909
3910 *PREVERSEP is set to the storage order of the field.
3911
3912 *PMASK is set to the mask used. This is either contained in a
3913 BIT_AND_EXPR or derived from the width of the field.
3914
3915 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3916
3917 Return 0 if this is not a component reference or is one that we can't
3918 do anything with. */
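/* E.g. for EXP = p->b & 3 with B an 8-bit unsigned field, this returns
   the innermost reference, sets *PBITSIZE to 8 and *PAND_MASK to 3, and
   sets *PMASK to the field mask 0xff merged with 3, i.e. 3.  */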
3919
3920 static tree
3921 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3922 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3923 int *punsignedp, int *preversep, int *pvolatilep,
3924 tree *pmask, tree *pand_mask)
3925 {
3926 tree outer_type = 0;
3927 tree and_mask = 0;
3928 tree mask, inner, offset;
3929 tree unsigned_type;
3930 unsigned int precision;
3931
3932 /* All the optimizations using this function assume integer fields.
3933 There are problems with FP fields since the type_for_size call
3934 below can fail for, e.g., XFmode. */
3935 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3936 return 0;
3937
3938 /* We are interested in the bare arrangement of bits, so strip everything
3939 that doesn't affect the machine mode. However, record the type of the
3940 outermost expression if it may matter below. */
3941 if (CONVERT_EXPR_P (exp)
3942 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3943 outer_type = TREE_TYPE (exp);
3944 STRIP_NOPS (exp);
3945
3946 if (TREE_CODE (exp) == BIT_AND_EXPR)
3947 {
3948 and_mask = TREE_OPERAND (exp, 1);
3949 exp = TREE_OPERAND (exp, 0);
3950 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3951 if (TREE_CODE (and_mask) != INTEGER_CST)
3952 return 0;
3953 }
3954
3955 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3956 punsignedp, preversep, pvolatilep, false);
3957 if ((inner == exp && and_mask == 0)
3958 || *pbitsize < 0 || offset != 0
3959 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3960 return 0;
3961
3962 /* If the number of bits in the reference is the same as the bitsize of
3963 the outer type, then the outer type gives the signedness. Otherwise
3964 (in case of a small bitfield) the signedness is unchanged. */
3965 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3966 *punsignedp = TYPE_UNSIGNED (outer_type);
3967
3968 /* Compute the mask to access the bitfield. */
3969 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3970 precision = TYPE_PRECISION (unsigned_type);
3971
3972 mask = build_int_cst_type (unsigned_type, -1);
3973
3974 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3975 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3976
3977 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3978 if (and_mask != 0)
3979 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3980 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3981
3982 *pmask = mask;
3983 *pand_mask = and_mask;
3984 return inner;
3985 }
3986
3987 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3988    bit positions and MASK has a signed type.  */
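/* E.g. with SIZE == 4, MASK must be 15 (0xf) and have a signed type.  */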
3989
3990 static int
3991 all_ones_mask_p (const_tree mask, unsigned int size)
3992 {
3993 tree type = TREE_TYPE (mask);
3994 unsigned int precision = TYPE_PRECISION (type);
3995
3996 /* If this function returns true when the type of the mask is
3997 UNSIGNED, then there will be errors. In particular see
3998 gcc.c-torture/execute/990326-1.c. There does not appear to be
3999 any documentation paper trail as to why this is so. But the pre
4000      wide-int code worked with that restriction and it has been preserved
4001 here. */
4002 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4003 return false;
4004
4005 return wi::mask (size, false, precision) == mask;
4006 }
4007
4008 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4009 represents the sign bit of EXP's type. If EXP represents a sign
4010 or zero extension, also test VAL against the unextended type.
4011 The return value is the (sub)expression whose sign bit is VAL,
4012 or NULL_TREE otherwise. */
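/* E.g. for EXP of type int with 32-bit precision, VAL must be 0x80000000;
   for EXP = (int) C with C a signed char, VAL == 0x80 also matches via
   the recursion on the narrower operand.  */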
4013
4014 tree
4015 sign_bit_p (tree exp, const_tree val)
4016 {
4017 int width;
4018 tree t;
4019
4020 /* Tree EXP must have an integral type. */
4021 t = TREE_TYPE (exp);
4022 if (! INTEGRAL_TYPE_P (t))
4023 return NULL_TREE;
4024
4025 /* Tree VAL must be an integer constant. */
4026 if (TREE_CODE (val) != INTEGER_CST
4027 || TREE_OVERFLOW (val))
4028 return NULL_TREE;
4029
4030 width = TYPE_PRECISION (t);
4031 if (wi::only_sign_bit_p (val, width))
4032 return exp;
4033
4034 /* Handle extension from a narrower type. */
4035 if (TREE_CODE (exp) == NOP_EXPR
4036 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4037 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4038
4039 return NULL_TREE;
4040 }
4041
4042 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4043 to be evaluated unconditionally. */
4044
4045 static int
4046 simple_operand_p (const_tree exp)
4047 {
4048 /* Strip any conversions that don't change the machine mode. */
4049 STRIP_NOPS (exp);
4050
4051 return (CONSTANT_CLASS_P (exp)
4052 || TREE_CODE (exp) == SSA_NAME
4053 || (DECL_P (exp)
4054 && ! TREE_ADDRESSABLE (exp)
4055 && ! TREE_THIS_VOLATILE (exp)
4056 && ! DECL_NONLOCAL (exp)
4057 /* Don't regard global variables as simple. They may be
4058 allocated in ways unknown to the compiler (shared memory,
4059 #pragma weak, etc). */
4060 && ! TREE_PUBLIC (exp)
4061 && ! DECL_EXTERNAL (exp)
4062 /* Weakrefs are not safe to be read, since they can be NULL.
4063 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4064 have DECL_WEAK flag set. */
4065 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4066 /* Loading a static variable is unduly expensive, but global
4067 registers aren't expensive. */
4068 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4069 }
4070
4071 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4072 to be evaluated unconditionally.
4073    In addition to simple_operand_p, we assume that comparisons, conversions,
4074 and logic-not operations are simple, if their operands are simple, too. */
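/* E.g. X == 0 and !(int) Y are simple in this sense when X and Y are,
   provided no subexpression can trap or has side effects.  */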
4075
4076 static bool
4077 simple_operand_p_2 (tree exp)
4078 {
4079 enum tree_code code;
4080
4081 if (TREE_SIDE_EFFECTS (exp)
4082 || tree_could_trap_p (exp))
4083 return false;
4084
4085 while (CONVERT_EXPR_P (exp))
4086 exp = TREE_OPERAND (exp, 0);
4087
4088 code = TREE_CODE (exp);
4089
4090 if (TREE_CODE_CLASS (code) == tcc_comparison)
4091 return (simple_operand_p (TREE_OPERAND (exp, 0))
4092 && simple_operand_p (TREE_OPERAND (exp, 1)));
4093
4094 if (code == TRUTH_NOT_EXPR)
4095 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4096
4097 return simple_operand_p (exp);
4098 }
4099
4100 \f
4101 /* The following functions are subroutines to fold_range_test and allow it to
4102 try to change a logical combination of comparisons into a range test.
4103
4104 For example, both
4105 X == 2 || X == 3 || X == 4 || X == 5
4106 and
4107 X >= 2 && X <= 5
4108 are converted to
4109 (unsigned) (X - 2) <= 3
4110
4111 We describe each set of comparisons as being either inside or outside
4112 a range, using a variable named like IN_P, and then describe the
4113 range with a lower and upper bound. If one of the bounds is omitted,
4114 it represents either the highest or lowest value of the type.
4115
4116 In the comments below, we represent a range by two numbers in brackets
4117 preceded by a "+" to designate being inside that range, or a "-" to
4118 designate being outside that range, so the condition can be inverted by
4119 flipping the prefix. An omitted bound is represented by a "-". For
4120 example, "- [-, 10]" means being outside the range starting at the lowest
4121 possible value and ending at 10, in other words, being greater than 10.
4122 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4123 always false.
4124
4125    We set up things so that the missing bounds are handled in a consistent
4126    manner so that neither a missing bound nor "true" or "false" needs to be
4127    handled as a special case.  */
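/* For instance, "+ [2, 5]" describes 2 <= X && X <= 5, its inversion
   "- [2, 5]" describes X < 2 || X > 5, and "+ [2, -]" describes X >= 2
   with the upper bound omitted.  */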
4128
4129 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4130 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4131 and UPPER1_P are nonzero if the respective argument is an upper bound
4132 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4133 must be specified for a comparison. ARG1 will be converted to ARG0's
4134 type if both are specified. */
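/* E.g. two omitted upper bounds compare equal under EQ_EXPR, since both
   stand for the same infinite value, while an omitted lower bound is
   less than an omitted upper bound under LT_EXPR.  */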
4135
4136 static tree
4137 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4138 tree arg1, int upper1_p)
4139 {
4140 tree tem;
4141 int result;
4142 int sgn0, sgn1;
4143
4144 /* If neither arg represents infinity, do the normal operation.
4145 Else, if not a comparison, return infinity. Else handle the special
4146 comparison rules. Note that most of the cases below won't occur, but
4147 are handled for consistency. */
4148
4149 if (arg0 != 0 && arg1 != 0)
4150 {
4151 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4152 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4153 STRIP_NOPS (tem);
4154 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4155 }
4156
4157 if (TREE_CODE_CLASS (code) != tcc_comparison)
4158 return 0;
4159
4160 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4161      for neither.  In real maths, we cannot assume open-ended ranges are
4162      the same.  But this is computer arithmetic, where numbers are finite.
4163      We can therefore identify any unbounded range with the value Z, Z
4164      being greater than any representable number.  This permits us to
4165      treat unbounded ranges as equal.  */
4166 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4167 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4168 switch (code)
4169 {
4170 case EQ_EXPR:
4171 result = sgn0 == sgn1;
4172 break;
4173 case NE_EXPR:
4174 result = sgn0 != sgn1;
4175 break;
4176 case LT_EXPR:
4177 result = sgn0 < sgn1;
4178 break;
4179 case LE_EXPR:
4180 result = sgn0 <= sgn1;
4181 break;
4182 case GT_EXPR:
4183 result = sgn0 > sgn1;
4184 break;
4185 case GE_EXPR:
4186 result = sgn0 >= sgn1;
4187 break;
4188 default:
4189 gcc_unreachable ();
4190 }
4191
4192 return constant_boolean_node (result, type);
4193 }
4194 \f
4195 /* Helper routine for make_range. Perform one step for it, return
4196 new expression if the loop should continue or NULL_TREE if it should
4197 stop. */
4198
4199 tree
4200 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4201 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4202 bool *strict_overflow_p)
4203 {
4204 tree arg0_type = TREE_TYPE (arg0);
4205 tree n_low, n_high, low = *p_low, high = *p_high;
4206 int in_p = *p_in_p, n_in_p;
4207
4208 switch (code)
4209 {
4210 case TRUTH_NOT_EXPR:
4211 /* We can only do something if the range is testing for zero. */
4212 if (low == NULL_TREE || high == NULL_TREE
4213 || ! integer_zerop (low) || ! integer_zerop (high))
4214 return NULL_TREE;
4215 *p_in_p = ! in_p;
4216 return arg0;
4217
4218 case EQ_EXPR: case NE_EXPR:
4219 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4220 /* We can only do something if the range is testing for zero
4221 and if the second operand is an integer constant. Note that
4222 saying something is "in" the range we make is done by
4223 complementing IN_P since it will set in the initial case of
4224 being not equal to zero; "out" is leaving it alone. */
4225 if (low == NULL_TREE || high == NULL_TREE
4226 || ! integer_zerop (low) || ! integer_zerop (high)
4227 || TREE_CODE (arg1) != INTEGER_CST)
4228 return NULL_TREE;
4229
4230 switch (code)
4231 {
4232 case NE_EXPR: /* - [c, c] */
4233 low = high = arg1;
4234 break;
4235 case EQ_EXPR: /* + [c, c] */
4236 in_p = ! in_p, low = high = arg1;
4237 break;
4238 case GT_EXPR: /* - [-, c] */
4239 low = 0, high = arg1;
4240 break;
4241 case GE_EXPR: /* + [c, -] */
4242 in_p = ! in_p, low = arg1, high = 0;
4243 break;
4244 case LT_EXPR: /* - [c, -] */
4245 low = arg1, high = 0;
4246 break;
4247 case LE_EXPR: /* + [-, c] */
4248 in_p = ! in_p, low = 0, high = arg1;
4249 break;
4250 default:
4251 gcc_unreachable ();
4252 }
4253
4254 /* If this is an unsigned comparison, we also know that EXP is
4255 greater than or equal to zero. We base the range tests we make
4256 on that fact, so we record it here so we can parse existing
4257 range tests. We test arg0_type since often the return type
4258 of, e.g. EQ_EXPR, is boolean. */
4259 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4260 {
4261 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4262 in_p, low, high, 1,
4263 build_int_cst (arg0_type, 0),
4264 NULL_TREE))
4265 return NULL_TREE;
4266
4267 in_p = n_in_p, low = n_low, high = n_high;
4268
4269 /* If the high bound is missing, but we have a nonzero low
4270 bound, reverse the range so it goes from zero to the low bound
4271 minus 1. */
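	  /* E.g. the unsigned test X >= 2, i.e. + [2, -], becomes
	     - [0, 1] here.  */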
4272 if (high == 0 && low && ! integer_zerop (low))
4273 {
4274 in_p = ! in_p;
4275 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4276 build_int_cst (TREE_TYPE (low), 1), 0);
4277 low = build_int_cst (arg0_type, 0);
4278 }
4279 }
4280
4281 *p_low = low;
4282 *p_high = high;
4283 *p_in_p = in_p;
4284 return arg0;
4285
4286 case NEGATE_EXPR:
4287 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4288 low and high are non-NULL, then normalize will DTRT. */
4289 if (!TYPE_UNSIGNED (arg0_type)
4290 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4291 {
4292 if (low == NULL_TREE)
4293 low = TYPE_MIN_VALUE (arg0_type);
4294 if (high == NULL_TREE)
4295 high = TYPE_MAX_VALUE (arg0_type);
4296 }
4297
4298 /* (-x) IN [a,b] -> x in [-b, -a] */
4299 n_low = range_binop (MINUS_EXPR, exp_type,
4300 build_int_cst (exp_type, 0),
4301 0, high, 1);
4302 n_high = range_binop (MINUS_EXPR, exp_type,
4303 build_int_cst (exp_type, 0),
4304 0, low, 0);
4305 if (n_high != 0 && TREE_OVERFLOW (n_high))
4306 return NULL_TREE;
4307 goto normalize;
4308
4309 case BIT_NOT_EXPR:
4310 /* ~ X -> -X - 1 */
4311 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4312 build_int_cst (exp_type, 1));
4313
4314 case PLUS_EXPR:
4315 case MINUS_EXPR:
4316 if (TREE_CODE (arg1) != INTEGER_CST)
4317 return NULL_TREE;
4318
4319 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4320 move a constant to the other side. */
4321 if (!TYPE_UNSIGNED (arg0_type)
4322 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4323 return NULL_TREE;
4324
4325 /* If EXP is signed, any overflow in the computation is undefined,
4326 so we don't worry about it so long as our computations on
4327 the bounds don't overflow. For unsigned, overflow is defined
4328 and this is exactly the right thing. */
4329 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4330 arg0_type, low, 0, arg1, 0);
4331 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4332 arg0_type, high, 1, arg1, 0);
4333 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4334 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4335 return NULL_TREE;
4336
4337 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4338 *strict_overflow_p = true;
4339
4340 normalize:
4341 /* Check for an unsigned range which has wrapped around the maximum
4342 value thus making n_high < n_low, and normalize it. */
4343 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4344 {
4345 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4346 build_int_cst (TREE_TYPE (n_high), 1), 0);
4347 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4348 build_int_cst (TREE_TYPE (n_low), 1), 0);
4349
4350 /* If the range is of the form +/- [ x+1, x ], we won't
4351 be able to normalize it. But then, it represents the
4352 whole range or the empty set, so make it
4353 +/- [ -, - ]. */
4354 if (tree_int_cst_equal (n_low, low)
4355 && tree_int_cst_equal (n_high, high))
4356 low = high = 0;
4357 else
4358 in_p = ! in_p;
4359 }
4360 else
4361 low = n_low, high = n_high;
4362
4363 *p_low = low;
4364 *p_high = high;
4365 *p_in_p = in_p;
4366 return arg0;
4367
4368 CASE_CONVERT:
4369 case NON_LVALUE_EXPR:
4370 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4371 return NULL_TREE;
4372
4373 if (! INTEGRAL_TYPE_P (arg0_type)
4374 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4375 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4376 return NULL_TREE;
4377
4378 n_low = low, n_high = high;
4379
4380 if (n_low != 0)
4381 n_low = fold_convert_loc (loc, arg0_type, n_low);
4382
4383 if (n_high != 0)
4384 n_high = fold_convert_loc (loc, arg0_type, n_high);
4385
4386      /* If we're converting arg0 from an unsigned type to exp's
4387	 signed type, we will be doing the comparison as unsigned.
4388 The tests above have already verified that LOW and HIGH
4389 are both positive.
4390
4391 So we have to ensure that we will handle large unsigned
4392 values the same way that the current signed bounds treat
4393 negative values. */
4394
4395 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4396 {
4397 tree high_positive;
4398 tree equiv_type;
4399 /* For fixed-point modes, we need to pass the saturating flag
4400 as the 2nd parameter. */
4401 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4402 equiv_type
4403 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4404 TYPE_SATURATING (arg0_type));
4405 else
4406 equiv_type
4407 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4408
4409 /* A range without an upper bound is, naturally, unbounded.
4410 Since convert would have cropped a very large value, use
4411 the max value for the destination type. */
4412 high_positive
4413 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4414 : TYPE_MAX_VALUE (arg0_type);
4415
4416 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4417 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4418 fold_convert_loc (loc, arg0_type,
4419 high_positive),
4420 build_int_cst (arg0_type, 1));
4421
4422 /* If the low bound is specified, "and" the range with the
4423 range for which the original unsigned value will be
4424 positive. */
4425 if (low != 0)
4426 {
4427 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4428 1, fold_convert_loc (loc, arg0_type,
4429 integer_zero_node),
4430 high_positive))
4431 return NULL_TREE;
4432
4433 in_p = (n_in_p == in_p);
4434 }
4435 else
4436 {
4437 /* Otherwise, "or" the range with the range of the input
4438 that will be interpreted as negative. */
4439 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4440 1, fold_convert_loc (loc, arg0_type,
4441 integer_zero_node),
4442 high_positive))
4443 return NULL_TREE;
4444
4445 in_p = (in_p != n_in_p);
4446 }
4447 }
4448
4449 *p_low = n_low;
4450 *p_high = n_high;
4451 *p_in_p = in_p;
4452 return arg0;
4453
4454 default:
4455 return NULL_TREE;
4456 }
4457 }
4458
4459 /* Given EXP, a logical expression, set the range it is testing into
4460 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4461 actually being tested. *PLOW and *PHIGH will be made of the same
4462 type as the returned expression. If EXP is not a comparison, we
4463 will most likely not be returning a useful value and range. Set
4464 *STRICT_OVERFLOW_P to true if the return value is only valid
4465 because signed overflow is undefined; otherwise, do not change
4466 *STRICT_OVERFLOW_P. */
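/* E.g. for EXP = X > 10 this returns X and sets *PIN_P to 0 with the
   range [-, 10], i.e. the test holds when X lies outside [minimum, 10].  */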
4467
4468 tree
4469 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4470 bool *strict_overflow_p)
4471 {
4472 enum tree_code code;
4473 tree arg0, arg1 = NULL_TREE;
4474 tree exp_type, nexp;
4475 int in_p;
4476 tree low, high;
4477 location_t loc = EXPR_LOCATION (exp);
4478
4479   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4480      and see if we can refine the range.  Some of the cases below may not
4481      happen, but it doesn't seem worth worrying about this.  We loop for
4482      as long as make_range_step succeeds in refining the expression; when
4483      it returns NULL_TREE we stop.  */
4484
4485 in_p = 0;
4486 low = high = build_int_cst (TREE_TYPE (exp), 0);
4487
4488 while (1)
4489 {
4490 code = TREE_CODE (exp);
4491 exp_type = TREE_TYPE (exp);
4492 arg0 = NULL_TREE;
4493
4494 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4495 {
4496 if (TREE_OPERAND_LENGTH (exp) > 0)
4497 arg0 = TREE_OPERAND (exp, 0);
4498 if (TREE_CODE_CLASS (code) == tcc_binary
4499 || TREE_CODE_CLASS (code) == tcc_comparison
4500 || (TREE_CODE_CLASS (code) == tcc_expression
4501 && TREE_OPERAND_LENGTH (exp) > 1))
4502 arg1 = TREE_OPERAND (exp, 1);
4503 }
4504 if (arg0 == NULL_TREE)
4505 break;
4506
4507 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4508 &high, &in_p, strict_overflow_p);
4509 if (nexp == NULL_TREE)
4510 break;
4511 exp = nexp;
4512 }
4513
4514 /* If EXP is a constant, we can evaluate whether this is true or false. */
4515 if (TREE_CODE (exp) == INTEGER_CST)
4516 {
4517 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4518 exp, 0, low, 0))
4519 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4520 exp, 1, high, 1)));
4521 low = high = 0;
4522 exp = 0;
4523 }
4524
4525 *pin_p = in_p, *plow = low, *phigh = high;
4526 return exp;
4527 }
4528 \f
4529 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4530 type, TYPE, return an expression to test if EXP is in (or out of, depending
4531 on IN_P) the range. Return 0 if the test couldn't be created. */
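/* E.g. with IN_P set, LOW == 2 and HIGH == 5, this builds the test
   (unsigned) (EXP - 2) <= 3 via the subtraction path below.  */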
4532
4533 tree
4534 build_range_check (location_t loc, tree type, tree exp, int in_p,
4535 tree low, tree high)
4536 {
4537 tree etype = TREE_TYPE (exp), value;
4538
4539 /* Disable this optimization for function pointer expressions
4540 on targets that require function pointer canonicalization. */
4541 if (targetm.have_canonicalize_funcptr_for_compare ()
4542 && TREE_CODE (etype) == POINTER_TYPE
4543 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4544 return NULL_TREE;
4545
4546 if (! in_p)
4547 {
4548 value = build_range_check (loc, type, exp, 1, low, high);
4549 if (value != 0)
4550 return invert_truthvalue_loc (loc, value);
4551
4552 return 0;
4553 }
4554
4555 if (low == 0 && high == 0)
4556 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4557
4558 if (low == 0)
4559 return fold_build2_loc (loc, LE_EXPR, type, exp,
4560 fold_convert_loc (loc, etype, high));
4561
4562 if (high == 0)
4563 return fold_build2_loc (loc, GE_EXPR, type, exp,
4564 fold_convert_loc (loc, etype, low));
4565
4566 if (operand_equal_p (low, high, 0))
4567 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4568 fold_convert_loc (loc, etype, low));
4569
4570 if (integer_zerop (low))
4571 {
4572 if (! TYPE_UNSIGNED (etype))
4573 {
4574 etype = unsigned_type_for (etype);
4575 high = fold_convert_loc (loc, etype, high);
4576 exp = fold_convert_loc (loc, etype, exp);
4577 }
4578 return build_range_check (loc, type, exp, 1, 0, high);
4579 }
4580
4581 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4582 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4583 {
4584 int prec = TYPE_PRECISION (etype);
4585
4586 if (wi::mask (prec - 1, false, prec) == high)
4587 {
4588 if (TYPE_UNSIGNED (etype))
4589 {
4590 tree signed_etype = signed_type_for (etype);
4591 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4592 etype
4593 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4594 else
4595 etype = signed_etype;
4596 exp = fold_convert_loc (loc, etype, exp);
4597 }
4598 return fold_build2_loc (loc, GT_EXPR, type, exp,
4599 build_int_cst (etype, 0));
4600 }
4601 }
4602
4603 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4604 This requires wrap-around arithmetics for the type of the expression.
4605 First make sure that arithmetics in this type is valid, then make sure
4606 that it wraps around. */
4607 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4608 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4609 TYPE_UNSIGNED (etype));
4610
4611 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4612 {
4613 tree utype, minv, maxv;
4614
4615 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4616 for the type in question, as we rely on this here. */
4617 utype = unsigned_type_for (etype);
4618 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4619 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4620 build_int_cst (TREE_TYPE (maxv), 1), 1);
4621 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4622
4623 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4624 minv, 1, maxv, 1)))
4625 etype = utype;
4626 else
4627 return 0;
4628 }
4629
4630 high = fold_convert_loc (loc, etype, high);
4631 low = fold_convert_loc (loc, etype, low);
4632 exp = fold_convert_loc (loc, etype, exp);
4633
4634 value = const_binop (MINUS_EXPR, high, low);
4635
4636
4637 if (POINTER_TYPE_P (etype))
4638 {
4639 if (value != 0 && !TREE_OVERFLOW (value))
4640 {
4641 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4642 return build_range_check (loc, type,
4643 fold_build_pointer_plus_loc (loc, exp, low),
4644 1, build_int_cst (etype, 0), value);
4645 }
4646 return 0;
4647 }
4648
4649 if (value != 0 && !TREE_OVERFLOW (value))
4650 return build_range_check (loc, type,
4651 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4652 1, build_int_cst (etype, 0), value);
4653
4654 return 0;
4655 }
4656 \f
4657 /* Return the predecessor of VAL in its type, handling the infinite case. */
4658
4659 static tree
4660 range_predecessor (tree val)
4661 {
4662 tree type = TREE_TYPE (val);
4663
4664 if (INTEGRAL_TYPE_P (type)
4665 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4666 return 0;
4667 else
4668 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4669 build_int_cst (TREE_TYPE (val), 1), 0);
4670 }
4671
4672 /* Return the successor of VAL in its type, handling the infinite case. */
4673
4674 static tree
4675 range_successor (tree val)
4676 {
4677 tree type = TREE_TYPE (val);
4678
4679 if (INTEGRAL_TYPE_P (type)
4680 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4681 return 0;
4682 else
4683 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4684 build_int_cst (TREE_TYPE (val), 1), 0);
4685 }
4686
4687 /* Given two ranges, see if we can merge them into one. Return 1 if we
4688 can, 0 if we can't. Set the output range into the specified parameters. */
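/* E.g. merging + [2, -] with + [-, 5] yields + [2, 5], while merging
   + [2, 5] with - [2, 5] yields the always-false range - [-, -].  */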
4689
4690 bool
4691 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4692 tree high0, int in1_p, tree low1, tree high1)
4693 {
4694 int no_overlap;
4695 int subset;
4696 int temp;
4697 tree tem;
4698 int in_p;
4699 tree low, high;
4700 int lowequal = ((low0 == 0 && low1 == 0)
4701 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4702 low0, 0, low1, 0)));
4703 int highequal = ((high0 == 0 && high1 == 0)
4704 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4705 high0, 1, high1, 1)));
4706
4707 /* Make range 0 be the range that starts first, or ends last if they
4708 start at the same value. Swap them if it isn't. */
4709 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4710 low0, 0, low1, 0))
4711 || (lowequal
4712 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4713 high1, 1, high0, 1))))
4714 {
4715 temp = in0_p, in0_p = in1_p, in1_p = temp;
4716 tem = low0, low0 = low1, low1 = tem;
4717 tem = high0, high0 = high1, high1 = tem;
4718 }
4719
4720 /* Now flag two cases, whether the ranges are disjoint or whether the
4721 second range is totally subsumed in the first. Note that the tests
4722 below are simplified by the ones above. */
4723 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4724 high0, 1, low1, 0));
4725 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4726 high1, 1, high0, 1));
4727
4728 /* We now have four cases, depending on whether we are including or
4729 excluding the two ranges. */
4730 if (in0_p && in1_p)
4731 {
4732 /* If they don't overlap, the result is false. If the second range
4733 is a subset it is the result. Otherwise, the range is from the start
4734 of the second to the end of the first. */
4735 if (no_overlap)
4736 in_p = 0, low = high = 0;
4737 else if (subset)
4738 in_p = 1, low = low1, high = high1;
4739 else
4740 in_p = 1, low = low1, high = high0;
4741 }
4742
4743 else if (in0_p && ! in1_p)
4744 {
4745 /* If they don't overlap, the result is the first range. If they are
4746 equal, the result is false. If the second range is a subset of the
4747 first, and the ranges begin at the same place, we go from just after
4748 the end of the second range to the end of the first. If the second
4749 range is not a subset of the first, or if it is a subset and both
4750 ranges end at the same place, the range starts at the start of the
4751 first range and ends just before the second range.
4752 Otherwise, we can't describe this as a single range. */
4753 if (no_overlap)
4754 in_p = 1, low = low0, high = high0;
4755 else if (lowequal && highequal)
4756 in_p = 0, low = high = 0;
4757 else if (subset && lowequal)
4758 {
4759 low = range_successor (high1);
4760 high = high0;
4761 in_p = 1;
4762 if (low == 0)
4763 {
4764 /* We are in the weird situation where high0 > high1 but
4765 high1 has no successor. Punt. */
4766 return 0;
4767 }
4768 }
4769 else if (! subset || highequal)
4770 {
4771 low = low0;
4772 high = range_predecessor (low1);
4773 in_p = 1;
4774 if (high == 0)
4775 {
4776 /* low0 < low1 but low1 has no predecessor. Punt. */
4777 return 0;
4778 }
4779 }
4780 else
4781 return 0;
4782 }
4783
4784 else if (! in0_p && in1_p)
4785 {
4786 /* If they don't overlap, the result is the second range. If the second
4787 is a subset of the first, the result is false. Otherwise,
4788 the range starts just after the first range and ends at the
4789 end of the second. */
4790 if (no_overlap)
4791 in_p = 1, low = low1, high = high1;
4792 else if (subset || highequal)
4793 in_p = 0, low = high = 0;
4794 else
4795 {
4796 low = range_successor (high0);
4797 high = high1;
4798 in_p = 1;
4799 if (low == 0)
4800 {
4801 /* high1 > high0 but high0 has no successor. Punt. */
4802 return 0;
4803 }
4804 }
4805 }
4806
4807 else
4808 {
4809 /* The case where we are excluding both ranges. Here the complex case
4810 is if they don't overlap. In that case, the only time we have a
4811 range is if they are adjacent. If the second is a subset of the
4812 first, the result is the first. Otherwise, the range to exclude
4813 starts at the beginning of the first range and ends at the end of the
4814 second. */
4815 if (no_overlap)
4816 {
4817 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4818 range_successor (high0),
4819 1, low1, 0)))
4820 in_p = 0, low = low0, high = high1;
4821 else
4822 {
4823 /* Canonicalize - [min, x] into - [-, x]. */
4824 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4825 switch (TREE_CODE (TREE_TYPE (low0)))
4826 {
4827 case ENUMERAL_TYPE:
4828 if (TYPE_PRECISION (TREE_TYPE (low0))
4829 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4830 break;
4831 /* FALLTHROUGH */
4832 case INTEGER_TYPE:
4833 if (tree_int_cst_equal (low0,
4834 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4835 low0 = 0;
4836 break;
4837 case POINTER_TYPE:
4838 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4839 && integer_zerop (low0))
4840 low0 = 0;
4841 break;
4842 default:
4843 break;
4844 }
4845
4846 /* Canonicalize - [x, max] into - [x, -]. */
4847 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4848 switch (TREE_CODE (TREE_TYPE (high1)))
4849 {
4850 case ENUMERAL_TYPE:
4851 if (TYPE_PRECISION (TREE_TYPE (high1))
4852 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4853 break;
4854 /* FALLTHROUGH */
4855 case INTEGER_TYPE:
4856 if (tree_int_cst_equal (high1,
4857 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4858 high1 = 0;
4859 break;
4860 case POINTER_TYPE:
4861 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4862 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4863 high1, 1,
4864 build_int_cst (TREE_TYPE (high1), 1),
4865 1)))
4866 high1 = 0;
4867 break;
4868 default:
4869 break;
4870 }
4871
4872 /* The ranges might be also adjacent between the maximum and
4873 minimum values of the given type. For
4874 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4875 return + [x + 1, y - 1]. */
4876 if (low0 == 0 && high1 == 0)
4877 {
4878 low = range_successor (high0);
4879 high = range_predecessor (low1);
4880 if (low == 0 || high == 0)
4881 return 0;
4882
4883 in_p = 1;
4884 }
4885 else
4886 return 0;
4887 }
4888 }
4889 else if (subset)
4890 in_p = 0, low = low0, high = high0;
4891 else
4892 in_p = 0, low = low0, high = high1;
4893 }
4894
4895 *pin_p = in_p, *plow = low, *phigh = high;
4896 return 1;
4897 }
4898 \f
4899
4900 /* Subroutine of fold, looking inside expressions of the form
4901 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4902 of the COND_EXPR. This function is being used also to optimize
4903 A op B ? C : A, by reversing the comparison first.
4904
4905 Return a folded expression whose code is not a COND_EXPR
4906 anymore, or NULL_TREE if no folding opportunity is found. */
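/* E.g. A > B ? A : B folds to MAX_EXPR <B, A> when NaNs need not be
   honored, and A >= 0 ? A : -A folds to ABS_EXPR <A>.  */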
4907
4908 static tree
4909 fold_cond_expr_with_comparison (location_t loc, tree type,
4910 tree arg0, tree arg1, tree arg2)
4911 {
4912 enum tree_code comp_code = TREE_CODE (arg0);
4913 tree arg00 = TREE_OPERAND (arg0, 0);
4914 tree arg01 = TREE_OPERAND (arg0, 1);
4915 tree arg1_type = TREE_TYPE (arg1);
4916 tree tem;
4917
4918 STRIP_NOPS (arg1);
4919 STRIP_NOPS (arg2);
4920
4921 /* If we have A op 0 ? A : -A, consider applying the following
4922 transformations:
4923
4924 A == 0? A : -A same as -A
4925 A != 0? A : -A same as A
4926 A >= 0? A : -A same as abs (A)
4927 A > 0? A : -A same as abs (A)
4928 A <= 0? A : -A same as -abs (A)
4929 A < 0? A : -A same as -abs (A)
4930
4931 None of these transformations work for modes with signed
4932 zeros. If A is +/-0, the first two transformations will
4933 change the sign of the result (from +0 to -0, or vice
4934 versa). The last four will fix the sign of the result,
4935 even though the original expressions could be positive or
4936 negative, depending on the sign of A.
4937
4938 Note that all these transformations are correct if A is
4939 NaN, since the two alternatives (A and -A) are also NaNs. */
4940 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4941 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4942 ? real_zerop (arg01)
4943 : integer_zerop (arg01))
4944 && ((TREE_CODE (arg2) == NEGATE_EXPR
4945 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4946 /* In the case that A is of the form X-Y, '-A' (arg2) may
4947 have already been folded to Y-X, check for that. */
4948 || (TREE_CODE (arg1) == MINUS_EXPR
4949 && TREE_CODE (arg2) == MINUS_EXPR
4950 && operand_equal_p (TREE_OPERAND (arg1, 0),
4951 TREE_OPERAND (arg2, 1), 0)
4952 && operand_equal_p (TREE_OPERAND (arg1, 1),
4953 TREE_OPERAND (arg2, 0), 0))))
4954 switch (comp_code)
4955 {
4956 case EQ_EXPR:
4957 case UNEQ_EXPR:
4958 tem = fold_convert_loc (loc, arg1_type, arg1);
4959 return pedantic_non_lvalue_loc (loc,
4960 fold_convert_loc (loc, type,
4961 negate_expr (tem)));
4962 case NE_EXPR:
4963 case LTGT_EXPR:
4964 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4965 case UNGE_EXPR:
4966 case UNGT_EXPR:
4967 if (flag_trapping_math)
4968 break;
4969 /* Fall through. */
4970 case GE_EXPR:
4971 case GT_EXPR:
4972 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4973 break;
4974 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4975 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4976 case UNLE_EXPR:
4977 case UNLT_EXPR:
4978 if (flag_trapping_math)
4979 break;
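	  /* Fall through.  */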
4980 case LE_EXPR:
4981 case LT_EXPR:
4982 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4983 break;
4984 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4985 return negate_expr (fold_convert_loc (loc, type, tem));
4986 default:
4987 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4988 break;
4989 }
4990
4991 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4992 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4993 both transformations are correct when A is NaN: A != 0
4994 is then true, and A == 0 is false. */
4995
4996 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4997 && integer_zerop (arg01) && integer_zerop (arg2))
4998 {
4999 if (comp_code == NE_EXPR)
5000 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5001 else if (comp_code == EQ_EXPR)
5002 return build_zero_cst (type);
5003 }
5004
5005 /* Try some transformations of A op B ? A : B.
5006
5007 A == B? A : B same as B
5008 A != B? A : B same as A
5009 A >= B? A : B same as max (A, B)
5010 A > B? A : B same as max (B, A)
5011 A <= B? A : B same as min (A, B)
5012 A < B? A : B same as min (B, A)
5013
5014 As above, these transformations don't work in the presence
5015 of signed zeros. For example, if A and B are zeros of
5016 opposite sign, the first two transformations will change
5017 the sign of the result. In the last four, the original
5018 expressions give different results for (A=+0, B=-0) and
5019 (A=-0, B=+0), but the transformed expressions do not.
5020
5021 The first two transformations are correct if either A or B
5022 is a NaN. In the first transformation, the condition will
5023 be false, and B will indeed be chosen. In the case of the
5024 second transformation, the condition A != B will be true,
5025 and A will be chosen.
5026
5027 The conversions to max() and min() are not correct if B is
5028 a number and A is not. The conditions in the original
5029 expressions will be false, so all four give B. The min()
5030 and max() versions would give a NaN instead. */
5031 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5032 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5033 /* Avoid these transformations if the COND_EXPR may be used
5034 as an lvalue in the C++ front-end. PR c++/19199. */
5035 && (in_gimple_form
5036 || VECTOR_TYPE_P (type)
5037 || (! lang_GNU_CXX ()
5038 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5039 || ! maybe_lvalue_p (arg1)
5040 || ! maybe_lvalue_p (arg2)))
5041 {
5042 tree comp_op0 = arg00;
5043 tree comp_op1 = arg01;
5044 tree comp_type = TREE_TYPE (comp_op0);
5045
5046 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5047 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5048 {
5049 comp_type = type;
5050 comp_op0 = arg1;
5051 comp_op1 = arg2;
5052 }
5053
5054 switch (comp_code)
5055 {
5056 case EQ_EXPR:
5057 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5058 case NE_EXPR:
5059 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5060 case LE_EXPR:
5061 case LT_EXPR:
5062 case UNLE_EXPR:
5063 case UNLT_EXPR:
5064 /* In C++ a ?: expression can be an lvalue, so put the
5065 operand which will be used if they are equal first
5066 so that we can convert this back to the
5067 corresponding COND_EXPR. */
5068 if (!HONOR_NANS (arg1))
5069 {
5070 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5071 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5072 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5073 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5074 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5075 comp_op1, comp_op0);
5076 return pedantic_non_lvalue_loc (loc,
5077 fold_convert_loc (loc, type, tem));
5078 }
5079 break;
5080 case GE_EXPR:
5081 case GT_EXPR:
5082 case UNGE_EXPR:
5083 case UNGT_EXPR:
5084 if (!HONOR_NANS (arg1))
5085 {
5086 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5087 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5088 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5089 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5090 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5091 comp_op1, comp_op0);
5092 return pedantic_non_lvalue_loc (loc,
5093 fold_convert_loc (loc, type, tem));
5094 }
5095 break;
5096 case UNEQ_EXPR:
5097 if (!HONOR_NANS (arg1))
5098 return pedantic_non_lvalue_loc (loc,
5099 fold_convert_loc (loc, type, arg2));
5100 break;
5101 case LTGT_EXPR:
5102 if (!HONOR_NANS (arg1))
5103 return pedantic_non_lvalue_loc (loc,
5104 fold_convert_loc (loc, type, arg1));
5105 break;
5106 default:
5107 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5108 break;
5109 }
5110 }
5111
5112 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5113 we might still be able to simplify this. For example,
5114 if C1 is one less or one more than C2, this might have started
5115 out as a MIN or MAX and been transformed by this function.
5116 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5117
5118 if (INTEGRAL_TYPE_P (type)
5119 && TREE_CODE (arg01) == INTEGER_CST
5120 && TREE_CODE (arg2) == INTEGER_CST)
5121 switch (comp_code)
5122 {
5123 case EQ_EXPR:
5124 if (TREE_CODE (arg1) == INTEGER_CST)
5125 break;
5126 /* We can replace A with C1 in this case. */
5127 arg1 = fold_convert_loc (loc, type, arg01);
5128 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5129
5130 case LT_EXPR:
5131 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5132 MIN_EXPR, to preserve the signedness of the comparison. */
5133 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5134 OEP_ONLY_CONST)
5135 && operand_equal_p (arg01,
5136 const_binop (PLUS_EXPR, arg2,
5137 build_int_cst (type, 1)),
5138 OEP_ONLY_CONST))
5139 {
5140 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5141 fold_convert_loc (loc, TREE_TYPE (arg00),
5142 arg2));
5143 return pedantic_non_lvalue_loc (loc,
5144 fold_convert_loc (loc, type, tem));
5145 }
5146 break;
5147
5148 case LE_EXPR:
5149 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5150 as above. */
5151 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5152 OEP_ONLY_CONST)
5153 && operand_equal_p (arg01,
5154 const_binop (MINUS_EXPR, arg2,
5155 build_int_cst (type, 1)),
5156 OEP_ONLY_CONST))
5157 {
5158 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5159 fold_convert_loc (loc, TREE_TYPE (arg00),
5160 arg2));
5161 return pedantic_non_lvalue_loc (loc,
5162 fold_convert_loc (loc, type, tem));
5163 }
5164 break;
5165
5166 case GT_EXPR:
5167 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5168 MAX_EXPR, to preserve the signedness of the comparison. */
5169 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5170 OEP_ONLY_CONST)
5171 && operand_equal_p (arg01,
5172 const_binop (MINUS_EXPR, arg2,
5173 build_int_cst (type, 1)),
5174 OEP_ONLY_CONST))
5175 {
5176 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5177 fold_convert_loc (loc, TREE_TYPE (arg00),
5178 arg2));
5179 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5180 }
5181 break;
5182
5183 case GE_EXPR:
5184 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5185 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5186 OEP_ONLY_CONST)
5187 && operand_equal_p (arg01,
5188 const_binop (PLUS_EXPR, arg2,
5189 build_int_cst (type, 1)),
5190 OEP_ONLY_CONST))
5191 {
5192 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5193 fold_convert_loc (loc, TREE_TYPE (arg00),
5194 arg2));
5195 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5196 }
5197 break;
5198 case NE_EXPR:
5199 break;
5200 default:
5201 gcc_unreachable ();
5202 }
5203
5204 return NULL_TREE;
5205 }
5206
5207
5208 \f
5209 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5210 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5211 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5212 false) >= 2)
5213 #endif
5214
5215 /* EXP is some logical combination of boolean tests. See if we can
5216 merge it into some range test. Return the new tree if so. */
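/* E.g. ch >= '0' && ch <= '9' can become (unsigned) (ch - '0') <= 9.  */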
5217
5218 static tree
5219 fold_range_test (location_t loc, enum tree_code code, tree type,
5220 tree op0, tree op1)
5221 {
5222 int or_op = (code == TRUTH_ORIF_EXPR
5223 || code == TRUTH_OR_EXPR);
5224 int in0_p, in1_p, in_p;
5225 tree low0, low1, low, high0, high1, high;
5226 bool strict_overflow_p = false;
5227 tree tem, lhs, rhs;
5228 const char * const warnmsg = G_("assuming signed overflow does not occur "
5229 "when simplifying range test");
5230
5231 if (!INTEGRAL_TYPE_P (type))
5232 return 0;
5233
5234 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5235 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5236
5237 /* If this is an OR operation, invert both sides; we will invert
5238 again at the end. */
5239 if (or_op)
5240 in0_p = ! in0_p, in1_p = ! in1_p;
5241
5242 /* If both expressions are the same, if we can merge the ranges, and we
5243 can build the range test, return it or it inverted. If one of the
5244 ranges is always true or always false, consider it to be the same
5245 expression as the other. */
5246 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5247 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5248 in1_p, low1, high1)
5249 && 0 != (tem = (build_range_check (loc, type,
5250 lhs != 0 ? lhs
5251 : rhs != 0 ? rhs : integer_zero_node,
5252 in_p, low, high))))
5253 {
5254 if (strict_overflow_p)
5255 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5256 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5257 }
5258
5259 /* On machines where the branch cost is expensive, if this is a
5260 short-circuited branch and the underlying object on both sides
5261 is the same, make a non-short-circuit operation. */
5262 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5263 && lhs != 0 && rhs != 0
5264 && (code == TRUTH_ANDIF_EXPR
5265 || code == TRUTH_ORIF_EXPR)
5266 && operand_equal_p (lhs, rhs, 0))
5267 {
5268 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5269 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5270 which cases we can't do this. */
5271 if (simple_operand_p (lhs))
5272 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5273 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5274 type, op0, op1);
5275
5276 else if (!lang_hooks.decls.global_bindings_p ()
5277 && !CONTAINS_PLACEHOLDER_P (lhs))
5278 {
5279 tree common = save_expr (lhs);
5280
5281 if (0 != (lhs = build_range_check (loc, type, common,
5282 or_op ? ! in0_p : in0_p,
5283 low0, high0))
5284 && (0 != (rhs = build_range_check (loc, type, common,
5285 or_op ? ! in1_p : in1_p,
5286 low1, high1))))
5287 {
5288 if (strict_overflow_p)
5289 fold_overflow_warning (warnmsg,
5290 WARN_STRICT_OVERFLOW_COMPARISON);
5291 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5292 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5293 type, lhs, rhs);
5294 }
5295 }
5296 }
5297
5298 return 0;
5299 }
5300 \f
5301 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5302 bit value. Arrange things so the extra bits will be set to zero if and
5303    only if C is sign-extended to its full width.  If MASK is nonzero,
5304 it is an INTEGER_CST that should be AND'ed with the extra bits. */
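/* E.g. with P == 8 in a 16-bit type, C == 0xff80 (already sign-extended)
   yields 0x0080, whose extra bits are zero, while C == 0x0080 yields
   0xff80, whose extra bits are not.  */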
5305
5306 static tree
5307 unextend (tree c, int p, int unsignedp, tree mask)
5308 {
5309 tree type = TREE_TYPE (c);
5310 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5311 tree temp;
5312
5313 if (p == modesize || unsignedp)
5314 return c;
5315
5316 /* We work by getting just the sign bit into the low-order bit, then
5317 into the high-order bit, then sign-extend. We then XOR that value
5318 with C. */
5319 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5320
5321 /* We must use a signed type in order to get an arithmetic right shift.
5322 However, we must also avoid introducing accidental overflows, so that
5323 a subsequent call to integer_zerop will work. Hence we must
5324 do the type conversion here. At this point, the constant is either
5325 zero or one, and the conversion to a signed type can never overflow.
5326 We could get an overflow if this conversion is done anywhere else. */
5327 if (TYPE_UNSIGNED (type))
5328 temp = fold_convert (signed_type_for (type), temp);
5329
5330 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5331 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5332 if (mask != 0)
5333 temp = const_binop (BIT_AND_EXPR, temp,
5334 fold_convert (TREE_TYPE (c), mask));
5335 /* If necessary, convert the type back to match the type of C. */
5336 if (TYPE_UNSIGNED (type))
5337 temp = fold_convert (type, temp);
5338
5339 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5340 }
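
/* A worked illustration of unextend (editorial sketch, not from the
   original sources): take MODESIZE == 8, P == 4 and C == 0xFA, i.e. the
   4-bit value 0b1010 sign-extended to 8 bits, with MASK null:

     temp = bit 3 of C = 1
     temp <<= 7           =>  0x80  (in the signed type)
     temp >>= 8 - 4 - 1   =>  0xF0  (arithmetic shift)
     C ^ temp = 0xFA ^ 0xF0 = 0x0A

   The extra bits come out zero precisely because C was sign-extended;
   starting instead from the zero-extended C == 0x0A yields 0xFA, whose
   extra bits are nonzero. */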
5341 \f
5342 /* For an expression that has the form
5343 (A && B) || ~B
5344 or
5345 (A || B) && ~B,
5346 we can drop one of the inner expressions and simplify to
5347 A || ~B
5348 or
5349 A && ~B.
5350 LOC is the location of the resulting expression. OP is the inner
5351 logical operation (the left-hand side in the examples above), while CMPOP
5352 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5353 removing a condition that guards another, as in
5354 (A != NULL && A->...) || A == NULL
5355 which we must not transform. If RHS_ONLY is true, only eliminate the
5356 right-most operand of the inner logical operation. */
5357
5358 static tree
5359 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5360 bool rhs_only)
5361 {
5362 tree type = TREE_TYPE (cmpop);
5363 enum tree_code code = TREE_CODE (cmpop);
5364 enum tree_code truthop_code = TREE_CODE (op);
5365 tree lhs = TREE_OPERAND (op, 0);
5366 tree rhs = TREE_OPERAND (op, 1);
5367 tree orig_lhs = lhs, orig_rhs = rhs;
5368 enum tree_code rhs_code = TREE_CODE (rhs);
5369 enum tree_code lhs_code = TREE_CODE (lhs);
5370 enum tree_code inv_code;
5371
5372 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5373 return NULL_TREE;
5374
5375 if (TREE_CODE_CLASS (code) != tcc_comparison)
5376 return NULL_TREE;
5377
5378 if (rhs_code == truthop_code)
5379 {
5380 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5381 if (newrhs != NULL_TREE)
5382 {
5383 rhs = newrhs;
5384 rhs_code = TREE_CODE (rhs);
5385 }
5386 }
5387 if (lhs_code == truthop_code && !rhs_only)
5388 {
5389 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5390 if (newlhs != NULL_TREE)
5391 {
5392 lhs = newlhs;
5393 lhs_code = TREE_CODE (lhs);
5394 }
5395 }
5396
5397 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5398 if (inv_code == rhs_code
5399 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5400 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5401 return lhs;
5402 if (!rhs_only && inv_code == lhs_code
5403 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5404 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5405 return rhs;
5406 if (rhs != orig_rhs || lhs != orig_lhs)
5407 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5408 lhs, rhs);
5409 return NULL_TREE;
5410 }
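
/* An example of the transformation (editorial sketch): when NaNs need
   not be honored, folding

     (a < b && c == d) || c != d

   finds that the inverse of c != d is c == d, drops that arm, and yields
   (a < b) || c != d. With RHS_ONLY set only the rightmost arm of OP may
   be dropped, which is what keeps the guard in
   (A != NULL && A->f > 0) || A == NULL from being removed. */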
5411
5412 /* Find ways of folding logical expressions of LHS and RHS:
5413 Try to merge two comparisons to the same innermost item.
5414 Look for range tests like "ch >= '0' && ch <= '9'".
5415 Look for combinations of simple terms on machines with expensive branches
5416 and evaluate the RHS unconditionally.
5417
5418 For example, if we have p->a == 2 && p->b == 4 and we can make an
5419 object large enough to span both A and B, we can do this with a comparison
5420 against the object ANDed with a mask.
5421
5422 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5423 operations to do this with one comparison.
5424
5425 We check for both normal comparisons and the BIT_AND_EXPRs made by
5426 this function and the one above.
5427
5428 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5429 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5430
5431 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5432 two operands.
5433
5434 We return the simplified tree or 0 if no optimization is possible. */
5435
5436 static tree
5437 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5438 tree lhs, tree rhs)
5439 {
5440 /* If this is the "or" of two comparisons, we can do something if
5441 the comparisons are NE_EXPR. If this is the "and", we can do something
5442 if the comparisons are EQ_EXPR. I.e.,
5443 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5444
5445 WANTED_CODE is this operation code. For single bit fields, we can
5446 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5447 comparison for one-bit fields. */
5448
5449 enum tree_code wanted_code;
5450 enum tree_code lcode, rcode;
5451 tree ll_arg, lr_arg, rl_arg, rr_arg;
5452 tree ll_inner, lr_inner, rl_inner, rr_inner;
5453 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5454 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5455 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5456 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5457 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5458 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5459 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5460 machine_mode lnmode, rnmode;
5461 tree ll_mask, lr_mask, rl_mask, rr_mask;
5462 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5463 tree l_const, r_const;
5464 tree lntype, rntype, result;
5465 HOST_WIDE_INT first_bit, end_bit;
5466 int volatilep;
5467
5468 /* Start by getting the comparison codes. Fail if anything is volatile.
5469 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5470 it were surrounded with a NE_EXPR. */
5471
5472 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5473 return 0;
5474
5475 lcode = TREE_CODE (lhs);
5476 rcode = TREE_CODE (rhs);
5477
5478 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5479 {
5480 lhs = build2 (NE_EXPR, truth_type, lhs,
5481 build_int_cst (TREE_TYPE (lhs), 0));
5482 lcode = NE_EXPR;
5483 }
5484
5485 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5486 {
5487 rhs = build2 (NE_EXPR, truth_type, rhs,
5488 build_int_cst (TREE_TYPE (rhs), 0));
5489 rcode = NE_EXPR;
5490 }
5491
5492 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5493 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5494 return 0;
5495
5496 ll_arg = TREE_OPERAND (lhs, 0);
5497 lr_arg = TREE_OPERAND (lhs, 1);
5498 rl_arg = TREE_OPERAND (rhs, 0);
5499 rr_arg = TREE_OPERAND (rhs, 1);
5500
5501 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5502 if (simple_operand_p (ll_arg)
5503 && simple_operand_p (lr_arg))
5504 {
5505 if (operand_equal_p (ll_arg, rl_arg, 0)
5506 && operand_equal_p (lr_arg, rr_arg, 0))
5507 {
5508 result = combine_comparisons (loc, code, lcode, rcode,
5509 truth_type, ll_arg, lr_arg);
5510 if (result)
5511 return result;
5512 }
5513 else if (operand_equal_p (ll_arg, rr_arg, 0)
5514 && operand_equal_p (lr_arg, rl_arg, 0))
5515 {
5516 result = combine_comparisons (loc, code, lcode,
5517 swap_tree_comparison (rcode),
5518 truth_type, ll_arg, lr_arg);
5519 if (result)
5520 return result;
5521 }
5522 }
5523
5524 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5525 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5526
5527 /* If the RHS can be evaluated unconditionally and its operands are
5528 simple, it is a win to evaluate the RHS unconditionally on machines
5529 with expensive branches. In that case, this isn't a comparison
5530 that can be merged. */
5531
5532 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5533 false) >= 2
5534 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5535 && simple_operand_p (rl_arg)
5536 && simple_operand_p (rr_arg))
5537 {
5538 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5539 if (code == TRUTH_OR_EXPR
5540 && lcode == NE_EXPR && integer_zerop (lr_arg)
5541 && rcode == NE_EXPR && integer_zerop (rr_arg)
5542 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5543 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5544 return build2_loc (loc, NE_EXPR, truth_type,
5545 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5546 ll_arg, rl_arg),
5547 build_int_cst (TREE_TYPE (ll_arg), 0));
5548
5549 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5550 if (code == TRUTH_AND_EXPR
5551 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5552 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5553 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5554 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5555 return build2_loc (loc, EQ_EXPR, truth_type,
5556 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5557 ll_arg, rl_arg),
5558 build_int_cst (TREE_TYPE (ll_arg), 0));
5559 }
5560
5561 /* See if the comparisons can be merged. Then get all the parameters for
5562 each side. */
5563
5564 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5565 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5566 return 0;
5567
5568 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5569 volatilep = 0;
5570 ll_inner = decode_field_reference (loc, ll_arg,
5571 &ll_bitsize, &ll_bitpos, &ll_mode,
5572 &ll_unsignedp, &ll_reversep, &volatilep,
5573 &ll_mask, &ll_and_mask);
5574 lr_inner = decode_field_reference (loc, lr_arg,
5575 &lr_bitsize, &lr_bitpos, &lr_mode,
5576 &lr_unsignedp, &lr_reversep, &volatilep,
5577 &lr_mask, &lr_and_mask);
5578 rl_inner = decode_field_reference (loc, rl_arg,
5579 &rl_bitsize, &rl_bitpos, &rl_mode,
5580 &rl_unsignedp, &rl_reversep, &volatilep,
5581 &rl_mask, &rl_and_mask);
5582 rr_inner = decode_field_reference (loc, rr_arg,
5583 &rr_bitsize, &rr_bitpos, &rr_mode,
5584 &rr_unsignedp, &rr_reversep, &volatilep,
5585 &rr_mask, &rr_and_mask);
5586
5587 /* The inner operation on the lhs of each comparison must be the
5588 same if we are to be able to do anything.
5589 Then see if we have constants. If not, the same must be true for
5590 the rhs's. */
5591 if (volatilep
5592 || ll_reversep != rl_reversep
5593 || ll_inner == 0 || rl_inner == 0
5594 || ! operand_equal_p (ll_inner, rl_inner, 0))
5595 return 0;
5596
5597 if (TREE_CODE (lr_arg) == INTEGER_CST
5598 && TREE_CODE (rr_arg) == INTEGER_CST)
5599 {
5600 l_const = lr_arg, r_const = rr_arg;
5601 lr_reversep = ll_reversep;
5602 }
5603 else if (lr_reversep != rr_reversep
5604 || lr_inner == 0 || rr_inner == 0
5605 || ! operand_equal_p (lr_inner, rr_inner, 0))
5606 return 0;
5607 else
5608 l_const = r_const = 0;
5609
5610 /* If either comparison code is not correct for our logical operation,
5611 fail. However, we can convert a one-bit comparison against zero into
5612 the opposite comparison against that bit being set in the field. */
5613
5614 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5615 if (lcode != wanted_code)
5616 {
5617 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5618 {
5619 /* Make the left operand unsigned, since we are only interested
5620 in the value of one bit. Otherwise we are doing the wrong
5621 thing below. */
5622 ll_unsignedp = 1;
5623 l_const = ll_mask;
5624 }
5625 else
5626 return 0;
5627 }
5628
5629 /* This is analogous to the code for l_const above. */
5630 if (rcode != wanted_code)
5631 {
5632 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5633 {
5634 rl_unsignedp = 1;
5635 r_const = rl_mask;
5636 }
5637 else
5638 return 0;
5639 }
5640
5641 /* See if we can find a mode that contains both fields being compared on
5642 the left. If we can't, fail. Otherwise, update all constants and masks
5643 to be relative to a field of that size. */
5644 first_bit = MIN (ll_bitpos, rl_bitpos);
5645 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5646 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5647 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5648 volatilep);
5649 if (lnmode == VOIDmode)
5650 return 0;
5651
5652 lnbitsize = GET_MODE_BITSIZE (lnmode);
5653 lnbitpos = first_bit & ~ (lnbitsize - 1);
5654 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5655 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5656
5657 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5658 {
5659 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5660 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5661 }
5662
5663 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5664 size_int (xll_bitpos));
5665 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5666 size_int (xrl_bitpos));
5667
5668 if (l_const)
5669 {
5670 l_const = fold_convert_loc (loc, lntype, l_const);
5671 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5672 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5673 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5674 fold_build1_loc (loc, BIT_NOT_EXPR,
5675 lntype, ll_mask))))
5676 {
5677 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5678
5679 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5680 }
5681 }
5682 if (r_const)
5683 {
5684 r_const = fold_convert_loc (loc, lntype, r_const);
5685 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5686 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5687 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5688 fold_build1_loc (loc, BIT_NOT_EXPR,
5689 lntype, rl_mask))))
5690 {
5691 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5692
5693 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5694 }
5695 }
5696
5697 /* If the right sides are not constant, do the same for them. Also,
5698 disallow this optimization if a size or signedness mismatch occurs
5699 between the left and right sides. */
5700 if (l_const == 0)
5701 {
5702 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5703 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5704 /* Make sure the two fields on the right
5705 correspond to the left without being swapped. */
5706 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5707 return 0;
5708
5709 first_bit = MIN (lr_bitpos, rr_bitpos);
5710 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5711 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5712 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5713 volatilep);
5714 if (rnmode == VOIDmode)
5715 return 0;
5716
5717 rnbitsize = GET_MODE_BITSIZE (rnmode);
5718 rnbitpos = first_bit & ~ (rnbitsize - 1);
5719 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5720 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5721
5722 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5723 {
5724 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5725 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5726 }
5727
5728 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5729 rntype, lr_mask),
5730 size_int (xlr_bitpos));
5731 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5732 rntype, rr_mask),
5733 size_int (xrr_bitpos));
5734
5735 /* Make a mask that corresponds to both fields being compared.
5736 Do this for both items being compared. If the operands are the
5737 same size and the bits being compared are in the same position
5738 then we can do this by masking both and comparing the masked
5739 results. */
5740 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5741 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5742 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5743 {
5744 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5745 ll_unsignedp || rl_unsignedp, ll_reversep);
5746 if (! all_ones_mask_p (ll_mask, lnbitsize))
5747 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5748
5749 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5750 lr_unsignedp || rr_unsignedp, lr_reversep);
5751 if (! all_ones_mask_p (lr_mask, rnbitsize))
5752 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5753
5754 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5755 }
5756
5757 /* There is still another way we can do something: If both pairs of
5758 fields being compared are adjacent, we may be able to make a wider
5759 field containing them both.
5760
5761 Note that we still must mask the lhs/rhs expressions. Furthermore,
5762 the mask must be shifted to account for the shift done by
5763 make_bit_field_ref. */
5764 if ((ll_bitsize + ll_bitpos == rl_bitpos
5765 && lr_bitsize + lr_bitpos == rr_bitpos)
5766 || (ll_bitpos == rl_bitpos + rl_bitsize
5767 && lr_bitpos == rr_bitpos + rr_bitsize))
5768 {
5769 tree type;
5770
5771 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5772 ll_bitsize + rl_bitsize,
5773 MIN (ll_bitpos, rl_bitpos),
5774 ll_unsignedp, ll_reversep);
5775 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5776 lr_bitsize + rr_bitsize,
5777 MIN (lr_bitpos, rr_bitpos),
5778 lr_unsignedp, lr_reversep);
5779
5780 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5781 size_int (MIN (xll_bitpos, xrl_bitpos)));
5782 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5783 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5784
5785 /* Convert to the smaller type before masking out unwanted bits. */
5786 type = lntype;
5787 if (lntype != rntype)
5788 {
5789 if (lnbitsize > rnbitsize)
5790 {
5791 lhs = fold_convert_loc (loc, rntype, lhs);
5792 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5793 type = rntype;
5794 }
5795 else if (lnbitsize < rnbitsize)
5796 {
5797 rhs = fold_convert_loc (loc, lntype, rhs);
5798 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5799 type = lntype;
5800 }
5801 }
5802
5803 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5804 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5805
5806 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5807 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5808
5809 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5810 }
5811
5812 return 0;
5813 }
5814
5815 /* Handle the case of comparisons with constants. If there is something in
5816 common between the masks, those bits of the constants must be the same.
5817 If not, the condition is always false. Test for this to avoid generating
5818 incorrect code below. */
5819 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5820 if (! integer_zerop (result)
5821 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5822 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5823 {
5824 if (wanted_code == NE_EXPR)
5825 {
5826 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5827 return constant_boolean_node (true, truth_type);
5828 }
5829 else
5830 {
5831 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5832 return constant_boolean_node (false, truth_type);
5833 }
5834 }
5835
5836 /* Construct the expression we will return. First get the component
5837 reference we will make. Unless the mask is all ones the width of
5838 that field, perform the mask operation. Then compare with the
5839 merged constant. */
5840 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5841 ll_unsignedp || rl_unsignedp, ll_reversep);
5842
5843 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5844 if (! all_ones_mask_p (ll_mask, lnbitsize))
5845 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5846
5847 return build2_loc (loc, wanted_code, truth_type, result,
5848 const_binop (BIT_IOR_EXPR, l_const, r_const));
5849 }
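
/* A concrete case of the merging above (editorial sketch): given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   on a target where A occupies the low nibble, the test
   p->a == 2 && p->b == 4 loads the containing byte once and becomes,
   roughly, a comparison of that byte against (2 | 4 << 4) == 0x42;
   the mask covers the whole byte, so the BIT_AND_EXPR is omitted. */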
5850 \f
5851 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5852 constant. */
5853
5854 static tree
5855 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5856 tree op0, tree op1)
5857 {
5858 tree arg0 = op0;
5859 enum tree_code op_code;
5860 tree comp_const;
5861 tree minmax_const;
5862 int consts_equal, consts_lt;
5863 tree inner;
5864
5865 STRIP_SIGN_NOPS (arg0);
5866
5867 op_code = TREE_CODE (arg0);
5868 minmax_const = TREE_OPERAND (arg0, 1);
5869 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5870 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5871 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5872 inner = TREE_OPERAND (arg0, 0);
5873
5874 /* If something does not permit us to optimize, return NULL_TREE. */
5875 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5876 || TREE_CODE (comp_const) != INTEGER_CST
5877 || TREE_OVERFLOW (comp_const)
5878 || TREE_CODE (minmax_const) != INTEGER_CST
5879 || TREE_OVERFLOW (minmax_const))
5880 return NULL_TREE;
5881
5882 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5883 and GT_EXPR, doing the rest with recursive calls using logical
5884 simplifications. */
5885 switch (code)
5886 {
5887 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5888 {
5889 tree tem
5890 = optimize_minmax_comparison (loc,
5891 invert_tree_comparison (code, false),
5892 type, op0, op1);
5893 if (tem)
5894 return invert_truthvalue_loc (loc, tem);
5895 return NULL_TREE;
5896 }
5897
5898 case GE_EXPR:
5899 return
5900 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5901 optimize_minmax_comparison
5902 (loc, EQ_EXPR, type, arg0, comp_const),
5903 optimize_minmax_comparison
5904 (loc, GT_EXPR, type, arg0, comp_const));
5905
5906 case EQ_EXPR:
5907 if (op_code == MAX_EXPR && consts_equal)
5908 /* MAX (X, 0) == 0 -> X <= 0 */
5909 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5910
5911 else if (op_code == MAX_EXPR && consts_lt)
5912 /* MAX (X, 0) == 5 -> X == 5 */
5913 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5914
5915 else if (op_code == MAX_EXPR)
5916 /* MAX (X, 0) == -1 -> false */
5917 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5918
5919 else if (consts_equal)
5920 /* MIN (X, 0) == 0 -> X >= 0 */
5921 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5922
5923 else if (consts_lt)
5924 /* MIN (X, 0) == 5 -> false */
5925 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5926
5927 else
5928 /* MIN (X, 0) == -1 -> X == -1 */
5929 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5930
5931 case GT_EXPR:
5932 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5933 /* MAX (X, 0) > 0 -> X > 0
5934 MAX (X, 0) > 5 -> X > 5 */
5935 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5936
5937 else if (op_code == MAX_EXPR)
5938 /* MAX (X, 0) > -1 -> true */
5939 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5940
5941 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5942 /* MIN (X, 0) > 0 -> false
5943 MIN (X, 0) > 5 -> false */
5944 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5945
5946 else
5947 /* MIN (X, 0) > -1 -> X > -1 */
5948 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5949
5950 default:
5951 return NULL_TREE;
5952 }
5953 }
5954 \f
5955 /* T is an integer expression that is being multiplied, divided, or taken
5956 modulo (CODE says which, and what kind of divide or modulus) by a
5957 constant C. See if we can eliminate that operation by folding it with
5958 other operations already in T. WIDE_TYPE, if non-null, is a type that
5959 should be used for the computation if wider than our type.
5960
5961 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5962 (X * 2) + (Y * 4). We must, however, be assured that either the original
5963 expression would not overflow or that overflow is undefined for the type
5964 in the language in question.
5965
5966 If we return a non-null expression, it is an equivalent form of the
5967 original computation, but need not be in the original type.
5968
5969 We set *STRICT_OVERFLOW_P to true if the return value depends on
5970 signed overflow being undefined. Otherwise we do not change
5971 *STRICT_OVERFLOW_P. */
5972
5973 static tree
5974 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5975 bool *strict_overflow_p)
5976 {
5977 /* To avoid exponential search depth, refuse to allow recursion past
5978 three levels. Beyond that (1) it's highly unlikely that we'll find
5979 something interesting and (2) we've probably processed it before
5980 when we built the inner expression. */
5981
5982 static int depth;
5983 tree ret;
5984
5985 if (depth > 3)
5986 return NULL;
5987
5988 depth++;
5989 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5990 depth--;
5991
5992 return ret;
5993 }
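
/* A worked instance (editorial sketch), concretizing the example in the
   comment above: for T == X * 8 + Y * 16, C == 4 and CODE a division,
   the PLUS_EXPR case succeeds on both operands because 8 and 16 are
   multiples of 4, producing X * 2 + Y * 4. This is valid only because
   signed overflow in the original expression is taken to be undefined,
   which is why *STRICT_OVERFLOW_P gets set. */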
5994
5995 static tree
5996 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5997 bool *strict_overflow_p)
5998 {
5999 tree type = TREE_TYPE (t);
6000 enum tree_code tcode = TREE_CODE (t);
6001 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6002 > GET_MODE_SIZE (TYPE_MODE (type)))
6003 ? wide_type : type);
6004 tree t1, t2;
6005 int same_p = tcode == code;
6006 tree op0 = NULL_TREE, op1 = NULL_TREE;
6007 bool sub_strict_overflow_p;
6008
6009 /* Don't deal with constants of zero here; they confuse the code below. */
6010 if (integer_zerop (c))
6011 return NULL_TREE;
6012
6013 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6014 op0 = TREE_OPERAND (t, 0);
6015
6016 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6017 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6018
6019 /* Note that we need not handle conditional operations here since fold
6020 already handles those cases. So just do arithmetic here. */
6021 switch (tcode)
6022 {
6023 case INTEGER_CST:
6024 /* For a constant, we can always simplify if we are a multiply
6025 or (for divide and modulus) if it is a multiple of our constant. */
6026 if (code == MULT_EXPR
6027 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6028 {
6029 tree tem = const_binop (code, fold_convert (ctype, t),
6030 fold_convert (ctype, c));
6031 /* If the multiplication overflowed to INT_MIN then we lost sign
6032 information on it and a subsequent multiplication might
6033 spuriously overflow. See PR68142. */
6034 if (TREE_OVERFLOW (tem)
6035 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6036 return NULL_TREE;
6037 return tem;
6038 }
6039 break;
6040
6041 CASE_CONVERT: case NON_LVALUE_EXPR:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0)
6044 || UNARY_CLASS_P (op0)
6045 || BINARY_CLASS_P (op0)
6046 || VL_EXP_CLASS_P (op0)
6047 || EXPRESSION_CLASS_P (op0))
6048 /* ... and has wrapping overflow, and its type is smaller
6049 than ctype, then we cannot pass through as widening. */
6050 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6051 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6052 && (TYPE_PRECISION (ctype)
6053 > TYPE_PRECISION (TREE_TYPE (op0))))
6054 /* ... or this is a truncation (t is narrower than op0),
6055 then we cannot pass through this narrowing. */
6056 || (TYPE_PRECISION (type)
6057 < TYPE_PRECISION (TREE_TYPE (op0)))
6058 /* ... or signedness changes for division or modulus,
6059 then we cannot pass through this conversion. */
6060 || (code != MULT_EXPR
6061 && (TYPE_UNSIGNED (ctype)
6062 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6063 /* ... or has undefined overflow while the converted to
6064 type has not, we cannot do the operation in the inner type
6065 as that would introduce undefined overflow. */
6066 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6067 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6068 && !TYPE_OVERFLOW_UNDEFINED (type))))
6069 break;
6070
6071 /* Pass the constant down and see if we can make a simplification. If
6072 we can, replace this expression with the inner simplification for
6073 possible later conversion to our or some other type. */
6074 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6075 && TREE_CODE (t2) == INTEGER_CST
6076 && !TREE_OVERFLOW (t2)
6077 && (0 != (t1 = extract_muldiv (op0, t2, code,
6078 code == MULT_EXPR
6079 ? ctype : NULL_TREE,
6080 strict_overflow_p))))
6081 return t1;
6082 break;
6083
6084 case ABS_EXPR:
6085 /* If widening the type changes it from signed to unsigned, then we
6086 must avoid building ABS_EXPR itself as unsigned. */
6087 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6088 {
6089 tree cstype = (*signed_type_for) (ctype);
6090 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6091 != 0)
6092 {
6093 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6094 return fold_convert (ctype, t1);
6095 }
6096 break;
6097 }
6098 /* If the constant is negative, we cannot simplify this. */
6099 if (tree_int_cst_sgn (c) == -1)
6100 break;
6101 /* FALLTHROUGH */
6102 case NEGATE_EXPR:
6103 /* For division and modulus, type can't be unsigned, as e.g.
6104 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6105 For signed types, even with wrapping overflow, this is fine. */
6106 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6107 break;
6108 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6109 != 0)
6110 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6111 break;
6112
6113 case MIN_EXPR: case MAX_EXPR:
6114 /* If widening the type changes the signedness, then we can't perform
6115 this optimization as that changes the result. */
6116 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6117 break;
6118
6119 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6120 sub_strict_overflow_p = false;
6121 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6122 &sub_strict_overflow_p)) != 0
6123 && (t2 = extract_muldiv (op1, c, code, wide_type,
6124 &sub_strict_overflow_p)) != 0)
6125 {
6126 if (tree_int_cst_sgn (c) < 0)
6127 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6128 if (sub_strict_overflow_p)
6129 *strict_overflow_p = true;
6130 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6131 fold_convert (ctype, t2));
6132 }
6133 break;
6134
6135 case LSHIFT_EXPR: case RSHIFT_EXPR:
6136 /* If the second operand is constant, this is a multiplication
6137 or floor division, by a power of two, so we can treat it that
6138 way unless the multiplier or divisor overflows. Signed
6139 left-shift overflow is implementation-defined rather than
6140 undefined in C90, so do not convert signed left shift into
6141 multiplication. */
6142 if (TREE_CODE (op1) == INTEGER_CST
6143 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6144 /* const_binop may not detect overflow correctly,
6145 so check for it explicitly here. */
6146 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6147 && 0 != (t1 = fold_convert (ctype,
6148 const_binop (LSHIFT_EXPR,
6149 size_one_node,
6150 op1)))
6151 && !TREE_OVERFLOW (t1))
6152 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6153 ? MULT_EXPR : FLOOR_DIV_EXPR,
6154 ctype,
6155 fold_convert (ctype, op0),
6156 t1),
6157 c, code, wide_type, strict_overflow_p);
6158 break;
6159
6160 case PLUS_EXPR: case MINUS_EXPR:
6161 /* See if we can eliminate the operation on both sides. If we can, we
6162 can return a new PLUS or MINUS. If we can't, the only remaining
6163 cases where we can do anything are if the second operand is a
6164 constant. */
6165 sub_strict_overflow_p = false;
6166 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6167 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6168 if (t1 != 0 && t2 != 0
6169 && (code == MULT_EXPR
6170 /* If not multiplication, we can only do this if both operands
6171 are divisible by c. */
6172 || (multiple_of_p (ctype, op0, c)
6173 && multiple_of_p (ctype, op1, c))))
6174 {
6175 if (sub_strict_overflow_p)
6176 *strict_overflow_p = true;
6177 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6178 fold_convert (ctype, t2));
6179 }
6180
6181 /* If this was a subtraction, negate OP1 and turn it into an addition.
6182 This simplifies the logic below. */
6183 if (tcode == MINUS_EXPR)
6184 {
6185 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6186 /* If OP1 was not easily negatable, the constant may be OP0. */
6187 if (TREE_CODE (op0) == INTEGER_CST)
6188 {
6189 std::swap (op0, op1);
6190 std::swap (t1, t2);
6191 }
6192 }
6193
6194 if (TREE_CODE (op1) != INTEGER_CST)
6195 break;
6196
6197 /* If either OP1 or C is negative, this optimization is not safe for
6198 some of the division and remainder types, while for others we need
6199 to change the code. */
6200 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6201 {
6202 if (code == CEIL_DIV_EXPR)
6203 code = FLOOR_DIV_EXPR;
6204 else if (code == FLOOR_DIV_EXPR)
6205 code = CEIL_DIV_EXPR;
6206 else if (code != MULT_EXPR
6207 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6208 break;
6209 }
6210
6211 /* If it's a multiply, or a division/modulus operation on a multiple
6212 of our constant, do the operation and verify it doesn't overflow. */
6213 if (code == MULT_EXPR
6214 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6215 {
6216 op1 = const_binop (code, fold_convert (ctype, op1),
6217 fold_convert (ctype, c));
6218 /* We allow the constant to overflow with wrapping semantics. */
6219 if (op1 == 0
6220 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6221 break;
6222 }
6223 else
6224 break;
6225
6226 /* If we have an unsigned type, we cannot widen the operation since it
6227 will change the result if the original computation overflowed. */
6228 if (TYPE_UNSIGNED (ctype) && ctype != type)
6229 break;
6230
6231 /* If we were able to eliminate our operation from the first side,
6232 apply our operation to the second side and reform the PLUS. */
6233 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6234 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6235
6236 /* The last case is if we are a multiply. In that case, we can
6237 apply the distributive law to commute the multiply and addition
6238 if the multiplication of the constants doesn't overflow
6239 and overflow is defined. With undefined overflow
6240 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6241 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6242 return fold_build2 (tcode, ctype,
6243 fold_build2 (code, ctype,
6244 fold_convert (ctype, op0),
6245 fold_convert (ctype, c)),
6246 op1);
6247
6248 break;
6249
6250 case MULT_EXPR:
6251 /* We have a special case here if we are doing something like
6252 (C * 8) % 4 since we know that's zero. */
6253 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6254 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6255 /* If the multiplication can overflow we cannot optimize this. */
6256 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6257 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6258 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6259 {
6260 *strict_overflow_p = true;
6261 return omit_one_operand (type, integer_zero_node, op0);
6262 }
6263
6264 /* ... fall through ... */
6265
6266 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6267 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6268 /* If we can extract our operation from the LHS, do so and return a
6269 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6270 do something only if the second operand is a constant. */
6271 if (same_p
6272 && (t1 = extract_muldiv (op0, c, code, wide_type,
6273 strict_overflow_p)) != 0)
6274 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6275 fold_convert (ctype, op1));
6276 else if (tcode == MULT_EXPR && code == MULT_EXPR
6277 && (t1 = extract_muldiv (op1, c, code, wide_type,
6278 strict_overflow_p)) != 0)
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6280 fold_convert (ctype, t1));
6281 else if (TREE_CODE (op1) != INTEGER_CST)
6282 return 0;
6283
6284 /* If these are the same operation types, we can associate them
6285 assuming no overflow. */
6286 if (tcode == code)
6287 {
6288 bool overflow_p = false;
6289 bool overflow_mul_p;
6290 signop sign = TYPE_SIGN (ctype);
6291 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6292 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6293 if (overflow_mul_p
6294 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6295 overflow_p = true;
6296 if (!overflow_p)
6297 {
6298 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6299 TYPE_SIGN (TREE_TYPE (op1)));
6300 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6301 wide_int_to_tree (ctype, mul));
6302 }
6303 }
6304
6305 /* If these operations "cancel" each other, we have the main
6306 optimizations of this pass, which occur when either constant is a
6307 multiple of the other, in which case we replace this with an
6308 operation of either CODE or TCODE.
6309
6310 If we have an unsigned type, we cannot do this since it will change
6311 the result if the original computation overflowed. */
6312 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6313 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6314 || (tcode == MULT_EXPR
6315 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6316 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6317 && code != MULT_EXPR)))
6318 {
6319 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6320 {
6321 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6322 *strict_overflow_p = true;
6323 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6324 fold_convert (ctype,
6325 const_binop (TRUNC_DIV_EXPR,
6326 op1, c)));
6327 }
6328 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6329 {
6330 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6331 *strict_overflow_p = true;
6332 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6333 fold_convert (ctype,
6334 const_binop (TRUNC_DIV_EXPR,
6335 c, op1)));
6336 }
6337 }
6338 break;
6339
6340 default:
6341 break;
6342 }
6343
6344 return 0;
6345 }
6346 \f
6347 /* Return a node which has the indicated constant VALUE (either 0 or
6348 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6349 and is of the indicated TYPE. */
6350
6351 tree
6352 constant_boolean_node (bool value, tree type)
6353 {
6354 if (type == integer_type_node)
6355 return value ? integer_one_node : integer_zero_node;
6356 else if (type == boolean_type_node)
6357 return value ? boolean_true_node : boolean_false_node;
6358 else if (TREE_CODE (type) == VECTOR_TYPE)
6359 return build_vector_from_val (type,
6360 build_int_cst (TREE_TYPE (type),
6361 value ? -1 : 0));
6362 else
6363 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6364 }
6365
6366
6367 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6368 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6369 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6370 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6371 COND is the first argument to CODE; otherwise (as in the example
6372 given here), it is the second argument. TYPE is the type of the
6373 original expression. Return NULL_TREE if no simplification is
6374 possible. */
6375
6376 static tree
6377 fold_binary_op_with_conditional_arg (location_t loc,
6378 enum tree_code code,
6379 tree type, tree op0, tree op1,
6380 tree cond, tree arg, int cond_first_p)
6381 {
6382 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6383 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6384 tree test, true_value, false_value;
6385 tree lhs = NULL_TREE;
6386 tree rhs = NULL_TREE;
6387 enum tree_code cond_code = COND_EXPR;
6388
6389 if (TREE_CODE (cond) == COND_EXPR
6390 || TREE_CODE (cond) == VEC_COND_EXPR)
6391 {
6392 test = TREE_OPERAND (cond, 0);
6393 true_value = TREE_OPERAND (cond, 1);
6394 false_value = TREE_OPERAND (cond, 2);
6395 /* If this operand throws an exception, then it does not make
6396 sense to try to perform a logical or arithmetic operation
6397 involving it. */
6398 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6399 lhs = true_value;
6400 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6401 rhs = false_value;
6402 }
6403 else
6404 {
6405 tree testtype = TREE_TYPE (cond);
6406 test = cond;
6407 true_value = constant_boolean_node (true, testtype);
6408 false_value = constant_boolean_node (false, testtype);
6409 }
6410
6411 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6412 cond_code = VEC_COND_EXPR;
6413
6414 /* This transformation is only worthwhile if we don't have to wrap ARG
6415 in a SAVE_EXPR and the operation can be simplified without recursing
6416 on at least one of the branches once it is pushed inside the COND_EXPR. */
6417 if (!TREE_CONSTANT (arg)
6418 && (TREE_SIDE_EFFECTS (arg)
6419 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6420 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6421 return NULL_TREE;
6422
6423 arg = fold_convert_loc (loc, arg_type, arg);
6424 if (lhs == 0)
6425 {
6426 true_value = fold_convert_loc (loc, cond_type, true_value);
6427 if (cond_first_p)
6428 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6429 else
6430 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6431 }
6432 if (rhs == 0)
6433 {
6434 false_value = fold_convert_loc (loc, cond_type, false_value);
6435 if (cond_first_p)
6436 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6437 else
6438 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6439 }
6440
6441 /* Check that we have simplified at least one of the branches. */
6442 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6443 return NULL_TREE;
6444
6445 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6446 }
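
/* For instance (editorial sketch): with a constant ARG, 5 + (b ? x : 3)
   distributes to b ? 5 + x : 5 + 3, and the second arm folds to 8.
   With a non-constant ARG and constant arms the routine deliberately
   bails out, since neither new arm would simplify. */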
6447
6448 \f
6449 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6450
6451 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6452 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6453 ADDEND is the same as X.
6454
6455 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6456 and finite. The problematic cases are when X is zero, and its mode
6457 has signed zeros. In the case of rounding towards -infinity,
6458 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6459 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6460
6461 bool
6462 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6463 {
6464 if (!real_zerop (addend))
6465 return false;
6466
6467 /* Don't allow the fold with -fsignaling-nans. */
6468 if (HONOR_SNANS (element_mode (type)))
6469 return false;
6470
6471 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6472 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6473 return true;
6474
6475 /* In a vector or complex, we would need to check the sign of all zeros. */
6476 if (TREE_CODE (addend) != REAL_CST)
6477 return false;
6478
6479 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6480 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6481 negate = !negate;
6482
6483 /* The mode has signed zeros, and we have to honor their sign.
6484 In this situation, there is only one case we can return true for.
6485 X - 0 is the same as X unless rounding towards -infinity is
6486 supported. */
6487 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6488 }
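
/* Illustration (editorial sketch): with signed zeros honored but
   sign-dependent rounding not, X - 0.0 folds to X (the NEGATE case),
   and X + (-0.0) is treated the same way because NEGATE is flipped.
   X + 0.0 is not folded, since (-0.0) + 0.0 is +0.0, not -0.0. */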
6489
6490 /* Subroutine of fold() that optimizes comparisons of a division by
6491 a nonzero integer constant against an integer constant, i.e.
6492 X/C1 op C2.
6493
6494 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6495 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6496 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6497
6498 The function returns the constant folded tree if a simplification
6499 can be made, and NULL_TREE otherwise. */
6500
6501 static tree
6502 fold_div_compare (location_t loc,
6503 enum tree_code code, tree type, tree arg0, tree arg1)
6504 {
6505 tree prod, tmp, hi, lo;
6506 tree arg00 = TREE_OPERAND (arg0, 0);
6507 tree arg01 = TREE_OPERAND (arg0, 1);
6508 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6509 bool neg_overflow = false;
6510 bool overflow;
6511
6512 /* We have to do this the hard way to detect unsigned overflow.
6513 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6514 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6515 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6516 neg_overflow = false;
6517
6518 if (sign == UNSIGNED)
6519 {
6520 tmp = int_const_binop (MINUS_EXPR, arg01,
6521 build_int_cst (TREE_TYPE (arg01), 1));
6522 lo = prod;
6523
6524 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6525 val = wi::add (prod, tmp, sign, &overflow);
6526 hi = force_fit_type (TREE_TYPE (arg00), val,
6527 -1, overflow | TREE_OVERFLOW (prod));
6528 }
6529 else if (tree_int_cst_sgn (arg01) >= 0)
6530 {
6531 tmp = int_const_binop (MINUS_EXPR, arg01,
6532 build_int_cst (TREE_TYPE (arg01), 1));
6533 switch (tree_int_cst_sgn (arg1))
6534 {
6535 case -1:
6536 neg_overflow = true;
6537 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6538 hi = prod;
6539 break;
6540
6541 case 0:
6542 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6543 hi = tmp;
6544 break;
6545
6546 case 1:
6547 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6548 lo = prod;
6549 break;
6550
6551 default:
6552 gcc_unreachable ();
6553 }
6554 }
6555 else
6556 {
6557 /* A negative divisor reverses the relational operators. */
6558 code = swap_tree_comparison (code);
6559
6560 tmp = int_const_binop (PLUS_EXPR, arg01,
6561 build_int_cst (TREE_TYPE (arg01), 1));
6562 switch (tree_int_cst_sgn (arg1))
6563 {
6564 case -1:
6565 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6566 lo = prod;
6567 break;
6568
6569 case 0:
6570 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6571 lo = tmp;
6572 break;
6573
6574 case 1:
6575 neg_overflow = true;
6576 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6577 hi = prod;
6578 break;
6579
6580 default:
6581 gcc_unreachable ();
6582 }
6583 }
6584
6585 switch (code)
6586 {
6587 case EQ_EXPR:
6588 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6589 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6590 if (TREE_OVERFLOW (hi))
6591 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6592 if (TREE_OVERFLOW (lo))
6593 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6594 return build_range_check (loc, type, arg00, 1, lo, hi);
6595
6596 case NE_EXPR:
6597 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6598 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6599 if (TREE_OVERFLOW (hi))
6600 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6601 if (TREE_OVERFLOW (lo))
6602 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6603 return build_range_check (loc, type, arg00, 0, lo, hi);
6604
6605 case LT_EXPR:
6606 if (TREE_OVERFLOW (lo))
6607 {
6608 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6609 return omit_one_operand_loc (loc, type, tmp, arg00);
6610 }
6611 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6612
6613 case LE_EXPR:
6614 if (TREE_OVERFLOW (hi))
6615 {
6616 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6617 return omit_one_operand_loc (loc, type, tmp, arg00);
6618 }
6619 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6620
6621 case GT_EXPR:
6622 if (TREE_OVERFLOW (hi))
6623 {
6624 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6625 return omit_one_operand_loc (loc, type, tmp, arg00);
6626 }
6627 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6628
6629 case GE_EXPR:
6630 if (TREE_OVERFLOW (lo))
6631 {
6632 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6633 return omit_one_operand_loc (loc, type, tmp, arg00);
6634 }
6635 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6636
6637 default:
6638 break;
6639 }
6640
6641 return NULL_TREE;
6642 }
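
/* Worked example (editorial sketch): for signed X, folding X / 3 == 2
   computes prod == 6, lo == 6 and hi == 6 + (3 - 1) == 8, and returns
   the range check 6 <= X && X <= 8, exactly the values whose truncating
   division by 3 yields 2. The NE_EXPR form returns the inverted range. */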
6643
6644
6645 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6646 equality/inequality test, then return a simplified form of the test
6647 using a sign test. Otherwise return NULL. TYPE is the desired
6648 result type. */
6649
6650 static tree
6651 fold_single_bit_test_into_sign_test (location_t loc,
6652 enum tree_code code, tree arg0, tree arg1,
6653 tree result_type)
6654 {
6655 /* If this is testing a single bit, we can optimize the test. */
6656 if ((code == NE_EXPR || code == EQ_EXPR)
6657 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6658 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6659 {
6660 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6661 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6662 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6663
6664 if (arg00 != NULL_TREE
6665 /* This is only a win if casting to a signed type is cheap,
6666 i.e. when arg00's type is not a partial mode. */
6667 && TYPE_PRECISION (TREE_TYPE (arg00))
6668 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6669 {
6670 tree stype = signed_type_for (TREE_TYPE (arg00));
6671 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6672 result_type,
6673 fold_convert_loc (loc, stype, arg00),
6674 build_int_cst (stype, 0));
6675 }
6676 }
6677
6678 return NULL_TREE;
6679 }
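
/* For example (editorial sketch): with a 32-bit unsigned X,
   (X & 0x80000000) != 0 tests the sign bit and becomes (int) X < 0,
   while the == 0 form becomes (int) X >= 0. */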
6680
6681 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6682 equality/inequality test, then return a simplified form of
6683 the test using shifts and logical operations. Otherwise return
6684 NULL. TYPE is the desired result type. */
6685
6686 tree
6687 fold_single_bit_test (location_t loc, enum tree_code code,
6688 tree arg0, tree arg1, tree result_type)
6689 {
6690 /* If this is testing a single bit, we can optimize the test. */
6691 if ((code == NE_EXPR || code == EQ_EXPR)
6692 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6693 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6694 {
6695 tree inner = TREE_OPERAND (arg0, 0);
6696 tree type = TREE_TYPE (arg0);
6697 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6698 machine_mode operand_mode = TYPE_MODE (type);
6699 int ops_unsigned;
6700 tree signed_type, unsigned_type, intermediate_type;
6701 tree tem, one;
6702
6703 /* First, see if we can fold the single bit test into a sign-bit
6704 test. */
6705 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6706 result_type);
6707 if (tem)
6708 return tem;
6709
6710 /* Otherwise we have (A & C) != 0 where C is a single bit,
6711 convert that into ((A >> C2) & 1), where C2 = log2(C).
6712 Similarly for (A & C) == 0. */
6713
6714 /* If INNER is a right shift of a constant and it plus BITNUM does
6715 not overflow, adjust BITNUM and INNER. */
6716 if (TREE_CODE (inner) == RSHIFT_EXPR
6717 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6718 && bitnum < TYPE_PRECISION (type)
6719 && wi::ltu_p (TREE_OPERAND (inner, 1),
6720 TYPE_PRECISION (type) - bitnum))
6721 {
6722 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6723 inner = TREE_OPERAND (inner, 0);
6724 }
6725
6726 /* If we are going to be able to omit the AND below, we must do our
6727 operations as unsigned. If we must use the AND, we have a choice.
6728 Normally unsigned is faster, but for some machines signed is. */
6729 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6730 && !flag_syntax_only) ? 0 : 1;
6731
6732 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6733 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6734 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6735 inner = fold_convert_loc (loc, intermediate_type, inner);
6736
6737 if (bitnum != 0)
6738 inner = build2 (RSHIFT_EXPR, intermediate_type,
6739 inner, size_int (bitnum));
6740
6741 one = build_int_cst (intermediate_type, 1);
6742
6743 if (code == EQ_EXPR)
6744 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6745
6746 /* Put the AND last so it can combine with more things. */
6747 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6748
6749 /* Make sure to return the proper type. */
6750 inner = fold_convert_loc (loc, result_type, inner);
6751
6752 return inner;
6753 }
6754 return NULL_TREE;
6755 }
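
/* For example (editorial sketch): (X & 8) != 0 tests bit 3 and becomes
   ((X >> 3) & 1) in the intermediate type, while (X & 8) == 0 XORs in a
   one first, giving (((X >> 3) ^ 1) & 1). */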
6756
6757 /* Check whether we are allowed to reorder operands arg0 and arg1,
6758 such that the evaluation of arg1 occurs before arg0. */
6759
6760 static bool
6761 reorder_operands_p (const_tree arg0, const_tree arg1)
6762 {
6763 if (! flag_evaluation_order)
6764 return true;
6765 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6766 return true;
6767 return ! TREE_SIDE_EFFECTS (arg0)
6768 && ! TREE_SIDE_EFFECTS (arg1);
6769 }
6770
6771 /* Test whether it is preferable to swap two operands, ARG0 and
6772 ARG1, for example because ARG0 is an integer constant and ARG1
6773 isn't. If REORDER is true, only recommend swapping if we can
6774 evaluate the operands in reverse order. */
6775
6776 bool
6777 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6778 {
6779 if (CONSTANT_CLASS_P (arg1))
6780 return 0;
6781 if (CONSTANT_CLASS_P (arg0))
6782 return 1;
6783
6784 STRIP_NOPS (arg0);
6785 STRIP_NOPS (arg1);
6786
6787 if (TREE_CONSTANT (arg1))
6788 return 0;
6789 if (TREE_CONSTANT (arg0))
6790 return 1;
6791
6792 if (reorder && flag_evaluation_order
6793 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6794 return 0;
6795
6796 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6797 for commutative and comparison operators. Ensuring a canonical
6798 form allows the optimizers to find additional redundancies without
6799 having to explicitly check for both orderings. */
6800 if (TREE_CODE (arg0) == SSA_NAME
6801 && TREE_CODE (arg1) == SSA_NAME
6802 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6803 return 1;
6804
6805 /* Put SSA_NAMEs last. */
6806 if (TREE_CODE (arg1) == SSA_NAME)
6807 return 0;
6808 if (TREE_CODE (arg0) == SSA_NAME)
6809 return 1;
6810
6811 /* Put variables last. */
6812 if (DECL_P (arg1))
6813 return 0;
6814 if (DECL_P (arg0))
6815 return 1;
6816
6817 return 0;
6818 }
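
/* The net effect at the callers (editorial note): commutative trees get
   a canonical operand order, so for instance 1 + x is rebuilt as x + 1
   and a pair of SSA_NAMEs is ordered by version number, letting later
   passes match each expression in just one form. */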
6819
6820
6821 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6822 means A >= Y && A != MAX, but in this case we know that
6823 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6824
6825 static tree
6826 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6827 {
6828 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6829
6830 if (TREE_CODE (bound) == LT_EXPR)
6831 a = TREE_OPERAND (bound, 0);
6832 else if (TREE_CODE (bound) == GT_EXPR)
6833 a = TREE_OPERAND (bound, 1);
6834 else
6835 return NULL_TREE;
6836
6837 typea = TREE_TYPE (a);
6838 if (!INTEGRAL_TYPE_P (typea)
6839 && !POINTER_TYPE_P (typea))
6840 return NULL_TREE;
6841
6842 if (TREE_CODE (ineq) == LT_EXPR)
6843 {
6844 a1 = TREE_OPERAND (ineq, 1);
6845 y = TREE_OPERAND (ineq, 0);
6846 }
6847 else if (TREE_CODE (ineq) == GT_EXPR)
6848 {
6849 a1 = TREE_OPERAND (ineq, 0);
6850 y = TREE_OPERAND (ineq, 1);
6851 }
6852 else
6853 return NULL_TREE;
6854
6855 if (TREE_TYPE (a1) != typea)
6856 return NULL_TREE;
6857
6858 if (POINTER_TYPE_P (typea))
6859 {
6860 /* Convert the pointer values to integers before taking the difference. */
6861 tree ta = fold_convert_loc (loc, ssizetype, a);
6862 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6863 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6864 }
6865 else
6866 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6867
6868 if (!diff || !integer_onep (diff))
6869 return NULL_TREE;
6870
6871 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6872 }
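
/* For instance (editorial sketch): in i < n && i + 1 > j, BOUND == i < n
   and INEQ == i + 1 > j; the difference (i + 1) - i folds to 1, so the
   inequality operand is replaced by i >= j. */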
6873
6874 /* Fold a sum or difference containing at least one multiplication.
6875 Returns the folded tree or NULL if no simplification could be made. */
6876
6877 static tree
6878 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6879 tree arg0, tree arg1)
6880 {
6881 tree arg00, arg01, arg10, arg11;
6882 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6883
6884 /* (A * C) +- (B * C) -> (A+-B) * C.
6885 (A * C) +- A -> A * (C+-1).
6886 We are most concerned about the case where C is a constant,
6887 but other combinations show up during loop reduction. Since
6888 it is not difficult, try all four possibilities. */
6889
6890 if (TREE_CODE (arg0) == MULT_EXPR)
6891 {
6892 arg00 = TREE_OPERAND (arg0, 0);
6893 arg01 = TREE_OPERAND (arg0, 1);
6894 }
6895 else if (TREE_CODE (arg0) == INTEGER_CST)
6896 {
6897 arg00 = build_one_cst (type);
6898 arg01 = arg0;
6899 }
6900 else
6901 {
6902 /* We cannot generate constant 1 for fract. */
6903 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6904 return NULL_TREE;
6905 arg00 = arg0;
6906 arg01 = build_one_cst (type);
6907 }
6908 if (TREE_CODE (arg1) == MULT_EXPR)
6909 {
6910 arg10 = TREE_OPERAND (arg1, 0);
6911 arg11 = TREE_OPERAND (arg1, 1);
6912 }
6913 else if (TREE_CODE (arg1) == INTEGER_CST)
6914 {
6915 arg10 = build_one_cst (type);
6916 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6917 the purpose of this canonicalization. */
6918 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6919 && negate_expr_p (arg1)
6920 && code == PLUS_EXPR)
6921 {
6922 arg11 = negate_expr (arg1);
6923 code = MINUS_EXPR;
6924 }
6925 else
6926 arg11 = arg1;
6927 }
6928 else
6929 {
6930 /* We cannot generate constant 1 for fract. */
6931 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6932 return NULL_TREE;
6933 arg10 = arg1;
6934 arg11 = build_one_cst (type);
6935 }
6936 same = NULL_TREE;
6937
6938 if (operand_equal_p (arg01, arg11, 0))
6939 same = arg01, alt0 = arg00, alt1 = arg10;
6940 else if (operand_equal_p (arg00, arg10, 0))
6941 same = arg00, alt0 = arg01, alt1 = arg11;
6942 else if (operand_equal_p (arg00, arg11, 0))
6943 same = arg00, alt0 = arg01, alt1 = arg10;
6944 else if (operand_equal_p (arg01, arg10, 0))
6945 same = arg01, alt0 = arg00, alt1 = arg11;
6946
6947 /* No identical multiplicands; see if we can find a common
6948 power-of-two factor in non-power-of-two multiplies. This
6949 can help in multi-dimensional array access. */
6950 else if (tree_fits_shwi_p (arg01)
6951 && tree_fits_shwi_p (arg11))
6952 {
6953 HOST_WIDE_INT int01, int11, tmp;
6954 bool swap = false;
6955 tree maybe_same;
6956 int01 = tree_to_shwi (arg01);
6957 int11 = tree_to_shwi (arg11);
6958
6959 /* Move min of absolute values to int11. */
6960 if (absu_hwi (int01) < absu_hwi (int11))
6961 {
6962 tmp = int01, int01 = int11, int11 = tmp;
6963 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6964 maybe_same = arg01;
6965 swap = true;
6966 }
6967 else
6968 maybe_same = arg11;
6969
6970 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6971 /* The remainder should not be a constant, otherwise we
6972 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6973 increase the number of multiplications necessary. */
6974 && TREE_CODE (arg10) != INTEGER_CST)
6975 {
6976 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6977 build_int_cst (TREE_TYPE (arg00),
6978 int01 / int11));
6979 alt1 = arg10;
6980 same = maybe_same;
6981 if (swap)
6982 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6983 }
6984 }
6985
6986 if (same)
6987 return fold_build2_loc (loc, MULT_EXPR, type,
6988 fold_build2_loc (loc, code, type,
6989 fold_convert_loc (loc, type, alt0),
6990 fold_convert_loc (loc, type, alt1)),
6991 fold_convert_loc (loc, type, same));
6992
6993 return NULL_TREE;
6994 }
6995
6996 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6997 specified by EXPR into the buffer PTR of length LEN bytes.
6998 Return the number of bytes placed in the buffer, or zero
6999 upon failure. */
7000
7001 static int
7002 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7003 {
7004 tree type = TREE_TYPE (expr);
7005 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7006 int byte, offset, word, words;
7007 unsigned char value;
7008
7009 if ((off == -1 && total_bytes > len)
7010 || off >= total_bytes)
7011 return 0;
7012 if (off == -1)
7013 off = 0;
7014 words = total_bytes / UNITS_PER_WORD;
7015
7016 for (byte = 0; byte < total_bytes; byte++)
7017 {
7018 int bitpos = byte * BITS_PER_UNIT;
7019 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7020 number of bytes. */
7021 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7022
7023 if (total_bytes > UNITS_PER_WORD)
7024 {
7025 word = byte / UNITS_PER_WORD;
7026 if (WORDS_BIG_ENDIAN)
7027 word = (words - 1) - word;
7028 offset = word * UNITS_PER_WORD;
7029 if (BYTES_BIG_ENDIAN)
7030 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7031 else
7032 offset += byte % UNITS_PER_WORD;
7033 }
7034 else
7035 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7036 if (offset >= off
7037 && offset - off < len)
7038 ptr[offset - off] = value;
7039 }
7040 return MIN (len, total_bytes - off);
7041 }
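
/* Example: on a little-endian target with at least 4-byte words, the
32-bit INTEGER_CST 0x11223344 is encoded as the bytes
{ 0x44, 0x33, 0x22, 0x11 }; with OFF == 1 and LEN == 2 only
{ 0x33, 0x22 } are stored and 2 is returned. */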
7042
7043
7044 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7045 specified by EXPR into the buffer PTR of length LEN bytes.
7046 Return the number of bytes placed in the buffer, or zero
7047 upon failure. */
7048
7049 static int
7050 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7051 {
7052 tree type = TREE_TYPE (expr);
7053 machine_mode mode = TYPE_MODE (type);
7054 int total_bytes = GET_MODE_SIZE (mode);
7055 FIXED_VALUE_TYPE value;
7056 tree i_value, i_type;
7057
7058 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7059 return 0;
7060
7061 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7062
7063 if (NULL_TREE == i_type
7064 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7065 return 0;
7066
7067 value = TREE_FIXED_CST (expr);
7068 i_value = double_int_to_tree (i_type, value.data);
7069
7070 return native_encode_int (i_value, ptr, len, off);
7071 }
7072
7073
7074 /* Subroutine of native_encode_expr. Encode the REAL_CST
7075 specified by EXPR into the buffer PTR of length LEN bytes.
7076 Return the number of bytes placed in the buffer, or zero
7077 upon failure. */
7078
7079 static int
7080 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7081 {
7082 tree type = TREE_TYPE (expr);
7083 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7084 int byte, offset, word, words, bitpos;
7085 unsigned char value;
7086
7087 /* There are always 32 bits in each long, no matter the size of
7088 the host's long. We handle floating point representations with
7089 up to 192 bits. */
7090 long tmp[6];
7091
7092 if ((off == -1 && total_bytes > len)
7093 || off >= total_bytes)
7094 return 0;
7095 if (off == -1)
7096 off = 0;
7097 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7098
7099 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7100
7101 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7102 bitpos += BITS_PER_UNIT)
7103 {
7104 byte = (bitpos / BITS_PER_UNIT) & 3;
7105 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7106
7107 if (UNITS_PER_WORD < 4)
7108 {
7109 word = byte / UNITS_PER_WORD;
7110 if (WORDS_BIG_ENDIAN)
7111 word = (words - 1) - word;
7112 offset = word * UNITS_PER_WORD;
7113 if (BYTES_BIG_ENDIAN)
7114 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7115 else
7116 offset += byte % UNITS_PER_WORD;
7117 }
7118 else
7119 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7120 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7121 if (offset >= off
7122 && offset - off < len)
7123 ptr[offset - off] = value;
7124 }
7125 return MIN (len, total_bytes - off);
7126 }
7127
7128 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7129 specified by EXPR into the buffer PTR of length LEN bytes.
7130 Return the number of bytes placed in the buffer, or zero
7131 upon failure. */
7132
7133 static int
7134 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7135 {
7136 int rsize, isize;
7137 tree part;
7138
7139 part = TREE_REALPART (expr);
7140 rsize = native_encode_expr (part, ptr, len, off);
7141 if (off == -1
7142 && rsize == 0)
7143 return 0;
7144 part = TREE_IMAGPART (expr);
7145 if (off != -1)
7146 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7147 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7148 if (off == -1
7149 && isize != rsize)
7150 return 0;
7151 return rsize + isize;
7152 }
7153
7154
7155 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7159
7160 static int
7161 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7162 {
7163 unsigned i, count;
7164 int size, offset;
7165 tree itype, elem;
7166
7167 offset = 0;
7168 count = VECTOR_CST_NELTS (expr);
7169 itype = TREE_TYPE (TREE_TYPE (expr));
7170 size = GET_MODE_SIZE (TYPE_MODE (itype));
7171 for (i = 0; i < count; i++)
7172 {
7173 if (off >= size)
7174 {
7175 off -= size;
7176 continue;
7177 }
7178 elem = VECTOR_CST_ELT (expr, i);
7179 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7180 if ((off == -1 && res != size)
7181 || res == 0)
7182 return 0;
7183 offset += res;
7184 if (offset >= len)
7185 return offset;
7186 if (off != -1)
7187 off = 0;
7188 }
7189 return offset;
7190 }
7191
7192
7193 /* Subroutine of native_encode_expr. Encode the STRING_CST
7194 specified by EXPR into the buffer PTR of length LEN bytes.
7195 Return the number of bytes placed in the buffer, or zero
7196 upon failure. */
7197
7198 static int
7199 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7200 {
7201 tree type = TREE_TYPE (expr);
7202 HOST_WIDE_INT total_bytes;
7203
7204 if (TREE_CODE (type) != ARRAY_TYPE
7205 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7206 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7207 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7208 return 0;
7209 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7210 if ((off == -1 && total_bytes > len)
7211 || off >= total_bytes)
7212 return 0;
7213 if (off == -1)
7214 off = 0;
7215 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7216 {
7217 int written = 0;
7218 if (off < TREE_STRING_LENGTH (expr))
7219 {
7220 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7221 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7222 }
7223 memset (ptr + written, 0,
7224 MIN (total_bytes - written, len - written));
7225 }
7226 else
7227 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7228 return MIN (total_bytes - off, len);
7229 }
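
/* Example: encoding a STRING_CST "hi" stored in a char array larger
than TREE_STRING_LENGTH copies the string bytes and zero-fills the
tail up to the array size (or LEN, whichever is smaller). */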
7230
7231
7232 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7233 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7234 buffer PTR of length LEN bytes. If OFF is not -1 then start
7235 the encoding at byte offset OFF and encode at most LEN bytes.
7236 Return the number of bytes placed in the buffer, or zero upon failure. */
7237
7238 int
7239 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7240 {
7241 /* We don't support starting at a negative offset, and -1 is special. */
7242 if (off < -1)
7243 return 0;
7244
7245 switch (TREE_CODE (expr))
7246 {
7247 case INTEGER_CST:
7248 return native_encode_int (expr, ptr, len, off);
7249
7250 case REAL_CST:
7251 return native_encode_real (expr, ptr, len, off);
7252
7253 case FIXED_CST:
7254 return native_encode_fixed (expr, ptr, len, off);
7255
7256 case COMPLEX_CST:
7257 return native_encode_complex (expr, ptr, len, off);
7258
7259 case VECTOR_CST:
7260 return native_encode_vector (expr, ptr, len, off);
7261
7262 case STRING_CST:
7263 return native_encode_string (expr, ptr, len, off);
7264
7265 default:
7266 return 0;
7267 }
7268 }
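
/* A minimal illustrative sketch (hypothetical helper, not part of the
compiler): pairing native_encode_expr with native_interpret_expr
reinterprets a constant's byte image in another type, which is
exactly the round trip fold_view_convert_expr performs below. */
#if 0
static tree
reinterpret_cst_sketch (tree cst, tree new_type)
{
  unsigned char buf[64];	/* Enough for 512-bit values.  */
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  return len ? native_interpret_expr (new_type, buf, len) : NULL_TREE;
}
#endif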
7269
7270
7271 /* Subroutine of native_interpret_expr. Interpret the contents of
7272 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7273 If the buffer cannot be interpreted, return NULL_TREE. */
7274
7275 static tree
7276 native_interpret_int (tree type, const unsigned char *ptr, int len)
7277 {
7278 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7279
7280 if (total_bytes > len
7281 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7282 return NULL_TREE;
7283
7284 wide_int result = wi::from_buffer (ptr, total_bytes);
7285
7286 return wide_int_to_tree (type, result);
7287 }
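
/* Example: on a little-endian target the bytes { 0x34, 0x12 }
interpreted as a 16-bit integer type yield the INTEGER_CST 0x1234. */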
7288
7289
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7293
7294 static tree
7295 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7296 {
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 double_int result;
7299 FIXED_VALUE_TYPE fixed_value;
7300
7301 if (total_bytes > len
7302 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7303 return NULL_TREE;
7304
7305 result = double_int::from_buffer (ptr, total_bytes);
7306 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7307
7308 return build_fixed (type, fixed_value);
7309 }
7310
7311
7312 /* Subroutine of native_interpret_expr. Interpret the contents of
7313 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7314 If the buffer cannot be interpreted, return NULL_TREE. */
7315
7316 static tree
7317 native_interpret_real (tree type, const unsigned char *ptr, int len)
7318 {
7319 machine_mode mode = TYPE_MODE (type);
7320 int total_bytes = GET_MODE_SIZE (mode);
7321 unsigned char value;
7322 /* There are always 32 bits in each long, no matter the size of
7323 the host's long. We handle floating point representations with
7324 up to 192 bits. */
7325 REAL_VALUE_TYPE r;
7326 long tmp[6];
7327
7329 if (total_bytes > len || total_bytes > 24)
7330 return NULL_TREE;
7331 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7332
7333 memset (tmp, 0, sizeof (tmp));
7334 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7335 bitpos += BITS_PER_UNIT)
7336 {
7337 /* Both OFFSET and BYTE index within a long;
7338 bitpos indexes the whole float. */
7339 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7340 if (UNITS_PER_WORD < 4)
7341 {
7342 int word = byte / UNITS_PER_WORD;
7343 if (WORDS_BIG_ENDIAN)
7344 word = (words - 1) - word;
7345 offset = word * UNITS_PER_WORD;
7346 if (BYTES_BIG_ENDIAN)
7347 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7348 else
7349 offset += byte % UNITS_PER_WORD;
7350 }
7351 else
7352 {
7353 offset = byte;
7354 if (BYTES_BIG_ENDIAN)
7355 {
7356 /* Reverse bytes within each long, or within the entire float
7357 if it's smaller than a long (for HFmode). */
7358 offset = MIN (3, total_bytes - 1) - offset;
7359 gcc_assert (offset >= 0);
7360 }
7361 }
7362 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7363
7364 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7365 }
7366
7367 real_from_target (&r, tmp, mode);
7368 return build_real (type, r);
7369 }
7370
7371
7372 /* Subroutine of native_interpret_expr. Interpret the contents of
7373 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7374 If the buffer cannot be interpreted, return NULL_TREE. */
7375
7376 static tree
7377 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7378 {
7379 tree etype, rpart, ipart;
7380 int size;
7381
7382 etype = TREE_TYPE (type);
7383 size = GET_MODE_SIZE (TYPE_MODE (etype));
7384 if (size * 2 > len)
7385 return NULL_TREE;
7386 rpart = native_interpret_expr (etype, ptr, size);
7387 if (!rpart)
7388 return NULL_TREE;
7389 ipart = native_interpret_expr (etype, ptr+size, size);
7390 if (!ipart)
7391 return NULL_TREE;
7392 return build_complex (type, rpart, ipart);
7393 }
7394
7395
7396 /* Subroutine of native_interpret_expr. Interpret the contents of
7397 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7398 If the buffer cannot be interpreted, return NULL_TREE. */
7399
7400 static tree
7401 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7402 {
7403 tree etype, elem;
7404 int i, size, count;
7405 tree *elements;
7406
7407 etype = TREE_TYPE (type);
7408 size = GET_MODE_SIZE (TYPE_MODE (etype));
7409 count = TYPE_VECTOR_SUBPARTS (type);
7410 if (size * count > len)
7411 return NULL_TREE;
7412
7413 elements = XALLOCAVEC (tree, count);
7414 for (i = count - 1; i >= 0; i--)
7415 {
7416 elem = native_interpret_expr (etype, ptr+(i*size), size);
7417 if (!elem)
7418 return NULL_TREE;
7419 elements[i] = elem;
7420 }
7421 return build_vector (type, elements);
7422 }
7423
7424
7425 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a constant of type TYPE. For
7427 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7428 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7429 return NULL_TREE. */
7430
7431 tree
7432 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7433 {
7434 switch (TREE_CODE (type))
7435 {
7436 case INTEGER_TYPE:
7437 case ENUMERAL_TYPE:
7438 case BOOLEAN_TYPE:
7439 case POINTER_TYPE:
7440 case REFERENCE_TYPE:
7441 return native_interpret_int (type, ptr, len);
7442
7443 case REAL_TYPE:
7444 return native_interpret_real (type, ptr, len);
7445
7446 case FIXED_POINT_TYPE:
7447 return native_interpret_fixed (type, ptr, len);
7448
7449 case COMPLEX_TYPE:
7450 return native_interpret_complex (type, ptr, len);
7451
7452 case VECTOR_TYPE:
7453 return native_interpret_vector (type, ptr, len);
7454
7455 default:
7456 return NULL_TREE;
7457 }
7458 }
7459
7460 /* Returns true if we can interpret the contents of a native encoding
7461 as TYPE. */
7462
7463 static bool
7464 can_native_interpret_type_p (tree type)
7465 {
7466 switch (TREE_CODE (type))
7467 {
7468 case INTEGER_TYPE:
7469 case ENUMERAL_TYPE:
7470 case BOOLEAN_TYPE:
7471 case POINTER_TYPE:
7472 case REFERENCE_TYPE:
7473 case FIXED_POINT_TYPE:
7474 case REAL_TYPE:
7475 case COMPLEX_TYPE:
7476 case VECTOR_TYPE:
7477 return true;
7478 default:
7479 return false;
7480 }
7481 }
7482
7483 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7484 TYPE at compile-time. If we're unable to perform the conversion
7485 return NULL_TREE. */
7486
7487 static tree
7488 fold_view_convert_expr (tree type, tree expr)
7489 {
7490 /* We support up to 512-bit values (for V8DFmode). */
7491 unsigned char buffer[64];
7492 int len;
7493
7494 /* Check that the host and target are sane. */
7495 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7496 return NULL_TREE;
7497
7498 len = native_encode_expr (expr, buffer, sizeof (buffer));
7499 if (len == 0)
7500 return NULL_TREE;
7501
7502 return native_interpret_expr (type, buffer, len);
7503 }
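
/* Example: on an IEEE target, VIEW_CONVERT_EXPR<int>(1.0f) folds to the
INTEGER_CST 0x3f800000, the bit pattern of single-precision 1.0. */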
7504
7505 /* Build an expression for the address of T. Folds away INDIRECT_REF
7506 to avoid confusing the gimplify process. */
7507
7508 tree
7509 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7510 {
7511 /* The size of the object is not relevant when talking about its address. */
7512 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7513 t = TREE_OPERAND (t, 0);
7514
7515 if (TREE_CODE (t) == INDIRECT_REF)
7516 {
7517 t = TREE_OPERAND (t, 0);
7518
7519 if (TREE_TYPE (t) != ptrtype)
7520 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7521 }
7522 else if (TREE_CODE (t) == MEM_REF
7523 && integer_zerop (TREE_OPERAND (t, 1)))
7524 return TREE_OPERAND (t, 0);
7525 else if (TREE_CODE (t) == MEM_REF
7526 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7527 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7528 TREE_OPERAND (t, 0),
7529 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7530 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7531 {
7532 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7533
7534 if (TREE_TYPE (t) != ptrtype)
7535 t = fold_convert_loc (loc, ptrtype, t);
7536 }
7537 else
7538 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7539
7540 return t;
7541 }
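
/* Example: &*p folds back to p, and taking the address of MEM[p, 0]
likewise yields p, so the gimplifier never sees the INDIRECT_REF. */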
7542
7543 /* Build an expression for the address of T. */
7544
7545 tree
7546 build_fold_addr_expr_loc (location_t loc, tree t)
7547 {
7548 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7549
7550 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7551 }
7552
7553 /* Fold a unary expression of code CODE and type TYPE with operand
7554 OP0. Return the folded expression if folding is successful.
7555 Otherwise, return NULL_TREE. */
7556
7557 tree
7558 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7559 {
7560 tree tem;
7561 tree arg0;
7562 enum tree_code_class kind = TREE_CODE_CLASS (code);
7563
7564 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7565 && TREE_CODE_LENGTH (code) == 1);
7566
7567 arg0 = op0;
7568 if (arg0)
7569 {
7570 if (CONVERT_EXPR_CODE_P (code)
7571 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7572 {
7573 /* Don't use STRIP_NOPS, because signedness of argument type
7574 matters. */
7575 STRIP_SIGN_NOPS (arg0);
7576 }
7577 else
7578 {
7579 /* Strip any conversions that don't change the mode. This
7580 is safe for every expression, except for a comparison
7581 expression because its signedness is derived from its
7582 operands.
7583
7584 Note that this is done as an internal manipulation within
7585 the constant folder, in order to find the simplest
7586 representation of the arguments so that their form can be
7587 studied. In any case, the appropriate type conversions
7588 should be put back in the tree that will get out of the
7589 constant folder. */
7590 STRIP_NOPS (arg0);
7591 }
7592
7593 if (CONSTANT_CLASS_P (arg0))
7594 {
7595 tree tem = const_unop (code, type, arg0);
7596 if (tem)
7597 {
7598 if (TREE_TYPE (tem) != type)
7599 tem = fold_convert_loc (loc, type, tem);
7600 return tem;
7601 }
7602 }
7603 }
7604
7605 tem = generic_simplify (loc, code, type, op0);
7606 if (tem)
7607 return tem;
7608
7609 if (TREE_CODE_CLASS (code) == tcc_unary)
7610 {
7611 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7612 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7613 fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc, TREE_TYPE (op0),
7615 TREE_OPERAND (arg0, 1))));
7616 else if (TREE_CODE (arg0) == COND_EXPR)
7617 {
7618 tree arg01 = TREE_OPERAND (arg0, 1);
7619 tree arg02 = TREE_OPERAND (arg0, 2);
7620 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7621 arg01 = fold_build1_loc (loc, code, type,
7622 fold_convert_loc (loc,
7623 TREE_TYPE (op0), arg01));
7624 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7625 arg02 = fold_build1_loc (loc, code, type,
7626 fold_convert_loc (loc,
7627 TREE_TYPE (op0), arg02));
7628 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7629 arg01, arg02);
7630
7631 /* If this was a conversion, and all we did was to move it
7632 inside the COND_EXPR, bring it back out. But leave it if
7633 it is a conversion from integer to integer and the
7634 result precision is no wider than a word since such a
7635 conversion is cheap and may be optimized away by combine,
7636 while it couldn't if it were outside the COND_EXPR. Then return
7637 so we don't get into an infinite recursion loop taking the
7638 conversion out and then back in. */
7639
7640 if ((CONVERT_EXPR_CODE_P (code)
7641 || code == NON_LVALUE_EXPR)
7642 && TREE_CODE (tem) == COND_EXPR
7643 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7644 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7646 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7647 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7648 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7649 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7650 && (INTEGRAL_TYPE_P
7651 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7652 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7653 || flag_syntax_only))
7654 tem = build1_loc (loc, code, type,
7655 build3 (COND_EXPR,
7656 TREE_TYPE (TREE_OPERAND
7657 (TREE_OPERAND (tem, 1), 0)),
7658 TREE_OPERAND (tem, 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7660 TREE_OPERAND (TREE_OPERAND (tem, 2),
7661 0)));
7662 return tem;
7663 }
7664 }
7665
7666 switch (code)
7667 {
7668 case NON_LVALUE_EXPR:
7669 if (!maybe_lvalue_p (op0))
7670 return fold_convert_loc (loc, type, op0);
7671 return NULL_TREE;
7672
7673 CASE_CONVERT:
7674 case FLOAT_EXPR:
7675 case FIX_TRUNC_EXPR:
7676 if (COMPARISON_CLASS_P (op0))
7677 {
7678 /* If we have (type) (a CMP b) and type is an integral type, return a
7679 new expression involving the new type. Canonicalize
7680 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7681 non-integral type.
7682 Do not fold the result as that would not simplify further; also,
7683 folding again results in infinite recursion. */
7684 if (TREE_CODE (type) == BOOLEAN_TYPE)
7685 return build2_loc (loc, TREE_CODE (op0), type,
7686 TREE_OPERAND (op0, 0),
7687 TREE_OPERAND (op0, 1));
7688 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7689 && TREE_CODE (type) != VECTOR_TYPE)
7690 return build3_loc (loc, COND_EXPR, type, op0,
7691 constant_boolean_node (true, type),
7692 constant_boolean_node (false, type));
7693 }
7694
7695 /* Handle (T *)&A.B.C for A being of type T and B and C
7696 living at offset zero. This occurs frequently in
7697 C++ upcasting and then accessing the base. */
7698 if (TREE_CODE (op0) == ADDR_EXPR
7699 && POINTER_TYPE_P (type)
7700 && handled_component_p (TREE_OPERAND (op0, 0)))
7701 {
7702 HOST_WIDE_INT bitsize, bitpos;
7703 tree offset;
7704 machine_mode mode;
7705 int unsignedp, reversep, volatilep;
7706 tree base
7707 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7708 &offset, &mode, &unsignedp, &reversep,
7709 &volatilep, false);
7710 /* If the reference was to a (constant) zero offset, we can use
7711 the address of the base if it has the same base type
7712 as the result type and the pointer type is unqualified. */
7713 if (! offset && bitpos == 0
7714 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7715 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7716 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7717 return fold_convert_loc (loc, type,
7718 build_fold_addr_expr_loc (loc, base));
7719 }
7720
7721 if (TREE_CODE (op0) == MODIFY_EXPR
7722 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7723 /* Detect assigning a bitfield. */
7724 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7725 && DECL_BIT_FIELD
7726 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7727 {
7728 /* Don't leave an assignment inside a conversion
7729 unless assigning a bitfield. */
7730 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7731 /* First do the assignment, then return converted constant. */
7732 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7733 TREE_NO_WARNING (tem) = 1;
7734 TREE_USED (tem) = 1;
7735 return tem;
7736 }
7737
7738 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7739 constant (if x has signed type, the sign bit cannot be set
7740 in c). This folds extension into the BIT_AND_EXPR.
7741 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7742 very likely don't have maximal range for their precision and this
7743 transformation effectively doesn't preserve non-maximal ranges. */
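/* E.g. for int X, (long) (X & 0xff) becomes (long) X & 0xff: the
mask 0xff has no sign bit set, so extending X first cannot change
the result. */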
7744 if (TREE_CODE (type) == INTEGER_TYPE
7745 && TREE_CODE (op0) == BIT_AND_EXPR
7746 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7747 {
7748 tree and_expr = op0;
7749 tree and0 = TREE_OPERAND (and_expr, 0);
7750 tree and1 = TREE_OPERAND (and_expr, 1);
7751 int change = 0;
7752
7753 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7754 || (TYPE_PRECISION (type)
7755 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7756 change = 1;
7757 else if (TYPE_PRECISION (TREE_TYPE (and1))
7758 <= HOST_BITS_PER_WIDE_INT
7759 && tree_fits_uhwi_p (and1))
7760 {
7761 unsigned HOST_WIDE_INT cst;
7762
7763 cst = tree_to_uhwi (and1);
7764 cst &= HOST_WIDE_INT_M1U
7765 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7766 change = (cst == 0);
7767 if (change
7768 && !flag_syntax_only
7769 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7770 == ZERO_EXTEND))
7771 {
7772 tree uns = unsigned_type_for (TREE_TYPE (and0));
7773 and0 = fold_convert_loc (loc, uns, and0);
7774 and1 = fold_convert_loc (loc, uns, and1);
7775 }
7776 }
7777 if (change)
7778 {
7779 tem = force_fit_type (type, wi::to_widest (and1), 0,
7780 TREE_OVERFLOW (and1));
7781 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7782 fold_convert_loc (loc, type, and0), tem);
7783 }
7784 }
7785
7786 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7787 cast (T1)X will fold away. We assume that this happens when X itself
7788 is a cast. */
7789 if (POINTER_TYPE_P (type)
7790 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7791 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7792 {
7793 tree arg00 = TREE_OPERAND (arg0, 0);
7794 tree arg01 = TREE_OPERAND (arg0, 1);
7795
7796 return fold_build_pointer_plus_loc
7797 (loc, fold_convert_loc (loc, type, arg00), arg01);
7798 }
7799
7800 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7801 of the same precision, and X is an integer type not narrower than
7802 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
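/* E.g. for unsigned int X, (unsigned int) ~(int) X simplifies to ~X:
all three types share one precision, so no extension is involved. */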
7803 if (INTEGRAL_TYPE_P (type)
7804 && TREE_CODE (op0) == BIT_NOT_EXPR
7805 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7806 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7807 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7808 {
7809 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7810 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7811 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7812 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7813 fold_convert_loc (loc, type, tem));
7814 }
7815
7816 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7817 type of X and Y (integer types only). */
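/* E.g. (short) (X * Y) with int X and Y: unless short wraps on
overflow, the multiply is done in unsigned short and then converted,
so the narrowing introduces no new undefined overflow. */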
7818 if (INTEGRAL_TYPE_P (type)
7819 && TREE_CODE (op0) == MULT_EXPR
7820 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7821 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7822 {
7823 /* Be careful not to introduce new overflows. */
7824 tree mult_type;
7825 if (TYPE_OVERFLOW_WRAPS (type))
7826 mult_type = type;
7827 else
7828 mult_type = unsigned_type_for (type);
7829
7830 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7831 {
7832 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7833 fold_convert_loc (loc, mult_type,
7834 TREE_OPERAND (op0, 0)),
7835 fold_convert_loc (loc, mult_type,
7836 TREE_OPERAND (op0, 1)));
7837 return fold_convert_loc (loc, type, tem);
7838 }
7839 }
7840
7841 return NULL_TREE;
7842
7843 case VIEW_CONVERT_EXPR:
7844 if (TREE_CODE (op0) == MEM_REF)
7845 {
7846 tem = fold_build2_loc (loc, MEM_REF, type,
7847 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7848 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7849 return tem;
7850 }
7851
7852 return NULL_TREE;
7853
7854 case NEGATE_EXPR:
7855 tem = fold_negate_expr (loc, arg0);
7856 if (tem)
7857 return fold_convert_loc (loc, type, tem);
7858 return NULL_TREE;
7859
7860 case ABS_EXPR:
7861 /* Convert fabs((double)float) into (double)fabsf(float). */
7862 if (TREE_CODE (arg0) == NOP_EXPR
7863 && TREE_CODE (type) == REAL_TYPE)
7864 {
7865 tree targ0 = strip_float_extensions (arg0);
7866 if (targ0 != arg0)
7867 return fold_convert_loc (loc, type,
7868 fold_build1_loc (loc, ABS_EXPR,
7869 TREE_TYPE (targ0),
7870 targ0));
7871 }
7872 return NULL_TREE;
7873
7874 case BIT_NOT_EXPR:
7875 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7876 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7877 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7878 fold_convert_loc (loc, type,
7879 TREE_OPERAND (arg0, 0)))))
7880 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7881 fold_convert_loc (loc, type,
7882 TREE_OPERAND (arg0, 1)));
7883 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7884 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7885 fold_convert_loc (loc, type,
7886 TREE_OPERAND (arg0, 1)))))
7887 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7888 fold_convert_loc (loc, type,
7889 TREE_OPERAND (arg0, 0)), tem);
7890
7891 return NULL_TREE;
7892
7893 case TRUTH_NOT_EXPR:
7894 /* Note that the operand of this must be an int
7895 and its values must be 0 or 1.
7896 ("true" is a fixed value perhaps depending on the language,
7897 but we don't handle values other than 1 correctly yet.) */
7898 tem = fold_truth_not_expr (loc, arg0);
7899 if (!tem)
7900 return NULL_TREE;
7901 return fold_convert_loc (loc, type, tem);
7902
7903 case INDIRECT_REF:
7904 /* Fold *&X to X if X is an lvalue. */
7905 if (TREE_CODE (op0) == ADDR_EXPR)
7906 {
7907 tree op00 = TREE_OPERAND (op0, 0);
7908 if ((TREE_CODE (op00) == VAR_DECL
7909 || TREE_CODE (op00) == PARM_DECL
7910 || TREE_CODE (op00) == RESULT_DECL)
7911 && !TREE_READONLY (op00))
7912 return op00;
7913 }
7914 return NULL_TREE;
7915
7916 default:
7917 return NULL_TREE;
7918 } /* switch (code) */
7919 }
7920
7921
7922 /* If the operation was a conversion do _not_ mark a resulting constant
7923 with TREE_OVERFLOW if the original constant was not. These conversions
7924 have implementation defined behavior and retaining the TREE_OVERFLOW
7925 flag here would confuse later passes such as VRP. */
7926 tree
7927 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7928 tree type, tree op0)
7929 {
7930 tree res = fold_unary_loc (loc, code, type, op0);
7931 if (res
7932 && TREE_CODE (res) == INTEGER_CST
7933 && TREE_CODE (op0) == INTEGER_CST
7934 && CONVERT_EXPR_CODE_P (code))
7935 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7936
7937 return res;
7938 }
7939
7940 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7941 operands OP0 and OP1. LOC is the location of the resulting expression.
7942 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7943 Return the folded expression if folding is successful. Otherwise,
7944 return NULL_TREE. */
7945 static tree
7946 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7947 tree arg0, tree arg1, tree op0, tree op1)
7948 {
7949 tree tem;
7950
7951 /* We only do these simplifications if we are optimizing. */
7952 if (!optimize)
7953 return NULL_TREE;
7954
7955 /* Check for things like (A || B) && (A || C). We can convert this
7956 to A || (B && C). Note that either operator can be any of the four
7957 truth and/or operations and the transformation will still be
7958 valid. Also note that we only care about order for the
7959 ANDIF and ORIF operators. If B contains side effects, this
7960 might change the truth-value of A. */
7961 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7962 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7963 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7964 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7965 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7966 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7967 {
7968 tree a00 = TREE_OPERAND (arg0, 0);
7969 tree a01 = TREE_OPERAND (arg0, 1);
7970 tree a10 = TREE_OPERAND (arg1, 0);
7971 tree a11 = TREE_OPERAND (arg1, 1);
7972 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7973 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7974 && (code == TRUTH_AND_EXPR
7975 || code == TRUTH_OR_EXPR));
7976
7977 if (operand_equal_p (a00, a10, 0))
7978 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7979 fold_build2_loc (loc, code, type, a01, a11));
7980 else if (commutative && operand_equal_p (a00, a11, 0))
7981 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7982 fold_build2_loc (loc, code, type, a01, a10));
7983 else if (commutative && operand_equal_p (a01, a10, 0))
7984 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7985 fold_build2_loc (loc, code, type, a00, a11));
7986
7987 /* This case is tricky because we must either have commutative
7988 operators or else A10 must not have side-effects. */
7989
7990 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7991 && operand_equal_p (a01, a11, 0))
7992 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7993 fold_build2_loc (loc, code, type, a00, a10),
7994 a01);
7995 }
7996
7997 /* See if we can build a range comparison. */
7998 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7999 return tem;
8000
8001 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8002 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8003 {
8004 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8005 if (tem)
8006 return fold_build2_loc (loc, code, type, tem, arg1);
8007 }
8008
8009 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8010 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8011 {
8012 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8013 if (tem)
8014 return fold_build2_loc (loc, code, type, arg0, tem);
8015 }
8016
8017 /* Check for the possibility of merging component references. If our
8018 lhs is another similar operation, try to merge its rhs with our
8019 rhs. Then try to merge our lhs and rhs. */
8020 if (TREE_CODE (arg0) == code
8021 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8022 TREE_OPERAND (arg0, 1), arg1)))
8023 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8024
8025 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8026 return tem;
8027
8028 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8029 && (code == TRUTH_AND_EXPR
8030 || code == TRUTH_ANDIF_EXPR
8031 || code == TRUTH_OR_EXPR
8032 || code == TRUTH_ORIF_EXPR))
8033 {
8034 enum tree_code ncode, icode;
8035
8036 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8037 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8038 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8039
8040 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8041 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8042 We don't want to pack more than two leaves into a non-IF AND/OR
8043 expression.
8044 If the tree code of the left-hand operand isn't an AND/OR-IF code
8045 and is not equal to IF-CODE, then we don't want to add the
8046 right-hand operand. If the inner right-hand side of the left-hand
8047 operand has side-effects, or isn't simple, then we can't add to it,
8048 as otherwise we might destroy the if-sequence. */
8049 if (TREE_CODE (arg0) == icode
8050 && simple_operand_p_2 (arg1)
8051 /* Needed for sequence points to handle trappings, and
8052 side-effects. */
8053 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8054 {
8055 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8056 arg1);
8057 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8058 tem);
8059 }
8060 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8061 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8062 else if (TREE_CODE (arg1) == icode
8063 && simple_operand_p_2 (arg0)
8064 /* Needed for sequence points to handle trappings, and
8065 side-effects. */
8066 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8067 {
8068 tem = fold_build2_loc (loc, ncode, type,
8069 arg0, TREE_OPERAND (arg1, 0));
8070 return fold_build2_loc (loc, icode, type, tem,
8071 TREE_OPERAND (arg1, 1));
8072 }
8073 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8074 into (A OR B).
8075 For sequence point consistency, we need to check for trapping,
8076 and side-effects. */
8077 else if (code == icode && simple_operand_p_2 (arg0)
8078 && simple_operand_p_2 (arg1))
8079 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8080 }
8081
8082 return NULL_TREE;
8083 }
8084
8085 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8086 by changing CODE to reduce the magnitude of constants involved in
8087 ARG0 of the comparison.
8088 Returns a canonicalized comparison tree if a simplification was
8089 possible, otherwise returns NULL_TREE.
8090 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8091 valid if signed overflow is undefined. */
8092
8093 static tree
8094 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8095 tree arg0, tree arg1,
8096 bool *strict_overflow_p)
8097 {
8098 enum tree_code code0 = TREE_CODE (arg0);
8099 tree t, cst0 = NULL_TREE;
8100 int sgn0;
8101
8102 /* Match A +- CST code arg1. We can change this only if overflow
8103 is undefined. */
8104 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8105 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8106 /* In principle pointers also have undefined overflow behavior,
8107 but that causes problems elsewhere. */
8108 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8109 && (code0 == MINUS_EXPR
8110 || code0 == PLUS_EXPR)
8111 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8112 return NULL_TREE;
8113
8114 /* Identify the constant in arg0 and its sign. */
8115 cst0 = TREE_OPERAND (arg0, 1);
8116 sgn0 = tree_int_cst_sgn (cst0);
8117
8118 /* Overflowed constants and zero will cause problems. */
8119 if (integer_zerop (cst0)
8120 || TREE_OVERFLOW (cst0))
8121 return NULL_TREE;
8122
8123 /* See if we can reduce the magnitude of the constant in
8124 arg0 by changing the comparison code. */
8125 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8126 if (code == LT_EXPR
8127 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8128 code = LE_EXPR;
8129 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8130 else if (code == GT_EXPR
8131 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8132 code = GE_EXPR;
8133 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8134 else if (code == LE_EXPR
8135 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8136 code = LT_EXPR;
8137 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8138 else if (code == GE_EXPR
8139 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8140 code = GT_EXPR;
8141 else
8142 return NULL_TREE;
8143 *strict_overflow_p = true;
8144
8145 /* Now build the constant reduced in magnitude. But not if that
8146 would produce one outside of its type's range. */
8147 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8148 && ((sgn0 == 1
8149 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8150 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8151 || (sgn0 == -1
8152 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8153 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8154 return NULL_TREE;
8155
8156 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8157 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8158 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8159 t = fold_convert (TREE_TYPE (arg1), t);
8160
8161 return fold_build2_loc (loc, code, type, t, arg1);
8162 }
8163
8164 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8165 overflow further. Try to decrease the magnitude of constants involved
8166 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8167 and put sole constants at the second argument position.
8168 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8169
8170 static tree
8171 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8172 tree arg0, tree arg1)
8173 {
8174 tree t;
8175 bool strict_overflow_p;
8176 const char * const warnmsg = G_("assuming signed overflow does not occur "
8177 "when reducing constant in comparison");
8178
8179 /* Try canonicalization by simplifying arg0. */
8180 strict_overflow_p = false;
8181 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8182 &strict_overflow_p);
8183 if (t)
8184 {
8185 if (strict_overflow_p)
8186 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8187 return t;
8188 }
8189
8190 /* Try canonicalization by simplifying arg1 using the swapped
8191 comparison. */
8192 code = swap_tree_comparison (code);
8193 strict_overflow_p = false;
8194 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8195 &strict_overflow_p);
8196 if (t && strict_overflow_p)
8197 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8198 return t;
8199 }
8200
8201 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8202 space. This is used to avoid issuing overflow warnings for
8203 expressions like &p->x which cannot wrap. */
8204
8205 static bool
8206 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8207 {
8208 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8209 return true;
8210
8211 if (bitpos < 0)
8212 return true;
8213
8214 wide_int wi_offset;
8215 int precision = TYPE_PRECISION (TREE_TYPE (base));
8216 if (offset == NULL_TREE)
8217 wi_offset = wi::zero (precision);
8218 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8219 return true;
8220 else
8221 wi_offset = offset;
8222
8223 bool overflow;
8224 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8225 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8226 if (overflow)
8227 return true;
8228
8229 if (!wi::fits_uhwi_p (total))
8230 return true;
8231
8232 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8233 if (size <= 0)
8234 return true;
8235
8236 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8237 array. */
8238 if (TREE_CODE (base) == ADDR_EXPR)
8239 {
8240 HOST_WIDE_INT base_size;
8241
8242 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8243 if (base_size > 0 && size < base_size)
8244 size = base_size;
8245 }
8246
8247 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8248 }
8249
8250 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8251 kind INTEGER_CST. This makes sure to properly sign-extend the
8252 constant. */
8253
8254 static HOST_WIDE_INT
8255 size_low_cst (const_tree t)
8256 {
8257 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8258 int prec = TYPE_PRECISION (TREE_TYPE (t));
8259 if (prec < HOST_BITS_PER_WIDE_INT)
8260 return sext_hwi (w, prec);
8261 return w;
8262 }
8263
8264 /* Subroutine of fold_binary. This routine performs all of the
8265 transformations that are common to the equality/inequality
8266 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8267 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8268 fold_binary should call fold_binary. Fold a comparison with
8269 tree code CODE and type TYPE with operands OP0 and OP1. Return
8270 the folded comparison or NULL_TREE. */
8271
8272 static tree
8273 fold_comparison (location_t loc, enum tree_code code, tree type,
8274 tree op0, tree op1)
8275 {
8276 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8277 tree arg0, arg1, tem;
8278
8279 arg0 = op0;
8280 arg1 = op1;
8281
8282 STRIP_SIGN_NOPS (arg0);
8283 STRIP_SIGN_NOPS (arg1);
8284
8285 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
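/* E.g. X + 5 < 10 becomes X < 5. When computing C2 -+ C1 overflows,
as in X + 10 < INT_MIN + 5, the comparison folds to a constant in
the overflow branch below. */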
8286 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8287 && (equality_code
8288 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8289 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8290 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8291 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8292 && TREE_CODE (arg1) == INTEGER_CST
8293 && !TREE_OVERFLOW (arg1))
8294 {
8295 const enum tree_code
8296 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8297 tree const1 = TREE_OPERAND (arg0, 1);
8298 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8299 tree variable = TREE_OPERAND (arg0, 0);
8300 tree new_const = int_const_binop (reverse_op, const2, const1);
8301
8302 /* If the constant operation overflowed this can be
8303 simplified as a comparison against INT_MAX/INT_MIN. */
8304 if (TREE_OVERFLOW (new_const)
8305 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8306 {
8307 int const1_sgn = tree_int_cst_sgn (const1);
8308 enum tree_code code2 = code;
8309
8310 /* Get the sign of the constant on the lhs if the
8311 operation were VARIABLE + CONST1. */
8312 if (TREE_CODE (arg0) == MINUS_EXPR)
8313 const1_sgn = -const1_sgn;
8314
8315 /* The sign of the constant determines if we overflowed
8316 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8317 Canonicalize to the INT_MIN overflow by swapping the comparison
8318 if necessary. */
8319 if (const1_sgn == -1)
8320 code2 = swap_tree_comparison (code);
8321
8322 /* We now can look at the canonicalized case
8323 VARIABLE + 1 CODE2 INT_MIN
8324 and decide on the result. */
8325 switch (code2)
8326 {
8327 case EQ_EXPR:
8328 case LT_EXPR:
8329 case LE_EXPR:
8330 return
8331 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8332
8333 case NE_EXPR:
8334 case GE_EXPR:
8335 case GT_EXPR:
8336 return
8337 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8338
8339 default:
8340 gcc_unreachable ();
8341 }
8342 }
8343 else
8344 {
8345 if (!equality_code)
8346 fold_overflow_warning ("assuming signed overflow does not occur "
8347 "when changing X +- C1 cmp C2 to "
8348 "X cmp C2 -+ C1",
8349 WARN_STRICT_OVERFLOW_COMPARISON);
8350 return fold_build2_loc (loc, code, type, variable, new_const);
8351 }
8352 }
8353
8354 /* For comparisons of pointers we can decompose them into a compile-time
8355 comparison of the base objects and the offsets into the object.
8356 This requires at least one operand being an ADDR_EXPR or a
8357 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
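/* E.g. &s.a == &s.b decomposes into a comparison of the two field
offsets within s and folds to a constant, and p + 4 < p + 8 folds
to true when wraparound can be ruled out. */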
8358 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8359 && (TREE_CODE (arg0) == ADDR_EXPR
8360 || TREE_CODE (arg1) == ADDR_EXPR
8361 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8362 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8363 {
8364 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8365 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8366 machine_mode mode;
8367 int volatilep, reversep, unsignedp;
8368 bool indirect_base0 = false, indirect_base1 = false;
8369
8370 /* Get base and offset for the access. Strip ADDR_EXPR for
8371 get_inner_reference, but put it back by stripping INDIRECT_REF
8372 off the base object if possible. indirect_baseN will be true
8373 if baseN is not an address but refers to the object itself. */
8374 base0 = arg0;
8375 if (TREE_CODE (arg0) == ADDR_EXPR)
8376 {
8377 base0
8378 = get_inner_reference (TREE_OPERAND (arg0, 0),
8379 &bitsize, &bitpos0, &offset0, &mode,
8380 &unsignedp, &reversep, &volatilep, false);
8381 if (TREE_CODE (base0) == INDIRECT_REF)
8382 base0 = TREE_OPERAND (base0, 0);
8383 else
8384 indirect_base0 = true;
8385 }
8386 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8387 {
8388 base0 = TREE_OPERAND (arg0, 0);
8389 STRIP_SIGN_NOPS (base0);
8390 if (TREE_CODE (base0) == ADDR_EXPR)
8391 {
8392 base0 = TREE_OPERAND (base0, 0);
8393 indirect_base0 = true;
8394 }
8395 offset0 = TREE_OPERAND (arg0, 1);
8396 if (tree_fits_shwi_p (offset0))
8397 {
8398 HOST_WIDE_INT off = size_low_cst (offset0);
8399 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8400 * BITS_PER_UNIT)
8401 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8402 {
8403 bitpos0 = off * BITS_PER_UNIT;
8404 offset0 = NULL_TREE;
8405 }
8406 }
8407 }
8408
8409 base1 = arg1;
8410 if (TREE_CODE (arg1) == ADDR_EXPR)
8411 {
8412 base1
8413 = get_inner_reference (TREE_OPERAND (arg1, 0),
8414 &bitsize, &bitpos1, &offset1, &mode,
8415 &unsignedp, &reversep, &volatilep, false);
8416 if (TREE_CODE (base1) == INDIRECT_REF)
8417 base1 = TREE_OPERAND (base1, 0);
8418 else
8419 indirect_base1 = true;
8420 }
8421 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8422 {
8423 base1 = TREE_OPERAND (arg1, 0);
8424 STRIP_SIGN_NOPS (base1);
8425 if (TREE_CODE (base1) == ADDR_EXPR)
8426 {
8427 base1 = TREE_OPERAND (base1, 0);
8428 indirect_base1 = true;
8429 }
8430 offset1 = TREE_OPERAND (arg1, 1);
8431 if (tree_fits_shwi_p (offset1))
8432 {
8433 HOST_WIDE_INT off = size_low_cst (offset1);
8434 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8435 * BITS_PER_UNIT)
8436 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8437 {
8438 bitpos1 = off * BITS_PER_UNIT;
8439 offset1 = NULL_TREE;
8440 }
8441 }
8442 }
8443
8444 /* If we have equivalent bases we might be able to simplify. */
8445 if (indirect_base0 == indirect_base1
8446 && operand_equal_p (base0, base1,
8447 indirect_base0 ? OEP_ADDRESS_OF : 0))
8448 {
8449 /* We can fold this expression to a constant if the non-constant
8450 offset parts are equal. */
8451 if ((offset0 == offset1
8452 || (offset0 && offset1
8453 && operand_equal_p (offset0, offset1, 0)))
8454 && (code == EQ_EXPR
8455 || code == NE_EXPR
8456 || (indirect_base0 && DECL_P (base0))
8457 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8459 {
8460 if (!equality_code
8461 && bitpos0 != bitpos1
8462 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8463 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8464 fold_overflow_warning (("assuming pointer wraparound does not "
8465 "occur when comparing P +- C1 with "
8466 "P +- C2"),
8467 WARN_STRICT_OVERFLOW_CONDITIONAL);
8468
8469 switch (code)
8470 {
8471 case EQ_EXPR:
8472 return constant_boolean_node (bitpos0 == bitpos1, type);
8473 case NE_EXPR:
8474 return constant_boolean_node (bitpos0 != bitpos1, type);
8475 case LT_EXPR:
8476 return constant_boolean_node (bitpos0 < bitpos1, type);
8477 case LE_EXPR:
8478 return constant_boolean_node (bitpos0 <= bitpos1, type);
8479 case GE_EXPR:
8480 return constant_boolean_node (bitpos0 >= bitpos1, type);
8481 case GT_EXPR:
8482 return constant_boolean_node (bitpos0 > bitpos1, type);
8483 default:;
8484 }
8485 }
8486 /* We can simplify the comparison to a comparison of the variable
8487 offset parts if the constant offset parts are equal.
8488 Be careful to use signed sizetype here because otherwise we
8489 mess with array offsets in the wrong way. This is possible
8490 because pointer arithmetic is restricted to remain within an
8491 object and overflow on pointer differences is undefined as of
8492 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8493 else if (bitpos0 == bitpos1
8494 && (equality_code
8495 || (indirect_base0 && DECL_P (base0))
8496 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8497 {
8498 /* By converting to signed sizetype we cover middle-end pointer
8499 arithmetic which operates on unsigned pointer types of size
8500 type size and ARRAY_REF offsets which are properly sign or
8501 zero extended from their type in case it is narrower than
8502 sizetype. */
8503 if (offset0 == NULL_TREE)
8504 offset0 = build_int_cst (ssizetype, 0);
8505 else
8506 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8507 if (offset1 == NULL_TREE)
8508 offset1 = build_int_cst (ssizetype, 0);
8509 else
8510 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8511
8512 if (!equality_code
8513 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8514 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8515 fold_overflow_warning (("assuming pointer wraparound does not "
8516 "occur when comparing P +- C1 with "
8517 "P +- C2"),
8518 WARN_STRICT_OVERFLOW_COMPARISON);
8519
8520 return fold_build2_loc (loc, code, type, offset0, offset1);
8521 }
8522 }
8523 /* For equal offsets we can simplify to a comparison of the
8524 base addresses. */
8525 else if (bitpos0 == bitpos1
8526 && (indirect_base0
8527 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8528 && (indirect_base1
8529 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8530 && ((offset0 == offset1)
8531 || (offset0 && offset1
8532 && operand_equal_p (offset0, offset1, 0))))
8533 {
8534 if (indirect_base0)
8535 base0 = build_fold_addr_expr_loc (loc, base0);
8536 if (indirect_base1)
8537 base1 = build_fold_addr_expr_loc (loc, base1);
8538 return fold_build2_loc (loc, code, type, base0, base1);
8539 }
8540 }
8541
8542 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8543 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8544 the resulting offset is smaller in absolute value than the
8545 original one and has the same sign. */
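/* E.g. X + 2 < Y + 5 becomes X < Y + 3: the combined constant 3 is
smaller in magnitude than 5 and has the same sign. */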
8546 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8547 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8548 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8549 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8550 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8551 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8552 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8553 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8554 {
8555 tree const1 = TREE_OPERAND (arg0, 1);
8556 tree const2 = TREE_OPERAND (arg1, 1);
8557 tree variable1 = TREE_OPERAND (arg0, 0);
8558 tree variable2 = TREE_OPERAND (arg1, 0);
8559 tree cst;
8560 const char * const warnmsg = G_("assuming signed overflow does not "
8561 "occur when combining constants around "
8562 "a comparison");
8563
8564 /* Put the constant on the side where it doesn't overflow and is
8565 of lower absolute value than before and of the same sign. */
8566 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8567 ? MINUS_EXPR : PLUS_EXPR,
8568 const2, const1);
8569 if (!TREE_OVERFLOW (cst)
8570 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8571 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8572 {
8573 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8574 return fold_build2_loc (loc, code, type,
8575 variable1,
8576 fold_build2_loc (loc, TREE_CODE (arg1),
8577 TREE_TYPE (arg1),
8578 variable2, cst));
8579 }
8580
8581 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8582 ? MINUS_EXPR : PLUS_EXPR,
8583 const1, const2);
8584 if (!TREE_OVERFLOW (cst)
8585 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8586 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8587 {
8588 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8589 return fold_build2_loc (loc, code, type,
8590 fold_build2_loc (loc, TREE_CODE (arg0),
8591 TREE_TYPE (arg0),
8592 variable1, cst),
8593 variable2);
8594 }
8595 }
8596
8597 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8598 if (tem)
8599 return tem;
8600
8601 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8602 constant, we can simplify it. */
8603 if (TREE_CODE (arg1) == INTEGER_CST
8604 && (TREE_CODE (arg0) == MIN_EXPR
8605 || TREE_CODE (arg0) == MAX_EXPR)
8606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8607 {
8608 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8609 if (tem)
8610 return tem;
8611 }
8612
8613 /* If we are comparing an expression that just has comparisons
8614 of two integer values, arithmetic expressions of those comparisons,
8615 and constants, we can simplify it. There are only three cases
8616 to check: the two values can either be equal, the first can be
8617 greater, or the second can be greater. Fold the expression for
8618 those three values. Since each value must be 0 or 1, we have
8619 eight possibilities, each of which corresponds to the constant 0
8620 or 1 or one of the six possible comparisons.
8621
8622 This handles common cases like (a > b) == 0 but also handles
8623 expressions like ((x > y) - (y > x)) > 0, which supposedly
8624 occur in macroized code. */
8625
8626 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8627 {
8628 tree cval1 = 0, cval2 = 0;
8629 int save_p = 0;
8630
8631 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8632 /* Don't handle degenerate cases here; they should already
8633 have been handled anyway. */
8634 && cval1 != 0 && cval2 != 0
8635 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8636 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8637 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8638 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8639 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8640 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8641 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8642 {
8643 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8644 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8645
8646 /* We can't just pass T to eval_subst in case cval1 or cval2
8647 was the same as ARG1. */
8648
8649 tree high_result
8650 = fold_build2_loc (loc, code, type,
8651 eval_subst (loc, arg0, cval1, maxval,
8652 cval2, minval),
8653 arg1);
8654 tree equal_result
8655 = fold_build2_loc (loc, code, type,
8656 eval_subst (loc, arg0, cval1, maxval,
8657 cval2, maxval),
8658 arg1);
8659 tree low_result
8660 = fold_build2_loc (loc, code, type,
8661 eval_subst (loc, arg0, cval1, minval,
8662 cval2, maxval),
8663 arg1);
8664
8665 /* All three of these results should be 0 or 1. Confirm they are.
8666 Then use those values to select the proper code to use. */
8667
8668 if (TREE_CODE (high_result) == INTEGER_CST
8669 && TREE_CODE (equal_result) == INTEGER_CST
8670 && TREE_CODE (low_result) == INTEGER_CST)
8671 {
8672 /* Make a 3-bit mask with the high-order bit being the
8673 value for `>', the next for `=', and the low for `<'. */
8674 switch ((integer_onep (high_result) * 4)
8675 + (integer_onep (equal_result) * 2)
8676 + integer_onep (low_result))
8677 {
8678 case 0:
8679 /* Always false. */
8680 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8681 case 1:
8682 code = LT_EXPR;
8683 break;
8684 case 2:
8685 code = EQ_EXPR;
8686 break;
8687 case 3:
8688 code = LE_EXPR;
8689 break;
8690 case 4:
8691 code = GT_EXPR;
8692 break;
8693 case 5:
8694 code = NE_EXPR;
8695 break;
8696 case 6:
8697 code = GE_EXPR;
8698 break;
8699 case 7:
8700 /* Always true. */
8701 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8702 }
8703
8704 if (save_p)
8705 {
8706 tem = save_expr (build2 (code, type, cval1, cval2));
8707 SET_EXPR_LOCATION (tem, loc);
8708 return tem;
8709 }
8710 return fold_build2_loc (loc, code, type, cval1, cval2);
8711 }
8712 }
8713 }
8714
8715 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8716 into a single range test. */
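     /* For example, for unsigned X, X / 4 == 2 holds exactly for X in
	[8, 11], so the comparison folds into a single range test on X.  */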
8717 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8718 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8719 && TREE_CODE (arg1) == INTEGER_CST
8720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8721 && !integer_zerop (TREE_OPERAND (arg0, 1))
8722 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8723 && !TREE_OVERFLOW (arg1))
8724 {
8725 tem = fold_div_compare (loc, code, type, arg0, arg1);
8726 if (tem != NULL_TREE)
8727 return tem;
8728 }
8729
8730 return NULL_TREE;
8731 }
8732
8733
8734 /* Subroutine of fold_binary. Optimize complex multiplications of the
8735 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8736 argument EXPR represents the expression "z" of type TYPE. */
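/* For example, z = 3 + 4i folds to (3*3 + 4*4) + 0i = 25 + 0i.  */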
8737
8738 static tree
8739 fold_mult_zconjz (location_t loc, tree type, tree expr)
8740 {
8741 tree itype = TREE_TYPE (type);
8742 tree rpart, ipart, tem;
8743
8744 if (TREE_CODE (expr) == COMPLEX_EXPR)
8745 {
8746 rpart = TREE_OPERAND (expr, 0);
8747 ipart = TREE_OPERAND (expr, 1);
8748 }
8749 else if (TREE_CODE (expr) == COMPLEX_CST)
8750 {
8751 rpart = TREE_REALPART (expr);
8752 ipart = TREE_IMAGPART (expr);
8753 }
8754 else
8755 {
8756 expr = save_expr (expr);
8757 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8758 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8759 }
8760
8761 rpart = save_expr (rpart);
8762 ipart = save_expr (ipart);
8763 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8764 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8765 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8766 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8767 build_zero_cst (itype));
8768 }
8769
8770
8771 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8772 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8773
8774 static bool
8775 vec_cst_ctor_to_array (tree arg, tree *elts)
8776 {
8777 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8778
8779 if (TREE_CODE (arg) == VECTOR_CST)
8780 {
8781 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8782 elts[i] = VECTOR_CST_ELT (arg, i);
8783 }
8784 else if (TREE_CODE (arg) == CONSTRUCTOR)
8785 {
8786 constructor_elt *elt;
8787
8788 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8789 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8790 return false;
8791 else
8792 elts[i] = elt->value;
8793 }
8794 else
8795 return false;
8796 for (; i < nelts; i++)
8797 elts[i]
8798 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8799 return true;
8800 }
8801
8802 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8803 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8804 NULL_TREE otherwise. */
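/* Each SEL[i] indexes the concatenation of ARG0 and ARG1: values below
   NELTS select an element of ARG0, while values in [NELTS, 2*NELTS)
   select element SEL[i] - NELTS of ARG1.  */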
8805
8806 static tree
8807 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8808 {
8809 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8810 tree *elts;
8811 bool need_ctor = false;
8812
8813 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8814 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8815 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8816 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8817 return NULL_TREE;
8818
8819 elts = XALLOCAVEC (tree, nelts * 3);
8820 if (!vec_cst_ctor_to_array (arg0, elts)
8821 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8822 return NULL_TREE;
8823
8824 for (i = 0; i < nelts; i++)
8825 {
8826 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8827 need_ctor = true;
8828 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8829 }
8830
8831 if (need_ctor)
8832 {
8833 vec<constructor_elt, va_gc> *v;
8834 vec_alloc (v, nelts);
8835 for (i = 0; i < nelts; i++)
8836 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8837 return build_constructor (type, v);
8838 }
8839 else
8840 return build_vector (type, &elts[2 * nelts]);
8841 }
8842
8843 /* Try to fold a pointer difference of type TYPE between two address
8844 expressions of array references AREF0 and AREF1 using location LOC.
8845 Return a simplified expression for the difference or NULL_TREE. */
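/* For example, &a[i] - &a[j] yields the byte difference
   (i - j) * sizeof (a[0]), plus whatever difference the recursion
   computes for the bases.  */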
8846
8847 static tree
8848 fold_addr_of_array_ref_difference (location_t loc, tree type,
8849 tree aref0, tree aref1)
8850 {
8851 tree base0 = TREE_OPERAND (aref0, 0);
8852 tree base1 = TREE_OPERAND (aref1, 0);
8853 tree base_offset = build_int_cst (type, 0);
8854
8855 /* If the bases are array references as well, recurse. If the bases
8856 are pointer indirections compute the difference of the pointers.
8857 If the bases are equal, we are set. */
8858 if ((TREE_CODE (base0) == ARRAY_REF
8859 && TREE_CODE (base1) == ARRAY_REF
8860 && (base_offset
8861 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8862 || (INDIRECT_REF_P (base0)
8863 && INDIRECT_REF_P (base1)
8864 && (base_offset
8865 = fold_binary_loc (loc, MINUS_EXPR, type,
8866 fold_convert (type, TREE_OPERAND (base0, 0)),
8867 fold_convert (type,
8868 TREE_OPERAND (base1, 0)))))
8869 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8870 {
8871 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8872 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8873 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8874 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8875 return fold_build2_loc (loc, PLUS_EXPR, type,
8876 base_offset,
8877 fold_build2_loc (loc, MULT_EXPR, type,
8878 diff, esz));
8879 }
8880 return NULL_TREE;
8881 }
8882
8883 /* If the real or vector real constant CST of type TYPE has an exact
8884 inverse, return it, else return NULL. */
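/* For example, 0.25 yields 4.0, whereas 3.0 yields NULL because 1/3
   is not exactly representable in binary floating point.  */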
8885
8886 tree
8887 exact_inverse (tree type, tree cst)
8888 {
8889 REAL_VALUE_TYPE r;
8890 tree unit_type, *elts;
8891 machine_mode mode;
8892 unsigned vec_nelts, i;
8893
8894 switch (TREE_CODE (cst))
8895 {
8896 case REAL_CST:
8897 r = TREE_REAL_CST (cst);
8898
8899 if (exact_real_inverse (TYPE_MODE (type), &r))
8900 return build_real (type, r);
8901
8902 return NULL_TREE;
8903
8904 case VECTOR_CST:
8905 vec_nelts = VECTOR_CST_NELTS (cst);
8906 elts = XALLOCAVEC (tree, vec_nelts);
8907 unit_type = TREE_TYPE (type);
8908 mode = TYPE_MODE (unit_type);
8909
8910 for (i = 0; i < vec_nelts; i++)
8911 {
8912 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8913 if (!exact_real_inverse (mode, &r))
8914 return NULL_TREE;
8915 elts[i] = build_real (unit_type, r);
8916 }
8917
8918 return build_vector (type, elts);
8919
8920 default:
8921 return NULL_TREE;
8922 }
8923 }
8924
8925 /* Mask out the tz least significant bits of X of type TYPE where
8926 tz is the number of trailing zeroes in Y. */
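/* For example, with Y = 8 (three trailing zeroes) and X = 0b10110,
   the result is 0b10000.  */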
8927 static wide_int
8928 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8929 {
8930 int tz = wi::ctz (y);
8931 if (tz > 0)
8932 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8933 return x;
8934 }
8935
8936 /* Return true when T is an expression of integral or pointer type
8937 whose value is known to be nonzero. Similar logic is present in
8938 nonzero_address_p in rtlanal.c.
8939
8940 If the return value is based on the assumption that signed overflow
8941 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8942 change *STRICT_OVERFLOW_P. */
8943
8944 static bool
8945 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8946 {
8947 tree type = TREE_TYPE (t);
8948 enum tree_code code;
8949
8950 /* Doing something useful for floating point would need more work. */
8951 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8952 return false;
8953
8954 code = TREE_CODE (t);
8955 switch (TREE_CODE_CLASS (code))
8956 {
8957 case tcc_unary:
8958 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8959 strict_overflow_p);
8960 case tcc_binary:
8961 case tcc_comparison:
8962 return tree_binary_nonzero_warnv_p (code, type,
8963 TREE_OPERAND (t, 0),
8964 TREE_OPERAND (t, 1),
8965 strict_overflow_p);
8966 case tcc_constant:
8967 case tcc_declaration:
8968 case tcc_reference:
8969 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8970
8971 default:
8972 break;
8973 }
8974
8975 switch (code)
8976 {
8977 case TRUTH_NOT_EXPR:
8978 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8979 strict_overflow_p);
8980
8981 case TRUTH_AND_EXPR:
8982 case TRUTH_OR_EXPR:
8983 case TRUTH_XOR_EXPR:
8984 return tree_binary_nonzero_warnv_p (code, type,
8985 TREE_OPERAND (t, 0),
8986 TREE_OPERAND (t, 1),
8987 strict_overflow_p);
8988
8989 case COND_EXPR:
8990 case CONSTRUCTOR:
8991 case OBJ_TYPE_REF:
8992 case ASSERT_EXPR:
8993 case ADDR_EXPR:
8994 case WITH_SIZE_EXPR:
8995 case SSA_NAME:
8996 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8997
8998 case COMPOUND_EXPR:
8999 case MODIFY_EXPR:
9000 case BIND_EXPR:
9001 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9002 strict_overflow_p);
9003
9004 case SAVE_EXPR:
9005 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9006 strict_overflow_p);
9007
9008 case CALL_EXPR:
9009 {
9010 tree fndecl = get_callee_fndecl (t);
9011 if (!fndecl) return false;
9012 if (flag_delete_null_pointer_checks && !flag_check_new
9013 && DECL_IS_OPERATOR_NEW (fndecl)
9014 && !TREE_NOTHROW (fndecl))
9015 return true;
9016 if (flag_delete_null_pointer_checks
9017 && lookup_attribute ("returns_nonnull",
9018 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9019 return true;
9020 return alloca_call_p (t);
9021 }
9022
9023 default:
9024 break;
9025 }
9026 return false;
9027 }
9028
9029 /* Return true when T is an address and is known to be nonzero.
9030 Handle warnings about undefined signed overflow. */
9031
9032 static bool
9033 tree_expr_nonzero_p (tree t)
9034 {
9035 bool ret, strict_overflow_p;
9036
9037 strict_overflow_p = false;
9038 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9039 if (strict_overflow_p)
9040 fold_overflow_warning (("assuming signed overflow does not occur when "
9041 "determining that expression is always "
9042 "non-zero"),
9043 WARN_STRICT_OVERFLOW_MISC);
9044 return ret;
9045 }
9046
9047 /* Fold a binary expression of code CODE and type TYPE with operands
9048 OP0 and OP1. LOC is the location of the resulting expression.
9049 Return the folded expression if folding is successful. Otherwise,
9050 return NULL_TREE. */
9051
9052 tree
9053 fold_binary_loc (location_t loc,
9054 enum tree_code code, tree type, tree op0, tree op1)
9055 {
9056 enum tree_code_class kind = TREE_CODE_CLASS (code);
9057 tree arg0, arg1, tem;
9058 tree t1 = NULL_TREE;
9059 bool strict_overflow_p;
9060 unsigned int prec;
9061
9062 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9063 && TREE_CODE_LENGTH (code) == 2
9064 && op0 != NULL_TREE
9065 && op1 != NULL_TREE);
9066
9067 arg0 = op0;
9068 arg1 = op1;
9069
9070 /* Strip any conversions that don't change the mode. This is
9071 safe for every expression, except for a comparison expression
9072 because its signedness is derived from its operands. So, in
9073 the latter case, only strip conversions that don't change the
9074 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9075 preserved.
9076
9077 Note that this is done as an internal manipulation within the
9078 constant folder, in order to find the simplest representation
9079 of the arguments so that their form can be studied. In any
9080 case, the appropriate type conversions should be put back in
9081 the tree that will get out of the constant folder. */
9082
9083 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9084 {
9085 STRIP_SIGN_NOPS (arg0);
9086 STRIP_SIGN_NOPS (arg1);
9087 }
9088 else
9089 {
9090 STRIP_NOPS (arg0);
9091 STRIP_NOPS (arg1);
9092 }
9093
9094 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9095 constant but we can't do arithmetic on them. */
9096 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9097 {
9098 tem = const_binop (code, type, arg0, arg1);
9099 if (tem != NULL_TREE)
9100 {
9101 if (TREE_TYPE (tem) != type)
9102 tem = fold_convert_loc (loc, type, tem);
9103 return tem;
9104 }
9105 }
9106
9107 /* If this is a commutative operation, and ARG0 is a constant, move it
9108 to ARG1 to reduce the number of tests below. */
9109 if (commutative_tree_code (code)
9110 && tree_swap_operands_p (arg0, arg1, true))
9111 return fold_build2_loc (loc, code, type, op1, op0);
9112
9113 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9114 to ARG1 to reduce the number of tests below. */
9115 if (kind == tcc_comparison
9116 && tree_swap_operands_p (arg0, arg1, true))
9117 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9118
9119 tem = generic_simplify (loc, code, type, op0, op1);
9120 if (tem)
9121 return tem;
9122
9123 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9124
9125 First check for cases where an arithmetic operation is applied to a
9126 compound, conditional, or comparison operation. Push the arithmetic
9127 operation inside the compound or conditional to see if any folding
9128 can then be done. Convert comparison to conditional for this purpose.
9129 This also optimizes non-constant cases that used to be done in
9130 expand_expr.
9131
9132 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9133 one of the operands is a truth value and the other is a truth
9134 value or a BIT_AND_EXPR with the constant 1. In that case, the
9135 code below would make the expression more complex. Change it to a
9136 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9137 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9138
9139 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9140 || code == EQ_EXPR || code == NE_EXPR)
9141 && TREE_CODE (type) != VECTOR_TYPE
9142 && ((truth_value_p (TREE_CODE (arg0))
9143 && (truth_value_p (TREE_CODE (arg1))
9144 || (TREE_CODE (arg1) == BIT_AND_EXPR
9145 && integer_onep (TREE_OPERAND (arg1, 1)))))
9146 || (truth_value_p (TREE_CODE (arg1))
9147 && (truth_value_p (TREE_CODE (arg0))
9148 || (TREE_CODE (arg0) == BIT_AND_EXPR
9149 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9150 {
9151 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9152 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9153 : TRUTH_XOR_EXPR,
9154 boolean_type_node,
9155 fold_convert_loc (loc, boolean_type_node, arg0),
9156 fold_convert_loc (loc, boolean_type_node, arg1));
9157
9158 if (code == EQ_EXPR)
9159 tem = invert_truthvalue_loc (loc, tem);
9160
9161 return fold_convert_loc (loc, type, tem);
9162 }
9163
9164 if (TREE_CODE_CLASS (code) == tcc_binary
9165 || TREE_CODE_CLASS (code) == tcc_comparison)
9166 {
9167 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9168 {
9169 tem = fold_build2_loc (loc, code, type,
9170 fold_convert_loc (loc, TREE_TYPE (op0),
9171 TREE_OPERAND (arg0, 1)), op1);
9172 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9173 tem);
9174 }
9175 if (TREE_CODE (arg1) == COMPOUND_EXPR
9176 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9177 {
9178 tem = fold_build2_loc (loc, code, type, op0,
9179 fold_convert_loc (loc, TREE_TYPE (op1),
9180 TREE_OPERAND (arg1, 1)));
9181 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9182 tem);
9183 }
9184
9185 if (TREE_CODE (arg0) == COND_EXPR
9186 || TREE_CODE (arg0) == VEC_COND_EXPR
9187 || COMPARISON_CLASS_P (arg0))
9188 {
9189 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9190 arg0, arg1,
9191 /*cond_first_p=*/1);
9192 if (tem != NULL_TREE)
9193 return tem;
9194 }
9195
9196 if (TREE_CODE (arg1) == COND_EXPR
9197 || TREE_CODE (arg1) == VEC_COND_EXPR
9198 || COMPARISON_CLASS_P (arg1))
9199 {
9200 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9201 arg1, arg0,
9202 /*cond_first_p=*/0);
9203 if (tem != NULL_TREE)
9204 return tem;
9205 }
9206 }
9207
9208 switch (code)
9209 {
9210 case MEM_REF:
9211 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9212 if (TREE_CODE (arg0) == ADDR_EXPR
9213 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9214 {
9215 tree iref = TREE_OPERAND (arg0, 0);
9216 return fold_build2 (MEM_REF, type,
9217 TREE_OPERAND (iref, 0),
9218 int_const_binop (PLUS_EXPR, arg1,
9219 TREE_OPERAND (iref, 1)));
9220 }
9221
9222 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9223 if (TREE_CODE (arg0) == ADDR_EXPR
9224 && handled_component_p (TREE_OPERAND (arg0, 0)))
9225 {
9226 tree base;
9227 HOST_WIDE_INT coffset;
9228 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9229 &coffset);
9230 if (!base)
9231 return NULL_TREE;
9232 return fold_build2 (MEM_REF, type,
9233 build_fold_addr_expr (base),
9234 int_const_binop (PLUS_EXPR, arg1,
9235 size_int (coffset)));
9236 }
9237
9238 return NULL_TREE;
9239
9240 case POINTER_PLUS_EXPR:
9241 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9242 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9243 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9244 return fold_convert_loc (loc, type,
9245 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9246 fold_convert_loc (loc, sizetype,
9247 arg1),
9248 fold_convert_loc (loc, sizetype,
9249 arg0)));
9250
9251 return NULL_TREE;
9252
9253 case PLUS_EXPR:
9254 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9255 {
9256 /* X + (X / CST) * -CST is X % CST. */
9257 if (TREE_CODE (arg1) == MULT_EXPR
9258 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9259 && operand_equal_p (arg0,
9260 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9261 {
9262 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9263 tree cst1 = TREE_OPERAND (arg1, 1);
9264 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9265 cst1, cst0);
9266 if (sum && integer_zerop (sum))
9267 return fold_convert_loc (loc, type,
9268 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9269 TREE_TYPE (arg0), arg0,
9270 cst0));
9271 }
9272 }
9273
9274 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9275 one. Make sure the type is not saturating and has the signedness of
9276 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9277 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9278 if ((TREE_CODE (arg0) == MULT_EXPR
9279 || TREE_CODE (arg1) == MULT_EXPR)
9280 && !TYPE_SATURATING (type)
9281 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9282 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9283 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9284 {
9285 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9286 if (tem)
9287 return tem;
9288 }
9289
9290 if (! FLOAT_TYPE_P (type))
9291 {
9292 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9293 (plus (plus (mult) (mult)) (foo)) so that we can
9294 take advantage of the factoring cases below. */
9295 if (ANY_INTEGRAL_TYPE_P (type)
9296 && TYPE_OVERFLOW_WRAPS (type)
9297 && (((TREE_CODE (arg0) == PLUS_EXPR
9298 || TREE_CODE (arg0) == MINUS_EXPR)
9299 && TREE_CODE (arg1) == MULT_EXPR)
9300 || ((TREE_CODE (arg1) == PLUS_EXPR
9301 || TREE_CODE (arg1) == MINUS_EXPR)
9302 && TREE_CODE (arg0) == MULT_EXPR)))
9303 {
9304 tree parg0, parg1, parg, marg;
9305 enum tree_code pcode;
9306
9307 if (TREE_CODE (arg1) == MULT_EXPR)
9308 parg = arg0, marg = arg1;
9309 else
9310 parg = arg1, marg = arg0;
9311 pcode = TREE_CODE (parg);
9312 parg0 = TREE_OPERAND (parg, 0);
9313 parg1 = TREE_OPERAND (parg, 1);
9314 STRIP_NOPS (parg0);
9315 STRIP_NOPS (parg1);
9316
9317 if (TREE_CODE (parg0) == MULT_EXPR
9318 && TREE_CODE (parg1) != MULT_EXPR)
9319 return fold_build2_loc (loc, pcode, type,
9320 fold_build2_loc (loc, PLUS_EXPR, type,
9321 fold_convert_loc (loc, type,
9322 parg0),
9323 fold_convert_loc (loc, type,
9324 marg)),
9325 fold_convert_loc (loc, type, parg1));
9326 if (TREE_CODE (parg0) != MULT_EXPR
9327 && TREE_CODE (parg1) == MULT_EXPR)
9328 return
9329 fold_build2_loc (loc, PLUS_EXPR, type,
9330 fold_convert_loc (loc, type, parg0),
9331 fold_build2_loc (loc, pcode, type,
9332 fold_convert_loc (loc, type, marg),
9333 fold_convert_loc (loc, type,
9334 parg1)));
9335 }
9336 }
9337 else
9338 {
9339 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9340 to __complex__ ( x, y ). This is not the same for SNaNs or
9341 if signed zeros are involved. */
9342 if (!HONOR_SNANS (element_mode (arg0))
9343 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9344 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9345 {
9346 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9347 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9348 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9349 bool arg0rz = false, arg0iz = false;
9350 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9351 || (arg0i && (arg0iz = real_zerop (arg0i))))
9352 {
9353 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9354 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9355 if (arg0rz && arg1i && real_zerop (arg1i))
9356 {
9357 tree rp = arg1r ? arg1r
9358 : build1 (REALPART_EXPR, rtype, arg1);
9359 tree ip = arg0i ? arg0i
9360 : build1 (IMAGPART_EXPR, rtype, arg0);
9361 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9362 }
9363 else if (arg0iz && arg1r && real_zerop (arg1r))
9364 {
9365 tree rp = arg0r ? arg0r
9366 : build1 (REALPART_EXPR, rtype, arg0);
9367 tree ip = arg1i ? arg1i
9368 : build1 (IMAGPART_EXPR, rtype, arg1);
9369 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9370 }
9371 }
9372 }
9373
9374 if (flag_unsafe_math_optimizations
9375 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9376 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9377 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9378 return tem;
9379
9380 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9381 We associate floats only if the user has specified
9382 -fassociative-math. */
9383 if (flag_associative_math
9384 && TREE_CODE (arg1) == PLUS_EXPR
9385 && TREE_CODE (arg0) != MULT_EXPR)
9386 {
9387 tree tree10 = TREE_OPERAND (arg1, 0);
9388 tree tree11 = TREE_OPERAND (arg1, 1);
9389 if (TREE_CODE (tree11) == MULT_EXPR
9390 && TREE_CODE (tree10) == MULT_EXPR)
9391 {
9392 tree tree0;
9393 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9394 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9395 }
9396 }
9397 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9398 We associate floats only if the user has specified
9399 -fassociative-math. */
9400 if (flag_associative_math
9401 && TREE_CODE (arg0) == PLUS_EXPR
9402 && TREE_CODE (arg1) != MULT_EXPR)
9403 {
9404 tree tree00 = TREE_OPERAND (arg0, 0);
9405 tree tree01 = TREE_OPERAND (arg0, 1);
9406 if (TREE_CODE (tree01) == MULT_EXPR
9407 && TREE_CODE (tree00) == MULT_EXPR)
9408 {
9409 tree tree0;
9410 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9411 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9412 }
9413 }
9414 }
9415
9416 bit_rotate:
9417 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9418 is a rotate of A by C1 bits. */
9419 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9420 is a rotate of A by B bits. */
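    /* For example, for unsigned 32-bit A, (A << 8) + (A >> 24) becomes
       A rotated left by 8 bits.  */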
9421 {
9422 enum tree_code code0, code1;
9423 tree rtype;
9424 code0 = TREE_CODE (arg0);
9425 code1 = TREE_CODE (arg1);
9426 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9427 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9428 && operand_equal_p (TREE_OPERAND (arg0, 0),
9429 TREE_OPERAND (arg1, 0), 0)
9430 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9431 TYPE_UNSIGNED (rtype))
9432 /* Only create rotates in complete modes. Other cases are not
9433 expanded properly. */
9434 && (element_precision (rtype)
9435 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9436 {
9437 tree tree01, tree11;
9438 enum tree_code code01, code11;
9439
9440 tree01 = TREE_OPERAND (arg0, 1);
9441 tree11 = TREE_OPERAND (arg1, 1);
9442 STRIP_NOPS (tree01);
9443 STRIP_NOPS (tree11);
9444 code01 = TREE_CODE (tree01);
9445 code11 = TREE_CODE (tree11);
9446 if (code01 == INTEGER_CST
9447 && code11 == INTEGER_CST
9448 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9449 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9450 {
9451 tem = build2_loc (loc, LROTATE_EXPR,
9452 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9453 TREE_OPERAND (arg0, 0),
9454 code0 == LSHIFT_EXPR
9455 ? TREE_OPERAND (arg0, 1)
9456 : TREE_OPERAND (arg1, 1));
9457 return fold_convert_loc (loc, type, tem);
9458 }
9459 else if (code11 == MINUS_EXPR)
9460 {
9461 tree tree110, tree111;
9462 tree110 = TREE_OPERAND (tree11, 0);
9463 tree111 = TREE_OPERAND (tree11, 1);
9464 STRIP_NOPS (tree110);
9465 STRIP_NOPS (tree111);
9466 if (TREE_CODE (tree110) == INTEGER_CST
9467 && 0 == compare_tree_int (tree110,
9468 element_precision
9469 (TREE_TYPE (TREE_OPERAND
9470 (arg0, 0))))
9471 && operand_equal_p (tree01, tree111, 0))
9472 return
9473 fold_convert_loc (loc, type,
9474 build2 ((code0 == LSHIFT_EXPR
9475 ? LROTATE_EXPR
9476 : RROTATE_EXPR),
9477 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9478 TREE_OPERAND (arg0, 0),
9479 TREE_OPERAND (arg0, 1)));
9480 }
9481 else if (code01 == MINUS_EXPR)
9482 {
9483 tree tree010, tree011;
9484 tree010 = TREE_OPERAND (tree01, 0);
9485 tree011 = TREE_OPERAND (tree01, 1);
9486 STRIP_NOPS (tree010);
9487 STRIP_NOPS (tree011);
9488 if (TREE_CODE (tree010) == INTEGER_CST
9489 && 0 == compare_tree_int (tree010,
9490 element_precision
9491 (TREE_TYPE (TREE_OPERAND
9492 (arg0, 0))))
9493 && operand_equal_p (tree11, tree011, 0))
9494 return fold_convert_loc
9495 (loc, type,
9496 build2 ((code0 != LSHIFT_EXPR
9497 ? LROTATE_EXPR
9498 : RROTATE_EXPR),
9499 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9500 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9501 }
9502 }
9503 }
9504
9505 associate:
9506 /* In most languages, we can't associate operations on floats through
9507 parentheses. Rather than remember where the parentheses were, we
9508 don't associate floats at all, unless the user has specified
9509 -fassociative-math.
9510 Also, we need to make sure the type is not saturating. */
9511
9512 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9513 && !TYPE_SATURATING (type))
9514 {
9515 tree var0, con0, lit0, minus_lit0;
9516 tree var1, con1, lit1, minus_lit1;
9517 tree atype = type;
9518 bool ok = true;
9519
9520 /* Split both trees into variables, constants, and literals. Then
9521 associate each group together, the constants with literals,
9522 then the result with variables. This increases the chances of
9523 literals being recombined later and of generating relocatable
9524 expressions for the sum of a constant and literal. */
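      /* For example, for a wrapping unsigned type, (X + 3) + (Y + 5)
	 splits into variables X, Y and literals 3, 5, which recombine
	 below as (X + Y) + 8.  */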
9525 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9526 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9527 code == MINUS_EXPR);
9528
9529 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9530 if (code == MINUS_EXPR)
9531 code = PLUS_EXPR;
9532
9533 /* With undefined overflow prefer doing association in a type
9534 which wraps on overflow, if that is one of the operand types. */
9535 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9536 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9537 {
9538 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9539 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9540 atype = TREE_TYPE (arg0);
9541 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9542 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9543 atype = TREE_TYPE (arg1);
9544 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9545 }
9546
9547 /* With undefined overflow we can only associate constants with one
9548 variable, and constants whose association doesn't overflow. */
9549 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9550 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9551 {
9552 if (var0 && var1)
9553 {
9554 tree tmp0 = var0;
9555 tree tmp1 = var1;
9556 bool one_neg = false;
9557
9558 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9559 {
9560 tmp0 = TREE_OPERAND (tmp0, 0);
9561 one_neg = !one_neg;
9562 }
9563 if (CONVERT_EXPR_P (tmp0)
9564 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9565 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9566 <= TYPE_PRECISION (atype)))
9567 tmp0 = TREE_OPERAND (tmp0, 0);
9568 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9569 {
9570 tmp1 = TREE_OPERAND (tmp1, 0);
9571 one_neg = !one_neg;
9572 }
9573 if (CONVERT_EXPR_P (tmp1)
9574 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9575 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9576 <= TYPE_PRECISION (atype)))
9577 tmp1 = TREE_OPERAND (tmp1, 0);
9578 /* The only case we can still associate with two variables
9579 is if they cancel out. */
9580 if (!one_neg
9581 || !operand_equal_p (tmp0, tmp1, 0))
9582 ok = false;
9583 }
9584 }
9585
9586 /* Only do something if we found more than two objects. Otherwise,
9587 nothing has changed and we risk infinite recursion. */
9588 if (ok
9589 && (2 < ((var0 != 0) + (var1 != 0)
9590 + (con0 != 0) + (con1 != 0)
9591 + (lit0 != 0) + (lit1 != 0)
9592 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9593 {
9594 bool any_overflows = false;
9595 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9596 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9597 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9598 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9599 var0 = associate_trees (loc, var0, var1, code, atype);
9600 con0 = associate_trees (loc, con0, con1, code, atype);
9601 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9602 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9603 code, atype);
9604
9605 /* Preserve the MINUS_EXPR if the negative part of the literal is
9606 greater than the positive part. Otherwise, the multiplicative
9607 folding code (i.e. extract_muldiv) may be fooled in case
9608 unsigned constants are subtracted, like in the following
9609 example: ((X*2 + 4) - 8U)/2. */
9610 if (minus_lit0 && lit0)
9611 {
9612 if (TREE_CODE (lit0) == INTEGER_CST
9613 && TREE_CODE (minus_lit0) == INTEGER_CST
9614 && tree_int_cst_lt (lit0, minus_lit0))
9615 {
9616 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9617 MINUS_EXPR, atype);
9618 lit0 = 0;
9619 }
9620 else
9621 {
9622 lit0 = associate_trees (loc, lit0, minus_lit0,
9623 MINUS_EXPR, atype);
9624 minus_lit0 = 0;
9625 }
9626 }
9627
9628 /* Don't introduce overflows through reassociation. */
9629 if (!any_overflows
9630 && ((lit0 && TREE_OVERFLOW_P (lit0))
9631 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9632 return NULL_TREE;
9633
9634 if (minus_lit0)
9635 {
9636 if (con0 == 0)
9637 return
9638 fold_convert_loc (loc, type,
9639 associate_trees (loc, var0, minus_lit0,
9640 MINUS_EXPR, atype));
9641 else
9642 {
9643 con0 = associate_trees (loc, con0, minus_lit0,
9644 MINUS_EXPR, atype);
9645 return
9646 fold_convert_loc (loc, type,
9647 associate_trees (loc, var0, con0,
9648 PLUS_EXPR, atype));
9649 }
9650 }
9651
9652 con0 = associate_trees (loc, con0, lit0, code, atype);
9653 return
9654 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9655 code, atype));
9656 }
9657 }
9658
9659 return NULL_TREE;
9660
9661 case MINUS_EXPR:
9662 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9663 if (TREE_CODE (arg0) == NEGATE_EXPR
9664 && negate_expr_p (arg1)
9665 && reorder_operands_p (arg0, arg1))
9666 return fold_build2_loc (loc, MINUS_EXPR, type,
9667 fold_convert_loc (loc, type,
9668 negate_expr (arg1)),
9669 fold_convert_loc (loc, type,
9670 TREE_OPERAND (arg0, 0)));
9671
9672 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9673 __complex__ ( x, -y ). This is not the same for SNaNs or if
9674 signed zeros are involved. */
9675 if (!HONOR_SNANS (element_mode (arg0))
9676 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9677 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9678 {
9679 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9680 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9681 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9682 bool arg0rz = false, arg0iz = false;
9683 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9684 || (arg0i && (arg0iz = real_zerop (arg0i))))
9685 {
9686 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9687 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9688 if (arg0rz && arg1i && real_zerop (arg1i))
9689 {
9690 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9691 arg1r ? arg1r
9692 : build1 (REALPART_EXPR, rtype, arg1));
9693 tree ip = arg0i ? arg0i
9694 : build1 (IMAGPART_EXPR, rtype, arg0);
9695 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9696 }
9697 else if (arg0iz && arg1r && real_zerop (arg1r))
9698 {
9699 tree rp = arg0r ? arg0r
9700 : build1 (REALPART_EXPR, rtype, arg0);
9701 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9702 arg1i ? arg1i
9703 : build1 (IMAGPART_EXPR, rtype, arg1));
9704 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9705 }
9706 }
9707 }
9708
9709 /* A - B -> A + (-B) if B is easily negatable. */
9710 if (negate_expr_p (arg1)
9711 && !TYPE_OVERFLOW_SANITIZED (type)
9712 && ((FLOAT_TYPE_P (type)
9713 /* Avoid this transformation if B is a positive REAL_CST. */
9714 && (TREE_CODE (arg1) != REAL_CST
9715 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9716 || INTEGRAL_TYPE_P (type)))
9717 return fold_build2_loc (loc, PLUS_EXPR, type,
9718 fold_convert_loc (loc, type, arg0),
9719 fold_convert_loc (loc, type,
9720 negate_expr (arg1)));
9721
9722 /* Fold &a[i] - &a[j] to i-j. */
9723 if (TREE_CODE (arg0) == ADDR_EXPR
9724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9725 && TREE_CODE (arg1) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9727 {
9728 tree tem = fold_addr_of_array_ref_difference (loc, type,
9729 TREE_OPERAND (arg0, 0),
9730 TREE_OPERAND (arg1, 0));
9731 if (tem)
9732 return tem;
9733 }
9734
9735 if (FLOAT_TYPE_P (type)
9736 && flag_unsafe_math_optimizations
9737 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9738 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9739 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9740 return tem;
9741
9742 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9743 one. Make sure the type is not saturating and has the signedness of
9744 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9745 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9746 if ((TREE_CODE (arg0) == MULT_EXPR
9747 || TREE_CODE (arg1) == MULT_EXPR)
9748 && !TYPE_SATURATING (type)
9749 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9750 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9751 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9752 {
9753 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9754 if (tem)
9755 return tem;
9756 }
9757
9758 goto associate;
9759
9760 case MULT_EXPR:
9761 if (! FLOAT_TYPE_P (type))
9762 {
9763 /* Transform x * -C into -x * C if x is easily negatable. */
9764 if (TREE_CODE (arg1) == INTEGER_CST
9765 && tree_int_cst_sgn (arg1) == -1
9766 && negate_expr_p (arg0)
9767 && (tem = negate_expr (arg1)) != arg1
9768 && !TREE_OVERFLOW (tem))
9769 return fold_build2_loc (loc, MULT_EXPR, type,
9770 fold_convert_loc (loc, type,
9771 negate_expr (arg0)),
9772 tem);
9773
9774 /* (A + A) * C -> A * 2 * C */
9775 if (TREE_CODE (arg0) == PLUS_EXPR
9776 && TREE_CODE (arg1) == INTEGER_CST
9777 && operand_equal_p (TREE_OPERAND (arg0, 0),
9778 TREE_OPERAND (arg0, 1), 0))
9779 return fold_build2_loc (loc, MULT_EXPR, type,
9780 omit_one_operand_loc (loc, type,
9781 TREE_OPERAND (arg0, 0),
9782 TREE_OPERAND (arg0, 1)),
9783 fold_build2_loc (loc, MULT_EXPR, type,
9784 build_int_cst (type, 2), arg1));
9785
9786 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9787 sign-changing only. */
9788 if (TREE_CODE (arg1) == INTEGER_CST
9789 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9790 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9791 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9792
9793 strict_overflow_p = false;
9794 if (TREE_CODE (arg1) == INTEGER_CST
9795 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9796 &strict_overflow_p)))
9797 {
9798 if (strict_overflow_p)
9799 fold_overflow_warning (("assuming signed overflow does not "
9800 "occur when simplifying "
9801 "multiplication"),
9802 WARN_STRICT_OVERFLOW_MISC);
9803 return fold_convert_loc (loc, type, tem);
9804 }
9805
9806 /* Optimize z * conj(z) for integer complex numbers. */
9807 if (TREE_CODE (arg0) == CONJ_EXPR
9808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9809 return fold_mult_zconjz (loc, type, arg1);
9810 if (TREE_CODE (arg1) == CONJ_EXPR
9811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9812 return fold_mult_zconjz (loc, type, arg0);
9813 }
9814 else
9815 {
9816 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9817 This is not the same for NaNs or if signed zeros are
9818 involved. */
9819 if (!HONOR_NANS (arg0)
9820 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9821 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9822 && TREE_CODE (arg1) == COMPLEX_CST
9823 && real_zerop (TREE_REALPART (arg1)))
9824 {
9825 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9826 if (real_onep (TREE_IMAGPART (arg1)))
9827 return
9828 fold_build2_loc (loc, COMPLEX_EXPR, type,
9829 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9830 rtype, arg0)),
9831 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9832 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9833 return
9834 fold_build2_loc (loc, COMPLEX_EXPR, type,
9835 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9836 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9837 rtype, arg0)));
9838 }
9839
9840 /* Optimize z * conj(z) for floating point complex numbers.
9841 Guarded by flag_unsafe_math_optimizations as non-finite
9842 imaginary components don't produce scalar results. */
9843 if (flag_unsafe_math_optimizations
9844 && TREE_CODE (arg0) == CONJ_EXPR
9845 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9846 return fold_mult_zconjz (loc, type, arg1);
9847 if (flag_unsafe_math_optimizations
9848 && TREE_CODE (arg1) == CONJ_EXPR
9849 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9850 return fold_mult_zconjz (loc, type, arg0);
9851
9852 if (flag_unsafe_math_optimizations)
9853 {
9854
9855 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9856 if (!in_gimple_form
9857 && optimize
9858 && operand_equal_p (arg0, arg1, 0))
9859 {
9860 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9861
9862 if (powfn)
9863 {
9864 tree arg = build_real (type, dconst2);
9865 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9866 }
9867 }
9868 }
9869 }
9870 goto associate;
9871
9872 case BIT_IOR_EXPR:
9873 /* Canonicalize (X & C1) | C2. */
9874 if (TREE_CODE (arg0) == BIT_AND_EXPR
9875 && TREE_CODE (arg1) == INTEGER_CST
9876 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9877 {
9878 int width = TYPE_PRECISION (type), w;
9879 wide_int c1 = TREE_OPERAND (arg0, 1);
9880 wide_int c2 = arg1;
9881
9882 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9883 if ((c1 & c2) == c1)
9884 return omit_one_operand_loc (loc, type, arg1,
9885 TREE_OPERAND (arg0, 0));
9886
9887 wide_int msk = wi::mask (width, false,
9888 TYPE_PRECISION (TREE_TYPE (arg1)));
9889
9890 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9891 if (msk.and_not (c1 | c2) == 0)
9892 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9893 TREE_OPERAND (arg0, 0), arg1);
9894
9895 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9896 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9897 mode which allows further optimizations. */
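	  /* For example, (X & 0x3F) | 0x30 becomes (X & 0x0F) | 0x30.  */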
9898 c1 &= msk;
9899 c2 &= msk;
9900 wide_int c3 = c1.and_not (c2);
9901 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9902 {
9903 wide_int mask = wi::mask (w, false,
9904 TYPE_PRECISION (type));
9905 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9906 {
9907 c3 = mask;
9908 break;
9909 }
9910 }
9911
9912 if (c3 != c1)
9913 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9914 fold_build2_loc (loc, BIT_AND_EXPR, type,
9915 TREE_OPERAND (arg0, 0),
9916 wide_int_to_tree (type,
9917 c3)),
9918 arg1);
9919 }
9920
9921 /* See if this can be simplified into a rotate first. If that
9922 is unsuccessful continue in the association code. */
9923 goto bit_rotate;
9924
9925 case BIT_XOR_EXPR:
9926 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9927 if (TREE_CODE (arg0) == BIT_AND_EXPR
9928 && INTEGRAL_TYPE_P (type)
9929 && integer_onep (TREE_OPERAND (arg0, 1))
9930 && integer_onep (arg1))
9931 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9932 build_zero_cst (TREE_TYPE (arg0)));
9933
9934 /* See if this can be simplified into a rotate first. If that
9935 is unsuccessful continue in the association code. */
9936 goto bit_rotate;
9937
9938 case BIT_AND_EXPR:
9939 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9940 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9941 && INTEGRAL_TYPE_P (type)
9942 && integer_onep (TREE_OPERAND (arg0, 1))
9943 && integer_onep (arg1))
9944 {
9945 tree tem2;
9946 tem = TREE_OPERAND (arg0, 0);
9947 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9948 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9949 tem, tem2);
9950 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9951 build_zero_cst (TREE_TYPE (tem)));
9952 }
9953 /* Fold ~X & 1 as (X & 1) == 0. */
9954 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9955 && INTEGRAL_TYPE_P (type)
9956 && integer_onep (arg1))
9957 {
9958 tree tem2;
9959 tem = TREE_OPERAND (arg0, 0);
9960 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9961 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9962 tem, tem2);
9963 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9964 build_zero_cst (TREE_TYPE (tem)));
9965 }
9966 /* Fold !X & 1 as X == 0. */
9967 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9968 && integer_onep (arg1))
9969 {
9970 tem = TREE_OPERAND (arg0, 0);
9971 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9972 build_zero_cst (TREE_TYPE (tem)));
9973 }
9974
9975 /* Fold (X ^ Y) & Y as ~X & Y. */
9976 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9977 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9978 {
9979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9981 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9982 fold_convert_loc (loc, type, arg1));
9983 }
9984 /* Fold (X ^ Y) & X as ~Y & X. */
9985 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9987 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9988 {
9989 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9990 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9991 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
9992 fold_convert_loc (loc, type, arg1));
9993 }
9994 /* Fold X & (X ^ Y) as X & ~Y. */
9995 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9996 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9997 {
9998 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9999 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10000 fold_convert_loc (loc, type, arg0),
10001 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10002 }
10003 /* Fold X & (Y ^ X) as ~Y & X. */
10004 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10005 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10006 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10007 {
10008 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10009 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10010 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10011 fold_convert_loc (loc, type, arg0));
10012 }
10013
10014 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10015 multiple of 1 << CST. */
10016 if (TREE_CODE (arg1) == INTEGER_CST)
10017 {
10018 wide_int cst1 = arg1;
10019 wide_int ncst1 = -cst1;
10020 if ((cst1 & ncst1) == ncst1
10021 && multiple_of_p (type, arg0,
10022 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10023 return fold_convert_loc (loc, type, arg0);
10024 }
10025
10026 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10027 bits from CST2. */
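      /* For example, (X * 8) & 7 folds to 0, and (X * 8) & 0x7F folds
	 to (X * 8) & 0x78, because the low three bits of X * 8 are
	 known to be zero.  */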
10028 if (TREE_CODE (arg1) == INTEGER_CST
10029 && TREE_CODE (arg0) == MULT_EXPR
10030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10031 {
10032 wide_int warg1 = arg1;
10033 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10034
10035 if (masked == 0)
10036 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10037 arg0, arg1);
10038 else if (masked != warg1)
10039 {
10040 /* Avoid the transform if arg1 is a mask of some
10041 mode which allows further optimizations. */
10042 int pop = wi::popcount (warg1);
10043 if (!(pop >= BITS_PER_UNIT
10044 && exact_log2 (pop) != -1
10045 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10046 return fold_build2_loc (loc, code, type, op0,
10047 wide_int_to_tree (type, masked));
10048 }
10049 }
10050
10051 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10052 ((A & N) + B) & M -> (A + B) & M
10053 Similarly if (N & M) == 0,
10054 ((A | N) + B) & M -> (A + B) & M
10055 and for - instead of + (or unary - instead of +)
10056 and/or ^ instead of |.
10057 If B is constant and (B & M) == 0, fold into A & M. */
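      /* For example, with M = 0xFF and N = 0x1FF, so that (N & M) == M,
	 ((A & 0x1FF) + B) & 0xFF folds to (A + B) & 0xFF.  */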
10058 if (TREE_CODE (arg1) == INTEGER_CST)
10059 {
10060 wide_int cst1 = arg1;
10061 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10062 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10063 && (TREE_CODE (arg0) == PLUS_EXPR
10064 || TREE_CODE (arg0) == MINUS_EXPR
10065 || TREE_CODE (arg0) == NEGATE_EXPR)
10066 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10067 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10068 {
10069 tree pmop[2];
10070 int which = 0;
10071 wide_int cst0;
10072
10073 /* Now we know that arg0 is (C + D) or (C - D) or
10074 -C and arg1 (M) is equal to (1LL << cst) - 1.
10075 Store C into PMOP[0] and D into PMOP[1]. */
10076 pmop[0] = TREE_OPERAND (arg0, 0);
10077 pmop[1] = NULL;
10078 if (TREE_CODE (arg0) != NEGATE_EXPR)
10079 {
10080 pmop[1] = TREE_OPERAND (arg0, 1);
10081 which = 1;
10082 }
10083
10084 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10085 which = -1;
10086
10087 for (; which >= 0; which--)
10088 switch (TREE_CODE (pmop[which]))
10089 {
10090 case BIT_AND_EXPR:
10091 case BIT_IOR_EXPR:
10092 case BIT_XOR_EXPR:
10093 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10094 != INTEGER_CST)
10095 break;
10096 cst0 = TREE_OPERAND (pmop[which], 1);
10097 cst0 &= cst1;
10098 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10099 {
10100 if (cst0 != cst1)
10101 break;
10102 }
10103 else if (cst0 != 0)
10104 break;
10105 /* If C or D is of the form (A & N) where
10106 (N & M) == M, or of the form (A | N) or
10107 (A ^ N) where (N & M) == 0, replace it with A. */
10108 pmop[which] = TREE_OPERAND (pmop[which], 0);
10109 break;
10110 case INTEGER_CST:
10111 /* If C or D is a constant N where (N & M) == 0, it can be
10112 omitted (assumed 0). */
10113 if ((TREE_CODE (arg0) == PLUS_EXPR
10114 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10115 && (cst1 & pmop[which]) == 0)
10116 pmop[which] = NULL;
10117 break;
10118 default:
10119 break;
10120 }
10121
10122 /* Only build anything new if we optimized one or both arguments
10123 above. */
10124 if (pmop[0] != TREE_OPERAND (arg0, 0)
10125 || (TREE_CODE (arg0) != NEGATE_EXPR
10126 && pmop[1] != TREE_OPERAND (arg0, 1)))
10127 {
10128 tree utype = TREE_TYPE (arg0);
10129 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10130 {
10131 /* Perform the operations in a type that has defined
10132 overflow behavior. */
10133 utype = unsigned_type_for (TREE_TYPE (arg0));
10134 if (pmop[0] != NULL)
10135 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10136 if (pmop[1] != NULL)
10137 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10138 }
10139
10140 if (TREE_CODE (arg0) == NEGATE_EXPR)
10141 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10142 else if (TREE_CODE (arg0) == PLUS_EXPR)
10143 {
10144 if (pmop[0] != NULL && pmop[1] != NULL)
10145 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10146 pmop[0], pmop[1]);
10147 else if (pmop[0] != NULL)
10148 tem = pmop[0];
10149 else if (pmop[1] != NULL)
10150 tem = pmop[1];
10151 else
10152 return build_int_cst (type, 0);
10153 }
10154 else if (pmop[0] == NULL)
10155 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10156 else
10157 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10158 pmop[0], pmop[1]);
10159 /* TEM is now the new binary +, - or unary - replacement. */
10160 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10161 fold_convert_loc (loc, utype, arg1));
10162 return fold_convert_loc (loc, type, tem);
10163 }
10164 }
10165 }
10166
10167 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10168 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10169 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10170 {
10171 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10172
10173 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10174 if (mask == -1)
10175 return
10176 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10177 }
10178
10179 goto associate;
10180
10181 case RDIV_EXPR:
10182 /* Don't touch a floating-point divide by zero unless the mode
10183 of the constant can represent infinity. */
10184 if (TREE_CODE (arg1) == REAL_CST
10185 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10186 && real_zerop (arg1))
10187 return NULL_TREE;
10188
10189 /* (-A) / (-B) -> A / B */
10190 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10191 return fold_build2_loc (loc, RDIV_EXPR, type,
10192 TREE_OPERAND (arg0, 0),
10193 negate_expr (arg1));
10194 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10195 return fold_build2_loc (loc, RDIV_EXPR, type,
10196 negate_expr (arg0),
10197 TREE_OPERAND (arg1, 0));
10198 return NULL_TREE;
10199
10200 case TRUNC_DIV_EXPR:
10201 /* Fall through */
10202
10203 case FLOOR_DIV_EXPR:
10204 /* Simplify A / (B << N) where A and B are positive and B is
10205 a power of 2, to A >> (N + log2(B)). */
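      /* For example, A / (4 << N) becomes A >> (N + 2).  */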
10206 strict_overflow_p = false;
10207 if (TREE_CODE (arg1) == LSHIFT_EXPR
10208 && (TYPE_UNSIGNED (type)
10209 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10210 {
10211 tree sval = TREE_OPERAND (arg1, 0);
10212 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10213 {
10214 tree sh_cnt = TREE_OPERAND (arg1, 1);
10215 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10216 wi::exact_log2 (sval));
10217
10218 if (strict_overflow_p)
10219 fold_overflow_warning (("assuming signed overflow does not "
10220 "occur when simplifying A / (B << N)"),
10221 WARN_STRICT_OVERFLOW_MISC);
10222
10223 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10224 sh_cnt, pow2);
10225 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10226 fold_convert_loc (loc, type, arg0), sh_cnt);
10227 }
10228 }
10229
10230 /* Fall through */
10231
10232 case ROUND_DIV_EXPR:
10233 case CEIL_DIV_EXPR:
10234 case EXACT_DIV_EXPR:
10235 if (integer_zerop (arg1))
10236 return NULL_TREE;
10237
10238 /* Convert -A / -B to A / B when the type is signed and overflow is
10239 undefined. */
10240 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10241 && TREE_CODE (arg0) == NEGATE_EXPR
10242 && negate_expr_p (arg1))
10243 {
10244 if (INTEGRAL_TYPE_P (type))
10245 fold_overflow_warning (("assuming signed overflow does not occur "
10246 "when distributing negation across "
10247 "division"),
10248 WARN_STRICT_OVERFLOW_MISC);
10249 return fold_build2_loc (loc, code, type,
10250 fold_convert_loc (loc, type,
10251 TREE_OPERAND (arg0, 0)),
10252 fold_convert_loc (loc, type,
10253 negate_expr (arg1)));
10254 }
10255 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10256 && TREE_CODE (arg1) == NEGATE_EXPR
10257 && negate_expr_p (arg0))
10258 {
10259 if (INTEGRAL_TYPE_P (type))
10260 fold_overflow_warning (("assuming signed overflow does not occur "
10261 "when distributing negation across "
10262 "division"),
10263 WARN_STRICT_OVERFLOW_MISC);
10264 return fold_build2_loc (loc, code, type,
10265 fold_convert_loc (loc, type,
10266 negate_expr (arg0)),
10267 fold_convert_loc (loc, type,
10268 TREE_OPERAND (arg1, 0)));
10269 }
10270
10271 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10272 operation, EXACT_DIV_EXPR.
10273
10274 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10275 At one time others generated faster code; it's not clear if they do
10276 after the last round of changes to the DIV code in expmed.c. */
10277 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10278 && multiple_of_p (type, arg0, arg1))
10279 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10280 fold_convert (type, arg0),
10281 fold_convert (type, arg1));
10282
10283 strict_overflow_p = false;
10284 if (TREE_CODE (arg1) == INTEGER_CST
10285 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10286 &strict_overflow_p)))
10287 {
10288 if (strict_overflow_p)
10289 fold_overflow_warning (("assuming signed overflow does not occur "
10290 "when simplifying division"),
10291 WARN_STRICT_OVERFLOW_MISC);
10292 return fold_convert_loc (loc, type, tem);
10293 }
10294
10295 return NULL_TREE;
10296
10297 case CEIL_MOD_EXPR:
10298 case FLOOR_MOD_EXPR:
10299 case ROUND_MOD_EXPR:
10300 case TRUNC_MOD_EXPR:
10301 strict_overflow_p = false;
10302 if (TREE_CODE (arg1) == INTEGER_CST
10303 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10304 &strict_overflow_p)))
10305 {
10306 if (strict_overflow_p)
10307 fold_overflow_warning (("assuming signed overflow does not occur "
10308 "when simplifying modulus"),
10309 WARN_STRICT_OVERFLOW_MISC);
10310 return fold_convert_loc (loc, type, tem);
10311 }
10312
10313 return NULL_TREE;
10314
10315 case LROTATE_EXPR:
10316 case RROTATE_EXPR:
10317 case RSHIFT_EXPR:
10318 case LSHIFT_EXPR:
10319 /* Since a negative shift count is not well-defined,
10320 don't try to compute it in the compiler. */
10321 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10322 return NULL_TREE;
10323
10324 prec = element_precision (type);
10325
10326 /* If we have a rotate of a bit operation with the rotate count and
10327 the second operand of the bit operation both constant,
10328 permute the two operations. */
10329 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10330 && (TREE_CODE (arg0) == BIT_AND_EXPR
10331 || TREE_CODE (arg0) == BIT_IOR_EXPR
10332 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10334 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10335 fold_build2_loc (loc, code, type,
10336 TREE_OPERAND (arg0, 0), arg1),
10337 fold_build2_loc (loc, code, type,
10338 TREE_OPERAND (arg0, 1), arg1));
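      /* Illustrative example (editorial): on an 8-bit unsigned type,
	 writing >>r for RROTATE_EXPR,

	   (x | 0x81) >>r 1   ==>   (x >>r 1) | (0x81 >>r 1)
			      ==>   (x >>r 1) | 0xC0

	 rotation is a pure bit permutation, so it distributes over the
	 bitwise operation and the constant half folds away.  */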
10339
10340 /* Two consecutive rotates adding up to some integer
10341 multiple of the precision of the type can be ignored. */
10342 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10343 && TREE_CODE (arg0) == RROTATE_EXPR
10344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10345 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10346 prec) == 0)
10347 return TREE_OPERAND (arg0, 0);
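      /* Illustrative example (editorial): on a 32-bit type,

	   (x >>r 12) >>r 20   ==>   x

	 because 12 + 20 == 32, a whole multiple of the precision.  */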
10348
10349 return NULL_TREE;
10350
10351 case MIN_EXPR:
10352 case MAX_EXPR:
10353 goto associate;
10354
10355 case TRUTH_ANDIF_EXPR:
10356 /* Note that the operands of this must be ints
10357 and their values must be 0 or 1.
10358 ("true" is a fixed value perhaps depending on the language.) */
10359 /* If first arg is constant zero, return it. */
10360 if (integer_zerop (arg0))
10361 return fold_convert_loc (loc, type, arg0);
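      /* Fall through */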
10362 case TRUTH_AND_EXPR:
10363 /* If either arg is constant true, drop it. */
10364 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10365 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10366 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10367 /* Preserve sequence points. */
10368 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10369 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10370 /* If second arg is constant zero, result is zero, but first arg
10371 must be evaluated. */
10372 if (integer_zerop (arg1))
10373 return omit_one_operand_loc (loc, type, arg1, arg0);
10374 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10375 case will be handled here. */
10376 if (integer_zerop (arg0))
10377 return omit_one_operand_loc (loc, type, arg0, arg1);
10378
10379 /* !X && X is always false. */
10380 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10381 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10382 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10383 /* X && !X is always false. */
10384 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10385 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10386 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10387
10388 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10389 means A >= Y && A != MAX, but in this case we know that
10390 A < X <= MAX. */
10391
10392 if (!TREE_SIDE_EFFECTS (arg0)
10393 && !TREE_SIDE_EFFECTS (arg1))
10394 {
10395 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10396 if (tem && !operand_equal_p (tem, arg0, 0))
10397 return fold_build2_loc (loc, code, type, tem, arg1);
10398
10399 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10400 if (tem && !operand_equal_p (tem, arg1, 0))
10401 return fold_build2_loc (loc, code, type, arg0, tem);
10402 }
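      /* Editorial restatement of the comment above: given A < X, A
	 cannot be the type's maximum value, so A + 1 cannot wrap and
	 A + 1 > Y collapses to A >= Y; the two calls above canonicalize
	 such sharp bounds in either operand position.  */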
10403
10404 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10405 != NULL_TREE)
10406 return tem;
10407
10408 return NULL_TREE;
10409
10410 case TRUTH_ORIF_EXPR:
10411 /* Note that the operands of this must be ints
10412 and their values must be 0 or true.
10413 ("true" is a fixed value perhaps depending on the language.) */
10414 /* If first arg is constant true, return it. */
10415 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10416 return fold_convert_loc (loc, type, arg0);
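      /* Fall through */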
10417 case TRUTH_OR_EXPR:
10418 /* If either arg is constant zero, drop it. */
10419 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10420 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10421 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10422 /* Preserve sequence points. */
10423 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10424 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10425 /* If second arg is constant true, result is true, but we must
10426 evaluate first arg. */
10427 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10428 return omit_one_operand_loc (loc, type, arg1, arg0);
10429 /* Likewise for first arg, but note this only occurs here for
10430 TRUTH_OR_EXPR. */
10431 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10432 return omit_one_operand_loc (loc, type, arg0, arg1);
10433
10434 /* !X || X is always true. */
10435 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10436 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10437 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10438 /* X || !X is always true. */
10439 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10440 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10441 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10442
10443 /* (X && !Y) || (!X && Y) is X ^ Y. */
10444 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10445 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10446 {
10447 tree a0, a1, l0, l1, n0, n1;
10448
10449 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10450 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10451
10452 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10453 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10454
10455 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10456 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10457
10458 if ((operand_equal_p (n0, a0, 0)
10459 && operand_equal_p (n1, a1, 0))
10460 || (operand_equal_p (n0, a1, 0)
10461 && operand_equal_p (n1, a0, 0)))
10462 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10463 }
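      /* Editorial note: because the negated operands are matched in
	 either order, this also catches the pairing
	 (x && y) || (!x && !y), which folds to x ^ !y, i.e. the XNOR
	 of x and y.  */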
10464
10465 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10466 != NULL_TREE)
10467 return tem;
10468
10469 return NULL_TREE;
10470
10471 case TRUTH_XOR_EXPR:
10472 /* If the second arg is constant zero, drop it. */
10473 if (integer_zerop (arg1))
10474 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10475 /* If the second arg is constant true, this is a logical inversion. */
10476 if (integer_onep (arg1))
10477 {
10478 tem = invert_truthvalue_loc (loc, arg0);
10479 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10480 }
10481 /* Identical arguments cancel to zero. */
10482 if (operand_equal_p (arg0, arg1, 0))
10483 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10484
10485 /* !X ^ X is always true. */
10486 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10487 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10488 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10489
10490 /* X ^ !X is always true. */
10491 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10492 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10493 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10494
10495 return NULL_TREE;
10496
10497 case EQ_EXPR:
10498 case NE_EXPR:
10499 STRIP_NOPS (arg0);
10500 STRIP_NOPS (arg1);
10501
10502 tem = fold_comparison (loc, code, type, op0, op1);
10503 if (tem != NULL_TREE)
10504 return tem;
10505
10506 /* bool_var != 1 becomes !bool_var. */
10507 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10508 && code == NE_EXPR)
10509 return fold_convert_loc (loc, type,
10510 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10511 TREE_TYPE (arg0), arg0));
10512
10513 /* bool_var == 0 becomes !bool_var. */
10514 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10515 && code == EQ_EXPR)
10516 return fold_convert_loc (loc, type,
10517 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10518 TREE_TYPE (arg0), arg0));
10519
10520 /* !exp != 0 becomes !exp. */
10521 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10522 && code == NE_EXPR)
10523 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10524
10525 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10526 if ((TREE_CODE (arg0) == PLUS_EXPR
10527 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10528 || TREE_CODE (arg0) == MINUS_EXPR)
10529 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10530 0)),
10531 arg1, 0)
10532 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10533 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10534 {
10535 tree val = TREE_OPERAND (arg0, 1);
10536 return omit_two_operands_loc (loc, type,
10537 fold_build2_loc (loc, code, type,
10538 val,
10539 build_int_cst (TREE_TYPE (val),
10540 0)),
10541 TREE_OPERAND (arg0, 0), arg1);
10542 }
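      /* Illustrative example (editorial): x + y == x folds to y == 0,
	 with x still evaluated for side effects.  No overflow
	 assumption is needed: for fixed x the map y -> x + y is
	 injective even with modular wrap-around.  */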
10543
10544 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10545 if (TREE_CODE (arg0) == MINUS_EXPR
10546 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10547 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10548 1)),
10549 arg1, 0)
10550 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10551 {
10552 return omit_two_operands_loc (loc, type,
10553 code == NE_EXPR
10554 ? boolean_true_node : boolean_false_node,
10555 TREE_OPERAND (arg0, 1), arg1);
10556 }
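      /* Editorial note: the oddness test above is what makes the result
	 constant: c - x == x would require 2*x == c, and 2*x is even in
	 any modular arithmetic, so e.g. 7 - x != x is always true.  */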
10557
10558 /* If this is an EQ or NE comparison with zero and ARG0 is
10559 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10560 two operations, but the latter can be done in one less insn
10561 on machines that have only two-operand insns or on which a
10562 constant cannot be the first operand. */
10563 if (TREE_CODE (arg0) == BIT_AND_EXPR
10564 && integer_zerop (arg1))
10565 {
10566 tree arg00 = TREE_OPERAND (arg0, 0);
10567 tree arg01 = TREE_OPERAND (arg0, 1);
10568 if (TREE_CODE (arg00) == LSHIFT_EXPR
10569 && integer_onep (TREE_OPERAND (arg00, 0)))
10570 {
10571 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10572 arg01, TREE_OPERAND (arg00, 1));
10573 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10574 build_int_cst (TREE_TYPE (arg0), 1));
10575 return fold_build2_loc (loc, code, type,
10576 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10577 arg1);
10578 }
10579 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10580 && integer_onep (TREE_OPERAND (arg01, 0)))
10581 {
10582 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10583 arg00, TREE_OPERAND (arg01, 1));
10584 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10585 build_int_cst (TREE_TYPE (arg0), 1));
10586 return fold_build2_loc (loc, code, type,
10587 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10588 arg1);
10589 }
10590 }
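      /* Illustrative example (editorial):

	   ((1 << n) & flags) != 0   ==>   ((flags >> n) & 1) != 0

	 both forms cost a shift and an AND, but the rewritten one no
	 longer needs the constant 1 as the first operand of a shift.  */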
10591
10592 /* If this is an NE or EQ comparison of zero against the result of a
10593 signed MOD operation whose second operand is a power of 2, make
10594 the MOD operation unsigned since it is simpler and equivalent. */
10595 if (integer_zerop (arg1)
10596 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10597 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10598 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10599 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10600 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10601 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10602 {
10603 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10604 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10605 fold_convert_loc (loc, newtype,
10606 TREE_OPERAND (arg0, 0)),
10607 fold_convert_loc (loc, newtype,
10608 TREE_OPERAND (arg0, 1)));
10609
10610 return fold_build2_loc (loc, code, type, newmod,
10611 fold_convert_loc (loc, newtype, arg1));
10612 }
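      /* Illustrative example (editorial): for signed int x,

	   x % 4 == 0   ==>   (unsigned int) x % 4 == 0

	 the forms agree for all x, negatives included, since
	 divisibility by a power of two depends only on the low bits,
	 which the conversion to unsigned preserves.  */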
10613
10614 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10615 C1 is a valid shift constant, and C2 is a power of two, i.e.
10616 a single bit. */
10617 if (TREE_CODE (arg0) == BIT_AND_EXPR
10618 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10619 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10620 == INTEGER_CST
10621 && integer_pow2p (TREE_OPERAND (arg0, 1))
10622 && integer_zerop (arg1))
10623 {
10624 tree itype = TREE_TYPE (arg0);
10625 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10626 prec = TYPE_PRECISION (itype);
10627
10628 /* Check for a valid shift count. */
10629 if (wi::ltu_p (arg001, prec))
10630 {
10631 tree arg01 = TREE_OPERAND (arg0, 1);
10632 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10633 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10634 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10635 can be rewritten as (X & (C2 << C1)) != 0. */
10636 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10637 {
10638 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10639 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10640 return fold_build2_loc (loc, code, type, tem,
10641 fold_convert_loc (loc, itype, arg1));
10642 }
10643 /* Otherwise, for signed (arithmetic) shifts,
10644 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10645 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10646 else if (!TYPE_UNSIGNED (itype))
10647 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10648 arg000, build_int_cst (itype, 0));
10649 /* Otherwise, for unsigned (logical) shifts,
10650 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10651 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10652 else
10653 return omit_one_operand_loc (loc, type,
10654 code == EQ_EXPR ? integer_one_node
10655 : integer_zero_node,
10656 arg000);
10657 }
10658 }
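      /* Illustrative example (editorial) of the three branches above,
	 for a 32-bit x:

	   ((x >> 3) & 4) != 0    ==>   (x & (4 << 3)) != 0
	   ((x >> 31) & 2) != 0   ==>   x < 0    (signed x; 2 << 31 overflows)
	   ((x >> 31) & 2) != 0   ==>   false    (unsigned x)  */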
10659
10660 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10661 Similarly for NE_EXPR. */
10662 if (TREE_CODE (arg0) == BIT_AND_EXPR
10663 && TREE_CODE (arg1) == INTEGER_CST
10664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10665 {
10666 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10667 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10668 TREE_OPERAND (arg0, 1));
10669 tree dandnotc
10670 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10671 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10672 notc);
10673 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10674 if (integer_nonzerop (dandnotc))
10675 return omit_one_operand_loc (loc, type, rslt, arg0);
10676 }
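      /* Illustrative example (editorial): (x & 3) == 4 folds to false
	 (and the NE form to true), since 4 has a bit set outside the
	 mask 3 and so can never equal x & 3.  */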
10677
10678 /* If this is a comparison of a field, we may be able to simplify it. */
10679 if ((TREE_CODE (arg0) == COMPONENT_REF
10680 || TREE_CODE (arg0) == BIT_FIELD_REF)
10681 /* Handle the constant case even without -O
10682 to make sure the warnings are given. */
10683 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10684 {
10685 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10686 if (t1)
10687 return t1;
10688 }
10689
10690 /* Optimize comparisons of strlen vs zero to a compare of the
10691 first character of the string vs zero. To wit,
10692 strlen(ptr) == 0 => *ptr == 0
10693 strlen(ptr) != 0 => *ptr != 0
10694 Other cases should reduce to one of these two (or a constant)
10695 due to the return value of strlen being unsigned. */
10696 if (TREE_CODE (arg0) == CALL_EXPR
10697 && integer_zerop (arg1))
10698 {
10699 tree fndecl = get_callee_fndecl (arg0);
10700
10701 if (fndecl
10702 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10703 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10704 && call_expr_nargs (arg0) == 1
10705 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10706 {
10707 tree iref = build_fold_indirect_ref_loc (loc,
10708 CALL_EXPR_ARG (arg0, 0));
10709 return fold_build2_loc (loc, code, type, iref,
10710 build_int_cst (TREE_TYPE (iref), 0));
10711 }
10712 }
10713
10714 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10715 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10716 if (TREE_CODE (arg0) == RSHIFT_EXPR
10717 && integer_zerop (arg1)
10718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10719 {
10720 tree arg00 = TREE_OPERAND (arg0, 0);
10721 tree arg01 = TREE_OPERAND (arg0, 1);
10722 tree itype = TREE_TYPE (arg00);
10723 if (wi::eq_p (arg01, element_precision (itype) - 1))
10724 {
10725 if (TYPE_UNSIGNED (itype))
10726 {
10727 itype = signed_type_for (itype);
10728 arg00 = fold_convert_loc (loc, itype, arg00);
10729 }
10730 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10731 type, arg00, build_zero_cst (itype));
10732 }
10733 }
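      /* Illustrative example (editorial): for 32-bit x,

	   (x >> 31) != 0   ==>   x < 0
	   (x >> 31) == 0   ==>   x >= 0

	 an unsigned x is first converted to the corresponding signed
	 type so that the comparison against zero tests the former
	 top bit.  */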
10734
10735 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10736 (X & C) == 0 when C is a single bit. */
10737 if (TREE_CODE (arg0) == BIT_AND_EXPR
10738 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10739 && integer_zerop (arg1)
10740 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10741 {
10742 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10743 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10744 TREE_OPERAND (arg0, 1));
10745 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10746 type, tem,
10747 fold_convert_loc (loc, TREE_TYPE (arg0),
10748 arg1));
10749 }
10750
10751 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10752 constant C is a power of two, i.e. a single bit. */
10753 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10754 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10755 && integer_zerop (arg1)
10756 && integer_pow2p (TREE_OPERAND (arg0, 1))
10757 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10758 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10759 {
10760 tree arg00 = TREE_OPERAND (arg0, 0);
10761 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10762 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10763 }
10764
10765 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10766 when C is a power of two, i.e. a single bit. */
10767 if (TREE_CODE (arg0) == BIT_AND_EXPR
10768 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10769 && integer_zerop (arg1)
10770 && integer_pow2p (TREE_OPERAND (arg0, 1))
10771 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10772 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10773 {
10774 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10775 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10776 arg000, TREE_OPERAND (arg0, 1));
10777 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10778 tem, build_int_cst (TREE_TYPE (tem), 0));
10779 }
10780
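      /* X == 0 is false and X != 0 is true when X is known to be
	 nonzero; X is still evaluated for its side effects.  */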
10781 if (integer_zerop (arg1)
10782 && tree_expr_nonzero_p (arg0))
10783 {
10784 tree res = constant_boolean_node (code == NE_EXPR, type);
10785 return omit_one_operand_loc (loc, type, res, arg0);
10786 }
10787
10788 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10789 if (TREE_CODE (arg0) == BIT_AND_EXPR
10790 && TREE_CODE (arg1) == BIT_AND_EXPR)
10791 {
10792 tree arg00 = TREE_OPERAND (arg0, 0);
10793 tree arg01 = TREE_OPERAND (arg0, 1);
10794 tree arg10 = TREE_OPERAND (arg1, 0);
10795 tree arg11 = TREE_OPERAND (arg1, 1);
10796 tree itype = TREE_TYPE (arg0);
10797
10798 if (operand_equal_p (arg01, arg11, 0))
10799 return fold_build2_loc (loc, code, type,
10800 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10801 fold_build2_loc (loc,
10802 BIT_XOR_EXPR, itype,
10803 arg00, arg10),
10804 arg01),
10805 build_zero_cst (itype));
10806
10807 if (operand_equal_p (arg01, arg10, 0))
10808 return fold_build2_loc (loc, code, type,
10809 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10810 fold_build2_loc (loc,
10811 BIT_XOR_EXPR, itype,
10812 arg00, arg11),
10813 arg01),
10814 build_zero_cst (itype));
10815
10816 if (operand_equal_p (arg00, arg11, 0))
10817 return fold_build2_loc (loc, code, type,
10818 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10819 fold_build2_loc (loc,
10820 BIT_XOR_EXPR, itype,
10821 arg01, arg10),
10822 arg00),
10823 build_zero_cst (itype));
10824
10825 if (operand_equal_p (arg00, arg10, 0))
10826 return fold_build2_loc (loc, code, type,
10827 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10828 fold_build2_loc (loc,
10829 BIT_XOR_EXPR, itype,
10830 arg01, arg11),
10831 arg00),
10832 build_zero_cst (itype));
10833 }
10834
10835 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10836 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10837 {
10838 tree arg00 = TREE_OPERAND (arg0, 0);
10839 tree arg01 = TREE_OPERAND (arg0, 1);
10840 tree arg10 = TREE_OPERAND (arg1, 0);
10841 tree arg11 = TREE_OPERAND (arg1, 1);
10842 tree itype = TREE_TYPE (arg0);
10843
10844 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10845 operand_equal_p guarantees no side-effects so we don't need
10846 to use omit_one_operand on Z. */
10847 if (operand_equal_p (arg01, arg11, 0))
10848 return fold_build2_loc (loc, code, type, arg00,
10849 fold_convert_loc (loc, TREE_TYPE (arg00),
10850 arg10));
10851 if (operand_equal_p (arg01, arg10, 0))
10852 return fold_build2_loc (loc, code, type, arg00,
10853 fold_convert_loc (loc, TREE_TYPE (arg00),
10854 arg11));
10855 if (operand_equal_p (arg00, arg11, 0))
10856 return fold_build2_loc (loc, code, type, arg01,
10857 fold_convert_loc (loc, TREE_TYPE (arg01),
10858 arg10));
10859 if (operand_equal_p (arg00, arg10, 0))
10860 return fold_build2_loc (loc, code, type, arg01,
10861 fold_convert_loc (loc, TREE_TYPE (arg01),
10862 arg11));
10863
10864 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10865 if (TREE_CODE (arg01) == INTEGER_CST
10866 && TREE_CODE (arg11) == INTEGER_CST)
10867 {
10868 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10869 fold_convert_loc (loc, itype, arg11));
10870 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10871 return fold_build2_loc (loc, code, type, tem,
10872 fold_convert_loc (loc, itype, arg10));
10873 }
10874 }
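      /* Illustrative example (editorial): (x ^ 3) == (y ^ 5) folds to
	 (x ^ 6) == y; XOR-ing both sides with 5 preserves (in)equality
	 and 3 ^ 5 == 6.  */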
10875
10876 /* Attempt to simplify equality/inequality comparisons of complex
10877 values. Only lower the comparison if the result is known or
10878 can be simplified to a single scalar comparison. */
10879 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10880 || TREE_CODE (arg0) == COMPLEX_CST)
10881 && (TREE_CODE (arg1) == COMPLEX_EXPR
10882 || TREE_CODE (arg1) == COMPLEX_CST))
10883 {
10884 tree real0, imag0, real1, imag1;
10885 tree rcond, icond;
10886
10887 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10888 {
10889 real0 = TREE_OPERAND (arg0, 0);
10890 imag0 = TREE_OPERAND (arg0, 1);
10891 }
10892 else
10893 {
10894 real0 = TREE_REALPART (arg0);
10895 imag0 = TREE_IMAGPART (arg0);
10896 }
10897
10898 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10899 {
10900 real1 = TREE_OPERAND (arg1, 0);
10901 imag1 = TREE_OPERAND (arg1, 1);
10902 }
10903 else
10904 {
10905 real1 = TREE_REALPART (arg1);
10906 imag1 = TREE_IMAGPART (arg1);
10907 }
10908
10909 rcond = fold_binary_loc (loc, code, type, real0, real1);
10910 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10911 {
10912 if (integer_zerop (rcond))
10913 {
10914 if (code == EQ_EXPR)
10915 return omit_two_operands_loc (loc, type, boolean_false_node,
10916 imag0, imag1);
10917 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10918 }
10919 else
10920 {
10921 if (code == NE_EXPR)
10922 return omit_two_operands_loc (loc, type, boolean_true_node,
10923 imag0, imag1);
10924 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10925 }
10926 }
10927
10928 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10929 if (icond && TREE_CODE (icond) == INTEGER_CST)
10930 {
10931 if (integer_zerop (icond))
10932 {
10933 if (code == EQ_EXPR)
10934 return omit_two_operands_loc (loc, type, boolean_false_node,
10935 real0, real1);
10936 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10937 }
10938 else
10939 {
10940 if (code == NE_EXPR)
10941 return omit_two_operands_loc (loc, type, boolean_true_node,
10942 real0, real1);
10943 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10944 }
10945 }
10946 }
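      /* Illustrative example (editorial): with z1 = COMPLEX_EXPR <x, 3>
	 and z2 = COMPLEX_EXPR <y, 4>, the imaginary halves compare
	 unequal at compile time, so z1 == z2 folds to false while still
	 arranging for x and y to be evaluated.  */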
10947
10948 return NULL_TREE;
10949
10950 case LT_EXPR:
10951 case GT_EXPR:
10952 case LE_EXPR:
10953 case GE_EXPR:
10954 tem = fold_comparison (loc, code, type, op0, op1);
10955 if (tem != NULL_TREE)
10956 return tem;
10957
10958 /* Transform comparisons of the form X +- C CMP X. */
10959 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10960 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10961 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10962 && !HONOR_SNANS (arg0))
10963 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10964 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10965 {
10966 tree arg01 = TREE_OPERAND (arg0, 1);
10967 enum tree_code code0 = TREE_CODE (arg0);
10968 int is_positive;
10969
10970 if (TREE_CODE (arg01) == REAL_CST)
10971 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10972 else
10973 is_positive = tree_int_cst_sgn (arg01);
10974
10975 /* (X - c) > X becomes false. */
10976 if (code == GT_EXPR
10977 && ((code0 == MINUS_EXPR && is_positive >= 0)
10978 || (code0 == PLUS_EXPR && is_positive <= 0)))
10979 {
10980 if (TREE_CODE (arg01) == INTEGER_CST
10981 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10982 fold_overflow_warning (("assuming signed overflow does not "
10983 "occur when assuming that (X - c) > X "
10984 "is always false"),
10985 WARN_STRICT_OVERFLOW_ALL);
10986 return constant_boolean_node (0, type);
10987 }
10988
10989 /* Likewise (X + c) < X becomes false. */
10990 if (code == LT_EXPR
10991 && ((code0 == PLUS_EXPR && is_positive >= 0)
10992 || (code0 == MINUS_EXPR && is_positive <= 0)))
10993 {
10994 if (TREE_CODE (arg01) == INTEGER_CST
10995 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10996 fold_overflow_warning (("assuming signed overflow does not "
10997 "occur when assuming that "
10998 "(X + c) < X is always false"),
10999 WARN_STRICT_OVERFLOW_ALL);
11000 return constant_boolean_node (0, type);
11001 }
11002
11003 /* Convert (X - c) <= X to true. */
11004 if (!HONOR_NANS (arg1)
11005 && code == LE_EXPR
11006 && ((code0 == MINUS_EXPR && is_positive >= 0)
11007 || (code0 == PLUS_EXPR && is_positive <= 0)))
11008 {
11009 if (TREE_CODE (arg01) == INTEGER_CST
11010 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11011 fold_overflow_warning (("assuming signed overflow does not "
11012 "occur when assuming that "
11013 "(X - c) <= X is always true"),
11014 WARN_STRICT_OVERFLOW_ALL);
11015 return constant_boolean_node (1, type);
11016 }
11017
11018 /* Convert (X + c) >= X to true. */
11019 if (!HONOR_NANS (arg1)
11020 && code == GE_EXPR
11021 && ((code0 == PLUS_EXPR && is_positive >= 0)
11022 || (code0 == MINUS_EXPR && is_positive <= 0)))
11023 {
11024 if (TREE_CODE (arg01) == INTEGER_CST
11025 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11026 fold_overflow_warning (("assuming signed overflow does not "
11027 "occur when assuming that "
11028 "(X + c) >= X is always true"),
11029 WARN_STRICT_OVERFLOW_ALL);
11030 return constant_boolean_node (1, type);
11031 }
11032
11033 if (TREE_CODE (arg01) == INTEGER_CST)
11034 {
11035 /* Convert X + c > X and X - c < X to true for integers. */
11036 if (code == GT_EXPR
11037 && ((code0 == PLUS_EXPR && is_positive > 0)
11038 || (code0 == MINUS_EXPR && is_positive < 0)))
11039 {
11040 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11041 fold_overflow_warning (("assuming signed overflow does "
11042 "not occur when assuming that "
11043 "(X + c) > X is always true"),
11044 WARN_STRICT_OVERFLOW_ALL);
11045 return constant_boolean_node (1, type);
11046 }
11047
11048 if (code == LT_EXPR
11049 && ((code0 == MINUS_EXPR && is_positive > 0)
11050 || (code0 == PLUS_EXPR && is_positive < 0)))
11051 {
11052 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11053 fold_overflow_warning (("assuming signed overflow does "
11054 "not occur when assuming that "
11055 "(X - c) < X is always true"),
11056 WARN_STRICT_OVERFLOW_ALL);
11057 return constant_boolean_node (1, type);
11058 }
11059
11060 /* Convert X + c <= X and X - c >= X to false for integers. */
11061 if (code == LE_EXPR
11062 && ((code0 == PLUS_EXPR && is_positive > 0)
11063 || (code0 == MINUS_EXPR && is_positive < 0)))
11064 {
11065 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11066 fold_overflow_warning (("assuming signed overflow does "
11067 "not occur when assuming that "
11068 "(X + c) <= X is always false"),
11069 WARN_STRICT_OVERFLOW_ALL);
11070 return constant_boolean_node (0, type);
11071 }
11072
11073 if (code == GE_EXPR
11074 && ((code0 == MINUS_EXPR && is_positive > 0)
11075 || (code0 == PLUS_EXPR && is_positive < 0)))
11076 {
11077 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11078 fold_overflow_warning (("assuming signed overflow does "
11079 "not occur when assuming that "
11080 "(X - c) >= X is always false"),
11081 WARN_STRICT_OVERFLOW_ALL);
11082 return constant_boolean_node (0, type);
11083 }
11084 }
11085 }
11086
11087 /* If we are comparing an ABS_EXPR with a constant, we can
11088 convert all the cases into explicit comparisons, but they may
11089 well not be faster than doing the ABS and one comparison.
11090 But ABS (X) <= C is a range comparison, which becomes a subtraction
11091 and a comparison, and is probably faster. */
11092 if (code == LE_EXPR
11093 && TREE_CODE (arg1) == INTEGER_CST
11094 && TREE_CODE (arg0) == ABS_EXPR
11095 && ! TREE_SIDE_EFFECTS (arg0)
11096 && (0 != (tem = negate_expr (arg1)))
11097 && TREE_CODE (tem) == INTEGER_CST
11098 && !TREE_OVERFLOW (tem))
11099 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11100 build2 (GE_EXPR, type,
11101 TREE_OPERAND (arg0, 0), tem),
11102 build2 (LE_EXPR, type,
11103 TREE_OPERAND (arg0, 0), arg1));
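      /* Illustrative example (editorial): abs (x) <= 5 becomes
	 x >= -5 && x <= 5, a range check that needs no ABS at all.  */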
11104
11105 /* Convert ABS_EXPR<x> >= 0 to true. */
11106 strict_overflow_p = false;
11107 if (code == GE_EXPR
11108 && (integer_zerop (arg1)
11109 || (! HONOR_NANS (arg0)
11110 && real_zerop (arg1)))
11111 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11112 {
11113 if (strict_overflow_p)
11114 fold_overflow_warning (("assuming signed overflow does not occur "
11115 "when simplifying comparison of "
11116 "absolute value and zero"),
11117 WARN_STRICT_OVERFLOW_CONDITIONAL);
11118 return omit_one_operand_loc (loc, type,
11119 constant_boolean_node (true, type),
11120 arg0);
11121 }
11122
11123 /* Convert ABS_EXPR<x> < 0 to false. */
11124 strict_overflow_p = false;
11125 if (code == LT_EXPR
11126 && (integer_zerop (arg1) || real_zerop (arg1))
11127 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11128 {
11129 if (strict_overflow_p)
11130 fold_overflow_warning (("assuming signed overflow does not occur "
11131 "when simplifying comparison of "
11132 "absolute value and zero"),
11133 WARN_STRICT_OVERFLOW_CONDITIONAL);
11134 return omit_one_operand_loc (loc, type,
11135 constant_boolean_node (false, type),
11136 arg0);
11137 }
11138
11139 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11140 and similarly for >= into !=. */
11141 if ((code == LT_EXPR || code == GE_EXPR)
11142 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11143 && TREE_CODE (arg1) == LSHIFT_EXPR
11144 && integer_onep (TREE_OPERAND (arg1, 0)))
11145 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11146 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11147 TREE_OPERAND (arg1, 1)),
11148 build_zero_cst (TREE_TYPE (arg0)));
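      /* Illustrative example (editorial): for unsigned x,

	   x < (1 << y)    ==>   (x >> y) == 0
	   x >= (1 << y)   ==>   (x >> y) != 0

	 x < 2^y (arithmetic power, not XOR) holds exactly when no bit
	 at position y or above is set.  */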
11149
11150 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11151 otherwise Y might be >= # of bits in X's type and thus e.g.
11152 (unsigned char) (1 << Y) for Y == 15 might be 0.
11153 If the cast is widening, then 1 << Y should have unsigned type,
11154 otherwise if Y is number of bits in the signed shift type minus 1,
11155 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11156 Y == 31 might be 0xffffffff80000000. */
11157 if ((code == LT_EXPR || code == GE_EXPR)
11158 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11159 && CONVERT_EXPR_P (arg1)
11160 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11161 && (element_precision (TREE_TYPE (arg1))
11162 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11163 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11164 || (element_precision (TREE_TYPE (arg1))
11165 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11166 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11167 {
11168 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11169 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11170 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11171 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11172 build_zero_cst (TREE_TYPE (arg0)));
11173 }
11174
11175 return NULL_TREE;
11176
11177 case UNORDERED_EXPR:
11178 case ORDERED_EXPR:
11179 case UNLT_EXPR:
11180 case UNLE_EXPR:
11181 case UNGT_EXPR:
11182 case UNGE_EXPR:
11183 case UNEQ_EXPR:
11184 case LTGT_EXPR:
11185 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11186 {
11187 tree targ0 = strip_float_extensions (arg0);
11188 tree targ1 = strip_float_extensions (arg1);
11189 tree newtype = TREE_TYPE (targ0);
11190
11191 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11192 newtype = TREE_TYPE (targ1);
11193
11194 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11195 return fold_build2_loc (loc, code, type,
11196 fold_convert_loc (loc, newtype, targ0),
11197 fold_convert_loc (loc, newtype, targ1));
11198 }
11199
11200 return NULL_TREE;
11201
11202 case COMPOUND_EXPR:
11203 /* When pedantic, a compound expression can be neither an lvalue
11204 nor an integer constant expression. */
11205 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11206 return NULL_TREE;
11207 /* Don't let (0, 0) be null pointer constant. */
11208 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11209 : fold_convert_loc (loc, type, arg1);
11210 return pedantic_non_lvalue_loc (loc, tem);
11211
11212 case ASSERT_EXPR:
11213 /* An ASSERT_EXPR should never be passed to fold_binary. */
11214 gcc_unreachable ();
11215
11216 default:
11217 return NULL_TREE;
11218 } /* switch (code) */
11219 }
11220
11221 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11222 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11223 of GOTO_EXPR. */
11224
11225 static tree
11226 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11227 {
11228 switch (TREE_CODE (*tp))
11229 {
11230 case LABEL_EXPR:
11231 return *tp;
11232
11233 case GOTO_EXPR:
11234 *walk_subtrees = 0;
11235
11236 /* ... fall through ... */
11237
11238 default:
11239 return NULL_TREE;
11240 }
11241 }
11242
11243 /* Return whether the sub-tree ST contains a label which is accessible from
11244 outside the sub-tree. */
11245
11246 static bool
11247 contains_label_p (tree st)
11248 {
11249 return
11250 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11251 }
11252
11253 /* Fold a ternary expression of code CODE and type TYPE with operands
11254 OP0, OP1, and OP2. Return the folded expression if folding is
11255 successful. Otherwise, return NULL_TREE. */
11256
11257 tree
11258 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11259 tree op0, tree op1, tree op2)
11260 {
11261 tree tem;
11262 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11263 enum tree_code_class kind = TREE_CODE_CLASS (code);
11264
11265 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11266 && TREE_CODE_LENGTH (code) == 3);
11267
11268 /* If this is a commutative operation, and OP0 is a constant, move it
11269 to OP1 to reduce the number of tests below. */
11270 if (commutative_ternary_tree_code (code)
11271 && tree_swap_operands_p (op0, op1, true))
11272 return fold_build3_loc (loc, code, type, op1, op0, op2);
11273
11274 tem = generic_simplify (loc, code, type, op0, op1, op2);
11275 if (tem)
11276 return tem;
11277
11278 /* Strip any conversions that don't change the mode. This is safe
11279 for every expression, except for a comparison expression because
11280 its signedness is derived from its operands. So, in the latter
11281 case, only strip conversions that don't change the signedness.
11282
11283 Note that this is done as an internal manipulation within the
11284 constant folder, in order to find the simplest representation of
11285 the arguments so that their form can be studied. In any case,
11286 the appropriate type conversions should be put back in the tree
11287 that will get out of the constant folder. */
11288 if (op0)
11289 {
11290 arg0 = op0;
11291 STRIP_NOPS (arg0);
11292 }
11293
11294 if (op1)
11295 {
11296 arg1 = op1;
11297 STRIP_NOPS (arg1);
11298 }
11299
11300 if (op2)
11301 {
11302 arg2 = op2;
11303 STRIP_NOPS (arg2);
11304 }
11305
11306 switch (code)
11307 {
11308 case COMPONENT_REF:
11309 if (TREE_CODE (arg0) == CONSTRUCTOR
11310 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11311 {
11312 unsigned HOST_WIDE_INT idx;
11313 tree field, value;
11314 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11315 if (field == arg1)
11316 return value;
11317 }
11318 return NULL_TREE;
11319
11320 case COND_EXPR:
11321 case VEC_COND_EXPR:
11322 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11323 so all simple results must be passed through pedantic_non_lvalue. */
11324 if (TREE_CODE (arg0) == INTEGER_CST)
11325 {
11326 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11327 tem = integer_zerop (arg0) ? op2 : op1;
11328 /* Only optimize constant conditions when the selected branch
11329 has the same type as the COND_EXPR. This avoids optimizing
11330 away "c ? x : throw", where the throw has a void type.
11331 Avoid throwing away the operand that contains a label. */
11332 if ((!TREE_SIDE_EFFECTS (unused_op)
11333 || !contains_label_p (unused_op))
11334 && (! VOID_TYPE_P (TREE_TYPE (tem))
11335 || VOID_TYPE_P (type)))
11336 return pedantic_non_lvalue_loc (loc, tem);
11337 return NULL_TREE;
11338 }
11339 else if (TREE_CODE (arg0) == VECTOR_CST)
11340 {
11341 if ((TREE_CODE (arg1) == VECTOR_CST
11342 || TREE_CODE (arg1) == CONSTRUCTOR)
11343 && (TREE_CODE (arg2) == VECTOR_CST
11344 || TREE_CODE (arg2) == CONSTRUCTOR))
11345 {
11346 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11347 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11348 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11349 for (i = 0; i < nelts; i++)
11350 {
11351 tree val = VECTOR_CST_ELT (arg0, i);
11352 if (integer_all_onesp (val))
11353 sel[i] = i;
11354 else if (integer_zerop (val))
11355 sel[i] = nelts + i;
11356 else /* Currently unreachable. */
11357 return NULL_TREE;
11358 }
11359 tree t = fold_vec_perm (type, arg1, arg2, sel);
11360 if (t != NULL_TREE)
11361 return t;
11362 }
11363 }
11364
11365 /* If we have A op B ? A : C, we may be able to convert this to a
11366 simpler expression, depending on the operation and the values
11367 of B and C. Signed zeros prevent all of these transformations,
11368 for reasons given above each one.
11369
11370 Also try swapping the arguments and inverting the conditional. */
11371 if (COMPARISON_CLASS_P (arg0)
11372 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11373 arg1, TREE_OPERAND (arg0, 1))
11374 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11375 {
11376 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11377 if (tem)
11378 return tem;
11379 }
11380
11381 if (COMPARISON_CLASS_P (arg0)
11382 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11383 op2,
11384 TREE_OPERAND (arg0, 1))
11385 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11386 {
11387 location_t loc0 = expr_location_or (arg0, loc);
11388 tem = fold_invert_truthvalue (loc0, arg0);
11389 if (tem && COMPARISON_CLASS_P (tem))
11390 {
11391 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11392 if (tem)
11393 return tem;
11394 }
11395 }
11396
11397 /* If the second operand is simpler than the third, swap them
11398 since that produces better jump optimization results. */
11399 if (truth_value_p (TREE_CODE (arg0))
11400 && tree_swap_operands_p (op1, op2, false))
11401 {
11402 location_t loc0 = expr_location_or (arg0, loc);
11403 /* See if this can be inverted. If it can't, possibly because
11404 it was a floating-point inequality comparison, don't do
11405 anything. */
11406 tem = fold_invert_truthvalue (loc0, arg0);
11407 if (tem)
11408 return fold_build3_loc (loc, code, type, tem, op2, op1);
11409 }
11410
11411 /* Convert A ? 1 : 0 to simply A. */
11412 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11413 : (integer_onep (op1)
11414 && !VECTOR_TYPE_P (type)))
11415 && integer_zerop (op2)
11416 /* If we try to convert OP0 to our type, the
11417 call to fold will try to move the conversion inside
11418 a COND, which will recurse. In that case, the COND_EXPR
11419 is probably the best choice, so leave it alone. */
11420 && type == TREE_TYPE (arg0))
11421 return pedantic_non_lvalue_loc (loc, arg0);
11422
11423 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11424 over COND_EXPR in cases such as floating point comparisons. */
11425 if (integer_zerop (op1)
11426 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11427 : (integer_onep (op2)
11428 && !VECTOR_TYPE_P (type)))
11429 && truth_value_p (TREE_CODE (arg0)))
11430 return pedantic_non_lvalue_loc (loc,
11431 fold_convert_loc (loc, type,
11432 invert_truthvalue_loc (loc,
11433 arg0)));
11434
11435 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11436 if (TREE_CODE (arg0) == LT_EXPR
11437 && integer_zerop (TREE_OPERAND (arg0, 1))
11438 && integer_zerop (op2)
11439 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11440 {
11441 /* sign_bit_p looks through both zero and sign extensions,
11442 but for this optimization only sign extensions are
11443 usable. */
11444 tree tem2 = TREE_OPERAND (arg0, 0);
11445 while (tem != tem2)
11446 {
11447 if (TREE_CODE (tem2) != NOP_EXPR
11448 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11449 {
11450 tem = NULL_TREE;
11451 break;
11452 }
11453 tem2 = TREE_OPERAND (tem2, 0);
11454 }
11455 /* sign_bit_p only checks ARG1 bits within A's precision.
11456 If <sign bit of A> has wider type than A, bits outside
11457 of A's precision in <sign bit of A> need to be checked.
11458 If they are all 0, this optimization needs to be done
11459 in unsigned A's type; if they are all 1, in signed A's type;
11460 otherwise this can't be done. */
11461 if (tem
11462 && TYPE_PRECISION (TREE_TYPE (tem))
11463 < TYPE_PRECISION (TREE_TYPE (arg1))
11464 && TYPE_PRECISION (TREE_TYPE (tem))
11465 < TYPE_PRECISION (type))
11466 {
11467 int inner_width, outer_width;
11468 tree tem_type;
11469
11470 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11471 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11472 if (outer_width > TYPE_PRECISION (type))
11473 outer_width = TYPE_PRECISION (type);
11474
11475 wide_int mask = wi::shifted_mask
11476 (inner_width, outer_width - inner_width, false,
11477 TYPE_PRECISION (TREE_TYPE (arg1)));
11478
11479 wide_int common = mask & arg1;
11480 if (common == mask)
11481 {
11482 tem_type = signed_type_for (TREE_TYPE (tem));
11483 tem = fold_convert_loc (loc, tem_type, tem);
11484 }
11485 else if (common == 0)
11486 {
11487 tem_type = unsigned_type_for (TREE_TYPE (tem));
11488 tem = fold_convert_loc (loc, tem_type, tem);
11489 }
11490 else
11491 tem = NULL;
11492 }
11493
11494 if (tem)
11495 return
11496 fold_convert_loc (loc, type,
11497 fold_build2_loc (loc, BIT_AND_EXPR,
11498 TREE_TYPE (tem), tem,
11499 fold_convert_loc (loc,
11500 TREE_TYPE (tem),
11501 arg1)));
11502 }
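      /* Illustrative example (editorial): for 32-bit int x,

	   x < 0 ? INT_MIN : 0   ==>   x & INT_MIN

	 the selected constant is exactly the sign bit of x, so the AND
	 reproduces the conditional's value directly.  */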
11503
11504 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11505 already handled above. */
11506 if (TREE_CODE (arg0) == BIT_AND_EXPR
11507 && integer_onep (TREE_OPERAND (arg0, 1))
11508 && integer_zerop (op2)
11509 && integer_pow2p (arg1))
11510 {
11511 tree tem = TREE_OPERAND (arg0, 0);
11512 STRIP_NOPS (tem);
11513 if (TREE_CODE (tem) == RSHIFT_EXPR
11514 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11515 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11516 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11517 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11518 TREE_OPERAND (tem, 0), arg1);
11519 }
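      /* Illustrative example (editorial): ((x >> 3) & 1) ? 8 : 0 folds
	 to x & 8, reading the tested bit directly in its final
	 position.  */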
11520
11521 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11522 is probably obsolete because the first operand should be a
11523 truth value (that's why we have the two cases above), but let's
11524 leave it in until we can confirm this for all front-ends. */
11525 if (integer_zerop (op2)
11526 && TREE_CODE (arg0) == NE_EXPR
11527 && integer_zerop (TREE_OPERAND (arg0, 1))
11528 && integer_pow2p (arg1)
11529 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11530 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11531 arg1, OEP_ONLY_CONST))
11532 return pedantic_non_lvalue_loc (loc,
11533 fold_convert_loc (loc, type,
11534 TREE_OPERAND (arg0, 0)));
11535
11536 /* Disable the transformations below for vectors, since
11537 fold_binary_op_with_conditional_arg may undo them immediately,
11538 yielding an infinite loop. */
11539 if (code == VEC_COND_EXPR)
11540 return NULL_TREE;
11541
11542 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11543 if (integer_zerop (op2)
11544 && truth_value_p (TREE_CODE (arg0))
11545 && truth_value_p (TREE_CODE (arg1))
11546 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11547 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11548 : TRUTH_ANDIF_EXPR,
11549 type, fold_convert_loc (loc, type, arg0), arg1);
11550
11551 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11552 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11553 && truth_value_p (TREE_CODE (arg0))
11554 && truth_value_p (TREE_CODE (arg1))
11555 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11556 {
11557 location_t loc0 = expr_location_or (arg0, loc);
11558 /* Only perform transformation if ARG0 is easily inverted. */
11559 tem = fold_invert_truthvalue (loc0, arg0);
11560 if (tem)
11561 return fold_build2_loc (loc, code == VEC_COND_EXPR
11562 ? BIT_IOR_EXPR
11563 : TRUTH_ORIF_EXPR,
11564 type, fold_convert_loc (loc, type, tem),
11565 arg1);
11566 }
11567
11568 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11569 if (integer_zerop (arg1)
11570 && truth_value_p (TREE_CODE (arg0))
11571 && truth_value_p (TREE_CODE (op2))
11572 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11573 {
11574 location_t loc0 = expr_location_or (arg0, loc);
11575 /* Only perform transformation if ARG0 is easily inverted. */
11576 tem = fold_invert_truthvalue (loc0, arg0);
11577 if (tem)
11578 return fold_build2_loc (loc, code == VEC_COND_EXPR
11579 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11580 type, fold_convert_loc (loc, type, tem),
11581 op2);
11582 }
11583
11584 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11585 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11586 && truth_value_p (TREE_CODE (arg0))
11587 && truth_value_p (TREE_CODE (op2))
11588 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11589 return fold_build2_loc (loc, code == VEC_COND_EXPR
11590 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11591 type, fold_convert_loc (loc, type, arg0), op2);
11592
11593 return NULL_TREE;
11594
11595 case CALL_EXPR:
11596 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11597 of fold_ternary on them. */
11598 gcc_unreachable ();
11599
11600 case BIT_FIELD_REF:
11601 if ((TREE_CODE (arg0) == VECTOR_CST
11602 || (TREE_CODE (arg0) == CONSTRUCTOR
11603 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11604 && (type == TREE_TYPE (TREE_TYPE (arg0))
11605 || (TREE_CODE (type) == VECTOR_TYPE
11606 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11607 {
11608 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11609 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11610 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11611 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11612
11613 if (n != 0
11614 && (idx % width) == 0
11615 && (n % width) == 0
11616 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11617 {
11618 idx = idx / width;
11619 n = n / width;
11620
11621 if (TREE_CODE (arg0) == VECTOR_CST)
11622 {
11623 if (n == 1)
11624 return VECTOR_CST_ELT (arg0, idx);
11625
11626 tree *vals = XALLOCAVEC (tree, n);
11627 for (unsigned i = 0; i < n; ++i)
11628 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11629 return build_vector (type, vals);
11630 }
11631
11632 /* Constructor elements can be subvectors. */
11633 unsigned HOST_WIDE_INT k = 1;
11634 if (CONSTRUCTOR_NELTS (arg0) != 0)
11635 {
11636 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11637 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11638 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11639 }
11640
11641 /* We keep an exact subset of the constructor elements. */
11642 if ((idx % k) == 0 && (n % k) == 0)
11643 {
11644 if (CONSTRUCTOR_NELTS (arg0) == 0)
11645 return build_constructor (type, NULL);
11646 idx /= k;
11647 n /= k;
11648 if (n == 1)
11649 {
11650 if (idx < CONSTRUCTOR_NELTS (arg0))
11651 return CONSTRUCTOR_ELT (arg0, idx)->value;
11652 return build_zero_cst (type);
11653 }
11654
11655 vec<constructor_elt, va_gc> *vals;
11656 vec_alloc (vals, n);
11657 for (unsigned i = 0;
11658 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11659 ++i)
11660 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11661 CONSTRUCTOR_ELT
11662 (arg0, idx + i)->value);
11663 return build_constructor (type, vals);
11664 }
11665 /* The bitfield references a single constructor element. */
11666 else if (idx + n <= (idx / k + 1) * k)
11667 {
11668 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11669 return build_zero_cst (type);
11670 else if (n == k)
11671 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11672 else
11673 return fold_build3_loc (loc, code, type,
11674 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11675 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11676 }
11677 }
11678 }
11679
11680 /* A bit-field-ref that references the full argument can be stripped. */
11681 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11682 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11683 && integer_zerop (op2))
11684 return fold_convert_loc (loc, type, arg0);
11685
11686 /* On constants we can use native encode/interpret to constant
11687 fold (nearly) all BIT_FIELD_REFs. */
11688 if (CONSTANT_CLASS_P (arg0)
11689 && can_native_interpret_type_p (type)
11690 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11691 /* This limitation should not be necessary; we just need to
11692 round this up to mode size. */
11693 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11694 /* Need bit-shifting of the buffer to relax the following. */
11695 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11696 {
11697 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11698 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11699 unsigned HOST_WIDE_INT clen;
11700 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11701 /* ??? We cannot tell native_encode_expr to start at
11702 some random byte only. So limit ourselves to a reasonable amount
11703 of work. */
11704 if (clen <= 4096)
11705 {
11706 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11707 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11708 if (len > 0
11709 && len * BITS_PER_UNIT >= bitpos + bitsize)
11710 {
11711 tree v = native_interpret_expr (type,
11712 b + bitpos / BITS_PER_UNIT,
11713 bitsize / BITS_PER_UNIT);
11714 if (v)
11715 return v;
11716 }
11717 }
11718 }
11719
11720 return NULL_TREE;
11721
11722 case FMA_EXPR:
11723 /* For integers we can decompose the FMA if possible. */
11724 if (TREE_CODE (arg0) == INTEGER_CST
11725 && TREE_CODE (arg1) == INTEGER_CST)
11726 return fold_build2_loc (loc, PLUS_EXPR, type,
11727 const_binop (MULT_EXPR, arg0, arg1), arg2);
11728 if (integer_zerop (arg2))
11729 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11730
11731 return fold_fma (loc, type, arg0, arg1, arg2);
11732
11733 case VEC_PERM_EXPR:
11734 if (TREE_CODE (arg2) == VECTOR_CST)
11735 {
11736 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11737 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11738 unsigned char *sel2 = sel + nelts;
11739 bool need_mask_canon = false;
11740 bool need_mask_canon2 = false;
11741 bool all_in_vec0 = true;
11742 bool all_in_vec1 = true;
11743 bool maybe_identity = true;
11744 bool single_arg = (op0 == op1);
11745 bool changed = false;
11746
11747 mask2 = 2 * nelts - 1;
11748 mask = single_arg ? (nelts - 1) : mask2;
11749 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11750 for (i = 0; i < nelts; i++)
11751 {
11752 tree val = VECTOR_CST_ELT (arg2, i);
11753 if (TREE_CODE (val) != INTEGER_CST)
11754 return NULL_TREE;
11755
11756 /* Make sure that the perm value is in an acceptable
11757 range. */
11758 wide_int t = val;
11759 need_mask_canon |= wi::gtu_p (t, mask);
11760 need_mask_canon2 |= wi::gtu_p (t, mask2);
11761 sel[i] = t.to_uhwi () & mask;
11762 sel2[i] = t.to_uhwi () & mask2;
11763
11764 if (sel[i] < nelts)
11765 all_in_vec1 = false;
11766 else
11767 all_in_vec0 = false;
11768
11769 if ((sel[i] & (nelts-1)) != i)
11770 maybe_identity = false;
11771 }
11772
11773 if (maybe_identity)
11774 {
11775 if (all_in_vec0)
11776 return op0;
11777 if (all_in_vec1)
11778 return op1;
11779 }
11780
11781 if (all_in_vec0)
11782 op1 = op0;
11783 else if (all_in_vec1)
11784 {
11785 op0 = op1;
11786 for (i = 0; i < nelts; i++)
11787 sel[i] -= nelts;
11788 need_mask_canon = true;
11789 }
11790
11791 if ((TREE_CODE (op0) == VECTOR_CST
11792 || TREE_CODE (op0) == CONSTRUCTOR)
11793 && (TREE_CODE (op1) == VECTOR_CST
11794 || TREE_CODE (op1) == CONSTRUCTOR))
11795 {
11796 tree t = fold_vec_perm (type, op0, op1, sel);
11797 if (t != NULL_TREE)
11798 return t;
11799 }
11800
11801 if (op0 == op1 && !single_arg)
11802 changed = true;
11803
11804 /* Some targets are deficient and fail to expand a single
11805 argument permutation while still allowing an equivalent
11806 2-argument version. */
11807 if (need_mask_canon && arg2 == op2
11808 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11809 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11810 {
11811 need_mask_canon = need_mask_canon2;
11812 sel = sel2;
11813 }
11814
11815 if (need_mask_canon && arg2 == op2)
11816 {
11817 tree *tsel = XALLOCAVEC (tree, nelts);
11818 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11819 for (i = 0; i < nelts; i++)
11820 tsel[i] = build_int_cst (eltype, sel[i]);
11821 op2 = build_vector (TREE_TYPE (arg2), tsel);
11822 changed = true;
11823 }
11824
11825 if (changed)
11826 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11827 }
11828 return NULL_TREE;
11829
11830 default:
11831 return NULL_TREE;
11832 } /* switch (code) */
11833 }
11834
11835 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11836 of an array (or vector). */
11837
11838 tree
11839 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11840 {
11841 tree index_type = NULL_TREE;
11842 offset_int low_bound = 0;
11843
11844 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11845 {
11846 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11847 if (domain_type && TYPE_MIN_VALUE (domain_type))
11848 {
11849 /* Static constructors for variably sized objects make no sense. */
11850 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11851 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11852 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11853 }
11854 }
11855
11856 if (index_type)
11857 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11858 TYPE_SIGN (index_type));
11859
11860 offset_int index = low_bound - 1;
11861 if (index_type)
11862 index = wi::ext (index, TYPE_PRECISION (index_type),
11863 TYPE_SIGN (index_type));
11864
11865 offset_int max_index;
11866 unsigned HOST_WIDE_INT cnt;
11867 tree cfield, cval;
11868
11869 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11870 {
11871 /* Array constructor might explicitly set index, or specify a range,
11872 or leave index NULL, meaning that it is the next index after the previous
11873 one. */
11874 if (cfield)
11875 {
11876 if (TREE_CODE (cfield) == INTEGER_CST)
11877 max_index = index = wi::to_offset (cfield);
11878 else
11879 {
11880 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11881 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11882 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11883 }
11884 }
11885 else
11886 {
11887 index += 1;
11888 if (index_type)
11889 index = wi::ext (index, TYPE_PRECISION (index_type),
11890 TYPE_SIGN (index_type));
11891 max_index = index;
11892 }
11893
11894 /* Do we have a match? */
11895 if (wi::cmpu (access_index, index) >= 0
11896 && wi::cmpu (access_index, max_index) <= 0)
11897 return cval;
11898 }
11899 return NULL_TREE;
11900 }
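/* Illustrative usage sketch (editorial, with a hypothetical caller):
   for a CONSTRUCTOR CTOR representing int a[4] = { [2] = 7 }, the call

     tree elt = get_array_ctor_element_at_index (ctor, 2);

   yields the INTEGER_CST 7, while indexes 0, 1 and 3 yield NULL_TREE
   because no initializer element covers them.  */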
11901
11902 /* Perform constant folding and related simplification of EXPR.
11903 The related simplifications include x*1 => x, x*0 => 0, etc.,
11904 and application of the associative law.
11905 NOP_EXPR conversions may be removed freely (as long as we
11906 are careful not to change the type of the overall expression).
11907 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11908 but we can constant-fold them if they have constant operands. */
11909
11910 #ifdef ENABLE_FOLD_CHECKING
11911 # define fold(x) fold_1 (x)
11912 static tree fold_1 (tree);
11913 static
11914 #endif
11915 tree
11916 fold (tree expr)
11917 {
11918 const tree t = expr;
11919 enum tree_code code = TREE_CODE (t);
11920 enum tree_code_class kind = TREE_CODE_CLASS (code);
11921 tree tem;
11922 location_t loc = EXPR_LOCATION (expr);
11923
11924 /* Return right away if a constant. */
11925 if (kind == tcc_constant)
11926 return t;
11927
11928 /* CALL_EXPR-like objects with variable numbers of operands are
11929 treated specially. */
11930 if (kind == tcc_vl_exp)
11931 {
11932 if (code == CALL_EXPR)
11933 {
11934 tem = fold_call_expr (loc, expr, false);
11935 return tem ? tem : expr;
11936 }
11937 return expr;
11938 }
11939
11940 if (IS_EXPR_CODE_CLASS (kind))
11941 {
11942 tree type = TREE_TYPE (t);
11943 tree op0, op1, op2;
11944
11945 switch (TREE_CODE_LENGTH (code))
11946 {
11947 case 1:
11948 op0 = TREE_OPERAND (t, 0);
11949 tem = fold_unary_loc (loc, code, type, op0);
11950 return tem ? tem : expr;
11951 case 2:
11952 op0 = TREE_OPERAND (t, 0);
11953 op1 = TREE_OPERAND (t, 1);
11954 tem = fold_binary_loc (loc, code, type, op0, op1);
11955 return tem ? tem : expr;
11956 case 3:
11957 op0 = TREE_OPERAND (t, 0);
11958 op1 = TREE_OPERAND (t, 1);
11959 op2 = TREE_OPERAND (t, 2);
11960 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11961 return tem ? tem : expr;
11962 default:
11963 break;
11964 }
11965 }
11966
11967 switch (code)
11968 {
11969 case ARRAY_REF:
11970 {
11971 tree op0 = TREE_OPERAND (t, 0);
11972 tree op1 = TREE_OPERAND (t, 1);
11973
11974 if (TREE_CODE (op1) == INTEGER_CST
11975 && TREE_CODE (op0) == CONSTRUCTOR
11976 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11977 {
11978 tree val = get_array_ctor_element_at_index (op0,
11979 wi::to_offset (op1));
11980 if (val)
11981 return val;
11982 }
11983
11984 return t;
11985 }
11986
11987 /* Return a VECTOR_CST if possible. */
11988 case CONSTRUCTOR:
11989 {
11990 tree type = TREE_TYPE (t);
11991 if (TREE_CODE (type) != VECTOR_TYPE)
11992 return t;
11993
11994 unsigned i;
11995 tree val;
11996 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11997 if (! CONSTANT_CLASS_P (val))
11998 return t;
11999
12000 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12001 }
12002
12003 case CONST_DECL:
12004 return fold (DECL_INITIAL (t));
12005
12006 default:
12007 return t;
12008 } /* switch (code) */
12009 }
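
/* Illustrative sketch (an addition, not part of the original sources):
   fold reduces a constant expression to a single node and hands back
   anything it cannot simplify unchanged.  */

static tree
example_fold_plus (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  tree sum = build2 (PLUS_EXPR, integer_type_node, one, two);
  /* The PLUS_EXPR of two INTEGER_CSTs folds to the INTEGER_CST 3.  */
  return fold (sum);
}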
12010
12011 #ifdef ENABLE_FOLD_CHECKING
12012 #undef fold
12013
12014 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12015 hash_table<nofree_ptr_hash<const tree_node> > *);
12016 static void fold_check_failed (const_tree, const_tree);
12017 void print_fold_checksum (const_tree);
12018
12019 /* When --enable-checking=fold is in effect, compute a digest of EXPR
12020 before and after the actual call to fold to verify that fold did not
12021 accidentally change the original EXPR. */
12022
12023 tree
12024 fold (tree expr)
12025 {
12026 tree ret;
12027 struct md5_ctx ctx;
12028 unsigned char checksum_before[16], checksum_after[16];
12029 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12030
12031 md5_init_ctx (&ctx);
12032 fold_checksum_tree (expr, &ctx, &ht);
12033 md5_finish_ctx (&ctx, checksum_before);
12034 ht.empty ();
12035
12036 ret = fold_1 (expr);
12037
12038 md5_init_ctx (&ctx);
12039 fold_checksum_tree (expr, &ctx, &ht);
12040 md5_finish_ctx (&ctx, checksum_after);
12041
12042 if (memcmp (checksum_before, checksum_after, 16))
12043 fold_check_failed (expr, ret);
12044
12045 return ret;
12046 }
12047
12048 void
12049 print_fold_checksum (const_tree expr)
12050 {
12051 struct md5_ctx ctx;
12052 unsigned char checksum[16], cnt;
12053 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12054
12055 md5_init_ctx (&ctx);
12056 fold_checksum_tree (expr, &ctx, &ht);
12057 md5_finish_ctx (&ctx, checksum);
12058 for (cnt = 0; cnt < 16; ++cnt)
12059 fprintf (stderr, "%02x", checksum[cnt]);
12060 putc ('\n', stderr);
12061 }
12062
12063 static void
12064 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12065 {
12066 internal_error ("fold check: original tree changed by fold");
12067 }
12068
12069 static void
12070 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12071 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12072 {
12073 const tree_node **slot;
12074 enum tree_code code;
12075 union tree_node buf;
12076 int i, len;
12077
12078 recursive_label:
12079 if (expr == NULL)
12080 return;
12081 slot = ht->find_slot (expr, INSERT);
12082 if (*slot != NULL)
12083 return;
12084 *slot = expr;
12085 code = TREE_CODE (expr);
12086 if (TREE_CODE_CLASS (code) == tcc_declaration
12087 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12088 {
12089 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12090 memcpy ((char *) &buf, expr, tree_size (expr));
12091 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12092 buf.decl_with_vis.symtab_node = NULL;
12093 expr = (tree) &buf;
12094 }
12095 else if (TREE_CODE_CLASS (code) == tcc_type
12096 && (TYPE_POINTER_TO (expr)
12097 || TYPE_REFERENCE_TO (expr)
12098 || TYPE_CACHED_VALUES_P (expr)
12099 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12100 || TYPE_NEXT_VARIANT (expr)))
12101 {
12102 /* Allow these fields to be modified. */
12103 tree tmp;
12104 memcpy ((char *) &buf, expr, tree_size (expr));
12105 expr = tmp = (tree) &buf;
12106 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12107 TYPE_POINTER_TO (tmp) = NULL;
12108 TYPE_REFERENCE_TO (tmp) = NULL;
12109 TYPE_NEXT_VARIANT (tmp) = NULL;
12110 if (TYPE_CACHED_VALUES_P (tmp))
12111 {
12112 TYPE_CACHED_VALUES_P (tmp) = 0;
12113 TYPE_CACHED_VALUES (tmp) = NULL;
12114 }
12115 }
12116 md5_process_bytes (expr, tree_size (expr), ctx);
12117 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12118 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12119 if (TREE_CODE_CLASS (code) != tcc_type
12120 && TREE_CODE_CLASS (code) != tcc_declaration
12121 && code != TREE_LIST
12122 && code != SSA_NAME
12123 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12124 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12125 switch (TREE_CODE_CLASS (code))
12126 {
12127 case tcc_constant:
12128 switch (code)
12129 {
12130 case STRING_CST:
12131 md5_process_bytes (TREE_STRING_POINTER (expr),
12132 TREE_STRING_LENGTH (expr), ctx);
12133 break;
12134 case COMPLEX_CST:
12135 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12136 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12137 break;
12138 case VECTOR_CST:
12139 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12140 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12141 break;
12142 default:
12143 break;
12144 }
12145 break;
12146 case tcc_exceptional:
12147 switch (code)
12148 {
12149 case TREE_LIST:
12150 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12151 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12152 expr = TREE_CHAIN (expr);
12153 goto recursive_label;
12154 break;
12155 case TREE_VEC:
12156 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12157 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12158 break;
12159 default:
12160 break;
12161 }
12162 break;
12163 case tcc_expression:
12164 case tcc_reference:
12165 case tcc_comparison:
12166 case tcc_unary:
12167 case tcc_binary:
12168 case tcc_statement:
12169 case tcc_vl_exp:
12170 len = TREE_OPERAND_LENGTH (expr);
12171 for (i = 0; i < len; ++i)
12172 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12173 break;
12174 case tcc_declaration:
12175 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12176 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12177 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12178 {
12179 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12180 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12181 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12182 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12183 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12184 }
12185
12186 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12187 {
12188 if (TREE_CODE (expr) == FUNCTION_DECL)
12189 {
12190 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12191 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12192 }
12193 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12194 }
12195 break;
12196 case tcc_type:
12197 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12198 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12199 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12200 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12201 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12202 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12203 if (INTEGRAL_TYPE_P (expr)
12204 || SCALAR_FLOAT_TYPE_P (expr))
12205 {
12206 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12207 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12208 }
12209 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12210 if (TREE_CODE (expr) == RECORD_TYPE
12211 || TREE_CODE (expr) == UNION_TYPE
12212 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12213 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12214 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12215 break;
12216 default:
12217 break;
12218 }
12219 }
12220
12221 /* Helper function for outputting the checksum of a tree T. When
12222 debugging with gdb, you can "define mynext" to be "next" followed
12223 by "call debug_fold_checksum (op0)", then just trace down till the
12224 outputs differ. */
12225
12226 DEBUG_FUNCTION void
12227 debug_fold_checksum (const_tree t)
12228 {
12229 int i;
12230 unsigned char checksum[16];
12231 struct md5_ctx ctx;
12232 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12233
12234 md5_init_ctx (&ctx);
12235 fold_checksum_tree (t, &ctx, &ht);
12236 md5_finish_ctx (&ctx, checksum);
12237 ht.empty ();
12238
12239 for (i = 0; i < 16; i++)
12240 fprintf (stderr, "%d ", checksum[i]);
12241
12242 fprintf (stderr, "\n");
12243 }
12244
12245 #endif
12246
12247 /* Fold a unary tree expression with code CODE of type TYPE with an
12248 operand OP0. LOC is the location of the resulting expression.
12249 Return a folded expression if successful. Otherwise, return a tree
12250 expression with code CODE of type TYPE with an operand OP0. */
12251
12252 tree
12253 fold_build1_stat_loc (location_t loc,
12254 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12255 {
12256 tree tem;
12257 #ifdef ENABLE_FOLD_CHECKING
12258 unsigned char checksum_before[16], checksum_after[16];
12259 struct md5_ctx ctx;
12260 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12261
12262 md5_init_ctx (&ctx);
12263 fold_checksum_tree (op0, &ctx, &ht);
12264 md5_finish_ctx (&ctx, checksum_before);
12265 ht.empty ();
12266 #endif
12267
12268 tem = fold_unary_loc (loc, code, type, op0);
12269 if (!tem)
12270 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12271
12272 #ifdef ENABLE_FOLD_CHECKING
12273 md5_init_ctx (&ctx);
12274 fold_checksum_tree (op0, &ctx, &ht);
12275 md5_finish_ctx (&ctx, checksum_after);
12276
12277 if (memcmp (checksum_before, checksum_after, 16))
12278 fold_check_failed (op0, tem);
12279 #endif
12280 return tem;
12281 }
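
/* Illustrative sketch (an addition, not part of the original sources):
   a typical use of the fold_build1_loc wrapper.  */

static tree
example_fold_build1 (location_t loc, tree op)
{
  /* Returns a simplified tree when fold_unary_loc succeeds (e.g. a
     constant when OP is constant) and otherwise builds the NEGATE_EXPR
     as-is, so callers never need to check for NULL_TREE.  */
  return fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (op), op);
}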
12282
12283 /* Fold a binary tree expression with code CODE of type TYPE with
12284 operands OP0 and OP1. LOC is the location of the resulting
12285 expression. Return a folded expression if successful. Otherwise,
12286 return a tree expression with code CODE of type TYPE with operands
12287 OP0 and OP1. */
12288
12289 tree
12290 fold_build2_stat_loc (location_t loc,
12291 enum tree_code code, tree type, tree op0, tree op1
12292 MEM_STAT_DECL)
12293 {
12294 tree tem;
12295 #ifdef ENABLE_FOLD_CHECKING
12296 unsigned char checksum_before_op0[16],
12297 checksum_before_op1[16],
12298 checksum_after_op0[16],
12299 checksum_after_op1[16];
12300 struct md5_ctx ctx;
12301 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12302
12303 md5_init_ctx (&ctx);
12304 fold_checksum_tree (op0, &ctx, &ht);
12305 md5_finish_ctx (&ctx, checksum_before_op0);
12306 ht.empty ();
12307
12308 md5_init_ctx (&ctx);
12309 fold_checksum_tree (op1, &ctx, &ht);
12310 md5_finish_ctx (&ctx, checksum_before_op1);
12311 ht.empty ();
12312 #endif
12313
12314 tem = fold_binary_loc (loc, code, type, op0, op1);
12315 if (!tem)
12316 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12317
12318 #ifdef ENABLE_FOLD_CHECKING
12319 md5_init_ctx (&ctx);
12320 fold_checksum_tree (op0, &ctx, &ht);
12321 md5_finish_ctx (&ctx, checksum_after_op0);
12322 ht.empty ();
12323
12324 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12325 fold_check_failed (op0, tem);
12326
12327 md5_init_ctx (&ctx);
12328 fold_checksum_tree (op1, &ctx, &ht);
12329 md5_finish_ctx (&ctx, checksum_after_op1);
12330
12331 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12332 fold_check_failed (op1, tem);
12333 #endif
12334 return tem;
12335 }
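
/* Illustrative sketch (an addition, not part of the original sources):
   fold_build2_loc applies simplifications such as x*1 => x before
   falling back to building the expression.  */

static tree
example_fold_build2 (location_t loc, tree x)
{
  tree one = build_int_cst (TREE_TYPE (x), 1);
  /* Folds x * 1 straight back to x instead of creating a MULT_EXPR.  */
  return fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (x), x, one);
}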
12336
12337 /* Fold a ternary tree expression with code CODE of type TYPE with
12338 operands OP0, OP1, and OP2. Return a folded expression if
12339 successful. Otherwise, return a tree expression with code CODE of
12340 type TYPE with operands OP0, OP1, and OP2. */
12341
12342 tree
12343 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12344 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12345 {
12346 tree tem;
12347 #ifdef ENABLE_FOLD_CHECKING
12348 unsigned char checksum_before_op0[16],
12349 checksum_before_op1[16],
12350 checksum_before_op2[16],
12351 checksum_after_op0[16],
12352 checksum_after_op1[16],
12353 checksum_after_op2[16];
12354 struct md5_ctx ctx;
12355 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12356
12357 md5_init_ctx (&ctx);
12358 fold_checksum_tree (op0, &ctx, &ht);
12359 md5_finish_ctx (&ctx, checksum_before_op0);
12360 ht.empty ();
12361
12362 md5_init_ctx (&ctx);
12363 fold_checksum_tree (op1, &ctx, &ht);
12364 md5_finish_ctx (&ctx, checksum_before_op1);
12365 ht.empty ();
12366
12367 md5_init_ctx (&ctx);
12368 fold_checksum_tree (op2, &ctx, &ht);
12369 md5_finish_ctx (&ctx, checksum_before_op2);
12370 ht.empty ();
12371 #endif
12372
12373 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12374 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12375 if (!tem)
12376 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12377
12378 #ifdef ENABLE_FOLD_CHECKING
12379 md5_init_ctx (&ctx);
12380 fold_checksum_tree (op0, &ctx, &ht);
12381 md5_finish_ctx (&ctx, checksum_after_op0);
12382 ht.empty ();
12383
12384 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12385 fold_check_failed (op0, tem);
12386
12387 md5_init_ctx (&ctx);
12388 fold_checksum_tree (op1, &ctx, &ht);
12389 md5_finish_ctx (&ctx, checksum_after_op1);
12390 ht.empty ();
12391
12392 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12393 fold_check_failed (op1, tem);
12394
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (op2, &ctx, &ht);
12397 md5_finish_ctx (&ctx, checksum_after_op2);
12398
12399 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12400 fold_check_failed (op2, tem);
12401 #endif
12402 return tem;
12403 }
12404
12405 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12406 arguments in ARGARRAY, and a null static chain.
12407 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12408 of type TYPE from the given operands as constructed by build_call_array. */
12409
12410 tree
12411 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12412 int nargs, tree *argarray)
12413 {
12414 tree tem;
12415 #ifdef ENABLE_FOLD_CHECKING
12416 unsigned char checksum_before_fn[16],
12417 checksum_before_arglist[16],
12418 checksum_after_fn[16],
12419 checksum_after_arglist[16];
12420 struct md5_ctx ctx;
12421 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12422 int i;
12423
12424 md5_init_ctx (&ctx);
12425 fold_checksum_tree (fn, &ctx, &ht);
12426 md5_finish_ctx (&ctx, checksum_before_fn);
12427 ht.empty ();
12428
12429 md5_init_ctx (&ctx);
12430 for (i = 0; i < nargs; i++)
12431 fold_checksum_tree (argarray[i], &ctx, &ht);
12432 md5_finish_ctx (&ctx, checksum_before_arglist);
12433 ht.empty ();
12434 #endif
12435
12436 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12437 if (!tem)
12438 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12439
12440 #ifdef ENABLE_FOLD_CHECKING
12441 md5_init_ctx (&ctx);
12442 fold_checksum_tree (fn, &ctx, &ht);
12443 md5_finish_ctx (&ctx, checksum_after_fn);
12444 ht.empty ();
12445
12446 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12447 fold_check_failed (fn, tem);
12448
12449 md5_init_ctx (&ctx);
12450 for (i = 0; i < nargs; i++)
12451 fold_checksum_tree (argarray[i], &ctx, &ht);
12452 md5_finish_ctx (&ctx, checksum_after_arglist);
12453
12454 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12455 fold_check_failed (NULL_TREE, tem);
12456 #endif
12457 return tem;
12458 }
12459
12460 /* Perform constant folding and related simplification of initializer
12461 expression EXPR. These behave identically to "fold_buildN" but ignore
12462 potential run-time traps and exceptions that fold must preserve. */
12463
12464 #define START_FOLD_INIT \
12465 int saved_signaling_nans = flag_signaling_nans;\
12466 int saved_trapping_math = flag_trapping_math;\
12467 int saved_rounding_math = flag_rounding_math;\
12468 int saved_trapv = flag_trapv;\
12469 int saved_folding_initializer = folding_initializer;\
12470 flag_signaling_nans = 0;\
12471 flag_trapping_math = 0;\
12472 flag_rounding_math = 0;\
12473 flag_trapv = 0;\
12474 folding_initializer = 1;
12475
12476 #define END_FOLD_INIT \
12477 flag_signaling_nans = saved_signaling_nans;\
12478 flag_trapping_math = saved_trapping_math;\
12479 flag_rounding_math = saved_rounding_math;\
12480 flag_trapv = saved_trapv;\
12481 folding_initializer = saved_folding_initializer;
12482
12483 tree
12484 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12485 tree type, tree op)
12486 {
12487 tree result;
12488 START_FOLD_INIT;
12489
12490 result = fold_build1_loc (loc, code, type, op);
12491
12492 END_FOLD_INIT;
12493 return result;
12494 }
12495
12496 tree
12497 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12498 tree type, tree op0, tree op1)
12499 {
12500 tree result;
12501 START_FOLD_INIT;
12502
12503 result = fold_build2_loc (loc, code, type, op0, op1);
12504
12505 END_FOLD_INIT;
12506 return result;
12507 }
12508
12509 tree
12510 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12511 int nargs, tree *argarray)
12512 {
12513 tree result;
12514 START_FOLD_INIT;
12515
12516 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12517
12518 END_FOLD_INIT;
12519 return result;
12520 }
12521
12522 #undef START_FOLD_INIT
12523 #undef END_FOLD_INIT
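
/* Illustrative sketch (an addition, not part of the original sources):
   inside an initializer the trap-related flags are suspended, so a
   division that fold would normally keep for its run-time effect can be
   folded to a constant (here +Inf, assuming IEEE doubles).  */

static tree
example_fold_initializer_div (location_t loc)
{
  tree one = build_real (double_type_node, dconst1);
  tree zero = build_real (double_type_node, dconst0);
  return fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
				      one, zero);
}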
12524
12525 /* Determine if the first argument is a multiple of the second argument.
12526 Return 0 if it is not, or if we cannot easily determine it to be.
12527
12528 An example of the sort of thing we care about (at this point; this routine
12529 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12530 fold cases do now) is discovering that
12531
12532 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12533
12534 is a multiple of
12535
12536 SAVE_EXPR (J * 8)
12537
12538 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12539
12540 This code also handles discovering that
12541
12542 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12543
12544 is a multiple of 8 so we don't have to worry about dealing with a
12545 possible remainder.
12546
12547 Note that we *look* inside a SAVE_EXPR only to determine how it was
12548 calculated; it is not safe for fold to do much of anything else with the
12549 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12550 at run time. For example, the latter example above *cannot* be implemented
12551 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12552 evaluation time of the original SAVE_EXPR is not necessarily the same at
12553 the time the new expression is evaluated. The only optimization of this
12554 sort that would be valid is changing
12555
12556 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12557
12558 divided by 8 to
12559
12560 SAVE_EXPR (I) * SAVE_EXPR (J)
12561
12562 (where the same SAVE_EXPR (J) is used in the original and the
12563 transformed version). */
12564
12565 int
12566 multiple_of_p (tree type, const_tree top, const_tree bottom)
12567 {
12568 if (operand_equal_p (top, bottom, 0))
12569 return 1;
12570
12571 if (TREE_CODE (type) != INTEGER_TYPE)
12572 return 0;
12573
12574 switch (TREE_CODE (top))
12575 {
12576 case BIT_AND_EXPR:
12577 /* Bitwise and provides a power of two multiple. If the mask is
12578 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12579 if (!integer_pow2p (bottom))
12580 return 0;
12581 /* FALLTHRU */
12582
12583 case MULT_EXPR:
12584 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12585 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12586
12587 case PLUS_EXPR:
12588 case MINUS_EXPR:
12589 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12590 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12591
12592 case LSHIFT_EXPR:
12593 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12594 {
12595 tree op1, t1;
12596
12597 op1 = TREE_OPERAND (top, 1);
12598 /* const_binop may not detect overflow correctly,
12599 so check for it explicitly here. */
12600 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12601 && 0 != (t1 = fold_convert (type,
12602 const_binop (LSHIFT_EXPR,
12603 size_one_node,
12604 op1)))
12605 && !TREE_OVERFLOW (t1))
12606 return multiple_of_p (type, t1, bottom);
12607 }
12608 return 0;
12609
12610 case NOP_EXPR:
12611 /* Can't handle conversions from non-integral or wider integral type. */
12612 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12613 || (TYPE_PRECISION (type)
12614 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12615 return 0;
12616
12617 /* ... fall through ... */
12618
12619 case SAVE_EXPR:
12620 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12621
12622 case COND_EXPR:
12623 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12624 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12625
12626 case INTEGER_CST:
12627 if (TREE_CODE (bottom) != INTEGER_CST
12628 || integer_zerop (bottom)
12629 || (TYPE_UNSIGNED (type)
12630 && (tree_int_cst_sgn (top) < 0
12631 || tree_int_cst_sgn (bottom) < 0)))
12632 return 0;
12633 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12634 SIGNED);
12635
12636 default:
12637 return 0;
12638 }
12639 }
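
/* Illustrative sketch (an addition, not part of the original sources):
   the kind of query multiple_of_p answers.  */

static int
example_multiple_of_8 (tree i)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree sixteen = build_int_cst (integer_type_node, 16);
  tree t = build2 (PLUS_EXPR, integer_type_node,
		   build2 (MULT_EXPR, integer_type_node, i, eight),
		   sixteen);
  /* I * 8 + 16 is a multiple of 8 for any I: the MULT_EXPR case accepts
     the left addend and the INTEGER_CST case accepts the right one.  */
  return multiple_of_p (integer_type_node, t, eight);
}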
12640
12641 #define tree_expr_nonnegative_warnv_p(X, Y) \
12642 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12643
12644 #define RECURSE(X) \
12645 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12646
12647 /* Return true if CODE or TYPE is known to be non-negative. */
12648
12649 static bool
12650 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12651 {
12652 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12653 && truth_value_p (code))
12654 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
12655 have a signed:1 type (where the values are -1 and 0). */
12656 return true;
12657 return false;
12658 }
12659
12660 /* Return true if (CODE OP0) is known to be non-negative. If the return
12661 value is based on the assumption that signed overflow is undefined,
12662 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12663 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12664
12665 bool
12666 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12667 bool *strict_overflow_p, int depth)
12668 {
12669 if (TYPE_UNSIGNED (type))
12670 return true;
12671
12672 switch (code)
12673 {
12674 case ABS_EXPR:
12675 /* We can't return true when wrapping overflow is in effect,
12676 because ABS_EXPR<INT_MIN> == INT_MIN. */
12677 if (!ANY_INTEGRAL_TYPE_P (type))
12678 return true;
12679 if (TYPE_OVERFLOW_UNDEFINED (type))
12680 {
12681 *strict_overflow_p = true;
12682 return true;
12683 }
12684 break;
12685
12686 case NON_LVALUE_EXPR:
12687 case FLOAT_EXPR:
12688 case FIX_TRUNC_EXPR:
12689 return RECURSE (op0);
12690
12691 CASE_CONVERT:
12692 {
12693 tree inner_type = TREE_TYPE (op0);
12694 tree outer_type = type;
12695
12696 if (TREE_CODE (outer_type) == REAL_TYPE)
12697 {
12698 if (TREE_CODE (inner_type) == REAL_TYPE)
12699 return RECURSE (op0);
12700 if (INTEGRAL_TYPE_P (inner_type))
12701 {
12702 if (TYPE_UNSIGNED (inner_type))
12703 return true;
12704 return RECURSE (op0);
12705 }
12706 }
12707 else if (INTEGRAL_TYPE_P (outer_type))
12708 {
12709 if (TREE_CODE (inner_type) == REAL_TYPE)
12710 return RECURSE (op0);
12711 if (INTEGRAL_TYPE_P (inner_type))
12712 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12713 && TYPE_UNSIGNED (inner_type);
12714 }
12715 }
12716 break;
12717
12718 default:
12719 return tree_simple_nonnegative_warnv_p (code, type);
12720 }
12721
12722 /* We don't know the sign of `t', so be conservative and return false. */
12723 return false;
12724 }
12725
12726 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12727 value is based on the assumption that signed overflow is undefined,
12728 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12729 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12730
12731 bool
12732 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12733 tree op1, bool *strict_overflow_p,
12734 int depth)
12735 {
12736 if (TYPE_UNSIGNED (type))
12737 return true;
12738
12739 switch (code)
12740 {
12741 case POINTER_PLUS_EXPR:
12742 case PLUS_EXPR:
12743 if (FLOAT_TYPE_P (type))
12744 return RECURSE (op0) && RECURSE (op1);
12745
12746 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12747 both unsigned and at least 2 bits shorter than the result. */
12748 if (TREE_CODE (type) == INTEGER_TYPE
12749 && TREE_CODE (op0) == NOP_EXPR
12750 && TREE_CODE (op1) == NOP_EXPR)
12751 {
12752 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12753 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12754 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12755 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12756 {
12757 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12758 TYPE_PRECISION (inner2)) + 1;
12759 return prec < TYPE_PRECISION (type);
12760 }
12761 }
12762 break;
12763
12764 case MULT_EXPR:
12765 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12766 {
12767 /* x * x is always non-negative for floating-point x,
12768 or for integers when overflow is undefined. */
12769 if (operand_equal_p (op0, op1, 0)
12770 || (RECURSE (op0) && RECURSE (op1)))
12771 {
12772 if (ANY_INTEGRAL_TYPE_P (type)
12773 && TYPE_OVERFLOW_UNDEFINED (type))
12774 *strict_overflow_p = true;
12775 return true;
12776 }
12777 }
12778
12779 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12780 both unsigned and the sum of their precisions is less than that of the result. */
12781 if (TREE_CODE (type) == INTEGER_TYPE
12782 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12783 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12784 {
12785 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12786 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12787 : TREE_TYPE (op0);
12788 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12789 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12790 : TREE_TYPE (op1);
12791
12792 bool unsigned0 = TYPE_UNSIGNED (inner0);
12793 bool unsigned1 = TYPE_UNSIGNED (inner1);
12794
12795 if (TREE_CODE (op0) == INTEGER_CST)
12796 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12797
12798 if (TREE_CODE (op1) == INTEGER_CST)
12799 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12800
12801 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12802 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12803 {
12804 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12805 ? tree_int_cst_min_precision (op0, UNSIGNED)
12806 : TYPE_PRECISION (inner0);
12807
12808 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12809 ? tree_int_cst_min_precision (op1, UNSIGNED)
12810 : TYPE_PRECISION (inner1);
12811
12812 return precision0 + precision1 < TYPE_PRECISION (type);
12813 }
12814 }
12815 return false;
12816
12817 case BIT_AND_EXPR:
12818 case MAX_EXPR:
12819 return RECURSE (op0) || RECURSE (op1);
12820
12821 case BIT_IOR_EXPR:
12822 case BIT_XOR_EXPR:
12823 case MIN_EXPR:
12824 case RDIV_EXPR:
12825 case TRUNC_DIV_EXPR:
12826 case CEIL_DIV_EXPR:
12827 case FLOOR_DIV_EXPR:
12828 case ROUND_DIV_EXPR:
12829 return RECURSE (op0) && RECURSE (op1);
12830
12831 case TRUNC_MOD_EXPR:
12832 return RECURSE (op0);
12833
12834 case FLOOR_MOD_EXPR:
12835 return RECURSE (op1);
12836
12837 case CEIL_MOD_EXPR:
12838 case ROUND_MOD_EXPR:
12839 default:
12840 return tree_simple_nonnegative_warnv_p (code, type);
12841 }
12842
12843 /* We don't know the sign of `t', so be conservative and return false. */
12844 return false;
12845 }
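
/* Illustrative sketch (an addition, not part of the original sources):
   the precision rule for MULT_EXPR above in action.  X16 and Y16 are
   assumed to be unsigned 16-bit values; whether the proof fires depends
   on the exact tree shape, since the code looks for NOP_EXPRs.  */

static bool
example_widened_product_nonneg (tree x16, tree y16)
{
  /* Widened to signed 64 bits, the product needs at most 32 bits, so
     the precision rule proves it nonnegative even when signed overflow
     wraps (-fwrapv).  */
  tree wx = build1 (NOP_EXPR, long_long_integer_type_node, x16);
  tree wy = build1 (NOP_EXPR, long_long_integer_type_node, y16);
  tree prod = build2 (MULT_EXPR, long_long_integer_type_node, wx, wy);
  return tree_expr_nonnegative_p (prod);
}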
12846
12847 /* Return true if T is known to be non-negative. If the return
12848 value is based on the assumption that signed overflow is undefined,
12849 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12850 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12851
12852 bool
12853 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12854 {
12855 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12856 return true;
12857
12858 switch (TREE_CODE (t))
12859 {
12860 case INTEGER_CST:
12861 return tree_int_cst_sgn (t) >= 0;
12862
12863 case REAL_CST:
12864 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12865
12866 case FIXED_CST:
12867 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12868
12869 case COND_EXPR:
12870 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12871
12872 case SSA_NAME:
12873 /* Limit the depth of recursion to avoid quadratic behavior.
12874 This is expected to catch almost all occurrences in practice.
12875 If this code misses important cases that unbounded recursion
12876 would not, passes that need this information could be revised
12877 to provide it through dataflow propagation. */
12878 return (!name_registered_for_update_p (t)
12879 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12880 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12881 strict_overflow_p, depth));
12882
12883 default:
12884 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12885 }
12886 }
12887
12888 /* Return true if T is known to be non-negative. If the return
12889 value is based on the assumption that signed overflow is undefined,
12890 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12891 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12892
12893 bool
12894 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
12895 bool *strict_overflow_p, int depth)
12896 {
12897 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12898 switch (DECL_FUNCTION_CODE (fndecl))
12899 {
12900 CASE_FLT_FN (BUILT_IN_ACOS):
12901 CASE_FLT_FN (BUILT_IN_ACOSH):
12902 CASE_FLT_FN (BUILT_IN_CABS):
12903 CASE_FLT_FN (BUILT_IN_COSH):
12904 CASE_FLT_FN (BUILT_IN_ERFC):
12905 CASE_FLT_FN (BUILT_IN_EXP):
12906 CASE_FLT_FN (BUILT_IN_EXP10):
12907 CASE_FLT_FN (BUILT_IN_EXP2):
12908 CASE_FLT_FN (BUILT_IN_FABS):
12909 CASE_FLT_FN (BUILT_IN_FDIM):
12910 CASE_FLT_FN (BUILT_IN_HYPOT):
12911 CASE_FLT_FN (BUILT_IN_POW10):
12912 CASE_INT_FN (BUILT_IN_FFS):
12913 CASE_INT_FN (BUILT_IN_PARITY):
12914 CASE_INT_FN (BUILT_IN_POPCOUNT):
12915 CASE_INT_FN (BUILT_IN_CLZ):
12916 CASE_INT_FN (BUILT_IN_CLRSB):
12917 case BUILT_IN_BSWAP32:
12918 case BUILT_IN_BSWAP64:
12919 /* Always true. */
12920 return true;
12921
12922 CASE_FLT_FN (BUILT_IN_SQRT):
12923 /* sqrt(-0.0) is -0.0. */
12924 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12925 return true;
12926 return RECURSE (arg0);
12927
12928 CASE_FLT_FN (BUILT_IN_ASINH):
12929 CASE_FLT_FN (BUILT_IN_ATAN):
12930 CASE_FLT_FN (BUILT_IN_ATANH):
12931 CASE_FLT_FN (BUILT_IN_CBRT):
12932 CASE_FLT_FN (BUILT_IN_CEIL):
12933 CASE_FLT_FN (BUILT_IN_ERF):
12934 CASE_FLT_FN (BUILT_IN_EXPM1):
12935 CASE_FLT_FN (BUILT_IN_FLOOR):
12936 CASE_FLT_FN (BUILT_IN_FMOD):
12937 CASE_FLT_FN (BUILT_IN_FREXP):
12938 CASE_FLT_FN (BUILT_IN_ICEIL):
12939 CASE_FLT_FN (BUILT_IN_IFLOOR):
12940 CASE_FLT_FN (BUILT_IN_IRINT):
12941 CASE_FLT_FN (BUILT_IN_IROUND):
12942 CASE_FLT_FN (BUILT_IN_LCEIL):
12943 CASE_FLT_FN (BUILT_IN_LDEXP):
12944 CASE_FLT_FN (BUILT_IN_LFLOOR):
12945 CASE_FLT_FN (BUILT_IN_LLCEIL):
12946 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12947 CASE_FLT_FN (BUILT_IN_LLRINT):
12948 CASE_FLT_FN (BUILT_IN_LLROUND):
12949 CASE_FLT_FN (BUILT_IN_LRINT):
12950 CASE_FLT_FN (BUILT_IN_LROUND):
12951 CASE_FLT_FN (BUILT_IN_MODF):
12952 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12953 CASE_FLT_FN (BUILT_IN_RINT):
12954 CASE_FLT_FN (BUILT_IN_ROUND):
12955 CASE_FLT_FN (BUILT_IN_SCALB):
12956 CASE_FLT_FN (BUILT_IN_SCALBLN):
12957 CASE_FLT_FN (BUILT_IN_SCALBN):
12958 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12959 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
12960 CASE_FLT_FN (BUILT_IN_SINH):
12961 CASE_FLT_FN (BUILT_IN_TANH):
12962 CASE_FLT_FN (BUILT_IN_TRUNC):
12963 /* True if the 1st argument is nonnegative. */
12964 return RECURSE (arg0);
12965
12966 CASE_FLT_FN (BUILT_IN_FMAX):
12967 /* True if the 1st OR 2nd arguments are nonnegative. */
12968 return RECURSE (arg0) || RECURSE (arg1);
12969
12970 CASE_FLT_FN (BUILT_IN_FMIN):
12971 /* True if the 1st AND 2nd arguments are nonnegative. */
12972 return RECURSE (arg0) && RECURSE (arg1);
12973
12974 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12975 /* True if the 2nd argument is nonnegative. */
12976 return RECURSE (arg1);
12977
12978 CASE_FLT_FN (BUILT_IN_POWI):
12979 /* True if the 1st argument is nonnegative or the second
12980 argument is an even integer. */
12981 if (TREE_CODE (arg1) == INTEGER_CST
12982 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12983 return true;
12984 return RECURSE (arg0);
12985
12986 CASE_FLT_FN (BUILT_IN_POW):
12987 /* True if the 1st argument is nonnegative or the second
12988 argument is an even integer-valued real. */
12989 if (TREE_CODE (arg1) == REAL_CST)
12990 {
12991 REAL_VALUE_TYPE c;
12992 HOST_WIDE_INT n;
12993
12994 c = TREE_REAL_CST (arg1);
12995 n = real_to_integer (&c);
12996 if ((n & 1) == 0)
12997 {
12998 REAL_VALUE_TYPE cint;
12999 real_from_integer (&cint, VOIDmode, n, SIGNED);
13000 if (real_identical (&c, &cint))
13001 return true;
13002 }
13003 }
13004 return RECURSE (arg0);
13005
13006 default:
13007 break;
13008 }
13009 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13010 }
13011
13012 /* Return true if T is known to be non-negative. If the return
13013 value is based on the assumption that signed overflow is undefined,
13014 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13015 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13016
13017 static bool
13018 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13019 {
13020 enum tree_code code = TREE_CODE (t);
13021 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13022 return true;
13023
13024 switch (code)
13025 {
13026 case TARGET_EXPR:
13027 {
13028 tree temp = TARGET_EXPR_SLOT (t);
13029 t = TARGET_EXPR_INITIAL (t);
13030
13031 /* If the initializer is non-void, then it's a normal expression
13032 that will be assigned to the slot. */
13033 if (!VOID_TYPE_P (t))
13034 return RECURSE (t);
13035
13036 /* Otherwise, the initializer sets the slot in some way. One common
13037 way is an assignment statement at the end of the initializer. */
13038 while (1)
13039 {
13040 if (TREE_CODE (t) == BIND_EXPR)
13041 t = expr_last (BIND_EXPR_BODY (t));
13042 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13043 || TREE_CODE (t) == TRY_CATCH_EXPR)
13044 t = expr_last (TREE_OPERAND (t, 0));
13045 else if (TREE_CODE (t) == STATEMENT_LIST)
13046 t = expr_last (t);
13047 else
13048 break;
13049 }
13050 if (TREE_CODE (t) == MODIFY_EXPR
13051 && TREE_OPERAND (t, 0) == temp)
13052 return RECURSE (TREE_OPERAND (t, 1));
13053
13054 return false;
13055 }
13056
13057 case CALL_EXPR:
13058 {
13059 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13060 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13061
13062 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13063 get_callee_fndecl (t),
13064 arg0,
13065 arg1,
13066 strict_overflow_p, depth);
13067 }
13068 case COMPOUND_EXPR:
13069 case MODIFY_EXPR:
13070 return RECURSE (TREE_OPERAND (t, 1));
13071
13072 case BIND_EXPR:
13073 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13074
13075 case SAVE_EXPR:
13076 return RECURSE (TREE_OPERAND (t, 0));
13077
13078 default:
13079 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13080 }
13081 }
13082
13083 #undef RECURSE
13084 #undef tree_expr_nonnegative_warnv_p
13085
13086 /* Return true if T is known to be non-negative. If the return
13087 value is based on the assumption that signed overflow is undefined,
13088 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13089 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13090
13091 bool
13092 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13093 {
13094 enum tree_code code;
13095 if (t == error_mark_node)
13096 return false;
13097
13098 code = TREE_CODE (t);
13099 switch (TREE_CODE_CLASS (code))
13100 {
13101 case tcc_binary:
13102 case tcc_comparison:
13103 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13104 TREE_TYPE (t),
13105 TREE_OPERAND (t, 0),
13106 TREE_OPERAND (t, 1),
13107 strict_overflow_p, depth);
13108
13109 case tcc_unary:
13110 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13111 TREE_TYPE (t),
13112 TREE_OPERAND (t, 0),
13113 strict_overflow_p, depth);
13114
13115 case tcc_constant:
13116 case tcc_declaration:
13117 case tcc_reference:
13118 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13119
13120 default:
13121 break;
13122 }
13123
13124 switch (code)
13125 {
13126 case TRUTH_AND_EXPR:
13127 case TRUTH_OR_EXPR:
13128 case TRUTH_XOR_EXPR:
13129 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13130 TREE_TYPE (t),
13131 TREE_OPERAND (t, 0),
13132 TREE_OPERAND (t, 1),
13133 strict_overflow_p, depth);
13134 case TRUTH_NOT_EXPR:
13135 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13136 TREE_TYPE (t),
13137 TREE_OPERAND (t, 0),
13138 strict_overflow_p, depth);
13139
13140 case COND_EXPR:
13141 case CONSTRUCTOR:
13142 case OBJ_TYPE_REF:
13143 case ASSERT_EXPR:
13144 case ADDR_EXPR:
13145 case WITH_SIZE_EXPR:
13146 case SSA_NAME:
13147 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13148
13149 default:
13150 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13151 }
13152 }
13153
13154 /* Return true if `t' is known to be non-negative. Handle warnings
13155 about undefined signed overflow. */
13156
13157 bool
13158 tree_expr_nonnegative_p (tree t)
13159 {
13160 bool ret, strict_overflow_p;
13161
13162 strict_overflow_p = false;
13163 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13164 if (strict_overflow_p)
13165 fold_overflow_warning (("assuming signed overflow does not occur when "
13166 "determining that expression is always "
13167 "non-negative"),
13168 WARN_STRICT_OVERFLOW_MISC);
13169 return ret;
13170 }
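
/* Illustrative sketch (an addition, not part of the original sources):
   a query that succeeds only by assuming undefined signed overflow,
   which is when the warning above fires.  */

static bool
example_abs_nonneg (tree x)
{
  /* ABS_EXPR of a signed X is nonnegative only if ABS_EXPR<INT_MIN>
     cannot wrap, i.e. only when signed overflow is undefined; in that
     case tree_expr_nonnegative_p may also emit -Wstrict-overflow.  */
  tree t = build1 (ABS_EXPR, TREE_TYPE (x), x);
  return tree_expr_nonnegative_p (t);
}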
13171
13172
13173 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13174 For floating point we further ensure that T is not denormal.
13175 Similar logic is present in nonzero_address in rtlanal.c.
13176
13177 If the return value is based on the assumption that signed overflow
13178 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13179 change *STRICT_OVERFLOW_P. */
13180
13181 bool
13182 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13183 bool *strict_overflow_p)
13184 {
13185 switch (code)
13186 {
13187 case ABS_EXPR:
13188 return tree_expr_nonzero_warnv_p (op0,
13189 strict_overflow_p);
13190
13191 case NOP_EXPR:
13192 {
13193 tree inner_type = TREE_TYPE (op0);
13194 tree outer_type = type;
13195
13196 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13197 && tree_expr_nonzero_warnv_p (op0,
13198 strict_overflow_p));
13199 }
13200 break;
13201
13202 case NON_LVALUE_EXPR:
13203 return tree_expr_nonzero_warnv_p (op0,
13204 strict_overflow_p);
13205
13206 default:
13207 break;
13208 }
13209
13210 return false;
13211 }
13212
13213 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13214 For floating point we further ensure that T is not denormal.
13215 Similar logic is present in nonzero_address in rtlanal.c.
13216
13217 If the return value is based on the assumption that signed overflow
13218 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13219 change *STRICT_OVERFLOW_P. */
13220
13221 bool
13222 tree_binary_nonzero_warnv_p (enum tree_code code,
13223 tree type,
13224 tree op0,
13225 tree op1, bool *strict_overflow_p)
13226 {
13227 bool sub_strict_overflow_p;
13228 switch (code)
13229 {
13230 case POINTER_PLUS_EXPR:
13231 case PLUS_EXPR:
13232 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13233 {
13234 /* In the presence of negative values it is hard
13235 to say anything definite. */
13236 sub_strict_overflow_p = false;
13237 if (!tree_expr_nonnegative_warnv_p (op0,
13238 &sub_strict_overflow_p)
13239 || !tree_expr_nonnegative_warnv_p (op1,
13240 &sub_strict_overflow_p))
13241 return false;
13242 /* One of the operands must be positive and the other non-negative. */
13243 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13244 overflows, on a twos-complement machine the sum of two
13245 nonnegative numbers can never be zero. */
13246 return (tree_expr_nonzero_warnv_p (op0,
13247 strict_overflow_p)
13248 || tree_expr_nonzero_warnv_p (op1,
13249 strict_overflow_p));
13250 }
13251 break;
13252
13253 case MULT_EXPR:
13254 if (TYPE_OVERFLOW_UNDEFINED (type))
13255 {
13256 if (tree_expr_nonzero_warnv_p (op0,
13257 strict_overflow_p)
13258 && tree_expr_nonzero_warnv_p (op1,
13259 strict_overflow_p))
13260 {
13261 *strict_overflow_p = true;
13262 return true;
13263 }
13264 }
13265 break;
13266
13267 case MIN_EXPR:
13268 sub_strict_overflow_p = false;
13269 if (tree_expr_nonzero_warnv_p (op0,
13270 &sub_strict_overflow_p)
13271 && tree_expr_nonzero_warnv_p (op1,
13272 &sub_strict_overflow_p))
13273 {
13274 if (sub_strict_overflow_p)
13275 *strict_overflow_p = true;
13276 }
13277 break;
13278
13279 case MAX_EXPR:
13280 sub_strict_overflow_p = false;
13281 if (tree_expr_nonzero_warnv_p (op0,
13282 &sub_strict_overflow_p))
13283 {
13284 if (sub_strict_overflow_p)
13285 *strict_overflow_p = true;
13286
13287 /* When both operands are nonzero, MAX must be too. */
13288 if (tree_expr_nonzero_warnv_p (op1,
13289 strict_overflow_p))
13290 return true;
13291
13292 /* MAX where operand 0 is positive is positive. */
13293 return tree_expr_nonnegative_warnv_p (op0,
13294 strict_overflow_p);
13295 }
13296 /* MAX where operand 1 is positive is positive. */
13297 else if (tree_expr_nonzero_warnv_p (op1,
13298 &sub_strict_overflow_p)
13299 && tree_expr_nonnegative_warnv_p (op1,
13300 &sub_strict_overflow_p))
13301 {
13302 if (sub_strict_overflow_p)
13303 *strict_overflow_p = true;
13304 return true;
13305 }
13306 break;
13307
13308 case BIT_IOR_EXPR:
13309 return (tree_expr_nonzero_warnv_p (op1,
13310 strict_overflow_p)
13311 || tree_expr_nonzero_warnv_p (op0,
13312 strict_overflow_p));
13313
13314 default:
13315 break;
13316 }
13317
13318 return false;
13319 }
13320
13321 /* Return true when T is an address and is known to be nonzero.
13322 For floating point we further ensure that T is not denormal.
13323 Similar logic is present in nonzero_address in rtlanal.c.
13324
13325 If the return value is based on the assumption that signed overflow
13326 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13327 change *STRICT_OVERFLOW_P. */
13328
13329 bool
13330 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13331 {
13332 bool sub_strict_overflow_p;
13333 switch (TREE_CODE (t))
13334 {
13335 case INTEGER_CST:
13336 return !integer_zerop (t);
13337
13338 case ADDR_EXPR:
13339 {
13340 tree base = TREE_OPERAND (t, 0);
13341
13342 if (!DECL_P (base))
13343 base = get_base_address (base);
13344
13345 if (!base)
13346 return false;
13347
13348 /* For objects in the symbol table, check if we know they are non-zero.
13349 Don't do anything for variables and functions before the symtab is built;
13350 it is quite possible that they will be declared weak later. */
13351 if (DECL_P (base) && decl_in_symtab_p (base))
13352 {
13353 struct symtab_node *symbol;
13354
13355 symbol = symtab_node::get_create (base);
13356 if (symbol)
13357 return symbol->nonzero_address ();
13358 else
13359 return false;
13360 }
13361
13362 /* Function-local objects are never NULL. */
13363 if (DECL_P (base)
13364 && (DECL_CONTEXT (base)
13365 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13366 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13367 return true;
13368
13369 /* Constants are never weak. */
13370 if (CONSTANT_CLASS_P (base))
13371 return true;
13372
13373 return false;
13374 }
13375
13376 case COND_EXPR:
13377 sub_strict_overflow_p = false;
13378 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13379 &sub_strict_overflow_p)
13380 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13381 &sub_strict_overflow_p))
13382 {
13383 if (sub_strict_overflow_p)
13384 *strict_overflow_p = true;
13385 return true;
13386 }
13387 break;
13388
13389 default:
13390 break;
13391 }
13392 return false;
13393 }
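
/* Illustrative sketch (an addition, not part of the original sources):
   taking the address of a function-local variable yields a provably
   nonzero pointer per the ADDR_EXPR case above.  */

static bool
example_local_address_nonzero (tree local_var)
{
  bool strict_overflow_p = false;
  tree addr = build_fold_addr_expr (local_var);
  return tree_single_nonzero_warnv_p (addr, &strict_overflow_p);
}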
13394
13395 #define integer_valued_real_p(X) \
13396 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13397
13398 #define RECURSE(X) \
13399 ((integer_valued_real_p) (X, depth + 1))
13400
13401 /* Return true if the floating point result of (CODE OP0) has an
13402 integer value. We also allow +Inf, -Inf and NaN to be considered
13403 integer values.
13404
13405 DEPTH is the current nesting depth of the query. */
13406
13407 bool
13408 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13409 {
13410 switch (code)
13411 {
13412 case FLOAT_EXPR:
13413 return true;
13414
13415 case ABS_EXPR:
13416 return RECURSE (op0);
13417
13418 CASE_CONVERT:
13419 {
13420 tree type = TREE_TYPE (op0);
13421 if (TREE_CODE (type) == INTEGER_TYPE)
13422 return true;
13423 if (TREE_CODE (type) == REAL_TYPE)
13424 return RECURSE (op0);
13425 break;
13426 }
13427
13428 default:
13429 break;
13430 }
13431 return false;
13432 }
13433
13434 /* Return true if the floating point result of (CODE OP0 OP1) has an
13435 integer value. We also allow +Inf, -Inf and NaN to be considered
13436 integer values.
13437
13438 DEPTH is the current nesting depth of the query. */
13439
13440 bool
13441 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13442 {
13443 switch (code)
13444 {
13445 case PLUS_EXPR:
13446 case MINUS_EXPR:
13447 case MULT_EXPR:
13448 case MIN_EXPR:
13449 case MAX_EXPR:
13450 return RECURSE (op0) && RECURSE (op1);
13451
13452 default:
13453 break;
13454 }
13455 return false;
13456 }
13457
13458 /* Return true if the floating point result of calling FNDECL with arguments
13459 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13460 considered integer values. If FNDECL takes fewer than 2 arguments,
13461 the remaining ARGn are null.
13462
13463 DEPTH is the current nesting depth of the query. */
13464
13465 bool
13466 integer_valued_real_call_p (tree fndecl, tree arg0, tree arg1, int depth)
13467 {
13468 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13469 switch (DECL_FUNCTION_CODE (fndecl))
13470 {
13471 CASE_FLT_FN (BUILT_IN_CEIL):
13472 CASE_FLT_FN (BUILT_IN_FLOOR):
13473 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13474 CASE_FLT_FN (BUILT_IN_RINT):
13475 CASE_FLT_FN (BUILT_IN_ROUND):
13476 CASE_FLT_FN (BUILT_IN_TRUNC):
13477 return true;
13478
13479 CASE_FLT_FN (BUILT_IN_FMIN):
13480 CASE_FLT_FN (BUILT_IN_FMAX):
13481 return RECURSE (arg0) && RECURSE (arg1);
13482
13483 default:
13484 break;
13485 }
13486 return false;
13487 }
13488
13489 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13490 has an integer value. We also allow +Inf, -Inf and NaN to be
13491 considered integer values.
13492
13493 DEPTH is the current nesting depth of the query. */
13494
13495 bool
13496 integer_valued_real_single_p (tree t, int depth)
13497 {
13498 switch (TREE_CODE (t))
13499 {
13500 case REAL_CST:
13501 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13502
13503 case COND_EXPR:
13504 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13505
13506 case SSA_NAME:
13507 /* Limit the depth of recursion to avoid quadratic behavior.
13508 This is expected to catch almost all occurrences in practice.
13509 If this code misses important cases that unbounded recursion
13510 would not, passes that need this information could be revised
13511 to provide it through dataflow propagation. */
13512 return (!name_registered_for_update_p (t)
13513 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13514 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13515 depth));
13516
13517 default:
13518 break;
13519 }
13520 return false;
13521 }
13522
13523 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13524 has an integer value. We also allow +Inf, -Inf and NaN to be
13525 considered integer values.
13526
13527 DEPTH is the current nesting depth of the query. */
13528
13529 static bool
13530 integer_valued_real_invalid_p (tree t, int depth)
13531 {
13532 switch (TREE_CODE (t))
13533 {
13534 case COMPOUND_EXPR:
13535 case MODIFY_EXPR:
13536 case BIND_EXPR:
13537 return RECURSE (TREE_OPERAND (t, 1));
13538
13539 case SAVE_EXPR:
13540 return RECURSE (TREE_OPERAND (t, 0));
13541
13542 default:
13543 break;
13544 }
13545 return false;
13546 }
13547
13548 #undef RECURSE
13549 #undef integer_valued_real_p
13550
13551 /* Return true if the floating point expression T has an integer value.
13552 We also allow +Inf, -Inf and NaN to be considered integer values.
13553
13554 DEPTH is the current nesting depth of the query. */
13555
13556 bool
13557 integer_valued_real_p (tree t, int depth)
13558 {
13559 if (t == error_mark_node)
13560 return false;
13561
13562 tree_code code = TREE_CODE (t);
13563 switch (TREE_CODE_CLASS (code))
13564 {
13565 case tcc_binary:
13566 case tcc_comparison:
13567 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13568 TREE_OPERAND (t, 1), depth);
13569
13570 case tcc_unary:
13571 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13572
13573 case tcc_constant:
13574 case tcc_declaration:
13575 case tcc_reference:
13576 return integer_valued_real_single_p (t, depth);
13577
13578 default:
13579 break;
13580 }
13581
13582 switch (code)
13583 {
13584 case COND_EXPR:
13585 case SSA_NAME:
13586 return integer_valued_real_single_p (t, depth);
13587
13588 case CALL_EXPR:
13589 {
13590 tree arg0 = (call_expr_nargs (t) > 0
13591 ? CALL_EXPR_ARG (t, 0)
13592 : NULL_TREE);
13593 tree arg1 = (call_expr_nargs (t) > 1
13594 ? CALL_EXPR_ARG (t, 1)
13595 : NULL_TREE);
13596 return integer_valued_real_call_p (get_callee_fndecl (t),
13597 arg0, arg1, depth);
13598 }
13599
13600 default:
13601 return integer_valued_real_invalid_p (t, depth);
13602 }
13603 }
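
/* Illustrative sketch (an addition, not part of the original sources):
   a call to floor is recognized as integer valued through
   integer_valued_real_call_p.  */

static bool
example_floor_integer_valued (tree arg)
{
  tree floorfn = builtin_decl_explicit (BUILT_IN_FLOOR);
  tree call = build_call_expr (floorfn, 1, arg);
  return integer_valued_real_p (call, 0);
}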
13604
13605 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13606 attempt to fold the expression to a constant without modifying TYPE,
13607 OP0 or OP1.
13608
13609 If the expression could be simplified to a constant, then return
13610 the constant. If the expression would not be simplified to a
13611 constant, then return NULL_TREE. */
13612
13613 tree
13614 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13615 {
13616 tree tem = fold_binary (code, type, op0, op1);
13617 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13618 }
13619
13620 /* Given the components of a unary expression CODE, TYPE and OP0,
13621 attempt to fold the expression to a constant without modifying
13622 TYPE or OP0.
13623
13624 If the expression could be simplified to a constant, then return
13625 the constant. If the expression would not be simplified to a
13626 constant, then return NULL_TREE. */
13627
13628 tree
13629 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13630 {
13631 tree tem = fold_unary (code, type, op0);
13632 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13633 }
13634
13635 /* If EXP represents referencing an element in a constant string
13636 (either via pointer arithmetic or array indexing), return the
13637 tree representing the value accessed, otherwise return NULL. */
13638
13639 tree
13640 fold_read_from_constant_string (tree exp)
13641 {
13642 if ((TREE_CODE (exp) == INDIRECT_REF
13643 || TREE_CODE (exp) == ARRAY_REF)
13644 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13645 {
13646 tree exp1 = TREE_OPERAND (exp, 0);
13647 tree index;
13648 tree string;
13649 location_t loc = EXPR_LOCATION (exp);
13650
13651 if (TREE_CODE (exp) == INDIRECT_REF)
13652 string = string_constant (exp1, &index);
13653 else
13654 {
13655 tree low_bound = array_ref_low_bound (exp);
13656 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13657
13658 /* Optimize the special case of a zero lower bound.
13659
13660 We convert the low_bound to sizetype to avoid some problems
13661 with constant folding. (E.g. suppose the lower bound is 1,
13662 and its mode is QI. Without the conversion, (ARRAY
13663 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13664 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13665 if (! integer_zerop (low_bound))
13666 index = size_diffop_loc (loc, index,
13667 fold_convert_loc (loc, sizetype, low_bound));
13668
13669 string = exp1;
13670 }
13671
13672 if (string
13673 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13674 && TREE_CODE (string) == STRING_CST
13675 && TREE_CODE (index) == INTEGER_CST
13676 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13677 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13678 == MODE_INT)
13679 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13680 return build_int_cst_type (TREE_TYPE (exp),
13681 (TREE_STRING_POINTER (string)
13682 [TREE_INT_CST_LOW (index)]));
13683 }
13684 return NULL;
13685 }
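
/* Illustrative sketch (an addition, not part of the original sources):
   reading element 1 of the constant string "abc" folds to the character
   code of 'b'.  The type assignment mimics what the front ends do for
   string literals.  */

static tree
example_read_from_string (void)
{
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node,
				      build_index_type (size_int (3)));
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		     NULL_TREE, NULL_TREE);
  return fold_read_from_constant_string (ref);
}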
13686
13687 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13688 an integer constant, real, or fixed-point constant.
13689
13690 TYPE is the type of the result. */
13691
13692 static tree
13693 fold_negate_const (tree arg0, tree type)
13694 {
13695 tree t = NULL_TREE;
13696
13697 switch (TREE_CODE (arg0))
13698 {
13699 case INTEGER_CST:
13700 {
13701 bool overflow;
13702 wide_int val = wi::neg (arg0, &overflow);
13703 t = force_fit_type (type, val, 1,
13704 (overflow | TREE_OVERFLOW (arg0))
13705 && !TYPE_UNSIGNED (type));
13706 break;
13707 }
13708
13709 case REAL_CST:
13710 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13711 break;
13712
13713 case FIXED_CST:
13714 {
13715 FIXED_VALUE_TYPE f;
13716 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13717 &(TREE_FIXED_CST (arg0)), NULL,
13718 TYPE_SATURATING (type));
13719 t = build_fixed (type, f);
13720 /* Propagate overflow flags. */
13721 if (overflow_p | TREE_OVERFLOW (arg0))
13722 TREE_OVERFLOW (t) = 1;
13723 break;
13724 }
13725
13726 default:
13727 gcc_unreachable ();
13728 }
13729
13730 return t;
13731 }
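
/* Illustrative sketch (an addition, not part of the original sources):
   negating the most negative int wraps, and the INTEGER_CST case above
   records that by setting TREE_OVERFLOW on the result.  */

static tree
example_negate_int_min (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  return fold_negate_const (int_min, integer_type_node);
}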
13732
13733 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13734 an integer constant or real constant.
13735
13736 TYPE is the type of the result. */
13737
13738 tree
13739 fold_abs_const (tree arg0, tree type)
13740 {
13741 tree t = NULL_TREE;
13742
13743 switch (TREE_CODE (arg0))
13744 {
13745 case INTEGER_CST:
13746 {
13747 /* If the value is unsigned or non-negative, then the absolute value
13748 is the same as the ordinary value. */
13749 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13750 t = arg0;
13751
13752 /* If the value is negative, then the absolute value is
13753 its negation. */
13754 else
13755 {
13756 bool overflow;
13757 wide_int val = wi::neg (arg0, &overflow);
13758 t = force_fit_type (type, val, -1,
13759 overflow | TREE_OVERFLOW (arg0));
13760 }
13761 }
13762 break;
13763
13764 case REAL_CST:
13765 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13766 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13767 else
13768 t = arg0;
13769 break;
13770
13771 default:
13772 gcc_unreachable ();
13773 }
13774
13775 return t;
13776 }
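
/* Likewise, fold_abs_const on the INTEGER_CST -7 yields 7, and on a
   REAL_CST it simply clears the sign.  As with negation, abs of the most
   negative value of a signed type (assuming two's complement) does not
   fit, and the result carries TREE_OVERFLOW.  */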
13777
13778 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13779 constant. TYPE is the type of the result. */
13780
13781 static tree
13782 fold_not_const (const_tree arg0, tree type)
13783 {
13784 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13785
13786 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13787 }
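
/* For example, fold_not_const on the INTEGER_CST 5 of a signed int type
   yields -6, since ~5 == -6 in two's complement; any overflow flag
   already present on the operand is propagated to the result.  */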
13788
13789 /* Given CODE, a relational operator, the target type TYPE, and two
13790 constant operands OP0 and OP1, return the result of the
13791 relational operation. If the result is not a compile time
13792 constant, then return NULL_TREE. */
13793
13794 static tree
13795 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13796 {
13797 int result, invert;
13798
13799 /* From here on, the only cases we handle are when the result is
13800 known to be a constant. */
13801
13802 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13803 {
13804 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13805 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13806
13807 /* Handle the cases where either operand is a NaN. */
13808 if (real_isnan (c0) || real_isnan (c1))
13809 {
13810 switch (code)
13811 {
13812 case EQ_EXPR:
13813 case ORDERED_EXPR:
13814 result = 0;
13815 break;
13816
13817 case NE_EXPR:
13818 case UNORDERED_EXPR:
13819 case UNLT_EXPR:
13820 case UNLE_EXPR:
13821 case UNGT_EXPR:
13822 case UNGE_EXPR:
13823 case UNEQ_EXPR:
13824 result = 1;
13825 break;
13826
13827 case LT_EXPR:
13828 case LE_EXPR:
13829 case GT_EXPR:
13830 case GE_EXPR:
13831 case LTGT_EXPR:
13832 if (flag_trapping_math)
13833 return NULL_TREE;
13834 result = 0;
13835 break;
13836
13837 default:
13838 gcc_unreachable ();
13839 }
13840
13841 return constant_boolean_node (result, type);
13842 }
13843
13844 return constant_boolean_node (real_compare (code, c0, c1), type);
13845 }
13846
13847 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13848 {
13849 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13850 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13851 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13852 }
13853
13854 /* Handle equality/inequality of complex constants. */
13855 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13856 {
13857 tree rcond = fold_relational_const (code, type,
13858 TREE_REALPART (op0),
13859 TREE_REALPART (op1));
13860 tree icond = fold_relational_const (code, type,
13861 TREE_IMAGPART (op0),
13862 TREE_IMAGPART (op1));
13863 if (code == EQ_EXPR)
13864 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13865 else if (code == NE_EXPR)
13866 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13867 else
13868 return NULL_TREE;
13869 }
13870
13871 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13872 {
13873 unsigned count = VECTOR_CST_NELTS (op0);
13874 tree *elts = XALLOCAVEC (tree, count);
13875 gcc_assert (VECTOR_CST_NELTS (op1) == count
13876 && TYPE_VECTOR_SUBPARTS (type) == count);
13877
13878 for (unsigned i = 0; i < count; i++)
13879 {
13880 tree elem_type = TREE_TYPE (type);
13881 tree elem0 = VECTOR_CST_ELT (op0, i);
13882 tree elem1 = VECTOR_CST_ELT (op1, i);
13883
13884 tree tem = fold_relational_const (code, elem_type,
13885 elem0, elem1);
13886
13887 if (tem == NULL_TREE)
13888 return NULL_TREE;
13889
13890 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13891 }
13892
13893 return build_vector (type, elts);
13894 }
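
/* Note the element encoding used just above: vector comparisons fold
   element-wise to -1 (all bits set) for true and 0 for false, so e.g.
   {1, 2} == {1, 3} folds to the mask {-1, 0}.  */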
13895
13896 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13897
13898 To compute GT, swap the arguments and do LT.
13899 To compute GE, do LT and invert the result.
13900 To compute LE, swap the arguments, do LT and invert the result.
13901 To compute NE, do EQ and invert the result.
13902
13903 Therefore, the code below must handle only EQ and LT. */
13904
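/* For example, 3 > 2 becomes 2 < 3 by swapping, and 3 >= 2 becomes
   !(3 < 2) by inverting, so only tree_int_cst_equal and tree_int_cst_lt
   are needed below.  */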
13905 if (code == LE_EXPR || code == GT_EXPR)
13906 {
13907 std::swap (op0, op1);
13908 code = swap_tree_comparison (code);
13909 }
13910
13911 /* Note that it is safe to invert for real values here because we
13912 have already handled the one case where it matters: NaNs. */
13913
13914 invert = 0;
13915 if (code == NE_EXPR || code == GE_EXPR)
13916 {
13917 invert = 1;
13918 code = invert_tree_comparison (code, false);
13919 }
13920
13921 /* Compute a result for LT or EQ if args permit;
13922 otherwise return NULL_TREE. */
13923 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13924 {
13925 if (code == EQ_EXPR)
13926 result = tree_int_cst_equal (op0, op1);
13927 else
13928 result = tree_int_cst_lt (op0, op1);
13929 }
13930 else
13931 return NULL_TREE;
13932
13933 if (invert)
13934 result ^= 1;
13935 return constant_boolean_node (result, type);
13936 }
13937
13938 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13939 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13940 itself. */
13941
13942 tree
13943 fold_build_cleanup_point_expr (tree type, tree expr)
13944 {
13945 /* If the expression does not have side effects then we don't have to wrap
13946 it with a cleanup point expression. */
13947 if (!TREE_SIDE_EFFECTS (expr))
13948 return expr;
13949
13950 /* If the expression is a return, check whether the expression inside the
13951 return, or the right hand side of the modify expression inside the return,
13952 has no side effects. If either has none, we don't need to wrap the
13953 expression in a cleanup point expression. Note we don't check the left
13954 hand side of the modify because it should always be a return decl. */
13955 if (TREE_CODE (expr) == RETURN_EXPR)
13956 {
13957 tree op = TREE_OPERAND (expr, 0);
13958 if (!op || !TREE_SIDE_EFFECTS (op))
13959 return expr;
13960 op = TREE_OPERAND (op, 1);
13961 if (!TREE_SIDE_EFFECTS (op))
13962 return expr;
13963 }
13964
13965 return build1 (CLEANUP_POINT_EXPR, type, expr);
13966 }
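
/* For example, for "return x;" the right hand side of the implicit
   MODIFY_EXPR is a plain decl with no side effects, so the RETURN_EXPR is
   returned unwrapped; for "return f ();" the call has side effects, so
   the expression is wrapped to anchor any pending cleanups (e.g. for
   temporaries) at the right point.  */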
13967
13968 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13969 of an indirection through OP0, or NULL_TREE if no simplification is
13970 possible. */
13971
13972 tree
13973 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13974 {
13975 tree sub = op0;
13976 tree subtype;
13977
13978 STRIP_NOPS (sub);
13979 subtype = TREE_TYPE (sub);
13980 if (!POINTER_TYPE_P (subtype))
13981 return NULL_TREE;
13982
13983 if (TREE_CODE (sub) == ADDR_EXPR)
13984 {
13985 tree op = TREE_OPERAND (sub, 0);
13986 tree optype = TREE_TYPE (op);
13987 /* *&CONST_DECL -> the value of the const decl. */
13988 if (TREE_CODE (op) == CONST_DECL)
13989 return DECL_INITIAL (op);
13990 /* *&p => p; make sure to handle *&"str"[cst] here. */
13991 if (type == optype)
13992 {
13993 tree fop = fold_read_from_constant_string (op);
13994 if (fop)
13995 return fop;
13996 else
13997 return op;
13998 }
13999 /* *(foo *)&fooarray => fooarray[0] */
14000 else if (TREE_CODE (optype) == ARRAY_TYPE
14001 && type == TREE_TYPE (optype)
14002 && (!in_gimple_form
14003 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14004 {
14005 tree type_domain = TYPE_DOMAIN (optype);
14006 tree min_val = size_zero_node;
14007 if (type_domain && TYPE_MIN_VALUE (type_domain))
14008 min_val = TYPE_MIN_VALUE (type_domain);
14009 if (in_gimple_form
14010 && TREE_CODE (min_val) != INTEGER_CST)
14011 return NULL_TREE;
14012 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14013 NULL_TREE, NULL_TREE);
14014 }
14015 /* *(foo *)&complexfoo => __real__ complexfoo */
14016 else if (TREE_CODE (optype) == COMPLEX_TYPE
14017 && type == TREE_TYPE (optype))
14018 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14019 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14020 else if (TREE_CODE (optype) == VECTOR_TYPE
14021 && type == TREE_TYPE (optype))
14022 {
14023 tree part_width = TYPE_SIZE (type);
14024 tree index = bitsize_int (0);
14025 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14026 }
14027 }
14028
14029 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14030 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14031 {
14032 tree op00 = TREE_OPERAND (sub, 0);
14033 tree op01 = TREE_OPERAND (sub, 1);
14034
14035 STRIP_NOPS (op00);
14036 if (TREE_CODE (op00) == ADDR_EXPR)
14037 {
14038 tree op00type;
14039 op00 = TREE_OPERAND (op00, 0);
14040 op00type = TREE_TYPE (op00);
14041
14042 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14043 if (TREE_CODE (op00type) == VECTOR_TYPE
14044 && type == TREE_TYPE (op00type))
14045 {
14046 HOST_WIDE_INT offset = tree_to_shwi (op01);
14047 tree part_width = TYPE_SIZE (type);
14048 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
14049 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14050 tree index = bitsize_int (indexi);
14051
14052 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14053 return fold_build3_loc (loc,
14054 BIT_FIELD_REF, type, op00,
14055 part_width, index);
14056
14057 }
14058 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14059 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14060 && type == TREE_TYPE (op00type))
14061 {
14062 tree size = TYPE_SIZE_UNIT (type);
14063 if (tree_int_cst_equal (size, op01))
14064 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14065 }
14066 /* ((foo *)&fooarray)[1] => fooarray[1] */
14067 else if (TREE_CODE (op00type) == ARRAY_TYPE
14068 && type == TREE_TYPE (op00type))
14069 {
14070 tree type_domain = TYPE_DOMAIN (op00type);
14071 tree min_val = size_zero_node;
14072 if (type_domain && TYPE_MIN_VALUE (type_domain))
14073 min_val = TYPE_MIN_VALUE (type_domain);
14074 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14075 TYPE_SIZE_UNIT (type));
14076 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14077 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14078 NULL_TREE, NULL_TREE);
14079 }
14080 }
14081 }
14082
14083 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14084 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14085 && type == TREE_TYPE (TREE_TYPE (subtype))
14086 && (!in_gimple_form
14087 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14088 {
14089 tree type_domain;
14090 tree min_val = size_zero_node;
14091 sub = build_fold_indirect_ref_loc (loc, sub);
14092 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14093 if (type_domain && TYPE_MIN_VALUE (type_domain))
14094 min_val = TYPE_MIN_VALUE (type_domain);
14095 if (in_gimple_form
14096 && TREE_CODE (min_val) != INTEGER_CST)
14097 return NULL_TREE;
14098 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14099 NULL_TREE);
14100 }
14101
14102 return NULL_TREE;
14103 }
14104
14105 /* Builds an expression for an indirection through T, simplifying some
14106 cases. */
14107
14108 tree
14109 build_fold_indirect_ref_loc (location_t loc, tree t)
14110 {
14111 tree type = TREE_TYPE (TREE_TYPE (t));
14112 tree sub = fold_indirect_ref_1 (loc, type, t);
14113
14114 if (sub)
14115 return sub;
14116
14117 return build1_loc (loc, INDIRECT_REF, type, t);
14118 }
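
/* For example, applied to the ADDR_EXPR &a[3], where the pointed-to type
   matches, the *&p rule in fold_indirect_ref_1 simply yields a[3]
   instead of building a new INDIRECT_REF node.  */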
14119
14120 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14121
14122 tree
14123 fold_indirect_ref_loc (location_t loc, tree t)
14124 {
14125 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14126
14127 if (sub)
14128 return sub;
14129 else
14130 return t;
14131 }
14132
14133 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14134 whose result is ignored. The type of the returned tree need not be
14135 the same as the original expression. */
14136
14137 tree
14138 fold_ignored_result (tree t)
14139 {
14140 if (!TREE_SIDE_EFFECTS (t))
14141 return integer_zero_node;
14142
14143 for (;;)
14144 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14145 {
14146 case tcc_unary:
14147 t = TREE_OPERAND (t, 0);
14148 break;
14149
14150 case tcc_binary:
14151 case tcc_comparison:
14152 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14153 t = TREE_OPERAND (t, 0);
14154 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14155 t = TREE_OPERAND (t, 1);
14156 else
14157 return t;
14158 break;
14159
14160 case tcc_expression:
14161 switch (TREE_CODE (t))
14162 {
14163 case COMPOUND_EXPR:
14164 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14165 return t;
14166 t = TREE_OPERAND (t, 0);
14167 break;
14168
14169 case COND_EXPR:
14170 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14171 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14172 return t;
14173 t = TREE_OPERAND (t, 0);
14174 break;
14175
14176 default:
14177 return t;
14178 }
14179 break;
14180
14181 default:
14182 return t;
14183 }
14184 }
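
/* For example, if the value of "a + f ()" is ignored, the addition is
   dropped and "f ()" alone is kept; for "f () ? b : c" with
   side-effect-free arms, only the condition "f ()" survives.  */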
14185
14186 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14187
14188 tree
14189 round_up_loc (location_t loc, tree value, unsigned int divisor)
14190 {
14191 tree div = NULL_TREE;
14192
14193 if (divisor == 1)
14194 return value;
14195
14196 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14197 have to do anything. Only do this when we are not given a
14198 constant, because for a constant this check is more expensive
14199 than just performing the rounding. */
14200 if (TREE_CODE (value) != INTEGER_CST)
14201 {
14202 div = build_int_cst (TREE_TYPE (value), divisor);
14203
14204 if (multiple_of_p (TREE_TYPE (value), value, div))
14205 return value;
14206 }
14207
14208 /* If divisor is a power of two, simplify this to bit manipulation. */
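/* E.g. rounding 13 up to a multiple of 8 (a power of two):
   (13 + 7) & -8 == 20 & -8 == 16.  */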
14209 if (divisor == (divisor & -divisor))
14210 {
14211 if (TREE_CODE (value) == INTEGER_CST)
14212 {
14213 wide_int val = value;
14214 bool overflow_p;
14215
14216 if ((val & (divisor - 1)) == 0)
14217 return value;
14218
14219 overflow_p = TREE_OVERFLOW (value);
14220 val += divisor - 1;
14221 val &= - (int) divisor;
14222 if (val == 0)
14223 overflow_p = true;
14224
14225 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14226 }
14227 else
14228 {
14229 tree t;
14230
14231 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14232 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14233 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14234 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14235 }
14236 }
14237 else
14238 {
14239 if (!div)
14240 div = build_int_cst (TREE_TYPE (value), divisor);
14241 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14242 value = size_binop_loc (loc, MULT_EXPR, value, div);
14243 }
14244
14245 return value;
14246 }
14247
14248 /* Likewise, but round down. */
14249
14250 tree
14251 round_down_loc (location_t loc, tree value, int divisor)
14252 {
14253 tree div = NULL_TREE;
14254
14255 gcc_assert (divisor > 0);
14256 if (divisor == 1)
14257 return value;
14258
14259 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14260 have to do anything. Only do this when we are not given a
14261 constant, because for a constant this check is more expensive
14262 than just performing the rounding. */
14263 if (TREE_CODE (value) != INTEGER_CST)
14264 {
14265 div = build_int_cst (TREE_TYPE (value), divisor);
14266
14267 if (multiple_of_p (TREE_TYPE (value), value, div))
14268 return value;
14269 }
14270
14271 /* If divisor is a power of two, simplify this to bit manipulation. */
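/* E.g. rounding 13 down to a multiple of 8: 13 & -8 == 8.  */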
14272 if (divisor == (divisor & -divisor))
14273 {
14274 tree t;
14275
14276 t = build_int_cst (TREE_TYPE (value), -divisor);
14277 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14278 }
14279 else
14280 {
14281 if (!div)
14282 div = build_int_cst (TREE_TYPE (value), divisor);
14283 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14284 value = size_binop_loc (loc, MULT_EXPR, value, div);
14285 }
14286
14287 return value;
14288 }
14289
14290 /* Returns a pointer to the base of the object addressed by EXP and
14291 extracts information about the offset of the access, storing it
14292 in PBITPOS and POFFSET. */
14293
14294 static tree
14295 split_address_to_core_and_offset (tree exp,
14296 HOST_WIDE_INT *pbitpos, tree *poffset)
14297 {
14298 tree core;
14299 machine_mode mode;
14300 int unsignedp, reversep, volatilep;
14301 HOST_WIDE_INT bitsize;
14302 location_t loc = EXPR_LOCATION (exp);
14303
14304 if (TREE_CODE (exp) == ADDR_EXPR)
14305 {
14306 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14307 poffset, &mode, &unsignedp, &reversep,
14308 &volatilep, false);
14309 core = build_fold_addr_expr_loc (loc, core);
14310 }
14311 else
14312 {
14313 core = exp;
14314 *pbitpos = 0;
14315 *poffset = NULL_TREE;
14316 }
14317
14318 return core;
14319 }
14320
14321 /* Returns true if addresses of E1 and E2 differ by a constant, false
14322 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14323
14324 bool
14325 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14326 {
14327 tree core1, core2;
14328 HOST_WIDE_INT bitpos1, bitpos2;
14329 tree toffset1, toffset2, tdiff, type;
14330
14331 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14332 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14333
14334 if (bitpos1 % BITS_PER_UNIT != 0
14335 || bitpos2 % BITS_PER_UNIT != 0
14336 || !operand_equal_p (core1, core2, 0))
14337 return false;
14338
14339 if (toffset1 && toffset2)
14340 {
14341 type = TREE_TYPE (toffset1);
14342 if (type != TREE_TYPE (toffset2))
14343 toffset2 = fold_convert (type, toffset2);
14344
14345 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14346 if (!cst_and_fits_in_hwi (tdiff))
14347 return false;
14348
14349 *diff = int_cst_value (tdiff);
14350 }
14351 else if (toffset1 || toffset2)
14352 {
14353 /* If only one of the offsets is non-constant, the difference cannot
14354 be a constant. */
14355 return false;
14356 }
14357 else
14358 *diff = 0;
14359
14360 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14361 return true;
14362 }
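
/* For example, given "char a[16];", the pointers &a[10] and &a[2] share
   the core &a and differ by the constant 8, so *DIFF is set to 8 and
   true is returned.  Comparing &a[i] with &a[2] fails, because only one
   of the two offsets is constant.  */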
14363
14364 /* Return OFF converted to a pointer offset type suitable as offset for
14365 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14366 tree
14367 convert_to_ptrofftype_loc (location_t loc, tree off)
14368 {
14369 return fold_convert_loc (loc, sizetype, off);
14370 }
14371
14372 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14373 tree
14374 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14375 {
14376 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14377 ptr, convert_to_ptrofftype_loc (loc, off));
14378 }
14379
14380 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14381 tree
14382 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14383 {
14384 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14385 ptr, size_int (off));
14386 }
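
/* For example, fold_build_pointer_plus_hwi_loc (loc, p, 4) builds
   POINTER_PLUS_EXPR <p, 4>; note that the offset is in bytes, unlike C
   pointer arithmetic, which scales by the pointed-to element size.  */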
14387
14388 /* Return a char pointer for a C string if it is a string constant
14389 or a sum of a string constant and an integer constant. */
14390
14391 const char *
14392 c_getstr (tree src)
14393 {
14394 tree offset_node;
14395
14396 src = string_constant (src, &offset_node);
14397 if (src == 0)
14398 return 0;
14399
14400 if (offset_node == 0)
14401 return TREE_STRING_POINTER (src);
14402 else if (!tree_fits_uhwi_p (offset_node)
14403 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14404 return 0;
14405
14406 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14407 }
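
/* For example, c_getstr on the address of "hello" returns a pointer to
   "hello"; on "hello" + 2 (a string constant plus a constant offset) it
   returns a pointer to "llo"; and on anything non-constant it returns
   NULL.  */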