/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
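
/* A minimal sketch of these entry points in use, assuming a caller
   that works with sizetype values:

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));
     tree prod = fold (build2 (MULT_EXPR, sizetype, sum, size_int (1)));

   size_binop folds the constant addition to the sizetype constant 12
   on the spot, and fold then simplifies the multiplication by one
   back down to that same constant.  */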

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
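
/* The low three bits encode the LT, EQ and GT relations respectively,
   and bit three marks the unordered relation, so combining two
   comparisons reduces to bitwise arithmetic on their codes.  A few
   identities that follow from the encoding above:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
     COMPCODE_TRUE == (COMPCODE_ORD | COMPCODE_UNORD)

   E.g. ORing the codes for "a < b" and "a == b" yields COMPCODE_LE.  */
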
123
124 static bool negate_mathfn_p (enum built_in_function);
125 static bool negate_expr_p (tree);
126 static tree negate_expr (tree);
127 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
128 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
129 static enum comparison_code comparison_to_compcode (enum tree_code);
130 static enum tree_code compcode_to_comparison (enum comparison_code);
131 static int operand_equal_for_comparison_p (tree, tree, tree);
132 static int twoval_comparison_p (tree, tree *, tree *, int *);
133 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
134 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
135 static tree make_bit_field_ref (location_t, tree, tree,
136 HOST_WIDE_INT, HOST_WIDE_INT, int);
137 static tree optimize_bit_field_compare (location_t, enum tree_code,
138 tree, tree, tree);
139 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
140 HOST_WIDE_INT *,
141 machine_mode *, int *, int *,
142 tree *, tree *);
143 static int simple_operand_p (const_tree);
144 static bool simple_operand_p_2 (tree);
145 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
146 static tree range_predecessor (tree);
147 static tree range_successor (tree);
148 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
149 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
150 static tree unextend (tree, int, int, tree);
151 static tree optimize_minmax_comparison (location_t, enum tree_code,
152 tree, tree, tree);
153 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
154 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
155 static tree fold_binary_op_with_conditional_arg (location_t,
156 enum tree_code, tree,
157 tree, tree,
158 tree, tree, int);
159 static tree fold_mathfn_compare (location_t,
160 enum built_in_function, enum tree_code,
161 tree, tree, tree);
162 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
163 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
164 static bool reorder_operands_p (const_tree, const_tree);
165 static tree fold_negate_const (tree, tree);
166 static tree fold_not_const (const_tree, tree);
167 static tree fold_relational_const (enum tree_code, tree, tree, tree);
168 static tree fold_convert_const (enum tree_code, tree, tree);
169 static tree fold_view_convert_expr (tree, tree);
170 static bool vec_cst_ctor_to_array (tree, tree *);
171
172
173 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
174 Otherwise, return LOC. */
175
176 static location_t
177 expr_location_or (tree t, location_t loc)
178 {
179 location_t tloc = EXPR_LOCATION (t);
180 return tloc == UNKNOWN_LOCATION ? loc : tloc;
181 }
182
183 /* Similar to protected_set_expr_location, but never modify x in place,
184 if location can and needs to be set, unshare it. */
185
186 static inline tree
187 protected_set_expr_location_unshare (tree x, location_t loc)
188 {
189 if (CAN_HAVE_LOCATION_P (x)
190 && EXPR_LOCATION (x) != loc
191 && !(TREE_CODE (x) == SAVE_EXPR
192 || TREE_CODE (x) == TARGET_EXPR
193 || TREE_CODE (x) == BIND_EXPR))
194 {
195 x = copy_node (x);
196 SET_EXPR_LOCATION (x, loc);
197 }
198 return x;
199 }
200 \f
201 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
202 division and returns the quotient. Otherwise returns
203 NULL_TREE. */
204
205 tree
206 div_if_zero_remainder (const_tree arg1, const_tree arg2)
207 {
208 widest_int quo;
209
210 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
211 SIGNED, &quo))
212 return wide_int_to_tree (TREE_TYPE (arg1), quo);
213
214 return NULL_TREE;
215 }
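
/* For example, on INTEGER_CSTs of a 32-bit int type,
     div_if_zero_remainder (build_int_cst (integer_type_node, 12),
                            build_int_cst (integer_type_node, 4))
   yields the constant 3, whereas dividing 13 by 4 leaves a remainder
   and yields NULL_TREE.  */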
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
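
/* A minimal sketch of the intended calling pattern, assuming the
   caller has a gimple statement STMT to attach any warning to:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = res && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   The deferred -Wstrict-overflow warning is only issued when the
   folded result is actually used, here when it is a constant.  */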

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
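
/* For a 32-bit signed int, only INT_MIN (the one value with just the
   sign bit set) fails this test, since -INT_MIN is not representable:
   may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node))
   returns false, while e.g. build_int_cst (integer_type_node, -5)
   returns true.  */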

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
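
/* A sketch of how the two routines divide the work: for T = a - b in
   a type honoring neither signed zeros nor sign-dependent rounding,
   fold_negate_expr returns the simplified tree b - a, while for an
   opaque operand such as a lone VAR_DECL it returns NULL_TREE and
   negate_expr falls back to wrapping the operand in an explicit
   NEGATE_EXPR node.  */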
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
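
/* A worked example: splitting IN = x - 3 with CODE == PLUS_EXPR puts
   the literal 3 in *MINUS_LITP (it was subtracted), leaves *LITP and
   *CONP null, and returns x as the variable part; with NEGATE_P set,
   the same call instead puts 3 in *LITP and returns -x.  */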

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
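
/* For instance, int_const_binop (PLUS_EXPR, a, b) on 32-bit signed
   constants a = INT_MAX and b = 1 wraps to INT_MIN, and because the
   overflowable argument is 1 and the type is signed, force_fit_type
   marks the result with TREE_OVERFLOW; the same addition on unsigned
   constants folds to 0x80000000 with no overflow flag.  */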

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
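
/* A small numeric check of the flag_complex_method == 0 branch above:
   for (3 + 2i) / (1 + 1i),
     t  = 1*1 + 1*1       = 2
     re = (3*1 + 2*1) / t = 5/2
     im = (2*1 - 3*1) / t = -1/2
   giving 2.5 - 0.5i, i.e. (3 + 2i) * (1 - 1i) / 2.  */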

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
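
/* E.g. const_unop (BIT_NOT_EXPR, type, arg0) on an INTEGER_CST 0 of a
   32-bit unsigned type folds to 0xffffffff via fold_not_const, and on
   a VECTOR_CST the same complement is applied elementwise, giving up
   with NULL_TREE as soon as any element fails to fold.  */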

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
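
/* E.g. size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   straight to the sizetype constant 12 via int_const_binop_1, with
   overflowable -1 so that even unsigned sizetype overflow is flagged,
   while size_binop (PLUS_EXPR, size_zero_node, size_int (7)) takes
   the fast path and returns the second operand untouched.  */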

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
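
/* For example, with sizetype ARG0 = 4 and ARG1 = 12, size_diffop_loc
   returns the ssizetype constant -8: the subtraction is done the
   non-overflowing way round (12 - 4), converted to ssizetype, and
   then negated by subtracting from zero.  */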
1837 \f
1838 /* A subroutine of fold_convert_const handling conversions of an
1839 INTEGER_CST to another integer type. */
1840
1841 static tree
1842 fold_convert_const_int_from_int (tree type, const_tree arg1)
1843 {
1844 /* Given an integer constant, make new constant with new type,
1845 appropriately sign-extended or truncated. Use widest_int
1846 so that any extension is done according ARG1's type. */
1847 return force_fit_type (type, wi::to_widest (arg1),
1848 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1849 TREE_OVERFLOW (arg1));
1850 }
1851
1852 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1853 to an integer type. */
1854
1855 static tree
1856 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1857 {
1858 bool overflow = false;
1859 tree t;
1860
1861 /* The following code implements the floating point to integer
1862 conversion rules required by the Java Language Specification,
1863 that IEEE NaNs are mapped to zero and values that overflow
1864 the target precision saturate, i.e. values greater than
1865 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1866 are mapped to INT_MIN. These semantics are allowed by the
1867 C and C++ standards that simply state that the behavior of
1868 FP-to-integer conversion is unspecified upon overflow. */
1869
1870 wide_int val;
1871 REAL_VALUE_TYPE r;
1872 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1873
1874 switch (code)
1875 {
1876 case FIX_TRUNC_EXPR:
1877 real_trunc (&r, VOIDmode, &x);
1878 break;
1879
1880 default:
1881 gcc_unreachable ();
1882 }
1883
1884 /* If R is NaN, return zero and show we have an overflow. */
1885 if (REAL_VALUE_ISNAN (r))
1886 {
1887 overflow = true;
1888 val = wi::zero (TYPE_PRECISION (type));
1889 }
1890
1891 /* See if R is less than the lower bound or greater than the
1892 upper bound. */
1893
1894 if (! overflow)
1895 {
1896 tree lt = TYPE_MIN_VALUE (type);
1897 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1898 if (REAL_VALUES_LESS (r, l))
1899 {
1900 overflow = true;
1901 val = lt;
1902 }
1903 }
1904
1905 if (! overflow)
1906 {
1907 tree ut = TYPE_MAX_VALUE (type);
1908 if (ut)
1909 {
1910 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1911 if (REAL_VALUES_LESS (u, r))
1912 {
1913 overflow = true;
1914 val = ut;
1915 }
1916 }
1917 }
1918
1919 if (! overflow)
1920 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1921
1922 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1923 return t;
1924 }
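
/* Sketch of the saturating semantics implemented above (illustrative
   only; X is a hypothetical REAL_CST and a 32-bit int is assumed):

     tree t = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
						integer_type_node, x);

   If X is 1.0e30 the result is INT_MAX, if X is -1.0e30 it is
   INT_MIN, and if X is a NaN it is zero; in each case TREE_OVERFLOW
   is set on the result to record the out-of-range input.  */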
1925
1926 /* A subroutine of fold_convert_const handling conversions of a
1927 FIXED_CST to an integer type. */
1928
1929 static tree
1930 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1931 {
1932 tree t;
1933 double_int temp, temp_trunc;
1934 unsigned int mode;
1935
1936 /* Right shift FIXED_CST to temp by fbit. */
1937 temp = TREE_FIXED_CST (arg1).data;
1938 mode = TREE_FIXED_CST (arg1).mode;
1939 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1940 {
1941 temp = temp.rshift (GET_MODE_FBIT (mode),
1942 HOST_BITS_PER_DOUBLE_INT,
1943 SIGNED_FIXED_POINT_MODE_P (mode));
1944
1945 /* Left shift temp to temp_trunc by fbit. */
1946 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1947 HOST_BITS_PER_DOUBLE_INT,
1948 SIGNED_FIXED_POINT_MODE_P (mode));
1949 }
1950 else
1951 {
1952 temp = double_int_zero;
1953 temp_trunc = double_int_zero;
1954 }
1955
1956   /* If FIXED_CST is negative, we need to round the value toward 0:
1957      if the fractional bits are not all zero, add 1 to temp.  */
1958 if (SIGNED_FIXED_POINT_MODE_P (mode)
1959 && temp_trunc.is_negative ()
1960 && TREE_FIXED_CST (arg1).data != temp_trunc)
1961 temp += double_int_one;
1962
1963   /* Given a fixed-point constant, make a new constant with the new type,
1964 appropriately sign-extended or truncated. */
1965 t = force_fit_type (type, temp, -1,
1966 (temp.is_negative ()
1967 && (TYPE_UNSIGNED (type)
1968 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1969 | TREE_OVERFLOW (arg1));
1970
1971 return t;
1972 }
1973
1974 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975 to another floating point type. */
1976
1977 static tree
1978 fold_convert_const_real_from_real (tree type, const_tree arg1)
1979 {
1980 REAL_VALUE_TYPE value;
1981 tree t;
1982
1983 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1984 t = build_real (type, value);
1985
1986 /* If converting an infinity or NAN to a representation that doesn't
1987 have one, set the overflow bit so that we can produce some kind of
1988 error message at the appropriate point if necessary. It's not the
1989 most user-friendly message, but it's better than nothing. */
1990 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1991 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1992 TREE_OVERFLOW (t) = 1;
1993 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1994 && !MODE_HAS_NANS (TYPE_MODE (type)))
1995 TREE_OVERFLOW (t) = 1;
1996 /* Regular overflow, conversion produced an infinity in a mode that
1997 can't represent them. */
1998 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1999 && REAL_VALUE_ISINF (value)
2000 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2001 TREE_OVERFLOW (t) = 1;
2002 else
2003 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2004 return t;
2005 }
2006
2007 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2008 to a floating point type. */
2009
2010 static tree
2011 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2012 {
2013 REAL_VALUE_TYPE value;
2014 tree t;
2015
2016 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2017 t = build_real (type, value);
2018
2019 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2020 return t;
2021 }
2022
2023 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2024 to another fixed-point type. */
2025
2026 static tree
2027 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2028 {
2029 FIXED_VALUE_TYPE value;
2030 tree t;
2031 bool overflow_p;
2032
2033 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2034 TYPE_SATURATING (type));
2035 t = build_fixed (type, value);
2036
2037 /* Propagate overflow flags. */
2038 if (overflow_p | TREE_OVERFLOW (arg1))
2039 TREE_OVERFLOW (t) = 1;
2040 return t;
2041 }
2042
2043 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2044 to a fixed-point type. */
2045
2046 static tree
2047 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2048 {
2049 FIXED_VALUE_TYPE value;
2050 tree t;
2051 bool overflow_p;
2052 double_int di;
2053
2054 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2055
2056 di.low = TREE_INT_CST_ELT (arg1, 0);
2057 if (TREE_INT_CST_NUNITS (arg1) == 1)
2058 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2059 else
2060 di.high = TREE_INT_CST_ELT (arg1, 1);
2061
2062 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2063 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2064 TYPE_SATURATING (type));
2065 t = build_fixed (type, value);
2066
2067 /* Propagate overflow flags. */
2068 if (overflow_p | TREE_OVERFLOW (arg1))
2069 TREE_OVERFLOW (t) = 1;
2070 return t;
2071 }
2072
2073 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2074 to a fixed-point type. */
2075
2076 static tree
2077 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2078 {
2079 FIXED_VALUE_TYPE value;
2080 tree t;
2081 bool overflow_p;
2082
2083 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2084 &TREE_REAL_CST (arg1),
2085 TYPE_SATURATING (type));
2086 t = build_fixed (type, value);
2087
2088 /* Propagate overflow flags. */
2089 if (overflow_p | TREE_OVERFLOW (arg1))
2090 TREE_OVERFLOW (t) = 1;
2091 return t;
2092 }
2093
2094 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2095 type TYPE. If no simplification can be done return NULL_TREE. */
2096
2097 static tree
2098 fold_convert_const (enum tree_code code, tree type, tree arg1)
2099 {
2100 if (TREE_TYPE (arg1) == type)
2101 return arg1;
2102
2103 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2104 || TREE_CODE (type) == OFFSET_TYPE)
2105 {
2106 if (TREE_CODE (arg1) == INTEGER_CST)
2107 return fold_convert_const_int_from_int (type, arg1);
2108 else if (TREE_CODE (arg1) == REAL_CST)
2109 return fold_convert_const_int_from_real (code, type, arg1);
2110 else if (TREE_CODE (arg1) == FIXED_CST)
2111 return fold_convert_const_int_from_fixed (type, arg1);
2112 }
2113 else if (TREE_CODE (type) == REAL_TYPE)
2114 {
2115 if (TREE_CODE (arg1) == INTEGER_CST)
2116 return build_real_from_int_cst (type, arg1);
2117 else if (TREE_CODE (arg1) == REAL_CST)
2118 return fold_convert_const_real_from_real (type, arg1);
2119 else if (TREE_CODE (arg1) == FIXED_CST)
2120 return fold_convert_const_real_from_fixed (type, arg1);
2121 }
2122 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2123 {
2124 if (TREE_CODE (arg1) == FIXED_CST)
2125 return fold_convert_const_fixed_from_fixed (type, arg1);
2126 else if (TREE_CODE (arg1) == INTEGER_CST)
2127 return fold_convert_const_fixed_from_int (type, arg1);
2128 else if (TREE_CODE (arg1) == REAL_CST)
2129 return fold_convert_const_fixed_from_real (type, arg1);
2130 }
2131 return NULL_TREE;
2132 }
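
/* Example of the dispatch above (illustrative only): converting the
   INTEGER_CST 300 to signed char goes through
   fold_convert_const_int_from_int and truncates:

     tree c = fold_convert_const (NOP_EXPR, signed_char_type_node,
				  build_int_cst (integer_type_node,
						 300));

   C is the signed char constant 44, i.e. the low 8 bits of 300.  */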
2133
2134 /* Construct a vector of zero elements of vector type TYPE. */
2135
2136 static tree
2137 build_zero_vector (tree type)
2138 {
2139 tree t;
2140
2141 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2142 return build_vector_from_val (type, t);
2143 }
2144
2145 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2146
2147 bool
2148 fold_convertible_p (const_tree type, const_tree arg)
2149 {
2150 tree orig = TREE_TYPE (arg);
2151
2152 if (type == orig)
2153 return true;
2154
2155 if (TREE_CODE (arg) == ERROR_MARK
2156 || TREE_CODE (type) == ERROR_MARK
2157 || TREE_CODE (orig) == ERROR_MARK)
2158 return false;
2159
2160 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2161 return true;
2162
2163 switch (TREE_CODE (type))
2164 {
2165 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2166 case POINTER_TYPE: case REFERENCE_TYPE:
2167 case OFFSET_TYPE:
2168 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2169 || TREE_CODE (orig) == OFFSET_TYPE)
2170 return true;
2171 return (TREE_CODE (orig) == VECTOR_TYPE
2172 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2173
2174 case REAL_TYPE:
2175 case FIXED_POINT_TYPE:
2176 case COMPLEX_TYPE:
2177 case VECTOR_TYPE:
2178 case VOID_TYPE:
2179 return TREE_CODE (type) == TREE_CODE (orig);
2180
2181 default:
2182 return false;
2183 }
2184 }
2185
2186 /* Convert expression ARG to type TYPE. Used by the middle-end for
2187 simple conversions in preference to calling the front-end's convert. */
2188
2189 tree
2190 fold_convert_loc (location_t loc, tree type, tree arg)
2191 {
2192 tree orig = TREE_TYPE (arg);
2193 tree tem;
2194
2195 if (type == orig)
2196 return arg;
2197
2198 if (TREE_CODE (arg) == ERROR_MARK
2199 || TREE_CODE (type) == ERROR_MARK
2200 || TREE_CODE (orig) == ERROR_MARK)
2201 return error_mark_node;
2202
2203 switch (TREE_CODE (type))
2204 {
2205 case POINTER_TYPE:
2206 case REFERENCE_TYPE:
2207 /* Handle conversions between pointers to different address spaces. */
2208 if (POINTER_TYPE_P (orig)
2209 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2210 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2211 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2212 /* fall through */
2213
2214 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2215 case OFFSET_TYPE:
2216 if (TREE_CODE (arg) == INTEGER_CST)
2217 {
2218 tem = fold_convert_const (NOP_EXPR, type, arg);
2219 if (tem != NULL_TREE)
2220 return tem;
2221 }
2222 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2223 || TREE_CODE (orig) == OFFSET_TYPE)
2224 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2225 if (TREE_CODE (orig) == COMPLEX_TYPE)
2226 return fold_convert_loc (loc, type,
2227 fold_build1_loc (loc, REALPART_EXPR,
2228 TREE_TYPE (orig), arg));
2229 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2230 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2231 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2232
2233 case REAL_TYPE:
2234 if (TREE_CODE (arg) == INTEGER_CST)
2235 {
2236 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2237 if (tem != NULL_TREE)
2238 return tem;
2239 }
2240 else if (TREE_CODE (arg) == REAL_CST)
2241 {
2242 tem = fold_convert_const (NOP_EXPR, type, arg);
2243 if (tem != NULL_TREE)
2244 return tem;
2245 }
2246 else if (TREE_CODE (arg) == FIXED_CST)
2247 {
2248 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2249 if (tem != NULL_TREE)
2250 return tem;
2251 }
2252
2253 switch (TREE_CODE (orig))
2254 {
2255 case INTEGER_TYPE:
2256 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2257 case POINTER_TYPE: case REFERENCE_TYPE:
2258 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2259
2260 case REAL_TYPE:
2261 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2262
2263 case FIXED_POINT_TYPE:
2264 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2265
2266 case COMPLEX_TYPE:
2267 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2268 return fold_convert_loc (loc, type, tem);
2269
2270 default:
2271 gcc_unreachable ();
2272 }
2273
2274 case FIXED_POINT_TYPE:
2275 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2276 || TREE_CODE (arg) == REAL_CST)
2277 {
2278 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2279 if (tem != NULL_TREE)
2280 goto fold_convert_exit;
2281 }
2282
2283 switch (TREE_CODE (orig))
2284 {
2285 case FIXED_POINT_TYPE:
2286 case INTEGER_TYPE:
2287 case ENUMERAL_TYPE:
2288 case BOOLEAN_TYPE:
2289 case REAL_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2291
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2295
2296 default:
2297 gcc_unreachable ();
2298 }
2299
2300 case COMPLEX_TYPE:
2301 switch (TREE_CODE (orig))
2302 {
2303 case INTEGER_TYPE:
2304 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2305 case POINTER_TYPE: case REFERENCE_TYPE:
2306 case REAL_TYPE:
2307 case FIXED_POINT_TYPE:
2308 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2309 fold_convert_loc (loc, TREE_TYPE (type), arg),
2310 fold_convert_loc (loc, TREE_TYPE (type),
2311 integer_zero_node));
2312 case COMPLEX_TYPE:
2313 {
2314 tree rpart, ipart;
2315
2316 if (TREE_CODE (arg) == COMPLEX_EXPR)
2317 {
2318 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2319 TREE_OPERAND (arg, 0));
2320 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2321 TREE_OPERAND (arg, 1));
2322 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2323 }
2324
2325 arg = save_expr (arg);
2326 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2327 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2328 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2329 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2330 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2331 }
2332
2333 default:
2334 gcc_unreachable ();
2335 }
2336
2337 case VECTOR_TYPE:
2338 if (integer_zerop (arg))
2339 return build_zero_vector (type);
2340 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2341 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2342 || TREE_CODE (orig) == VECTOR_TYPE);
2343 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2344
2345 case VOID_TYPE:
2346 tem = fold_ignored_result (arg);
2347 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2348
2349 default:
2350 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2351 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2352 gcc_unreachable ();
2353 }
2354 fold_convert_exit:
2355 protected_set_expr_location_unshare (tem, loc);
2356 return tem;
2357 }
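
/* Usage sketch (illustrative only; ARG is a hypothetical expression
   of integer type):

     tree d = fold_convert_loc (loc, double_type_node, arg);

   If ARG is an INTEGER_CST the conversion is folded to a REAL_CST
   immediately; otherwise a FLOAT_EXPR is built, per the REAL_TYPE
   case above.  */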
2358 \f
2359 /* Return false if expr can be assumed not to be an lvalue, true
2360 otherwise. */
2361
2362 static bool
2363 maybe_lvalue_p (const_tree x)
2364 {
2365 /* We only need to wrap lvalue tree codes. */
2366 switch (TREE_CODE (x))
2367 {
2368 case VAR_DECL:
2369 case PARM_DECL:
2370 case RESULT_DECL:
2371 case LABEL_DECL:
2372 case FUNCTION_DECL:
2373 case SSA_NAME:
2374
2375 case COMPONENT_REF:
2376 case MEM_REF:
2377 case INDIRECT_REF:
2378 case ARRAY_REF:
2379 case ARRAY_RANGE_REF:
2380 case BIT_FIELD_REF:
2381 case OBJ_TYPE_REF:
2382
2383 case REALPART_EXPR:
2384 case IMAGPART_EXPR:
2385 case PREINCREMENT_EXPR:
2386 case PREDECREMENT_EXPR:
2387 case SAVE_EXPR:
2388 case TRY_CATCH_EXPR:
2389 case WITH_CLEANUP_EXPR:
2390 case COMPOUND_EXPR:
2391 case MODIFY_EXPR:
2392 case TARGET_EXPR:
2393 case COND_EXPR:
2394 case BIND_EXPR:
2395 break;
2396
2397 default:
2398 /* Assume the worst for front-end tree codes. */
2399 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2400 break;
2401 return false;
2402 }
2403
2404 return true;
2405 }
2406
2407 /* Return an expr equal to X but certainly not valid as an lvalue. */
2408
2409 tree
2410 non_lvalue_loc (location_t loc, tree x)
2411 {
2412 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2413 us. */
2414 if (in_gimple_form)
2415 return x;
2416
2417 if (! maybe_lvalue_p (x))
2418 return x;
2419 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2420 }
2421
2422 /* When pedantic, return an expr equal to X but certainly not valid as a
2423 pedantic lvalue. Otherwise, return X. */
2424
2425 static tree
2426 pedantic_non_lvalue_loc (location_t loc, tree x)
2427 {
2428 return protected_set_expr_location_unshare (x, loc);
2429 }
2430 \f
2431 /* Given a tree comparison code, return the code that is the logical inverse.
2432 It is generally not safe to do this for floating-point comparisons, except
2433 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2434 ERROR_MARK in this case. */
2435
2436 enum tree_code
2437 invert_tree_comparison (enum tree_code code, bool honor_nans)
2438 {
2439 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2440 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2441 return ERROR_MARK;
2442
2443 switch (code)
2444 {
2445 case EQ_EXPR:
2446 return NE_EXPR;
2447 case NE_EXPR:
2448 return EQ_EXPR;
2449 case GT_EXPR:
2450 return honor_nans ? UNLE_EXPR : LE_EXPR;
2451 case GE_EXPR:
2452 return honor_nans ? UNLT_EXPR : LT_EXPR;
2453 case LT_EXPR:
2454 return honor_nans ? UNGE_EXPR : GE_EXPR;
2455 case LE_EXPR:
2456 return honor_nans ? UNGT_EXPR : GT_EXPR;
2457 case LTGT_EXPR:
2458 return UNEQ_EXPR;
2459 case UNEQ_EXPR:
2460 return LTGT_EXPR;
2461 case UNGT_EXPR:
2462 return LE_EXPR;
2463 case UNGE_EXPR:
2464 return LT_EXPR;
2465 case UNLT_EXPR:
2466 return GE_EXPR;
2467 case UNLE_EXPR:
2468 return GT_EXPR;
2469 case ORDERED_EXPR:
2470 return UNORDERED_EXPR;
2471 case UNORDERED_EXPR:
2472 return ORDERED_EXPR;
2473 default:
2474 gcc_unreachable ();
2475 }
2476 }
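
/* Example (illustrative): when NaNs are honored, the logical inverse
   of x < y is x UNGE y rather than x >= y, because both x < y and
   x >= y are false when either operand is NaN:

     enum tree_code inv = invert_tree_comparison (LT_EXPR, true);

   INV is UNGE_EXPR here, or ERROR_MARK if flag_trapping_math is set,
   so callers must be prepared to give up on the transformation.  */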
2477
2478 /* Similar, but return the comparison that results if the operands are
2479 swapped. This is safe for floating-point. */
2480
2481 enum tree_code
2482 swap_tree_comparison (enum tree_code code)
2483 {
2484 switch (code)
2485 {
2486 case EQ_EXPR:
2487 case NE_EXPR:
2488 case ORDERED_EXPR:
2489 case UNORDERED_EXPR:
2490 case LTGT_EXPR:
2491 case UNEQ_EXPR:
2492 return code;
2493 case GT_EXPR:
2494 return LT_EXPR;
2495 case GE_EXPR:
2496 return LE_EXPR;
2497 case LT_EXPR:
2498 return GT_EXPR;
2499 case LE_EXPR:
2500 return GE_EXPR;
2501 case UNGT_EXPR:
2502 return UNLT_EXPR;
2503 case UNGE_EXPR:
2504 return UNLE_EXPR;
2505 case UNLT_EXPR:
2506 return UNGT_EXPR;
2507 case UNLE_EXPR:
2508 return UNGE_EXPR;
2509 default:
2510 gcc_unreachable ();
2511 }
2512 }
2513
2514
2515 /* Convert a comparison tree code from an enum tree_code representation
2516 into a compcode bit-based encoding. This function is the inverse of
2517 compcode_to_comparison. */
2518
2519 static enum comparison_code
2520 comparison_to_compcode (enum tree_code code)
2521 {
2522 switch (code)
2523 {
2524 case LT_EXPR:
2525 return COMPCODE_LT;
2526 case EQ_EXPR:
2527 return COMPCODE_EQ;
2528 case LE_EXPR:
2529 return COMPCODE_LE;
2530 case GT_EXPR:
2531 return COMPCODE_GT;
2532 case NE_EXPR:
2533 return COMPCODE_NE;
2534 case GE_EXPR:
2535 return COMPCODE_GE;
2536 case ORDERED_EXPR:
2537 return COMPCODE_ORD;
2538 case UNORDERED_EXPR:
2539 return COMPCODE_UNORD;
2540 case UNLT_EXPR:
2541 return COMPCODE_UNLT;
2542 case UNEQ_EXPR:
2543 return COMPCODE_UNEQ;
2544 case UNLE_EXPR:
2545 return COMPCODE_UNLE;
2546 case UNGT_EXPR:
2547 return COMPCODE_UNGT;
2548 case LTGT_EXPR:
2549 return COMPCODE_LTGT;
2550 case UNGE_EXPR:
2551 return COMPCODE_UNGE;
2552 default:
2553 gcc_unreachable ();
2554 }
2555 }
2556
2557 /* Convert a compcode bit-based encoding of a comparison operator back
2558 to GCC's enum tree_code representation. This function is the
2559 inverse of comparison_to_compcode. */
2560
2561 static enum tree_code
2562 compcode_to_comparison (enum comparison_code code)
2563 {
2564 switch (code)
2565 {
2566 case COMPCODE_LT:
2567 return LT_EXPR;
2568 case COMPCODE_EQ:
2569 return EQ_EXPR;
2570 case COMPCODE_LE:
2571 return LE_EXPR;
2572 case COMPCODE_GT:
2573 return GT_EXPR;
2574 case COMPCODE_NE:
2575 return NE_EXPR;
2576 case COMPCODE_GE:
2577 return GE_EXPR;
2578 case COMPCODE_ORD:
2579 return ORDERED_EXPR;
2580 case COMPCODE_UNORD:
2581 return UNORDERED_EXPR;
2582 case COMPCODE_UNLT:
2583 return UNLT_EXPR;
2584 case COMPCODE_UNEQ:
2585 return UNEQ_EXPR;
2586 case COMPCODE_UNLE:
2587 return UNLE_EXPR;
2588 case COMPCODE_UNGT:
2589 return UNGT_EXPR;
2590 case COMPCODE_LTGT:
2591 return LTGT_EXPR;
2592 case COMPCODE_UNGE:
2593 return UNGE_EXPR;
2594 default:
2595 gcc_unreachable ();
2596 }
2597 }
2598
2599 /* Return a tree for the comparison which is the combination of
2600 doing the AND or OR (depending on CODE) of the two operations LCODE
2601 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2602 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2603 if this makes the transformation invalid. */
2604
2605 tree
2606 combine_comparisons (location_t loc,
2607 enum tree_code code, enum tree_code lcode,
2608 enum tree_code rcode, tree truth_type,
2609 tree ll_arg, tree lr_arg)
2610 {
2611 bool honor_nans = HONOR_NANS (ll_arg);
2612 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2613 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2614 int compcode;
2615
2616 switch (code)
2617 {
2618 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2619 compcode = lcompcode & rcompcode;
2620 break;
2621
2622 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2623 compcode = lcompcode | rcompcode;
2624 break;
2625
2626 default:
2627 return NULL_TREE;
2628 }
2629
2630 if (!honor_nans)
2631 {
2632 /* Eliminate unordered comparisons, as well as LTGT and ORD
2633 which are not used unless the mode has NaNs. */
2634 compcode &= ~COMPCODE_UNORD;
2635 if (compcode == COMPCODE_LTGT)
2636 compcode = COMPCODE_NE;
2637 else if (compcode == COMPCODE_ORD)
2638 compcode = COMPCODE_TRUE;
2639 }
2640 else if (flag_trapping_math)
2641 {
2642 /* Check that the original operation and the optimized ones will trap
2643 under the same condition. */
2644 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2645 && (lcompcode != COMPCODE_EQ)
2646 && (lcompcode != COMPCODE_ORD);
2647 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2648 && (rcompcode != COMPCODE_EQ)
2649 && (rcompcode != COMPCODE_ORD);
2650 bool trap = (compcode & COMPCODE_UNORD) == 0
2651 && (compcode != COMPCODE_EQ)
2652 && (compcode != COMPCODE_ORD);
2653
2654 /* In a short-circuited boolean expression the LHS might be
2655 such that the RHS, if evaluated, will never trap. For
2656 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2657 if neither x nor y is NaN. (This is a mixed blessing: for
2658 example, the expression above will never trap, hence
2659 optimizing it to x < y would be invalid). */
2660 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2661 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2662 rtrap = false;
2663
2664 /* If the comparison was short-circuited, and only the RHS
2665 trapped, we may now generate a spurious trap. */
2666 if (rtrap && !ltrap
2667 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2668 return NULL_TREE;
2669
2670 /* If we changed the conditions that cause a trap, we lose. */
2671 if ((ltrap || rtrap) != trap)
2672 return NULL_TREE;
2673 }
2674
2675 if (compcode == COMPCODE_TRUE)
2676 return constant_boolean_node (true, truth_type);
2677 else if (compcode == COMPCODE_FALSE)
2678 return constant_boolean_node (false, truth_type);
2679 else
2680 {
2681 enum tree_code tcode;
2682
2683 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2684 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2685 }
2686 }
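
/* Worked example (illustrative; X and Y are hypothetical operands of
   integral type, so NaNs are not honored):

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			  boolean_type_node, x, y)

   ORs the bit encodings of LT and EQ, which yields the encoding of
   LE, and so returns a tree for x <= y.  With TRUTH_ANDIF_EXPR the
   AND of the two encodings is empty and the result is constant
   false.  */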
2687 \f
2688 /* Return nonzero if two operands (typically of the same tree node)
2689 are necessarily equal. If either argument has side-effects this
2690 function returns zero. FLAGS modifies behavior as follows:
2691
2692 If OEP_ONLY_CONST is set, only return nonzero for constants.
2693 This function tests whether the operands are indistinguishable;
2694 it does not test whether they are equal using C's == operation.
2695 The distinction is important for IEEE floating point, because
2696 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2697 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2698
2699 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2700 even though it may hold multiple values during a function.
2701 This is because a GCC tree node guarantees that nothing else is
2702 executed between the evaluation of its "operands" (which may often
2703 be evaluated in arbitrary order). Hence if the operands themselves
2704 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2705 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2706 unset means assuming isochronic (or instantaneous) tree equivalence.
2707 Unless comparing arbitrary expression trees, such as from different
2708 statements, this flag can usually be left unset.
2709
2710 If OEP_PURE_SAME is set, then pure functions with identical arguments
2711 are considered the same. It is used when the caller has other ways
2712 to ensure that global memory is unchanged in between. */
2713
2714 int
2715 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2716 {
2717 /* If either is ERROR_MARK, they aren't equal. */
2718 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2719 || TREE_TYPE (arg0) == error_mark_node
2720 || TREE_TYPE (arg1) == error_mark_node)
2721 return 0;
2722
2723 /* Similar, if either does not have a type (like a released SSA name),
2724 they aren't equal. */
2725 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2726 return 0;
2727
2728 /* Check equality of integer constants before bailing out due to
2729 precision differences. */
2730 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2731 return tree_int_cst_equal (arg0, arg1);
2732
2733 /* If both types don't have the same signedness, then we can't consider
2734 them equal. We must check this before the STRIP_NOPS calls
2735 because they may change the signedness of the arguments. As pointers
2736 strictly don't have a signedness, require either two pointers or
2737 two non-pointers as well. */
2738 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2739 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2740 return 0;
2741
2742   /* We cannot consider pointers to different address spaces equal.  */
2743 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2744 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2745 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2746 return 0;
2747
2748 /* If both types don't have the same precision, then it is not safe
2749 to strip NOPs. */
2750 if (element_precision (TREE_TYPE (arg0))
2751 != element_precision (TREE_TYPE (arg1)))
2752 return 0;
2753
2754 STRIP_NOPS (arg0);
2755 STRIP_NOPS (arg1);
2756
2757 /* In case both args are comparisons but with different comparison
2758 code, try to swap the comparison operands of one arg to produce
2759 a match and compare that variant. */
2760 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2761 && COMPARISON_CLASS_P (arg0)
2762 && COMPARISON_CLASS_P (arg1))
2763 {
2764 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2765
2766 if (TREE_CODE (arg0) == swap_code)
2767 return operand_equal_p (TREE_OPERAND (arg0, 0),
2768 TREE_OPERAND (arg1, 1), flags)
2769 && operand_equal_p (TREE_OPERAND (arg0, 1),
2770 TREE_OPERAND (arg1, 0), flags);
2771 }
2772
2773 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2774 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2775 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2776 return 0;
2777
2778 /* This is needed for conversions and for COMPONENT_REF.
2779 Might as well play it safe and always test this. */
2780 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2781 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2782 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2783 return 0;
2784
2785 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2786 We don't care about side effects in that case because the SAVE_EXPR
2787 takes care of that for us. In all other cases, two expressions are
2788 equal if they have no side effects. If we have two identical
2789 expressions with side effects that should be treated the same due
2790 to the only side effects being identical SAVE_EXPR's, that will
2791 be detected in the recursive calls below.
2792 If we are taking an invariant address of two identical objects
2793 they are necessarily equal as well. */
2794 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2795 && (TREE_CODE (arg0) == SAVE_EXPR
2796 || (flags & OEP_CONSTANT_ADDRESS_OF)
2797 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2798 return 1;
2799
2800 /* Next handle constant cases, those for which we can return 1 even
2801 if ONLY_CONST is set. */
2802 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2803 switch (TREE_CODE (arg0))
2804 {
2805 case INTEGER_CST:
2806 return tree_int_cst_equal (arg0, arg1);
2807
2808 case FIXED_CST:
2809 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2810 TREE_FIXED_CST (arg1));
2811
2812 case REAL_CST:
2813 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2814 TREE_REAL_CST (arg1)))
2815 return 1;
2816
2817
2818 if (!HONOR_SIGNED_ZEROS (arg0))
2819 {
2820 /* If we do not distinguish between signed and unsigned zero,
2821 consider them equal. */
2822 if (real_zerop (arg0) && real_zerop (arg1))
2823 return 1;
2824 }
2825 return 0;
2826
2827 case VECTOR_CST:
2828 {
2829 unsigned i;
2830
2831 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2832 return 0;
2833
2834 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2835 {
2836 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2837 VECTOR_CST_ELT (arg1, i), flags))
2838 return 0;
2839 }
2840 return 1;
2841 }
2842
2843 case COMPLEX_CST:
2844 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2845 flags)
2846 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2847 flags));
2848
2849 case STRING_CST:
2850 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2851 && ! memcmp (TREE_STRING_POINTER (arg0),
2852 TREE_STRING_POINTER (arg1),
2853 TREE_STRING_LENGTH (arg0)));
2854
2855 case ADDR_EXPR:
2856 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2857 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2858 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2859 default:
2860 break;
2861 }
2862
2863 if (flags & OEP_ONLY_CONST)
2864 return 0;
2865
2866 /* Define macros to test an operand from arg0 and arg1 for equality and a
2867 variant that allows null and views null as being different from any
2868      non-null value.  In the latter case, if either is null, then both
2869 must be; otherwise, do the normal comparison. */
2870 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2871 TREE_OPERAND (arg1, N), flags)
2872
2873 #define OP_SAME_WITH_NULL(N) \
2874 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2875 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2876
2877 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2878 {
2879 case tcc_unary:
2880 /* Two conversions are equal only if signedness and modes match. */
2881 switch (TREE_CODE (arg0))
2882 {
2883 CASE_CONVERT:
2884 case FIX_TRUNC_EXPR:
2885 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2886 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2887 return 0;
2888 break;
2889 default:
2890 break;
2891 }
2892
2893 return OP_SAME (0);
2894
2895
2896 case tcc_comparison:
2897 case tcc_binary:
2898 if (OP_SAME (0) && OP_SAME (1))
2899 return 1;
2900
2901 /* For commutative ops, allow the other order. */
2902 return (commutative_tree_code (TREE_CODE (arg0))
2903 && operand_equal_p (TREE_OPERAND (arg0, 0),
2904 TREE_OPERAND (arg1, 1), flags)
2905 && operand_equal_p (TREE_OPERAND (arg0, 1),
2906 TREE_OPERAND (arg1, 0), flags));
2907
2908 case tcc_reference:
2909 /* If either of the pointer (or reference) expressions we are
2910 dereferencing contain a side effect, these cannot be equal,
2911 but their addresses can be. */
2912 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2913 && (TREE_SIDE_EFFECTS (arg0)
2914 || TREE_SIDE_EFFECTS (arg1)))
2915 return 0;
2916
2917 switch (TREE_CODE (arg0))
2918 {
2919 case INDIRECT_REF:
2920 if (!(flags & OEP_ADDRESS_OF)
2921 && (TYPE_ALIGN (TREE_TYPE (arg0))
2922 != TYPE_ALIGN (TREE_TYPE (arg1))))
2923 return 0;
2924 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2925 return OP_SAME (0);
2926
2927 case REALPART_EXPR:
2928 case IMAGPART_EXPR:
2929 return OP_SAME (0);
2930
2931 case TARGET_MEM_REF:
2932 case MEM_REF:
2933 /* Require equal access sizes, and similar pointer types.
2934 We can have incomplete types for array references of
2935 variable-sized arrays from the Fortran frontend
2936 though. Also verify the types are compatible. */
2937 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2938 || (TYPE_SIZE (TREE_TYPE (arg0))
2939 && TYPE_SIZE (TREE_TYPE (arg1))
2940 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2941 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2942 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2943 && ((flags & OEP_ADDRESS_OF)
2944 || (alias_ptr_types_compatible_p
2945 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2946 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2947 && (MR_DEPENDENCE_CLIQUE (arg0)
2948 == MR_DEPENDENCE_CLIQUE (arg1))
2949 && (MR_DEPENDENCE_BASE (arg0)
2950 == MR_DEPENDENCE_BASE (arg1))
2951 && (TYPE_ALIGN (TREE_TYPE (arg0))
2952 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2953 return 0;
2954 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2955 return (OP_SAME (0) && OP_SAME (1)
2956 		  /* TARGET_MEM_REFs require equal extra operands.  */
2957 && (TREE_CODE (arg0) != TARGET_MEM_REF
2958 || (OP_SAME_WITH_NULL (2)
2959 && OP_SAME_WITH_NULL (3)
2960 && OP_SAME_WITH_NULL (4))));
2961
2962 case ARRAY_REF:
2963 case ARRAY_RANGE_REF:
2964 /* Operands 2 and 3 may be null.
2965 	     Compare the array index by value first if it is constant, as we
2966 	     may have different types but the same value here.  */
2967 if (!OP_SAME (0))
2968 return 0;
2969 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2970 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2971 TREE_OPERAND (arg1, 1))
2972 || OP_SAME (1))
2973 && OP_SAME_WITH_NULL (2)
2974 && OP_SAME_WITH_NULL (3));
2975
2976 case COMPONENT_REF:
2977 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2978 may be NULL when we're called to compare MEM_EXPRs. */
2979 if (!OP_SAME_WITH_NULL (0)
2980 || !OP_SAME (1))
2981 return 0;
2982 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2983 return OP_SAME_WITH_NULL (2);
2984
2985 case BIT_FIELD_REF:
2986 if (!OP_SAME (0))
2987 return 0;
2988 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2989 return OP_SAME (1) && OP_SAME (2);
2990
2991 default:
2992 return 0;
2993 }
2994
2995 case tcc_expression:
2996 switch (TREE_CODE (arg0))
2997 {
2998 case ADDR_EXPR:
2999 return operand_equal_p (TREE_OPERAND (arg0, 0),
3000 TREE_OPERAND (arg1, 0),
3001 flags | OEP_ADDRESS_OF);
3002
3003 case TRUTH_NOT_EXPR:
3004 return OP_SAME (0);
3005
3006 case TRUTH_ANDIF_EXPR:
3007 case TRUTH_ORIF_EXPR:
3008 return OP_SAME (0) && OP_SAME (1);
3009
3010 case FMA_EXPR:
3011 case WIDEN_MULT_PLUS_EXPR:
3012 case WIDEN_MULT_MINUS_EXPR:
3013 if (!OP_SAME (2))
3014 return 0;
3015 	  /* The multiplication operands are commutative.  */
3016 /* FALLTHRU */
3017
3018 case TRUTH_AND_EXPR:
3019 case TRUTH_OR_EXPR:
3020 case TRUTH_XOR_EXPR:
3021 if (OP_SAME (0) && OP_SAME (1))
3022 return 1;
3023
3024 /* Otherwise take into account this is a commutative operation. */
3025 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3026 TREE_OPERAND (arg1, 1), flags)
3027 && operand_equal_p (TREE_OPERAND (arg0, 1),
3028 TREE_OPERAND (arg1, 0), flags));
3029
3030 case COND_EXPR:
3031 case VEC_COND_EXPR:
3032 case DOT_PROD_EXPR:
3033 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3034
3035 default:
3036 return 0;
3037 }
3038
3039 case tcc_vl_exp:
3040 switch (TREE_CODE (arg0))
3041 {
3042 case CALL_EXPR:
3043 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3044 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3045 	  /* If one CALL_EXPR is an internal function call and the other
3046 	     is a normal function call, then they are not equal.  */
3047 return 0;
3048 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3049 {
3050 /* If the CALL_EXPRs call different internal functions, then they
3051 are not equal. */
3052 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3053 return 0;
3054 }
3055 else
3056 {
3057 /* If the CALL_EXPRs call different functions, then they are not
3058 equal. */
3059 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3060 flags))
3061 return 0;
3062 }
3063
3064 {
3065 unsigned int cef = call_expr_flags (arg0);
3066 if (flags & OEP_PURE_SAME)
3067 cef &= ECF_CONST | ECF_PURE;
3068 else
3069 cef &= ECF_CONST;
3070 if (!cef)
3071 return 0;
3072 }
3073
3074 /* Now see if all the arguments are the same. */
3075 {
3076 const_call_expr_arg_iterator iter0, iter1;
3077 const_tree a0, a1;
3078 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3079 a1 = first_const_call_expr_arg (arg1, &iter1);
3080 a0 && a1;
3081 a0 = next_const_call_expr_arg (&iter0),
3082 a1 = next_const_call_expr_arg (&iter1))
3083 if (! operand_equal_p (a0, a1, flags))
3084 return 0;
3085
3086 /* If we get here and both argument lists are exhausted
3087 then the CALL_EXPRs are equal. */
3088 return ! (a0 || a1);
3089 }
3090 default:
3091 return 0;
3092 }
3093
3094 case tcc_declaration:
3095 /* Consider __builtin_sqrt equal to sqrt. */
3096 return (TREE_CODE (arg0) == FUNCTION_DECL
3097 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3098 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3099 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3100
3101 default:
3102 return 0;
3103 }
3104
3105 #undef OP_SAME
3106 #undef OP_SAME_WITH_NULL
3107 }
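
/* Usage sketch (illustrative; A and B are hypothetical trees):

     if (operand_equal_p (a, b, 0))
       ... A and B are interchangeable ...

   Set OEP_ONLY_CONST when comparing trees taken from different
   statements, since intervening code may change the values of any
   variables they mention; see the overview comment above.  */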
3108 \f
3109 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3110 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3111
3112 When in doubt, return 0. */
3113
3114 static int
3115 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3116 {
3117 int unsignedp1, unsignedpo;
3118 tree primarg0, primarg1, primother;
3119 unsigned int correct_width;
3120
3121 if (operand_equal_p (arg0, arg1, 0))
3122 return 1;
3123
3124 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3125 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3126 return 0;
3127
3128 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3129 and see if the inner values are the same. This removes any
3130 signedness comparison, which doesn't matter here. */
3131 primarg0 = arg0, primarg1 = arg1;
3132 STRIP_NOPS (primarg0);
3133 STRIP_NOPS (primarg1);
3134 if (operand_equal_p (primarg0, primarg1, 0))
3135 return 1;
3136
3137 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3138 actual comparison operand, ARG0.
3139
3140 First throw away any conversions to wider types
3141 already present in the operands. */
3142
3143 primarg1 = get_narrower (arg1, &unsignedp1);
3144 primother = get_narrower (other, &unsignedpo);
3145
3146 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3147 if (unsignedp1 == unsignedpo
3148 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3149 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3150 {
3151 tree type = TREE_TYPE (arg0);
3152
3153 /* Make sure shorter operand is extended the right way
3154 to match the longer operand. */
3155 primarg1 = fold_convert (signed_or_unsigned_type_for
3156 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3157
3158 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3159 return 1;
3160 }
3161
3162 return 0;
3163 }
3164 \f
3165 /* See if ARG is an expression that is either a comparison or is performing
3166 arithmetic on comparisons. The comparisons must only be comparing
3167 two different values, which will be stored in *CVAL1 and *CVAL2; if
3168 they are nonzero it means that some operands have already been found.
3169 No variables may be used anywhere else in the expression except in the
3170 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3171 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3172
3173 If this is true, return 1. Otherwise, return zero. */
3174
3175 static int
3176 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3177 {
3178 enum tree_code code = TREE_CODE (arg);
3179 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3180
3181 /* We can handle some of the tcc_expression cases here. */
3182 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3183 tclass = tcc_unary;
3184 else if (tclass == tcc_expression
3185 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3186 || code == COMPOUND_EXPR))
3187 tclass = tcc_binary;
3188
3189 else if (tclass == tcc_expression && code == SAVE_EXPR
3190 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3191 {
3192 /* If we've already found a CVAL1 or CVAL2, this expression is
3193 	 too complex to handle.  */
3194 if (*cval1 || *cval2)
3195 return 0;
3196
3197 tclass = tcc_unary;
3198 *save_p = 1;
3199 }
3200
3201 switch (tclass)
3202 {
3203 case tcc_unary:
3204 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3205
3206 case tcc_binary:
3207 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3208 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3209 cval1, cval2, save_p));
3210
3211 case tcc_constant:
3212 return 1;
3213
3214 case tcc_expression:
3215 if (code == COND_EXPR)
3216 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3217 cval1, cval2, save_p)
3218 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3219 cval1, cval2, save_p)
3220 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3221 cval1, cval2, save_p));
3222 return 0;
3223
3224 case tcc_comparison:
3225 /* First see if we can handle the first operand, then the second. For
3226 the second operand, we know *CVAL1 can't be zero. It must be that
3227 one side of the comparison is each of the values; test for the
3228 case where this isn't true by failing if the two operands
3229 are the same. */
3230
3231 if (operand_equal_p (TREE_OPERAND (arg, 0),
3232 TREE_OPERAND (arg, 1), 0))
3233 return 0;
3234
3235 if (*cval1 == 0)
3236 *cval1 = TREE_OPERAND (arg, 0);
3237 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3238 ;
3239 else if (*cval2 == 0)
3240 *cval2 = TREE_OPERAND (arg, 0);
3241 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3242 ;
3243 else
3244 return 0;
3245
3246 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3247 ;
3248 else if (*cval2 == 0)
3249 *cval2 = TREE_OPERAND (arg, 1);
3250 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3251 ;
3252 else
3253 return 0;
3254
3255 return 1;
3256
3257 default:
3258 return 0;
3259 }
3260 }
3261 \f
3262 /* ARG is a tree that is known to contain just arithmetic operations and
3263 comparisons. Evaluate the operations in the tree substituting NEW0 for
3264 any occurrence of OLD0 as an operand of a comparison and likewise for
3265 NEW1 and OLD1. */
3266
3267 static tree
3268 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3269 tree old1, tree new1)
3270 {
3271 tree type = TREE_TYPE (arg);
3272 enum tree_code code = TREE_CODE (arg);
3273 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3274
3275 /* We can handle some of the tcc_expression cases here. */
3276 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3277 tclass = tcc_unary;
3278 else if (tclass == tcc_expression
3279 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3280 tclass = tcc_binary;
3281
3282 switch (tclass)
3283 {
3284 case tcc_unary:
3285 return fold_build1_loc (loc, code, type,
3286 eval_subst (loc, TREE_OPERAND (arg, 0),
3287 old0, new0, old1, new1));
3288
3289 case tcc_binary:
3290 return fold_build2_loc (loc, code, type,
3291 eval_subst (loc, TREE_OPERAND (arg, 0),
3292 old0, new0, old1, new1),
3293 eval_subst (loc, TREE_OPERAND (arg, 1),
3294 old0, new0, old1, new1));
3295
3296 case tcc_expression:
3297 switch (code)
3298 {
3299 case SAVE_EXPR:
3300 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3301 old1, new1);
3302
3303 case COMPOUND_EXPR:
3304 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3305 old1, new1);
3306
3307 case COND_EXPR:
3308 return fold_build3_loc (loc, code, type,
3309 eval_subst (loc, TREE_OPERAND (arg, 0),
3310 old0, new0, old1, new1),
3311 eval_subst (loc, TREE_OPERAND (arg, 1),
3312 old0, new0, old1, new1),
3313 eval_subst (loc, TREE_OPERAND (arg, 2),
3314 old0, new0, old1, new1));
3315 default:
3316 break;
3317 }
3318 /* Fall through - ??? */
3319
3320 case tcc_comparison:
3321 {
3322 tree arg0 = TREE_OPERAND (arg, 0);
3323 tree arg1 = TREE_OPERAND (arg, 1);
3324
3325 /* We need to check both for exact equality and tree equality. The
3326 former will be true if the operand has a side-effect. In that
3327 case, we know the operand occurred exactly once. */
3328
3329 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3330 arg0 = new0;
3331 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3332 arg0 = new1;
3333
3334 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3335 arg1 = new0;
3336 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3337 arg1 = new1;
3338
3339 return fold_build2_loc (loc, code, type, arg0, arg1);
3340 }
3341
3342 default:
3343 return arg;
3344 }
3345 }
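
/* Example (illustrative): eval_subst is used together with
   twoval_comparison_p above.  If ARG is (x < y) && (x == y), with
   CVAL1 == x and CVAL2 == y, then

     eval_subst (loc, arg, x, integer_one_node, y, integer_zero_node)

   evaluates ARG as if X were 1 and Y were 0, folding
   (1 < 0) && (1 == 0) down to constant false.  X and Y are
   hypothetical.  */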
3346 \f
3347 /* Return a tree for the case when the result of an expression is RESULT
3348 converted to TYPE and OMITTED was previously an operand of the expression
3349 but is now not needed (e.g., we folded OMITTED * 0).
3350
3351 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3352 the conversion of RESULT to TYPE. */
3353
3354 tree
3355 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3356 {
3357 tree t = fold_convert_loc (loc, type, result);
3358
3359 /* If the resulting operand is an empty statement, just return the omitted
3360      statement cast to void.  */
3361 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3362 return build1_loc (loc, NOP_EXPR, void_type_node,
3363 fold_ignored_result (omitted));
3364
3365 if (TREE_SIDE_EFFECTS (omitted))
3366 return build2_loc (loc, COMPOUND_EXPR, type,
3367 fold_ignored_result (omitted), t);
3368
3369 return non_lvalue_loc (loc, t);
3370 }
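
/* Example (illustrative; CALL is a hypothetical call with side
   effects): when X * 0 is folded to 0 but X is f (), the call must
   still be evaluated:

     tree t = omit_one_operand_loc (loc, type, integer_zero_node,
				    call);

   T is the COMPOUND_EXPR (f (), 0): evaluate CALL for its side
   effects and yield zero.  */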
3371
3372 /* Return a tree for the case when the result of an expression is RESULT
3373 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3374 of the expression but are now not needed.
3375
3376 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3377 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3378 evaluated before OMITTED2. Otherwise, if neither has side effects,
3379 just do the conversion of RESULT to TYPE. */
3380
3381 tree
3382 omit_two_operands_loc (location_t loc, tree type, tree result,
3383 tree omitted1, tree omitted2)
3384 {
3385 tree t = fold_convert_loc (loc, type, result);
3386
3387 if (TREE_SIDE_EFFECTS (omitted2))
3388 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3389 if (TREE_SIDE_EFFECTS (omitted1))
3390 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3391
3392 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3393 }
3394
3395 \f
3396 /* Return a simplified tree node for the truth-negation of ARG. This
3397 never alters ARG itself. We assume that ARG is an operation that
3398 returns a truth value (0 or 1).
3399
3400 FIXME: one would think we would fold the result, but it causes
3401 problems with the dominator optimizer. */
3402
3403 static tree
3404 fold_truth_not_expr (location_t loc, tree arg)
3405 {
3406 tree type = TREE_TYPE (arg);
3407 enum tree_code code = TREE_CODE (arg);
3408 location_t loc1, loc2;
3409
3410 /* If this is a comparison, we can simply invert it, except for
3411 floating-point non-equality comparisons, in which case we just
3412 enclose a TRUTH_NOT_EXPR around what we have. */
3413
3414 if (TREE_CODE_CLASS (code) == tcc_comparison)
3415 {
3416 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3417 if (FLOAT_TYPE_P (op_type)
3418 && flag_trapping_math
3419 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3420 && code != NE_EXPR && code != EQ_EXPR)
3421 return NULL_TREE;
3422
3423 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3424 if (code == ERROR_MARK)
3425 return NULL_TREE;
3426
3427 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3428 TREE_OPERAND (arg, 1));
3429 }
3430
3431 switch (code)
3432 {
3433 case INTEGER_CST:
3434 return constant_boolean_node (integer_zerop (arg), type);
3435
3436 case TRUTH_AND_EXPR:
3437 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3438 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3439 return build2_loc (loc, TRUTH_OR_EXPR, type,
3440 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3441 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3442
3443 case TRUTH_OR_EXPR:
3444 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3445 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3446 return build2_loc (loc, TRUTH_AND_EXPR, type,
3447 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3448 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3449
3450 case TRUTH_XOR_EXPR:
3451 /* Here we can invert either operand. We invert the first operand
3452 unless the second operand is a TRUTH_NOT_EXPR in which case our
3453 result is the XOR of the first operand with the inside of the
3454 negation of the second operand. */
3455
3456 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3457 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3458 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3459 else
3460 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3461 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3462 TREE_OPERAND (arg, 1));
3463
3464 case TRUTH_ANDIF_EXPR:
3465 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3466 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3467 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3468 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3469 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3470
3471 case TRUTH_ORIF_EXPR:
3472 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3473 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3474 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3475 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3476 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3477
3478 case TRUTH_NOT_EXPR:
3479 return TREE_OPERAND (arg, 0);
3480
3481 case COND_EXPR:
3482 {
3483 tree arg1 = TREE_OPERAND (arg, 1);
3484 tree arg2 = TREE_OPERAND (arg, 2);
3485
3486 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3487 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3488
3489 /* A COND_EXPR may have a throw as one operand, which
3490 then has void type. Just leave void operands
3491 as they are. */
3492 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3493 VOID_TYPE_P (TREE_TYPE (arg1))
3494 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3495 VOID_TYPE_P (TREE_TYPE (arg2))
3496 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3497 }
3498
3499 case COMPOUND_EXPR:
3500 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3501 return build2_loc (loc, COMPOUND_EXPR, type,
3502 TREE_OPERAND (arg, 0),
3503 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3504
3505 case NON_LVALUE_EXPR:
3506 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3507 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3508
3509 CASE_CONVERT:
3510 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3511 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3512
3513 /* ... fall through ... */
3514
3515 case FLOAT_EXPR:
3516 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3517 return build1_loc (loc, TREE_CODE (arg), type,
3518 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3519
3520 case BIT_AND_EXPR:
3521 if (!integer_onep (TREE_OPERAND (arg, 1)))
3522 return NULL_TREE;
3523 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3524
3525 case SAVE_EXPR:
3526 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3527
3528 case CLEANUP_POINT_EXPR:
3529 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3530 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3531 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3532
3533 default:
3534 return NULL_TREE;
3535 }
3536 }
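
/* Example (illustrative): negating a short-circuit AND applies
   De Morgan's law, so for hypothetical truth values A and B the
   TRUTH_ANDIF_EXPR case above rewrites

     ! (a && b)   as   ! a || ! b

   with each operand inverted recursively through
   invert_truthvalue_loc.  */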
3537
3538 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3539 assume that ARG is an operation that returns a truth value (0 or 1
3540 for scalars, 0 or -1 for vectors). Return the folded expression if
3541 folding is successful. Otherwise, return NULL_TREE. */
3542
3543 static tree
3544 fold_invert_truthvalue (location_t loc, tree arg)
3545 {
3546 tree type = TREE_TYPE (arg);
3547 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3548 ? BIT_NOT_EXPR
3549 : TRUTH_NOT_EXPR,
3550 type, arg);
3551 }
3552
3553 /* Return a simplified tree node for the truth-negation of ARG. This
3554 never alters ARG itself. We assume that ARG is an operation that
3555 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3556
3557 tree
3558 invert_truthvalue_loc (location_t loc, tree arg)
3559 {
3560 if (TREE_CODE (arg) == ERROR_MARK)
3561 return arg;
3562
3563 tree type = TREE_TYPE (arg);
3564 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3565 ? BIT_NOT_EXPR
3566 : TRUTH_NOT_EXPR,
3567 type, arg);
3568 }
3569
3570 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3571 operands are another bit-wise operation with a common input. If so,
3572 distribute the bit operations to save an operation and possibly two if
3573 constants are involved. For example, convert
3574 (A | B) & (A | C) into A | (B & C)
3575 Further simplification will occur if B and C are constants.
3576
3577 If this optimization cannot be done, 0 will be returned. */
3578
3579 static tree
3580 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3581 tree arg0, tree arg1)
3582 {
3583 tree common;
3584 tree left, right;
3585
3586 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3587 || TREE_CODE (arg0) == code
3588 || (TREE_CODE (arg0) != BIT_AND_EXPR
3589 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3590 return 0;
3591
3592 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3593 {
3594 common = TREE_OPERAND (arg0, 0);
3595 left = TREE_OPERAND (arg0, 1);
3596 right = TREE_OPERAND (arg1, 1);
3597 }
3598 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3599 {
3600 common = TREE_OPERAND (arg0, 0);
3601 left = TREE_OPERAND (arg0, 1);
3602 right = TREE_OPERAND (arg1, 0);
3603 }
3604 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3605 {
3606 common = TREE_OPERAND (arg0, 1);
3607 left = TREE_OPERAND (arg0, 0);
3608 right = TREE_OPERAND (arg1, 1);
3609 }
3610 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3611 {
3612 common = TREE_OPERAND (arg0, 1);
3613 left = TREE_OPERAND (arg0, 0);
3614 right = TREE_OPERAND (arg1, 0);
3615 }
3616 else
3617 return 0;
3618
3619 common = fold_convert_loc (loc, type, common);
3620 left = fold_convert_loc (loc, type, left);
3621 right = fold_convert_loc (loc, type, right);
3622 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3623 fold_build2_loc (loc, code, type, left, right));
3624 }
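
/* Concrete instance of the transformation above (illustrative; X is
   a hypothetical operand):

     (x | 4) & (x | 6)   ->   x | (4 & 6)

   and constant folding then reduces the right-hand side to x | 4,
   saving two operations.  */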
3625
3626 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3627 with code CODE. This optimization is unsafe. */
3628 static tree
3629 distribute_real_division (location_t loc, enum tree_code code, tree type,
3630 tree arg0, tree arg1)
3631 {
3632 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3633 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3634
3635 /* (A / C) +- (B / C) -> (A +- B) / C. */
3636 if (mul0 == mul1
3637 && operand_equal_p (TREE_OPERAND (arg0, 1),
3638 TREE_OPERAND (arg1, 1), 0))
3639 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3640 fold_build2_loc (loc, code, type,
3641 TREE_OPERAND (arg0, 0),
3642 TREE_OPERAND (arg1, 0)),
3643 TREE_OPERAND (arg0, 1));
3644
3645 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3646 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3647 TREE_OPERAND (arg1, 0), 0)
3648 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3649 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3650 {
3651 REAL_VALUE_TYPE r0, r1;
3652 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3653 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3654 if (!mul0)
3655 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3656 if (!mul1)
3657 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3658 real_arithmetic (&r0, code, &r0, &r1);
3659 return fold_build2_loc (loc, MULT_EXPR, type,
3660 TREE_OPERAND (arg0, 0),
3661 build_real (type, r0));
3662 }
3663
3664 return NULL_TREE;
3665 }
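
/* Example of the second transformation above (illustrative; X is a
   hypothetical operand):

     x / 2.0 + x / 4.0   ->   x * (1/2.0 + 1/4.0)   ==   x * 0.75

   This changes rounding and may overflow differently, which is why
   the comment above calls the optimization unsafe; callers are
   expected to apply it only when unsafe math optimizations are
   enabled.  */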
3666 \f
3667 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3668 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3669
3670 static tree
3671 make_bit_field_ref (location_t loc, tree inner, tree type,
3672 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3673 {
3674 tree result, bftype;
3675
3676 if (bitpos == 0)
3677 {
3678 tree size = TYPE_SIZE (TREE_TYPE (inner));
3679 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3680 || POINTER_TYPE_P (TREE_TYPE (inner)))
3681 && tree_fits_shwi_p (size)
3682 && tree_to_shwi (size) == bitsize)
3683 return fold_convert_loc (loc, type, inner);
3684 }
3685
3686 bftype = type;
3687 if (TYPE_PRECISION (bftype) != bitsize
3688 || TYPE_UNSIGNED (bftype) == !unsignedp)
3689 bftype = build_nonstandard_integer_type (bitsize, 0);
3690
3691 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3692 size_int (bitsize), bitsize_int (bitpos));
3693
3694 if (bftype != type)
3695 result = fold_convert_loc (loc, type, result);
3696
3697 return result;
3698 }
3699
3700 /* Optimize a bit-field compare.
3701
3702 There are two cases: First is a compare against a constant and the
3703 second is a comparison of two items where the fields are at the same
3704 bit position relative to the start of a chunk (byte, halfword, word)
3705 large enough to contain it. In these cases we can avoid the shift
3706 implicit in bitfield extractions.
3707
3708 For constants, we emit a compare of the shifted constant with the
3709 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3710 compared. For two fields at the same position, we do the ANDs with the
3711 similar mask and compare the result of the ANDs.
3712
3713 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3714 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3715 are the left and right operands of the comparison, respectively.
3716
3717 If the optimization described above can be done, we return the resulting
3718 tree. Otherwise we return zero. */
3719
3720 static tree
3721 optimize_bit_field_compare (location_t loc, enum tree_code code,
3722 tree compare_type, tree lhs, tree rhs)
3723 {
3724 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3725 tree type = TREE_TYPE (lhs);
3726 tree unsigned_type;
3727 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3728 machine_mode lmode, rmode, nmode;
3729 int lunsignedp, runsignedp;
3730 int lvolatilep = 0, rvolatilep = 0;
3731 tree linner, rinner = NULL_TREE;
3732 tree mask;
3733 tree offset;
3734
3735 /* Get all the information about the extractions being done. If the bit size
3736 is the same as the size of the underlying object, we aren't doing an
3737 extraction at all and so can do nothing. We also don't want to
3738 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3739 then will no longer be able to replace it. */
3740 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3741 &lunsignedp, &lvolatilep, false);
3742 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3743 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3744 return 0;
3745
3746 if (!const_p)
3747 {
3748 /* If this is not a constant, we can only do something if bit positions,
3749 sizes, and signedness are the same. */
3750 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3751 &runsignedp, &rvolatilep, false);
3752
3753 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3754 || lunsignedp != runsignedp || offset != 0
3755 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3756 return 0;
3757 }
3758
3759 /* See if we can find a mode to refer to this field. We should be able to,
3760 but fail if we can't. */
3761 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3762 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3763 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3764 TYPE_ALIGN (TREE_TYPE (rinner))),
3765 word_mode, false);
3766 if (nmode == VOIDmode)
3767 return 0;
3768
3769 /* Set signed and unsigned types of the precision of this mode for the
3770 shifts below. */
3771 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3772
3773 /* Compute the bit position and size for the new reference and our offset
3774 within it. If the new reference is the same size as the original, we
3775 won't optimize anything, so return zero. */
3776 nbitsize = GET_MODE_BITSIZE (nmode);
3777 nbitpos = lbitpos & ~ (nbitsize - 1);
3778 lbitpos -= nbitpos;
3779 if (nbitsize == lbitsize)
3780 return 0;
3781
3782 if (BYTES_BIG_ENDIAN)
3783 lbitpos = nbitsize - lbitsize - lbitpos;
3784
3785 /* Make the mask to be used against the extracted field. */
3786 mask = build_int_cst_type (unsigned_type, -1);
3787 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3788 mask = const_binop (RSHIFT_EXPR, mask,
3789 size_int (nbitsize - lbitsize - lbitpos));
3790
3791 if (! const_p)
3792 /* If not comparing with constant, just rework the comparison
3793 and return. */
3794 return fold_build2_loc (loc, code, compare_type,
3795 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3796 make_bit_field_ref (loc, linner,
3797 unsigned_type,
3798 nbitsize, nbitpos,
3799 1),
3800 mask),
3801 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3802 make_bit_field_ref (loc, rinner,
3803 unsigned_type,
3804 nbitsize, nbitpos,
3805 1),
3806 mask));
3807
3808 /* Otherwise, we are handling the constant case. See if the constant is too
3809 big for the field. Warn and return a tree for 0 (false) if so. We do
3810 this not only for its own sake, but to avoid having to test for this
3811 error case below. If we didn't, we might generate wrong code.
3812
3813 For unsigned fields, the constant shifted right by the field length should
3814 be all zero. For signed fields, the high-order bits should agree with
3815 the sign bit. */
3816
3817 if (lunsignedp)
3818 {
3819 if (wi::lrshift (rhs, lbitsize) != 0)
3820 {
3821 warning (0, "comparison is always %d due to width of bit-field",
3822 code == NE_EXPR);
3823 return constant_boolean_node (code == NE_EXPR, compare_type);
3824 }
3825 }
3826 else
3827 {
3828 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3829 if (tem != 0 && tem != -1)
3830 {
3831 warning (0, "comparison is always %d due to width of bit-field",
3832 code == NE_EXPR);
3833 return constant_boolean_node (code == NE_EXPR, compare_type);
3834 }
3835 }
3836
3837 /* Single-bit compares should always be against zero. */
3838 if (lbitsize == 1 && ! integer_zerop (rhs))
3839 {
3840 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3841 rhs = build_int_cst (type, 0);
3842 }
3843
3844 /* Make a new bitfield reference, shift the constant over the
3845 appropriate number of bits and mask it with the computed mask
3846 (in case this was a signed field). If we changed it, make a new one. */
3847 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3848
3849 rhs = const_binop (BIT_AND_EXPR,
3850 const_binop (LSHIFT_EXPR,
3851 fold_convert_loc (loc, unsigned_type, rhs),
3852 size_int (lbitpos)),
3853 mask);
3854
3855 lhs = build2_loc (loc, code, compare_type,
3856 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3857 return lhs;
3858 }
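
/* A hedged sketch of the result, using an invented little-endian
   layout:

       struct s { unsigned f : 3; unsigned g : 5; } x;

   the test x.g == 21 can be folded to, roughly,

       (BIT_FIELD_REF <x, 8, 0> & 0xf8) == (21 << 3)

   one byte load, one AND, and a compare against the pre-shifted
   constant, instead of a shift to extract G first. The exact container
   size, mask and shift depend on endianness and alignment. */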
3859 \f
3860 /* Subroutine for fold_truth_andor_1: decode a field reference.
3861
3862 If EXP is a comparison reference, we return the innermost reference.
3863
3864 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3865 set to the starting bit number.
3866
3867 If the innermost field can be completely contained in a mode-sized
3868 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3869
3870 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3871 otherwise it is not changed.
3872
3873 *PUNSIGNEDP is set to the signedness of the field.
3874
3875 *PMASK is set to the mask used. This is either contained in a
3876 BIT_AND_EXPR or derived from the width of the field.
3877
3878 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3879
3880 Return 0 if this is not a component reference or is one that we can't
3881 do anything with. */
3882
3883 static tree
3884 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3885 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3886 int *punsignedp, int *pvolatilep,
3887 tree *pmask, tree *pand_mask)
3888 {
3889 tree outer_type = 0;
3890 tree and_mask = 0;
3891 tree mask, inner, offset;
3892 tree unsigned_type;
3893 unsigned int precision;
3894
3895 /* All the optimizations using this function assume integer fields.
3896 There are problems with FP fields since the type_for_size call
3897 below can fail for, e.g., XFmode. */
3898 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3899 return 0;
3900
3901 /* We are interested in the bare arrangement of bits, so strip everything
3902 that doesn't affect the machine mode. However, record the type of the
3903 outermost expression if it may matter below. */
3904 if (CONVERT_EXPR_P (exp)
3905 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3906 outer_type = TREE_TYPE (exp);
3907 STRIP_NOPS (exp);
3908
3909 if (TREE_CODE (exp) == BIT_AND_EXPR)
3910 {
3911 and_mask = TREE_OPERAND (exp, 1);
3912 exp = TREE_OPERAND (exp, 0);
3913 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3914 if (TREE_CODE (and_mask) != INTEGER_CST)
3915 return 0;
3916 }
3917
3918 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3919 punsignedp, pvolatilep, false);
3920 if ((inner == exp && and_mask == 0)
3921 || *pbitsize < 0 || offset != 0
3922 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3923 return 0;
3924
3925 /* If the number of bits in the reference is the same as the bitsize of
3926 the outer type, then the outer type gives the signedness. Otherwise
3927 (in case of a small bitfield) the signedness is unchanged. */
3928 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3929 *punsignedp = TYPE_UNSIGNED (outer_type);
3930
3931 /* Compute the mask to access the bitfield. */
3932 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3933 precision = TYPE_PRECISION (unsigned_type);
3934
3935 mask = build_int_cst_type (unsigned_type, -1);
3936
3937 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3938 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3939
3940 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3941 if (and_mask != 0)
3942 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3943 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3944
3945 *pmask = mask;
3946 *pand_mask = and_mask;
3947 return inner;
3948 }
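
/* Illustration (hypothetical field): for EXP of the form

       (unsigned char) p->x & 0x0f

   the returned tree is the underlying reference to p->x, *PBITSIZE and
   *PBITPOS describe where the field lives, *PAND_MASK is 0x0f, and
   *PMASK is the field-width mask merged with it. */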
3949
3950 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3951 bit positions and the type of MASK is signed. */
3952
3953 static int
3954 all_ones_mask_p (const_tree mask, unsigned int size)
3955 {
3956 tree type = TREE_TYPE (mask);
3957 unsigned int precision = TYPE_PRECISION (type);
3958
3959 /* If this function returns true when the type of the mask is
3960 UNSIGNED, then there will be errors. In particular see
3961 gcc.c-torture/execute/990326-1.c. There does not appear to be
3962 any documentation paper trail as to why this is so. But the pre
3963 wide-int worked with that restriction and it has been preserved
3964 here. */
3965 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3966 return false;
3967
3968 return wi::mask (size, false, precision) == mask;
3969 }
3970
3971 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3972 represents the sign bit of EXP's type. If EXP represents a sign
3973 or zero extension, also test VAL against the unextended type.
3974 The return value is the (sub)expression whose sign bit is VAL,
3975 or NULL_TREE otherwise. */
3976
3977 tree
3978 sign_bit_p (tree exp, const_tree val)
3979 {
3980 int width;
3981 tree t;
3982
3983 /* Tree EXP must have an integral type. */
3984 t = TREE_TYPE (exp);
3985 if (! INTEGRAL_TYPE_P (t))
3986 return NULL_TREE;
3987
3988 /* Tree VAL must be an integer constant. */
3989 if (TREE_CODE (val) != INTEGER_CST
3990 || TREE_OVERFLOW (val))
3991 return NULL_TREE;
3992
3993 width = TYPE_PRECISION (t);
3994 if (wi::only_sign_bit_p (val, width))
3995 return exp;
3996
3997 /* Handle extension from a narrower type. */
3998 if (TREE_CODE (exp) == NOP_EXPR
3999 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4000 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4001
4002 return NULL_TREE;
4003 }
4004
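/* Illustration: for a 32-bit signed X and VAL == -2147483648 (only the
   sign bit set), this returns X, which is what lets callers fold tests
   such as (X & 0x80000000) != 0 into X < 0. The NOP_EXPR case extends
   this to the sign bit of a narrower operand that was widened. */
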
4005 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4006 to be evaluated unconditionally. */
4007
4008 static int
4009 simple_operand_p (const_tree exp)
4010 {
4011 /* Strip any conversions that don't change the machine mode. */
4012 STRIP_NOPS (exp);
4013
4014 return (CONSTANT_CLASS_P (exp)
4015 || TREE_CODE (exp) == SSA_NAME
4016 || (DECL_P (exp)
4017 && ! TREE_ADDRESSABLE (exp)
4018 && ! TREE_THIS_VOLATILE (exp)
4019 && ! DECL_NONLOCAL (exp)
4020 /* Don't regard global variables as simple. They may be
4021 allocated in ways unknown to the compiler (shared memory,
4022 #pragma weak, etc). */
4023 && ! TREE_PUBLIC (exp)
4024 && ! DECL_EXTERNAL (exp)
4025 /* Weakrefs are not safe to be read, since they can be NULL.
4026 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4027 have DECL_WEAK flag set. */
4028 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4029 /* Loading a static variable is unduly expensive, but global
4030 registers aren't expensive. */
4031 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4032 }
4033
4034 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4035 to be evaluated unconditionally.
4036 In addition to simple_operand_p, we assume that comparisons, conversions,
4037 and logic-not operations are simple if their operands are simple, too.
4038
4039 static bool
4040 simple_operand_p_2 (tree exp)
4041 {
4042 enum tree_code code;
4043
4044 if (TREE_SIDE_EFFECTS (exp)
4045 || tree_could_trap_p (exp))
4046 return false;
4047
4048 while (CONVERT_EXPR_P (exp))
4049 exp = TREE_OPERAND (exp, 0);
4050
4051 code = TREE_CODE (exp);
4052
4053 if (TREE_CODE_CLASS (code) == tcc_comparison)
4054 return (simple_operand_p (TREE_OPERAND (exp, 0))
4055 && simple_operand_p (TREE_OPERAND (exp, 1)));
4056
4057 if (code == TRUTH_NOT_EXPR)
4058 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4059
4060 return simple_operand_p (exp);
4061 }
4062
4063 \f
4064 /* The following functions are subroutines to fold_range_test and allow it to
4065 try to change a logical combination of comparisons into a range test.
4066
4067 For example, both
4068 X == 2 || X == 3 || X == 4 || X == 5
4069 and
4070 X >= 2 && X <= 5
4071 are converted to
4072 (unsigned) (X - 2) <= 3
4073
4074 We describe each set of comparisons as being either inside or outside
4075 a range, using a variable named like IN_P, and then describe the
4076 range with a lower and upper bound. If one of the bounds is omitted,
4077 it represents either the highest or lowest value of the type.
4078
4079 In the comments below, we represent a range by two numbers in brackets
4080 preceded by a "+" to designate being inside that range, or a "-" to
4081 designate being outside that range, so the condition can be inverted by
4082 flipping the prefix. An omitted bound is represented by a "-". For
4083 example, "- [-, 10]" means being outside the range starting at the lowest
4084 possible value and ending at 10, in other words, being greater than 10.
4085 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4086 always false.
4087
4088 We set up things so that the missing bounds are handled in a consistent
4089 manner so neither a missing bound nor "true" and "false" need to be
4090 handled using a special case. */
4091
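/* The unsigned subtraction in the example works because values below
   the lower bound wrap to very large unsigned numbers: for X == 1,
   (unsigned) (X - 2) is 0xffffffff > 3, and for X == 6 it is 4 > 3, so
   the single compare accepts exactly X in [2, 5]. */
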
4092 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4093 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4094 and UPPER1_P are nonzero if the respective argument is an upper bound
4095 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4096 must be specified for a comparison. ARG1 will be converted to ARG0's
4097 type if both are specified. */
4098
4099 static tree
4100 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4101 tree arg1, int upper1_p)
4102 {
4103 tree tem;
4104 int result;
4105 int sgn0, sgn1;
4106
4107 /* If neither arg represents infinity, do the normal operation.
4108 Else, if not a comparison, return infinity. Else handle the special
4109 comparison rules. Note that most of the cases below won't occur, but
4110 are handled for consistency. */
4111
4112 if (arg0 != 0 && arg1 != 0)
4113 {
4114 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4115 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4116 STRIP_NOPS (tem);
4117 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4118 }
4119
4120 if (TREE_CODE_CLASS (code) != tcc_comparison)
4121 return 0;
4122
4123 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4124 for neither. In real maths, we cannot assume open ended ranges are
4125 the same. But, this is computer arithmetic, where numbers are finite.
4126 We can therefore make the transformation of any unbounded range with
4127 the value Z, Z being greater than any representable number. This permits
4128 us to treat unbounded ranges as equal. */
4129 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4130 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4131 switch (code)
4132 {
4133 case EQ_EXPR:
4134 result = sgn0 == sgn1;
4135 break;
4136 case NE_EXPR:
4137 result = sgn0 != sgn1;
4138 break;
4139 case LT_EXPR:
4140 result = sgn0 < sgn1;
4141 break;
4142 case LE_EXPR:
4143 result = sgn0 <= sgn1;
4144 break;
4145 case GT_EXPR:
4146 result = sgn0 > sgn1;
4147 break;
4148 case GE_EXPR:
4149 result = sgn0 >= sgn1;
4150 break;
4151 default:
4152 gcc_unreachable ();
4153 }
4154
4155 return constant_boolean_node (result, type);
4156 }
4157 \f
4158 /* Helper routine for make_range. Perform one step for it, return
4159 new expression if the loop should continue or NULL_TREE if it should
4160 stop. */
4161
4162 tree
4163 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4164 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4165 bool *strict_overflow_p)
4166 {
4167 tree arg0_type = TREE_TYPE (arg0);
4168 tree n_low, n_high, low = *p_low, high = *p_high;
4169 int in_p = *p_in_p, n_in_p;
4170
4171 switch (code)
4172 {
4173 case TRUTH_NOT_EXPR:
4174 /* We can only do something if the range is testing for zero. */
4175 if (low == NULL_TREE || high == NULL_TREE
4176 || ! integer_zerop (low) || ! integer_zerop (high))
4177 return NULL_TREE;
4178 *p_in_p = ! in_p;
4179 return arg0;
4180
4181 case EQ_EXPR: case NE_EXPR:
4182 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4183 /* We can only do something if the range is testing for zero
4184 and if the second operand is an integer constant. Note that
4185 saying something is "in" the range we make is done by
4186 complementing IN_P, since it is set in the initial case of
4187 being not equal to zero; "out" is leaving it alone. */
4188 if (low == NULL_TREE || high == NULL_TREE
4189 || ! integer_zerop (low) || ! integer_zerop (high)
4190 || TREE_CODE (arg1) != INTEGER_CST)
4191 return NULL_TREE;
4192
4193 switch (code)
4194 {
4195 case NE_EXPR: /* - [c, c] */
4196 low = high = arg1;
4197 break;
4198 case EQ_EXPR: /* + [c, c] */
4199 in_p = ! in_p, low = high = arg1;
4200 break;
4201 case GT_EXPR: /* - [-, c] */
4202 low = 0, high = arg1;
4203 break;
4204 case GE_EXPR: /* + [c, -] */
4205 in_p = ! in_p, low = arg1, high = 0;
4206 break;
4207 case LT_EXPR: /* - [c, -] */
4208 low = arg1, high = 0;
4209 break;
4210 case LE_EXPR: /* + [-, c] */
4211 in_p = ! in_p, low = 0, high = arg1;
4212 break;
4213 default:
4214 gcc_unreachable ();
4215 }
4216
4217 /* If this is an unsigned comparison, we also know that EXP is
4218 greater than or equal to zero. We base the range tests we make
4219 on that fact, so we record it here so we can parse existing
4220 range tests. We test arg0_type since often the return type
4221 of, e.g. EQ_EXPR, is boolean. */
4222 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4223 {
4224 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4225 in_p, low, high, 1,
4226 build_int_cst (arg0_type, 0),
4227 NULL_TREE))
4228 return NULL_TREE;
4229
4230 in_p = n_in_p, low = n_low, high = n_high;
4231
4232 /* If the high bound is missing, but we have a nonzero low
4233 bound, reverse the range so it goes from zero to the low bound
4234 minus 1. */
4235 if (high == 0 && low && ! integer_zerop (low))
4236 {
4237 in_p = ! in_p;
4238 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4239 build_int_cst (TREE_TYPE (low), 1), 0);
4240 low = build_int_cst (arg0_type, 0);
4241 }
4242 }
4243
4244 *p_low = low;
4245 *p_high = high;
4246 *p_in_p = in_p;
4247 return arg0;
4248
4249 case NEGATE_EXPR:
4250 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4251 low and high are non-NULL, then normalize will DTRT. */
4252 if (!TYPE_UNSIGNED (arg0_type)
4253 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4254 {
4255 if (low == NULL_TREE)
4256 low = TYPE_MIN_VALUE (arg0_type);
4257 if (high == NULL_TREE)
4258 high = TYPE_MAX_VALUE (arg0_type);
4259 }
4260
4261 /* (-x) IN [a,b] -> x in [-b, -a] */
4262 n_low = range_binop (MINUS_EXPR, exp_type,
4263 build_int_cst (exp_type, 0),
4264 0, high, 1);
4265 n_high = range_binop (MINUS_EXPR, exp_type,
4266 build_int_cst (exp_type, 0),
4267 0, low, 0);
4268 if (n_high != 0 && TREE_OVERFLOW (n_high))
4269 return NULL_TREE;
4270 goto normalize;
4271
4272 case BIT_NOT_EXPR:
4273 /* ~ X -> -X - 1 */
4274 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4275 build_int_cst (exp_type, 1));
4276
4277 case PLUS_EXPR:
4278 case MINUS_EXPR:
4279 if (TREE_CODE (arg1) != INTEGER_CST)
4280 return NULL_TREE;
4281
4282 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4283 move a constant to the other side. */
4284 if (!TYPE_UNSIGNED (arg0_type)
4285 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4286 return NULL_TREE;
4287
4288 /* If EXP is signed, any overflow in the computation is undefined,
4289 so we don't worry about it so long as our computations on
4290 the bounds don't overflow. For unsigned, overflow is defined
4291 and this is exactly the right thing. */
4292 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4293 arg0_type, low, 0, arg1, 0);
4294 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4295 arg0_type, high, 1, arg1, 0);
4296 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4297 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4298 return NULL_TREE;
4299
4300 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4301 *strict_overflow_p = true;
4302
4303 normalize:
4304 /* Check for an unsigned range which has wrapped around the maximum
4305 value thus making n_high < n_low, and normalize it. */
4306 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4307 {
4308 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4309 build_int_cst (TREE_TYPE (n_high), 1), 0);
4310 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4311 build_int_cst (TREE_TYPE (n_low), 1), 0);
4312
4313 /* If the range is of the form +/- [ x+1, x ], we won't
4314 be able to normalize it. But then, it represents the
4315 whole range or the empty set, so make it
4316 +/- [ -, - ]. */
4317 if (tree_int_cst_equal (n_low, low)
4318 && tree_int_cst_equal (n_high, high))
4319 low = high = 0;
4320 else
4321 in_p = ! in_p;
4322 }
4323 else
4324 low = n_low, high = n_high;
4325
4326 *p_low = low;
4327 *p_high = high;
4328 *p_in_p = in_p;
4329 return arg0;
4330
4331 CASE_CONVERT:
4332 case NON_LVALUE_EXPR:
4333 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4334 return NULL_TREE;
4335
4336 if (! INTEGRAL_TYPE_P (arg0_type)
4337 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4338 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4339 return NULL_TREE;
4340
4341 n_low = low, n_high = high;
4342
4343 if (n_low != 0)
4344 n_low = fold_convert_loc (loc, arg0_type, n_low);
4345
4346 if (n_high != 0)
4347 n_high = fold_convert_loc (loc, arg0_type, n_high);
4348
4349 /* If we're converting arg0 from an unsigned type to the signed
4350 type of exp, we will be doing the comparison as unsigned.
4351 The tests above have already verified that LOW and HIGH
4352 are both positive.
4353
4354 So we have to ensure that we will handle large unsigned
4355 values the same way that the current signed bounds treat
4356 negative values. */
4357
4358 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4359 {
4360 tree high_positive;
4361 tree equiv_type;
4362 /* For fixed-point modes, we need to pass the saturating flag
4363 as the 2nd parameter. */
4364 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4365 equiv_type
4366 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4367 TYPE_SATURATING (arg0_type));
4368 else
4369 equiv_type
4370 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4371
4372 /* A range without an upper bound is, naturally, unbounded.
4373 Since convert would have cropped a very large value, use
4374 the max value for the destination type. */
4375 high_positive
4376 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4377 : TYPE_MAX_VALUE (arg0_type);
4378
4379 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4380 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4381 fold_convert_loc (loc, arg0_type,
4382 high_positive),
4383 build_int_cst (arg0_type, 1));
4384
4385 /* If the low bound is specified, "and" the range with the
4386 range for which the original unsigned value will be
4387 positive. */
4388 if (low != 0)
4389 {
4390 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4391 1, fold_convert_loc (loc, arg0_type,
4392 integer_zero_node),
4393 high_positive))
4394 return NULL_TREE;
4395
4396 in_p = (n_in_p == in_p);
4397 }
4398 else
4399 {
4400 /* Otherwise, "or" the range with the range of the input
4401 that will be interpreted as negative. */
4402 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4403 1, fold_convert_loc (loc, arg0_type,
4404 integer_zero_node),
4405 high_positive))
4406 return NULL_TREE;
4407
4408 in_p = (in_p != n_in_p);
4409 }
4410 }
4411
4412 *p_low = n_low;
4413 *p_high = n_high;
4414 *p_in_p = in_p;
4415 return arg0;
4416
4417 default:
4418 return NULL_TREE;
4419 }
4420 }
4421
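/* One step in isolation (hedged example): with CODE == PLUS_EXPR,
   ARG0 == X, ARG1 == 3 and an incoming range + [10, 20], the bounds
   are shifted the other way to give + [7, 17] on X, i.e. X + 3 is in
   [10, 20] exactly when X is in [7, 17], modulo the overflow checks
   above. */
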
4422 /* Given EXP, a logical expression, set the range it is testing into
4423 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4424 actually being tested. *PLOW and *PHIGH will be made of the same
4425 type as the returned expression. If EXP is not a comparison, we
4426 will most likely not be returning a useful value and range. Set
4427 *STRICT_OVERFLOW_P to true if the return value is only valid
4428 because signed overflow is undefined; otherwise, do not change
4429 *STRICT_OVERFLOW_P. */
4430
4431 tree
4432 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4433 bool *strict_overflow_p)
4434 {
4435 enum tree_code code;
4436 tree arg0, arg1 = NULL_TREE;
4437 tree exp_type, nexp;
4438 int in_p;
4439 tree low, high;
4440 location_t loc = EXPR_LOCATION (exp);
4441
4442 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4443 and see if we can refine the range. Some of the cases handled by
4444 make_range_step may not happen, but it doesn't seem worth worrying
4445 about this. We keep looping while make_range_step can refine the
4446 range, and stop once it returns NULL_TREE. */
4447
4448 in_p = 0;
4449 low = high = build_int_cst (TREE_TYPE (exp), 0);
4450
4451 while (1)
4452 {
4453 code = TREE_CODE (exp);
4454 exp_type = TREE_TYPE (exp);
4455 arg0 = NULL_TREE;
4456
4457 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4458 {
4459 if (TREE_OPERAND_LENGTH (exp) > 0)
4460 arg0 = TREE_OPERAND (exp, 0);
4461 if (TREE_CODE_CLASS (code) == tcc_binary
4462 || TREE_CODE_CLASS (code) == tcc_comparison
4463 || (TREE_CODE_CLASS (code) == tcc_expression
4464 && TREE_OPERAND_LENGTH (exp) > 1))
4465 arg1 = TREE_OPERAND (exp, 1);
4466 }
4467 if (arg0 == NULL_TREE)
4468 break;
4469
4470 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4471 &high, &in_p, strict_overflow_p);
4472 if (nexp == NULL_TREE)
4473 break;
4474 exp = nexp;
4475 }
4476
4477 /* If EXP is a constant, we can evaluate whether this is true or false. */
4478 if (TREE_CODE (exp) == INTEGER_CST)
4479 {
4480 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4481 exp, 0, low, 0))
4482 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4483 exp, 1, high, 1)));
4484 low = high = 0;
4485 exp = 0;
4486 }
4487
4488 *pin_p = in_p, *plow = low, *phigh = high;
4489 return exp;
4490 }
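
/* Example of the whole walk (illustrative): for EXP == (X > 10) with X
   unsigned, we start from "EXP != 0", i.e. - [0, 0]; the GT_EXPR step
   yields - [-, 10], and the unsignedness adjustment normalizes this to
   - [0, 10] on X. Enclosing casts, +/- constants or negations would
   refine the same IN_P/LOW/HIGH triple further. */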
4491 \f
4492 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4493 type, TYPE, return an expression to test if EXP is in (or out of, depending
4494 on IN_P) the range. Return 0 if the test couldn't be created. */
4495
4496 tree
4497 build_range_check (location_t loc, tree type, tree exp, int in_p,
4498 tree low, tree high)
4499 {
4500 tree etype = TREE_TYPE (exp), value;
4501
4502 #ifdef HAVE_canonicalize_funcptr_for_compare
4503 /* Disable this optimization for function pointer expressions
4504 on targets that require function pointer canonicalization. */
4505 if (HAVE_canonicalize_funcptr_for_compare
4506 && TREE_CODE (etype) == POINTER_TYPE
4507 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4508 return NULL_TREE;
4509 #endif
4510
4511 if (! in_p)
4512 {
4513 value = build_range_check (loc, type, exp, 1, low, high);
4514 if (value != 0)
4515 return invert_truthvalue_loc (loc, value);
4516
4517 return 0;
4518 }
4519
4520 if (low == 0 && high == 0)
4521 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4522
4523 if (low == 0)
4524 return fold_build2_loc (loc, LE_EXPR, type, exp,
4525 fold_convert_loc (loc, etype, high));
4526
4527 if (high == 0)
4528 return fold_build2_loc (loc, GE_EXPR, type, exp,
4529 fold_convert_loc (loc, etype, low));
4530
4531 if (operand_equal_p (low, high, 0))
4532 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4533 fold_convert_loc (loc, etype, low));
4534
4535 if (integer_zerop (low))
4536 {
4537 if (! TYPE_UNSIGNED (etype))
4538 {
4539 etype = unsigned_type_for (etype);
4540 high = fold_convert_loc (loc, etype, high);
4541 exp = fold_convert_loc (loc, etype, exp);
4542 }
4543 return build_range_check (loc, type, exp, 1, 0, high);
4544 }
4545
4546 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4547 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4548 {
4549 int prec = TYPE_PRECISION (etype);
4550
4551 if (wi::mask (prec - 1, false, prec) == high)
4552 {
4553 if (TYPE_UNSIGNED (etype))
4554 {
4555 tree signed_etype = signed_type_for (etype);
4556 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4557 etype
4558 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4559 else
4560 etype = signed_etype;
4561 exp = fold_convert_loc (loc, etype, exp);
4562 }
4563 return fold_build2_loc (loc, GT_EXPR, type, exp,
4564 build_int_cst (etype, 0));
4565 }
4566 }
4567
4568 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4569 This requires wrap-around arithmetic for the type of the expression.
4570 First make sure that arithmetic in this type is valid, then make sure
4571 that it wraps around. */
4572 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4573 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4574 TYPE_UNSIGNED (etype));
4575
4576 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4577 {
4578 tree utype, minv, maxv;
4579
4580 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4581 for the type in question, as we rely on this here. */
4582 utype = unsigned_type_for (etype);
4583 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4584 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4585 build_int_cst (TREE_TYPE (maxv), 1), 1);
4586 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4587
4588 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4589 minv, 1, maxv, 1)))
4590 etype = utype;
4591 else
4592 return 0;
4593 }
4594
4595 high = fold_convert_loc (loc, etype, high);
4596 low = fold_convert_loc (loc, etype, low);
4597 exp = fold_convert_loc (loc, etype, exp);
4598
4599 value = const_binop (MINUS_EXPR, high, low);
4600
4601
4602 if (POINTER_TYPE_P (etype))
4603 {
4604 if (value != 0 && !TREE_OVERFLOW (value))
4605 {
4606 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4607 return build_range_check (loc, type,
4608 fold_build_pointer_plus_loc (loc, exp, low),
4609 1, build_int_cst (etype, 0), value);
4610 }
4611 return 0;
4612 }
4613
4614 if (value != 0 && !TREE_OVERFLOW (value))
4615 return build_range_check (loc, type,
4616 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4617 1, build_int_cst (etype, 0), value);
4618
4619 return 0;
4620 }
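
/* End-to-end example (hedged): a request for + [2, 5] on a 32-bit
   unsigned X falls through to the final MINUS_EXPR path and recurses
   into the integer_zerop (low) case, producing X - 2 <= 3 in the
   unsigned type, i.e. the (unsigned) (X - 2) <= 3 form from the
   range-test overview above. */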
4621 \f
4622 /* Return the predecessor of VAL in its type, handling the infinite case. */
4623
4624 static tree
4625 range_predecessor (tree val)
4626 {
4627 tree type = TREE_TYPE (val);
4628
4629 if (INTEGRAL_TYPE_P (type)
4630 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4631 return 0;
4632 else
4633 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4634 build_int_cst (TREE_TYPE (val), 1), 0);
4635 }
4636
4637 /* Return the successor of VAL in its type, handling the infinite case. */
4638
4639 static tree
4640 range_successor (tree val)
4641 {
4642 tree type = TREE_TYPE (val);
4643
4644 if (INTEGRAL_TYPE_P (type)
4645 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4646 return 0;
4647 else
4648 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4649 build_int_cst (TREE_TYPE (val), 1), 0);
4650 }
4651
4652 /* Given two ranges, see if we can merge them into one. Return 1 if we
4653 can, 0 if we can't. Set the output range into the specified parameters. */
4654
4655 bool
4656 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4657 tree high0, int in1_p, tree low1, tree high1)
4658 {
4659 int no_overlap;
4660 int subset;
4661 int temp;
4662 tree tem;
4663 int in_p;
4664 tree low, high;
4665 int lowequal = ((low0 == 0 && low1 == 0)
4666 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4667 low0, 0, low1, 0)));
4668 int highequal = ((high0 == 0 && high1 == 0)
4669 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4670 high0, 1, high1, 1)));
4671
4672 /* Make range 0 be the range that starts first, or ends last if they
4673 start at the same value. Swap them if it isn't. */
4674 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4675 low0, 0, low1, 0))
4676 || (lowequal
4677 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4678 high1, 1, high0, 1))))
4679 {
4680 temp = in0_p, in0_p = in1_p, in1_p = temp;
4681 tem = low0, low0 = low1, low1 = tem;
4682 tem = high0, high0 = high1, high1 = tem;
4683 }
4684
4685 /* Now flag two cases, whether the ranges are disjoint or whether the
4686 second range is totally subsumed in the first. Note that the tests
4687 below are simplified by the ones above. */
4688 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4689 high0, 1, low1, 0));
4690 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4691 high1, 1, high0, 1));
4692
4693 /* We now have four cases, depending on whether we are including or
4694 excluding the two ranges. */
4695 if (in0_p && in1_p)
4696 {
4697 /* If they don't overlap, the result is false. If the second range
4698 is a subset it is the result. Otherwise, the range is from the start
4699 of the second to the end of the first. */
4700 if (no_overlap)
4701 in_p = 0, low = high = 0;
4702 else if (subset)
4703 in_p = 1, low = low1, high = high1;
4704 else
4705 in_p = 1, low = low1, high = high0;
4706 }
4707
4708 else if (in0_p && ! in1_p)
4709 {
4710 /* If they don't overlap, the result is the first range. If they are
4711 equal, the result is false. If the second range is a subset of the
4712 first, and the ranges begin at the same place, we go from just after
4713 the end of the second range to the end of the first. If the second
4714 range is not a subset of the first, or if it is a subset and both
4715 ranges end at the same place, the range starts at the start of the
4716 first range and ends just before the second range.
4717 Otherwise, we can't describe this as a single range. */
4718 if (no_overlap)
4719 in_p = 1, low = low0, high = high0;
4720 else if (lowequal && highequal)
4721 in_p = 0, low = high = 0;
4722 else if (subset && lowequal)
4723 {
4724 low = range_successor (high1);
4725 high = high0;
4726 in_p = 1;
4727 if (low == 0)
4728 {
4729 /* We are in the weird situation where high0 > high1 but
4730 high1 has no successor. Punt. */
4731 return 0;
4732 }
4733 }
4734 else if (! subset || highequal)
4735 {
4736 low = low0;
4737 high = range_predecessor (low1);
4738 in_p = 1;
4739 if (high == 0)
4740 {
4741 /* low0 < low1 but low1 has no predecessor. Punt. */
4742 return 0;
4743 }
4744 }
4745 else
4746 return 0;
4747 }
4748
4749 else if (! in0_p && in1_p)
4750 {
4751 /* If they don't overlap, the result is the second range. If the second
4752 is a subset of the first, the result is false. Otherwise,
4753 the range starts just after the first range and ends at the
4754 end of the second. */
4755 if (no_overlap)
4756 in_p = 1, low = low1, high = high1;
4757 else if (subset || highequal)
4758 in_p = 0, low = high = 0;
4759 else
4760 {
4761 low = range_successor (high0);
4762 high = high1;
4763 in_p = 1;
4764 if (low == 0)
4765 {
4766 /* high1 > high0 but high0 has no successor. Punt. */
4767 return 0;
4768 }
4769 }
4770 }
4771
4772 else
4773 {
4774 /* The case where we are excluding both ranges. Here the complex case
4775 is if they don't overlap. In that case, the only time we have a
4776 range is if they are adjacent. If the second is a subset of the
4777 first, the result is the first. Otherwise, the range to exclude
4778 starts at the beginning of the first range and ends at the end of the
4779 second. */
4780 if (no_overlap)
4781 {
4782 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4783 range_successor (high0),
4784 1, low1, 0)))
4785 in_p = 0, low = low0, high = high1;
4786 else
4787 {
4788 /* Canonicalize - [min, x] into - [-, x]. */
4789 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4790 switch (TREE_CODE (TREE_TYPE (low0)))
4791 {
4792 case ENUMERAL_TYPE:
4793 if (TYPE_PRECISION (TREE_TYPE (low0))
4794 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4795 break;
4796 /* FALLTHROUGH */
4797 case INTEGER_TYPE:
4798 if (tree_int_cst_equal (low0,
4799 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4800 low0 = 0;
4801 break;
4802 case POINTER_TYPE:
4803 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4804 && integer_zerop (low0))
4805 low0 = 0;
4806 break;
4807 default:
4808 break;
4809 }
4810
4811 /* Canonicalize - [x, max] into - [x, -]. */
4812 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4813 switch (TREE_CODE (TREE_TYPE (high1)))
4814 {
4815 case ENUMERAL_TYPE:
4816 if (TYPE_PRECISION (TREE_TYPE (high1))
4817 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4818 break;
4819 /* FALLTHROUGH */
4820 case INTEGER_TYPE:
4821 if (tree_int_cst_equal (high1,
4822 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4823 high1 = 0;
4824 break;
4825 case POINTER_TYPE:
4826 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4827 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4828 high1, 1,
4829 build_int_cst (TREE_TYPE (high1), 1),
4830 1)))
4831 high1 = 0;
4832 break;
4833 default:
4834 break;
4835 }
4836
4837 /* The ranges might be also adjacent between the maximum and
4838 minimum values of the given type. For
4839 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4840 return + [x + 1, y - 1]. */
4841 if (low0 == 0 && high1 == 0)
4842 {
4843 low = range_successor (high0);
4844 high = range_predecessor (low1);
4845 if (low == 0 || high == 0)
4846 return 0;
4847
4848 in_p = 1;
4849 }
4850 else
4851 return 0;
4852 }
4853 }
4854 else if (subset)
4855 in_p = 0, low = low0, high = high0;
4856 else
4857 in_p = 0, low = low0, high = high1;
4858 }
4859
4860 *pin_p = in_p, *plow = low, *phigh = high;
4861 return 1;
4862 }
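
/* Worked examples (illustrative): merging + [2, 5] with + [4, 9] takes
   the in0_p && in1_p arm; the ranges overlap and neither subsumes the
   other, so the result is the intersection + [4, 5]. Merging - [2, 5]
   with - [6, 9] succeeds only because the ranges are adjacent, giving
   - [2, 9]; with a gap between them the function would return 0. */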
4863 \f
4864
4865 /* Subroutine of fold, looking inside expressions of the form
4866 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4867 of the COND_EXPR. This function is being used also to optimize
4868 A op B ? C : A, by reversing the comparison first.
4869
4870 Return a folded expression whose code is not a COND_EXPR
4871 anymore, or NULL_TREE if no folding opportunity is found. */
4872
4873 static tree
4874 fold_cond_expr_with_comparison (location_t loc, tree type,
4875 tree arg0, tree arg1, tree arg2)
4876 {
4877 enum tree_code comp_code = TREE_CODE (arg0);
4878 tree arg00 = TREE_OPERAND (arg0, 0);
4879 tree arg01 = TREE_OPERAND (arg0, 1);
4880 tree arg1_type = TREE_TYPE (arg1);
4881 tree tem;
4882
4883 STRIP_NOPS (arg1);
4884 STRIP_NOPS (arg2);
4885
4886 /* If we have A op 0 ? A : -A, consider applying the following
4887 transformations:
4888
4889 A == 0? A : -A same as -A
4890 A != 0? A : -A same as A
4891 A >= 0? A : -A same as abs (A)
4892 A > 0? A : -A same as abs (A)
4893 A <= 0? A : -A same as -abs (A)
4894 A < 0? A : -A same as -abs (A)
4895
4896 None of these transformations work for modes with signed
4897 zeros. If A is +/-0, the first two transformations will
4898 change the sign of the result (from +0 to -0, or vice
4899 versa). The last four will fix the sign of the result,
4900 even though the original expressions could be positive or
4901 negative, depending on the sign of A.
4902
4903 Note that all these transformations are correct if A is
4904 NaN, since the two alternatives (A and -A) are also NaNs. */
4905 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4906 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4907 ? real_zerop (arg01)
4908 : integer_zerop (arg01))
4909 && ((TREE_CODE (arg2) == NEGATE_EXPR
4910 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4911 /* In the case that A is of the form X-Y, '-A' (arg2) may
4912 have already been folded to Y-X, check for that. */
4913 || (TREE_CODE (arg1) == MINUS_EXPR
4914 && TREE_CODE (arg2) == MINUS_EXPR
4915 && operand_equal_p (TREE_OPERAND (arg1, 0),
4916 TREE_OPERAND (arg2, 1), 0)
4917 && operand_equal_p (TREE_OPERAND (arg1, 1),
4918 TREE_OPERAND (arg2, 0), 0))))
4919 switch (comp_code)
4920 {
4921 case EQ_EXPR:
4922 case UNEQ_EXPR:
4923 tem = fold_convert_loc (loc, arg1_type, arg1);
4924 return pedantic_non_lvalue_loc (loc,
4925 fold_convert_loc (loc, type,
4926 negate_expr (tem)));
4927 case NE_EXPR:
4928 case LTGT_EXPR:
4929 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4930 case UNGE_EXPR:
4931 case UNGT_EXPR:
4932 if (flag_trapping_math)
4933 break;
4934 /* Fall through. */
4935 case GE_EXPR:
4936 case GT_EXPR:
4937 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4938 arg1 = fold_convert_loc (loc, signed_type_for
4939 (TREE_TYPE (arg1)), arg1);
4940 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4941 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4942 case UNLE_EXPR:
4943 case UNLT_EXPR:
4944 if (flag_trapping_math)
4945 break;
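	/* Fall through. */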
4946 case LE_EXPR:
4947 case LT_EXPR:
4948 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4949 arg1 = fold_convert_loc (loc, signed_type_for
4950 (TREE_TYPE (arg1)), arg1);
4951 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4952 return negate_expr (fold_convert_loc (loc, type, tem));
4953 default:
4954 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4955 break;
4956 }
4957
4958 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4959 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4960 both transformations are correct when A is NaN: A != 0
4961 is then true, and A == 0 is false. */
4962
4963 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4964 && integer_zerop (arg01) && integer_zerop (arg2))
4965 {
4966 if (comp_code == NE_EXPR)
4967 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4968 else if (comp_code == EQ_EXPR)
4969 return build_zero_cst (type);
4970 }
4971
4972 /* Try some transformations of A op B ? A : B.
4973
4974 A == B? A : B same as B
4975 A != B? A : B same as A
4976 A >= B? A : B same as max (A, B)
4977 A > B? A : B same as max (B, A)
4978 A <= B? A : B same as min (A, B)
4979 A < B? A : B same as min (B, A)
4980
4981 As above, these transformations don't work in the presence
4982 of signed zeros. For example, if A and B are zeros of
4983 opposite sign, the first two transformations will change
4984 the sign of the result. In the last four, the original
4985 expressions give different results for (A=+0, B=-0) and
4986 (A=-0, B=+0), but the transformed expressions do not.
4987
4988 The first two transformations are correct if either A or B
4989 is a NaN. In the first transformation, the condition will
4990 be false, and B will indeed be chosen. In the case of the
4991 second transformation, the condition A != B will be true,
4992 and A will be chosen.
4993
4994 The conversions to max() and min() are not correct if B is
4995 a number and A is not. The conditions in the original
4996 expressions will be false, so all four give B. The min()
4997 and max() versions would give a NaN instead. */
4998 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4999 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5000 /* Avoid these transformations if the COND_EXPR may be used
5001 as an lvalue in the C++ front-end. PR c++/19199. */
5002 && (in_gimple_form
5003 || VECTOR_TYPE_P (type)
5004 || (! lang_GNU_CXX ()
5005 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5006 || ! maybe_lvalue_p (arg1)
5007 || ! maybe_lvalue_p (arg2)))
5008 {
5009 tree comp_op0 = arg00;
5010 tree comp_op1 = arg01;
5011 tree comp_type = TREE_TYPE (comp_op0);
5012
5013 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5014 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5015 {
5016 comp_type = type;
5017 comp_op0 = arg1;
5018 comp_op1 = arg2;
5019 }
5020
5021 switch (comp_code)
5022 {
5023 case EQ_EXPR:
5024 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5025 case NE_EXPR:
5026 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5027 case LE_EXPR:
5028 case LT_EXPR:
5029 case UNLE_EXPR:
5030 case UNLT_EXPR:
5031 /* In C++ a ?: expression can be an lvalue, so put the
5032 operand which will be used if they are equal first
5033 so that we can convert this back to the
5034 corresponding COND_EXPR. */
5035 if (!HONOR_NANS (arg1))
5036 {
5037 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5038 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5039 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5040 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5041 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5042 comp_op1, comp_op0);
5043 return pedantic_non_lvalue_loc (loc,
5044 fold_convert_loc (loc, type, tem));
5045 }
5046 break;
5047 case GE_EXPR:
5048 case GT_EXPR:
5049 case UNGE_EXPR:
5050 case UNGT_EXPR:
5051 if (!HONOR_NANS (arg1))
5052 {
5053 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5054 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5055 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5056 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5057 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5058 comp_op1, comp_op0);
5059 return pedantic_non_lvalue_loc (loc,
5060 fold_convert_loc (loc, type, tem));
5061 }
5062 break;
5063 case UNEQ_EXPR:
5064 if (!HONOR_NANS (arg1))
5065 return pedantic_non_lvalue_loc (loc,
5066 fold_convert_loc (loc, type, arg2));
5067 break;
5068 case LTGT_EXPR:
5069 if (!HONOR_NANS (arg1))
5070 return pedantic_non_lvalue_loc (loc,
5071 fold_convert_loc (loc, type, arg1));
5072 break;
5073 default:
5074 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5075 break;
5076 }
5077 }
5078
5079 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5080 we might still be able to simplify this. For example,
5081 if C1 is one less or one more than C2, this might have started
5082 out as a MIN or MAX and been transformed by this function.
5083 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5084
5085 if (INTEGRAL_TYPE_P (type)
5086 && TREE_CODE (arg01) == INTEGER_CST
5087 && TREE_CODE (arg2) == INTEGER_CST)
5088 switch (comp_code)
5089 {
5090 case EQ_EXPR:
5091 if (TREE_CODE (arg1) == INTEGER_CST)
5092 break;
5093 /* We can replace A with C1 in this case. */
5094 arg1 = fold_convert_loc (loc, type, arg01);
5095 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5096
5097 case LT_EXPR:
5098 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5099 MIN_EXPR, to preserve the signedness of the comparison. */
5100 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5101 OEP_ONLY_CONST)
5102 && operand_equal_p (arg01,
5103 const_binop (PLUS_EXPR, arg2,
5104 build_int_cst (type, 1)),
5105 OEP_ONLY_CONST))
5106 {
5107 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5108 fold_convert_loc (loc, TREE_TYPE (arg00),
5109 arg2));
5110 return pedantic_non_lvalue_loc (loc,
5111 fold_convert_loc (loc, type, tem));
5112 }
5113 break;
5114
5115 case LE_EXPR:
5116 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5117 as above. */
5118 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5119 OEP_ONLY_CONST)
5120 && operand_equal_p (arg01,
5121 const_binop (MINUS_EXPR, arg2,
5122 build_int_cst (type, 1)),
5123 OEP_ONLY_CONST))
5124 {
5125 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5126 fold_convert_loc (loc, TREE_TYPE (arg00),
5127 arg2));
5128 return pedantic_non_lvalue_loc (loc,
5129 fold_convert_loc (loc, type, tem));
5130 }
5131 break;
5132
5133 case GT_EXPR:
5134 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5135 MAX_EXPR, to preserve the signedness of the comparison. */
5136 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5137 OEP_ONLY_CONST)
5138 && operand_equal_p (arg01,
5139 const_binop (MINUS_EXPR, arg2,
5140 build_int_cst (type, 1)),
5141 OEP_ONLY_CONST))
5142 {
5143 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5144 fold_convert_loc (loc, TREE_TYPE (arg00),
5145 arg2));
5146 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5147 }
5148 break;
5149
5150 case GE_EXPR:
5151 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5152 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5153 OEP_ONLY_CONST)
5154 && operand_equal_p (arg01,
5155 const_binop (PLUS_EXPR, arg2,
5156 build_int_cst (type, 1)),
5157 OEP_ONLY_CONST))
5158 {
5159 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5160 fold_convert_loc (loc, TREE_TYPE (arg00),
5161 arg2));
5162 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5163 }
5164 break;
5165 case NE_EXPR:
5166 break;
5167 default:
5168 gcc_unreachable ();
5169 }
5170
5171 return NULL_TREE;
5172 }
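
/* Two illustrative instances with signed integer operands (so neither
   signed zeros nor NaNs get in the way): A > 0 ? A : -A becomes
   ABS_EXPR <A> through the GT_EXPR case, and X < Y ? X : Y becomes
   MIN_EXPR <Y, X>, with Y first so that the value picked on equality
   matches the original COND_EXPR. */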
5173
5174
5175 \f
5176 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5177 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5178 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5179 false) >= 2)
5180 #endif
5181
5182 /* EXP is some logical combination of boolean tests. See if we can
5183 merge it into some range test. Return the new tree if so. */
5184
5185 static tree
5186 fold_range_test (location_t loc, enum tree_code code, tree type,
5187 tree op0, tree op1)
5188 {
5189 int or_op = (code == TRUTH_ORIF_EXPR
5190 || code == TRUTH_OR_EXPR);
5191 int in0_p, in1_p, in_p;
5192 tree low0, low1, low, high0, high1, high;
5193 bool strict_overflow_p = false;
5194 tree tem, lhs, rhs;
5195 const char * const warnmsg = G_("assuming signed overflow does not occur "
5196 "when simplifying range test");
5197
5198 if (!INTEGRAL_TYPE_P (type))
5199 return 0;
5200
5201 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5202 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5203
5204 /* If this is an OR operation, invert both sides; we will invert
5205 again at the end. */
5206 if (or_op)
5207 in0_p = ! in0_p, in1_p = ! in1_p;
5208
5209 /* If both expressions are the same, if we can merge the ranges, and we
5210 can build the range test, return it or it inverted. If one of the
5211 ranges is always true or always false, consider it to be the same
5212 expression as the other. */
5213 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5214 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5215 in1_p, low1, high1)
5216 && 0 != (tem = (build_range_check (loc, type,
5217 lhs != 0 ? lhs
5218 : rhs != 0 ? rhs : integer_zero_node,
5219 in_p, low, high))))
5220 {
5221 if (strict_overflow_p)
5222 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5223 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5224 }
5225
5226 /* On machines where branches are expensive, if this is a
5227 short-circuited branch and the underlying object on both sides
5228 is the same, make a non-short-circuit operation. */
5229 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5230 && lhs != 0 && rhs != 0
5231 && (code == TRUTH_ANDIF_EXPR
5232 || code == TRUTH_ORIF_EXPR)
5233 && operand_equal_p (lhs, rhs, 0))
5234 {
5235 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5236 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5237 which cases we can't do this. */
5238 if (simple_operand_p (lhs))
5239 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5240 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5241 type, op0, op1);
5242
5243 else if (!lang_hooks.decls.global_bindings_p ()
5244 && !CONTAINS_PLACEHOLDER_P (lhs))
5245 {
5246 tree common = save_expr (lhs);
5247
5248 if (0 != (lhs = build_range_check (loc, type, common,
5249 or_op ? ! in0_p : in0_p,
5250 low0, high0))
5251 && (0 != (rhs = build_range_check (loc, type, common,
5252 or_op ? ! in1_p : in1_p,
5253 low1, high1))))
5254 {
5255 if (strict_overflow_p)
5256 fold_overflow_warning (warnmsg,
5257 WARN_STRICT_OVERFLOW_COMPARISON);
5258 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5259 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5260 type, lhs, rhs);
5261 }
5262 }
5263 }
5264
5265 return 0;
5266 }
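
/* Example (hedged): for ch >= '0' && ch <= '9' both operands produce
   ranges over CH; merge_ranges intersects + ['0', -] and + [-, '9']
   into + ['0', '9'], and build_range_check then emits the single
   unsigned compare, roughly (unsigned char) (ch - 48) <= 9. */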
5267 \f
5268 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5269 bit value. Arrange things so the extra bits will be set to zero if and
5270 only if C is sign-extended to its full width. If MASK is nonzero,
5271 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5272
5273 static tree
5274 unextend (tree c, int p, int unsignedp, tree mask)
5275 {
5276 tree type = TREE_TYPE (c);
5277 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5278 tree temp;
5279
5280 if (p == modesize || unsignedp)
5281 return c;
5282
5283 /* We work by getting just the sign bit into the low-order bit, then
5284 into the high-order bit, then sign-extend. We then XOR that value
5285 with C. */
5286 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5287
5288 /* We must use a signed type in order to get an arithmetic right shift.
5289 However, we must also avoid introducing accidental overflows, so that
5290 a subsequent call to integer_zerop will work. Hence we must
5291 do the type conversion here. At this point, the constant is either
5292 zero or one, and the conversion to a signed type can never overflow.
5293 We could get an overflow if this conversion is done anywhere else. */
5294 if (TYPE_UNSIGNED (type))
5295 temp = fold_convert (signed_type_for (type), temp);
5296
5297 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5298 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5299 if (mask != 0)
5300 temp = const_binop (BIT_AND_EXPR, temp,
5301 fold_convert (TREE_TYPE (c), mask));
5302 /* If necessary, convert the type back to match the type of C. */
5303 if (TYPE_UNSIGNED (type))
5304 temp = fold_convert (type, temp);
5305
5306 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5307 }
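
/* Numeric sketch (assuming a 32-bit mode and MASK == 0): for the 4-bit
   value C == 0b1010 the sign bit is 1; the shift pair smears it into
   0xfffffff0, and the final XOR yields 0xfffffffa, i.e. C sign-extended
   from 4 bits (-6). For a non-negative 4-bit C the XOR operand is zero
   and C comes back unchanged. */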
5308 \f
5309 /* For an expression that has the form
5310 (A && B) || ~B
5311 or
5312 (A || B) && ~B,
5313 we can drop one of the inner expressions and simplify to
5314 A || ~B
5315 or
5316 A && ~B
5317 LOC is the location of the resulting expression. OP is the inner
5318 logical operation; the left-hand side in the examples above, while CMPOP
5319 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5320 removing a condition that guards another, as in
5321 (A != NULL && A->...) || A == NULL
5322 which we must not transform. If RHS_ONLY is true, only eliminate the
5323 right-most operand of the inner logical operation. */
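/* For example, for integer operands with no side effects,
     (x > 0 && y != 0) || x <= 0
   simplifies to
     (y != 0) || x <= 0
   because x <= 0 is the inverse of the comparison x > 0 in the
   left-hand arm.  */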
5324
5325 static tree
5326 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5327 bool rhs_only)
5328 {
5329 tree type = TREE_TYPE (cmpop);
5330 enum tree_code code = TREE_CODE (cmpop);
5331 enum tree_code truthop_code = TREE_CODE (op);
5332 tree lhs = TREE_OPERAND (op, 0);
5333 tree rhs = TREE_OPERAND (op, 1);
5334 tree orig_lhs = lhs, orig_rhs = rhs;
5335 enum tree_code rhs_code = TREE_CODE (rhs);
5336 enum tree_code lhs_code = TREE_CODE (lhs);
5337 enum tree_code inv_code;
5338
5339 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5340 return NULL_TREE;
5341
5342 if (TREE_CODE_CLASS (code) != tcc_comparison)
5343 return NULL_TREE;
5344
5345 if (rhs_code == truthop_code)
5346 {
5347 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5348 if (newrhs != NULL_TREE)
5349 {
5350 rhs = newrhs;
5351 rhs_code = TREE_CODE (rhs);
5352 }
5353 }
5354 if (lhs_code == truthop_code && !rhs_only)
5355 {
5356 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5357 if (newlhs != NULL_TREE)
5358 {
5359 lhs = newlhs;
5360 lhs_code = TREE_CODE (lhs);
5361 }
5362 }
5363
5364 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5365 if (inv_code == rhs_code
5366 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5367 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5368 return lhs;
5369 if (!rhs_only && inv_code == lhs_code
5370 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5371 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5372 return rhs;
5373 if (rhs != orig_rhs || lhs != orig_lhs)
5374 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5375 lhs, rhs);
5376 return NULL_TREE;
5377 }
5378
5379 /* Find ways of folding logical expressions of LHS and RHS:
5380 Try to merge two comparisons to the same innermost item.
5381 Look for range tests like "ch >= '0' && ch <= '9'".
5382 Look for combinations of simple terms on machines with expensive branches
5383 and evaluate the RHS unconditionally.
5384
5385 For example, if we have p->a == 2 && p->b == 4 and we can make an
5386 object large enough to span both A and B, we can do this with a comparison
5387 against the object ANDed with a mask.
5388
5389 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5390 operations to do this with one comparison.
5391
5392 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5393 function and the one above.
5394
5395 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5396 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5397
5398 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5399 two operands.
5400
5401 We return the simplified tree or 0 if no optimization is possible. */
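/* As a sketch of the bit-field case, given
     struct S { int a : 4; int b : 4; } *p;
   the test p->a == 2 && p->b == 3 may, on a typical little-endian
   layout, become a single load and comparison along the lines of
     (*(unsigned char *) p) == (2 | (3 << 4))
   with a mask applied first when the fields do not cover the whole of
   the loaded object.  The exact masks, shifts and access mode depend
   on the target and on get_best_mode.  */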
5402
5403 static tree
5404 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5405 tree lhs, tree rhs)
5406 {
5407 /* If this is the "or" of two comparisons, we can do something if
5408 the comparisons are NE_EXPR. If this is the "and", we can do something
5409 if the comparisons are EQ_EXPR. I.e.,
5410 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5411
5412 WANTED_CODE is this operation code. For single bit fields, we can
5413 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5414 comparison for one-bit fields. */
5415
5416 enum tree_code wanted_code;
5417 enum tree_code lcode, rcode;
5418 tree ll_arg, lr_arg, rl_arg, rr_arg;
5419 tree ll_inner, lr_inner, rl_inner, rr_inner;
5420 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5421 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5422 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5423 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5424 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5425 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5426 machine_mode lnmode, rnmode;
5427 tree ll_mask, lr_mask, rl_mask, rr_mask;
5428 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5429 tree l_const, r_const;
5430 tree lntype, rntype, result;
5431 HOST_WIDE_INT first_bit, end_bit;
5432 int volatilep;
5433
5434 /* Start by getting the comparison codes. Fail if anything is volatile.
5435 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5436 it were wrapped in a NE_EXPR against zero. */
5437
5438 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5439 return 0;
5440
5441 lcode = TREE_CODE (lhs);
5442 rcode = TREE_CODE (rhs);
5443
5444 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5445 {
5446 lhs = build2 (NE_EXPR, truth_type, lhs,
5447 build_int_cst (TREE_TYPE (lhs), 0));
5448 lcode = NE_EXPR;
5449 }
5450
5451 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5452 {
5453 rhs = build2 (NE_EXPR, truth_type, rhs,
5454 build_int_cst (TREE_TYPE (rhs), 0));
5455 rcode = NE_EXPR;
5456 }
5457
5458 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5459 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5460 return 0;
5461
5462 ll_arg = TREE_OPERAND (lhs, 0);
5463 lr_arg = TREE_OPERAND (lhs, 1);
5464 rl_arg = TREE_OPERAND (rhs, 0);
5465 rr_arg = TREE_OPERAND (rhs, 1);
5466
5467 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5468 if (simple_operand_p (ll_arg)
5469 && simple_operand_p (lr_arg))
5470 {
5471 if (operand_equal_p (ll_arg, rl_arg, 0)
5472 && operand_equal_p (lr_arg, rr_arg, 0))
5473 {
5474 result = combine_comparisons (loc, code, lcode, rcode,
5475 truth_type, ll_arg, lr_arg);
5476 if (result)
5477 return result;
5478 }
5479 else if (operand_equal_p (ll_arg, rr_arg, 0)
5480 && operand_equal_p (lr_arg, rl_arg, 0))
5481 {
5482 result = combine_comparisons (loc, code, lcode,
5483 swap_tree_comparison (rcode),
5484 truth_type, ll_arg, lr_arg);
5485 if (result)
5486 return result;
5487 }
5488 }
5489
5490 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5491 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5492
5493 /* If the RHS can be evaluated unconditionally and its operands are
5494 simple, it wins to evaluate the RHS unconditionally on machines
5495 with expensive branches. In this case, this isn't a comparison
5496 that can be merged. */
5497
5498 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5499 false) >= 2
5500 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5501 && simple_operand_p (rl_arg)
5502 && simple_operand_p (rr_arg))
5503 {
5504 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5505 if (code == TRUTH_OR_EXPR
5506 && lcode == NE_EXPR && integer_zerop (lr_arg)
5507 && rcode == NE_EXPR && integer_zerop (rr_arg)
5508 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5509 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5510 return build2_loc (loc, NE_EXPR, truth_type,
5511 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5512 ll_arg, rl_arg),
5513 build_int_cst (TREE_TYPE (ll_arg), 0));
5514
5515 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5516 if (code == TRUTH_AND_EXPR
5517 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5518 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5519 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5520 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5521 return build2_loc (loc, EQ_EXPR, truth_type,
5522 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5523 ll_arg, rl_arg),
5524 build_int_cst (TREE_TYPE (ll_arg), 0));
5525 }
5526
5527 /* See if the comparisons can be merged. Then get all the parameters for
5528 each side. */
5529
5530 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5531 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5532 return 0;
5533
5534 volatilep = 0;
5535 ll_inner = decode_field_reference (loc, ll_arg,
5536 &ll_bitsize, &ll_bitpos, &ll_mode,
5537 &ll_unsignedp, &volatilep, &ll_mask,
5538 &ll_and_mask);
5539 lr_inner = decode_field_reference (loc, lr_arg,
5540 &lr_bitsize, &lr_bitpos, &lr_mode,
5541 &lr_unsignedp, &volatilep, &lr_mask,
5542 &lr_and_mask);
5543 rl_inner = decode_field_reference (loc, rl_arg,
5544 &rl_bitsize, &rl_bitpos, &rl_mode,
5545 &rl_unsignedp, &volatilep, &rl_mask,
5546 &rl_and_mask);
5547 rr_inner = decode_field_reference (loc, rr_arg,
5548 &rr_bitsize, &rr_bitpos, &rr_mode,
5549 &rr_unsignedp, &volatilep, &rr_mask,
5550 &rr_and_mask);
5551
5552 /* The inner operation on the lhs of each comparison must be the
5553 same if we are to be able to do anything.
5554 Then see if we have constants. If not, the same must be true for
5555 the rhs's. */
5556 if (volatilep || ll_inner == 0 || rl_inner == 0
5557 || ! operand_equal_p (ll_inner, rl_inner, 0))
5558 return 0;
5559
5560 if (TREE_CODE (lr_arg) == INTEGER_CST
5561 && TREE_CODE (rr_arg) == INTEGER_CST)
5562 l_const = lr_arg, r_const = rr_arg;
5563 else if (lr_inner == 0 || rr_inner == 0
5564 || ! operand_equal_p (lr_inner, rr_inner, 0))
5565 return 0;
5566 else
5567 l_const = r_const = 0;
5568
5569 /* If either comparison code is not correct for our logical operation,
5570 fail. However, we can convert a one-bit comparison against zero into
5571 the opposite comparison against that bit being set in the field. */
5572
5573 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5574 if (lcode != wanted_code)
5575 {
5576 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5577 {
5578 /* Make the left operand unsigned, since we are only interested
5579 in the value of one bit. Otherwise we are doing the wrong
5580 thing below. */
5581 ll_unsignedp = 1;
5582 l_const = ll_mask;
5583 }
5584 else
5585 return 0;
5586 }
5587
5588 /* This is analogous to the code for l_const above. */
5589 if (rcode != wanted_code)
5590 {
5591 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5592 {
5593 rl_unsignedp = 1;
5594 r_const = rl_mask;
5595 }
5596 else
5597 return 0;
5598 }
5599
5600 /* See if we can find a mode that contains both fields being compared on
5601 the left. If we can't, fail. Otherwise, update all constants and masks
5602 to be relative to a field of that size. */
5603 first_bit = MIN (ll_bitpos, rl_bitpos);
5604 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5605 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5606 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5607 volatilep);
5608 if (lnmode == VOIDmode)
5609 return 0;
5610
5611 lnbitsize = GET_MODE_BITSIZE (lnmode);
5612 lnbitpos = first_bit & ~ (lnbitsize - 1);
5613 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5614 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5615
5616 if (BYTES_BIG_ENDIAN)
5617 {
5618 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5619 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5620 }
5621
5622 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5623 size_int (xll_bitpos));
5624 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5625 size_int (xrl_bitpos));
5626
5627 if (l_const)
5628 {
5629 l_const = fold_convert_loc (loc, lntype, l_const);
5630 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5631 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5632 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5633 fold_build1_loc (loc, BIT_NOT_EXPR,
5634 lntype, ll_mask))))
5635 {
5636 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5637
5638 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5639 }
5640 }
5641 if (r_const)
5642 {
5643 r_const = fold_convert_loc (loc, lntype, r_const);
5644 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5645 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5646 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5647 fold_build1_loc (loc, BIT_NOT_EXPR,
5648 lntype, rl_mask))))
5649 {
5650 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5651
5652 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5653 }
5654 }
5655
5656 /* If the right sides are not constant, do the same for them. Also,
5657 disallow this optimization if a size or signedness mismatch occurs
5658 between the left and right sides. */
5659 if (l_const == 0)
5660 {
5661 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5662 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5663 /* Make sure the two fields on the right
5664 correspond to the left without being swapped. */
5665 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5666 return 0;
5667
5668 first_bit = MIN (lr_bitpos, rr_bitpos);
5669 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5670 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5671 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5672 volatilep);
5673 if (rnmode == VOIDmode)
5674 return 0;
5675
5676 rnbitsize = GET_MODE_BITSIZE (rnmode);
5677 rnbitpos = first_bit & ~ (rnbitsize - 1);
5678 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5679 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5680
5681 if (BYTES_BIG_ENDIAN)
5682 {
5683 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5684 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5685 }
5686
5687 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5688 rntype, lr_mask),
5689 size_int (xlr_bitpos));
5690 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5691 rntype, rr_mask),
5692 size_int (xrr_bitpos));
5693
5694 /* Make a mask that corresponds to both fields being compared.
5695 Do this for both items being compared. If the operands are the
5696 same size and the bits being compared are in the same position
5697 then we can do this by masking both and comparing the masked
5698 results. */
5699 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5700 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5701 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5702 {
5703 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5704 ll_unsignedp || rl_unsignedp);
5705 if (! all_ones_mask_p (ll_mask, lnbitsize))
5706 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5707
5708 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5709 lr_unsignedp || rr_unsignedp);
5710 if (! all_ones_mask_p (lr_mask, rnbitsize))
5711 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5712
5713 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5714 }
5715
5716 /* There is still another way we can do something: If both pairs of
5717 fields being compared are adjacent, we may be able to make a wider
5718 field containing them both.
5719
5720 Note that we still must mask the lhs/rhs expressions. Furthermore,
5721 the mask must be shifted to account for the shift done by
5722 make_bit_field_ref. */
5723 if ((ll_bitsize + ll_bitpos == rl_bitpos
5724 && lr_bitsize + lr_bitpos == rr_bitpos)
5725 || (ll_bitpos == rl_bitpos + rl_bitsize
5726 && lr_bitpos == rr_bitpos + rr_bitsize))
5727 {
5728 tree type;
5729
5730 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5731 ll_bitsize + rl_bitsize,
5732 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5733 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5734 lr_bitsize + rr_bitsize,
5735 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5736
5737 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5738 size_int (MIN (xll_bitpos, xrl_bitpos)));
5739 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5740 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5741
5742 /* Convert to the smaller type before masking out unwanted bits. */
5743 type = lntype;
5744 if (lntype != rntype)
5745 {
5746 if (lnbitsize > rnbitsize)
5747 {
5748 lhs = fold_convert_loc (loc, rntype, lhs);
5749 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5750 type = rntype;
5751 }
5752 else if (lnbitsize < rnbitsize)
5753 {
5754 rhs = fold_convert_loc (loc, lntype, rhs);
5755 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5756 type = lntype;
5757 }
5758 }
5759
5760 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5761 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5762
5763 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5764 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5765
5766 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5767 }
5768
5769 return 0;
5770 }
5771
5772 /* Handle the case of comparisons with constants. If there is something in
5773 common between the masks, those bits of the constants must be the same.
5774 If not, the condition is always false. Test for this to avoid generating
5775 incorrect code below. */
5776 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5777 if (! integer_zerop (result)
5778 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5779 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5780 {
5781 if (wanted_code == NE_EXPR)
5782 {
5783 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5784 return constant_boolean_node (true, truth_type);
5785 }
5786 else
5787 {
5788 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5789 return constant_boolean_node (false, truth_type);
5790 }
5791 }
5792
5793 /* Construct the expression we will return. First get the component
5794 reference we will make. Unless the mask is all ones the width of
5795 that field, perform the mask operation. Then compare with the
5796 merged constant. */
5797 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5798 ll_unsignedp || rl_unsignedp);
5799
5800 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5801 if (! all_ones_mask_p (ll_mask, lnbitsize))
5802 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5803
5804 return build2_loc (loc, wanted_code, truth_type, result,
5805 const_binop (BIT_IOR_EXPR, l_const, r_const));
5806 }
5807 \f
5808 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5809 constant. */
5810
5811 static tree
5812 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5813 tree op0, tree op1)
5814 {
5815 tree arg0 = op0;
5816 enum tree_code op_code;
5817 tree comp_const;
5818 tree minmax_const;
5819 int consts_equal, consts_lt;
5820 tree inner;
5821
5822 STRIP_SIGN_NOPS (arg0);
5823
5824 op_code = TREE_CODE (arg0);
5825 minmax_const = TREE_OPERAND (arg0, 1);
5826 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5827 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5828 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5829 inner = TREE_OPERAND (arg0, 0);
5830
5831 /* If something does not permit us to optimize, return the original tree. */
5832 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5833 || TREE_CODE (comp_const) != INTEGER_CST
5834 || TREE_OVERFLOW (comp_const)
5835 || TREE_CODE (minmax_const) != INTEGER_CST
5836 || TREE_OVERFLOW (minmax_const))
5837 return NULL_TREE;
5838
5839 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5840 and GT_EXPR, doing the rest with recursive calls using logical
5841 simplifications. */
5842 switch (code)
5843 {
5844 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5845 {
5846 tree tem
5847 = optimize_minmax_comparison (loc,
5848 invert_tree_comparison (code, false),
5849 type, op0, op1);
5850 if (tem)
5851 return invert_truthvalue_loc (loc, tem);
5852 return NULL_TREE;
5853 }
5854
5855 case GE_EXPR:
5856 return
5857 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5858 optimize_minmax_comparison
5859 (loc, EQ_EXPR, type, arg0, comp_const),
5860 optimize_minmax_comparison
5861 (loc, GT_EXPR, type, arg0, comp_const));
5862
5863 case EQ_EXPR:
5864 if (op_code == MAX_EXPR && consts_equal)
5865 /* MAX (X, 0) == 0 -> X <= 0 */
5866 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5867
5868 else if (op_code == MAX_EXPR && consts_lt)
5869 /* MAX (X, 0) == 5 -> X == 5 */
5870 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5871
5872 else if (op_code == MAX_EXPR)
5873 /* MAX (X, 0) == -1 -> false */
5874 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5875
5876 else if (consts_equal)
5877 /* MIN (X, 0) == 0 -> X >= 0 */
5878 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5879
5880 else if (consts_lt)
5881 /* MIN (X, 0) == 5 -> false */
5882 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5883
5884 else
5885 /* MIN (X, 0) == -1 -> X == -1 */
5886 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5887
5888 case GT_EXPR:
5889 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5890 /* MAX (X, 0) > 0 -> X > 0
5891 MAX (X, 0) > 5 -> X > 5 */
5892 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5893
5894 else if (op_code == MAX_EXPR)
5895 /* MAX (X, 0) > -1 -> true */
5896 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5897
5898 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5899 /* MIN (X, 0) > 0 -> false
5900 MIN (X, 0) > 5 -> false */
5901 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5902
5903 else
5904 /* MIN (X, 0) > -1 -> X > -1 */
5905 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5906
5907 default:
5908 return NULL_TREE;
5909 }
5910 }
5911 \f
5912 /* T is an integer expression that is being multiplied or divided by, or
5913 reduced modulo, a constant C (CODE says which operation and what kind of
5914 divide or modulus). See if we can eliminate that operation by folding it with
5915 other operations already in T. WIDE_TYPE, if non-null, is a type that
5916 should be used for the computation if wider than our type.
5917
5918 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5919 (X * 2) + (Y * 4). We must, however, be assured that either the original
5920 expression would not overflow or that overflow is undefined for the type
5921 in the language in question.
5922
5923 If we return a non-null expression, it is an equivalent form of the
5924 original computation, but need not be in the original type.
5925
5926 We set *STRICT_OVERFLOW_P to true if the return value depends on
5927 signed overflow being undefined. Otherwise we do not change
5928 *STRICT_OVERFLOW_P. */
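/* For example, (X * 4) / 2 is folded to X * 2 only because signed
   overflow is undefined: with 32-bit unsigned X == 0x40000001,
   (X * 4) / 2 wraps to 4 / 2 == 2 while X * 2 == 0x80000002.  So the
   fold is refused for wrapping types, and for signed types it reports
   the assumption through *STRICT_OVERFLOW_P.  */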
5929
5930 static tree
5931 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5932 bool *strict_overflow_p)
5933 {
5934 /* To avoid exponential search depth, refuse to allow recursion past
5935 three levels. Beyond that (1) it's highly unlikely that we'll find
5936 something interesting and (2) we've probably processed it before
5937 when we built the inner expression. */
5938
5939 static int depth;
5940 tree ret;
5941
5942 if (depth > 3)
5943 return NULL;
5944
5945 depth++;
5946 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5947 depth--;
5948
5949 return ret;
5950 }
5951
5952 static tree
5953 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5954 bool *strict_overflow_p)
5955 {
5956 tree type = TREE_TYPE (t);
5957 enum tree_code tcode = TREE_CODE (t);
5958 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5959 > GET_MODE_SIZE (TYPE_MODE (type)))
5960 ? wide_type : type);
5961 tree t1, t2;
5962 int same_p = tcode == code;
5963 tree op0 = NULL_TREE, op1 = NULL_TREE;
5964 bool sub_strict_overflow_p;
5965
5966 /* Don't deal with constants of zero here; they confuse the code below. */
5967 if (integer_zerop (c))
5968 return NULL_TREE;
5969
5970 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5971 op0 = TREE_OPERAND (t, 0);
5972
5973 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5974 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5975
5976 /* Note that we need not handle conditional operations here since fold
5977 already handles those cases. So just do arithmetic here. */
5978 switch (tcode)
5979 {
5980 case INTEGER_CST:
5981 /* For a constant, we can always simplify if we are a multiply
5982 or (for divide and modulus) if it is a multiple of our constant. */
5983 if (code == MULT_EXPR
5984 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5985 return const_binop (code, fold_convert (ctype, t),
5986 fold_convert (ctype, c));
5987 break;
5988
5989 CASE_CONVERT: case NON_LVALUE_EXPR:
5990 /* If op0 is an expression ... */
5991 if ((COMPARISON_CLASS_P (op0)
5992 || UNARY_CLASS_P (op0)
5993 || BINARY_CLASS_P (op0)
5994 || VL_EXP_CLASS_P (op0)
5995 || EXPRESSION_CLASS_P (op0))
5996 /* ... and has wrapping overflow, and its type is smaller
5997 than ctype, then we cannot pass through as widening. */
5998 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5999 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6000 && (TYPE_PRECISION (ctype)
6001 > TYPE_PRECISION (TREE_TYPE (op0))))
6002 /* ... or this is a truncation (t is narrower than op0),
6003 then we cannot pass through this narrowing. */
6004 || (TYPE_PRECISION (type)
6005 < TYPE_PRECISION (TREE_TYPE (op0)))
6006 /* ... or signedness changes for division or modulus,
6007 then we cannot pass through this conversion. */
6008 || (code != MULT_EXPR
6009 && (TYPE_UNSIGNED (ctype)
6010 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6011 /* ... or has undefined overflow while the converted to
6012 type has not, we cannot do the operation in the inner type
6013 as that would introduce undefined overflow. */
6014 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6015 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6016 && !TYPE_OVERFLOW_UNDEFINED (type))))
6017 break;
6018
6019 /* Pass the constant down and see if we can make a simplification. If
6020 we can, replace this expression with the inner simplification for
6021 possible later conversion to our or some other type. */
6022 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6023 && TREE_CODE (t2) == INTEGER_CST
6024 && !TREE_OVERFLOW (t2)
6025 && (0 != (t1 = extract_muldiv (op0, t2, code,
6026 code == MULT_EXPR
6027 ? ctype : NULL_TREE,
6028 strict_overflow_p))))
6029 return t1;
6030 break;
6031
6032 case ABS_EXPR:
6033 /* If widening the type changes it from signed to unsigned, then we
6034 must avoid building ABS_EXPR itself as unsigned. */
6035 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6036 {
6037 tree cstype = (*signed_type_for) (ctype);
6038 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6039 != 0)
6040 {
6041 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6042 return fold_convert (ctype, t1);
6043 }
6044 break;
6045 }
6046 /* If the constant is negative, we cannot simplify this. */
6047 if (tree_int_cst_sgn (c) == -1)
6048 break;
6049 /* FALLTHROUGH */
6050 case NEGATE_EXPR:
6051 /* For division and modulus, type can't be unsigned, as e.g.
6052 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6053 For signed types, even with wrapping overflow, this is fine. */
6054 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6055 break;
6056 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6057 != 0)
6058 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6059 break;
6060
6061 case MIN_EXPR: case MAX_EXPR:
6062 /* If widening the type changes the signedness, then we can't perform
6063 this optimization as that changes the result. */
6064 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6065 break;
6066
6067 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6068 sub_strict_overflow_p = false;
6069 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6070 &sub_strict_overflow_p)) != 0
6071 && (t2 = extract_muldiv (op1, c, code, wide_type,
6072 &sub_strict_overflow_p)) != 0)
6073 {
6074 if (tree_int_cst_sgn (c) < 0)
6075 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6076 if (sub_strict_overflow_p)
6077 *strict_overflow_p = true;
6078 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6079 fold_convert (ctype, t2));
6080 }
6081 break;
6082
6083 case LSHIFT_EXPR: case RSHIFT_EXPR:
6084 /* If the second operand is constant, this is a multiplication
6085 or floor division, by a power of two, so we can treat it that
6086 way unless the multiplier or divisor overflows. Signed
6087 left-shift overflow is implementation-defined rather than
6088 undefined in C90, so do not convert signed left shift into
6089 multiplication. */
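/* E.g. for unsigned X, X << 3 is rewritten here as X * 8, and any
   X >> 2 as the floor division X / 4, before recursing on the
   rewritten tree.  */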
6090 if (TREE_CODE (op1) == INTEGER_CST
6091 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6092 /* const_binop may not detect overflow correctly,
6093 so check for it explicitly here. */
6094 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6095 && 0 != (t1 = fold_convert (ctype,
6096 const_binop (LSHIFT_EXPR,
6097 size_one_node,
6098 op1)))
6099 && !TREE_OVERFLOW (t1))
6100 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6101 ? MULT_EXPR : FLOOR_DIV_EXPR,
6102 ctype,
6103 fold_convert (ctype, op0),
6104 t1),
6105 c, code, wide_type, strict_overflow_p);
6106 break;
6107
6108 case PLUS_EXPR: case MINUS_EXPR:
6109 /* See if we can eliminate the operation on both sides. If we can, we
6110 can return a new PLUS or MINUS. If we can't, the only remaining
6111 cases where we can do anything are if the second operand is a
6112 constant. */
6113 sub_strict_overflow_p = false;
6114 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6115 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6116 if (t1 != 0 && t2 != 0
6117 && (code == MULT_EXPR
6118 /* If not multiplication, we can only do this if both operands
6119 are divisible by c. */
6120 || (multiple_of_p (ctype, op0, c)
6121 && multiple_of_p (ctype, op1, c))))
6122 {
6123 if (sub_strict_overflow_p)
6124 *strict_overflow_p = true;
6125 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6126 fold_convert (ctype, t2));
6127 }
6128
6129 /* If this was a subtraction, negate OP1 and set it to be an addition.
6130 This simplifies the logic below. */
6131 if (tcode == MINUS_EXPR)
6132 {
6133 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6134 /* If OP1 was not easily negatable, the constant may be OP0. */
6135 if (TREE_CODE (op0) == INTEGER_CST)
6136 {
6137 std::swap (op0, op1);
6138 std::swap (t1, t2);
6139 }
6140 }
6141
6142 if (TREE_CODE (op1) != INTEGER_CST)
6143 break;
6144
6145 /* If either OP1 or C are negative, this optimization is not safe for
6146 some of the division and remainder types while for others we need
6147 to change the code. */
6148 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6149 {
6150 if (code == CEIL_DIV_EXPR)
6151 code = FLOOR_DIV_EXPR;
6152 else if (code == FLOOR_DIV_EXPR)
6153 code = CEIL_DIV_EXPR;
6154 else if (code != MULT_EXPR
6155 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6156 break;
6157 }
6158
6159 /* If it's a multiply or a division/modulus operation of a multiple
6160 of our constant, do the operation and verify it doesn't overflow. */
6161 if (code == MULT_EXPR
6162 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6163 {
6164 op1 = const_binop (code, fold_convert (ctype, op1),
6165 fold_convert (ctype, c));
6166 /* We allow the constant to overflow with wrapping semantics. */
6167 if (op1 == 0
6168 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6169 break;
6170 }
6171 else
6172 break;
6173
6174 /* If we have an unsigned type, we cannot widen the operation since it
6175 will change the result if the original computation overflowed. */
6176 if (TYPE_UNSIGNED (ctype) && ctype != type)
6177 break;
6178
6179 /* If we were able to eliminate our operation from the first side,
6180 apply our operation to the second side and reform the PLUS. */
6181 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6182 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6183
6184 /* The last case is if we are a multiply. In that case, we can
6185 apply the distributive law to commute the multiply and addition
6186 if the multiplication of the constants doesn't overflow
6187 and overflow is defined. With undefined overflow
6188 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6189 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6190 return fold_build2 (tcode, ctype,
6191 fold_build2 (code, ctype,
6192 fold_convert (ctype, op0),
6193 fold_convert (ctype, c)),
6194 op1);
6195
6196 break;
6197
6198 case MULT_EXPR:
6199 /* We have a special case here if we are doing something like
6200 (C * 8) % 4 since we know that's zero. */
6201 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6202 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6203 /* If the multiplication can overflow we cannot optimize this. */
6204 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6205 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6206 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6207 {
6208 *strict_overflow_p = true;
6209 return omit_one_operand (type, integer_zero_node, op0);
6210 }
6211
6212 /* ... fall through ... */
6213
6214 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6215 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6216 /* If we can extract our operation from the LHS, do so and return a
6217 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6218 do something only if the second operand is a constant. */
6219 if (same_p
6220 && (t1 = extract_muldiv (op0, c, code, wide_type,
6221 strict_overflow_p)) != 0)
6222 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6223 fold_convert (ctype, op1));
6224 else if (tcode == MULT_EXPR && code == MULT_EXPR
6225 && (t1 = extract_muldiv (op1, c, code, wide_type,
6226 strict_overflow_p)) != 0)
6227 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6228 fold_convert (ctype, t1));
6229 else if (TREE_CODE (op1) != INTEGER_CST)
6230 return 0;
6231
6232 /* If these are the same operation types, we can associate them
6233 assuming no overflow. */
6234 if (tcode == code)
6235 {
6236 bool overflow_p = false;
6237 bool overflow_mul_p;
6238 signop sign = TYPE_SIGN (ctype);
6239 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6240 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6241 if (overflow_mul_p
6242 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6243 overflow_p = true;
6244 if (!overflow_p)
6245 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6246 wide_int_to_tree (ctype, mul));
6247 }
6248
6249 /* If these operations "cancel" each other, we have the main
6250 optimizations of this pass, which occur when either constant is a
6251 multiple of the other, in which case we replace this with an
6252 operation of either CODE or TCODE.
6253
6254 If we have an unsigned type, we cannot do this since it will change
6255 the result if the original computation overflowed. */
6256 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6257 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6258 || (tcode == MULT_EXPR
6259 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6260 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6261 && code != MULT_EXPR)))
6262 {
6263 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6264 {
6265 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6266 *strict_overflow_p = true;
6267 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6268 fold_convert (ctype,
6269 const_binop (TRUNC_DIV_EXPR,
6270 op1, c)));
6271 }
6272 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6273 {
6274 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6275 *strict_overflow_p = true;
6276 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6277 fold_convert (ctype,
6278 const_binop (TRUNC_DIV_EXPR,
6279 c, op1)));
6280 }
6281 }
6282 break;
6283
6284 default:
6285 break;
6286 }
6287
6288 return 0;
6289 }
6290 \f
6291 /* Return a node which has the indicated constant VALUE (either 0 or
6292 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6293 and is of the indicated TYPE. */
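/* For example, constant_boolean_node (true, integer_type_node) is
   integer_one_node, while for a four-element integer vector type the
   result is the all-ones vector { -1, -1, -1, -1 }.  */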
6294
6295 tree
6296 constant_boolean_node (bool value, tree type)
6297 {
6298 if (type == integer_type_node)
6299 return value ? integer_one_node : integer_zero_node;
6300 else if (type == boolean_type_node)
6301 return value ? boolean_true_node : boolean_false_node;
6302 else if (TREE_CODE (type) == VECTOR_TYPE)
6303 return build_vector_from_val (type,
6304 build_int_cst (TREE_TYPE (type),
6305 value ? -1 : 0));
6306 else
6307 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6308 }
6309
6310
6311 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6312 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6313 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6314 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6315 COND is the first argument to CODE; otherwise (as in the example
6316 given here), it is the second argument. TYPE is the type of the
6317 original expression. Return NULL_TREE if no simplification is
6318 possible. */
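/* For example, with A == 42,
     42 + (b ? 1 : 0)
   becomes
     b ? 43 : 42
   where both arms have folded to constants; the SAVE_EXPR concern
   does not arise because 42 is TREE_CONSTANT.  */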
6319
6320 static tree
6321 fold_binary_op_with_conditional_arg (location_t loc,
6322 enum tree_code code,
6323 tree type, tree op0, tree op1,
6324 tree cond, tree arg, int cond_first_p)
6325 {
6326 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6327 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6328 tree test, true_value, false_value;
6329 tree lhs = NULL_TREE;
6330 tree rhs = NULL_TREE;
6331 enum tree_code cond_code = COND_EXPR;
6332
6333 if (TREE_CODE (cond) == COND_EXPR
6334 || TREE_CODE (cond) == VEC_COND_EXPR)
6335 {
6336 test = TREE_OPERAND (cond, 0);
6337 true_value = TREE_OPERAND (cond, 1);
6338 false_value = TREE_OPERAND (cond, 2);
6339 /* If this operand is an expression that throws, then it does not make
6340 sense to try to perform a logical or arithmetic operation
6341 involving it. */
6342 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6343 lhs = true_value;
6344 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6345 rhs = false_value;
6346 }
6347 else
6348 {
6349 tree testtype = TREE_TYPE (cond);
6350 test = cond;
6351 true_value = constant_boolean_node (true, testtype);
6352 false_value = constant_boolean_node (false, testtype);
6353 }
6354
6355 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6356 cond_code = VEC_COND_EXPR;
6357
6358 /* This transformation is only worthwhile if we don't have to wrap ARG
6359 in a SAVE_EXPR and the operation can be simplified without recursing
6360 on at least one of the branches once it is pushed inside the COND_EXPR. */
6361 if (!TREE_CONSTANT (arg)
6362 && (TREE_SIDE_EFFECTS (arg)
6363 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6364 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6365 return NULL_TREE;
6366
6367 arg = fold_convert_loc (loc, arg_type, arg);
6368 if (lhs == 0)
6369 {
6370 true_value = fold_convert_loc (loc, cond_type, true_value);
6371 if (cond_first_p)
6372 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6373 else
6374 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6375 }
6376 if (rhs == 0)
6377 {
6378 false_value = fold_convert_loc (loc, cond_type, false_value);
6379 if (cond_first_p)
6380 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6381 else
6382 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6383 }
6384
6385 /* Check that we have simplified at least one of the branches. */
6386 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6387 return NULL_TREE;
6388
6389 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6390 }
6391
6392 \f
6393 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6394
6395 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6396 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6397 ADDEND is the same as X.
6398
6399 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6400 and finite. The problematic cases are when X is zero, and its mode
6401 has signed zeros. In the case of rounding towards -infinity,
6402 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6403 modes, X + 0 is not the same as X because -0 + 0 is 0. */
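/* Concretely: with X == -0.0 the sum X + 0.0 is +0.0 under the
   default rounding mode, so X + 0.0 -> X is refused when signed zeros
   matter; X - 0.0 -> X remains valid there but breaks under rounding
   towards -infinity, where 0.0 - 0.0 is -0.0.  */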
6404
6405 bool
6406 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6407 {
6408 if (!real_zerop (addend))
6409 return false;
6410
6411 /* Don't allow the fold with -fsignaling-nans. */
6412 if (HONOR_SNANS (element_mode (type)))
6413 return false;
6414
6415 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6416 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6417 return true;
6418
6419 /* In a vector or complex, we would need to check the sign of all zeros. */
6420 if (TREE_CODE (addend) != REAL_CST)
6421 return false;
6422
6423 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6424 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6425 negate = !negate;
6426
6427 /* The mode has signed zeros, and we have to honor their sign.
6428 In this situation, there is only one case we can return true for.
6429 X - 0 is the same as X unless rounding towards -infinity is
6430 supported. */
6431 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6432 }
6433
6434 /* Subroutine of fold() that checks comparisons of built-in math
6435 functions against real constants.
6436
6437 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6438 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6439 is the type of the result and ARG0 and ARG1 are the operands of the
6440 comparison. ARG1 must be a TREE_REAL_CST.
6441
6442 The function returns the constant folded tree if a simplification
6443 can be made, and NULL_TREE otherwise. */
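/* For example, sqrt(x) > 2.0 folds to x > 4.0, and, when NaNs can be
   ignored, sqrt(x) > -1.0 folds to 1 since a sqrt result is never
   negative.  */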
6444
6445 static tree
6446 fold_mathfn_compare (location_t loc,
6447 enum built_in_function fcode, enum tree_code code,
6448 tree type, tree arg0, tree arg1)
6449 {
6450 REAL_VALUE_TYPE c;
6451
6452 if (BUILTIN_SQRT_P (fcode))
6453 {
6454 tree arg = CALL_EXPR_ARG (arg0, 0);
6455 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6456
6457 c = TREE_REAL_CST (arg1);
6458 if (REAL_VALUE_NEGATIVE (c))
6459 {
6460 /* sqrt(x) < y is always false, if y is negative. */
6461 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6462 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6463
6464 /* sqrt(x) > y is always true, if y is negative and we
6465 don't care about NaNs, i.e. negative values of x. */
6466 if (code == NE_EXPR || !HONOR_NANS (mode))
6467 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6468
6469 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6470 return fold_build2_loc (loc, GE_EXPR, type, arg,
6471 build_real (TREE_TYPE (arg), dconst0));
6472 }
6473 else if (code == GT_EXPR || code == GE_EXPR)
6474 {
6475 REAL_VALUE_TYPE c2;
6476
6477 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6478 real_convert (&c2, mode, &c2);
6479
6480 if (REAL_VALUE_ISINF (c2))
6481 {
6482 /* sqrt(x) > y is x == +Inf, when y is very large. */
6483 if (HONOR_INFINITIES (mode))
6484 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6485 build_real (TREE_TYPE (arg), c2));
6486
6487 /* sqrt(x) > y is always false, when y is very large
6488 and we don't care about infinities. */
6489 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6490 }
6491
6492 /* sqrt(x) > c is the same as x > c*c. */
6493 return fold_build2_loc (loc, code, type, arg,
6494 build_real (TREE_TYPE (arg), c2));
6495 }
6496 else if (code == LT_EXPR || code == LE_EXPR)
6497 {
6498 REAL_VALUE_TYPE c2;
6499
6500 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6501 real_convert (&c2, mode, &c2);
6502
6503 if (REAL_VALUE_ISINF (c2))
6504 {
6505 /* sqrt(x) < y is always true, when y is a very large
6506 value and we don't care about NaNs or Infinities. */
6507 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6508 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6509
6510 /* sqrt(x) < y is x != +Inf when y is very large and we
6511 don't care about NaNs. */
6512 if (! HONOR_NANS (mode))
6513 return fold_build2_loc (loc, NE_EXPR, type, arg,
6514 build_real (TREE_TYPE (arg), c2));
6515
6516 /* sqrt(x) < y is x >= 0 when y is very large and we
6517 don't care about Infinities. */
6518 if (! HONOR_INFINITIES (mode))
6519 return fold_build2_loc (loc, GE_EXPR, type, arg,
6520 build_real (TREE_TYPE (arg), dconst0));
6521
6522 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6523 arg = save_expr (arg);
6524 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6525 fold_build2_loc (loc, GE_EXPR, type, arg,
6526 build_real (TREE_TYPE (arg),
6527 dconst0)),
6528 fold_build2_loc (loc, NE_EXPR, type, arg,
6529 build_real (TREE_TYPE (arg),
6530 c2)));
6531 }
6532
6533 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6534 if (! HONOR_NANS (mode))
6535 return fold_build2_loc (loc, code, type, arg,
6536 build_real (TREE_TYPE (arg), c2));
6537
6538 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6539 arg = save_expr (arg);
6540 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6541 fold_build2_loc (loc, GE_EXPR, type, arg,
6542 build_real (TREE_TYPE (arg),
6543 dconst0)),
6544 fold_build2_loc (loc, code, type, arg,
6545 build_real (TREE_TYPE (arg),
6546 c2)));
6547 }
6548 }
6549
6550 return NULL_TREE;
6551 }
6552
6553 /* Subroutine of fold() that optimizes comparisons against Infinities,
6554 either +Inf or -Inf.
6555
6556 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6557 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6558 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6559
6560 The function returns the constant folded tree if a simplification
6561 can be made, and NULL_TREE otherwise. */
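/* For example, for double x, x > __builtin_inf () folds to 0 unless
   signaling NaNs must be honored, and x < __builtin_inf () folds to
   x <= DBL_MAX.  */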
6562
6563 static tree
6564 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6565 tree arg0, tree arg1)
6566 {
6567 machine_mode mode;
6568 REAL_VALUE_TYPE max;
6569 tree temp;
6570 bool neg;
6571
6572 mode = TYPE_MODE (TREE_TYPE (arg0));
6573
6574 /* For negative infinity swap the sense of the comparison. */
6575 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6576 if (neg)
6577 code = swap_tree_comparison (code);
6578
6579 switch (code)
6580 {
6581 case GT_EXPR:
6582 /* x > +Inf is always false, if we ignore sNaNs. */
6583 if (HONOR_SNANS (mode))
6584 return NULL_TREE;
6585 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6586
6587 case LE_EXPR:
6588 /* x <= +Inf is always true, if we don't care about NaNs. */
6589 if (! HONOR_NANS (mode))
6590 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6591
6592 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6593 arg0 = save_expr (arg0);
6594 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6595
6596 case EQ_EXPR:
6597 case GE_EXPR:
6598 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6599 real_maxval (&max, neg, mode);
6600 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6601 arg0, build_real (TREE_TYPE (arg0), max));
6602
6603 case LT_EXPR:
6604 /* x < +Inf is always equal to x <= DBL_MAX. */
6605 real_maxval (&max, neg, mode);
6606 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6607 arg0, build_real (TREE_TYPE (arg0), max));
6608
6609 case NE_EXPR:
6610 /* x != +Inf is always equal to !(x > DBL_MAX). */
6611 real_maxval (&max, neg, mode);
6612 if (! HONOR_NANS (mode))
6613 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6614 arg0, build_real (TREE_TYPE (arg0), max));
6615
6616 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6617 arg0, build_real (TREE_TYPE (arg0), max));
6618 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6619
6620 default:
6621 break;
6622 }
6623
6624 return NULL_TREE;
6625 }
6626
6627 /* Subroutine of fold() that optimizes comparisons of a division by
6628 a nonzero integer constant against an integer constant, i.e.
6629 X/C1 op C2.
6630
6631 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6632 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6633 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6634
6635 The function returns the constant folded tree if a simplification
6636 can be made, and NULL_TREE otherwise. */
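/* For example, for unsigned x, x / 3 == 2 holds exactly for x in
   [6, 8] and folds to a range check (typically emitted as
   x - 6 <= 2 in the unsigned type), while x / 3 > 2 folds to
   x > 8.  */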
6637
6638 static tree
6639 fold_div_compare (location_t loc,
6640 enum tree_code code, tree type, tree arg0, tree arg1)
6641 {
6642 tree prod, tmp, hi, lo;
6643 tree arg00 = TREE_OPERAND (arg0, 0);
6644 tree arg01 = TREE_OPERAND (arg0, 1);
6645 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6646 bool neg_overflow = false;
6647 bool overflow;
6648
6649 /* We have to do this the hard way to detect unsigned overflow.
6650 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6651 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6652 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6653 neg_overflow = false;
6654
6655 if (sign == UNSIGNED)
6656 {
6657 tmp = int_const_binop (MINUS_EXPR, arg01,
6658 build_int_cst (TREE_TYPE (arg01), 1));
6659 lo = prod;
6660
6661 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6662 val = wi::add (prod, tmp, sign, &overflow);
6663 hi = force_fit_type (TREE_TYPE (arg00), val,
6664 -1, overflow | TREE_OVERFLOW (prod));
6665 }
6666 else if (tree_int_cst_sgn (arg01) >= 0)
6667 {
6668 tmp = int_const_binop (MINUS_EXPR, arg01,
6669 build_int_cst (TREE_TYPE (arg01), 1));
6670 switch (tree_int_cst_sgn (arg1))
6671 {
6672 case -1:
6673 neg_overflow = true;
6674 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6675 hi = prod;
6676 break;
6677
6678 case 0:
6679 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6680 hi = tmp;
6681 break;
6682
6683 case 1:
6684 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6685 lo = prod;
6686 break;
6687
6688 default:
6689 gcc_unreachable ();
6690 }
6691 }
6692 else
6693 {
6694 /* A negative divisor reverses the relational operators. */
6695 code = swap_tree_comparison (code);
6696
6697 tmp = int_const_binop (PLUS_EXPR, arg01,
6698 build_int_cst (TREE_TYPE (arg01), 1));
6699 switch (tree_int_cst_sgn (arg1))
6700 {
6701 case -1:
6702 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6703 lo = prod;
6704 break;
6705
6706 case 0:
6707 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6708 lo = tmp;
6709 break;
6710
6711 case 1:
6712 neg_overflow = true;
6713 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6714 hi = prod;
6715 break;
6716
6717 default:
6718 gcc_unreachable ();
6719 }
6720 }
6721
6722 switch (code)
6723 {
6724 case EQ_EXPR:
6725 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6726 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6727 if (TREE_OVERFLOW (hi))
6728 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6729 if (TREE_OVERFLOW (lo))
6730 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6731 return build_range_check (loc, type, arg00, 1, lo, hi);
6732
6733 case NE_EXPR:
6734 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6735 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6736 if (TREE_OVERFLOW (hi))
6737 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6738 if (TREE_OVERFLOW (lo))
6739 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6740 return build_range_check (loc, type, arg00, 0, lo, hi);
6741
6742 case LT_EXPR:
6743 if (TREE_OVERFLOW (lo))
6744 {
6745 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6746 return omit_one_operand_loc (loc, type, tmp, arg00);
6747 }
6748 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6749
6750 case LE_EXPR:
6751 if (TREE_OVERFLOW (hi))
6752 {
6753 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6754 return omit_one_operand_loc (loc, type, tmp, arg00);
6755 }
6756 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6757
6758 case GT_EXPR:
6759 if (TREE_OVERFLOW (hi))
6760 {
6761 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6762 return omit_one_operand_loc (loc, type, tmp, arg00);
6763 }
6764 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6765
6766 case GE_EXPR:
6767 if (TREE_OVERFLOW (lo))
6768 {
6769 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6770 return omit_one_operand_loc (loc, type, tmp, arg00);
6771 }
6772 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6773
6774 default:
6775 break;
6776 }
6777
6778 return NULL_TREE;
6779 }
6780
6781
6782 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6783 equality/inequality test, then return a simplified form of the test
6784 using a sign test. Otherwise return NULL. TYPE is the desired
6785 result type. */
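/* For example, if A has type signed char and the AND is carried out
   in that 8-bit type, (A & 0x80) != 0 tests exactly the sign bit and
   becomes A < 0, while (A & 0x80) == 0 becomes A >= 0.  */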
6786
6787 static tree
6788 fold_single_bit_test_into_sign_test (location_t loc,
6789 enum tree_code code, tree arg0, tree arg1,
6790 tree result_type)
6791 {
6792 /* If this is testing a single bit, we can optimize the test. */
6793 if ((code == NE_EXPR || code == EQ_EXPR)
6794 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6795 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6796 {
6797 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6798 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6799 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6800
6801 if (arg00 != NULL_TREE
6802 /* This is only a win if casting to a signed type is cheap,
6803 i.e. when arg00's type is not a partial mode. */
6804 && TYPE_PRECISION (TREE_TYPE (arg00))
6805 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6806 {
6807 tree stype = signed_type_for (TREE_TYPE (arg00));
6808 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6809 result_type,
6810 fold_convert_loc (loc, stype, arg00),
6811 build_int_cst (stype, 0));
6812 }
6813 }
6814
6815 return NULL_TREE;
6816 }
6817
6818 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6819 equality/inequality test, then return a simplified form of
6820 the test using shifts and logical operations. Otherwise return
6821 NULL. TYPE is the desired result type. */
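/* For example, (A & 8) != 0 becomes ((A >> 3) & 1) and
   (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), modulo conversions to
   the intermediate type chosen below.  */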
6822
6823 tree
6824 fold_single_bit_test (location_t loc, enum tree_code code,
6825 tree arg0, tree arg1, tree result_type)
6826 {
6827 /* If this is testing a single bit, we can optimize the test. */
6828 if ((code == NE_EXPR || code == EQ_EXPR)
6829 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6830 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6831 {
6832 tree inner = TREE_OPERAND (arg0, 0);
6833 tree type = TREE_TYPE (arg0);
6834 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6835 machine_mode operand_mode = TYPE_MODE (type);
6836 int ops_unsigned;
6837 tree signed_type, unsigned_type, intermediate_type;
6838 tree tem, one;
6839
6840 /* First, see if we can fold the single bit test into a sign-bit
6841 test. */
6842 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6843 result_type);
6844 if (tem)
6845 return tem;
6846
6847 /* Otherwise we have (A & C) != 0 where C is a single bit,
6848 convert that into ((A >> C2) & 1), where C2 = log2(C).
6849 Similarly for (A & C) == 0. */
6850
6851 /* If INNER is a right shift by a constant and it plus BITNUM does
6852 not overflow, adjust BITNUM and INNER. */
6853 if (TREE_CODE (inner) == RSHIFT_EXPR
6854 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6855 && bitnum < TYPE_PRECISION (type)
6856 && wi::ltu_p (TREE_OPERAND (inner, 1),
6857 TYPE_PRECISION (type) - bitnum))
6858 {
6859 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6860 inner = TREE_OPERAND (inner, 0);
6861 }
6862
6863 /* If we are going to be able to omit the AND below, we must do our
6864 operations as unsigned. If we must use the AND, we have a choice.
6865 Normally unsigned is faster, but for some machines signed is. */
6866 #ifdef LOAD_EXTEND_OP
6867 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6868 && !flag_syntax_only) ? 0 : 1;
6869 #else
6870 ops_unsigned = 1;
6871 #endif
6872
6873 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6874 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6875 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6876 inner = fold_convert_loc (loc, intermediate_type, inner);
6877
6878 if (bitnum != 0)
6879 inner = build2 (RSHIFT_EXPR, intermediate_type,
6880 inner, size_int (bitnum));
6881
6882 one = build_int_cst (intermediate_type, 1);
6883
6884 if (code == EQ_EXPR)
6885 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6886
6887 /* Put the AND last so it can combine with more things. */
6888 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6889
6890 /* Make sure to return the proper type. */
6891 inner = fold_convert_loc (loc, result_type, inner);
6892
6893 return inner;
6894 }
6895 return NULL_TREE;
6896 }
6897
6898 /* Check whether we are allowed to reorder operands arg0 and arg1,
6899 such that the evaluation of arg1 occurs before arg0. */
6900
6901 static bool
6902 reorder_operands_p (const_tree arg0, const_tree arg1)
6903 {
6904 if (! flag_evaluation_order)
6905 return true;
6906 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6907 return true;
6908 return ! TREE_SIDE_EFFECTS (arg0)
6909 && ! TREE_SIDE_EFFECTS (arg1);
6910 }
6911
6912 /* Test whether it is preferable to swap two operands, ARG0 and
6913 ARG1, for example because ARG0 is an integer constant and ARG1
6914 isn't. If REORDER is true, only recommend swapping if we can
6915 evaluate the operands in reverse order. */
6916
6917 bool
6918 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6919 {
6920 if (CONSTANT_CLASS_P (arg1))
6921 return 0;
6922 if (CONSTANT_CLASS_P (arg0))
6923 return 1;
6924
6925 STRIP_NOPS (arg0);
6926 STRIP_NOPS (arg1);
6927
6928 if (TREE_CONSTANT (arg1))
6929 return 0;
6930 if (TREE_CONSTANT (arg0))
6931 return 1;
6932
6933 if (reorder && flag_evaluation_order
6934 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6935 return 0;
6936
6937 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6938 for commutative and comparison operators. Ensuring a canonical
6939 form allows the optimizers to find additional redundancies without
6940 having to explicitly check for both orderings. */
6941 if (TREE_CODE (arg0) == SSA_NAME
6942 && TREE_CODE (arg1) == SSA_NAME
6943 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6944 return 1;
6945
6946 /* Put SSA_NAMEs last. */
6947 if (TREE_CODE (arg1) == SSA_NAME)
6948 return 0;
6949 if (TREE_CODE (arg0) == SSA_NAME)
6950 return 1;
6951
6952 /* Put variables last. */
6953 if (DECL_P (arg1))
6954 return 0;
6955 if (DECL_P (arg0))
6956 return 1;
6957
6958 return 0;
6959 }
6960
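/* Editor's sketch of how the predicate above is typically used: callers
   canonicalize the operands of commutative operations so that constants,
   DECLs and SSA_NAMEs land in a predictable position.  The helper below
   is hypothetical and only illustrates the calling convention.  */

static void
example_canonicalize_operands (tree *op0, tree *op1)
{
  if (tree_swap_operands_p (*op0, *op1, false))
    {
      tree tmp = *op0;
      *op0 = *op1;
      *op1 = tmp;
    }
}
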
6961 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6962 ARG0 is extended to a wider type. */
6963
6964 static tree
6965 fold_widened_comparison (location_t loc, enum tree_code code,
6966 tree type, tree arg0, tree arg1)
6967 {
6968 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6969 tree arg1_unw;
6970 tree shorter_type, outer_type;
6971 tree min, max;
6972 bool above, below;
6973
6974 if (arg0_unw == arg0)
6975 return NULL_TREE;
6976 shorter_type = TREE_TYPE (arg0_unw);
6977
6978 #ifdef HAVE_canonicalize_funcptr_for_compare
6979 /* Disable this optimization if we're casting a function pointer
6980 type on targets that require function pointer canonicalization. */
6981 if (HAVE_canonicalize_funcptr_for_compare
6982 && TREE_CODE (shorter_type) == POINTER_TYPE
6983 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6984 return NULL_TREE;
6985 #endif
6986
6987 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6988 return NULL_TREE;
6989
6990 arg1_unw = get_unwidened (arg1, NULL_TREE);
6991
6992 /* If possible, express the comparison in the shorter mode. */
6993 if ((code == EQ_EXPR || code == NE_EXPR
6994 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6995 && (TREE_TYPE (arg1_unw) == shorter_type
6996 || ((TYPE_PRECISION (shorter_type)
6997 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6998 && (TYPE_UNSIGNED (shorter_type)
6999 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7000 || (TREE_CODE (arg1_unw) == INTEGER_CST
7001 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7002 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7003 && int_fits_type_p (arg1_unw, shorter_type))))
7004 return fold_build2_loc (loc, code, type, arg0_unw,
7005 fold_convert_loc (loc, shorter_type, arg1_unw));
7006
7007 if (TREE_CODE (arg1_unw) != INTEGER_CST
7008 || TREE_CODE (shorter_type) != INTEGER_TYPE
7009 || !int_fits_type_p (arg1_unw, shorter_type))
7010 return NULL_TREE;
7011
7012 /* If we are comparing with an integer that does not fit into the range
7013 of the shorter type, the result is known. */
7014 outer_type = TREE_TYPE (arg1_unw);
7015 min = lower_bound_in_type (outer_type, shorter_type);
7016 max = upper_bound_in_type (outer_type, shorter_type);
7017
7018 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7019 max, arg1_unw));
7020 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7021 arg1_unw, min));
7022
7023 switch (code)
7024 {
7025 case EQ_EXPR:
7026 if (above || below)
7027 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7028 break;
7029
7030 case NE_EXPR:
7031 if (above || below)
7032 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7033 break;
7034
7035 case LT_EXPR:
7036 case LE_EXPR:
7037 if (above)
7038 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7039 else if (below)
7040 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7041 break;

7042 case GT_EXPR:
7043 case GE_EXPR:
7044 if (above)
7045 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7046 else if (below)
7047 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7048 break;

7049 default:
7050 break;
7051 }
7052
7053 return NULL_TREE;
7054 }
7055
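/* Editor's sketch of the two outcomes above at the source level, for
   ARG0 = (int) c with c of type unsigned char.  The functions are
   hypothetical stand-ins for the folded forms.  */

static int
example_narrowed_compare (unsigned char c)
{
  /* (int) c == 130 folds to c == (unsigned char) 130, since 130 fits
     in the shorter type.  */
  return c == (unsigned char) 130;
}

static int
example_known_compare (unsigned char c)
{
  /* (int) c == 300 folds to constant 0: 300 is above UCHAR_MAX, so the
     comparison can never hold.  c is side-effect free here, so it can
     be dropped via omit_one_operand.  */
  (void) c;
  return 0;
}
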
7056 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7057 ARG0 is a conversion that changes only the signedness. */
7058
7059 static tree
7060 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7061 tree arg0, tree arg1)
7062 {
7063 tree arg0_inner;
7064 tree inner_type, outer_type;
7065
7066 if (!CONVERT_EXPR_P (arg0))
7067 return NULL_TREE;
7068
7069 outer_type = TREE_TYPE (arg0);
7070 arg0_inner = TREE_OPERAND (arg0, 0);
7071 inner_type = TREE_TYPE (arg0_inner);
7072
7073 #ifdef HAVE_canonicalize_funcptr_for_compare
7074 /* Disable this optimization if we're casting a function pointer
7075 type on targets that require function pointer canonicalization. */
7076 if (HAVE_canonicalize_funcptr_for_compare
7077 && TREE_CODE (inner_type) == POINTER_TYPE
7078 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7079 return NULL_TREE;
7080 #endif
7081
7082 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7083 return NULL_TREE;
7084
7085 if (TREE_CODE (arg1) != INTEGER_CST
7086 && !(CONVERT_EXPR_P (arg1)
7087 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7088 return NULL_TREE;
7089
7090 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7091 && code != NE_EXPR
7092 && code != EQ_EXPR)
7093 return NULL_TREE;
7094
7095 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7096 return NULL_TREE;
7097
7098 if (TREE_CODE (arg1) == INTEGER_CST)
7099 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7100 TREE_OVERFLOW (arg1));
7101 else
7102 arg1 = fold_convert_loc (loc, inner_type, arg1);
7103
7104 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7105 }
7106
7107
7108 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7109 means A >= Y && A != MAX, but in this case we know that
7110 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7111
7112 static tree
7113 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7114 {
7115 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7116
7117 if (TREE_CODE (bound) == LT_EXPR)
7118 a = TREE_OPERAND (bound, 0);
7119 else if (TREE_CODE (bound) == GT_EXPR)
7120 a = TREE_OPERAND (bound, 1);
7121 else
7122 return NULL_TREE;
7123
7124 typea = TREE_TYPE (a);
7125 if (!INTEGRAL_TYPE_P (typea)
7126 && !POINTER_TYPE_P (typea))
7127 return NULL_TREE;
7128
7129 if (TREE_CODE (ineq) == LT_EXPR)
7130 {
7131 a1 = TREE_OPERAND (ineq, 1);
7132 y = TREE_OPERAND (ineq, 0);
7133 }
7134 else if (TREE_CODE (ineq) == GT_EXPR)
7135 {
7136 a1 = TREE_OPERAND (ineq, 0);
7137 y = TREE_OPERAND (ineq, 1);
7138 }
7139 else
7140 return NULL_TREE;
7141
7142 if (TREE_TYPE (a1) != typea)
7143 return NULL_TREE;
7144
7145 if (POINTER_TYPE_P (typea))
7146 {
7147 /* Convert the pointers to signed integers before taking the difference. */
7148 tree ta = fold_convert_loc (loc, ssizetype, a);
7149 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7150 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7151 }
7152 else
7153 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7154
7155 if (!diff || !integer_onep (diff))
7156 return NULL_TREE;
7157
7158 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7159 }
7160
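/* Editor's sketch: with BOUND = (a < x) and INEQ = (a + 1 > y), the
   transformation above rewrites the conjunction

     a < x && a + 1 > y   -->   a < x && a >= y

   which is only valid because a < x rules out a == TYPE_MAX, the one
   value for which a + 1 would wrap.  Hypothetical folded form: */

static int
example_nonsharp_ineq (int a, int x, int y)
{
  return a < x && a >= y;
}
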
7161 /* Fold a sum or difference in which at least one operand is a multiplication.
7162 Returns the folded tree or NULL_TREE if no simplification could be made. */
7163
7164 static tree
7165 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7166 tree arg0, tree arg1)
7167 {
7168 tree arg00, arg01, arg10, arg11;
7169 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7170
7171 /* (A * C) +- (B * C) -> (A+-B) * C.
7172 (A * C) +- A -> A * (C+-1).
7173 We are most concerned about the case where C is a constant,
7174 but other combinations show up during loop reduction. Since
7175 it is not difficult, try all four possibilities. */
7176
7177 if (TREE_CODE (arg0) == MULT_EXPR)
7178 {
7179 arg00 = TREE_OPERAND (arg0, 0);
7180 arg01 = TREE_OPERAND (arg0, 1);
7181 }
7182 else if (TREE_CODE (arg0) == INTEGER_CST)
7183 {
7184 arg00 = build_one_cst (type);
7185 arg01 = arg0;
7186 }
7187 else
7188 {
7189 /* We cannot generate constant 1 for fract. */
7190 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7191 return NULL_TREE;
7192 arg00 = arg0;
7193 arg01 = build_one_cst (type);
7194 }
7195 if (TREE_CODE (arg1) == MULT_EXPR)
7196 {
7197 arg10 = TREE_OPERAND (arg1, 0);
7198 arg11 = TREE_OPERAND (arg1, 1);
7199 }
7200 else if (TREE_CODE (arg1) == INTEGER_CST)
7201 {
7202 arg10 = build_one_cst (type);
7203 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7204 the purpose of this canonicalization. */
7205 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7206 && negate_expr_p (arg1)
7207 && code == PLUS_EXPR)
7208 {
7209 arg11 = negate_expr (arg1);
7210 code = MINUS_EXPR;
7211 }
7212 else
7213 arg11 = arg1;
7214 }
7215 else
7216 {
7217 /* We cannot generate constant 1 for fract. */
7218 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7219 return NULL_TREE;
7220 arg10 = arg1;
7221 arg11 = build_one_cst (type);
7222 }
7223 same = NULL_TREE;
7224
7225 if (operand_equal_p (arg01, arg11, 0))
7226 same = arg01, alt0 = arg00, alt1 = arg10;
7227 else if (operand_equal_p (arg00, arg10, 0))
7228 same = arg00, alt0 = arg01, alt1 = arg11;
7229 else if (operand_equal_p (arg00, arg11, 0))
7230 same = arg00, alt0 = arg01, alt1 = arg10;
7231 else if (operand_equal_p (arg01, arg10, 0))
7232 same = arg01, alt0 = arg00, alt1 = arg11;
7233
7234 /* No identical multiplicands; see if we can find a common
7235 power-of-two factor in non-power-of-two multiplies. This
7236 can help in multi-dimensional array access. */
7237 else if (tree_fits_shwi_p (arg01)
7238 && tree_fits_shwi_p (arg11))
7239 {
7240 HOST_WIDE_INT int01, int11, tmp;
7241 bool swap = false;
7242 tree maybe_same;
7243 int01 = tree_to_shwi (arg01);
7244 int11 = tree_to_shwi (arg11);
7245
7246 /* Move min of absolute values to int11. */
7247 if (absu_hwi (int01) < absu_hwi (int11))
7248 {
7249 tmp = int01, int01 = int11, int11 = tmp;
7250 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7251 maybe_same = arg01;
7252 swap = true;
7253 }
7254 else
7255 maybe_same = arg11;
7256
7257 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7258 /* The remainder should not be a constant, otherwise we
7259 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7260 increased the number of multiplications necessary. */
7261 && TREE_CODE (arg10) != INTEGER_CST)
7262 {
7263 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7264 build_int_cst (TREE_TYPE (arg00),
7265 int01 / int11));
7266 alt1 = arg10;
7267 same = maybe_same;
7268 if (swap)
7269 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7270 }
7271 }
7272
7273 if (same)
7274 return fold_build2_loc (loc, MULT_EXPR, type,
7275 fold_build2_loc (loc, code, type,
7276 fold_convert_loc (loc, type, alt0),
7277 fold_convert_loc (loc, type, alt1)),
7278 fold_convert_loc (loc, type, same));
7279
7280 return NULL_TREE;
7281 }
7282
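/* Editor's sketch of the two main shapes folded above (hypothetical
   functions showing the folded results):

     a * 12 + b * 12  -->  (a + b) * 12      identical multiplicands
     i * 8 + j * 4    -->  (i * 2 + j) * 4   common power-of-two factor,
					     as arises in multi-dimensional
					     array index arithmetic.  */

static int
example_common_multiplicand (int a, int b)
{
  return (a + b) * 12;
}

static int
example_pow2_factor (int i, int j)
{
  return (i * 2 + j) * 4;
}
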
7283 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7284 specified by EXPR into the buffer PTR of length LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero
7286 upon failure. */
7287
7288 static int
7289 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7290 {
7291 tree type = TREE_TYPE (expr);
7292 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7293 int byte, offset, word, words;
7294 unsigned char value;
7295
7296 if ((off == -1 && total_bytes > len)
7297 || off >= total_bytes)
7298 return 0;
7299 if (off == -1)
7300 off = 0;
7301 words = total_bytes / UNITS_PER_WORD;
7302
7303 for (byte = 0; byte < total_bytes; byte++)
7304 {
7305 int bitpos = byte * BITS_PER_UNIT;
7306 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7307 number of bytes. */
7308 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7309
7310 if (total_bytes > UNITS_PER_WORD)
7311 {
7312 word = byte / UNITS_PER_WORD;
7313 if (WORDS_BIG_ENDIAN)
7314 word = (words - 1) - word;
7315 offset = word * UNITS_PER_WORD;
7316 if (BYTES_BIG_ENDIAN)
7317 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7318 else
7319 offset += byte % UNITS_PER_WORD;
7320 }
7321 else
7322 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7323 if (offset >= off
7324 && offset - off < len)
7325 ptr[offset - off] = value;
7326 }
7327 return MIN (len, total_bytes - off);
7328 }
7329
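/* Editor's sketch, assuming 8-bit host bytes: the core of the loop above
   for a 4-byte value that fits in a single word.  Byte B of the value is
   (x >> (B * 8)) & 0xff, and the target byte order decides where it
   lands in the buffer.  */

static void
example_encode_u32 (unsigned int x, unsigned char *ptr, int big_endian)
{
  int byte;
  for (byte = 0; byte < 4; byte++)
    {
      unsigned char value = (x >> (byte * 8)) & 0xff;
      int offset = big_endian ? 3 - byte : byte;
      ptr[offset] = value;
    }
}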
7330
7331 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7332 specified by EXPR into the buffer PTR of length LEN bytes.
7333 Return the number of bytes placed in the buffer, or zero
7334 upon failure. */
7335
7336 static int
7337 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7338 {
7339 tree type = TREE_TYPE (expr);
7340 machine_mode mode = TYPE_MODE (type);
7341 int total_bytes = GET_MODE_SIZE (mode);
7342 FIXED_VALUE_TYPE value;
7343 tree i_value, i_type;
7344
7345 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7346 return 0;
7347
7348 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7349
7350 if (NULL_TREE == i_type
7351 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7352 return 0;
7353
7354 value = TREE_FIXED_CST (expr);
7355 i_value = double_int_to_tree (i_type, value.data);
7356
7357 return native_encode_int (i_value, ptr, len, off);
7358 }
7359
7360
7361 /* Subroutine of native_encode_expr. Encode the REAL_CST
7362 specified by EXPR into the buffer PTR of length LEN bytes.
7363 Return the number of bytes placed in the buffer, or zero
7364 upon failure. */
7365
7366 static int
7367 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7368 {
7369 tree type = TREE_TYPE (expr);
7370 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7371 int byte, offset, word, words, bitpos;
7372 unsigned char value;
7373
7374 /* There are always 32 bits in each long, no matter the size of
7375 the host's long. We handle floating point representations with
7376 up to 192 bits. */
7377 long tmp[6];
7378
7379 if ((off == -1 && total_bytes > len)
7380 || off >= total_bytes)
7381 return 0;
7382 if (off == -1)
7383 off = 0;
7384 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7385
7386 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7387
7388 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7389 bitpos += BITS_PER_UNIT)
7390 {
7391 byte = (bitpos / BITS_PER_UNIT) & 3;
7392 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7393
7394 if (UNITS_PER_WORD < 4)
7395 {
7396 word = byte / UNITS_PER_WORD;
7397 if (WORDS_BIG_ENDIAN)
7398 word = (words - 1) - word;
7399 offset = word * UNITS_PER_WORD;
7400 if (BYTES_BIG_ENDIAN)
7401 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7402 else
7403 offset += byte % UNITS_PER_WORD;
7404 }
7405 else
7406 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7407 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7408 if (offset >= off
7409 && offset - off < len)
7410 ptr[offset - off] = value;
7411 }
7412 return MIN (len, total_bytes - off);
7413 }
7414
7415 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7416 specified by EXPR into the buffer PTR of length LEN bytes.
7417 Return the number of bytes placed in the buffer, or zero
7418 upon failure. */
7419
7420 static int
7421 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7422 {
7423 int rsize, isize;
7424 tree part;
7425
7426 part = TREE_REALPART (expr);
7427 rsize = native_encode_expr (part, ptr, len, off);
7428 if (off == -1
7429 && rsize == 0)
7430 return 0;
7431 part = TREE_IMAGPART (expr);
7432 if (off != -1)
7433 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7434 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7435 if (off == -1
7436 && isize != rsize)
7437 return 0;
7438 return rsize + isize;
7439 }
7440
7441
7442 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7443 specified by EXPR into the buffer PTR of length LEN bytes.
7444 Return the number of bytes placed in the buffer, or zero
7445 upon failure. */
7446
7447 static int
7448 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7449 {
7450 unsigned i, count;
7451 int size, offset;
7452 tree itype, elem;
7453
7454 offset = 0;
7455 count = VECTOR_CST_NELTS (expr);
7456 itype = TREE_TYPE (TREE_TYPE (expr));
7457 size = GET_MODE_SIZE (TYPE_MODE (itype));
7458 for (i = 0; i < count; i++)
7459 {
7460 if (off >= size)
7461 {
7462 off -= size;
7463 continue;
7464 }
7465 elem = VECTOR_CST_ELT (expr, i);
7466 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7467 if ((off == -1 && res != size)
7468 || res == 0)
7469 return 0;
7470 offset += res;
7471 if (offset >= len)
7472 return offset;
7473 if (off != -1)
7474 off = 0;
7475 }
7476 return offset;
7477 }
7478
7479
7480 /* Subroutine of native_encode_expr. Encode the STRING_CST
7481 specified by EXPR into the buffer PTR of length LEN bytes.
7482 Return the number of bytes placed in the buffer, or zero
7483 upon failure. */
7484
7485 static int
7486 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7487 {
7488 tree type = TREE_TYPE (expr);
7489 HOST_WIDE_INT total_bytes;
7490
7491 if (TREE_CODE (type) != ARRAY_TYPE
7492 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7493 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7494 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7495 return 0;
7496 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7497 if ((off == -1 && total_bytes > len)
7498 || off >= total_bytes)
7499 return 0;
7500 if (off == -1)
7501 off = 0;
7502 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7503 {
7504 int written = 0;
7505 if (off < TREE_STRING_LENGTH (expr))
7506 {
7507 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7508 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7509 }
7510 memset (ptr + written, 0,
7511 MIN (total_bytes - written, len - written));
7512 }
7513 else
7514 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7515 return MIN (total_bytes - off, len);
7516 }
7517
7518
7519 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7520 FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7521 buffer PTR of length LEN bytes. If OFF is not -1 then start
7522 the encoding at byte offset OFF and encode at most LEN bytes.
7523 Return the number of bytes placed in the buffer, or zero upon failure. */
7524
7525 int
7526 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7527 {
7528 switch (TREE_CODE (expr))
7529 {
7530 case INTEGER_CST:
7531 return native_encode_int (expr, ptr, len, off);
7532
7533 case REAL_CST:
7534 return native_encode_real (expr, ptr, len, off);
7535
7536 case FIXED_CST:
7537 return native_encode_fixed (expr, ptr, len, off);
7538
7539 case COMPLEX_CST:
7540 return native_encode_complex (expr, ptr, len, off);
7541
7542 case VECTOR_CST:
7543 return native_encode_vector (expr, ptr, len, off);
7544
7545 case STRING_CST:
7546 return native_encode_string (expr, ptr, len, off);
7547
7548 default:
7549 return 0;
7550 }
7551 }
7552
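/* Editor's sketch of a typical call: encode a whole constant into a
   local buffer, treating a zero return as "not encodable".  Passing
   OFF == -1 requests the complete value.  The wrapper is hypothetical. */

static bool
example_try_encode (const_tree cst, unsigned char *buf, int buf_len)
{
  return native_encode_expr (cst, buf, buf_len, -1) != 0;
}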
7553
7554 /* Subroutine of native_interpret_expr. Interpret the contents of
7555 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7556 If the buffer cannot be interpreted, return NULL_TREE. */
7557
7558 static tree
7559 native_interpret_int (tree type, const unsigned char *ptr, int len)
7560 {
7561 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7562
7563 if (total_bytes > len
7564 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7565 return NULL_TREE;
7566
7567 wide_int result = wi::from_buffer (ptr, total_bytes);
7568
7569 return wide_int_to_tree (type, result);
7570 }
7571
7572
7573 /* Subroutine of native_interpret_expr. Interpret the contents of
7574 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7575 If the buffer cannot be interpreted, return NULL_TREE. */
7576
7577 static tree
7578 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7579 {
7580 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7581 double_int result;
7582 FIXED_VALUE_TYPE fixed_value;
7583
7584 if (total_bytes > len
7585 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7586 return NULL_TREE;
7587
7588 result = double_int::from_buffer (ptr, total_bytes);
7589 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7590
7591 return build_fixed (type, fixed_value);
7592 }
7593
7594
7595 /* Subroutine of native_interpret_expr. Interpret the contents of
7596 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7597 If the buffer cannot be interpreted, return NULL_TREE. */
7598
7599 static tree
7600 native_interpret_real (tree type, const unsigned char *ptr, int len)
7601 {
7602 machine_mode mode = TYPE_MODE (type);
7603 int total_bytes = GET_MODE_SIZE (mode);
7604 int byte, offset, word, words, bitpos;
7605 unsigned char value;
7606 /* There are always 32 bits in each long, no matter the size of
7607 the host's long. We handle floating point representations with
7608 up to 192 bits. */
7609 REAL_VALUE_TYPE r;
7610 long tmp[6];
7611
7612 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7613 if (total_bytes > len || total_bytes > 24)
7614 return NULL_TREE;
7615 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7616
7617 memset (tmp, 0, sizeof (tmp));
7618 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7619 bitpos += BITS_PER_UNIT)
7620 {
7621 byte = (bitpos / BITS_PER_UNIT) & 3;
7622 if (UNITS_PER_WORD < 4)
7623 {
7624 word = byte / UNITS_PER_WORD;
7625 if (WORDS_BIG_ENDIAN)
7626 word = (words - 1) - word;
7627 offset = word * UNITS_PER_WORD;
7628 if (BYTES_BIG_ENDIAN)
7629 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7630 else
7631 offset += byte % UNITS_PER_WORD;
7632 }
7633 else
7634 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7635 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7636
7637 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7638 }
7639
7640 real_from_target (&r, tmp, mode);
7641 return build_real (type, r);
7642 }
7643
7644
7645 /* Subroutine of native_interpret_expr. Interpret the contents of
7646 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7647 If the buffer cannot be interpreted, return NULL_TREE. */
7648
7649 static tree
7650 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7651 {
7652 tree etype, rpart, ipart;
7653 int size;
7654
7655 etype = TREE_TYPE (type);
7656 size = GET_MODE_SIZE (TYPE_MODE (etype));
7657 if (size * 2 > len)
7658 return NULL_TREE;
7659 rpart = native_interpret_expr (etype, ptr, size);
7660 if (!rpart)
7661 return NULL_TREE;
7662 ipart = native_interpret_expr (etype, ptr+size, size);
7663 if (!ipart)
7664 return NULL_TREE;
7665 return build_complex (type, rpart, ipart);
7666 }
7667
7668
7669 /* Subroutine of native_interpret_expr. Interpret the contents of
7670 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7671 If the buffer cannot be interpreted, return NULL_TREE. */
7672
7673 static tree
7674 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7675 {
7676 tree etype, elem;
7677 int i, size, count;
7678 tree *elements;
7679
7680 etype = TREE_TYPE (type);
7681 size = GET_MODE_SIZE (TYPE_MODE (etype));
7682 count = TYPE_VECTOR_SUBPARTS (type);
7683 if (size * count > len)
7684 return NULL_TREE;
7685
7686 elements = XALLOCAVEC (tree, count);
7687 for (i = count - 1; i >= 0; i--)
7688 {
7689 elem = native_interpret_expr (etype, ptr+(i*size), size);
7690 if (!elem)
7691 return NULL_TREE;
7692 elements[i] = elem;
7693 }
7694 return build_vector (type, elements);
7695 }
7696
7697
7698 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7699 the buffer PTR of length LEN as a constant of type TYPE. For
7700 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7701 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7702 return NULL_TREE. */
7703
7704 tree
7705 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7706 {
7707 switch (TREE_CODE (type))
7708 {
7709 case INTEGER_TYPE:
7710 case ENUMERAL_TYPE:
7711 case BOOLEAN_TYPE:
7712 case POINTER_TYPE:
7713 case REFERENCE_TYPE:
7714 return native_interpret_int (type, ptr, len);
7715
7716 case REAL_TYPE:
7717 return native_interpret_real (type, ptr, len);
7718
7719 case FIXED_POINT_TYPE:
7720 return native_interpret_fixed (type, ptr, len);
7721
7722 case COMPLEX_TYPE:
7723 return native_interpret_complex (type, ptr, len);
7724
7725 case VECTOR_TYPE:
7726 return native_interpret_vector (type, ptr, len);
7727
7728 default:
7729 return NULL_TREE;
7730 }
7731 }
7732
7733 /* Returns true if we can interpret the contents of a native encoding
7734 as TYPE. */
7735
7736 static bool
7737 can_native_interpret_type_p (tree type)
7738 {
7739 switch (TREE_CODE (type))
7740 {
7741 case INTEGER_TYPE:
7742 case ENUMERAL_TYPE:
7743 case BOOLEAN_TYPE:
7744 case POINTER_TYPE:
7745 case REFERENCE_TYPE:
7746 case FIXED_POINT_TYPE:
7747 case REAL_TYPE:
7748 case COMPLEX_TYPE:
7749 case VECTOR_TYPE:
7750 return true;
7751 default:
7752 return false;
7753 }
7754 }
7755
7756 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7757 TYPE at compile-time. If we're unable to perform the conversion
7758 return NULL_TREE. */
7759
7760 static tree
7761 fold_view_convert_expr (tree type, tree expr)
7762 {
7763 /* We support up to 512-bit values (for V8DFmode). */
7764 unsigned char buffer[64];
7765 int len;
7766
7767 /* Check that the host and target are sane. */
7768 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7769 return NULL_TREE;
7770
7771 len = native_encode_expr (expr, buffer, sizeof (buffer));
7772 if (len == 0)
7773 return NULL_TREE;
7774
7775 return native_interpret_expr (type, buffer, len);
7776 }
7777
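/* Editor's sketch: at the C level, fold_view_convert_expr is the
   compile-time analogue of a memcpy-based bit reinterpretation, e.g.
   reading the bits of a float as a 32-bit integer (assuming both types
   are four bytes wide; memcpy is available via system.h).  */

static unsigned int
example_view_convert (float f)
{
  unsigned int u;
  memcpy (&u, &f, sizeof (u));
  return u;
}
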
7778 /* Build an expression for the address of T. Folds away INDIRECT_REF
7779 to avoid confusing the gimplify process. */
7780
7781 tree
7782 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7783 {
7784 /* The size of the object is not relevant when talking about its address. */
7785 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7786 t = TREE_OPERAND (t, 0);
7787
7788 if (TREE_CODE (t) == INDIRECT_REF)
7789 {
7790 t = TREE_OPERAND (t, 0);
7791
7792 if (TREE_TYPE (t) != ptrtype)
7793 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7794 }
7795 else if (TREE_CODE (t) == MEM_REF
7796 && integer_zerop (TREE_OPERAND (t, 1)))
7797 return TREE_OPERAND (t, 0);
7798 else if (TREE_CODE (t) == MEM_REF
7799 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7800 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7801 TREE_OPERAND (t, 0),
7802 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7803 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7804 {
7805 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7806
7807 if (TREE_TYPE (t) != ptrtype)
7808 t = fold_convert_loc (loc, ptrtype, t);
7809 }
7810 else
7811 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7812
7813 return t;
7814 }
7815
7816 /* Build an expression for the address of T. */
7817
7818 tree
7819 build_fold_addr_expr_loc (location_t loc, tree t)
7820 {
7821 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7822
7823 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7824 }
7825
7826 /* Fold a unary expression of code CODE and type TYPE with operand
7827 OP0. Return the folded expression if folding is successful.
7828 Otherwise, return NULL_TREE. */
7829
7830 tree
7831 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7832 {
7833 tree tem;
7834 tree arg0;
7835 enum tree_code_class kind = TREE_CODE_CLASS (code);
7836
7837 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7838 && TREE_CODE_LENGTH (code) == 1);
7839
7840 arg0 = op0;
7841 if (arg0)
7842 {
7843 if (CONVERT_EXPR_CODE_P (code)
7844 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7845 {
7846 /* Don't use STRIP_NOPS, because signedness of argument type
7847 matters. */
7848 STRIP_SIGN_NOPS (arg0);
7849 }
7850 else
7851 {
7852 /* Strip any conversions that don't change the mode. This
7853 is safe for every expression, except for a comparison
7854 expression because its signedness is derived from its
7855 operands.
7856
7857 Note that this is done as an internal manipulation within
7858 the constant folder, in order to find the simplest
7859 representation of the arguments so that their form can be
7860 studied. In any case, the appropriate type conversions
7861 should be put back in the tree that will get out of the
7862 constant folder. */
7863 STRIP_NOPS (arg0);
7864 }
7865
7866 if (CONSTANT_CLASS_P (arg0))
7867 {
7868 tree tem = const_unop (code, type, arg0);
7869 if (tem)
7870 {
7871 if (TREE_TYPE (tem) != type)
7872 tem = fold_convert_loc (loc, type, tem);
7873 return tem;
7874 }
7875 }
7876 }
7877
7878 tem = generic_simplify (loc, code, type, op0);
7879 if (tem)
7880 return tem;
7881
7882 if (TREE_CODE_CLASS (code) == tcc_unary)
7883 {
7884 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7885 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7886 fold_build1_loc (loc, code, type,
7887 fold_convert_loc (loc, TREE_TYPE (op0),
7888 TREE_OPERAND (arg0, 1))));
7889 else if (TREE_CODE (arg0) == COND_EXPR)
7890 {
7891 tree arg01 = TREE_OPERAND (arg0, 1);
7892 tree arg02 = TREE_OPERAND (arg0, 2);
7893 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7894 arg01 = fold_build1_loc (loc, code, type,
7895 fold_convert_loc (loc,
7896 TREE_TYPE (op0), arg01));
7897 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7898 arg02 = fold_build1_loc (loc, code, type,
7899 fold_convert_loc (loc,
7900 TREE_TYPE (op0), arg02));
7901 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7902 arg01, arg02);
7903
7904 /* If this was a conversion, and all we did was to move it
7905 inside the COND_EXPR, bring it back out. But leave it if
7906 it is a conversion from integer to integer and the
7907 result precision is no wider than a word since such a
7908 conversion is cheap and may be optimized away by combine,
7909 while it couldn't if it were outside the COND_EXPR. Then return
7910 so we don't get into an infinite recursion loop taking the
7911 conversion out and then back in. */
7912
7913 if ((CONVERT_EXPR_CODE_P (code)
7914 || code == NON_LVALUE_EXPR)
7915 && TREE_CODE (tem) == COND_EXPR
7916 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7917 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7918 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7919 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7920 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7921 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7922 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7923 && (INTEGRAL_TYPE_P
7924 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7925 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7926 || flag_syntax_only))
7927 tem = build1_loc (loc, code, type,
7928 build3 (COND_EXPR,
7929 TREE_TYPE (TREE_OPERAND
7930 (TREE_OPERAND (tem, 1), 0)),
7931 TREE_OPERAND (tem, 0),
7932 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7933 TREE_OPERAND (TREE_OPERAND (tem, 2),
7934 0)));
7935 return tem;
7936 }
7937 }
7938
7939 switch (code)
7940 {
7941 case NON_LVALUE_EXPR:
7942 if (!maybe_lvalue_p (op0))
7943 return fold_convert_loc (loc, type, op0);
7944 return NULL_TREE;
7945
7946 CASE_CONVERT:
7947 case FLOAT_EXPR:
7948 case FIX_TRUNC_EXPR:
7949 if (COMPARISON_CLASS_P (op0))
7950 {
7951 /* If we have (type) (a CMP b) and type is an integral type, return a
7952 new expression involving the new type. Canonicalize
7953 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7954 non-integral type.
7955 Do not fold the result as that would not simplify further;
7956 folding again would result in infinite recursion. */
7957 if (TREE_CODE (type) == BOOLEAN_TYPE)
7958 return build2_loc (loc, TREE_CODE (op0), type,
7959 TREE_OPERAND (op0, 0),
7960 TREE_OPERAND (op0, 1));
7961 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7962 && TREE_CODE (type) != VECTOR_TYPE)
7963 return build3_loc (loc, COND_EXPR, type, op0,
7964 constant_boolean_node (true, type),
7965 constant_boolean_node (false, type));
7966 }
7967
7968 /* Handle (T *)&A.B.C for A being of type T and B and C
7969 living at offset zero. This occurs frequently in
7970 C++ upcasting and then accessing the base. */
7971 if (TREE_CODE (op0) == ADDR_EXPR
7972 && POINTER_TYPE_P (type)
7973 && handled_component_p (TREE_OPERAND (op0, 0)))
7974 {
7975 HOST_WIDE_INT bitsize, bitpos;
7976 tree offset;
7977 machine_mode mode;
7978 int unsignedp, volatilep;
7979 tree base = TREE_OPERAND (op0, 0);
7980 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7981 &mode, &unsignedp, &volatilep, false);
7982 /* If the reference was to a (constant) zero offset, we can use
7983 the address of the base if it has the same base type
7984 as the result type and the pointer type is unqualified. */
7985 if (! offset && bitpos == 0
7986 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7987 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7988 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7989 return fold_convert_loc (loc, type,
7990 build_fold_addr_expr_loc (loc, base));
7991 }
7992
7993 if (TREE_CODE (op0) == MODIFY_EXPR
7994 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7995 /* Detect assigning a bitfield. */
7996 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7997 && DECL_BIT_FIELD
7998 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7999 {
8000 /* Don't leave an assignment inside a conversion
8001 unless assigning a bitfield. */
8002 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8003 /* First do the assignment, then return converted constant. */
8004 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8005 TREE_NO_WARNING (tem) = 1;
8006 TREE_USED (tem) = 1;
8007 return tem;
8008 }
8009
8010 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8011 constants (if x has signed type, the sign bit cannot be set
8012 in c). This folds extension into the BIT_AND_EXPR.
8013 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8014 very likely don't have maximal range for their precision and this
8015 transformation effectively doesn't preserve non-maximal ranges. */
8016 if (TREE_CODE (type) == INTEGER_TYPE
8017 && TREE_CODE (op0) == BIT_AND_EXPR
8018 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8019 {
8020 tree and_expr = op0;
8021 tree and0 = TREE_OPERAND (and_expr, 0);
8022 tree and1 = TREE_OPERAND (and_expr, 1);
8023 int change = 0;
8024
8025 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8026 || (TYPE_PRECISION (type)
8027 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8028 change = 1;
8029 else if (TYPE_PRECISION (TREE_TYPE (and1))
8030 <= HOST_BITS_PER_WIDE_INT
8031 && tree_fits_uhwi_p (and1))
8032 {
8033 unsigned HOST_WIDE_INT cst;
8034
8035 cst = tree_to_uhwi (and1);
8036 cst &= HOST_WIDE_INT_M1U
8037 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8038 change = (cst == 0);
8039 #ifdef LOAD_EXTEND_OP
8040 if (change
8041 && !flag_syntax_only
8042 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8043 == ZERO_EXTEND))
8044 {
8045 tree uns = unsigned_type_for (TREE_TYPE (and0));
8046 and0 = fold_convert_loc (loc, uns, and0);
8047 and1 = fold_convert_loc (loc, uns, and1);
8048 }
8049 #endif
8050 }
8051 if (change)
8052 {
8053 tem = force_fit_type (type, wi::to_widest (and1), 0,
8054 TREE_OVERFLOW (and1));
8055 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8056 fold_convert_loc (loc, type, and0), tem);
8057 }
8058 }
8059
8060 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8061 when one of the new casts will fold away. Conservatively we assume
8062 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8063 if (POINTER_TYPE_P (type)
8064 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8065 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8066 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8067 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8068 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8069 {
8070 tree arg00 = TREE_OPERAND (arg0, 0);
8071 tree arg01 = TREE_OPERAND (arg0, 1);
8072
8073 return fold_build_pointer_plus_loc
8074 (loc, fold_convert_loc (loc, type, arg00), arg01);
8075 }
8076
8077 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8078 of the same precision, and X is an integer type not narrower than
8079 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8080 if (INTEGRAL_TYPE_P (type)
8081 && TREE_CODE (op0) == BIT_NOT_EXPR
8082 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8083 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8084 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8085 {
8086 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8087 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8088 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8089 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8090 fold_convert_loc (loc, type, tem));
8091 }
8092
8093 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8094 type of X and Y (integer types only). */
8095 if (INTEGRAL_TYPE_P (type)
8096 && TREE_CODE (op0) == MULT_EXPR
8097 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8098 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8099 {
8100 /* Be careful not to introduce new overflows. */
8101 tree mult_type;
8102 if (TYPE_OVERFLOW_WRAPS (type))
8103 mult_type = type;
8104 else
8105 mult_type = unsigned_type_for (type);
8106
8107 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8108 {
8109 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8110 fold_convert_loc (loc, mult_type,
8111 TREE_OPERAND (op0, 0)),
8112 fold_convert_loc (loc, mult_type,
8113 TREE_OPERAND (op0, 1)));
8114 return fold_convert_loc (loc, type, tem);
8115 }
8116 }
8117
8118 return NULL_TREE;
8119
8120 case VIEW_CONVERT_EXPR:
8121 if (TREE_CODE (op0) == MEM_REF)
8122 return fold_build2_loc (loc, MEM_REF, type,
8123 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8124
8125 return NULL_TREE;
8126
8127 case NEGATE_EXPR:
8128 tem = fold_negate_expr (loc, arg0);
8129 if (tem)
8130 return fold_convert_loc (loc, type, tem);
8131 return NULL_TREE;
8132
8133 case ABS_EXPR:
8134 /* Convert fabs((double)float) into (double)fabsf(float). */
8135 if (TREE_CODE (arg0) == NOP_EXPR
8136 && TREE_CODE (type) == REAL_TYPE)
8137 {
8138 tree targ0 = strip_float_extensions (arg0);
8139 if (targ0 != arg0)
8140 return fold_convert_loc (loc, type,
8141 fold_build1_loc (loc, ABS_EXPR,
8142 TREE_TYPE (targ0),
8143 targ0));
8144 }
8145 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8146 else if (TREE_CODE (arg0) == ABS_EXPR)
8147 return arg0;
8148
8149 /* Strip sign ops from argument. */
8150 if (TREE_CODE (type) == REAL_TYPE)
8151 {
8152 tem = fold_strip_sign_ops (arg0);
8153 if (tem)
8154 return fold_build1_loc (loc, ABS_EXPR, type,
8155 fold_convert_loc (loc, type, tem));
8156 }
8157 return NULL_TREE;
8158
8159 case CONJ_EXPR:
8160 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8161 return fold_convert_loc (loc, type, arg0);
8162 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8163 {
8164 tree itype = TREE_TYPE (type);
8165 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8166 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8167 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8168 negate_expr (ipart));
8169 }
8170 if (TREE_CODE (arg0) == CONJ_EXPR)
8171 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8172 return NULL_TREE;
8173
8174 case BIT_NOT_EXPR:
8175 /* Convert ~ (-A) to A - 1. */
8176 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8177 return fold_build2_loc (loc, MINUS_EXPR, type,
8178 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8179 build_int_cst (type, 1));
8180 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8181 else if (INTEGRAL_TYPE_P (type)
8182 && ((TREE_CODE (arg0) == MINUS_EXPR
8183 && integer_onep (TREE_OPERAND (arg0, 1)))
8184 || (TREE_CODE (arg0) == PLUS_EXPR
8185 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8186 {
8187 /* Perform the negation in ARG0's type and only then convert
8188 to TYPE so as to avoid introducing undefined behavior. */
8189 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8190 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8191 TREE_OPERAND (arg0, 0));
8192 return fold_convert_loc (loc, type, t);
8193 }
8194 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8195 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8196 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8197 fold_convert_loc (loc, type,
8198 TREE_OPERAND (arg0, 0)))))
8199 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8200 fold_convert_loc (loc, type,
8201 TREE_OPERAND (arg0, 1)));
8202 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8203 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8204 fold_convert_loc (loc, type,
8205 TREE_OPERAND (arg0, 1)))))
8206 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8207 fold_convert_loc (loc, type,
8208 TREE_OPERAND (arg0, 0)), tem);
8209
8210 return NULL_TREE;
8211
8212 case TRUTH_NOT_EXPR:
8213 /* Note that the operand of this must be an int
8214 and its values must be 0 or 1.
8215 ("true" is a fixed value perhaps depending on the language,
8216 but we don't handle values other than 1 correctly yet.) */
8217 tem = fold_truth_not_expr (loc, arg0);
8218 if (!tem)
8219 return NULL_TREE;
8220 return fold_convert_loc (loc, type, tem);
8221
8222 case REALPART_EXPR:
8223 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8224 return fold_convert_loc (loc, type, arg0);
8225 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8226 {
8227 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8228 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8229 fold_build1_loc (loc, REALPART_EXPR, itype,
8230 TREE_OPERAND (arg0, 0)),
8231 fold_build1_loc (loc, REALPART_EXPR, itype,
8232 TREE_OPERAND (arg0, 1)));
8233 return fold_convert_loc (loc, type, tem);
8234 }
8235 if (TREE_CODE (arg0) == CONJ_EXPR)
8236 {
8237 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8238 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8239 TREE_OPERAND (arg0, 0));
8240 return fold_convert_loc (loc, type, tem);
8241 }
8242 if (TREE_CODE (arg0) == CALL_EXPR)
8243 {
8244 tree fn = get_callee_fndecl (arg0);
8245 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8246 switch (DECL_FUNCTION_CODE (fn))
8247 {
8248 CASE_FLT_FN (BUILT_IN_CEXPI):
8249 fn = mathfn_built_in (type, BUILT_IN_COS);
8250 if (fn)
8251 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8252 break;
8253
8254 default:
8255 break;
8256 }
8257 }
8258 return NULL_TREE;
8259
8260 case IMAGPART_EXPR:
8261 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8262 return build_zero_cst (type);
8263 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8264 {
8265 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8266 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8267 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8268 TREE_OPERAND (arg0, 0)),
8269 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8270 TREE_OPERAND (arg0, 1)));
8271 return fold_convert_loc (loc, type, tem);
8272 }
8273 if (TREE_CODE (arg0) == CONJ_EXPR)
8274 {
8275 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8276 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8277 return fold_convert_loc (loc, type, negate_expr (tem));
8278 }
8279 if (TREE_CODE (arg0) == CALL_EXPR)
8280 {
8281 tree fn = get_callee_fndecl (arg0);
8282 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8283 switch (DECL_FUNCTION_CODE (fn))
8284 {
8285 CASE_FLT_FN (BUILT_IN_CEXPI):
8286 fn = mathfn_built_in (type, BUILT_IN_SIN);
8287 if (fn)
8288 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8289 break;
8290
8291 default:
8292 break;
8293 }
8294 }
8295 return NULL_TREE;
8296
8297 case INDIRECT_REF:
8298 /* Fold *&X to X if X is an lvalue. */
8299 if (TREE_CODE (op0) == ADDR_EXPR)
8300 {
8301 tree op00 = TREE_OPERAND (op0, 0);
8302 if ((TREE_CODE (op00) == VAR_DECL
8303 || TREE_CODE (op00) == PARM_DECL
8304 || TREE_CODE (op00) == RESULT_DECL)
8305 && !TREE_READONLY (op00))
8306 return op00;
8307 }
8308 return NULL_TREE;
8309
8310 default:
8311 return NULL_TREE;
8312 } /* switch (code) */
8313 }
8314
8315
8316 /* If the operation was a conversion, do _not_ mark a resulting constant
8317 with TREE_OVERFLOW if the original constant was not. These conversions
8318 have implementation defined behavior and retaining the TREE_OVERFLOW
8319 flag here would confuse later passes such as VRP. */
8320 tree
8321 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8322 tree type, tree op0)
8323 {
8324 tree res = fold_unary_loc (loc, code, type, op0);
8325 if (res
8326 && TREE_CODE (res) == INTEGER_CST
8327 && TREE_CODE (op0) == INTEGER_CST
8328 && CONVERT_EXPR_CODE_P (code))
8329 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8330
8331 return res;
8332 }
8333
8334 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8335 operands OP0 and OP1. LOC is the location of the resulting expression.
8336 ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped.
8337 Return the folded expression if folding is successful. Otherwise,
8338 return NULL_TREE. */
8339 static tree
8340 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8341 tree arg0, tree arg1, tree op0, tree op1)
8342 {
8343 tree tem;
8344
8345 /* We only do these simplifications if we are optimizing. */
8346 if (!optimize)
8347 return NULL_TREE;
8348
8349 /* Check for things like (A || B) && (A || C). We can convert this
8350 to A || (B && C). Note that either operator can be any of the four
8351 truth and/or operations and the transformation will still be
8352 valid. Also note that we only care about order for the
8353 ANDIF and ORIF operators. If B contains side effects, this
8354 might change the truth-value of A. */
8355 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8356 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8357 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8358 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8359 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8360 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8361 {
8362 tree a00 = TREE_OPERAND (arg0, 0);
8363 tree a01 = TREE_OPERAND (arg0, 1);
8364 tree a10 = TREE_OPERAND (arg1, 0);
8365 tree a11 = TREE_OPERAND (arg1, 1);
8366 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8367 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8368 && (code == TRUTH_AND_EXPR
8369 || code == TRUTH_OR_EXPR));
8370
8371 if (operand_equal_p (a00, a10, 0))
8372 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8373 fold_build2_loc (loc, code, type, a01, a11));
8374 else if (commutative && operand_equal_p (a00, a11, 0))
8375 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8376 fold_build2_loc (loc, code, type, a01, a10));
8377 else if (commutative && operand_equal_p (a01, a10, 0))
8378 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8379 fold_build2_loc (loc, code, type, a00, a11));
8380
8381 /* This case is tricky because we must either have commutative
8382 operators or else A10 must not have side-effects. */
8383
8384 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8385 && operand_equal_p (a01, a11, 0))
8386 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8387 fold_build2_loc (loc, code, type, a00, a10),
8388 a01);
8389 }
8390
8391 /* See if we can build a range comparison. */
8392 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8393 return tem;
8394
8395 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8396 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8397 {
8398 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8399 if (tem)
8400 return fold_build2_loc (loc, code, type, tem, arg1);
8401 }
8402
8403 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8404 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8405 {
8406 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8407 if (tem)
8408 return fold_build2_loc (loc, code, type, arg0, tem);
8409 }
8410
8411 /* Check for the possibility of merging component references. If our
8412 lhs is another similar operation, try to merge its rhs with our
8413 rhs. Then try to merge our lhs and rhs. */
8414 if (TREE_CODE (arg0) == code
8415 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8416 TREE_OPERAND (arg0, 1), arg1)))
8417 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8418
8419 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8420 return tem;
8421
8422 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8423 && (code == TRUTH_AND_EXPR
8424 || code == TRUTH_ANDIF_EXPR
8425 || code == TRUTH_OR_EXPR
8426 || code == TRUTH_ORIF_EXPR))
8427 {
8428 enum tree_code ncode, icode;
8429
8430 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8431 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8432 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8433
8434 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8435 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8436 We don't want to pack more than two leaves into a non-IF AND/OR
8437 expression.
8438 If the tree code of the left-hand operand isn't an AND/OR-IF code
8439 and isn't equal to ICODE, then we don't want to add the right-hand
8440 operand. If the inner right-hand side of the left-hand operand has
8441 side-effects, or isn't simple, then we can't add to it, as
8442 otherwise we might destroy the if-sequence. */
8443 if (TREE_CODE (arg0) == icode
8444 && simple_operand_p_2 (arg1)
8445 /* Needed for sequence points to handle trapping, and
8446 side-effects. */
8447 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8448 {
8449 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8450 arg1);
8451 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8452 tem);
8453 }
8454 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8455 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8456 else if (TREE_CODE (arg1) == icode
8457 && simple_operand_p_2 (arg0)
8458 /* Needed for sequence points to handle trapping, and
8459 side-effects. */
8460 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8461 {
8462 tem = fold_build2_loc (loc, ncode, type,
8463 arg0, TREE_OPERAND (arg1, 0));
8464 return fold_build2_loc (loc, icode, type, tem,
8465 TREE_OPERAND (arg1, 1));
8466 }
8467 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8468 into (A OR B).
8469 For sequence point consistency, we need to check for trapping
8470 and side-effects. */
8471 else if (code == icode && simple_operand_p_2 (arg0)
8472 && simple_operand_p_2 (arg1))
8473 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8474 }
8475
8476 return NULL_TREE;
8477 }
8478
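/* Editor's sketch of the first transformation above at the source
   level: (a || b) && (a || c) becomes a || (b && c), valid here
   because b has no side effects that could change the value of a.  */

static int
example_distribute_truthop (int a, int b, int c)
{
  return a || (b && c);
}
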
8479 /* Fold a binary expression of code CODE and type TYPE with operands
8480 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8481 Return the folded expression if folding is successful. Otherwise,
8482 return NULL_TREE. */
8483
8484 static tree
8485 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8486 {
8487 enum tree_code compl_code;
8488
8489 if (code == MIN_EXPR)
8490 compl_code = MAX_EXPR;
8491 else if (code == MAX_EXPR)
8492 compl_code = MIN_EXPR;
8493 else
8494 gcc_unreachable ();
8495
8496 /* MIN (MAX (a, b), b) == b. */
8497 if (TREE_CODE (op0) == compl_code
8498 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8499 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8500
8501 /* MIN (MAX (b, a), b) == b. */
8502 if (TREE_CODE (op0) == compl_code
8503 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8504 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8505 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8506
8507 /* MIN (a, MAX (a, b)) == a. */
8508 if (TREE_CODE (op1) == compl_code
8509 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8510 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8511 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8512
8513 /* MIN (a, MAX (b, a)) == a. */
8514 if (TREE_CODE (op1) == compl_code
8515 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8516 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8517 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8518
8519 return NULL_TREE;
8520 }
8521
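/* Editor's sketch: spelled with the ternary operator, the first
   identity above says that

     MIN (MAX (a, b), b)   i.e.  ((a > b ? a : b) < b ? (a > b ? a : b) : b)

   always evaluates to b, since MAX (a, b) >= b; a is dropped via
   omit_one_operand when it has no side effects, as here.  */

static int
example_min_of_max (int a, int b)
{
  (void) a;  /* Side-effect free, so only b remains.  */
  return b;
}
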
8522 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8523 by changing CODE to reduce the magnitude of constants involved in
8524 ARG0 of the comparison.
8525 Returns a canonicalized comparison tree if a simplification was
8526 possible, otherwise returns NULL_TREE.
8527 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8528 valid if signed overflow is undefined. */
8529
8530 static tree
8531 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8532 tree arg0, tree arg1,
8533 bool *strict_overflow_p)
8534 {
8535 enum tree_code code0 = TREE_CODE (arg0);
8536 tree t, cst0 = NULL_TREE;
8537 int sgn0;
8538 bool swap = false;
8539
8540 /* Match A +- CST code arg1 and CST code arg1. We can change the
8541 first form only if overflow is undefined. */
8542 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8543 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8544 /* In principle pointers also have undefined overflow behavior,
8545 but that causes problems elsewhere. */
8546 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8547 && (code0 == MINUS_EXPR
8548 || code0 == PLUS_EXPR)
8549 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8550 || code0 == INTEGER_CST))
8551 return NULL_TREE;
8552
8553 /* Identify the constant in arg0 and its sign. */
8554 if (code0 == INTEGER_CST)
8555 cst0 = arg0;
8556 else
8557 cst0 = TREE_OPERAND (arg0, 1);
8558 sgn0 = tree_int_cst_sgn (cst0);
8559
8560 /* Overflowed constants and zero will cause problems. */
8561 if (integer_zerop (cst0)
8562 || TREE_OVERFLOW (cst0))
8563 return NULL_TREE;
8564
8565 /* See if we can reduce the magnitude of the constant in
8566 arg0 by changing the comparison code. */
8567 if (code0 == INTEGER_CST)
8568 {
8569 /* CST <= arg1 -> CST-1 < arg1. */
8570 if (code == LE_EXPR && sgn0 == 1)
8571 code = LT_EXPR;
8572 /* -CST < arg1 -> -CST-1 <= arg1. */
8573 else if (code == LT_EXPR && sgn0 == -1)
8574 code = LE_EXPR;
8575 /* CST > arg1 -> CST-1 >= arg1. */
8576 else if (code == GT_EXPR && sgn0 == 1)
8577 code = GE_EXPR;
8578 /* -CST >= arg1 -> -CST-1 > arg1. */
8579 else if (code == GE_EXPR && sgn0 == -1)
8580 code = GT_EXPR;
8581 else
8582 return NULL_TREE;
8583 /* arg1 code' CST' might be more canonical. */
8584 swap = true;
8585 }
8586 else
8587 {
8588 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8589 if (code == LT_EXPR
8590 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8591 code = LE_EXPR;
8592 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8593 else if (code == GT_EXPR
8594 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8595 code = GE_EXPR;
8596 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8597 else if (code == LE_EXPR
8598 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8599 code = LT_EXPR;
8600 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8601 else if (code == GE_EXPR
8602 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8603 code = GT_EXPR;
8604 else
8605 return NULL_TREE;
8606 *strict_overflow_p = true;
8607 }
8608
8609 /* Now build the constant reduced in magnitude. But not if that
8610 would produce one outside of its type's range. */
8611 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8612 && ((sgn0 == 1
8613 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8614 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8615 || (sgn0 == -1
8616 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8617 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8618 /* We cannot swap the comparison here as that would cause us to
8619 endlessly recurse. */
8620 return NULL_TREE;
8621
8622 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8623 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8624 if (code0 != INTEGER_CST)
8625 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8626 t = fold_convert (TREE_TYPE (arg1), t);
8627
8628 /* If swapping might yield a more canonical form, do so. */
8629 if (swap)
8630 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8631 else
8632 return fold_build2_loc (loc, code, type, t, arg1);
8633 }
8634
8635 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8636 overflow further. Try to decrease the magnitude of constants involved
8637 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8638 and put sole constants at the second argument position.
8639 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
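/* For instance, 5 <= X is first reduced to 4 < X and then swapped to
   the more canonical X > 4 (illustrative example with a signed X). */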
8640
8641 static tree
8642 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8643 tree arg0, tree arg1)
8644 {
8645 tree t;
8646 bool strict_overflow_p;
8647 const char * const warnmsg = G_("assuming signed overflow does not occur "
8648 "when reducing constant in comparison");
8649
8650 /* Try canonicalization by simplifying arg0. */
8651 strict_overflow_p = false;
8652 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8653 &strict_overflow_p);
8654 if (t)
8655 {
8656 if (strict_overflow_p)
8657 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8658 return t;
8659 }
8660
8661 /* Try canonicalization by simplifying arg1 using the swapped
8662 comparison. */
8663 code = swap_tree_comparison (code);
8664 strict_overflow_p = false;
8665 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8666 &strict_overflow_p);
8667 if (t && strict_overflow_p)
8668 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8669 return t;
8670 }
8671
8672 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8673 space. This is used to avoid issuing overflow warnings for
8674 expressions like &p->x which cannot wrap. */
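/* For instance (illustrative): with a 16-byte *BASE, OFFSET 4 and
   BITPOS 32 give a total byte offset of 4 + 32/8 = 8, which stays
   within the 16-byte object, so false is returned and no overflow
   warning is issued. */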
8675
8676 static bool
8677 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8678 {
8679 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8680 return true;
8681
8682 if (bitpos < 0)
8683 return true;
8684
8685 wide_int wi_offset;
8686 int precision = TYPE_PRECISION (TREE_TYPE (base));
8687 if (offset == NULL_TREE)
8688 wi_offset = wi::zero (precision);
8689 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8690 return true;
8691 else
8692 wi_offset = offset;
8693
8694 bool overflow;
8695 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8696 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8697 if (overflow)
8698 return true;
8699
8700 if (!wi::fits_uhwi_p (total))
8701 return true;
8702
8703 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8704 if (size <= 0)
8705 return true;
8706
8707 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8708 array. */
8709 if (TREE_CODE (base) == ADDR_EXPR)
8710 {
8711 HOST_WIDE_INT base_size;
8712
8713 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8714 if (base_size > 0 && size < base_size)
8715 size = base_size;
8716 }
8717
8718 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8719 }
8720
8721 /* Return the HOST_WIDE_INT least significant bits of T, an
8722 INTEGER_CST of sizetype kind. This makes sure to properly
8723 sign-extend the constant. */
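/* E.g. with 32-bit precision and a 64-bit HOST_WIDE_INT, a low
   element of 0xffffffff is sign-extended to -1 (illustrative). */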
8724
8725 static HOST_WIDE_INT
8726 size_low_cst (const_tree t)
8727 {
8728 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8729 int prec = TYPE_PRECISION (TREE_TYPE (t));
8730 if (prec < HOST_BITS_PER_WIDE_INT)
8731 return sext_hwi (w, prec);
8732 return w;
8733 }
8734
8735 /* Subroutine of fold_binary. This routine performs all of the
8736 transformations that are common to the equality/inequality
8737 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8738 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8739 fold_binary should call fold_binary instead of this routine. Fold
8740 a comparison with tree code CODE and type TYPE with operands OP0
8741 and OP1. Return the folded comparison or NULL_TREE. */
8742
8743 static tree
8744 fold_comparison (location_t loc, enum tree_code code, tree type,
8745 tree op0, tree op1)
8746 {
8747 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8748 tree arg0, arg1, tem;
8749
8750 arg0 = op0;
8751 arg1 = op1;
8752
8753 STRIP_SIGN_NOPS (arg0);
8754 STRIP_SIGN_NOPS (arg1);
8755
8756 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
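/* E.g. X + 5 < 10 becomes X < 5 (illustrative, signed X with
   undefined overflow). When the new constant overflows, as in
   X - 1 < INT_MAX, the result is decided directly below: that
   comparison is always true. */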
8757 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8758 && (equality_code
8759 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8760 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8761 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8762 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8763 && TREE_CODE (arg1) == INTEGER_CST
8764 && !TREE_OVERFLOW (arg1))
8765 {
8766 const enum tree_code
8767 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8768 tree const1 = TREE_OPERAND (arg0, 1);
8769 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8770 tree variable = TREE_OPERAND (arg0, 0);
8771 tree new_const = int_const_binop (reverse_op, const2, const1);
8772
8773 /* If the constant operation overflowed this can be
8774 simplified as a comparison against INT_MAX/INT_MIN. */
8775 if (TREE_OVERFLOW (new_const)
8776 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8777 {
8778 int const1_sgn = tree_int_cst_sgn (const1);
8779 enum tree_code code2 = code;
8780
8781 /* Get the sign of the constant on the lhs if the
8782 operation were VARIABLE + CONST1. */
8783 if (TREE_CODE (arg0) == MINUS_EXPR)
8784 const1_sgn = -const1_sgn;
8785
8786 /* The sign of the constant determines if we overflowed
8787 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8788 Canonicalize to the INT_MIN overflow by swapping the comparison
8789 if necessary. */
8790 if (const1_sgn == -1)
8791 code2 = swap_tree_comparison (code);
8792
8793 /* We now can look at the canonicalized case
8794 VARIABLE + 1 CODE2 INT_MIN
8795 and decide on the result. */
8796 switch (code2)
8797 {
8798 case EQ_EXPR:
8799 case LT_EXPR:
8800 case LE_EXPR:
8801 return
8802 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8803
8804 case NE_EXPR:
8805 case GE_EXPR:
8806 case GT_EXPR:
8807 return
8808 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8809
8810 default:
8811 gcc_unreachable ();
8812 }
8813 }
8814 else
8815 {
8816 if (!equality_code)
8817 fold_overflow_warning ("assuming signed overflow does not occur "
8818 "when changing X +- C1 cmp C2 to "
8819 "X cmp C2 -+ C1",
8820 WARN_STRICT_OVERFLOW_COMPARISON);
8821 return fold_build2_loc (loc, code, type, variable, new_const);
8822 }
8823 }
8824
8825 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8826 if (TREE_CODE (arg0) == MINUS_EXPR
8827 && equality_code
8828 && integer_zerop (arg1))
8829 {
8830 /* ??? The transformation is valid for the other operators if overflow
8831 is undefined for the type, but performing it here badly interacts
8832 with the transformation in fold_cond_expr_with_comparison which
8833 attempts to synthesize ABS_EXPR. */
8834 if (!equality_code)
8835 fold_overflow_warning ("assuming signed overflow does not occur "
8836 "when changing X - Y cmp 0 to X cmp Y",
8837 WARN_STRICT_OVERFLOW_COMPARISON);
8838 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8839 TREE_OPERAND (arg0, 1));
8840 }
8841
8842 /* For comparisons of pointers we can decompose it to a compile time
8843 comparison of the base objects and the offsets into the object.
8844 This requires at least one operand being an ADDR_EXPR or a
8845 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
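/* For instance, &s.f1 == &s.f2 for distinct fields f1 and f2 of the
   same struct object folds to false: the bases compare equal but the
   bit positions differ (illustrative). */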
8846 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8847 && (TREE_CODE (arg0) == ADDR_EXPR
8848 || TREE_CODE (arg1) == ADDR_EXPR
8849 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8850 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8851 {
8852 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8853 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8854 machine_mode mode;
8855 int volatilep, unsignedp;
8856 bool indirect_base0 = false, indirect_base1 = false;
8857
8858 /* Get base and offset for the access. Strip ADDR_EXPR for
8859 get_inner_reference, but put it back by stripping INDIRECT_REF
8860 off the base object if possible. indirect_baseN will be true
8861 if baseN is not an address but refers to the object itself. */
8862 base0 = arg0;
8863 if (TREE_CODE (arg0) == ADDR_EXPR)
8864 {
8865 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8866 &bitsize, &bitpos0, &offset0, &mode,
8867 &unsignedp, &volatilep, false);
8868 if (TREE_CODE (base0) == INDIRECT_REF)
8869 base0 = TREE_OPERAND (base0, 0);
8870 else
8871 indirect_base0 = true;
8872 }
8873 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8874 {
8875 base0 = TREE_OPERAND (arg0, 0);
8876 STRIP_SIGN_NOPS (base0);
8877 if (TREE_CODE (base0) == ADDR_EXPR)
8878 {
8879 base0 = TREE_OPERAND (base0, 0);
8880 indirect_base0 = true;
8881 }
8882 offset0 = TREE_OPERAND (arg0, 1);
8883 if (tree_fits_shwi_p (offset0))
8884 {
8885 HOST_WIDE_INT off = size_low_cst (offset0);
8886 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8887 * BITS_PER_UNIT)
8888 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8889 {
8890 bitpos0 = off * BITS_PER_UNIT;
8891 offset0 = NULL_TREE;
8892 }
8893 }
8894 }
8895
8896 base1 = arg1;
8897 if (TREE_CODE (arg1) == ADDR_EXPR)
8898 {
8899 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8900 &bitsize, &bitpos1, &offset1, &mode,
8901 &unsignedp, &volatilep, false);
8902 if (TREE_CODE (base1) == INDIRECT_REF)
8903 base1 = TREE_OPERAND (base1, 0);
8904 else
8905 indirect_base1 = true;
8906 }
8907 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8908 {
8909 base1 = TREE_OPERAND (arg1, 0);
8910 STRIP_SIGN_NOPS (base1);
8911 if (TREE_CODE (base1) == ADDR_EXPR)
8912 {
8913 base1 = TREE_OPERAND (base1, 0);
8914 indirect_base1 = true;
8915 }
8916 offset1 = TREE_OPERAND (arg1, 1);
8917 if (tree_fits_shwi_p (offset1))
8918 {
8919 HOST_WIDE_INT off = size_low_cst (offset1);
8920 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8921 * BITS_PER_UNIT)
8922 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8923 {
8924 bitpos1 = off * BITS_PER_UNIT;
8925 offset1 = NULL_TREE;
8926 }
8927 }
8928 }
8929
8930 /* A local variable can never be pointed to by
8931 the default SSA name of an incoming parameter. */
8932 if ((TREE_CODE (arg0) == ADDR_EXPR
8933 && indirect_base0
8934 && TREE_CODE (base0) == VAR_DECL
8935 && auto_var_in_fn_p (base0, current_function_decl)
8936 && !indirect_base1
8937 && TREE_CODE (base1) == SSA_NAME
8938 && SSA_NAME_IS_DEFAULT_DEF (base1)
8939 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8940 || (TREE_CODE (arg1) == ADDR_EXPR
8941 && indirect_base1
8942 && TREE_CODE (base1) == VAR_DECL
8943 && auto_var_in_fn_p (base1, current_function_decl)
8944 && !indirect_base0
8945 && TREE_CODE (base0) == SSA_NAME
8946 && SSA_NAME_IS_DEFAULT_DEF (base0)
8947 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8948 {
8949 if (code == NE_EXPR)
8950 return constant_boolean_node (1, type);
8951 else if (code == EQ_EXPR)
8952 return constant_boolean_node (0, type);
8953 }
8954 /* If we have equivalent bases we might be able to simplify. */
8955 else if (indirect_base0 == indirect_base1
8956 && operand_equal_p (base0, base1, 0))
8957 {
8958 /* We can fold this expression to a constant if the non-constant
8959 offset parts are equal. */
8960 if ((offset0 == offset1
8961 || (offset0 && offset1
8962 && operand_equal_p (offset0, offset1, 0)))
8963 && (code == EQ_EXPR
8964 || code == NE_EXPR
8965 || (indirect_base0 && DECL_P (base0))
8966 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8968 {
8969 if (!equality_code
8970 && bitpos0 != bitpos1
8971 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8972 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8973 fold_overflow_warning (("assuming pointer wraparound does not "
8974 "occur when comparing P +- C1 with "
8975 "P +- C2"),
8976 WARN_STRICT_OVERFLOW_CONDITIONAL);
8977
8978 switch (code)
8979 {
8980 case EQ_EXPR:
8981 return constant_boolean_node (bitpos0 == bitpos1, type);
8982 case NE_EXPR:
8983 return constant_boolean_node (bitpos0 != bitpos1, type);
8984 case LT_EXPR:
8985 return constant_boolean_node (bitpos0 < bitpos1, type);
8986 case LE_EXPR:
8987 return constant_boolean_node (bitpos0 <= bitpos1, type);
8988 case GE_EXPR:
8989 return constant_boolean_node (bitpos0 >= bitpos1, type);
8990 case GT_EXPR:
8991 return constant_boolean_node (bitpos0 > bitpos1, type);
8992 default:;
8993 }
8994 }
8995 /* We can simplify the comparison to a comparison of the variable
8996 offset parts if the constant offset parts are equal.
8997 Be careful to use signed sizetype here because otherwise we
8998 mess with array offsets in the wrong way. This is possible
8999 because pointer arithmetic is restricted to remain within an
9000 object and overflow on pointer differences is undefined as of
9001 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9002 else if (bitpos0 == bitpos1
9003 && (equality_code
9004 || (indirect_base0 && DECL_P (base0))
9005 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9006 {
9007 /* By converting to signed sizetype we cover middle-end pointer
9008 arithmetic, which operates on unsigned pointer types of sizetype
9009 size, and ARRAY_REF offsets, which are properly sign- or
9010 zero-extended from their type in case it is narrower than
9011 sizetype. */
9012 if (offset0 == NULL_TREE)
9013 offset0 = build_int_cst (ssizetype, 0);
9014 else
9015 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9016 if (offset1 == NULL_TREE)
9017 offset1 = build_int_cst (ssizetype, 0);
9018 else
9019 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9020
9021 if (!equality_code
9022 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9023 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9024 fold_overflow_warning (("assuming pointer wraparound does not "
9025 "occur when comparing P +- C1 with "
9026 "P +- C2"),
9027 WARN_STRICT_OVERFLOW_COMPARISON);
9028
9029 return fold_build2_loc (loc, code, type, offset0, offset1);
9030 }
9031 }
9032 /* For non-equal bases we can simplify if they are addresses
9033 of declarations with different addresses. */
9034 else if (indirect_base0 && indirect_base1
9035 /* We know that !operand_equal_p (base0, base1, 0)
9036 because the if condition was false. But make
9037 sure two decls are not the same. */
9038 && base0 != base1
9039 && TREE_CODE (arg0) == ADDR_EXPR
9040 && TREE_CODE (arg1) == ADDR_EXPR
9041 && DECL_P (base0)
9042 && DECL_P (base1)
9043 /* Watch for aliases. */
9044 && (!decl_in_symtab_p (base0)
9045 || !decl_in_symtab_p (base1)
9046 || !symtab_node::get_create (base0)->equal_address_to
9047 (symtab_node::get_create (base1))))
9048 {
9049 if (code == EQ_EXPR)
9050 return omit_two_operands_loc (loc, type, boolean_false_node,
9051 arg0, arg1);
9052 else if (code == NE_EXPR)
9053 return omit_two_operands_loc (loc, type, boolean_true_node,
9054 arg0, arg1);
9055 }
9056 /* For equal offsets we can simplify to a comparison of the
9057 base addresses. */
9058 else if (bitpos0 == bitpos1
9059 && (indirect_base0
9060 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9061 && (indirect_base1
9062 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9063 && ((offset0 == offset1)
9064 || (offset0 && offset1
9065 && operand_equal_p (offset0, offset1, 0))))
9066 {
9067 if (indirect_base0)
9068 base0 = build_fold_addr_expr_loc (loc, base0);
9069 if (indirect_base1)
9070 base1 = build_fold_addr_expr_loc (loc, base1);
9071 return fold_build2_loc (loc, code, type, base0, base1);
9072 }
9073 }
9074
9075 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9076 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9077 the resulting offset is smaller in absolute value than the
9078 original one and has the same sign. */
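/* E.g. X + 3 < Y + 5 becomes X < Y + 2: the combined constant 2 is
   smaller in absolute value than 5 and has the same sign
   (illustrative, signed operands with undefined overflow). */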
9079 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9080 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9081 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9082 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9083 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9084 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9085 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9086 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9087 {
9088 tree const1 = TREE_OPERAND (arg0, 1);
9089 tree const2 = TREE_OPERAND (arg1, 1);
9090 tree variable1 = TREE_OPERAND (arg0, 0);
9091 tree variable2 = TREE_OPERAND (arg1, 0);
9092 tree cst;
9093 const char * const warnmsg = G_("assuming signed overflow does not "
9094 "occur when combining constants around "
9095 "a comparison");
9096
9097 /* Put the constant on the side where it doesn't overflow and is
9098 of lower absolute value and of the same sign as before. */
9099 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9100 ? MINUS_EXPR : PLUS_EXPR,
9101 const2, const1);
9102 if (!TREE_OVERFLOW (cst)
9103 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9104 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9105 {
9106 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9107 return fold_build2_loc (loc, code, type,
9108 variable1,
9109 fold_build2_loc (loc, TREE_CODE (arg1),
9110 TREE_TYPE (arg1),
9111 variable2, cst));
9112 }
9113
9114 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9115 ? MINUS_EXPR : PLUS_EXPR,
9116 const1, const2);
9117 if (!TREE_OVERFLOW (cst)
9118 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9119 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9120 {
9121 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9122 return fold_build2_loc (loc, code, type,
9123 fold_build2_loc (loc, TREE_CODE (arg0),
9124 TREE_TYPE (arg0),
9125 variable1, cst),
9126 variable2);
9127 }
9128 }
9129
9130 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9131 signed arithmetic case. That form is created by the compiler
9132 often enough for folding it to be of value. One example is in
9133 computing loop trip counts after Operator Strength Reduction. */
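/* E.g. X * 4 > 0 becomes X > 0, while X * -2 > 0 becomes X < 0
   because a negative factor swaps the comparison (illustrative,
   signed X with undefined overflow). */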
9134 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9135 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9136 && TREE_CODE (arg0) == MULT_EXPR
9137 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9139 && integer_zerop (arg1))
9140 {
9141 tree const1 = TREE_OPERAND (arg0, 1);
9142 tree const2 = arg1; /* zero */
9143 tree variable1 = TREE_OPERAND (arg0, 0);
9144 enum tree_code cmp_code = code;
9145
9146 /* Handle unfolded multiplication by zero. */
9147 if (integer_zerop (const1))
9148 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9149
9150 fold_overflow_warning (("assuming signed overflow does not occur when "
9151 "eliminating multiplication in comparison "
9152 "with zero"),
9153 WARN_STRICT_OVERFLOW_COMPARISON);
9154
9155 /* If const1 is negative we swap the sense of the comparison. */
9156 if (tree_int_cst_sgn (const1) < 0)
9157 cmp_code = swap_tree_comparison (cmp_code);
9158
9159 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9160 }
9161
9162 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9163 if (tem)
9164 return tem;
9165
9166 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9167 {
9168 tree targ0 = strip_float_extensions (arg0);
9169 tree targ1 = strip_float_extensions (arg1);
9170 tree newtype = TREE_TYPE (targ0);
9171
9172 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9173 newtype = TREE_TYPE (targ1);
9174
9175 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9176 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9177 return fold_build2_loc (loc, code, type,
9178 fold_convert_loc (loc, newtype, targ0),
9179 fold_convert_loc (loc, newtype, targ1));
9180
9181 if (TREE_CODE (arg1) == REAL_CST)
9182 {
9183 REAL_VALUE_TYPE cst;
9184 cst = TREE_REAL_CST (arg1);
9185
9186 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9187 /* a CMP (-0) -> a CMP 0 */
9188 if (REAL_VALUE_MINUS_ZERO (cst))
9189 return fold_build2_loc (loc, code, type, arg0,
9190 build_real (TREE_TYPE (arg1), dconst0));
9191
9192 /* x != NaN is always true, other ops are always false. */
9193 if (REAL_VALUE_ISNAN (cst)
9194 && ! HONOR_SNANS (arg1))
9195 {
9196 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9197 return omit_one_operand_loc (loc, type, tem, arg0);
9198 }
9199
9200 /* Fold comparisons against infinity. */
9201 if (REAL_VALUE_ISINF (cst)
9202 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9203 {
9204 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9205 if (tem != NULL_TREE)
9206 return tem;
9207 }
9208 }
9209
9210 /* If this is a comparison of a real constant with a PLUS_EXPR
9211 or a MINUS_EXPR of a real constant, we can convert it into a
9212 comparison with a revised real constant, provided that
9213 unsafe_math_optimizations are enabled and no overflow occurs. */
9214 if (flag_unsafe_math_optimizations
9215 && TREE_CODE (arg1) == REAL_CST
9216 && (TREE_CODE (arg0) == PLUS_EXPR
9217 || TREE_CODE (arg0) == MINUS_EXPR)
9218 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9219 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9220 ? MINUS_EXPR : PLUS_EXPR,
9221 arg1, TREE_OPERAND (arg0, 1)))
9222 && !TREE_OVERFLOW (tem))
9223 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9224
9225 /* Likewise, we can simplify a comparison of a real constant with
9226 a MINUS_EXPR whose first operand is also a real constant, i.e.
9227 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9228 floating-point types only if -fassociative-math is set. */
9229 if (flag_associative_math
9230 && TREE_CODE (arg1) == REAL_CST
9231 && TREE_CODE (arg0) == MINUS_EXPR
9232 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9233 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9234 arg1))
9235 && !TREE_OVERFLOW (tem))
9236 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9237 TREE_OPERAND (arg0, 1), tem);
9238
9239 /* Fold comparisons against built-in math functions. */
9240 if (TREE_CODE (arg1) == REAL_CST
9241 && flag_unsafe_math_optimizations
9242 && ! flag_errno_math)
9243 {
9244 enum built_in_function fcode = builtin_mathfn_code (arg0);
9245
9246 if (fcode != END_BUILTINS)
9247 {
9248 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9249 if (tem != NULL_TREE)
9250 return tem;
9251 }
9252 }
9253 }
9254
9255 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9256 && CONVERT_EXPR_P (arg0))
9257 {
9258 /* If we are widening one operand of an integer comparison,
9259 see if the other operand is similarly being widened. Perhaps we
9260 can do the comparison in the narrower type. */
9261 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9262 if (tem)
9263 return tem;
9264
9265 /* Or if we are changing signedness. */
9266 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9267 if (tem)
9268 return tem;
9269 }
9270
9271 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9272 constant, we can simplify it. */
9273 if (TREE_CODE (arg1) == INTEGER_CST
9274 && (TREE_CODE (arg0) == MIN_EXPR
9275 || TREE_CODE (arg0) == MAX_EXPR)
9276 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9277 {
9278 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9279 if (tem)
9280 return tem;
9281 }
9282
9283 /* Simplify comparison of something with itself. (For IEEE
9284 floating-point, we can only do some of these simplifications.) */
9285 if (operand_equal_p (arg0, arg1, 0))
9286 {
9287 switch (code)
9288 {
9289 case EQ_EXPR:
9290 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9291 || ! HONOR_NANS (arg0))
9292 return constant_boolean_node (1, type);
9293 break;
9294
9295 case GE_EXPR:
9296 case LE_EXPR:
9297 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9298 || ! HONOR_NANS (arg0))
9299 return constant_boolean_node (1, type);
9300 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9301
9302 case NE_EXPR:
9303 /* For NE, we can only do this simplification if the type is
9304 integral or we don't honor IEEE floating point NaNs. */
9305 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9306 && HONOR_NANS (arg0))
9307 break;
9308 /* ... fall through ... */
9309 case GT_EXPR:
9310 case LT_EXPR:
9311 return constant_boolean_node (0, type);
9312 default:
9313 gcc_unreachable ();
9314 }
9315 }
9316
9317 /* If we are comparing an expression that just has comparisons
9318 of two integer values, arithmetic expressions of those comparisons,
9319 and constants, we can simplify it. There are only three cases
9320 to check: the two values can either be equal, the first can be
9321 greater, or the second can be greater. Fold the expression for
9322 those three values. Since each value must be 0 or 1, we have
9323 eight possibilities, each of which corresponds to the constant 0
9324 or 1 or one of the six possible comparisons.
9325
9326 This handles common cases like (a > b) == 0 but also handles
9327 expressions like ((x > y) - (y > x)) > 0, which supposedly
9328 occur in macroized code. */
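/* E.g. for ((x > y) - (y > x)) > 0 the three orderings of x and y
   evaluate to 1, 0 and 0 respectively, giving mask 4 below, so the
   whole expression folds to x > y (illustrative). */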
9329
9330 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9331 {
9332 tree cval1 = 0, cval2 = 0;
9333 int save_p = 0;
9334
9335 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9336 /* Don't handle degenerate cases here; they should already
9337 have been handled anyway. */
9338 && cval1 != 0 && cval2 != 0
9339 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9340 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9341 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9342 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9343 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9344 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9345 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9346 {
9347 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9348 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9349
9350 /* We can't just pass T to eval_subst in case cval1 or cval2
9351 was the same as ARG1. */
9352
9353 tree high_result
9354 = fold_build2_loc (loc, code, type,
9355 eval_subst (loc, arg0, cval1, maxval,
9356 cval2, minval),
9357 arg1);
9358 tree equal_result
9359 = fold_build2_loc (loc, code, type,
9360 eval_subst (loc, arg0, cval1, maxval,
9361 cval2, maxval),
9362 arg1);
9363 tree low_result
9364 = fold_build2_loc (loc, code, type,
9365 eval_subst (loc, arg0, cval1, minval,
9366 cval2, maxval),
9367 arg1);
9368
9369 /* All three of these results should be 0 or 1. Confirm they are.
9370 Then use those values to select the proper code to use. */
9371
9372 if (TREE_CODE (high_result) == INTEGER_CST
9373 && TREE_CODE (equal_result) == INTEGER_CST
9374 && TREE_CODE (low_result) == INTEGER_CST)
9375 {
9376 /* Make a 3-bit mask with the high-order bit being the
9377 value for `>', the next for '=', and the low for '<'. */
9378 switch ((integer_onep (high_result) * 4)
9379 + (integer_onep (equal_result) * 2)
9380 + integer_onep (low_result))
9381 {
9382 case 0:
9383 /* Always false. */
9384 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9385 case 1:
9386 code = LT_EXPR;
9387 break;
9388 case 2:
9389 code = EQ_EXPR;
9390 break;
9391 case 3:
9392 code = LE_EXPR;
9393 break;
9394 case 4:
9395 code = GT_EXPR;
9396 break;
9397 case 5:
9398 code = NE_EXPR;
9399 break;
9400 case 6:
9401 code = GE_EXPR;
9402 break;
9403 case 7:
9404 /* Always true. */
9405 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9406 }
9407
9408 if (save_p)
9409 {
9410 tem = save_expr (build2 (code, type, cval1, cval2));
9411 SET_EXPR_LOCATION (tem, loc);
9412 return tem;
9413 }
9414 return fold_build2_loc (loc, code, type, cval1, cval2);
9415 }
9416 }
9417 }
9418
9419 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9420 into a single range test. */
9421 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9422 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9423 && TREE_CODE (arg1) == INTEGER_CST
9424 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9425 && !integer_zerop (TREE_OPERAND (arg0, 1))
9426 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9427 && !TREE_OVERFLOW (arg1))
9428 {
9429 tem = fold_div_compare (loc, code, type, arg0, arg1);
9430 if (tem != NULL_TREE)
9431 return tem;
9432 }
9433
9434 /* Fold ~X op ~Y as Y op X. */
9435 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9436 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9437 {
9438 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9439 return fold_build2_loc (loc, code, type,
9440 fold_convert_loc (loc, cmp_type,
9441 TREE_OPERAND (arg1, 0)),
9442 TREE_OPERAND (arg0, 0));
9443 }
9444
9445 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9446 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9447 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9448 {
9449 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9450 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9451 TREE_OPERAND (arg0, 0),
9452 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9453 fold_convert_loc (loc, cmp_type, arg1)));
9454 }
9455
9456 return NULL_TREE;
9457 }
9458
9459
9460 /* Subroutine of fold_binary. Optimize complex multiplications of the
9461 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9462 argument EXPR represents the expression "z" of type TYPE. */
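/* E.g. for z = a + b*i this produces COMPLEX_EXPR <a*a + b*b, 0>,
   wrapping a and b in SAVE_EXPRs since each is used twice
   (illustrative). */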
9463
9464 static tree
9465 fold_mult_zconjz (location_t loc, tree type, tree expr)
9466 {
9467 tree itype = TREE_TYPE (type);
9468 tree rpart, ipart, tem;
9469
9470 if (TREE_CODE (expr) == COMPLEX_EXPR)
9471 {
9472 rpart = TREE_OPERAND (expr, 0);
9473 ipart = TREE_OPERAND (expr, 1);
9474 }
9475 else if (TREE_CODE (expr) == COMPLEX_CST)
9476 {
9477 rpart = TREE_REALPART (expr);
9478 ipart = TREE_IMAGPART (expr);
9479 }
9480 else
9481 {
9482 expr = save_expr (expr);
9483 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9484 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9485 }
9486
9487 rpart = save_expr (rpart);
9488 ipart = save_expr (ipart);
9489 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9490 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9491 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9492 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9493 build_zero_cst (itype));
9494 }
9495
9496
9497 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9498 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9499 guarantees that P and N have the same least significant log2(M) bits.
9500 N is not otherwise constrained. In particular, N is not normalized to
9501 0 <= N < M as is common. In general, the precise value of P is unknown.
9502 M is chosen as large as possible such that constant N can be determined.
9503
9504 Returns M and sets *RESIDUE to N.
9505
9506 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9507 account. This is not always possible due to PR 35705.
9508 */
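/* For instance (illustrative): for a 16-byte-aligned array a,
   &a[0] + i * 4 yields modulus 4 with *RESIDUE 0; a further
   constant offset of 5 keeps modulus 4 and makes *RESIDUE 5. */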
9509
9510 static unsigned HOST_WIDE_INT
9511 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9512 bool allow_func_align)
9513 {
9514 enum tree_code code;
9515
9516 *residue = 0;
9517
9518 code = TREE_CODE (expr);
9519 if (code == ADDR_EXPR)
9520 {
9521 unsigned int bitalign;
9522 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9523 *residue /= BITS_PER_UNIT;
9524 return bitalign / BITS_PER_UNIT;
9525 }
9526 else if (code == POINTER_PLUS_EXPR)
9527 {
9528 tree op0, op1;
9529 unsigned HOST_WIDE_INT modulus;
9530 enum tree_code inner_code;
9531
9532 op0 = TREE_OPERAND (expr, 0);
9533 STRIP_NOPS (op0);
9534 modulus = get_pointer_modulus_and_residue (op0, residue,
9535 allow_func_align);
9536
9537 op1 = TREE_OPERAND (expr, 1);
9538 STRIP_NOPS (op1);
9539 inner_code = TREE_CODE (op1);
9540 if (inner_code == INTEGER_CST)
9541 {
9542 *residue += TREE_INT_CST_LOW (op1);
9543 return modulus;
9544 }
9545 else if (inner_code == MULT_EXPR)
9546 {
9547 op1 = TREE_OPERAND (op1, 1);
9548 if (TREE_CODE (op1) == INTEGER_CST)
9549 {
9550 unsigned HOST_WIDE_INT align;
9551
9552 /* Compute the greatest power-of-2 divisor of op1. */
9553 align = TREE_INT_CST_LOW (op1);
9554 align &= -align;
9555
9556 /* If align is non-zero and less than *modulus, replace
9557 *modulus with align. If align is 0, then either op1 is 0
9558 or the greatest power-of-2 divisor of op1 doesn't fit in an
9559 unsigned HOST_WIDE_INT. In either case, no additional
9560 constraint is imposed. */
9561 if (align)
9562 modulus = MIN (modulus, align);
9563
9564 return modulus;
9565 }
9566 }
9567 }
9568
9569 /* If we get here, we were unable to determine anything useful about the
9570 expression. */
9571 return 1;
9572 }
9573
9574 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9575 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9576
9577 static bool
9578 vec_cst_ctor_to_array (tree arg, tree *elts)
9579 {
9580 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9581
9582 if (TREE_CODE (arg) == VECTOR_CST)
9583 {
9584 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9585 elts[i] = VECTOR_CST_ELT (arg, i);
9586 }
9587 else if (TREE_CODE (arg) == CONSTRUCTOR)
9588 {
9589 constructor_elt *elt;
9590
9591 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9592 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9593 return false;
9594 else
9595 elts[i] = elt->value;
9596 }
9597 else
9598 return false;
9599 for (; i < nelts; i++)
9600 elts[i]
9601 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9602 return true;
9603 }
9604
9605 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9606 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9607 NULL_TREE otherwise. */
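/* E.g. for V4SI vectors ARG0 = {0,1,2,3}, ARG1 = {4,5,6,7} and
   SEL = {0,4,1,5} the result is {0,4,1,5}: selector values of NELTS
   and above index into ARG1 (illustrative). */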
9608
9609 static tree
9610 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9611 {
9612 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9613 tree *elts;
9614 bool need_ctor = false;
9615
9616 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9617 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9618 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9619 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9620 return NULL_TREE;
9621
9622 elts = XALLOCAVEC (tree, nelts * 3);
9623 if (!vec_cst_ctor_to_array (arg0, elts)
9624 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9625 return NULL_TREE;
9626
9627 for (i = 0; i < nelts; i++)
9628 {
9629 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9630 need_ctor = true;
9631 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9632 }
9633
9634 if (need_ctor)
9635 {
9636 vec<constructor_elt, va_gc> *v;
9637 vec_alloc (v, nelts);
9638 for (i = 0; i < nelts; i++)
9639 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9640 return build_constructor (type, v);
9641 }
9642 else
9643 return build_vector (type, &elts[2 * nelts]);
9644 }
9645
9646 /* Try to fold a pointer difference of type TYPE between two address
9647 expressions of array references AREF0 and AREF1 using location LOC.
9648 Return a simplified expression for the difference or NULL_TREE. */
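/* E.g. &a[i] - &a[j] folds to a zero base offset plus
   (i - j) * sizeof (a[0]), recursing when the bases are themselves
   ARRAY_REFs (illustrative). */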
9649
9650 static tree
9651 fold_addr_of_array_ref_difference (location_t loc, tree type,
9652 tree aref0, tree aref1)
9653 {
9654 tree base0 = TREE_OPERAND (aref0, 0);
9655 tree base1 = TREE_OPERAND (aref1, 0);
9656 tree base_offset = build_int_cst (type, 0);
9657
9658 /* If the bases are array references as well, recurse. If the bases
9659 are pointer indirections compute the difference of the pointers.
9660 If the bases are equal, we are set. */
9661 if ((TREE_CODE (base0) == ARRAY_REF
9662 && TREE_CODE (base1) == ARRAY_REF
9663 && (base_offset
9664 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9665 || (INDIRECT_REF_P (base0)
9666 && INDIRECT_REF_P (base1)
9667 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9668 TREE_OPERAND (base0, 0),
9669 TREE_OPERAND (base1, 0))))
9670 || operand_equal_p (base0, base1, 0))
9671 {
9672 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9673 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9674 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9675 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9676 return fold_build2_loc (loc, PLUS_EXPR, type,
9677 base_offset,
9678 fold_build2_loc (loc, MULT_EXPR, type,
9679 diff, esz));
9680 }
9681 return NULL_TREE;
9682 }
9683
9684 /* If the real or vector real constant CST of type TYPE has an exact
9685 inverse, return it, else return NULL. */
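/* E.g. 4.0 yields 0.25, both being exactly representable, while 3.0
   yields NULL: 1/3 has no exact binary floating-point representation
   (illustrative). */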
9686
9687 tree
9688 exact_inverse (tree type, tree cst)
9689 {
9690 REAL_VALUE_TYPE r;
9691 tree unit_type, *elts;
9692 machine_mode mode;
9693 unsigned vec_nelts, i;
9694
9695 switch (TREE_CODE (cst))
9696 {
9697 case REAL_CST:
9698 r = TREE_REAL_CST (cst);
9699
9700 if (exact_real_inverse (TYPE_MODE (type), &r))
9701 return build_real (type, r);
9702
9703 return NULL_TREE;
9704
9705 case VECTOR_CST:
9706 vec_nelts = VECTOR_CST_NELTS (cst);
9707 elts = XALLOCAVEC (tree, vec_nelts);
9708 unit_type = TREE_TYPE (type);
9709 mode = TYPE_MODE (unit_type);
9710
9711 for (i = 0; i < vec_nelts; i++)
9712 {
9713 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9714 if (!exact_real_inverse (mode, &r))
9715 return NULL_TREE;
9716 elts[i] = build_real (unit_type, r);
9717 }
9718
9719 return build_vector (type, elts);
9720
9721 default:
9722 return NULL_TREE;
9723 }
9724 }
9725
9726 /* Mask out the tz least significant bits of X of type TYPE where
9727 tz is the number of trailing zeroes in Y. */
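/* E.g. Y = 8 has three trailing zero bits, so X = 0b10111 is masked
   to 0b10000 (illustrative). */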
9728 static wide_int
9729 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9730 {
9731 int tz = wi::ctz (y);
9732 if (tz > 0)
9733 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9734 return x;
9735 }
9736
9737 /* Return true when the expression T is known to be nonzero.
9738 For floating point we further ensure that T is not denormal.
9739 Similar logic is present in nonzero_address in rtlanal.h.
9740
9741 If the return value is based on the assumption that signed overflow
9742 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9743 change *STRICT_OVERFLOW_P. */
9744
9745 static bool
9746 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9747 {
9748 tree type = TREE_TYPE (t);
9749 enum tree_code code;
9750
9751 /* Doing something useful for floating point would need more work. */
9752 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9753 return false;
9754
9755 code = TREE_CODE (t);
9756 switch (TREE_CODE_CLASS (code))
9757 {
9758 case tcc_unary:
9759 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9760 strict_overflow_p);
9761 case tcc_binary:
9762 case tcc_comparison:
9763 return tree_binary_nonzero_warnv_p (code, type,
9764 TREE_OPERAND (t, 0),
9765 TREE_OPERAND (t, 1),
9766 strict_overflow_p);
9767 case tcc_constant:
9768 case tcc_declaration:
9769 case tcc_reference:
9770 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9771
9772 default:
9773 break;
9774 }
9775
9776 switch (code)
9777 {
9778 case TRUTH_NOT_EXPR:
9779 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9780 strict_overflow_p);
9781
9782 case TRUTH_AND_EXPR:
9783 case TRUTH_OR_EXPR:
9784 case TRUTH_XOR_EXPR:
9785 return tree_binary_nonzero_warnv_p (code, type,
9786 TREE_OPERAND (t, 0),
9787 TREE_OPERAND (t, 1),
9788 strict_overflow_p);
9789
9790 case COND_EXPR:
9791 case CONSTRUCTOR:
9792 case OBJ_TYPE_REF:
9793 case ASSERT_EXPR:
9794 case ADDR_EXPR:
9795 case WITH_SIZE_EXPR:
9796 case SSA_NAME:
9797 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9798
9799 case COMPOUND_EXPR:
9800 case MODIFY_EXPR:
9801 case BIND_EXPR:
9802 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9803 strict_overflow_p);
9804
9805 case SAVE_EXPR:
9806 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9807 strict_overflow_p);
9808
9809 case CALL_EXPR:
9810 {
9811 tree fndecl = get_callee_fndecl (t);
9812 if (!fndecl) return false;
9813 if (flag_delete_null_pointer_checks && !flag_check_new
9814 && DECL_IS_OPERATOR_NEW (fndecl)
9815 && !TREE_NOTHROW (fndecl))
9816 return true;
9817 if (flag_delete_null_pointer_checks
9818 && lookup_attribute ("returns_nonnull",
9819 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9820 return true;
9821 return alloca_call_p (t);
9822 }
9823
9824 default:
9825 break;
9826 }
9827 return false;
9828 }
9829
9830 /* Return true when the expression T is known to be nonzero.
9831 Handle warnings about undefined signed overflow. */
9832
9833 static bool
9834 tree_expr_nonzero_p (tree t)
9835 {
9836 bool ret, strict_overflow_p;
9837
9838 strict_overflow_p = false;
9839 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9840 if (strict_overflow_p)
9841 fold_overflow_warning (("assuming signed overflow does not occur when "
9842 "determining that expression is always "
9843 "non-zero"),
9844 WARN_STRICT_OVERFLOW_MISC);
9845 return ret;
9846 }
9847
9848 /* Fold a binary expression of code CODE and type TYPE with operands
9849 OP0 and OP1. LOC is the location of the resulting expression.
9850 Return the folded expression if folding is successful. Otherwise,
9851 return NULL_TREE. */
9852
9853 tree
9854 fold_binary_loc (location_t loc,
9855 enum tree_code code, tree type, tree op0, tree op1)
9856 {
9857 enum tree_code_class kind = TREE_CODE_CLASS (code);
9858 tree arg0, arg1, tem;
9859 tree t1 = NULL_TREE;
9860 bool strict_overflow_p;
9861 unsigned int prec;
9862
9863 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9864 && TREE_CODE_LENGTH (code) == 2
9865 && op0 != NULL_TREE
9866 && op1 != NULL_TREE);
9867
9868 arg0 = op0;
9869 arg1 = op1;
9870
9871 /* Strip any conversions that don't change the mode. This is
9872 safe for every expression, except for a comparison expression
9873 because its signedness is derived from its operands. So, in
9874 the latter case, only strip conversions that don't change the
9875 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9876 preserved.
9877
9878 Note that this is done as an internal manipulation within the
9879 constant folder, in order to find the simplest representation
9880 of the arguments so that their form can be studied. In any
9881 case, the appropriate type conversions should be put back in
9882 the tree that will get out of the constant folder. */
9883
9884 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9885 {
9886 STRIP_SIGN_NOPS (arg0);
9887 STRIP_SIGN_NOPS (arg1);
9888 }
9889 else
9890 {
9891 STRIP_NOPS (arg0);
9892 STRIP_NOPS (arg1);
9893 }
9894
9895 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9896 constant but we can't do arithmetic on them. */
9897 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9898 {
9899 tem = const_binop (code, type, arg0, arg1);
9900 if (tem != NULL_TREE)
9901 {
9902 if (TREE_TYPE (tem) != type)
9903 tem = fold_convert_loc (loc, type, tem);
9904 return tem;
9905 }
9906 }
9907
9908 /* If this is a commutative operation, and ARG0 is a constant, move it
9909 to ARG1 to reduce the number of tests below. */
9910 if (commutative_tree_code (code)
9911 && tree_swap_operands_p (arg0, arg1, true))
9912 return fold_build2_loc (loc, code, type, op1, op0);
9913
9914 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9915 to ARG1 to reduce the number of tests below. */
9916 if (kind == tcc_comparison
9917 && tree_swap_operands_p (arg0, arg1, true))
9918 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9919
9920 tem = generic_simplify (loc, code, type, op0, op1);
9921 if (tem)
9922 return tem;
9923
9924 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9925
9926 First check for cases where an arithmetic operation is applied to a
9927 compound, conditional, or comparison operation. Push the arithmetic
9928 operation inside the compound or conditional to see if any folding
9929 can then be done. Convert comparison to conditional for this purpose.
9930 The also optimizes non-constant cases that used to be done in
9931 expand_expr.
9932
9933 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9934 where one operand is a comparison and the other is a comparison, a
9935 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9936 code below would make the expression more complex. Change it to a
9937 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9938 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
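/* E.g. (a < b) & (c > d) becomes the TRUTH_AND_EXPR
   (a < b) && (c > d), and (a < b) == (c < d) becomes the inverted
   TRUTH_XOR_EXPR !((a < b) ^ (c < d)) (illustrative). */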
9939
9940 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9941 || code == EQ_EXPR || code == NE_EXPR)
9942 && TREE_CODE (type) != VECTOR_TYPE
9943 && ((truth_value_p (TREE_CODE (arg0))
9944 && (truth_value_p (TREE_CODE (arg1))
9945 || (TREE_CODE (arg1) == BIT_AND_EXPR
9946 && integer_onep (TREE_OPERAND (arg1, 1)))))
9947 || (truth_value_p (TREE_CODE (arg1))
9948 && (truth_value_p (TREE_CODE (arg0))
9949 || (TREE_CODE (arg0) == BIT_AND_EXPR
9950 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9951 {
9952 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9953 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9954 : TRUTH_XOR_EXPR,
9955 boolean_type_node,
9956 fold_convert_loc (loc, boolean_type_node, arg0),
9957 fold_convert_loc (loc, boolean_type_node, arg1));
9958
9959 if (code == EQ_EXPR)
9960 tem = invert_truthvalue_loc (loc, tem);
9961
9962 return fold_convert_loc (loc, type, tem);
9963 }
9964
9965 if (TREE_CODE_CLASS (code) == tcc_binary
9966 || TREE_CODE_CLASS (code) == tcc_comparison)
9967 {
9968 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9969 {
9970 tem = fold_build2_loc (loc, code, type,
9971 fold_convert_loc (loc, TREE_TYPE (op0),
9972 TREE_OPERAND (arg0, 1)), op1);
9973 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9974 tem);
9975 }
9976 if (TREE_CODE (arg1) == COMPOUND_EXPR
9977 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9978 {
9979 tem = fold_build2_loc (loc, code, type, op0,
9980 fold_convert_loc (loc, TREE_TYPE (op1),
9981 TREE_OPERAND (arg1, 1)));
9982 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9983 tem);
9984 }
9985
9986 if (TREE_CODE (arg0) == COND_EXPR
9987 || TREE_CODE (arg0) == VEC_COND_EXPR
9988 || COMPARISON_CLASS_P (arg0))
9989 {
9990 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9991 arg0, arg1,
9992 /*cond_first_p=*/1);
9993 if (tem != NULL_TREE)
9994 return tem;
9995 }
9996
9997 if (TREE_CODE (arg1) == COND_EXPR
9998 || TREE_CODE (arg1) == VEC_COND_EXPR
9999 || COMPARISON_CLASS_P (arg1))
10000 {
10001 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10002 arg1, arg0,
10003 /*cond_first_p=*/0);
10004 if (tem != NULL_TREE)
10005 return tem;
10006 }
10007 }
10008
10009 switch (code)
10010 {
10011 case MEM_REF:
10012 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10013 if (TREE_CODE (arg0) == ADDR_EXPR
10014 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10015 {
10016 tree iref = TREE_OPERAND (arg0, 0);
10017 return fold_build2 (MEM_REF, type,
10018 TREE_OPERAND (iref, 0),
10019 int_const_binop (PLUS_EXPR, arg1,
10020 TREE_OPERAND (iref, 1)));
10021 }
10022
10023 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10024 if (TREE_CODE (arg0) == ADDR_EXPR
10025 && handled_component_p (TREE_OPERAND (arg0, 0)))
10026 {
10027 tree base;
10028 HOST_WIDE_INT coffset;
10029 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10030 &coffset);
10031 if (!base)
10032 return NULL_TREE;
10033 return fold_build2 (MEM_REF, type,
10034 build_fold_addr_expr (base),
10035 int_const_binop (PLUS_EXPR, arg1,
10036 size_int (coffset)));
10037 }
10038
10039 return NULL_TREE;
10040
10041 case POINTER_PLUS_EXPR:
10042 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10043 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10044 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10045 return fold_convert_loc (loc, type,
10046 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10047 fold_convert_loc (loc, sizetype,
10048 arg1),
10049 fold_convert_loc (loc, sizetype,
10050 arg0)));
10051
10052 return NULL_TREE;
10053
10054 case PLUS_EXPR:
10055 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10056 {
10057 /* X + (X / CST) * -CST is X % CST. */
10058 if (TREE_CODE (arg1) == MULT_EXPR
10059 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10060 && operand_equal_p (arg0,
10061 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10062 {
10063 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10064 tree cst1 = TREE_OPERAND (arg1, 1);
10065 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10066 cst1, cst0);
10067 if (sum && integer_zerop (sum))
10068 return fold_convert_loc (loc, type,
10069 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10070 TREE_TYPE (arg0), arg0,
10071 cst0));
10072 }
10073 }
10074
10075 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same
10076 or being the constant 1. Make sure the type is not saturating and has the signedness of
10077 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10078 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10079 if ((TREE_CODE (arg0) == MULT_EXPR
10080 || TREE_CODE (arg1) == MULT_EXPR)
10081 && !TYPE_SATURATING (type)
10082 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10083 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10084 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10085 {
10086 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10087 if (tem)
10088 return tem;
10089 }
10090
10091 if (! FLOAT_TYPE_P (type))
10092 {
10093 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10094 with a constant, and the two constants have no bits in common,
10095 we should treat this as a BIT_IOR_EXPR since this may produce more
10096 simplifications. */
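/* E.g. (X & 0xf0) + (Y & 0x0f) becomes (X & 0xf0) | (Y & 0x0f):
   with no bits in common the addition can never carry
   (illustrative). */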
10097 if (TREE_CODE (arg0) == BIT_AND_EXPR
10098 && TREE_CODE (arg1) == BIT_AND_EXPR
10099 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10100 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10101 && wi::bit_and (TREE_OPERAND (arg0, 1),
10102 TREE_OPERAND (arg1, 1)) == 0)
10103 {
10104 code = BIT_IOR_EXPR;
10105 goto bit_ior;
10106 }
10107
10108 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10109 (plus (plus (mult) (mult)) (foo)) so that we can
10110 take advantage of the factoring cases below. */
10111 if (ANY_INTEGRAL_TYPE_P (type)
10112 && TYPE_OVERFLOW_WRAPS (type)
10113 && (((TREE_CODE (arg0) == PLUS_EXPR
10114 || TREE_CODE (arg0) == MINUS_EXPR)
10115 && TREE_CODE (arg1) == MULT_EXPR)
10116 || ((TREE_CODE (arg1) == PLUS_EXPR
10117 || TREE_CODE (arg1) == MINUS_EXPR)
10118 && TREE_CODE (arg0) == MULT_EXPR)))
10119 {
10120 tree parg0, parg1, parg, marg;
10121 enum tree_code pcode;
10122
10123 if (TREE_CODE (arg1) == MULT_EXPR)
10124 parg = arg0, marg = arg1;
10125 else
10126 parg = arg1, marg = arg0;
10127 pcode = TREE_CODE (parg);
10128 parg0 = TREE_OPERAND (parg, 0);
10129 parg1 = TREE_OPERAND (parg, 1);
10130 STRIP_NOPS (parg0);
10131 STRIP_NOPS (parg1);
10132
10133 if (TREE_CODE (parg0) == MULT_EXPR
10134 && TREE_CODE (parg1) != MULT_EXPR)
10135 return fold_build2_loc (loc, pcode, type,
10136 fold_build2_loc (loc, PLUS_EXPR, type,
10137 fold_convert_loc (loc, type,
10138 parg0),
10139 fold_convert_loc (loc, type,
10140 marg)),
10141 fold_convert_loc (loc, type, parg1));
10142 if (TREE_CODE (parg0) != MULT_EXPR
10143 && TREE_CODE (parg1) == MULT_EXPR)
10144 return
10145 fold_build2_loc (loc, PLUS_EXPR, type,
10146 fold_convert_loc (loc, type, parg0),
10147 fold_build2_loc (loc, pcode, type,
10148 fold_convert_loc (loc, type, marg),
10149 fold_convert_loc (loc, type,
10150 parg1)));
10151 }
10152 }
10153 else
10154 {
10155 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10156 to __complex__ ( x, y ). This is not the same for SNaNs or
10157 if signed zeros are involved. */
10158 if (!HONOR_SNANS (element_mode (arg0))
10159 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10160 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10161 {
10162 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10163 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10164 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10165 bool arg0rz = false, arg0iz = false;
10166 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10167 || (arg0i && (arg0iz = real_zerop (arg0i))))
10168 {
10169 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10170 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10171 if (arg0rz && arg1i && real_zerop (arg1i))
10172 {
10173 tree rp = arg1r ? arg1r
10174 : build1 (REALPART_EXPR, rtype, arg1);
10175 tree ip = arg0i ? arg0i
10176 : build1 (IMAGPART_EXPR, rtype, arg0);
10177 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10178 }
10179 else if (arg0iz && arg1r && real_zerop (arg1r))
10180 {
10181 tree rp = arg0r ? arg0r
10182 : build1 (REALPART_EXPR, rtype, arg0);
10183 tree ip = arg1i ? arg1i
10184 : build1 (IMAGPART_EXPR, rtype, arg1);
10185 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10186 }
10187 }
10188 }
10189
10190 if (flag_unsafe_math_optimizations
10191 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10192 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10193 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10194 return tem;
10195
10196 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10197 We associate floats only if the user has specified
10198 -fassociative-math. */
10199 if (flag_associative_math
10200 && TREE_CODE (arg1) == PLUS_EXPR
10201 && TREE_CODE (arg0) != MULT_EXPR)
10202 {
10203 tree tree10 = TREE_OPERAND (arg1, 0);
10204 tree tree11 = TREE_OPERAND (arg1, 1);
10205 if (TREE_CODE (tree11) == MULT_EXPR
10206 && TREE_CODE (tree10) == MULT_EXPR)
10207 {
10208 tree tree0;
10209 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10210 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10211 }
10212 }
10213 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10214 We associate floats only if the user has specified
10215 -fassociative-math. */
10216 if (flag_associative_math
10217 && TREE_CODE (arg0) == PLUS_EXPR
10218 && TREE_CODE (arg1) != MULT_EXPR)
10219 {
10220 tree tree00 = TREE_OPERAND (arg0, 0);
10221 tree tree01 = TREE_OPERAND (arg0, 1);
10222 if (TREE_CODE (tree01) == MULT_EXPR
10223 && TREE_CODE (tree00) == MULT_EXPR)
10224 {
10225 tree tree0;
10226 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10227 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10228 }
10229 }
10230 }
10231
10232 bit_rotate:
10233 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10234 is a rotate of A by C1 bits. */
10235 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10236 is a rotate of A by B bits. */
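/* E.g. for unsigned 32-bit A, (A << 3) + (A >> 29) and
   (A << B) + (A >> (32 - B)) both become left rotates
   (illustrative). */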
10237 {
10238 enum tree_code code0, code1;
10239 tree rtype;
10240 code0 = TREE_CODE (arg0);
10241 code1 = TREE_CODE (arg1);
10242 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10243 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10244 && operand_equal_p (TREE_OPERAND (arg0, 0),
10245 TREE_OPERAND (arg1, 0), 0)
10246 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10247 TYPE_UNSIGNED (rtype))
10248 /* Only create rotates in complete modes. Other cases are not
10249 expanded properly. */
10250 && (element_precision (rtype)
10251 == element_precision (TYPE_MODE (rtype))))
10252 {
10253 tree tree01, tree11;
10254 enum tree_code code01, code11;
10255
10256 tree01 = TREE_OPERAND (arg0, 1);
10257 tree11 = TREE_OPERAND (arg1, 1);
10258 STRIP_NOPS (tree01);
10259 STRIP_NOPS (tree11);
10260 code01 = TREE_CODE (tree01);
10261 code11 = TREE_CODE (tree11);
10262 if (code01 == INTEGER_CST
10263 && code11 == INTEGER_CST
10264 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10265 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10266 {
10267 tem = build2_loc (loc, LROTATE_EXPR,
10268 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10269 TREE_OPERAND (arg0, 0),
10270 code0 == LSHIFT_EXPR
10271 ? TREE_OPERAND (arg0, 1)
10272 : TREE_OPERAND (arg1, 1));
10273 return fold_convert_loc (loc, type, tem);
10274 }
10275 else if (code11 == MINUS_EXPR)
10276 {
10277 tree tree110, tree111;
10278 tree110 = TREE_OPERAND (tree11, 0);
10279 tree111 = TREE_OPERAND (tree11, 1);
10280 STRIP_NOPS (tree110);
10281 STRIP_NOPS (tree111);
10282 if (TREE_CODE (tree110) == INTEGER_CST
10283 && 0 == compare_tree_int (tree110,
10284 element_precision
10285 (TREE_TYPE (TREE_OPERAND
10286 (arg0, 0))))
10287 && operand_equal_p (tree01, tree111, 0))
10288 return
10289 fold_convert_loc (loc, type,
10290 build2 ((code0 == LSHIFT_EXPR
10291 ? LROTATE_EXPR
10292 : RROTATE_EXPR),
10293 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10294 TREE_OPERAND (arg0, 0),
10295 TREE_OPERAND (arg0, 1)));
10296 }
10297 else if (code01 == MINUS_EXPR)
10298 {
10299 tree tree010, tree011;
10300 tree010 = TREE_OPERAND (tree01, 0);
10301 tree011 = TREE_OPERAND (tree01, 1);
10302 STRIP_NOPS (tree010);
10303 STRIP_NOPS (tree011);
10304 if (TREE_CODE (tree010) == INTEGER_CST
10305 && 0 == compare_tree_int (tree010,
10306 element_precision
10307 (TREE_TYPE (TREE_OPERAND
10308 (arg0, 0))))
10309 && operand_equal_p (tree11, tree011, 0))
10310 return fold_convert_loc
10311 (loc, type,
10312 build2 ((code0 != LSHIFT_EXPR
10313 ? LROTATE_EXPR
10314 : RROTATE_EXPR),
10315 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10316 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10317 }
10318 }
10319 }
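/* Worked example of the rotate recognition above: for a 32-bit
   unsigned x, (x << 3) + (x >> 29) has 3 + 29 == 32, the precision
   of x, so it folds to a left-rotate of x by 3; likewise
   (x << b) + (x >> (32 - b)) folds to a rotate by b.  */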
10320
10321 associate:
10322 /* In most languages, we can't associate operations on floats through
10323 parentheses. Rather than remember where the parentheses were, we
10324 don't associate floats at all, unless the user has specified
10325 -fassociative-math.
10326 And we need to make sure the type is not saturating. */
10327
10328 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10329 && !TYPE_SATURATING (type))
10330 {
10331 tree var0, con0, lit0, minus_lit0;
10332 tree var1, con1, lit1, minus_lit1;
10333 tree atype = type;
10334 bool ok = true;
10335
10336 /* Split both trees into variables, constants, and literals. Then
10337 associate each group together, the constants with literals,
10338 then the result with variables. This increases the chances of
10339 literals being recombined later and of generating relocatable
10340 expressions for the sum of a constant and literal. */
10341 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10342 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10343 code == MINUS_EXPR);
10344
10345 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10346 if (code == MINUS_EXPR)
10347 code = PLUS_EXPR;
10348
10349 /* With undefined overflow prefer doing association in a type
10350 which wraps on overflow, if that is one of the operand types. */
10351 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10352 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10353 {
10354 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10355 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10356 atype = TREE_TYPE (arg0);
10357 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10358 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10359 atype = TREE_TYPE (arg1);
10360 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10361 }
10362
10363 /* With undefined overflow we can only associate constants with one
10364 variable, and constants whose association doesn't overflow. */
10365 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10366 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10367 {
10368 if (var0 && var1)
10369 {
10370 tree tmp0 = var0;
10371 tree tmp1 = var1;
10372
10373 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10374 tmp0 = TREE_OPERAND (tmp0, 0);
10375 if (CONVERT_EXPR_P (tmp0)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10378 <= TYPE_PRECISION (atype)))
10379 tmp0 = TREE_OPERAND (tmp0, 0);
10380 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10381 tmp1 = TREE_OPERAND (tmp1, 0);
10382 if (CONVERT_EXPR_P (tmp1)
10383 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10384 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10385 <= TYPE_PRECISION (atype)))
10386 tmp1 = TREE_OPERAND (tmp1, 0);
10387 /* The only case we can still associate with two variables
10388 is if they are the same, modulo negation and bit-pattern
10389 preserving conversions. */
10390 if (!operand_equal_p (tmp0, tmp1, 0))
10391 ok = false;
10392 }
10393 }
10394
10395 /* Only do something if we found more than two objects. Otherwise,
10396 nothing has changed and we risk infinite recursion. */
10397 if (ok
10398 && (2 < ((var0 != 0) + (var1 != 0)
10399 + (con0 != 0) + (con1 != 0)
10400 + (lit0 != 0) + (lit1 != 0)
10401 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10402 {
10403 bool any_overflows = false;
10404 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10405 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10406 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10407 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10408 var0 = associate_trees (loc, var0, var1, code, atype);
10409 con0 = associate_trees (loc, con0, con1, code, atype);
10410 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10411 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10412 code, atype);
10413
10414 /* Preserve the MINUS_EXPR if the negative part of the literal is
10415 greater than the positive part. Otherwise, the multiplicative
10416 folding code (i.e. extract_muldiv) may be fooled when
10417 unsigned constants are subtracted, as in the following
10418 example: ((X*2 + 4) - 8U)/2. */
10419 if (minus_lit0 && lit0)
10420 {
10421 if (TREE_CODE (lit0) == INTEGER_CST
10422 && TREE_CODE (minus_lit0) == INTEGER_CST
10423 && tree_int_cst_lt (lit0, minus_lit0))
10424 {
10425 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10426 MINUS_EXPR, atype);
10427 lit0 = 0;
10428 }
10429 else
10430 {
10431 lit0 = associate_trees (loc, lit0, minus_lit0,
10432 MINUS_EXPR, atype);
10433 minus_lit0 = 0;
10434 }
10435 }
10436
10437 /* Don't introduce overflows through reassociation. */
10438 if (!any_overflows
10439 && ((lit0 && TREE_OVERFLOW_P (lit0))
10440 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10441 return NULL_TREE;
10442
10443 if (minus_lit0)
10444 {
10445 if (con0 == 0)
10446 return
10447 fold_convert_loc (loc, type,
10448 associate_trees (loc, var0, minus_lit0,
10449 MINUS_EXPR, atype));
10450 else
10451 {
10452 con0 = associate_trees (loc, con0, minus_lit0,
10453 MINUS_EXPR, atype);
10454 return
10455 fold_convert_loc (loc, type,
10456 associate_trees (loc, var0, con0,
10457 PLUS_EXPR, atype));
10458 }
10459 }
10460
10461 con0 = associate_trees (loc, con0, lit0, code, atype);
10462 return
10463 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10464 code, atype));
10465 }
10466 }
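/* Example of the association above: x + 1 + y + 2 is split into the
   variables x and y and the literals 1 and 2, then recombined as
   (x + y) + 3 with the constants merged.  */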
10467
10468 return NULL_TREE;
10469
10470 case MINUS_EXPR:
10471 /* Pointer simplifications for subtraction, simple reassociations. */
10472 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10473 {
10474 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10475 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10476 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10477 {
10478 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10479 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10480 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10481 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10482 return fold_build2_loc (loc, PLUS_EXPR, type,
10483 fold_build2_loc (loc, MINUS_EXPR, type,
10484 arg00, arg10),
10485 fold_build2_loc (loc, MINUS_EXPR, type,
10486 arg01, arg11));
10487 }
10488 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10489 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10490 {
10491 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10492 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10493 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10494 fold_convert_loc (loc, type, arg1));
10495 if (tmp)
10496 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10497 }
10498 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10499 simplifies. */
10500 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10501 {
10502 tree arg10 = fold_convert_loc (loc, type,
10503 TREE_OPERAND (arg1, 0));
10504 tree arg11 = fold_convert_loc (loc, type,
10505 TREE_OPERAND (arg1, 1));
10506 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10507 fold_convert_loc (loc, type, arg0),
10508 arg10);
10509 if (tmp)
10510 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10511 }
10512 }
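/* Example of the pointer simplifications above: (p p+ 4) - (q p+ 12)
   becomes (p - q) + (4 - 12); when p and q are the same pointer,
   further folding reduces this to -8.  */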
10513 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10514 if (TREE_CODE (arg0) == NEGATE_EXPR
10515 && negate_expr_p (arg1)
10516 && reorder_operands_p (arg0, arg1))
10517 return fold_build2_loc (loc, MINUS_EXPR, type,
10518 fold_convert_loc (loc, type,
10519 negate_expr (arg1)),
10520 fold_convert_loc (loc, type,
10521 TREE_OPERAND (arg0, 0)));
10522
10523 /* X - (X / Y) * Y is X % Y. */
10524 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10525 && TREE_CODE (arg1) == MULT_EXPR
10526 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10527 && operand_equal_p (arg0,
10528 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10529 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10530 TREE_OPERAND (arg1, 1), 0))
10531 return
10532 fold_convert_loc (loc, type,
10533 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10534 arg0, TREE_OPERAND (arg1, 1)));
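/* Example: x - (x / 8) * 8 folds to x % 8; this is just the C
   identity x == (x / y) * y + x % y for truncating division.  */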
10535
10536 if (! FLOAT_TYPE_P (type))
10537 {
10538 /* Fold A - (A & B) into ~B & A. */
10539 if (!TREE_SIDE_EFFECTS (arg0)
10540 && TREE_CODE (arg1) == BIT_AND_EXPR)
10541 {
10542 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10543 {
10544 tree arg10 = fold_convert_loc (loc, type,
10545 TREE_OPERAND (arg1, 0));
10546 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10547 fold_build1_loc (loc, BIT_NOT_EXPR,
10548 type, arg10),
10549 fold_convert_loc (loc, type, arg0));
10550 }
10551 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10552 {
10553 tree arg11 = fold_convert_loc (loc,
10554 type, TREE_OPERAND (arg1, 1));
10555 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10556 fold_build1_loc (loc, BIT_NOT_EXPR,
10557 type, arg11),
10558 fold_convert_loc (loc, type, arg0));
10559 }
10560 }
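/* Example: a - (a & b) folds to ~b & a; the bits of (a & b) are a
   subset of the bits of a, so the subtraction merely clears them.  */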
10561
10562 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10563 any power of 2 minus 1. */
10564 if (TREE_CODE (arg0) == BIT_AND_EXPR
10565 && TREE_CODE (arg1) == BIT_AND_EXPR
10566 && operand_equal_p (TREE_OPERAND (arg0, 0),
10567 TREE_OPERAND (arg1, 0), 0))
10568 {
10569 tree mask0 = TREE_OPERAND (arg0, 1);
10570 tree mask1 = TREE_OPERAND (arg1, 1);
10571 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10572
10573 if (operand_equal_p (tem, mask1, 0))
10574 {
10575 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10576 TREE_OPERAND (arg0, 0), mask1);
10577 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10578 }
10579 }
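/* Worked example: with the mask 15 (a power of 2 minus 1),
   (a & ~15) - (a & 15) folds to (a ^ 15) - 15; for a == 0x27 both
   forms evaluate to 0x19.  */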
10580 }
10581
10582 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10583 __complex__ ( x, -y ). This is not the same for SNaNs or if
10584 signed zeros are involved. */
10585 if (!HONOR_SNANS (element_mode (arg0))
10586 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10587 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10588 {
10589 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10590 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10591 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10592 bool arg0rz = false, arg0iz = false;
10593 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10594 || (arg0i && (arg0iz = real_zerop (arg0i))))
10595 {
10596 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10597 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10598 if (arg0rz && arg1i && real_zerop (arg1i))
10599 {
10600 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10601 arg1r ? arg1r
10602 : build1 (REALPART_EXPR, rtype, arg1));
10603 tree ip = arg0i ? arg0i
10604 : build1 (IMAGPART_EXPR, rtype, arg0);
10605 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10606 }
10607 else if (arg0iz && arg1r && real_zerop (arg1r))
10608 {
10609 tree rp = arg0r ? arg0r
10610 : build1 (REALPART_EXPR, rtype, arg0);
10611 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10612 arg1i ? arg1i
10613 : build1 (IMAGPART_EXPR, rtype, arg1));
10614 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10615 }
10616 }
10617 }
10618
10619 /* A - B -> A + (-B) if B is easily negatable. */
10620 if (negate_expr_p (arg1)
10621 && !TYPE_OVERFLOW_SANITIZED (type)
10622 && ((FLOAT_TYPE_P (type)
10623 /* Avoid this transformation if B is a positive REAL_CST. */
10624 && (TREE_CODE (arg1) != REAL_CST
10625 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10626 || INTEGRAL_TYPE_P (type)))
10627 return fold_build2_loc (loc, PLUS_EXPR, type,
10628 fold_convert_loc (loc, type, arg0),
10629 fold_convert_loc (loc, type,
10630 negate_expr (arg1)));
10631
10632 /* Try folding difference of addresses. */
10633 {
10634 HOST_WIDE_INT diff;
10635
10636 if ((TREE_CODE (arg0) == ADDR_EXPR
10637 || TREE_CODE (arg1) == ADDR_EXPR)
10638 && ptr_difference_const (arg0, arg1, &diff))
10639 return build_int_cst_type (type, diff);
10640 }
10641
10642 /* Fold &a[i] - &a[j] to i-j. */
10643 if (TREE_CODE (arg0) == ADDR_EXPR
10644 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10645 && TREE_CODE (arg1) == ADDR_EXPR
10646 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10647 {
10648 tree tem = fold_addr_of_array_ref_difference (loc, type,
10649 TREE_OPERAND (arg0, 0),
10650 TREE_OPERAND (arg1, 0));
10651 if (tem)
10652 return tem;
10653 }
10654
10655 if (FLOAT_TYPE_P (type)
10656 && flag_unsafe_math_optimizations
10657 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10658 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10659 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10660 return tem;
10661
10662 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10663 one. Make sure the type is not saturating and has the signedness of
10664 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10665 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10666 if ((TREE_CODE (arg0) == MULT_EXPR
10667 || TREE_CODE (arg1) == MULT_EXPR)
10668 && !TYPE_SATURATING (type)
10669 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10670 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10671 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10672 {
10673 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10674 if (tem)
10675 return tem;
10676 }
10677
10678 goto associate;
10679
10680 case MULT_EXPR:
10681 /* (-A) * (-B) -> A * B */
10682 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10683 return fold_build2_loc (loc, MULT_EXPR, type,
10684 fold_convert_loc (loc, type,
10685 TREE_OPERAND (arg0, 0)),
10686 fold_convert_loc (loc, type,
10687 negate_expr (arg1)));
10688 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10689 return fold_build2_loc (loc, MULT_EXPR, type,
10690 fold_convert_loc (loc, type,
10691 negate_expr (arg0)),
10692 fold_convert_loc (loc, type,
10693 TREE_OPERAND (arg1, 0)));
10694
10695 if (! FLOAT_TYPE_P (type))
10696 {
10697 /* Transform x * -C into -x * C if x is easily negatable. */
10698 if (TREE_CODE (arg1) == INTEGER_CST
10699 && tree_int_cst_sgn (arg1) == -1
10700 && negate_expr_p (arg0)
10701 && (tem = negate_expr (arg1)) != arg1
10702 && !TREE_OVERFLOW (tem))
10703 return fold_build2_loc (loc, MULT_EXPR, type,
10704 fold_convert_loc (loc, type,
10705 negate_expr (arg0)),
10706 tem);
10707
10708 /* (a * (1 << b)) is (a << b) */
10709 if (TREE_CODE (arg1) == LSHIFT_EXPR
10710 && integer_onep (TREE_OPERAND (arg1, 0)))
10711 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10712 TREE_OPERAND (arg1, 1));
10713 if (TREE_CODE (arg0) == LSHIFT_EXPR
10714 && integer_onep (TREE_OPERAND (arg0, 0)))
10715 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10716 TREE_OPERAND (arg0, 1));
10717
10718 /* (A + A) * C -> A * 2 * C */
10719 if (TREE_CODE (arg0) == PLUS_EXPR
10720 && TREE_CODE (arg1) == INTEGER_CST
10721 && operand_equal_p (TREE_OPERAND (arg0, 0),
10722 TREE_OPERAND (arg0, 1), 0))
10723 return fold_build2_loc (loc, MULT_EXPR, type,
10724 omit_one_operand_loc (loc, type,
10725 TREE_OPERAND (arg0, 0),
10726 TREE_OPERAND (arg0, 1)),
10727 fold_build2_loc (loc, MULT_EXPR, type,
10728 build_int_cst (type, 2), arg1));
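/* Example: (a + a) * 3 becomes a * (2 * 3), and the inner constant
   multiplication then folds to give a * 6.  */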
10729
10730 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10731 sign-changing only. */
10732 if (TREE_CODE (arg1) == INTEGER_CST
10733 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10734 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10735 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
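/* Example, in the notation of the comment above: ((int) (x /[ex] 4)) * 4
   folds back to (int) x, since the division is known to be exact.  */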
10736
10737 strict_overflow_p = false;
10738 if (TREE_CODE (arg1) == INTEGER_CST
10739 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10740 &strict_overflow_p)))
10741 {
10742 if (strict_overflow_p)
10743 fold_overflow_warning (("assuming signed overflow does not "
10744 "occur when simplifying "
10745 "multiplication"),
10746 WARN_STRICT_OVERFLOW_MISC);
10747 return fold_convert_loc (loc, type, tem);
10748 }
10749
10750 /* Optimize z * conj(z) for integer complex numbers. */
10751 if (TREE_CODE (arg0) == CONJ_EXPR
10752 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10753 return fold_mult_zconjz (loc, type, arg1);
10754 if (TREE_CODE (arg1) == CONJ_EXPR
10755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10756 return fold_mult_zconjz (loc, type, arg0);
10757 }
10758 else
10759 {
10760 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10761 the result for floating-point types due to rounding, so it is applied
10762 only if -fassociative-math was specified. */
10763 if (flag_associative_math
10764 && TREE_CODE (arg0) == RDIV_EXPR
10765 && TREE_CODE (arg1) == REAL_CST
10766 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10767 {
10768 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10769 arg1);
10770 if (tem)
10771 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10772 TREE_OPERAND (arg0, 1));
10773 }
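/* Example: with -fassociative-math, (6.0 / x) * 2.0 becomes
   12.0 / x, even though rounding of the intermediate 6.0 / x could
   make the two forms differ.  */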
10774
10775 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10776 if (operand_equal_p (arg0, arg1, 0))
10777 {
10778 tree tem = fold_strip_sign_ops (arg0);
10779 if (tem != NULL_TREE)
10780 {
10781 tem = fold_convert_loc (loc, type, tem);
10782 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10783 }
10784 }
10785
10786 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10787 This is not the same for NaNs or if signed zeros are
10788 involved. */
10789 if (!HONOR_NANS (arg0)
10790 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10791 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10792 && TREE_CODE (arg1) == COMPLEX_CST
10793 && real_zerop (TREE_REALPART (arg1)))
10794 {
10795 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10796 if (real_onep (TREE_IMAGPART (arg1)))
10797 return
10798 fold_build2_loc (loc, COMPLEX_EXPR, type,
10799 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10800 rtype, arg0)),
10801 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10802 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10803 return
10804 fold_build2_loc (loc, COMPLEX_EXPR, type,
10805 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10806 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10807 rtype, arg0)));
10808 }
10809
10810 /* Optimize z * conj(z) for floating point complex numbers.
10811 Guarded by flag_unsafe_math_optimizations as non-finite
10812 imaginary components don't produce scalar results. */
10813 if (flag_unsafe_math_optimizations
10814 && TREE_CODE (arg0) == CONJ_EXPR
10815 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10816 return fold_mult_zconjz (loc, type, arg1);
10817 if (flag_unsafe_math_optimizations
10818 && TREE_CODE (arg1) == CONJ_EXPR
10819 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10820 return fold_mult_zconjz (loc, type, arg0);
10821
10822 if (flag_unsafe_math_optimizations)
10823 {
10824 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10825 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10826
10827 /* Optimizations of root(...)*root(...). */
10828 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10829 {
10830 tree rootfn, arg;
10831 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10832 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10833
10834 /* Optimize sqrt(x)*sqrt(x) as x. */
10835 if (BUILTIN_SQRT_P (fcode0)
10836 && operand_equal_p (arg00, arg10, 0)
10837 && ! HONOR_SNANS (element_mode (type)))
10838 return arg00;
10839
10840 /* Optimize root(x)*root(y) as root(x*y). */
10841 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10842 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10843 return build_call_expr_loc (loc, rootfn, 1, arg);
10844 }
10845
10846 /* Optimize expN(x)*expN(y) as expN(x+y). */
10847 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10848 {
10849 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10850 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10851 CALL_EXPR_ARG (arg0, 0),
10852 CALL_EXPR_ARG (arg1, 0));
10853 return build_call_expr_loc (loc, expfn, 1, arg);
10854 }
10855
10856 /* Optimizations of pow(...)*pow(...). */
10857 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10858 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10859 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10860 {
10861 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10862 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10863 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10864 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10865
10866 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10867 if (operand_equal_p (arg01, arg11, 0))
10868 {
10869 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10870 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10871 arg00, arg10);
10872 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10873 }
10874
10875 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10876 if (operand_equal_p (arg00, arg10, 0))
10877 {
10878 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10879 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10880 arg01, arg11);
10881 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10882 }
10883 }
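/* Examples: pow (x, 2.0) * pow (y, 2.0) becomes pow (x * y, 2.0),
   and pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  */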
10884
10885 /* Optimize tan(x)*cos(x) as sin(x). */
10886 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10887 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10888 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10889 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10890 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10891 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10892 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10893 CALL_EXPR_ARG (arg1, 0), 0))
10894 {
10895 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10896
10897 if (sinfn != NULL_TREE)
10898 return build_call_expr_loc (loc, sinfn, 1,
10899 CALL_EXPR_ARG (arg0, 0));
10900 }
10901
10902 /* Optimize x*pow(x,c) as pow(x,c+1). */
10903 if (fcode1 == BUILT_IN_POW
10904 || fcode1 == BUILT_IN_POWF
10905 || fcode1 == BUILT_IN_POWL)
10906 {
10907 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10908 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10909 if (TREE_CODE (arg11) == REAL_CST
10910 && !TREE_OVERFLOW (arg11)
10911 && operand_equal_p (arg0, arg10, 0))
10912 {
10913 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10914 REAL_VALUE_TYPE c;
10915 tree arg;
10916
10917 c = TREE_REAL_CST (arg11);
10918 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10919 arg = build_real (type, c);
10920 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10921 }
10922 }
10923
10924 /* Optimize pow(x,c)*x as pow(x,c+1). */
10925 if (fcode0 == BUILT_IN_POW
10926 || fcode0 == BUILT_IN_POWF
10927 || fcode0 == BUILT_IN_POWL)
10928 {
10929 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10930 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10931 if (TREE_CODE (arg01) == REAL_CST
10932 && !TREE_OVERFLOW (arg01)
10933 && operand_equal_p (arg1, arg00, 0))
10934 {
10935 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10936 REAL_VALUE_TYPE c;
10937 tree arg;
10938
10939 c = TREE_REAL_CST (arg01);
10940 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10941 arg = build_real (type, c);
10942 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10943 }
10944 }
10945
10946 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10947 if (!in_gimple_form
10948 && optimize
10949 && operand_equal_p (arg0, arg1, 0))
10950 {
10951 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10952
10953 if (powfn)
10954 {
10955 tree arg = build_real (type, dconst2);
10956 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10957 }
10958 }
10959 }
10960 }
10961 goto associate;
10962
10963 case BIT_IOR_EXPR:
10964 bit_ior:
10965 /* ~X | X is -1. */
10966 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10968 {
10969 t1 = build_zero_cst (type);
10970 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10971 return omit_one_operand_loc (loc, type, t1, arg1);
10972 }
10973
10974 /* X | ~X is -1. */
10975 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10976 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10977 {
10978 t1 = build_zero_cst (type);
10979 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10980 return omit_one_operand_loc (loc, type, t1, arg0);
10981 }
10982
10983 /* Canonicalize (X & C1) | C2. */
10984 if (TREE_CODE (arg0) == BIT_AND_EXPR
10985 && TREE_CODE (arg1) == INTEGER_CST
10986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10987 {
10988 int width = TYPE_PRECISION (type), w;
10989 wide_int c1 = TREE_OPERAND (arg0, 1);
10990 wide_int c2 = arg1;
10991
10992 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10993 if ((c1 & c2) == c1)
10994 return omit_one_operand_loc (loc, type, arg1,
10995 TREE_OPERAND (arg0, 0));
10996
10997 wide_int msk = wi::mask (width, false,
10998 TYPE_PRECISION (TREE_TYPE (arg1)));
10999
11000 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11001 if (msk.and_not (c1 | c2) == 0)
11002 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11003 TREE_OPERAND (arg0, 0), arg1);
11004
11005 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11006 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11007 mode which allows further optimizations. */
11008 c1 &= msk;
11009 c2 &= msk;
11010 wide_int c3 = c1.and_not (c2);
11011 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11012 {
11013 wide_int mask = wi::mask (w, false,
11014 TYPE_PRECISION (type));
11015 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11016 {
11017 c3 = mask;
11018 break;
11019 }
11020 }
11021
11022 if (c3 != c1)
11023 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11024 fold_build2_loc (loc, BIT_AND_EXPR, type,
11025 TREE_OPERAND (arg0, 0),
11026 wide_int_to_tree (type,
11027 c3)),
11028 arg1);
11029 }
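/* Examples of the canonicalization above: (x & 0x0f) | 0xff becomes
   0xff because C1 & C2 == C1, and, for an 8-bit type,
   (x & 0xf0) | 0x0f becomes x | 0x0f because C1 | C2 is all ones.  */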
11030
11031 /* (X & ~Y) | (~X & Y) is X ^ Y */
11032 if (TREE_CODE (arg0) == BIT_AND_EXPR
11033 && TREE_CODE (arg1) == BIT_AND_EXPR)
11034 {
11035 tree a0, a1, l0, l1, n0, n1;
11036
11037 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11038 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11039
11040 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11041 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11042
11043 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11044 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11045
11046 if ((operand_equal_p (n0, a0, 0)
11047 && operand_equal_p (n1, a1, 0))
11048 || (operand_equal_p (n0, a1, 0)
11049 && operand_equal_p (n1, a0, 0)))
11050 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11051 }
11052
11053 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11054 if (t1 != NULL_TREE)
11055 return t1;
11056
11057 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11058
11059 This results in more efficient code for machines without a NAND
11060 instruction. Combine will canonicalize to the first form,
11061 which will allow use of NAND instructions provided by the
11062 backend if they exist. */
11063 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11064 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11065 {
11066 return
11067 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11068 build2 (BIT_AND_EXPR, type,
11069 fold_convert_loc (loc, type,
11070 TREE_OPERAND (arg0, 0)),
11071 fold_convert_loc (loc, type,
11072 TREE_OPERAND (arg1, 0))));
11073 }
11074
11075 /* See if this can be simplified into a rotate first. If that
11076 is unsuccessful continue in the association code. */
11077 goto bit_rotate;
11078
11079 case BIT_XOR_EXPR:
11080 /* ~X ^ X is -1. */
11081 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11083 {
11084 t1 = build_zero_cst (type);
11085 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11086 return omit_one_operand_loc (loc, type, t1, arg1);
11087 }
11088
11089 /* X ^ ~X is -1. */
11090 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11092 {
11093 t1 = build_zero_cst (type);
11094 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11095 return omit_one_operand_loc (loc, type, t1, arg0);
11096 }
11097
11098 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11099 with a constant, and the two constants have no bits in common,
11100 we should treat this as a BIT_IOR_EXPR since this may produce more
11101 simplifications. */
11102 if (TREE_CODE (arg0) == BIT_AND_EXPR
11103 && TREE_CODE (arg1) == BIT_AND_EXPR
11104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11105 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11106 && wi::bit_and (TREE_OPERAND (arg0, 1),
11107 TREE_OPERAND (arg1, 1)) == 0)
11108 {
11109 code = BIT_IOR_EXPR;
11110 goto bit_ior;
11111 }
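/* Example: (x & 0xf0) ^ (y & 0x0f) has disjoint constant masks, so
   it is handled as (x & 0xf0) | (y & 0x0f) by the BIT_IOR_EXPR code
   above.  */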
11112
11113 /* (X | Y) ^ X -> Y & ~X */
11114 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11116 {
11117 tree t2 = TREE_OPERAND (arg0, 1);
11118 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11119 arg1);
11120 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11121 fold_convert_loc (loc, type, t2),
11122 fold_convert_loc (loc, type, t1));
11123 return t1;
11124 }
11125
11126 /* (Y | X) ^ X -> Y & ~X */
11127 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11128 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11129 {
11130 tree t2 = TREE_OPERAND (arg0, 0);
11131 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11132 arg1);
11133 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11134 fold_convert_loc (loc, type, t2),
11135 fold_convert_loc (loc, type, t1));
11136 return t1;
11137 }
11138
11139 /* X ^ (X | Y) -> Y & ~X */
11140 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11141 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11142 {
11143 tree t2 = TREE_OPERAND (arg1, 1);
11144 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11145 arg0);
11146 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11147 fold_convert_loc (loc, type, t2),
11148 fold_convert_loc (loc, type, t1));
11149 return t1;
11150 }
11151
11152 /* X ^ (Y | X) -> Y & ~X */
11153 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11155 {
11156 tree t2 = TREE_OPERAND (arg1, 0);
11157 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11158 arg0);
11159 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11160 fold_convert_loc (loc, type, t2),
11161 fold_convert_loc (loc, type, t1));
11162 return t1;
11163 }
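/* Example of the four folds above: (x | y) ^ x becomes y & ~x; every
   bit set in x is also set in x | y, so the XOR clears exactly the
   bits of x.  */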
11164
11165 /* Convert ~X ^ ~Y to X ^ Y. */
11166 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11167 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11168 return fold_build2_loc (loc, code, type,
11169 fold_convert_loc (loc, type,
11170 TREE_OPERAND (arg0, 0)),
11171 fold_convert_loc (loc, type,
11172 TREE_OPERAND (arg1, 0)));
11173
11174 /* Convert ~X ^ C to X ^ ~C. */
11175 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11176 && TREE_CODE (arg1) == INTEGER_CST)
11177 return fold_build2_loc (loc, code, type,
11178 fold_convert_loc (loc, type,
11179 TREE_OPERAND (arg0, 0)),
11180 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11181
11182 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11183 if (TREE_CODE (arg0) == BIT_AND_EXPR
11184 && INTEGRAL_TYPE_P (type)
11185 && integer_onep (TREE_OPERAND (arg0, 1))
11186 && integer_onep (arg1))
11187 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11188 build_zero_cst (TREE_TYPE (arg0)));
11189
11190 /* Fold (X & Y) ^ Y as ~X & Y. */
11191 if (TREE_CODE (arg0) == BIT_AND_EXPR
11192 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11193 {
11194 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11195 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11196 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11197 fold_convert_loc (loc, type, arg1));
11198 }
11199 /* Fold (X & Y) ^ X as ~Y & X. */
11200 if (TREE_CODE (arg0) == BIT_AND_EXPR
11201 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11202 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11203 {
11204 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11205 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11206 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11207 fold_convert_loc (loc, type, arg1));
11208 }
11209 /* Fold X ^ (X & Y) as X & ~Y. */
11210 if (TREE_CODE (arg1) == BIT_AND_EXPR
11211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11212 {
11213 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11214 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11215 fold_convert_loc (loc, type, arg0),
11216 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11217 }
11218 /* Fold X ^ (Y & X) as ~Y & X. */
11219 if (TREE_CODE (arg1) == BIT_AND_EXPR
11220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11221 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11222 {
11223 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11224 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11225 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11226 fold_convert_loc (loc, type, arg0));
11227 }
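/* Example of the four folds above: (x & y) ^ y becomes ~x & y; where
   a bit of y is 0 both sides are 0, and where it is 1 the XOR
   inverts the corresponding bit of x.  */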
11228
11229 /* See if this can be simplified into a rotate first. If that
11230 is unsuccessful continue in the association code. */
11231 goto bit_rotate;
11232
11233 case BIT_AND_EXPR:
11234 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11235 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11236 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11237 || (TREE_CODE (arg0) == EQ_EXPR
11238 && integer_zerop (TREE_OPERAND (arg0, 1))))
11239 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11240 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11241
11242 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11243 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11244 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11245 || (TREE_CODE (arg1) == EQ_EXPR
11246 && integer_zerop (TREE_OPERAND (arg1, 1))))
11247 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11248 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11249
11250 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11251 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11252 && INTEGRAL_TYPE_P (type)
11253 && integer_onep (TREE_OPERAND (arg0, 1))
11254 && integer_onep (arg1))
11255 {
11256 tree tem2;
11257 tem = TREE_OPERAND (arg0, 0);
11258 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11259 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11260 tem, tem2);
11261 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11262 build_zero_cst (TREE_TYPE (tem)));
11263 }
11264 /* Fold ~X & 1 as (X & 1) == 0. */
11265 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11266 && INTEGRAL_TYPE_P (type)
11267 && integer_onep (arg1))
11268 {
11269 tree tem2;
11270 tem = TREE_OPERAND (arg0, 0);
11271 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11272 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11273 tem, tem2);
11274 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11275 build_zero_cst (TREE_TYPE (tem)));
11276 }
11277 /* Fold !X & 1 as X == 0. */
11278 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11279 && integer_onep (arg1))
11280 {
11281 tem = TREE_OPERAND (arg0, 0);
11282 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11283 build_zero_cst (TREE_TYPE (tem)));
11284 }
11285
11286 /* Fold (X ^ Y) & Y as ~X & Y. */
11287 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11288 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11289 {
11290 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11291 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11292 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11293 fold_convert_loc (loc, type, arg1));
11294 }
11295 /* Fold (X ^ Y) & X as ~Y & X. */
11296 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11298 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11299 {
11300 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11301 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11302 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11303 fold_convert_loc (loc, type, arg1));
11304 }
11305 /* Fold X & (X ^ Y) as X & ~Y. */
11306 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11307 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11308 {
11309 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11310 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11311 fold_convert_loc (loc, type, arg0),
11312 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11313 }
11314 /* Fold X & (Y ^ X) as ~Y & X. */
11315 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11316 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11317 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11318 {
11319 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11320 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11321 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11322 fold_convert_loc (loc, type, arg0));
11323 }
11324
11325 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11326 multiple of 1 << CST. */
11327 if (TREE_CODE (arg1) == INTEGER_CST)
11328 {
11329 wide_int cst1 = arg1;
11330 wide_int ncst1 = -cst1;
11331 if ((cst1 & ncst1) == ncst1
11332 && multiple_of_p (type, arg0,
11333 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11334 return fold_convert_loc (loc, type, arg0);
11335 }
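/* Example: (x * 8) & -8 folds to x * 8, since the product is a
   multiple of 8 and therefore already has its low three bits
   clear.  */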
11336
11337 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11338 bits from CST2. */
11339 if (TREE_CODE (arg1) == INTEGER_CST
11340 && TREE_CODE (arg0) == MULT_EXPR
11341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11342 {
11343 wide_int warg1 = arg1;
11344 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11345
11346 if (masked == 0)
11347 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11348 arg0, arg1);
11349 else if (masked != warg1)
11350 {
11351 /* Avoid the transform if arg1 is a mask of some
11352 mode which allows further optimizations. */
11353 int pop = wi::popcount (warg1);
11354 if (!(pop >= BITS_PER_UNIT
11355 && exact_log2 (pop) != -1
11356 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11357 return fold_build2_loc (loc, code, type, op0,
11358 wide_int_to_tree (type, masked));
11359 }
11360 }
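/* Examples: (x * 4) & 3 folds to 0 because the product has two
   trailing zero bits, and (x * 4) & 7 drops the known-zero bits and
   becomes (x * 4) & 4.  */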
11361
11362 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11363 ((A & N) + B) & M -> (A + B) & M
11364 Similarly if (N & M) == 0,
11365 ((A | N) + B) & M -> (A + B) & M
11366 and for - instead of + (or unary - instead of +)
11367 and/or ^ instead of |.
11368 If B is constant and (B & M) == 0, fold into A & M. */
11369 if (TREE_CODE (arg1) == INTEGER_CST)
11370 {
11371 wide_int cst1 = arg1;
11372 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11373 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11374 && (TREE_CODE (arg0) == PLUS_EXPR
11375 || TREE_CODE (arg0) == MINUS_EXPR
11376 || TREE_CODE (arg0) == NEGATE_EXPR)
11377 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11378 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11379 {
11380 tree pmop[2];
11381 int which = 0;
11382 wide_int cst0;
11383
11384 /* Now we know that arg0 is (C + D) or (C - D) or
11385 -C and arg1 (M) is == (1LL << cst) - 1.
11386 Store C into PMOP[0] and D into PMOP[1]. */
11387 pmop[0] = TREE_OPERAND (arg0, 0);
11388 pmop[1] = NULL;
11389 if (TREE_CODE (arg0) != NEGATE_EXPR)
11390 {
11391 pmop[1] = TREE_OPERAND (arg0, 1);
11392 which = 1;
11393 }
11394
11395 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11396 which = -1;
11397
11398 for (; which >= 0; which--)
11399 switch (TREE_CODE (pmop[which]))
11400 {
11401 case BIT_AND_EXPR:
11402 case BIT_IOR_EXPR:
11403 case BIT_XOR_EXPR:
11404 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11405 != INTEGER_CST)
11406 break;
11407 cst0 = TREE_OPERAND (pmop[which], 1);
11408 cst0 &= cst1;
11409 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11410 {
11411 if (cst0 != cst1)
11412 break;
11413 }
11414 else if (cst0 != 0)
11415 break;
11416 /* If C or D is of the form (A & N) where
11417 (N & M) == M, or of the form (A | N) or
11418 (A ^ N) where (N & M) == 0, replace it with A. */
11419 pmop[which] = TREE_OPERAND (pmop[which], 0);
11420 break;
11421 case INTEGER_CST:
11422 /* If C or D is an N where (N & M) == 0, it can be
11423 omitted (assumed 0). */
11424 if ((TREE_CODE (arg0) == PLUS_EXPR
11425 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11426 && (cst1 & pmop[which]) == 0)
11427 pmop[which] = NULL;
11428 break;
11429 default:
11430 break;
11431 }
11432
11433 /* Only build anything new if we optimized one or both arguments
11434 above. */
11435 if (pmop[0] != TREE_OPERAND (arg0, 0)
11436 || (TREE_CODE (arg0) != NEGATE_EXPR
11437 && pmop[1] != TREE_OPERAND (arg0, 1)))
11438 {
11439 tree utype = TREE_TYPE (arg0);
11440 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11441 {
11442 /* Perform the operations in a type that has defined
11443 overflow behavior. */
11444 utype = unsigned_type_for (TREE_TYPE (arg0));
11445 if (pmop[0] != NULL)
11446 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11447 if (pmop[1] != NULL)
11448 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11449 }
11450
11451 if (TREE_CODE (arg0) == NEGATE_EXPR)
11452 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11453 else if (TREE_CODE (arg0) == PLUS_EXPR)
11454 {
11455 if (pmop[0] != NULL && pmop[1] != NULL)
11456 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11457 pmop[0], pmop[1]);
11458 else if (pmop[0] != NULL)
11459 tem = pmop[0];
11460 else if (pmop[1] != NULL)
11461 tem = pmop[1];
11462 else
11463 return build_int_cst (type, 0);
11464 }
11465 else if (pmop[0] == NULL)
11466 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11467 else
11468 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11469 pmop[0], pmop[1]);
11470 /* TEM is now the new binary +, - or unary - replacement. */
11471 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11472 fold_convert_loc (loc, utype, arg1));
11473 return fold_convert_loc (loc, type, tem);
11474 }
11475 }
11476 }
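/* Examples of the folds above: ((a & 3) + b) & 3 becomes (a + b) & 3
   since N & M == M, and ((a | 4) + b) & 3 becomes (a + b) & 3 since
   N & M == 0.  */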
11477
11478 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11479 if (t1 != NULL_TREE)
11480 return t1;
11481 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11482 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11483 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11484 {
11485 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11486
11487 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11488 if (mask == -1)
11489 return
11490 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11491 }
11492
11493 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11494
11495 This results in more efficient code for machines without a NOR
11496 instruction. Combine will canonicalize to the first form,
11497 which will allow use of NOR instructions provided by the
11498 backend if they exist. */
11499 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11500 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11501 {
11502 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11503 build2 (BIT_IOR_EXPR, type,
11504 fold_convert_loc (loc, type,
11505 TREE_OPERAND (arg0, 0)),
11506 fold_convert_loc (loc, type,
11507 TREE_OPERAND (arg1, 0))));
11508 }
11509
11510 /* If arg0 is derived from the address of an object or function, we may
11511 be able to fold this expression using the object or function's
11512 alignment. */
11513 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11514 {
11515 unsigned HOST_WIDE_INT modulus, residue;
11516 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11517
11518 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11519 integer_onep (arg1));
11520
11521 /* This works because modulus is a power of 2. If this weren't the
11522 case, we'd have to replace it by its greatest power-of-2
11523 divisor: modulus & -modulus. */
11524 if (low < modulus)
11525 return build_int_cst (type, residue & low);
11526 }
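/* Example: if arg0 is the address of an object known to be 8-byte
   aligned (modulus 8, residue 0), then arg0 & 7 folds to the
   constant 0.  */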
11527
11528 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11529 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11530 if the new mask might be further optimized. */
11531 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11532 || TREE_CODE (arg0) == RSHIFT_EXPR)
11533 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11534 && TREE_CODE (arg1) == INTEGER_CST
11535 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11536 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11537 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11538 < TYPE_PRECISION (TREE_TYPE (arg0))))
11539 {
11540 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11541 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11542 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11543 tree shift_type = TREE_TYPE (arg0);
11544
11545 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11546 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11547 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11548 && TYPE_PRECISION (TREE_TYPE (arg0))
11549 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11550 {
11551 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11552 tree arg00 = TREE_OPERAND (arg0, 0);
11553 /* See if more bits can be proven as zero because of
11554 zero extension. */
11555 if (TREE_CODE (arg00) == NOP_EXPR
11556 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11557 {
11558 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11559 if (TYPE_PRECISION (inner_type)
11560 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11561 && TYPE_PRECISION (inner_type) < prec)
11562 {
11563 prec = TYPE_PRECISION (inner_type);
11564 /* See if we can shorten the right shift. */
11565 if (shiftc < prec)
11566 shift_type = inner_type;
11567 /* Otherwise X >> C1 is all zeros, so we'll optimize
11568 it into (X, 0) later on by making sure zerobits
11569 is all ones. */
11570 }
11571 }
11572 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11573 if (shiftc < prec)
11574 {
11575 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11576 zerobits <<= prec - shiftc;
11577 }
11578 /* For an arithmetic shift, if the sign bit could be set, zerobits
11579 can actually contain sign bits, so no transformation is
11580 possible, unless MASK masks them all away. In that
11581 case the shift needs to be converted into a logical shift. */
11582 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11583 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11584 {
11585 if ((mask & zerobits) == 0)
11586 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11587 else
11588 zerobits = 0;
11589 }
11590 }
11591
11592 /* ((X << 16) & 0xff00) is (X, 0). */
11593 if ((mask & zerobits) == mask)
11594 return omit_one_operand_loc (loc, type,
11595 build_int_cst (type, 0), arg0);
11596
11597 newmask = mask | zerobits;
11598 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11599 {
11600 /* Only do the transformation if NEWMASK is some integer
11601 mode's mask. */
11602 for (prec = BITS_PER_UNIT;
11603 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11604 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11605 break;
11606 if (prec < HOST_BITS_PER_WIDE_INT
11607 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11608 {
11609 tree newmaskt;
11610
11611 if (shift_type != TREE_TYPE (arg0))
11612 {
11613 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11614 fold_convert_loc (loc, shift_type,
11615 TREE_OPERAND (arg0, 0)),
11616 TREE_OPERAND (arg0, 1));
11617 tem = fold_convert_loc (loc, type, tem);
11618 }
11619 else
11620 tem = op0;
11621 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11622 if (!tree_int_cst_equal (newmaskt, arg1))
11623 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11624 }
11625 }
11626 }
11627
11628 goto associate;
11629
11630 case RDIV_EXPR:
11631 /* Don't touch a floating-point divide by zero unless the mode
11632 of the constant can represent infinity. */
11633 if (TREE_CODE (arg1) == REAL_CST
11634 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11635 && real_zerop (arg1))
11636 return NULL_TREE;
11637
11638 /* (-A) / (-B) -> A / B */
11639 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11640 return fold_build2_loc (loc, RDIV_EXPR, type,
11641 TREE_OPERAND (arg0, 0),
11642 negate_expr (arg1));
11643 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11644 return fold_build2_loc (loc, RDIV_EXPR, type,
11645 negate_expr (arg0),
11646 TREE_OPERAND (arg1, 0));
11647
11648 /* Convert A/B/C to A/(B*C). */
11649 if (flag_reciprocal_math
11650 && TREE_CODE (arg0) == RDIV_EXPR)
11651 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11652 fold_build2_loc (loc, MULT_EXPR, type,
11653 TREE_OPERAND (arg0, 1), arg1));
11654
11655 /* Convert A/(B/C) to (A/B)*C. */
11656 if (flag_reciprocal_math
11657 && TREE_CODE (arg1) == RDIV_EXPR)
11658 return fold_build2_loc (loc, MULT_EXPR, type,
11659 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11660 TREE_OPERAND (arg1, 0)),
11661 TREE_OPERAND (arg1, 1));
11662
11663 /* Convert C1/(X*C2) into (C1/C2)/X. */
11664 if (flag_reciprocal_math
11665 && TREE_CODE (arg1) == MULT_EXPR
11666 && TREE_CODE (arg0) == REAL_CST
11667 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11668 {
11669 tree tem = const_binop (RDIV_EXPR, arg0,
11670 TREE_OPERAND (arg1, 1));
11671 if (tem)
11672 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11673 TREE_OPERAND (arg1, 0));
11674 }
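/* Examples: with -freciprocal-math, a / b / c becomes a / (b * c)
   and 6.0 / (x * 2.0) becomes 3.0 / x.  */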
11675
11676 if (flag_unsafe_math_optimizations)
11677 {
11678 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11679 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11680
11681 /* Optimize sin(x)/cos(x) as tan(x). */
11682 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11683 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11684 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11685 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11686 CALL_EXPR_ARG (arg1, 0), 0))
11687 {
11688 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11689
11690 if (tanfn != NULL_TREE)
11691 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11692 }
11693
11694 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11695 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11696 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11697 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11698 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11699 CALL_EXPR_ARG (arg1, 0), 0))
11700 {
11701 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11702
11703 if (tanfn != NULL_TREE)
11704 {
11705 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11706 CALL_EXPR_ARG (arg0, 0));
11707 return fold_build2_loc (loc, RDIV_EXPR, type,
11708 build_real (type, dconst1), tmp);
11709 }
11710 }
11711
11712 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11713 NaNs or Infinities. */
11714 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11715 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11716 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11717 {
11718 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11719 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11720
11721 if (! HONOR_NANS (arg00)
11722 && ! HONOR_INFINITIES (element_mode (arg00))
11723 && operand_equal_p (arg00, arg01, 0))
11724 {
11725 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11726
11727 if (cosfn != NULL_TREE)
11728 return build_call_expr_loc (loc, cosfn, 1, arg00);
11729 }
11730 }
11731
11732 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11733 NaNs or Infinities. */
11734 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11735 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11736 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11737 {
11738 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11739 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11740
11741 if (! HONOR_NANS (arg00)
11742 && ! HONOR_INFINITIES (element_mode (arg00))
11743 && operand_equal_p (arg00, arg01, 0))
11744 {
11745 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11746
11747 if (cosfn != NULL_TREE)
11748 {
11749 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11750 return fold_build2_loc (loc, RDIV_EXPR, type,
11751 build_real (type, dconst1),
11752 tmp);
11753 }
11754 }
11755 }
11756
11757 /* Optimize pow(x,c)/x as pow(x,c-1). */
11758 if (fcode0 == BUILT_IN_POW
11759 || fcode0 == BUILT_IN_POWF
11760 || fcode0 == BUILT_IN_POWL)
11761 {
11762 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11763 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11764 if (TREE_CODE (arg01) == REAL_CST
11765 && !TREE_OVERFLOW (arg01)
11766 && operand_equal_p (arg1, arg00, 0))
11767 {
11768 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11769 REAL_VALUE_TYPE c;
11770 tree arg;
11771
11772 c = TREE_REAL_CST (arg01);
11773 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11774 arg = build_real (type, c);
11775 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11776 }
11777 }
11778
11779 /* Optimize a/root(b/c) into a*root(c/b). */
11780 if (BUILTIN_ROOT_P (fcode1))
11781 {
11782 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11783
11784 if (TREE_CODE (rootarg) == RDIV_EXPR)
11785 {
11786 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11787 tree b = TREE_OPERAND (rootarg, 0);
11788 tree c = TREE_OPERAND (rootarg, 1);
11789
11790 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11791
11792 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11793 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11794 }
11795 }
11796
11797 /* Optimize x/expN(y) into x*expN(-y). */
11798 if (BUILTIN_EXPONENT_P (fcode1))
11799 {
11800 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11801 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11802 arg1 = build_call_expr_loc (loc,
11803 expfn, 1,
11804 fold_convert_loc (loc, type, arg));
11805 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11806 }
11807
11808 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11809 if (fcode1 == BUILT_IN_POW
11810 || fcode1 == BUILT_IN_POWF
11811 || fcode1 == BUILT_IN_POWL)
11812 {
11813 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11814 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11815 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11816 tree neg11 = fold_convert_loc (loc, type,
11817 negate_expr (arg11));
11818 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11819 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11820 }
11821 }
11822 return NULL_TREE;
11823
11824 case TRUNC_DIV_EXPR:
11825 /* Optimize (X & (-A)) / A where A is a power of 2,
11826 to X >> log2(A) */
11827 if (TREE_CODE (arg0) == BIT_AND_EXPR
11828 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11829 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11830 {
11831 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11832 arg1, TREE_OPERAND (arg0, 1));
11833 if (sum && integer_zerop (sum))
11834 {
11835 tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
11836 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11837 TREE_OPERAND (arg0, 0), pow2);
11838 }
11839 }
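/* Example: for signed x, (x & -8) / 8 folds to x >> 3; the mask
   clears the low three bits, so the truncating division is exact and
   equals the arithmetic shift.  */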
11840
11841 /* Fall through */
11842
11843 case FLOOR_DIV_EXPR:
11844 /* Simplify A / (B << N) where A and B are positive and B is
11845 a power of 2, to A >> (N + log2(B)). */
11846 strict_overflow_p = false;
11847 if (TREE_CODE (arg1) == LSHIFT_EXPR
11848 && (TYPE_UNSIGNED (type)
11849 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11850 {
11851 tree sval = TREE_OPERAND (arg1, 0);
11852 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11853 {
11854 tree sh_cnt = TREE_OPERAND (arg1, 1);
11855 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11856 wi::exact_log2 (sval));
11857
11858 if (strict_overflow_p)
11859 fold_overflow_warning (("assuming signed overflow does not "
11860 "occur when simplifying A / (B << N)"),
11861 WARN_STRICT_OVERFLOW_MISC);
11862
11863 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11864 sh_cnt, pow2);
11865 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11866 fold_convert_loc (loc, type, arg0), sh_cnt);
11867 }
11868 }
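/* Example: for unsigned x, x / (4 << n) folds to x >> (n + 2),
   folding log2 (4) into the shift count.  */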
11869
11870 /* Fall through */
11871
11872 case ROUND_DIV_EXPR:
11873 case CEIL_DIV_EXPR:
11874 case EXACT_DIV_EXPR:
11875 if (integer_zerop (arg1))
11876 return NULL_TREE;
11877
11878 /* Convert -A / -B to A / B when the type is signed and overflow is
11879 undefined. */
11880 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11881 && TREE_CODE (arg0) == NEGATE_EXPR
11882 && negate_expr_p (arg1))
11883 {
11884 if (INTEGRAL_TYPE_P (type))
11885 fold_overflow_warning (("assuming signed overflow does not occur "
11886 "when distributing negation across "
11887 "division"),
11888 WARN_STRICT_OVERFLOW_MISC);
11889 return fold_build2_loc (loc, code, type,
11890 fold_convert_loc (loc, type,
11891 TREE_OPERAND (arg0, 0)),
11892 fold_convert_loc (loc, type,
11893 negate_expr (arg1)));
11894 }
11895 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11896 && TREE_CODE (arg1) == NEGATE_EXPR
11897 && negate_expr_p (arg0))
11898 {
11899 if (INTEGRAL_TYPE_P (type))
11900 fold_overflow_warning (("assuming signed overflow does not occur "
11901 "when distributing negation across "
11902 "division"),
11903 WARN_STRICT_OVERFLOW_MISC);
11904 return fold_build2_loc (loc, code, type,
11905 fold_convert_loc (loc, type,
11906 negate_expr (arg0)),
11907 fold_convert_loc (loc, type,
11908 TREE_OPERAND (arg1, 0)));
11909 }
11910
11911 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11912 operation, EXACT_DIV_EXPR.
11913
11914 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11915 At one time others generated faster code, but it's not clear if they
11916 do after the last round of changes to the DIV code in expmed.c. */
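/* For example: CEIL_DIV_EXPR <x * 8, 8> => EXACT_DIV_EXPR <x * 8, 8>,
   letting expmed.c pick the cheapest expansion for a division that is
   known to be exact.  */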
11917 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11918 && multiple_of_p (type, arg0, arg1))
11919 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11920
11921 strict_overflow_p = false;
11922 if (TREE_CODE (arg1) == INTEGER_CST
11923 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11924 &strict_overflow_p)))
11925 {
11926 if (strict_overflow_p)
11927 fold_overflow_warning (("assuming signed overflow does not occur "
11928 "when simplifying division"),
11929 WARN_STRICT_OVERFLOW_MISC);
11930 return fold_convert_loc (loc, type, tem);
11931 }
11932
11933 return NULL_TREE;
11934
11935 case CEIL_MOD_EXPR:
11936 case FLOOR_MOD_EXPR:
11937 case ROUND_MOD_EXPR:
11938 case TRUNC_MOD_EXPR:
11939 strict_overflow_p = false;
11940 if (TREE_CODE (arg1) == INTEGER_CST
11941 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11942 &strict_overflow_p)))
11943 {
11944 if (strict_overflow_p)
11945 fold_overflow_warning (("assuming signed overflow does not occur "
11946 "when simplifying modulus"),
11947 WARN_STRICT_OVERFLOW_MISC);
11948 return fold_convert_loc (loc, type, tem);
11949 }
11950
11951 return NULL_TREE;
11952
11953 case LROTATE_EXPR:
11954 case RROTATE_EXPR:
11955 case RSHIFT_EXPR:
11956 case LSHIFT_EXPR:
11957 /* Since a negative shift count is not well-defined,
11958 don't try to compute it in the compiler. */
11959 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11960 return NULL_TREE;
11961
11962 prec = element_precision (type);
11963
11964 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
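/* For example, on a 32-bit type: (x << 3) << 4 => x << 7.  The code
   below must also cope with c1 + c2 >= precision: rotate counts wrap
   modulo the precision, left shifts (and unsigned right shifts)
   collapse to zero, and signed right shifts clamp to precision - 1.  */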
11965 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11966 && tree_to_uhwi (arg1) < prec
11967 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11968 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11969 {
11970 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11971 + tree_to_uhwi (arg1));
11972
11973 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11974 being well defined. */
11975 if (low >= prec)
11976 {
11977 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11978 low = low % prec;
11979 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11980 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11981 TREE_OPERAND (arg0, 0));
11982 else
11983 low = prec - 1;
11984 }
11985
11986 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11987 build_int_cst (TREE_TYPE (arg1), low));
11988 }
11989
11990 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11991 into x & ((unsigned)-1 >> c) for unsigned types. */
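/* For example, with unsigned x: (x >> 3) << 3 => x & (-1U << 3),
   i.e. x with its low three bits cleared.  */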
11992 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11993 || (TYPE_UNSIGNED (type)
11994 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11995 && tree_fits_uhwi_p (arg1)
11996 && tree_to_uhwi (arg1) < prec
11997 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11998 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11999 {
12000 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12001 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12002 tree lshift;
12003 tree arg00;
12004
12005 if (low0 == low1)
12006 {
12007 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12008
12009 lshift = build_minus_one_cst (type);
12010 lshift = const_binop (code, lshift, arg1);
12011
12012 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12013 }
12014 }
12015
12016 /* If we have a rotate of a bit operation with the rotate count and
12017 the second operand of the bit operation both constant,
12018 permute the two operations. */
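/* For example: (x & 0xff) ror 3 => (x ror 3) & (0xff ror 3), where
   the second rotate then folds to a constant.  (C has no rotate
   operator; RROTATE_EXPR typically comes from earlier idiom
   recognition.)  */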
12019 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12020 && (TREE_CODE (arg0) == BIT_AND_EXPR
12021 || TREE_CODE (arg0) == BIT_IOR_EXPR
12022 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12023 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12024 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12025 fold_build2_loc (loc, code, type,
12026 TREE_OPERAND (arg0, 0), arg1),
12027 fold_build2_loc (loc, code, type,
12028 TREE_OPERAND (arg0, 1), arg1));
12029
12030 /* Two consecutive rotates adding up to some integer
12031 multiple of the precision of the type can be ignored. */
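/* For example, on a 32-bit type: (x ror 24) ror 8 => x, because
   24 + 8 is a whole multiple of the precision.  */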
12032 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12033 && TREE_CODE (arg0) == RROTATE_EXPR
12034 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12035 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12036 prec) == 0)
12037 return TREE_OPERAND (arg0, 0);
12038
12039 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12040 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12041 if the latter can be further optimized. */
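/* For example: (x & 0xf0) >> 4 => (x >> 4) & 0xf.  The transform is
   applied only if the rebuilt BIT_AND_EXPR simplifies, per the check
   below.  */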
12042 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12043 && TREE_CODE (arg0) == BIT_AND_EXPR
12044 && TREE_CODE (arg1) == INTEGER_CST
12045 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12046 {
12047 tree mask = fold_build2_loc (loc, code, type,
12048 fold_convert_loc (loc, type,
12049 TREE_OPERAND (arg0, 1)),
12050 arg1);
12051 tree shift = fold_build2_loc (loc, code, type,
12052 fold_convert_loc (loc, type,
12053 TREE_OPERAND (arg0, 0)),
12054 arg1);
12055 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12056 if (tem)
12057 return tem;
12058 }
12059
12060 return NULL_TREE;
12061
12062 case MIN_EXPR:
12063 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12064 if (tem)
12065 return tem;
12066 goto associate;
12067
12068 case MAX_EXPR:
12069 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12070 if (tem)
12071 return tem;
12072 goto associate;
12073
12074 case TRUTH_ANDIF_EXPR:
12075 /* Note that the operands of this must be ints
12076 and their values must be 0 or 1.
12077 ("true" is a fixed value perhaps depending on the language.) */
12078 /* If first arg is constant zero, return it. */
12079 if (integer_zerop (arg0))
12080 return fold_convert_loc (loc, type, arg0);
12081 case TRUTH_AND_EXPR:
12082 /* If either arg is constant true, drop it. */
12083 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12084 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12085 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12086 /* Preserve sequence points. */
12087 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12088 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12089 /* If second arg is constant zero, result is zero, but first arg
12090 must be evaluated. */
12091 if (integer_zerop (arg1))
12092 return omit_one_operand_loc (loc, type, arg1, arg0);
12093 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12094 case will be handled here. */
12095 if (integer_zerop (arg0))
12096 return omit_one_operand_loc (loc, type, arg0, arg1);
12097
12098 /* !X && X is always false. */
12099 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12100 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12101 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12102 /* X && !X is always false. */
12103 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12104 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12105 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12106
12107 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12108 means A >= Y && A != MAX, but in this case we know that
12109 A < X <= MAX. */
12110
12111 if (!TREE_SIDE_EFFECTS (arg0)
12112 && !TREE_SIDE_EFFECTS (arg1))
12113 {
12114 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12115 if (tem && !operand_equal_p (tem, arg0, 0))
12116 return fold_build2_loc (loc, code, type, tem, arg1);
12117
12118 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12119 if (tem && !operand_equal_p (tem, arg1, 0))
12120 return fold_build2_loc (loc, code, type, arg0, tem);
12121 }
12122
12123 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12124 != NULL_TREE)
12125 return tem;
12126
12127 return NULL_TREE;
12128
12129 case TRUTH_ORIF_EXPR:
12130 /* Note that the operands of this must be ints
12131 and their values must be 0 or 1.
12132 ("true" is a fixed value perhaps depending on the language.) */
12133 /* If first arg is constant true, return it. */
12134 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12135 return fold_convert_loc (loc, type, arg0);
12136 case TRUTH_OR_EXPR:
12137 /* If either arg is constant zero, drop it. */
12138 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12139 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12140 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12141 /* Preserve sequence points. */
12142 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12144 /* If second arg is constant true, result is true, but we must
12145 evaluate first arg. */
12146 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12147 return omit_one_operand_loc (loc, type, arg1, arg0);
12148 /* Likewise for first arg, but note this only occurs here for
12149 TRUTH_OR_EXPR. */
12150 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12151 return omit_one_operand_loc (loc, type, arg0, arg1);
12152
12153 /* !X || X is always true. */
12154 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12156 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12157 /* X || !X is always true. */
12158 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12159 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12160 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12161
12162 /* (X && !Y) || (!X && Y) is X ^ Y */
12163 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12164 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12165 {
12166 tree a0, a1, l0, l1, n0, n1;
12167
12168 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12169 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12170
12171 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12172 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12173
12174 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12175 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12176
12177 if ((operand_equal_p (n0, a0, 0)
12178 && operand_equal_p (n1, a1, 0))
12179 || (operand_equal_p (n0, a1, 0)
12180 && operand_equal_p (n1, a0, 0)))
12181 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12182 }
12183
12184 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12185 != NULL_TREE)
12186 return tem;
12187
12188 return NULL_TREE;
12189
12190 case TRUTH_XOR_EXPR:
12191 /* If the second arg is constant zero, drop it. */
12192 if (integer_zerop (arg1))
12193 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12194 /* If the second arg is constant true, this is a logical inversion. */
12195 if (integer_onep (arg1))
12196 {
12197 tem = invert_truthvalue_loc (loc, arg0);
12198 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12199 }
12200 /* Identical arguments cancel to zero. */
12201 if (operand_equal_p (arg0, arg1, 0))
12202 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12203
12204 /* !X ^ X is always true. */
12205 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12206 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12207 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12208
12209 /* X ^ !X is always true. */
12210 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12212 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12213
12214 return NULL_TREE;
12215
12216 case EQ_EXPR:
12217 case NE_EXPR:
12218 STRIP_NOPS (arg0);
12219 STRIP_NOPS (arg1);
12220
12221 tem = fold_comparison (loc, code, type, op0, op1);
12222 if (tem != NULL_TREE)
12223 return tem;
12224
12225 /* bool_var != 0 becomes bool_var. */
12226 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12227 && code == NE_EXPR)
12228 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12229
12230 /* bool_var == 1 becomes bool_var. */
12231 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12232 && code == EQ_EXPR)
12233 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12234
12235 /* bool_var != 1 becomes !bool_var. */
12236 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12237 && code == NE_EXPR)
12238 return fold_convert_loc (loc, type,
12239 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12240 TREE_TYPE (arg0), arg0));
12241
12242 /* bool_var == 0 becomes !bool_var. */
12243 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12244 && code == EQ_EXPR)
12245 return fold_convert_loc (loc, type,
12246 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12247 TREE_TYPE (arg0), arg0));
12248
12249 /* !exp != 0 becomes !exp. */
12250 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12251 && code == NE_EXPR)
12252 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12253
12254 /* If this is an equality comparison of the addresses of two non-weak,
12255 unaliased symbols neither of which is extern (since we do not
12256 have access to attributes for externs), then we know the result. */
12257 if (TREE_CODE (arg0) == ADDR_EXPR
12258 && DECL_P (TREE_OPERAND (arg0, 0))
12259 && TREE_CODE (arg1) == ADDR_EXPR
12260 && DECL_P (TREE_OPERAND (arg1, 0)))
12261 {
12262 int equal;
12263
12264 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12265 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12266 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12267 ->equal_address_to (symtab_node::get_create
12268 (TREE_OPERAND (arg1, 0)));
12269 else
12270 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12271 if (equal != 2)
12272 return constant_boolean_node (equal
12273 ? code == EQ_EXPR : code != EQ_EXPR,
12274 type);
12275 }
12276
12277 /* Similarly for a NEGATE_EXPR. */
12278 if (TREE_CODE (arg0) == NEGATE_EXPR
12279 && TREE_CODE (arg1) == INTEGER_CST
12280 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12281 arg1)))
12282 && TREE_CODE (tem) == INTEGER_CST
12283 && !TREE_OVERFLOW (tem))
12284 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12285
12286 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
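/* For example: (x ^ 5) == 3 => x == 6, since 5 ^ 3 == 6.  */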
12287 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12288 && TREE_CODE (arg1) == INTEGER_CST
12289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12290 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12291 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12292 fold_convert_loc (loc,
12293 TREE_TYPE (arg0),
12294 arg1),
12295 TREE_OPERAND (arg0, 1)));
12296
12297 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12298 if ((TREE_CODE (arg0) == PLUS_EXPR
12299 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12300 || TREE_CODE (arg0) == MINUS_EXPR)
12301 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12302 0)),
12303 arg1, 0)
12304 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12305 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12306 {
12307 tree val = TREE_OPERAND (arg0, 1);
12308 return omit_two_operands_loc (loc, type,
12309 fold_build2_loc (loc, code, type,
12310 val,
12311 build_int_cst (TREE_TYPE (val),
12312 0)),
12313 TREE_OPERAND (arg0, 0), arg1);
12314 }
12315
12316 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
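/* Why odd C suffices: C - X == X would need C == 2 * X, and 2 * X is
   always even, also modulo 2**prec, so for odd C the equality can
   never hold; EQ folds to false and NE to true.  */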
12317 if (TREE_CODE (arg0) == MINUS_EXPR
12318 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12319 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12320 1)),
12321 arg1, 0)
12322 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12323 {
12324 return omit_two_operands_loc (loc, type,
12325 code == NE_EXPR
12326 ? boolean_true_node : boolean_false_node,
12327 TREE_OPERAND (arg0, 1), arg1);
12328 }
12329
12330 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12331 if (TREE_CODE (arg0) == ABS_EXPR
12332 && (integer_zerop (arg1) || real_zerop (arg1)))
12333 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12334
12335 /* If this is an EQ or NE comparison with zero and ARG0 is
12336 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12337 two operations, but the latter can be done in one less insn
12338 on machines that have only two-operand insns or on which a
12339 constant cannot be the first operand. */
12340 if (TREE_CODE (arg0) == BIT_AND_EXPR
12341 && integer_zerop (arg1))
12342 {
12343 tree arg00 = TREE_OPERAND (arg0, 0);
12344 tree arg01 = TREE_OPERAND (arg0, 1);
12345 if (TREE_CODE (arg00) == LSHIFT_EXPR
12346 && integer_onep (TREE_OPERAND (arg00, 0)))
12347 {
12348 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12349 arg01, TREE_OPERAND (arg00, 1));
12350 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12351 build_int_cst (TREE_TYPE (arg0), 1));
12352 return fold_build2_loc (loc, code, type,
12353 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12354 arg1);
12355 }
12356 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12357 && integer_onep (TREE_OPERAND (arg01, 0)))
12358 {
12359 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12360 arg00, TREE_OPERAND (arg01, 1));
12361 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12362 build_int_cst (TREE_TYPE (arg0), 1));
12363 return fold_build2_loc (loc, code, type,
12364 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12365 arg1);
12366 }
12367 }
12368
12369 /* If this is an NE or EQ comparison of zero against the result of a
12370 signed MOD operation whose second operand is a power of 2, make
12371 the MOD operation unsigned since it is simpler and equivalent. */
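/* For example, with signed x: (x % 4) == 0 =>
   ((unsigned) x % 4U) == 0; only the zeroness of the remainder is
   tested, and for a power-of-2 modulus the sign cannot affect it.  */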
12372 if (integer_zerop (arg1)
12373 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12374 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12375 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12376 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12377 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12378 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12379 {
12380 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12381 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12382 fold_convert_loc (loc, newtype,
12383 TREE_OPERAND (arg0, 0)),
12384 fold_convert_loc (loc, newtype,
12385 TREE_OPERAND (arg0, 1)));
12386
12387 return fold_build2_loc (loc, code, type, newmod,
12388 fold_convert_loc (loc, newtype, arg1));
12389 }
12390
12391 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12392 C1 is a valid shift constant, and C2 is a power of two, i.e.
12393 a single bit. */
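/* For example, on 32-bit int: ((x >> 3) & 4) != 0 => (x & 32) != 0
   while the shifted mask still fits in the precision, and
   ((x >> 31) & 2) != 0 => x < 0 when it does not.  */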
12394 if (TREE_CODE (arg0) == BIT_AND_EXPR
12395 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12396 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12397 == INTEGER_CST
12398 && integer_pow2p (TREE_OPERAND (arg0, 1))
12399 && integer_zerop (arg1))
12400 {
12401 tree itype = TREE_TYPE (arg0);
12402 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12403 prec = TYPE_PRECISION (itype);
12404
12405 /* Check for a valid shift count. */
12406 if (wi::ltu_p (arg001, prec))
12407 {
12408 tree arg01 = TREE_OPERAND (arg0, 1);
12409 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12410 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12411 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12412 can be rewritten as (X & (C2 << C1)) != 0. */
12413 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12414 {
12415 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12416 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12417 return fold_build2_loc (loc, code, type, tem,
12418 fold_convert_loc (loc, itype, arg1));
12419 }
12420 /* Otherwise, for signed (arithmetic) shifts,
12421 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12422 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12423 else if (!TYPE_UNSIGNED (itype))
12424 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12425 arg000, build_int_cst (itype, 0));
12426 /* Otherwise, for unsigned (logical) shifts,
12427 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12428 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12429 else
12430 return omit_one_operand_loc (loc, type,
12431 code == EQ_EXPR ? integer_one_node
12432 : integer_zero_node,
12433 arg000);
12434 }
12435 }
12436
12437 /* If we have (A & C) == C where C is a power of 2, convert this into
12438 (A & C) != 0. Similarly for NE_EXPR. */
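/* For example: (x & 8) == 8 => (x & 8) != 0, which tests the same
   single bit without needing the constant a second time.  */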
12439 if (TREE_CODE (arg0) == BIT_AND_EXPR
12440 && integer_pow2p (TREE_OPERAND (arg0, 1))
12441 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12442 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12443 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12444 integer_zero_node));
12445
12446 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12447 bit, then fold the expression into A < 0 or A >= 0. */
12448 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12449 if (tem)
12450 return tem;
12451
12452 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12453 Similarly for NE_EXPR. */
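/* For example: (x & 3) == 4 can never hold, because bit 2 is always
   cleared by the mask; it folds to 0, and the NE form folds to 1.  */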
12454 if (TREE_CODE (arg0) == BIT_AND_EXPR
12455 && TREE_CODE (arg1) == INTEGER_CST
12456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12457 {
12458 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12459 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12460 TREE_OPERAND (arg0, 1));
12461 tree dandnotc
12462 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12463 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12464 notc);
12465 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12466 if (integer_nonzerop (dandnotc))
12467 return omit_one_operand_loc (loc, type, rslt, arg0);
12468 }
12469
12470 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12471 Similarly for NE_EXPR. */
12472 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12473 && TREE_CODE (arg1) == INTEGER_CST
12474 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12475 {
12476 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12477 tree candnotd
12478 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12479 TREE_OPERAND (arg0, 1),
12480 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12481 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12482 if (integer_nonzerop (candnotd))
12483 return omit_one_operand_loc (loc, type, rslt, arg0);
12484 }
12485
12486 /* If this is a comparison of a field, we may be able to simplify it. */
12487 if ((TREE_CODE (arg0) == COMPONENT_REF
12488 || TREE_CODE (arg0) == BIT_FIELD_REF)
12489 /* Handle the constant case even without -O
12490 to make sure the warnings are given. */
12491 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12492 {
12493 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12494 if (t1)
12495 return t1;
12496 }
12497
12498 /* Optimize comparisons of strlen vs zero to a compare of the
12499 first character of the string vs zero. To wit,
12500 strlen(ptr) == 0 => *ptr == 0
12501 strlen(ptr) != 0 => *ptr != 0
12502 Other cases should reduce to one of these two (or a constant)
12503 due to the return value of strlen being unsigned. */
12504 if (TREE_CODE (arg0) == CALL_EXPR
12505 && integer_zerop (arg1))
12506 {
12507 tree fndecl = get_callee_fndecl (arg0);
12508
12509 if (fndecl
12510 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12511 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12512 && call_expr_nargs (arg0) == 1
12513 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12514 {
12515 tree iref = build_fold_indirect_ref_loc (loc,
12516 CALL_EXPR_ARG (arg0, 0));
12517 return fold_build2_loc (loc, code, type, iref,
12518 build_int_cst (TREE_TYPE (iref), 0));
12519 }
12520 }
12521
12522 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12523 of X. Similarly fold (X >> C) == 0 into X >= 0. */
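/* For example, on 32-bit int: (x >> 31) != 0 => x < 0; an unsigned
   x is first converted to the corresponding signed type.  */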
12524 if (TREE_CODE (arg0) == RSHIFT_EXPR
12525 && integer_zerop (arg1)
12526 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12527 {
12528 tree arg00 = TREE_OPERAND (arg0, 0);
12529 tree arg01 = TREE_OPERAND (arg0, 1);
12530 tree itype = TREE_TYPE (arg00);
12531 if (wi::eq_p (arg01, element_precision (itype) - 1))
12532 {
12533 if (TYPE_UNSIGNED (itype))
12534 {
12535 itype = signed_type_for (itype);
12536 arg00 = fold_convert_loc (loc, itype, arg00);
12537 }
12538 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12539 type, arg00, build_zero_cst (itype));
12540 }
12541 }
12542
12543 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12544 if (integer_zerop (arg1)
12545 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12546 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12547 TREE_OPERAND (arg0, 1));
12548
12549 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12550 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12551 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12552 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12553 build_zero_cst (TREE_TYPE (arg0)));
12554 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12555 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12557 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12558 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12559 build_zero_cst (TREE_TYPE (arg0)));
12560
12561 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12562 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12563 && TREE_CODE (arg1) == INTEGER_CST
12564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12565 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12566 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12567 TREE_OPERAND (arg0, 1), arg1));
12568
12569 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12570 (X & C) == 0 when C is a single bit. */
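/* For example: (~x & 8) == 0 => (x & 8) != 0; with a single-bit mask
   the complemented test is just the same test with opposite sense.  */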
12571 if (TREE_CODE (arg0) == BIT_AND_EXPR
12572 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12573 && integer_zerop (arg1)
12574 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12575 {
12576 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12577 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12578 TREE_OPERAND (arg0, 1));
12579 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12580 type, tem,
12581 fold_convert_loc (loc, TREE_TYPE (arg0),
12582 arg1));
12583 }
12584
12585 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12586 constant C is a power of two, i.e. a single bit. */
12587 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12588 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12589 && integer_zerop (arg1)
12590 && integer_pow2p (TREE_OPERAND (arg0, 1))
12591 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12592 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12593 {
12594 tree arg00 = TREE_OPERAND (arg0, 0);
12595 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12596 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12597 }
12598
12599 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12600 when C is a power of two, i.e. a single bit. */
12601 if (TREE_CODE (arg0) == BIT_AND_EXPR
12602 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12603 && integer_zerop (arg1)
12604 && integer_pow2p (TREE_OPERAND (arg0, 1))
12605 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12606 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12607 {
12608 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12609 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12610 arg000, TREE_OPERAND (arg0, 1));
12611 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12612 tem, build_int_cst (TREE_TYPE (tem), 0));
12613 }
12614
12615 if (integer_zerop (arg1)
12616 && tree_expr_nonzero_p (arg0))
12617 {
12618 tree res = constant_boolean_node (code == NE_EXPR, type);
12619 return omit_one_operand_loc (loc, type, res, arg0);
12620 }
12621
12622 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12623 if (TREE_CODE (arg0) == NEGATE_EXPR
12624 && TREE_CODE (arg1) == NEGATE_EXPR)
12625 return fold_build2_loc (loc, code, type,
12626 TREE_OPERAND (arg0, 0),
12627 fold_convert_loc (loc, TREE_TYPE (arg0),
12628 TREE_OPERAND (arg1, 0)));
12629
12630 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12631 if (TREE_CODE (arg0) == BIT_AND_EXPR
12632 && TREE_CODE (arg1) == BIT_AND_EXPR)
12633 {
12634 tree arg00 = TREE_OPERAND (arg0, 0);
12635 tree arg01 = TREE_OPERAND (arg0, 1);
12636 tree arg10 = TREE_OPERAND (arg1, 0);
12637 tree arg11 = TREE_OPERAND (arg1, 1);
12638 tree itype = TREE_TYPE (arg0);
12639
12640 if (operand_equal_p (arg01, arg11, 0))
12641 return fold_build2_loc (loc, code, type,
12642 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12643 fold_build2_loc (loc,
12644 BIT_XOR_EXPR, itype,
12645 arg00, arg10),
12646 arg01),
12647 build_zero_cst (itype));
12648
12649 if (operand_equal_p (arg01, arg10, 0))
12650 return fold_build2_loc (loc, code, type,
12651 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12652 fold_build2_loc (loc,
12653 BIT_XOR_EXPR, itype,
12654 arg00, arg11),
12655 arg01),
12656 build_zero_cst (itype));
12657
12658 if (operand_equal_p (arg00, arg11, 0))
12659 return fold_build2_loc (loc, code, type,
12660 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12661 fold_build2_loc (loc,
12662 BIT_XOR_EXPR, itype,
12663 arg01, arg10),
12664 arg00),
12665 build_zero_cst (itype));
12666
12667 if (operand_equal_p (arg00, arg10, 0))
12668 return fold_build2_loc (loc, code, type,
12669 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12670 fold_build2_loc (loc,
12671 BIT_XOR_EXPR, itype,
12672 arg01, arg11),
12673 arg00),
12674 build_zero_cst (itype));
12675 }
12676
12677 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12678 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12679 {
12680 tree arg00 = TREE_OPERAND (arg0, 0);
12681 tree arg01 = TREE_OPERAND (arg0, 1);
12682 tree arg10 = TREE_OPERAND (arg1, 0);
12683 tree arg11 = TREE_OPERAND (arg1, 1);
12684 tree itype = TREE_TYPE (arg0);
12685
12686 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12687 operand_equal_p guarantees no side-effects so we don't need
12688 to use omit_one_operand on Z. */
12689 if (operand_equal_p (arg01, arg11, 0))
12690 return fold_build2_loc (loc, code, type, arg00,
12691 fold_convert_loc (loc, TREE_TYPE (arg00),
12692 arg10));
12693 if (operand_equal_p (arg01, arg10, 0))
12694 return fold_build2_loc (loc, code, type, arg00,
12695 fold_convert_loc (loc, TREE_TYPE (arg00),
12696 arg11));
12697 if (operand_equal_p (arg00, arg11, 0))
12698 return fold_build2_loc (loc, code, type, arg01,
12699 fold_convert_loc (loc, TREE_TYPE (arg01),
12700 arg10));
12701 if (operand_equal_p (arg00, arg10, 0))
12702 return fold_build2_loc (loc, code, type, arg01,
12703 fold_convert_loc (loc, TREE_TYPE (arg01),
12704 arg11));
12705
12706 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12707 if (TREE_CODE (arg01) == INTEGER_CST
12708 && TREE_CODE (arg11) == INTEGER_CST)
12709 {
12710 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12711 fold_convert_loc (loc, itype, arg11));
12712 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12713 return fold_build2_loc (loc, code, type, tem,
12714 fold_convert_loc (loc, itype, arg10));
12715 }
12716 }
12717
12718 /* Attempt to simplify equality/inequality comparisons of complex
12719 values. Only lower the comparison if the result is known or
12720 can be simplified to a single scalar comparison. */
12721 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12722 || TREE_CODE (arg0) == COMPLEX_CST)
12723 && (TREE_CODE (arg1) == COMPLEX_EXPR
12724 || TREE_CODE (arg1) == COMPLEX_CST))
12725 {
12726 tree real0, imag0, real1, imag1;
12727 tree rcond, icond;
12728
12729 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12730 {
12731 real0 = TREE_OPERAND (arg0, 0);
12732 imag0 = TREE_OPERAND (arg0, 1);
12733 }
12734 else
12735 {
12736 real0 = TREE_REALPART (arg0);
12737 imag0 = TREE_IMAGPART (arg0);
12738 }
12739
12740 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12741 {
12742 real1 = TREE_OPERAND (arg1, 0);
12743 imag1 = TREE_OPERAND (arg1, 1);
12744 }
12745 else
12746 {
12747 real1 = TREE_REALPART (arg1);
12748 imag1 = TREE_IMAGPART (arg1);
12749 }
12750
12751 rcond = fold_binary_loc (loc, code, type, real0, real1);
12752 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12753 {
12754 if (integer_zerop (rcond))
12755 {
12756 if (code == EQ_EXPR)
12757 return omit_two_operands_loc (loc, type, boolean_false_node,
12758 imag0, imag1);
12759 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12760 }
12761 else
12762 {
12763 if (code == NE_EXPR)
12764 return omit_two_operands_loc (loc, type, boolean_true_node,
12765 imag0, imag1);
12766 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12767 }
12768 }
12769
12770 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12771 if (icond && TREE_CODE (icond) == INTEGER_CST)
12772 {
12773 if (integer_zerop (icond))
12774 {
12775 if (code == EQ_EXPR)
12776 return omit_two_operands_loc (loc, type, boolean_false_node,
12777 real0, real1);
12778 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12779 }
12780 else
12781 {
12782 if (code == NE_EXPR)
12783 return omit_two_operands_loc (loc, type, boolean_true_node,
12784 real0, real1);
12785 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12786 }
12787 }
12788 }
12789
12790 return NULL_TREE;
12791
12792 case LT_EXPR:
12793 case GT_EXPR:
12794 case LE_EXPR:
12795 case GE_EXPR:
12796 tem = fold_comparison (loc, code, type, op0, op1);
12797 if (tem != NULL_TREE)
12798 return tem;
12799
12800 /* Transform comparisons of the form X +- C CMP X. */
12801 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12802 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12803 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12804 && !HONOR_SNANS (arg0))
12805 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12806 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12807 {
12808 tree arg01 = TREE_OPERAND (arg0, 1);
12809 enum tree_code code0 = TREE_CODE (arg0);
12810 int is_positive;
12811
12812 if (TREE_CODE (arg01) == REAL_CST)
12813 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12814 else
12815 is_positive = tree_int_cst_sgn (arg01);
12816
12817 /* (X - c) > X becomes false. */
12818 if (code == GT_EXPR
12819 && ((code0 == MINUS_EXPR && is_positive >= 0)
12820 || (code0 == PLUS_EXPR && is_positive <= 0)))
12821 {
12822 if (TREE_CODE (arg01) == INTEGER_CST
12823 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12824 fold_overflow_warning (("assuming signed overflow does not "
12825 "occur when assuming that (X - c) > X "
12826 "is always false"),
12827 WARN_STRICT_OVERFLOW_ALL);
12828 return constant_boolean_node (0, type);
12829 }
12830
12831 /* Likewise (X + c) < X becomes false. */
12832 if (code == LT_EXPR
12833 && ((code0 == PLUS_EXPR && is_positive >= 0)
12834 || (code0 == MINUS_EXPR && is_positive <= 0)))
12835 {
12836 if (TREE_CODE (arg01) == INTEGER_CST
12837 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12838 fold_overflow_warning (("assuming signed overflow does not "
12839 "occur when assuming that "
12840 "(X + c) < X is always false"),
12841 WARN_STRICT_OVERFLOW_ALL);
12842 return constant_boolean_node (0, type);
12843 }
12844
12845 /* Convert (X - c) <= X to true. */
12846 if (!HONOR_NANS (arg1)
12847 && code == LE_EXPR
12848 && ((code0 == MINUS_EXPR && is_positive >= 0)
12849 || (code0 == PLUS_EXPR && is_positive <= 0)))
12850 {
12851 if (TREE_CODE (arg01) == INTEGER_CST
12852 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12853 fold_overflow_warning (("assuming signed overflow does not "
12854 "occur when assuming that "
12855 "(X - c) <= X is always true"),
12856 WARN_STRICT_OVERFLOW_ALL);
12857 return constant_boolean_node (1, type);
12858 }
12859
12860 /* Convert (X + c) >= X to true. */
12861 if (!HONOR_NANS (arg1)
12862 && code == GE_EXPR
12863 && ((code0 == PLUS_EXPR && is_positive >= 0)
12864 || (code0 == MINUS_EXPR && is_positive <= 0)))
12865 {
12866 if (TREE_CODE (arg01) == INTEGER_CST
12867 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12868 fold_overflow_warning (("assuming signed overflow does not "
12869 "occur when assuming that "
12870 "(X + c) >= X is always true"),
12871 WARN_STRICT_OVERFLOW_ALL);
12872 return constant_boolean_node (1, type);
12873 }
12874
12875 if (TREE_CODE (arg01) == INTEGER_CST)
12876 {
12877 /* Convert X + c > X and X - c < X to true for integers. */
12878 if (code == GT_EXPR
12879 && ((code0 == PLUS_EXPR && is_positive > 0)
12880 || (code0 == MINUS_EXPR && is_positive < 0)))
12881 {
12882 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12883 fold_overflow_warning (("assuming signed overflow does "
12884 "not occur when assuming that "
12885 "(X + c) > X is always true"),
12886 WARN_STRICT_OVERFLOW_ALL);
12887 return constant_boolean_node (1, type);
12888 }
12889
12890 if (code == LT_EXPR
12891 && ((code0 == MINUS_EXPR && is_positive > 0)
12892 || (code0 == PLUS_EXPR && is_positive < 0)))
12893 {
12894 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12895 fold_overflow_warning (("assuming signed overflow does "
12896 "not occur when assuming that "
12897 "(X - c) < X is always true"),
12898 WARN_STRICT_OVERFLOW_ALL);
12899 return constant_boolean_node (1, type);
12900 }
12901
12902 /* Convert X + c <= X and X - c >= X to false for integers. */
12903 if (code == LE_EXPR
12904 && ((code0 == PLUS_EXPR && is_positive > 0)
12905 || (code0 == MINUS_EXPR && is_positive < 0)))
12906 {
12907 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12908 fold_overflow_warning (("assuming signed overflow does "
12909 "not occur when assuming that "
12910 "(X + c) <= X is always false"),
12911 WARN_STRICT_OVERFLOW_ALL);
12912 return constant_boolean_node (0, type);
12913 }
12914
12915 if (code == GE_EXPR
12916 && ((code0 == MINUS_EXPR && is_positive > 0)
12917 || (code0 == PLUS_EXPR && is_positive < 0)))
12918 {
12919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12920 fold_overflow_warning (("assuming signed overflow does "
12921 "not occur when assuming that "
12922 "(X - c) >= X is always false"),
12923 WARN_STRICT_OVERFLOW_ALL);
12924 return constant_boolean_node (0, type);
12925 }
12926 }
12927 }
12928
12929 /* Comparisons with the highest or lowest possible integer of
12930 the specified precision will have known values. */
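/* For example, with unsigned int x: x <= UINT_MAX folds to 1,
   x > UINT_MAX - 1 becomes x == UINT_MAX, x < 0U folds to 0, and
   x >= 1U becomes x != 0U.  */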
12931 {
12932 tree arg1_type = TREE_TYPE (arg1);
12933 unsigned int prec = TYPE_PRECISION (arg1_type);
12934
12935 if (TREE_CODE (arg1) == INTEGER_CST
12936 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12937 {
12938 wide_int max = wi::max_value (arg1_type);
12939 wide_int signed_max = wi::max_value (prec, SIGNED);
12940 wide_int min = wi::min_value (arg1_type);
12941
12942 if (wi::eq_p (arg1, max))
12943 switch (code)
12944 {
12945 case GT_EXPR:
12946 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12947
12948 case GE_EXPR:
12949 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12950
12951 case LE_EXPR:
12952 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12953
12954 case LT_EXPR:
12955 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12956
12957 /* The GE_EXPR and LT_EXPR cases above are not normally
12958 reached because of previous transformations. */
12959
12960 default:
12961 break;
12962 }
12963 else if (wi::eq_p (arg1, max - 1))
12964 switch (code)
12965 {
12966 case GT_EXPR:
12967 arg1 = const_binop (PLUS_EXPR, arg1,
12968 build_int_cst (TREE_TYPE (arg1), 1));
12969 return fold_build2_loc (loc, EQ_EXPR, type,
12970 fold_convert_loc (loc,
12971 TREE_TYPE (arg1), arg0),
12972 arg1);
12973 case LE_EXPR:
12974 arg1 = const_binop (PLUS_EXPR, arg1,
12975 build_int_cst (TREE_TYPE (arg1), 1));
12976 return fold_build2_loc (loc, NE_EXPR, type,
12977 fold_convert_loc (loc, TREE_TYPE (arg1),
12978 arg0),
12979 arg1);
12980 default:
12981 break;
12982 }
12983 else if (wi::eq_p (arg1, min))
12984 switch (code)
12985 {
12986 case LT_EXPR:
12987 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12988
12989 case LE_EXPR:
12990 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12991
12992 case GE_EXPR:
12993 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12994
12995 case GT_EXPR:
12996 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12997
12998 default:
12999 break;
13000 }
13001 else if (wi::eq_p (arg1, min + 1))
13002 switch (code)
13003 {
13004 case GE_EXPR:
13005 arg1 = const_binop (MINUS_EXPR, arg1,
13006 build_int_cst (TREE_TYPE (arg1), 1));
13007 return fold_build2_loc (loc, NE_EXPR, type,
13008 fold_convert_loc (loc,
13009 TREE_TYPE (arg1), arg0),
13010 arg1);
13011 case LT_EXPR:
13012 arg1 = const_binop (MINUS_EXPR, arg1,
13013 build_int_cst (TREE_TYPE (arg1), 1));
13014 return fold_build2_loc (loc, EQ_EXPR, type,
13015 fold_convert_loc (loc, TREE_TYPE (arg1),
13016 arg0),
13017 arg1);
13018 default:
13019 break;
13020 }
13021
13022 else if (wi::eq_p (arg1, signed_max)
13023 && TYPE_UNSIGNED (arg1_type)
13024 /* We will flip the signedness of the comparison operator
13025 associated with the mode of arg1, so the sign bit is
13026 specified by this mode. Check that arg1 is the signed
13027 max associated with this sign bit. */
13028 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13029 /* signed_type does not work on pointer types. */
13030 && INTEGRAL_TYPE_P (arg1_type))
13031 {
13032 /* The following case also applies to X < signed_max+1
13033 and X >= signed_max+1 because of previous transformations. */
13034 if (code == LE_EXPR || code == GT_EXPR)
13035 {
13036 tree st = signed_type_for (arg1_type);
13037 return fold_build2_loc (loc,
13038 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13039 type, fold_convert_loc (loc, st, arg0),
13040 build_int_cst (st, 0));
13041 }
13042 }
13043 }
13044 }
13045
13046 /* If we are comparing an ABS_EXPR with a constant, we can
13047 convert all the cases into explicit comparisons, but they may
13048 well not be faster than doing the ABS and one comparison.
13049 But ABS (X) <= C is a range comparison, which becomes a subtraction
13050 and a comparison, and is probably faster. */
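/* For example: abs (x) <= 5 => x >= -5 && x <= 5, built with the
   TRUTH_ANDIF_EXPR below.  */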
13051 if (code == LE_EXPR
13052 && TREE_CODE (arg1) == INTEGER_CST
13053 && TREE_CODE (arg0) == ABS_EXPR
13054 && ! TREE_SIDE_EFFECTS (arg0)
13055 && (0 != (tem = negate_expr (arg1)))
13056 && TREE_CODE (tem) == INTEGER_CST
13057 && !TREE_OVERFLOW (tem))
13058 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13059 build2 (GE_EXPR, type,
13060 TREE_OPERAND (arg0, 0), tem),
13061 build2 (LE_EXPR, type,
13062 TREE_OPERAND (arg0, 0), arg1));
13063
13064 /* Convert ABS_EXPR<x> >= 0 to true. */
13065 strict_overflow_p = false;
13066 if (code == GE_EXPR
13067 && (integer_zerop (arg1)
13068 || (! HONOR_NANS (arg0)
13069 && real_zerop (arg1)))
13070 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13071 {
13072 if (strict_overflow_p)
13073 fold_overflow_warning (("assuming signed overflow does not occur "
13074 "when simplifying comparison of "
13075 "absolute value and zero"),
13076 WARN_STRICT_OVERFLOW_CONDITIONAL);
13077 return omit_one_operand_loc (loc, type,
13078 constant_boolean_node (true, type),
13079 arg0);
13080 }
13081
13082 /* Convert ABS_EXPR<x> < 0 to false. */
13083 strict_overflow_p = false;
13084 if (code == LT_EXPR
13085 && (integer_zerop (arg1) || real_zerop (arg1))
13086 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13087 {
13088 if (strict_overflow_p)
13089 fold_overflow_warning (("assuming signed overflow does not occur "
13090 "when simplifying comparison of "
13091 "absolute value and zero"),
13092 WARN_STRICT_OVERFLOW_CONDITIONAL);
13093 return omit_one_operand_loc (loc, type,
13094 constant_boolean_node (false, type),
13095 arg0);
13096 }
13097
13098 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13099 and similarly for >= into !=. */
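/* For example, with unsigned x: x < (1U << y) => (x >> y) == 0,
   replacing the compare against a computed bound with a cheaper
   compare against zero.  */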
13100 if ((code == LT_EXPR || code == GE_EXPR)
13101 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13102 && TREE_CODE (arg1) == LSHIFT_EXPR
13103 && integer_onep (TREE_OPERAND (arg1, 0)))
13104 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13105 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13106 TREE_OPERAND (arg1, 1)),
13107 build_zero_cst (TREE_TYPE (arg0)));
13108
13109 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13110 otherwise Y might be >= # of bits in X's type and thus e.g.
13111 (unsigned char) (1 << Y) for Y == 15 might be 0.
13112 If the cast is widening, then 1 << Y should have unsigned type,
13113 otherwise if Y is the number of bits in the signed shift type minus 1,
13114 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y ==
13115 31 might be 0xffffffff80000000. */
13116 if ((code == LT_EXPR || code == GE_EXPR)
13117 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13118 && CONVERT_EXPR_P (arg1)
13119 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13120 && (element_precision (TREE_TYPE (arg1))
13121 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13122 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13123 || (element_precision (TREE_TYPE (arg1))
13124 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13125 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13126 {
13127 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13128 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13129 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13130 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13131 build_zero_cst (TREE_TYPE (arg0)));
13132 }
13133
13134 return NULL_TREE;
13135
13136 case UNORDERED_EXPR:
13137 case ORDERED_EXPR:
13138 case UNLT_EXPR:
13139 case UNLE_EXPR:
13140 case UNGT_EXPR:
13141 case UNGE_EXPR:
13142 case UNEQ_EXPR:
13143 case LTGT_EXPR:
13144 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13145 {
13146 t1 = fold_relational_const (code, type, arg0, arg1);
13147 if (t1 != NULL_TREE)
13148 return t1;
13149 }
13150
13151 /* If the first operand is NaN, the result is constant. */
13152 if (TREE_CODE (arg0) == REAL_CST
13153 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13154 && (code != LTGT_EXPR || ! flag_trapping_math))
13155 {
13156 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13157 ? integer_zero_node
13158 : integer_one_node;
13159 return omit_one_operand_loc (loc, type, t1, arg1);
13160 }
13161
13162 /* If the second operand is NaN, the result is constant. */
13163 if (TREE_CODE (arg1) == REAL_CST
13164 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13165 && (code != LTGT_EXPR || ! flag_trapping_math))
13166 {
13167 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13168 ? integer_zero_node
13169 : integer_one_node;
13170 return omit_one_operand_loc (loc, type, t1, arg0);
13171 }
13172
13173 /* Simplify unordered comparison of something with itself. */
13174 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13175 && operand_equal_p (arg0, arg1, 0))
13176 return constant_boolean_node (1, type);
13177
13178 if (code == LTGT_EXPR
13179 && !flag_trapping_math
13180 && operand_equal_p (arg0, arg1, 0))
13181 return constant_boolean_node (0, type);
13182
13183 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13184 {
13185 tree targ0 = strip_float_extensions (arg0);
13186 tree targ1 = strip_float_extensions (arg1);
13187 tree newtype = TREE_TYPE (targ0);
13188
13189 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13190 newtype = TREE_TYPE (targ1);
13191
13192 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13193 return fold_build2_loc (loc, code, type,
13194 fold_convert_loc (loc, newtype, targ0),
13195 fold_convert_loc (loc, newtype, targ1));
13196 }
13197
13198 return NULL_TREE;
13199
13200 case COMPOUND_EXPR:
13201 /* When pedantic, a compound expression can be neither an lvalue
13202 nor an integer constant expression. */
13203 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13204 return NULL_TREE;
13205 /* Don't let (0, 0) be a null pointer constant. */
13206 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13207 : fold_convert_loc (loc, type, arg1);
13208 return pedantic_non_lvalue_loc (loc, tem);
13209
13210 case ASSERT_EXPR:
13211 /* An ASSERT_EXPR should never be passed to fold_binary. */
13212 gcc_unreachable ();
13213
13214 default:
13215 return NULL_TREE;
13216 } /* switch (code) */
13217 }
13218
13219 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13220 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13221 of GOTO_EXPR. */
13222
13223 static tree
13224 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13225 {
13226 switch (TREE_CODE (*tp))
13227 {
13228 case LABEL_EXPR:
13229 return *tp;
13230
13231 case GOTO_EXPR:
13232 *walk_subtrees = 0;
13233
13234 /* ... fall through ... */
13235
13236 default:
13237 return NULL_TREE;
13238 }
13239 }
13240
13241 /* Return whether the sub-tree ST contains a label which is accessible from
13242 outside the sub-tree. */
13243
13244 static bool
13245 contains_label_p (tree st)
13246 {
13247 return
13248 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13249 }
13250
13251 /* Fold a ternary expression of code CODE and type TYPE with operands
13252 OP0, OP1, and OP2. Return the folded expression if folding is
13253 successful. Otherwise, return NULL_TREE. */
13254
13255 tree
13256 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13257 tree op0, tree op1, tree op2)
13258 {
13259 tree tem;
13260 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13261 enum tree_code_class kind = TREE_CODE_CLASS (code);
13262
13263 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13264 && TREE_CODE_LENGTH (code) == 3);
13265
13266 /* If this is a commutative operation, and OP0 is a constant, move it
13267 to OP1 to reduce the number of tests below. */
13268 if (commutative_ternary_tree_code (code)
13269 && tree_swap_operands_p (op0, op1, true))
13270 return fold_build3_loc (loc, code, type, op1, op0, op2);
13271
13272 tem = generic_simplify (loc, code, type, op0, op1, op2);
13273 if (tem)
13274 return tem;
13275
13276 /* Strip any conversions that don't change the mode. This is safe
13277 for every expression, except for a comparison expression because
13278 its signedness is derived from its operands. So, in the latter
13279 case, only strip conversions that don't change the signedness.
13280
13281 Note that this is done as an internal manipulation within the
13282 constant folder, in order to find the simplest representation of
13283 the arguments so that their form can be studied. In any case,
13284 the appropriate type conversions should be put back in the tree
13285 that will get out of the constant folder. */
13286 if (op0)
13287 {
13288 arg0 = op0;
13289 STRIP_NOPS (arg0);
13290 }
13291
13292 if (op1)
13293 {
13294 arg1 = op1;
13295 STRIP_NOPS (arg1);
13296 }
13297
13298 if (op2)
13299 {
13300 arg2 = op2;
13301 STRIP_NOPS (arg2);
13302 }
13303
13304 switch (code)
13305 {
13306 case COMPONENT_REF:
13307 if (TREE_CODE (arg0) == CONSTRUCTOR
13308 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13309 {
13310 unsigned HOST_WIDE_INT idx;
13311 tree field, value;
13312 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13313 if (field == arg1)
13314 return value;
13315 }
13316 return NULL_TREE;
13317
13318 case COND_EXPR:
13319 case VEC_COND_EXPR:
13320 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13321 so all simple results must be passed through pedantic_non_lvalue. */
13322 if (TREE_CODE (arg0) == INTEGER_CST)
13323 {
13324 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13325 tem = integer_zerop (arg0) ? op2 : op1;
13326 /* Only optimize constant conditions when the selected branch
13327 has the same type as the COND_EXPR. This avoids optimizing
13328 away "c ? x : throw", where the throw has a void type.
13329 Avoid throwing away the operand which contains a label. */
13330 if ((!TREE_SIDE_EFFECTS (unused_op)
13331 || !contains_label_p (unused_op))
13332 && (! VOID_TYPE_P (TREE_TYPE (tem))
13333 || VOID_TYPE_P (type)))
13334 return pedantic_non_lvalue_loc (loc, tem);
13335 return NULL_TREE;
13336 }
13337 else if (TREE_CODE (arg0) == VECTOR_CST)
13338 {
13339 if ((TREE_CODE (arg1) == VECTOR_CST
13340 || TREE_CODE (arg1) == CONSTRUCTOR)
13341 && (TREE_CODE (arg2) == VECTOR_CST
13342 || TREE_CODE (arg2) == CONSTRUCTOR))
13343 {
13344 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13345 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13346 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13347 for (i = 0; i < nelts; i++)
13348 {
13349 tree val = VECTOR_CST_ELT (arg0, i);
13350 if (integer_all_onesp (val))
13351 sel[i] = i;
13352 else if (integer_zerop (val))
13353 sel[i] = nelts + i;
13354 else /* Currently unreachable. */
13355 return NULL_TREE;
13356 }
13357 tree t = fold_vec_perm (type, arg1, arg2, sel);
13358 if (t != NULL_TREE)
13359 return t;
13360 }
13361 }
13362
13363 /* If we have A op B ? A : C, we may be able to convert this to a
13364 simpler expression, depending on the operation and the values
13365 of B and C. Signed zeros prevent all of these transformations,
13366 for reasons given above each one.
13367
13368 Also try swapping the arguments and inverting the conditional. */
13369 if (COMPARISON_CLASS_P (arg0)
13370 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13371 arg1, TREE_OPERAND (arg0, 1))
13372 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13373 {
13374 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13375 if (tem)
13376 return tem;
13377 }
13378
13379 if (COMPARISON_CLASS_P (arg0)
13380 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13381 op2,
13382 TREE_OPERAND (arg0, 1))
13383 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13384 {
13385 location_t loc0 = expr_location_or (arg0, loc);
13386 tem = fold_invert_truthvalue (loc0, arg0);
13387 if (tem && COMPARISON_CLASS_P (tem))
13388 {
13389 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13390 if (tem)
13391 return tem;
13392 }
13393 }
13394
13395 /* If the second operand is simpler than the third, swap them
13396 since that produces better jump optimization results. */
13397 if (truth_value_p (TREE_CODE (arg0))
13398 && tree_swap_operands_p (op1, op2, false))
13399 {
13400 location_t loc0 = expr_location_or (arg0, loc);
13401 /* See if this can be inverted. If it can't, possibly because
13402 it was a floating-point inequality comparison, don't do
13403 anything. */
13404 tem = fold_invert_truthvalue (loc0, arg0);
13405 if (tem)
13406 return fold_build3_loc (loc, code, type, tem, op2, op1);
13407 }
13408
13409 /* Convert A ? 1 : 0 to simply A. */
13410 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13411 : (integer_onep (op1)
13412 && !VECTOR_TYPE_P (type)))
13413 && integer_zerop (op2)
13414 /* If we try to convert OP0 to our type, the
13415 call to fold will try to move the conversion inside
13416 a COND, which will recurse. In that case, the COND_EXPR
13417 is probably the best choice, so leave it alone. */
13418 && type == TREE_TYPE (arg0))
13419 return pedantic_non_lvalue_loc (loc, arg0);
13420
13421 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13422 over COND_EXPR in cases such as floating point comparisons. */
13423 if (integer_zerop (op1)
13424 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13425 : (integer_onep (op2)
13426 && !VECTOR_TYPE_P (type)))
13427 && truth_value_p (TREE_CODE (arg0)))
13428 return pedantic_non_lvalue_loc (loc,
13429 fold_convert_loc (loc, type,
13430 invert_truthvalue_loc (loc,
13431 arg0)));
13432
13433 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
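/* For example, on 32-bit int: x < 0 ? (x & INT_MIN) : 0
   => x & INT_MIN.  */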
13434 if (TREE_CODE (arg0) == LT_EXPR
13435 && integer_zerop (TREE_OPERAND (arg0, 1))
13436 && integer_zerop (op2)
13437 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13438 {
13439 /* sign_bit_p looks through both zero and sign extensions,
13440 but for this optimization only sign extensions are
13441 usable. */
13442 tree tem2 = TREE_OPERAND (arg0, 0);
13443 while (tem != tem2)
13444 {
13445 if (TREE_CODE (tem2) != NOP_EXPR
13446 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13447 {
13448 tem = NULL_TREE;
13449 break;
13450 }
13451 tem2 = TREE_OPERAND (tem2, 0);
13452 }
13453 /* sign_bit_p only checks ARG1 bits within A's precision.
13454 If <sign bit of A> has a wider type than A, bits outside
13455 of A's precision in <sign bit of A> need to be checked.
13456 If they are all 0, this optimization needs to be done
13457 in unsigned A's type; if they are all 1, in signed A's
13458 type; otherwise this can't be done.  */
13459 if (tem
13460 && TYPE_PRECISION (TREE_TYPE (tem))
13461 < TYPE_PRECISION (TREE_TYPE (arg1))
13462 && TYPE_PRECISION (TREE_TYPE (tem))
13463 < TYPE_PRECISION (type))
13464 {
13465 int inner_width, outer_width;
13466 tree tem_type;
13467
13468 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13469 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13470 if (outer_width > TYPE_PRECISION (type))
13471 outer_width = TYPE_PRECISION (type);
13472
13473 wide_int mask = wi::shifted_mask
13474 (inner_width, outer_width - inner_width, false,
13475 TYPE_PRECISION (TREE_TYPE (arg1)));
13476
13477 wide_int common = mask & arg1;
13478 if (common == mask)
13479 {
13480 tem_type = signed_type_for (TREE_TYPE (tem));
13481 tem = fold_convert_loc (loc, tem_type, tem);
13482 }
13483 else if (common == 0)
13484 {
13485 tem_type = unsigned_type_for (TREE_TYPE (tem));
13486 tem = fold_convert_loc (loc, tem_type, tem);
13487 }
13488 else
13489 tem = NULL;
13490 }
13491
13492 if (tem)
13493 return
13494 fold_convert_loc (loc, type,
13495 fold_build2_loc (loc, BIT_AND_EXPR,
13496 TREE_TYPE (tem), tem,
13497 fold_convert_loc (loc,
13498 TREE_TYPE (tem),
13499 arg1)));
13500 }
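/* A worked example of the transformation above (an illustrative
   sketch): with signed char A and int result, A < 0 ? -128 : 0
   becomes (int) (A & (signed char) -128), done in the signed type
   because the bits of -128 above A's precision are all ones, while
   A < 0 ? 128 : 0 becomes (int) ((unsigned char) A & 128), done in
   the unsigned type because those bits are all zeros.  */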
13501
13502 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13503 already handled above. */
13504 if (TREE_CODE (arg0) == BIT_AND_EXPR
13505 && integer_onep (TREE_OPERAND (arg0, 1))
13506 && integer_zerop (op2)
13507 && integer_pow2p (arg1))
13508 {
13509 tree tem = TREE_OPERAND (arg0, 0);
13510 STRIP_NOPS (tem);
13511 if (TREE_CODE (tem) == RSHIFT_EXPR
13512 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13513 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13514 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13515 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13516 TREE_OPERAND (tem, 0), arg1);
13517 }
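/* E.g. (an illustrative sketch): (A >> 3) & 1 ? 8 : 0 becomes A & 8,
   since tree_log2 (8) == 3 matches the shift count.  */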
13518
13519 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13520 is probably obsolete because the first operand should be a
13521 truth value (that's why we have the two cases above), but let's
13522 leave it in until we can confirm this for all front-ends. */
13523 if (integer_zerop (op2)
13524 && TREE_CODE (arg0) == NE_EXPR
13525 && integer_zerop (TREE_OPERAND (arg0, 1))
13526 && integer_pow2p (arg1)
13527 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13528 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13529 arg1, OEP_ONLY_CONST))
13530 return pedantic_non_lvalue_loc (loc,
13531 fold_convert_loc (loc, type,
13532 TREE_OPERAND (arg0, 0)));
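/* E.g. (an illustrative sketch): (A & 4) != 0 ? 4 : 0 folds to A & 4
   by this rule.  */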
13533
13534 /* Disable the transformations below for vectors, since
13535 fold_binary_op_with_conditional_arg may undo them immediately,
13536 yielding an infinite loop. */
13537 if (code == VEC_COND_EXPR)
13538 return NULL_TREE;
13539
13540 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13541 if (integer_zerop (op2)
13542 && truth_value_p (TREE_CODE (arg0))
13543 && truth_value_p (TREE_CODE (arg1))
13544 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13545 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13546 : TRUTH_ANDIF_EXPR,
13547 type, fold_convert_loc (loc, type, arg0), arg1);
13548
13549 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13550 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13551 && truth_value_p (TREE_CODE (arg0))
13552 && truth_value_p (TREE_CODE (arg1))
13553 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13554 {
13555 location_t loc0 = expr_location_or (arg0, loc);
13556 /* Only perform transformation if ARG0 is easily inverted. */
13557 tem = fold_invert_truthvalue (loc0, arg0);
13558 if (tem)
13559 return fold_build2_loc (loc, code == VEC_COND_EXPR
13560 ? BIT_IOR_EXPR
13561 : TRUTH_ORIF_EXPR,
13562 type, fold_convert_loc (loc, type, tem),
13563 arg1);
13564 }
13565
13566 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13567 if (integer_zerop (arg1)
13568 && truth_value_p (TREE_CODE (arg0))
13569 && truth_value_p (TREE_CODE (op2))
13570 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13571 {
13572 location_t loc0 = expr_location_or (arg0, loc);
13573 /* Only perform transformation if ARG0 is easily inverted. */
13574 tem = fold_invert_truthvalue (loc0, arg0);
13575 if (tem)
13576 return fold_build2_loc (loc, code == VEC_COND_EXPR
13577 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13578 type, fold_convert_loc (loc, type, tem),
13579 op2);
13580 }
13581
13582 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13583 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13584 && truth_value_p (TREE_CODE (arg0))
13585 && truth_value_p (TREE_CODE (op2))
13586 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13587 return fold_build2_loc (loc, code == VEC_COND_EXPR
13588 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13589 type, fold_convert_loc (loc, type, arg0), op2);
13590
13591 return NULL_TREE;
13592
13593 case CALL_EXPR:
13594 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13595 of fold_ternary on them. */
13596 gcc_unreachable ();
13597
13598 case BIT_FIELD_REF:
13599 if ((TREE_CODE (arg0) == VECTOR_CST
13600 || (TREE_CODE (arg0) == CONSTRUCTOR
13601 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13602 && (type == TREE_TYPE (TREE_TYPE (arg0))
13603 || (TREE_CODE (type) == VECTOR_TYPE
13604 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13605 {
13606 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13607 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13608 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13609 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13610
13611 if (n != 0
13612 && (idx % width) == 0
13613 && (n % width) == 0
13614 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13615 {
13616 idx = idx / width;
13617 n = n / width;
13618
13619 if (TREE_CODE (arg0) == VECTOR_CST)
13620 {
13621 if (n == 1)
13622 return VECTOR_CST_ELT (arg0, idx);
13623
13624 tree *vals = XALLOCAVEC (tree, n);
13625 for (unsigned i = 0; i < n; ++i)
13626 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13627 return build_vector (type, vals);
13628 }
13629
13630 /* Constructor elements can be subvectors. */
13631 unsigned HOST_WIDE_INT k = 1;
13632 if (CONSTRUCTOR_NELTS (arg0) != 0)
13633 {
13634 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13635 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13636 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13637 }
13638
13639 /* We keep an exact subset of the constructor elements. */
13640 if ((idx % k) == 0 && (n % k) == 0)
13641 {
13642 if (CONSTRUCTOR_NELTS (arg0) == 0)
13643 return build_constructor (type, NULL);
13644 idx /= k;
13645 n /= k;
13646 if (n == 1)
13647 {
13648 if (idx < CONSTRUCTOR_NELTS (arg0))
13649 return CONSTRUCTOR_ELT (arg0, idx)->value;
13650 return build_zero_cst (type);
13651 }
13652
13653 vec<constructor_elt, va_gc> *vals;
13654 vec_alloc (vals, n);
13655 for (unsigned i = 0;
13656 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13657 ++i)
13658 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13659 CONSTRUCTOR_ELT
13660 (arg0, idx + i)->value);
13661 return build_constructor (type, vals);
13662 }
13663 /* The bitfield references a single constructor element. */
13664 else if (idx + n <= (idx / k + 1) * k)
13665 {
13666 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13667 return build_zero_cst (type);
13668 else if (n == k)
13669 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13670 else
13671 return fold_build3_loc (loc, code, type,
13672 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13673 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13674 }
13675 }
13676 }
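/* E.g. (an illustrative sketch, V4SI with 32-bit elements):
   BIT_FIELD_REF <V, 32, 64> folds to VECTOR_CST_ELT (V, 2), and
   BIT_FIELD_REF <V, 64, 0> with a V2SI result type folds to the
   two-element vector { VECTOR_CST_ELT (V, 0), VECTOR_CST_ELT (V, 1) }.  */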
13677
13678 /* A bit-field-ref that references the full argument can be stripped. */
13679 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13680 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13681 && integer_zerop (op2))
13682 return fold_convert_loc (loc, type, arg0);
13683
13684 /* On constants we can use native encode/interpret to constant
13685 fold (nearly) all BIT_FIELD_REFs. */
13686 if (CONSTANT_CLASS_P (arg0)
13687 && can_native_interpret_type_p (type)
13688 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13689 /* This limitation should not be necessary; we just need to
13690 round this up to the mode size. */
13691 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13692 /* Need bit-shifting of the buffer to relax the following. */
13693 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13694 {
13695 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13696 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13697 unsigned HOST_WIDE_INT clen;
13698 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13699 /* ??? We cannot tell native_encode_expr to start at
13700 an arbitrary byte. So limit ourselves to a reasonable amount
13701 of work. */
13702 if (clen <= 4096)
13703 {
13704 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13705 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13706 if (len > 0
13707 && len * BITS_PER_UNIT >= bitpos + bitsize)
13708 {
13709 tree v = native_interpret_expr (type,
13710 b + bitpos / BITS_PER_UNIT,
13711 bitsize / BITS_PER_UNIT);
13712 if (v)
13713 return v;
13714 }
13715 }
13716 }
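/* E.g. (an illustrative sketch, little-endian): for the double
   constant 1.0, BIT_FIELD_REF <1.0, 32, 32> encodes the value as the
   bytes 00 00 00 00 00 00 f0 3f and reinterprets bytes 4-7 as the
   32-bit integer 0x3ff00000.  */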
13717
13718 return NULL_TREE;
13719
13720 case FMA_EXPR:
13721 /* For integers we can decompose the FMA if possible. */
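/* E.g. (an illustrative sketch): FMA_EXPR <2, 3, c> becomes c + 6
   below, and FMA_EXPR <a, b, 0> becomes a * b.  */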
13722 if (TREE_CODE (arg0) == INTEGER_CST
13723 && TREE_CODE (arg1) == INTEGER_CST)
13724 return fold_build2_loc (loc, PLUS_EXPR, type,
13725 const_binop (MULT_EXPR, arg0, arg1), arg2);
13726 if (integer_zerop (arg2))
13727 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13728
13729 return fold_fma (loc, type, arg0, arg1, arg2);
13730
13731 case VEC_PERM_EXPR:
13732 if (TREE_CODE (arg2) == VECTOR_CST)
13733 {
13734 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13735 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13736 unsigned char *sel2 = sel + nelts;
13737 bool need_mask_canon = false;
13738 bool need_mask_canon2 = false;
13739 bool all_in_vec0 = true;
13740 bool all_in_vec1 = true;
13741 bool maybe_identity = true;
13742 bool single_arg = (op0 == op1);
13743 bool changed = false;
13744
13745 mask2 = 2 * nelts - 1;
13746 mask = single_arg ? (nelts - 1) : mask2;
13747 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13748 for (i = 0; i < nelts; i++)
13749 {
13750 tree val = VECTOR_CST_ELT (arg2, i);
13751 if (TREE_CODE (val) != INTEGER_CST)
13752 return NULL_TREE;
13753
13754 /* Make sure that the perm value is in an acceptable
13755 range. */
13756 wide_int t = val;
13757 need_mask_canon |= wi::gtu_p (t, mask);
13758 need_mask_canon2 |= wi::gtu_p (t, mask2);
13759 sel[i] = t.to_uhwi () & mask;
13760 sel2[i] = t.to_uhwi () & mask2;
13761
13762 if (sel[i] < nelts)
13763 all_in_vec1 = false;
13764 else
13765 all_in_vec0 = false;
13766
13767 if ((sel[i] & (nelts-1)) != i)
13768 maybe_identity = false;
13769 }
13770
13771 if (maybe_identity)
13772 {
13773 if (all_in_vec0)
13774 return op0;
13775 if (all_in_vec1)
13776 return op1;
13777 }
13778
13779 if (all_in_vec0)
13780 op1 = op0;
13781 else if (all_in_vec1)
13782 {
13783 op0 = op1;
13784 for (i = 0; i < nelts; i++)
13785 sel[i] -= nelts;
13786 need_mask_canon = true;
13787 }
13788
13789 if ((TREE_CODE (op0) == VECTOR_CST
13790 || TREE_CODE (op0) == CONSTRUCTOR)
13791 && (TREE_CODE (op1) == VECTOR_CST
13792 || TREE_CODE (op1) == CONSTRUCTOR))
13793 {
13794 tree t = fold_vec_perm (type, op0, op1, sel);
13795 if (t != NULL_TREE)
13796 return t;
13797 }
13798
13799 if (op0 == op1 && !single_arg)
13800 changed = true;
13801
13802 /* Some targets are deficient and fail to expand a single
13803 argument permutation while still allowing an equivalent
13804 2-argument version. */
13805 if (need_mask_canon && arg2 == op2
13806 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13807 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13808 {
13809 need_mask_canon = need_mask_canon2;
13810 sel = sel2;
13811 }
13812
13813 if (need_mask_canon && arg2 == op2)
13814 {
13815 tree *tsel = XALLOCAVEC (tree, nelts);
13816 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13817 for (i = 0; i < nelts; i++)
13818 tsel[i] = build_int_cst (eltype, sel[i]);
13819 op2 = build_vector (TREE_TYPE (arg2), tsel);
13820 changed = true;
13821 }
13822
13823 if (changed)
13824 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13825 }
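/* An illustrative sketch of the above, assuming 4-element vectors:
   the selector { 4, 5, 6, 7 } folds directly to OP1, while an
   out-of-range selector such as { 9, 2, 1, 7 } is reduced modulo
   2 * nelts to { 1, 2, 1, 7 } and the VEC_PERM_EXPR is rebuilt with
   the canonicalized mask.  */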
13826 return NULL_TREE;
13827
13828 default:
13829 return NULL_TREE;
13830 } /* switch (code) */
13831 }
13832
13833 /* Perform constant folding and related simplification of EXPR.
13834 The related simplifications include x*1 => x, x*0 => 0, etc.,
13835 and application of the associative law.
13836 NOP_EXPR conversions may be removed freely (as long as we
13837 are careful not to change the type of the overall expression).
13838 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13839 but we can constant-fold them if they have constant operands. */
13840
13841 #ifdef ENABLE_FOLD_CHECKING
13842 # define fold(x) fold_1 (x)
13843 static tree fold_1 (tree);
13844 static
13845 #endif
13846 tree
13847 fold (tree expr)
13848 {
13849 const tree t = expr;
13850 enum tree_code code = TREE_CODE (t);
13851 enum tree_code_class kind = TREE_CODE_CLASS (code);
13852 tree tem;
13853 location_t loc = EXPR_LOCATION (expr);
13854
13855 /* Return right away if a constant. */
13856 if (kind == tcc_constant)
13857 return t;
13858
13859 /* CALL_EXPR-like objects with variable numbers of operands are
13860 treated specially. */
13861 if (kind == tcc_vl_exp)
13862 {
13863 if (code == CALL_EXPR)
13864 {
13865 tem = fold_call_expr (loc, expr, false);
13866 return tem ? tem : expr;
13867 }
13868 return expr;
13869 }
13870
13871 if (IS_EXPR_CODE_CLASS (kind))
13872 {
13873 tree type = TREE_TYPE (t);
13874 tree op0, op1, op2;
13875
13876 switch (TREE_CODE_LENGTH (code))
13877 {
13878 case 1:
13879 op0 = TREE_OPERAND (t, 0);
13880 tem = fold_unary_loc (loc, code, type, op0);
13881 return tem ? tem : expr;
13882 case 2:
13883 op0 = TREE_OPERAND (t, 0);
13884 op1 = TREE_OPERAND (t, 1);
13885 tem = fold_binary_loc (loc, code, type, op0, op1);
13886 return tem ? tem : expr;
13887 case 3:
13888 op0 = TREE_OPERAND (t, 0);
13889 op1 = TREE_OPERAND (t, 1);
13890 op2 = TREE_OPERAND (t, 2);
13891 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13892 return tem ? tem : expr;
13893 default:
13894 break;
13895 }
13896 }
13897
13898 switch (code)
13899 {
13900 case ARRAY_REF:
13901 {
13902 tree op0 = TREE_OPERAND (t, 0);
13903 tree op1 = TREE_OPERAND (t, 1);
13904
13905 if (TREE_CODE (op1) == INTEGER_CST
13906 && TREE_CODE (op0) == CONSTRUCTOR
13907 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13908 {
13909 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13910 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13911 unsigned HOST_WIDE_INT begin = 0;
13912
13913 /* Find a matching index by means of a binary search. */
13914 while (begin != end)
13915 {
13916 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13917 tree index = (*elts)[middle].index;
13918
13919 if (TREE_CODE (index) == INTEGER_CST
13920 && tree_int_cst_lt (index, op1))
13921 begin = middle + 1;
13922 else if (TREE_CODE (index) == INTEGER_CST
13923 && tree_int_cst_lt (op1, index))
13924 end = middle;
13925 else if (TREE_CODE (index) == RANGE_EXPR
13926 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13927 begin = middle + 1;
13928 else if (TREE_CODE (index) == RANGE_EXPR
13929 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13930 end = middle;
13931 else
13932 return (*elts)[middle].value;
13933 }
13934 }
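/* E.g. (an illustrative sketch): for a constructor such as
   { [0 ... 3] = 1, [4] = 2 }, an ARRAY_REF with constant index 2
   matches the RANGE_EXPR entry in the binary search and folds to 1.  */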
13935
13936 return t;
13937 }
13938
13939 /* Return a VECTOR_CST if possible. */
13940 case CONSTRUCTOR:
13941 {
13942 tree type = TREE_TYPE (t);
13943 if (TREE_CODE (type) != VECTOR_TYPE)
13944 return t;
13945
13946 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13947 unsigned HOST_WIDE_INT idx, pos = 0;
13948 tree value;
13949
13950 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13951 {
13952 if (!CONSTANT_CLASS_P (value))
13953 return t;
13954 if (TREE_CODE (value) == VECTOR_CST)
13955 {
13956 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13957 vec[pos++] = VECTOR_CST_ELT (value, i);
13958 }
13959 else
13960 vec[pos++] = value;
13961 }
13962 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13963 vec[pos] = build_zero_cst (TREE_TYPE (type));
13964
13965 return build_vector (type, vec);
13966 }
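/* E.g. (an illustrative sketch): a V4SI constructor { 1, 2 } with
   constant elements folds to the VECTOR_CST { 1, 2, 0, 0 }, the
   missing trailing elements being zero-filled.  */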
13967
13968 case CONST_DECL:
13969 return fold (DECL_INITIAL (t));
13970
13971 default:
13972 return t;
13973 } /* switch (code) */
13974 }
13975
13976 #ifdef ENABLE_FOLD_CHECKING
13977 #undef fold
13978
13979 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13980 hash_table<pointer_hash<const tree_node> > *);
13981 static void fold_check_failed (const_tree, const_tree);
13982 void print_fold_checksum (const_tree);
13983
13984 /* When configured with --enable-checking=fold, compute a digest of
13985 expr before and after the actual fold call to verify that fold did
13986 not accidentally change the original expr. */
13987
13988 tree
13989 fold (tree expr)
13990 {
13991 tree ret;
13992 struct md5_ctx ctx;
13993 unsigned char checksum_before[16], checksum_after[16];
13994 hash_table<pointer_hash<const tree_node> > ht (32);
13995
13996 md5_init_ctx (&ctx);
13997 fold_checksum_tree (expr, &ctx, &ht);
13998 md5_finish_ctx (&ctx, checksum_before);
13999 ht.empty ();
14000
14001 ret = fold_1 (expr);
14002
14003 md5_init_ctx (&ctx);
14004 fold_checksum_tree (expr, &ctx, &ht);
14005 md5_finish_ctx (&ctx, checksum_after);
14006
14007 if (memcmp (checksum_before, checksum_after, 16))
14008 fold_check_failed (expr, ret);
14009
14010 return ret;
14011 }
14012
14013 void
14014 print_fold_checksum (const_tree expr)
14015 {
14016 struct md5_ctx ctx;
14017 unsigned char checksum[16], cnt;
14018 hash_table<pointer_hash<const tree_node> > ht (32);
14019
14020 md5_init_ctx (&ctx);
14021 fold_checksum_tree (expr, &ctx, &ht);
14022 md5_finish_ctx (&ctx, checksum);
14023 for (cnt = 0; cnt < 16; ++cnt)
14024 fprintf (stderr, "%02x", checksum[cnt]);
14025 putc ('\n', stderr);
14026 }
14027
14028 static void
14029 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14030 {
14031 internal_error ("fold check: original tree changed by fold");
14032 }
14033
14034 static void
14035 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14036 hash_table<pointer_hash <const tree_node> > *ht)
14037 {
14038 const tree_node **slot;
14039 enum tree_code code;
14040 union tree_node buf;
14041 int i, len;
14042
14043 recursive_label:
14044 if (expr == NULL)
14045 return;
14046 slot = ht->find_slot (expr, INSERT);
14047 if (*slot != NULL)
14048 return;
14049 *slot = expr;
14050 code = TREE_CODE (expr);
14051 if (TREE_CODE_CLASS (code) == tcc_declaration
14052 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14053 {
14054 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14055 memcpy ((char *) &buf, expr, tree_size (expr));
14056 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14057 buf.decl_with_vis.symtab_node = NULL;
14058 expr = (tree) &buf;
14059 }
14060 else if (TREE_CODE_CLASS (code) == tcc_type
14061 && (TYPE_POINTER_TO (expr)
14062 || TYPE_REFERENCE_TO (expr)
14063 || TYPE_CACHED_VALUES_P (expr)
14064 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14065 || TYPE_NEXT_VARIANT (expr)))
14066 {
14067 /* Allow these fields to be modified. */
14068 tree tmp;
14069 memcpy ((char *) &buf, expr, tree_size (expr));
14070 expr = tmp = (tree) &buf;
14071 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14072 TYPE_POINTER_TO (tmp) = NULL;
14073 TYPE_REFERENCE_TO (tmp) = NULL;
14074 TYPE_NEXT_VARIANT (tmp) = NULL;
14075 if (TYPE_CACHED_VALUES_P (tmp))
14076 {
14077 TYPE_CACHED_VALUES_P (tmp) = 0;
14078 TYPE_CACHED_VALUES (tmp) = NULL;
14079 }
14080 }
14081 md5_process_bytes (expr, tree_size (expr), ctx);
14082 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14083 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14084 if (TREE_CODE_CLASS (code) != tcc_type
14085 && TREE_CODE_CLASS (code) != tcc_declaration
14086 && code != TREE_LIST
14087 && code != SSA_NAME
14088 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14089 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14090 switch (TREE_CODE_CLASS (code))
14091 {
14092 case tcc_constant:
14093 switch (code)
14094 {
14095 case STRING_CST:
14096 md5_process_bytes (TREE_STRING_POINTER (expr),
14097 TREE_STRING_LENGTH (expr), ctx);
14098 break;
14099 case COMPLEX_CST:
14100 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14101 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14102 break;
14103 case VECTOR_CST:
14104 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14105 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14106 break;
14107 default:
14108 break;
14109 }
14110 break;
14111 case tcc_exceptional:
14112 switch (code)
14113 {
14114 case TREE_LIST:
14115 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14116 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14117 expr = TREE_CHAIN (expr);
14118 goto recursive_label;
14119 break;
14120 case TREE_VEC:
14121 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14122 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14123 break;
14124 default:
14125 break;
14126 }
14127 break;
14128 case tcc_expression:
14129 case tcc_reference:
14130 case tcc_comparison:
14131 case tcc_unary:
14132 case tcc_binary:
14133 case tcc_statement:
14134 case tcc_vl_exp:
14135 len = TREE_OPERAND_LENGTH (expr);
14136 for (i = 0; i < len; ++i)
14137 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14138 break;
14139 case tcc_declaration:
14140 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14141 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14142 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14143 {
14144 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14145 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14146 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14147 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14148 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14149 }
14150
14151 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14152 {
14153 if (TREE_CODE (expr) == FUNCTION_DECL)
14154 {
14155 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14156 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14157 }
14158 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14159 }
14160 break;
14161 case tcc_type:
14162 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14163 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14164 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14165 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14166 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14167 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14168 if (INTEGRAL_TYPE_P (expr)
14169 || SCALAR_FLOAT_TYPE_P (expr))
14170 {
14171 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14172 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14173 }
14174 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14175 if (TREE_CODE (expr) == RECORD_TYPE
14176 || TREE_CODE (expr) == UNION_TYPE
14177 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14178 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14179 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14180 break;
14181 default:
14182 break;
14183 }
14184 }
14185
14186 /* Helper function for outputting the checksum of a tree T. When
14187 debugging with gdb, you can "define mynext" to be "next" followed
14188 by "call debug_fold_checksum (op0)", then just trace down till the
14189 outputs differ. */
14190
14191 DEBUG_FUNCTION void
14192 debug_fold_checksum (const_tree t)
14193 {
14194 int i;
14195 unsigned char checksum[16];
14196 struct md5_ctx ctx;
14197 hash_table<pointer_hash<const tree_node> > ht (32);
14198
14199 md5_init_ctx (&ctx);
14200 fold_checksum_tree (t, &ctx, &ht);
14201 md5_finish_ctx (&ctx, checksum);
14202 ht.empty ();
14203
14204 for (i = 0; i < 16; i++)
14205 fprintf (stderr, "%d ", checksum[i]);
14206
14207 fprintf (stderr, "\n");
14208 }
14209
14210 #endif
14211
14212 /* Fold a unary tree expression with code CODE of type TYPE with an
14213 operand OP0. LOC is the location of the resulting expression.
14214 Return a folded expression if successful. Otherwise, return a tree
14215 expression with code CODE of type TYPE with an operand OP0. */
14216
14217 tree
14218 fold_build1_stat_loc (location_t loc,
14219 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14220 {
14221 tree tem;
14222 #ifdef ENABLE_FOLD_CHECKING
14223 unsigned char checksum_before[16], checksum_after[16];
14224 struct md5_ctx ctx;
14225 hash_table<pointer_hash<const tree_node> > ht (32);
14226
14227 md5_init_ctx (&ctx);
14228 fold_checksum_tree (op0, &ctx, &ht);
14229 md5_finish_ctx (&ctx, checksum_before);
14230 ht.empty ();
14231 #endif
14232
14233 tem = fold_unary_loc (loc, code, type, op0);
14234 if (!tem)
14235 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14236
14237 #ifdef ENABLE_FOLD_CHECKING
14238 md5_init_ctx (&ctx);
14239 fold_checksum_tree (op0, &ctx, &ht);
14240 md5_finish_ctx (&ctx, checksum_after);
14241
14242 if (memcmp (checksum_before, checksum_after, 16))
14243 fold_check_failed (op0, tem);
14244 #endif
14245 return tem;
14246 }
14247
14248 /* Fold a binary tree expression with code CODE of type TYPE with
14249 operands OP0 and OP1. LOC is the location of the resulting
14250 expression. Return a folded expression if successful. Otherwise,
14251 return a tree expression with code CODE of type TYPE with operands
14252 OP0 and OP1. */
14253
14254 tree
14255 fold_build2_stat_loc (location_t loc,
14256 enum tree_code code, tree type, tree op0, tree op1
14257 MEM_STAT_DECL)
14258 {
14259 tree tem;
14260 #ifdef ENABLE_FOLD_CHECKING
14261 unsigned char checksum_before_op0[16],
14262 checksum_before_op1[16],
14263 checksum_after_op0[16],
14264 checksum_after_op1[16];
14265 struct md5_ctx ctx;
14266 hash_table<pointer_hash<const tree_node> > ht (32);
14267
14268 md5_init_ctx (&ctx);
14269 fold_checksum_tree (op0, &ctx, &ht);
14270 md5_finish_ctx (&ctx, checksum_before_op0);
14271 ht.empty ();
14272
14273 md5_init_ctx (&ctx);
14274 fold_checksum_tree (op1, &ctx, &ht);
14275 md5_finish_ctx (&ctx, checksum_before_op1);
14276 ht.empty ();
14277 #endif
14278
14279 tem = fold_binary_loc (loc, code, type, op0, op1);
14280 if (!tem)
14281 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14282
14283 #ifdef ENABLE_FOLD_CHECKING
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (op0, &ctx, &ht);
14286 md5_finish_ctx (&ctx, checksum_after_op0);
14287 ht.empty ();
14288
14289 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14290 fold_check_failed (op0, tem);
14291
14292 md5_init_ctx (&ctx);
14293 fold_checksum_tree (op1, &ctx, &ht);
14294 md5_finish_ctx (&ctx, checksum_after_op1);
14295
14296 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14297 fold_check_failed (op1, tem);
14298 #endif
14299 return tem;
14300 }
14301
14302 /* Fold a ternary tree expression with code CODE of type TYPE with
14303 operands OP0, OP1, and OP2. Return a folded expression if
14304 successful. Otherwise, return a tree expression with code CODE of
14305 type TYPE with operands OP0, OP1, and OP2. */
14306
14307 tree
14308 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14309 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14310 {
14311 tree tem;
14312 #ifdef ENABLE_FOLD_CHECKING
14313 unsigned char checksum_before_op0[16],
14314 checksum_before_op1[16],
14315 checksum_before_op2[16],
14316 checksum_after_op0[16],
14317 checksum_after_op1[16],
14318 checksum_after_op2[16];
14319 struct md5_ctx ctx;
14320 hash_table<pointer_hash<const tree_node> > ht (32);
14321
14322 md5_init_ctx (&ctx);
14323 fold_checksum_tree (op0, &ctx, &ht);
14324 md5_finish_ctx (&ctx, checksum_before_op0);
14325 ht.empty ();
14326
14327 md5_init_ctx (&ctx);
14328 fold_checksum_tree (op1, &ctx, &ht);
14329 md5_finish_ctx (&ctx, checksum_before_op1);
14330 ht.empty ();
14331
14332 md5_init_ctx (&ctx);
14333 fold_checksum_tree (op2, &ctx, &ht);
14334 md5_finish_ctx (&ctx, checksum_before_op2);
14335 ht.empty ();
14336 #endif
14337
14338 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14339 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14340 if (!tem)
14341 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14342
14343 #ifdef ENABLE_FOLD_CHECKING
14344 md5_init_ctx (&ctx);
14345 fold_checksum_tree (op0, &ctx, &ht);
14346 md5_finish_ctx (&ctx, checksum_after_op0);
14347 ht.empty ();
14348
14349 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14350 fold_check_failed (op0, tem);
14351
14352 md5_init_ctx (&ctx);
14353 fold_checksum_tree (op1, &ctx, &ht);
14354 md5_finish_ctx (&ctx, checksum_after_op1);
14355 ht.empty ();
14356
14357 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14358 fold_check_failed (op1, tem);
14359
14360 md5_init_ctx (&ctx);
14361 fold_checksum_tree (op2, &ctx, &ht);
14362 md5_finish_ctx (&ctx, checksum_after_op2);
14363
14364 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14365 fold_check_failed (op2, tem);
14366 #endif
14367 return tem;
14368 }
14369
14370 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14371 arguments in ARGARRAY, and a null static chain.
14372 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14373 of type TYPE from the given operands as constructed by build_call_array. */
14374
14375 tree
14376 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14377 int nargs, tree *argarray)
14378 {
14379 tree tem;
14380 #ifdef ENABLE_FOLD_CHECKING
14381 unsigned char checksum_before_fn[16],
14382 checksum_before_arglist[16],
14383 checksum_after_fn[16],
14384 checksum_after_arglist[16];
14385 struct md5_ctx ctx;
14386 hash_table<pointer_hash<const tree_node> > ht (32);
14387 int i;
14388
14389 md5_init_ctx (&ctx);
14390 fold_checksum_tree (fn, &ctx, &ht);
14391 md5_finish_ctx (&ctx, checksum_before_fn);
14392 ht.empty ();
14393
14394 md5_init_ctx (&ctx);
14395 for (i = 0; i < nargs; i++)
14396 fold_checksum_tree (argarray[i], &ctx, &ht);
14397 md5_finish_ctx (&ctx, checksum_before_arglist);
14398 ht.empty ();
14399 #endif
14400
14401 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14402 if (!tem)
14403 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14404
14405 #ifdef ENABLE_FOLD_CHECKING
14406 md5_init_ctx (&ctx);
14407 fold_checksum_tree (fn, &ctx, &ht);
14408 md5_finish_ctx (&ctx, checksum_after_fn);
14409 ht.empty ();
14410
14411 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14412 fold_check_failed (fn, tem);
14413
14414 md5_init_ctx (&ctx);
14415 for (i = 0; i < nargs; i++)
14416 fold_checksum_tree (argarray[i], &ctx, &ht);
14417 md5_finish_ctx (&ctx, checksum_after_arglist);
14418
14419 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14420 fold_check_failed (NULL_TREE, tem);
14421 #endif
14422 return tem;
14423 }
14424
14425 /* Perform constant folding and related simplification of an initializer
14426 expression. These behave identically to "fold_buildN" but ignore
14427 potential run-time traps and exceptions that fold must preserve. */
14428
14429 #define START_FOLD_INIT \
14430 int saved_signaling_nans = flag_signaling_nans;\
14431 int saved_trapping_math = flag_trapping_math;\
14432 int saved_rounding_math = flag_rounding_math;\
14433 int saved_trapv = flag_trapv;\
14434 int saved_folding_initializer = folding_initializer;\
14435 flag_signaling_nans = 0;\
14436 flag_trapping_math = 0;\
14437 flag_rounding_math = 0;\
14438 flag_trapv = 0;\
14439 folding_initializer = 1;
14440
14441 #define END_FOLD_INIT \
14442 flag_signaling_nans = saved_signaling_nans;\
14443 flag_trapping_math = saved_trapping_math;\
14444 flag_rounding_math = saved_rounding_math;\
14445 flag_trapv = saved_trapv;\
14446 folding_initializer = saved_folding_initializer;
14447
14448 tree
14449 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14450 tree type, tree op)
14451 {
14452 tree result;
14453 START_FOLD_INIT;
14454
14455 result = fold_build1_loc (loc, code, type, op);
14456
14457 END_FOLD_INIT;
14458 return result;
14459 }
14460
14461 tree
14462 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14463 tree type, tree op0, tree op1)
14464 {
14465 tree result;
14466 START_FOLD_INIT;
14467
14468 result = fold_build2_loc (loc, code, type, op0, op1);
14469
14470 END_FOLD_INIT;
14471 return result;
14472 }
14473
14474 tree
14475 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14476 int nargs, tree *argarray)
14477 {
14478 tree result;
14479 START_FOLD_INIT;
14480
14481 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14482
14483 END_FOLD_INIT;
14484 return result;
14485 }
14486
14487 #undef START_FOLD_INIT
14488 #undef END_FOLD_INIT
14489
14490 /* Determine if the first argument is a multiple of the second argument.
14491 Return 0 if it is not, or if we cannot easily determine it to be.
14492
14493 An example of the sort of thing we care about (at this point; this routine
14494 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14495 fold cases do now) is discovering that
14496
14497 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14498
14499 is a multiple of
14500
14501 SAVE_EXPR (J * 8)
14502
14503 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14504
14505 This code also handles discovering that
14506
14507 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14508
14509 is a multiple of 8 so we don't have to worry about dealing with a
14510 possible remainder.
14511
14512 Note that we *look* inside a SAVE_EXPR only to determine how it was
14513 calculated; it is not safe for fold to do much of anything else with the
14514 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14515 at run time. For example, the latter example above *cannot* be implemented
14516 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14517 evaluation time of the original SAVE_EXPR is not necessarily the same at
14518 the time the new expression is evaluated. The only optimization of this
14519 sort that would be valid is changing
14520
14521 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14522
14523 divided by 8 to
14524
14525 SAVE_EXPR (I) * SAVE_EXPR (J)
14526
14527 (where the same SAVE_EXPR (J) is used in the original and the
14528 transformed version). */
14529
14530 int
14531 multiple_of_p (tree type, const_tree top, const_tree bottom)
14532 {
14533 if (operand_equal_p (top, bottom, 0))
14534 return 1;
14535
14536 if (TREE_CODE (type) != INTEGER_TYPE)
14537 return 0;
14538
14539 switch (TREE_CODE (top))
14540 {
14541 case BIT_AND_EXPR:
14542 /* Bitwise AND provides a power-of-two multiple. If either operand is
14543 a multiple of the power-of-two BOTTOM, then TOP is too. */
14544 if (!integer_pow2p (bottom))
14545 return 0;
14546 /* FALLTHRU */
14547
14548 case MULT_EXPR:
14549 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14550 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14551
14552 case PLUS_EXPR:
14553 case MINUS_EXPR:
14554 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14555 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14556
14557 case LSHIFT_EXPR:
14558 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14559 {
14560 tree op1, t1;
14561
14562 op1 = TREE_OPERAND (top, 1);
14563 /* const_binop may not detect overflow correctly,
14564 so check for it explicitly here. */
14565 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14566 && 0 != (t1 = fold_convert (type,
14567 const_binop (LSHIFT_EXPR,
14568 size_one_node,
14569 op1)))
14570 && !TREE_OVERFLOW (t1))
14571 return multiple_of_p (type, t1, bottom);
14572 }
14573 return 0;
14574
14575 case NOP_EXPR:
14576 /* Can't handle conversions from a non-integral or wider integral type. */
14577 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14578 || (TYPE_PRECISION (type)
14579 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14580 return 0;
14581
14582 /* ... fall through ... */
14583
14584 case SAVE_EXPR:
14585 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14586
14587 case COND_EXPR:
14588 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14589 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14590
14591 case INTEGER_CST:
14592 if (TREE_CODE (bottom) != INTEGER_CST
14593 || integer_zerop (bottom)
14594 || (TYPE_UNSIGNED (type)
14595 && (tree_int_cst_sgn (top) < 0
14596 || tree_int_cst_sgn (bottom) < 0)))
14597 return 0;
14598 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14599 SIGNED);
14600
14601 default:
14602 return 0;
14603 }
14604 }
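/* E.g. (an illustrative sketch): multiple_of_p returns 1 for
   TOP = I * 8 + 16 and BOTTOM = 4, since in the PLUS_EXPR case both
   the MULT_EXPR (via its constant operand 8) and the constant 16 are
   themselves multiples of 4.  */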
14605
14606 /* Return true if CODE or TYPE is known to be non-negative. */
14607
14608 static bool
14609 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14610 {
14611 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14612 && truth_value_p (code))
14613 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14614 have a signed:1 type (where the values are -1 and 0). */
14615 return true;
14616 return false;
14617 }
14618
14619 /* Return true if (CODE OP0) is known to be non-negative. If the return
14620 value is based on the assumption that signed overflow is undefined,
14621 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14622 *STRICT_OVERFLOW_P. */
14623
14624 bool
14625 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14626 bool *strict_overflow_p)
14627 {
14628 if (TYPE_UNSIGNED (type))
14629 return true;
14630
14631 switch (code)
14632 {
14633 case ABS_EXPR:
14634 /* We can't return 1 if flag_wrapv is set because
14635 ABS_EXPR<INT_MIN> = INT_MIN. */
14636 if (!ANY_INTEGRAL_TYPE_P (type))
14637 return true;
14638 if (TYPE_OVERFLOW_UNDEFINED (type))
14639 {
14640 *strict_overflow_p = true;
14641 return true;
14642 }
14643 break;
14644
14645 case NON_LVALUE_EXPR:
14646 case FLOAT_EXPR:
14647 case FIX_TRUNC_EXPR:
14648 return tree_expr_nonnegative_warnv_p (op0,
14649 strict_overflow_p);
14650
14651 CASE_CONVERT:
14652 {
14653 tree inner_type = TREE_TYPE (op0);
14654 tree outer_type = type;
14655
14656 if (TREE_CODE (outer_type) == REAL_TYPE)
14657 {
14658 if (TREE_CODE (inner_type) == REAL_TYPE)
14659 return tree_expr_nonnegative_warnv_p (op0,
14660 strict_overflow_p);
14661 if (INTEGRAL_TYPE_P (inner_type))
14662 {
14663 if (TYPE_UNSIGNED (inner_type))
14664 return true;
14665 return tree_expr_nonnegative_warnv_p (op0,
14666 strict_overflow_p);
14667 }
14668 }
14669 else if (INTEGRAL_TYPE_P (outer_type))
14670 {
14671 if (TREE_CODE (inner_type) == REAL_TYPE)
14672 return tree_expr_nonnegative_warnv_p (op0,
14673 strict_overflow_p);
14674 if (INTEGRAL_TYPE_P (inner_type))
14675 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14676 && TYPE_UNSIGNED (inner_type);
14677 }
14678 }
14679 break;
14680
14681 default:
14682 return tree_simple_nonnegative_warnv_p (code, type);
14683 }
14684
14685 /* We don't know sign of `t', so be conservative and return false. */
14686 return false;
14687 }
14688
14689 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14690 value is based on the assumption that signed overflow is undefined,
14691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14692 *STRICT_OVERFLOW_P. */
14693
14694 bool
14695 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14696 tree op1, bool *strict_overflow_p)
14697 {
14698 if (TYPE_UNSIGNED (type))
14699 return true;
14700
14701 switch (code)
14702 {
14703 case POINTER_PLUS_EXPR:
14704 case PLUS_EXPR:
14705 if (FLOAT_TYPE_P (type))
14706 return (tree_expr_nonnegative_warnv_p (op0,
14707 strict_overflow_p)
14708 && tree_expr_nonnegative_warnv_p (op1,
14709 strict_overflow_p));
14710
14711 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14712 both unsigned and at least 2 bits shorter than the result. */
14713 if (TREE_CODE (type) == INTEGER_TYPE
14714 && TREE_CODE (op0) == NOP_EXPR
14715 && TREE_CODE (op1) == NOP_EXPR)
14716 {
14717 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14718 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14719 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14720 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14721 {
14722 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14723 TYPE_PRECISION (inner2)) + 1;
14724 return prec < TYPE_PRECISION (type);
14725 }
14726 }
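/* E.g. (an illustrative sketch): for unsigned char X and Y widened
   to int, prec = MAX (8, 8) + 1 = 9 < 32, and indeed
   (int) X + (int) Y is at most 255 + 255 = 510, hence non-negative.  */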
14727 break;
14728
14729 case MULT_EXPR:
14730 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14731 {
14732 /* x * x is always non-negative for floating point x,
14733 or when signed overflow is undefined. */
14734 if (operand_equal_p (op0, op1, 0)
14735 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14736 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14737 {
14738 if (ANY_INTEGRAL_TYPE_P (type)
14739 && TYPE_OVERFLOW_UNDEFINED (type))
14740 *strict_overflow_p = true;
14741 return true;
14742 }
14743 }
14744
14745 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14746 both unsigned and their combined width is less than that of the result. */
14747 if (TREE_CODE (type) == INTEGER_TYPE
14748 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14749 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14750 {
14751 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14752 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14753 : TREE_TYPE (op0);
14754 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14755 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14756 : TREE_TYPE (op1);
14757
14758 bool unsigned0 = TYPE_UNSIGNED (inner0);
14759 bool unsigned1 = TYPE_UNSIGNED (inner1);
14760
14761 if (TREE_CODE (op0) == INTEGER_CST)
14762 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14763
14764 if (TREE_CODE (op1) == INTEGER_CST)
14765 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14766
14767 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14768 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14769 {
14770 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14771 ? tree_int_cst_min_precision (op0, UNSIGNED)
14772 : TYPE_PRECISION (inner0);
14773
14774 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14775 ? tree_int_cst_min_precision (op1, UNSIGNED)
14776 : TYPE_PRECISION (inner1);
14777
14778 return precision0 + precision1 < TYPE_PRECISION (type);
14779 }
14780 }
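/* E.g. (an illustrative sketch): (int) X * (int) Y for unsigned char
   X and Y has precision0 + precision1 = 16 < 32, and indeed the
   product is at most 255 * 255 = 65025, hence non-negative.  */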
14781 return false;
14782
14783 case BIT_AND_EXPR:
14784 case MAX_EXPR:
14785 return (tree_expr_nonnegative_warnv_p (op0,
14786 strict_overflow_p)
14787 || tree_expr_nonnegative_warnv_p (op1,
14788 strict_overflow_p));
14789
14790 case BIT_IOR_EXPR:
14791 case BIT_XOR_EXPR:
14792 case MIN_EXPR:
14793 case RDIV_EXPR:
14794 case TRUNC_DIV_EXPR:
14795 case CEIL_DIV_EXPR:
14796 case FLOOR_DIV_EXPR:
14797 case ROUND_DIV_EXPR:
14798 return (tree_expr_nonnegative_warnv_p (op0,
14799 strict_overflow_p)
14800 && tree_expr_nonnegative_warnv_p (op1,
14801 strict_overflow_p));
14802
14803 case TRUNC_MOD_EXPR:
14804 case CEIL_MOD_EXPR:
14805 case FLOOR_MOD_EXPR:
14806 case ROUND_MOD_EXPR:
14807 return tree_expr_nonnegative_warnv_p (op0,
14808 strict_overflow_p);
14809 default:
14810 return tree_simple_nonnegative_warnv_p (code, type);
14811 }
14812
14813 /* We don't know sign of `t', so be conservative and return false. */
14814 return false;
14815 }
14816
14817 /* Return true if T is known to be non-negative. If the return
14818 value is based on the assumption that signed overflow is undefined,
14819 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14820 *STRICT_OVERFLOW_P. */
14821
14822 bool
14823 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14824 {
14825 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14826 return true;
14827
14828 switch (TREE_CODE (t))
14829 {
14830 case INTEGER_CST:
14831 return tree_int_cst_sgn (t) >= 0;
14832
14833 case REAL_CST:
14834 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14835
14836 case FIXED_CST:
14837 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14838
14839 case COND_EXPR:
14840 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14841 strict_overflow_p)
14842 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14843 strict_overflow_p));
14844 default:
14845 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14846 TREE_TYPE (t));
14847 }
14848 /* We don't know sign of `t', so be conservative and return false. */
14849 return false;
14850 }
14851
14852 /* Return true if T is known to be non-negative. If the return
14853 value is based on the assumption that signed overflow is undefined,
14854 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14855 *STRICT_OVERFLOW_P. */
14856
14857 bool
14858 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14859 tree arg0, tree arg1, bool *strict_overflow_p)
14860 {
14861 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14862 switch (DECL_FUNCTION_CODE (fndecl))
14863 {
14864 CASE_FLT_FN (BUILT_IN_ACOS):
14865 CASE_FLT_FN (BUILT_IN_ACOSH):
14866 CASE_FLT_FN (BUILT_IN_CABS):
14867 CASE_FLT_FN (BUILT_IN_COSH):
14868 CASE_FLT_FN (BUILT_IN_ERFC):
14869 CASE_FLT_FN (BUILT_IN_EXP):
14870 CASE_FLT_FN (BUILT_IN_EXP10):
14871 CASE_FLT_FN (BUILT_IN_EXP2):
14872 CASE_FLT_FN (BUILT_IN_FABS):
14873 CASE_FLT_FN (BUILT_IN_FDIM):
14874 CASE_FLT_FN (BUILT_IN_HYPOT):
14875 CASE_FLT_FN (BUILT_IN_POW10):
14876 CASE_INT_FN (BUILT_IN_FFS):
14877 CASE_INT_FN (BUILT_IN_PARITY):
14878 CASE_INT_FN (BUILT_IN_POPCOUNT):
14879 CASE_INT_FN (BUILT_IN_CLZ):
14880 CASE_INT_FN (BUILT_IN_CLRSB):
14881 case BUILT_IN_BSWAP32:
14882 case BUILT_IN_BSWAP64:
14883 /* Always true. */
14884 return true;
14885
14886 CASE_FLT_FN (BUILT_IN_SQRT):
14887 /* sqrt(-0.0) is -0.0. */
14888 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14889 return true;
14890 return tree_expr_nonnegative_warnv_p (arg0,
14891 strict_overflow_p);
14892
14893 CASE_FLT_FN (BUILT_IN_ASINH):
14894 CASE_FLT_FN (BUILT_IN_ATAN):
14895 CASE_FLT_FN (BUILT_IN_ATANH):
14896 CASE_FLT_FN (BUILT_IN_CBRT):
14897 CASE_FLT_FN (BUILT_IN_CEIL):
14898 CASE_FLT_FN (BUILT_IN_ERF):
14899 CASE_FLT_FN (BUILT_IN_EXPM1):
14900 CASE_FLT_FN (BUILT_IN_FLOOR):
14901 CASE_FLT_FN (BUILT_IN_FMOD):
14902 CASE_FLT_FN (BUILT_IN_FREXP):
14903 CASE_FLT_FN (BUILT_IN_ICEIL):
14904 CASE_FLT_FN (BUILT_IN_IFLOOR):
14905 CASE_FLT_FN (BUILT_IN_IRINT):
14906 CASE_FLT_FN (BUILT_IN_IROUND):
14907 CASE_FLT_FN (BUILT_IN_LCEIL):
14908 CASE_FLT_FN (BUILT_IN_LDEXP):
14909 CASE_FLT_FN (BUILT_IN_LFLOOR):
14910 CASE_FLT_FN (BUILT_IN_LLCEIL):
14911 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14912 CASE_FLT_FN (BUILT_IN_LLRINT):
14913 CASE_FLT_FN (BUILT_IN_LLROUND):
14914 CASE_FLT_FN (BUILT_IN_LRINT):
14915 CASE_FLT_FN (BUILT_IN_LROUND):
14916 CASE_FLT_FN (BUILT_IN_MODF):
14917 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14918 CASE_FLT_FN (BUILT_IN_RINT):
14919 CASE_FLT_FN (BUILT_IN_ROUND):
14920 CASE_FLT_FN (BUILT_IN_SCALB):
14921 CASE_FLT_FN (BUILT_IN_SCALBLN):
14922 CASE_FLT_FN (BUILT_IN_SCALBN):
14923 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14924 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14925 CASE_FLT_FN (BUILT_IN_SINH):
14926 CASE_FLT_FN (BUILT_IN_TANH):
14927 CASE_FLT_FN (BUILT_IN_TRUNC):
14928 /* True if the 1st argument is nonnegative. */
14929 return tree_expr_nonnegative_warnv_p (arg0,
14930 strict_overflow_p);
14931
14932 CASE_FLT_FN (BUILT_IN_FMAX):
14933 /* True if the 1st OR the 2nd argument is nonnegative. */
14934 return (tree_expr_nonnegative_warnv_p (arg0,
14935 strict_overflow_p)
14936 || (tree_expr_nonnegative_warnv_p (arg1,
14937 strict_overflow_p)));
14938
14939 CASE_FLT_FN (BUILT_IN_FMIN):
14940 /* True if the 1st AND 2nd arguments are nonnegative. */
14941 return (tree_expr_nonnegative_warnv_p (arg0,
14942 strict_overflow_p)
14943 && (tree_expr_nonnegative_warnv_p (arg1,
14944 strict_overflow_p)));
14945
14946 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14947 /* True if the 2nd argument is nonnegative. */
14948 return tree_expr_nonnegative_warnv_p (arg1,
14949 strict_overflow_p);
14950
14951 CASE_FLT_FN (BUILT_IN_POWI):
14952 /* True if the 1st argument is nonnegative or the second
14953 argument is an even integer. */
14954 if (TREE_CODE (arg1) == INTEGER_CST
14955 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14956 return true;
14957 return tree_expr_nonnegative_warnv_p (arg0,
14958 strict_overflow_p);
14959
14960 CASE_FLT_FN (BUILT_IN_POW):
14961 /* True if the 1st argument is nonnegative or the second
14962 argument is an even integer-valued real. */
14963 if (TREE_CODE (arg1) == REAL_CST)
14964 {
14965 REAL_VALUE_TYPE c;
14966 HOST_WIDE_INT n;
14967
14968 c = TREE_REAL_CST (arg1);
14969 n = real_to_integer (&c);
14970 if ((n & 1) == 0)
14971 {
14972 REAL_VALUE_TYPE cint;
14973 real_from_integer (&cint, VOIDmode, n, SIGNED);
14974 if (real_identical (&c, &cint))
14975 return true;
14976 }
14977 }
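/* E.g. (an illustrative sketch): pow (x, 2.0) is non-negative for
   any x because 2.0 is an even integer-valued real, whereas
   pow (x, 3.0) is known non-negative only when x is.  */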
14978 return tree_expr_nonnegative_warnv_p (arg0,
14979 strict_overflow_p);
14980
14981 default:
14982 break;
14983 }
14984 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14985 type);
14986 }
14987
14988 /* Return true if T is known to be non-negative. If the return
14989 value is based on the assumption that signed overflow is undefined,
14990 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14991 *STRICT_OVERFLOW_P. */
14992
14993 static bool
14994 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14995 {
14996 enum tree_code code = TREE_CODE (t);
14997 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14998 return true;
14999
15000 switch (code)
15001 {
15002 case TARGET_EXPR:
15003 {
15004 tree temp = TARGET_EXPR_SLOT (t);
15005 t = TARGET_EXPR_INITIAL (t);
15006
15007 /* If the initializer is non-void, then it's a normal expression
15008 that will be assigned to the slot. */
15009 if (!VOID_TYPE_P (t))
15010 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15011
15012 /* Otherwise, the initializer sets the slot in some way. One common
15013 way is an assignment statement at the end of the initializer. */
15014 while (1)
15015 {
15016 if (TREE_CODE (t) == BIND_EXPR)
15017 t = expr_last (BIND_EXPR_BODY (t));
15018 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15019 || TREE_CODE (t) == TRY_CATCH_EXPR)
15020 t = expr_last (TREE_OPERAND (t, 0));
15021 else if (TREE_CODE (t) == STATEMENT_LIST)
15022 t = expr_last (t);
15023 else
15024 break;
15025 }
15026 if (TREE_CODE (t) == MODIFY_EXPR
15027 && TREE_OPERAND (t, 0) == temp)
15028 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15029 strict_overflow_p);
15030
15031 return false;
15032 }
15033
15034 case CALL_EXPR:
15035 {
15036 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15037 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15038
15039 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15040 get_callee_fndecl (t),
15041 arg0,
15042 arg1,
15043 strict_overflow_p);
15044 }
15045 case COMPOUND_EXPR:
15046 case MODIFY_EXPR:
15047 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15048 strict_overflow_p);
15049 case BIND_EXPR:
15050 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15051 strict_overflow_p);
15052 case SAVE_EXPR:
15053 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15054 strict_overflow_p);
15055
15056 default:
15057 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15058 TREE_TYPE (t));
15059 }
15060
15061 /* We don't know sign of `t', so be conservative and return false. */
15062 return false;
15063 }
15064
15065 /* Return true if T is known to be non-negative. If the return
15066 value is based on the assumption that signed overflow is undefined,
15067 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15068 *STRICT_OVERFLOW_P. */
15069
15070 bool
15071 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15072 {
15073 enum tree_code code;
15074 if (t == error_mark_node)
15075 return false;
15076
15077 code = TREE_CODE (t);
15078 switch (TREE_CODE_CLASS (code))
15079 {
15080 case tcc_binary:
15081 case tcc_comparison:
15082 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15083 TREE_TYPE (t),
15084 TREE_OPERAND (t, 0),
15085 TREE_OPERAND (t, 1),
15086 strict_overflow_p);
15087
15088 case tcc_unary:
15089 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15090 TREE_TYPE (t),
15091 TREE_OPERAND (t, 0),
15092 strict_overflow_p);
15093
15094 case tcc_constant:
15095 case tcc_declaration:
15096 case tcc_reference:
15097 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15098
15099 default:
15100 break;
15101 }
15102
15103 switch (code)
15104 {
15105 case TRUTH_AND_EXPR:
15106 case TRUTH_OR_EXPR:
15107 case TRUTH_XOR_EXPR:
15108 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15109 TREE_TYPE (t),
15110 TREE_OPERAND (t, 0),
15111 TREE_OPERAND (t, 1),
15112 strict_overflow_p);
15113 case TRUTH_NOT_EXPR:
15114 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15115 TREE_TYPE (t),
15116 TREE_OPERAND (t, 0),
15117 strict_overflow_p);
15118
15119 case COND_EXPR:
15120 case CONSTRUCTOR:
15121 case OBJ_TYPE_REF:
15122 case ASSERT_EXPR:
15123 case ADDR_EXPR:
15124 case WITH_SIZE_EXPR:
15125 case SSA_NAME:
15126 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15127
15128 default:
15129 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15130 }
15131 }
15132
15133 /* Return true if `t' is known to be non-negative. Handle warnings
15134 about undefined signed overflow. */
15135
15136 bool
15137 tree_expr_nonnegative_p (tree t)
15138 {
15139 bool ret, strict_overflow_p;
15140
15141 strict_overflow_p = false;
15142 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15143 if (strict_overflow_p)
15144 fold_overflow_warning (("assuming signed overflow does not occur when "
15145 "determining that expression is always "
15146 "non-negative"),
15147 WARN_STRICT_OVERFLOW_MISC);
15148 return ret;
15149 }
15150
15151
15152 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15153 For floating point we further ensure that T is not denormal.
15154 Similar logic is present in nonzero_address_p in rtlanal.c.
15155
15156 If the return value is based on the assumption that signed overflow
15157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15158 change *STRICT_OVERFLOW_P. */
15159
15160 bool
15161 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15162 bool *strict_overflow_p)
15163 {
15164 switch (code)
15165 {
15166 case ABS_EXPR:
15167 return tree_expr_nonzero_warnv_p (op0,
15168 strict_overflow_p);
15169
15170 case NOP_EXPR:
15171 {
15172 tree inner_type = TREE_TYPE (op0);
15173 tree outer_type = type;
15174
15175 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15176 && tree_expr_nonzero_warnv_p (op0,
15177 strict_overflow_p));
15178 }
15179 break;
15180
15181 case NON_LVALUE_EXPR:
15182 return tree_expr_nonzero_warnv_p (op0,
15183 strict_overflow_p);
15184
15185 default:
15186 break;
15187 }
15188
15189 return false;
15190 }
15191
15192 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15193 For floating point we further ensure that T is not denormal.
15194 Similar logic is present in nonzero_address_p in rtlanal.c.
15195
15196 If the return value is based on the assumption that signed overflow
15197 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15198 change *STRICT_OVERFLOW_P. */
15199
15200 bool
15201 tree_binary_nonzero_warnv_p (enum tree_code code,
15202 tree type,
15203 tree op0,
15204 tree op1, bool *strict_overflow_p)
15205 {
15206 bool sub_strict_overflow_p;
15207 switch (code)
15208 {
15209 case POINTER_PLUS_EXPR:
15210 case PLUS_EXPR:
15211 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15212 {
15213 /* In the presence of negative values it is hard
15214 to say anything definite. */
15215 sub_strict_overflow_p = false;
15216 if (!tree_expr_nonnegative_warnv_p (op0,
15217 &sub_strict_overflow_p)
15218 || !tree_expr_nonnegative_warnv_p (op1,
15219 &sub_strict_overflow_p))
15220 return false;
15221 /* One of the operands must be positive and the other non-negative. */
15222 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15223 overflows, on a two's-complement machine the sum of two
15224 nonnegative numbers can never be zero. */
15225 return (tree_expr_nonzero_warnv_p (op0,
15226 strict_overflow_p)
15227 || tree_expr_nonzero_warnv_p (op1,
15228 strict_overflow_p));
15229 }
15230 break;
15231
15232 case MULT_EXPR:
15233 if (TYPE_OVERFLOW_UNDEFINED (type))
15234 {
15235 if (tree_expr_nonzero_warnv_p (op0,
15236 strict_overflow_p)
15237 && tree_expr_nonzero_warnv_p (op1,
15238 strict_overflow_p))
15239 {
15240 *strict_overflow_p = true;
15241 return true;
15242 }
15243 }
15244 break;
15245
15246 case MIN_EXPR:
15247 sub_strict_overflow_p = false;
15248 if (tree_expr_nonzero_warnv_p (op0,
15249 &sub_strict_overflow_p)
15250 && tree_expr_nonzero_warnv_p (op1,
15251 &sub_strict_overflow_p))
15252 {
15253 if (sub_strict_overflow_p)
15254 *strict_overflow_p = true;
/* MIN selects one of its operands, so when both are nonzero
the minimum is nonzero as well. */
return true;
15255 }
15256 break;
15257
15258 case MAX_EXPR:
15259 sub_strict_overflow_p = false;
15260 if (tree_expr_nonzero_warnv_p (op0,
15261 &sub_strict_overflow_p))
15262 {
15263 if (sub_strict_overflow_p)
15264 *strict_overflow_p = true;
15265
15266 /* When both operands are nonzero, MAX must be too. */
15267 if (tree_expr_nonzero_warnv_p (op1,
15268 strict_overflow_p))
15269 return true;
15270
15271 /* MAX where operand 0 is positive is positive. */
15272 return tree_expr_nonnegative_warnv_p (op0,
15273 strict_overflow_p);
15274 }
15275 /* MAX where operand 1 is positive is positive. */
15276 else if (tree_expr_nonzero_warnv_p (op1,
15277 &sub_strict_overflow_p)
15278 && tree_expr_nonnegative_warnv_p (op1,
15279 &sub_strict_overflow_p))
15280 {
15281 if (sub_strict_overflow_p)
15282 *strict_overflow_p = true;
15283 return true;
15284 }
15285 break;
15286
15287 case BIT_IOR_EXPR:
15288 return (tree_expr_nonzero_warnv_p (op1,
15289 strict_overflow_p)
15290 || tree_expr_nonzero_warnv_p (op0,
15291 strict_overflow_p));
15292
15293 default:
15294 break;
15295 }
15296
15297 return false;
15298 }
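
/* Worked example for the PLUS_EXPR case above: for 32-bit signed
   operands x >= 0 and y > 0, each operand is at most 2^31 - 1, so
   x + y is at most 2^32 - 2 and can never be congruent to zero modulo
   2^32.  The sum is therefore nonzero even if it wraps, which is why
   *STRICT_OVERFLOW_P is not set in that case.  */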
15299
15300 /* Return true when the single expression T is known to be nonzero.
15301 For floating point we further ensure that T is not denormal.
15302 Similar logic is present in nonzero_address_p in rtlanal.c.
15303
15304 If the return value is based on the assumption that signed overflow
15305 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15306 change *STRICT_OVERFLOW_P. */
15307
15308 bool
15309 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15310 {
15311 bool sub_strict_overflow_p;
15312 switch (TREE_CODE (t))
15313 {
15314 case INTEGER_CST:
15315 return !integer_zerop (t);
15316
15317 case ADDR_EXPR:
15318 {
15319 tree base = TREE_OPERAND (t, 0);
15320
15321 if (!DECL_P (base))
15322 base = get_base_address (base);
15323
15324 if (!base)
15325 return false;
15326
15327 /* For objects in the symbol table, check whether we know they are nonzero.
15328 Don't do anything for variables and functions before the symtab is built;
15329 it is quite possible that they will be declared weak later. */
15330 if (DECL_P (base) && decl_in_symtab_p (base))
15331 {
15332 struct symtab_node *symbol;
15333
15334 symbol = symtab_node::get_create (base);
15335 if (symbol)
15336 return symbol->nonzero_address ();
15337 else
15338 return false;
15339 }
15340
15341 /* Function local objects are never NULL. */
15342 if (DECL_P (base)
15343 && (DECL_CONTEXT (base)
15344 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15345 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15346 return true;
15347
15348 /* Constants are never weak. */
15349 if (CONSTANT_CLASS_P (base))
15350 return true;
15351
15352 return false;
15353 }
15354
15355 case COND_EXPR:
15356 sub_strict_overflow_p = false;
15357 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15358 &sub_strict_overflow_p)
15359 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15360 &sub_strict_overflow_p))
15361 {
15362 if (sub_strict_overflow_p)
15363 *strict_overflow_p = true;
15364 return true;
15365 }
15366 break;
15367
15368 default:
15369 break;
15370 }
15371 return false;
15372 }
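
/* Worked example for the ADDR_EXPR case above: within a function,
   &local is always nonzero because automatic variables never live at
   address zero, so "if (&local)" folds to true.  For a global the
   decision is deferred to symtab_node::nonzero_address, since a symbol
   that is later declared weak may legitimately resolve to NULL.  */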
15373
15374 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15375 attempt to fold the expression to a constant without modifying TYPE,
15376 OP0 or OP1.
15377
15378 If the expression could be simplified to a constant, then return
15379 the constant. If the expression would not be simplified to a
15380 constant, then return NULL_TREE. */
15381
15382 tree
15383 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15384 {
15385 tree tem = fold_binary (code, type, op0, op1);
15386 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15387 }
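
/* Illustrative use (a sketch, not an actual caller in GCC):

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree t = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   leaves T as the INTEGER_CST 5.  Passing a VAR_DECL for either operand
   makes the function return NULL_TREE rather than a partially
   simplified tree.  */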
15388
15389 /* Given the components of a unary expression CODE, TYPE and OP0,
15390 attempt to fold the expression to a constant without modifying
15391 TYPE or OP0.
15392
15393 If the expression could be simplified to a constant, then return
15394 the constant. If the expression would not be simplified to a
15395 constant, then return NULL_TREE. */
15396
15397 tree
15398 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15399 {
15400 tree tem = fold_unary (code, type, op0);
15401 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15402 }
15403
15404 /* If EXP represents referencing an element in a constant string
15405 (either via pointer arithmetic or array indexing), return the
15406 tree representing the value accessed, otherwise return NULL. */
15407
15408 tree
15409 fold_read_from_constant_string (tree exp)
15410 {
15411 if ((TREE_CODE (exp) == INDIRECT_REF
15412 || TREE_CODE (exp) == ARRAY_REF)
15413 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15414 {
15415 tree exp1 = TREE_OPERAND (exp, 0);
15416 tree index;
15417 tree string;
15418 location_t loc = EXPR_LOCATION (exp);
15419
15420 if (TREE_CODE (exp) == INDIRECT_REF)
15421 string = string_constant (exp1, &index);
15422 else
15423 {
15424 tree low_bound = array_ref_low_bound (exp);
15425 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15426
15427 /* Optimize the special-case of a zero lower bound.
15428
15429 We convert the low_bound to sizetype to avoid some problems
15430 with constant folding. (E.g. suppose the lower bound is 1,
15431 and its mode is QI. Without the conversion, (ARRAY
15432 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15433 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15434 if (! integer_zerop (low_bound))
15435 index = size_diffop_loc (loc, index,
15436 fold_convert_loc (loc, sizetype, low_bound));
15437
15438 string = exp1;
15439 }
15440
15441 if (string
15442 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15443 && TREE_CODE (string) == STRING_CST
15444 && TREE_CODE (index) == INTEGER_CST
15445 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15446 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15447 == MODE_INT)
15448 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15449 return build_int_cst_type (TREE_TYPE (exp),
15450 (TREE_STRING_POINTER (string)
15451 [TREE_INT_CST_LOW (index)]));
15452 }
15453 return NULL;
15454 }
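
/* Worked example: for the GENERIC tree of "abc"[1], STRING is the
   STRING_CST "abc", INDEX folds to 1, and the function returns the
   character constant 'b'.  The GET_MODE_SIZE test restricts this to
   single-byte element types, so wide strings such as L"abc" are left
   alone.  */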
15455
15456 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15457 an integer constant, real, or fixed-point constant.
15458
15459 TYPE is the type of the result. */
15460
15461 static tree
15462 fold_negate_const (tree arg0, tree type)
15463 {
15464 tree t = NULL_TREE;
15465
15466 switch (TREE_CODE (arg0))
15467 {
15468 case INTEGER_CST:
15469 {
15470 bool overflow;
15471 wide_int val = wi::neg (arg0, &overflow);
15472 t = force_fit_type (type, val, 1,
15473 (overflow | TREE_OVERFLOW (arg0))
15474 && !TYPE_UNSIGNED (type));
15475 break;
15476 }
15477
15478 case REAL_CST:
15479 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15480 break;
15481
15482 case FIXED_CST:
15483 {
15484 FIXED_VALUE_TYPE f;
15485 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15486 &(TREE_FIXED_CST (arg0)), NULL,
15487 TYPE_SATURATING (type));
15488 t = build_fixed (type, f);
15489 /* Propagate overflow flags. */
15490 if (overflow_p | TREE_OVERFLOW (arg0))
15491 TREE_OVERFLOW (t) = 1;
15492 break;
15493 }
15494
15495 default:
15496 gcc_unreachable ();
15497 }
15498
15499 return t;
15500 }
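
/* Worked example: negating the 32-bit INTEGER_CST INT_MIN wraps back
   to INT_MIN, so for a signed type force_fit_type is told to set
   TREE_OVERFLOW on the result.  For unsigned types the flag is
   deliberately suppressed, since unsigned negation is well defined.  */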
15501
15502 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15503 an integer constant or real constant.
15504
15505 TYPE is the type of the result. */
15506
15507 tree
15508 fold_abs_const (tree arg0, tree type)
15509 {
15510 tree t = NULL_TREE;
15511
15512 switch (TREE_CODE (arg0))
15513 {
15514 case INTEGER_CST:
15515 {
15516 /* If the value is unsigned or non-negative, then the absolute value
15517 is the same as the ordinary value. */
15518 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15519 t = arg0;
15520
15521 /* If the value is negative, then the absolute value is
15522 its negation. */
15523 else
15524 {
15525 bool overflow;
15526 wide_int val = wi::neg (arg0, &overflow);
15527 t = force_fit_type (type, val, -1,
15528 overflow | TREE_OVERFLOW (arg0));
15529 }
15530 }
15531 break;
15532
15533 case REAL_CST:
15534 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15535 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15536 else
15537 t = arg0;
15538 break;
15539
15540 default:
15541 gcc_unreachable ();
15542 }
15543
15544 return t;
15545 }
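
/* Worked example: fold_abs_const on INT_MIN negates it, which wraps
   back to INT_MIN, so the returned INTEGER_CST carries TREE_OVERFLOW.
   For every other integer, and for a REAL_CST (where only the sign is
   flipped), the result is exact.  */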
15546
15547 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15548 constant. TYPE is the type of the result. */
15549
15550 static tree
15551 fold_not_const (const_tree arg0, tree type)
15552 {
15553 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15554
15555 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15556 }
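
/* For example, fold_not_const on the INTEGER_CST 5 in type int yields
   -6, since ~5 == -6 in two's-complement arithmetic.  */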
15557
15558 /* Given CODE, a relational operator, the target type TYPE, and two
15559 constant operands OP0 and OP1, return the result of the
15560 relational operation. If the result is not a compile time
15561 constant, then return NULL_TREE. */
15562
15563 static tree
15564 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15565 {
15566 int result, invert;
15567
15568 /* From here on, the only cases we handle are when the result is
15569 known to be a constant. */
15570
15571 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15572 {
15573 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15574 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15575
15576 /* Handle the cases where either operand is a NaN. */
15577 if (real_isnan (c0) || real_isnan (c1))
15578 {
15579 switch (code)
15580 {
15581 case EQ_EXPR:
15582 case ORDERED_EXPR:
15583 result = 0;
15584 break;
15585
15586 case NE_EXPR:
15587 case UNORDERED_EXPR:
15588 case UNLT_EXPR:
15589 case UNLE_EXPR:
15590 case UNGT_EXPR:
15591 case UNGE_EXPR:
15592 case UNEQ_EXPR:
15593 result = 1;
15594 break;
15595
15596 case LT_EXPR:
15597 case LE_EXPR:
15598 case GT_EXPR:
15599 case GE_EXPR:
15600 case LTGT_EXPR:
15601 if (flag_trapping_math)
15602 return NULL_TREE;
15603 result = 0;
15604 break;
15605
15606 default:
15607 gcc_unreachable ();
15608 }
15609
15610 return constant_boolean_node (result, type);
15611 }
15612
15613 return constant_boolean_node (real_compare (code, c0, c1), type);
15614 }
15615
15616 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15617 {
15618 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15619 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15620 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15621 }
15622
15623 /* Handle equality/inequality of complex constants. */
15624 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15625 {
15626 tree rcond = fold_relational_const (code, type,
15627 TREE_REALPART (op0),
15628 TREE_REALPART (op1));
15629 tree icond = fold_relational_const (code, type,
15630 TREE_IMAGPART (op0),
15631 TREE_IMAGPART (op1));
15632 if (code == EQ_EXPR)
15633 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15634 else if (code == NE_EXPR)
15635 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15636 else
15637 return NULL_TREE;
15638 }
15639
15640 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15641 {
15642 unsigned count = VECTOR_CST_NELTS (op0);
15643 tree *elts = XALLOCAVEC (tree, count);
15644 gcc_assert (VECTOR_CST_NELTS (op1) == count
15645 && TYPE_VECTOR_SUBPARTS (type) == count);
15646
15647 for (unsigned i = 0; i < count; i++)
15648 {
15649 tree elem_type = TREE_TYPE (type);
15650 tree elem0 = VECTOR_CST_ELT (op0, i);
15651 tree elem1 = VECTOR_CST_ELT (op1, i);
15652
15653 tree tem = fold_relational_const (code, elem_type,
15654 elem0, elem1);
15655
15656 if (tem == NULL_TREE)
15657 return NULL_TREE;
15658
15659 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15660 }
15661
15662 return build_vector (type, elts);
15663 }
15664
15665 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15666
15667 To compute GT, swap the arguments and do LT.
15668 To compute GE, do LT and invert the result.
15669 To compute LE, swap the arguments, do LT and invert the result.
15670 To compute NE, do EQ and invert the result.
15671
15672 Therefore, the code below must handle only EQ and LT. */
15673
15674 if (code == LE_EXPR || code == GT_EXPR)
15675 {
15676 tree tem = op0;
15677 op0 = op1;
15678 op1 = tem;
15679 code = swap_tree_comparison (code);
15680 }
15681
15682 /* Note that it is safe to invert for real values here because we
15683 have already handled the one case where it matters. */
15684
15685 invert = 0;
15686 if (code == NE_EXPR || code == GE_EXPR)
15687 {
15688 invert = 1;
15689 code = invert_tree_comparison (code, false);
15690 }
15691
15692 /* Compute a result for LT or EQ if args permit;
15693 otherwise return NULL_TREE. */
15694 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15695 {
15696 if (code == EQ_EXPR)
15697 result = tree_int_cst_equal (op0, op1);
15698 else
15699 result = tree_int_cst_lt (op0, op1);
15700 }
15701 else
15702 return NULL_TREE;
15703
15704 if (invert)
15705 result ^= 1;
15706 return constant_boolean_node (result, type);
15707 }
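
/* Worked example for the NaN handling above: with C0 a NaN, C0 == C0
   folds to false and C0 != C0 folds to true, while C0 < 1.0 is left
   unfolded under -ftrapping-math because the ordered comparison would
   raise an invalid-operation exception at run time.  */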
15708
15709 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15710 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15711 itself. */
15712
15713 tree
15714 fold_build_cleanup_point_expr (tree type, tree expr)
15715 {
15716 /* If the expression does not have side effects then we don't have to wrap
15717 it with a cleanup point expression. */
15718 if (!TREE_SIDE_EFFECTS (expr))
15719 return expr;
15720
15721 /* If the expression is a return, check whether the expression inside the
15722 return, or the right-hand side of the modify expression inside the
15723 return, has no side effects. If either has none, we don't need to
15724 wrap the expression in a cleanup point expression. Note we don't check
15725 the left-hand side of the modify because it should always be a return decl. */
15726 if (TREE_CODE (expr) == RETURN_EXPR)
15727 {
15728 tree op = TREE_OPERAND (expr, 0);
15729 if (!op || !TREE_SIDE_EFFECTS (op))
15730 return expr;
15731 op = TREE_OPERAND (op, 1);
15732 if (!TREE_SIDE_EFFECTS (op))
15733 return expr;
15734 }
15735
15736 return build1 (CLEANUP_POINT_EXPR, type, expr);
15737 }
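
/* For example, "return x" needs no wrapping because the returned value
   has no side effects, whereas "return f ()" is wrapped: the call on
   the right-hand side of the implied MODIFY_EXPR may create temporaries
   whose cleanups must run at the end of the full-expression.  */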
15738
15739 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15740 of an indirection through OP0, or NULL_TREE if no simplification is
15741 possible. */
15742
15743 tree
15744 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15745 {
15746 tree sub = op0;
15747 tree subtype;
15748
15749 STRIP_NOPS (sub);
15750 subtype = TREE_TYPE (sub);
15751 if (!POINTER_TYPE_P (subtype))
15752 return NULL_TREE;
15753
15754 if (TREE_CODE (sub) == ADDR_EXPR)
15755 {
15756 tree op = TREE_OPERAND (sub, 0);
15757 tree optype = TREE_TYPE (op);
15758 /* *&CONST_DECL -> to the value of the const decl. */
15759 if (TREE_CODE (op) == CONST_DECL)
15760 return DECL_INITIAL (op);
15761 /* *&p => p; make sure to handle *&"str"[cst] here. */
15762 if (type == optype)
15763 {
15764 tree fop = fold_read_from_constant_string (op);
15765 if (fop)
15766 return fop;
15767 else
15768 return op;
15769 }
15770 /* *(foo *)&fooarray => fooarray[0] */
15771 else if (TREE_CODE (optype) == ARRAY_TYPE
15772 && type == TREE_TYPE (optype)
15773 && (!in_gimple_form
15774 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15775 {
15776 tree type_domain = TYPE_DOMAIN (optype);
15777 tree min_val = size_zero_node;
15778 if (type_domain && TYPE_MIN_VALUE (type_domain))
15779 min_val = TYPE_MIN_VALUE (type_domain);
15780 if (in_gimple_form
15781 && TREE_CODE (min_val) != INTEGER_CST)
15782 return NULL_TREE;
15783 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15784 NULL_TREE, NULL_TREE);
15785 }
15786 /* *(foo *)&complexfoo => __real__ complexfoo */
15787 else if (TREE_CODE (optype) == COMPLEX_TYPE
15788 && type == TREE_TYPE (optype))
15789 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15790 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15791 else if (TREE_CODE (optype) == VECTOR_TYPE
15792 && type == TREE_TYPE (optype))
15793 {
15794 tree part_width = TYPE_SIZE (type);
15795 tree index = bitsize_int (0);
15796 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15797 }
15798 }
15799
15800 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15801 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15802 {
15803 tree op00 = TREE_OPERAND (sub, 0);
15804 tree op01 = TREE_OPERAND (sub, 1);
15805
15806 STRIP_NOPS (op00);
15807 if (TREE_CODE (op00) == ADDR_EXPR)
15808 {
15809 tree op00type;
15810 op00 = TREE_OPERAND (op00, 0);
15811 op00type = TREE_TYPE (op00);
15812
15813 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15814 if (TREE_CODE (op00type) == VECTOR_TYPE
15815 && type == TREE_TYPE (op00type))
15816 {
15817 HOST_WIDE_INT offset = tree_to_shwi (op01);
15818 tree part_width = TYPE_SIZE (type);
15819 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
15820 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15821 tree index = bitsize_int (indexi);
15822
15823 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15824 return fold_build3_loc (loc,
15825 BIT_FIELD_REF, type, op00,
15826 part_width, index);
15827
15828 }
15829 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15830 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15831 && type == TREE_TYPE (op00type))
15832 {
15833 tree size = TYPE_SIZE_UNIT (type);
15834 if (tree_int_cst_equal (size, op01))
15835 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15836 }
15837 /* ((foo *)&fooarray)[1] => fooarray[1] */
15838 else if (TREE_CODE (op00type) == ARRAY_TYPE
15839 && type == TREE_TYPE (op00type))
15840 {
15841 tree type_domain = TYPE_DOMAIN (op00type);
15842 tree min_val = size_zero_node;
15843 if (type_domain && TYPE_MIN_VALUE (type_domain))
15844 min_val = TYPE_MIN_VALUE (type_domain);
15845 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15846 TYPE_SIZE_UNIT (type));
15847 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15848 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15849 NULL_TREE, NULL_TREE);
15850 }
15851 }
15852 }
15853
15854 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15855 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15856 && type == TREE_TYPE (TREE_TYPE (subtype))
15857 && (!in_gimple_form
15858 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15859 {
15860 tree type_domain;
15861 tree min_val = size_zero_node;
15862 sub = build_fold_indirect_ref_loc (loc, sub);
15863 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15864 if (type_domain && TYPE_MIN_VALUE (type_domain))
15865 min_val = TYPE_MIN_VALUE (type_domain);
15866 if (in_gimple_form
15867 && TREE_CODE (min_val) != INTEGER_CST)
15868 return NULL_TREE;
15869 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15870 NULL_TREE);
15871 }
15872
15873 return NULL_TREE;
15874 }
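
/* Worked examples: given "int a[4]", the tree for *(int *)&a simplifies
   to a[0] through the ARRAY_TYPE case above, and given
   "_Complex double z", the tree for ((double *)&z)[1] simplifies to
   __imag__ z through the COMPLEX_TYPE case, because the constant byte
   offset equals TYPE_SIZE_UNIT (double).  */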
15875
15876 /* Builds an expression for an indirection through T, simplifying some
15877 cases. */
15878
15879 tree
15880 build_fold_indirect_ref_loc (location_t loc, tree t)
15881 {
15882 tree type = TREE_TYPE (TREE_TYPE (t));
15883 tree sub = fold_indirect_ref_1 (loc, type, t);
15884
15885 if (sub)
15886 return sub;
15887
15888 return build1_loc (loc, INDIRECT_REF, type, t);
15889 }
15890
15891 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15892
15893 tree
15894 fold_indirect_ref_loc (location_t loc, tree t)
15895 {
15896 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15897
15898 if (sub)
15899 return sub;
15900 else
15901 return t;
15902 }
15903
15904 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15905 whose result is ignored. The type of the returned tree need not be
15906 the same as the original expression. */
15907
15908 tree
15909 fold_ignored_result (tree t)
15910 {
15911 if (!TREE_SIDE_EFFECTS (t))
15912 return integer_zero_node;
15913
15914 for (;;)
15915 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15916 {
15917 case tcc_unary:
15918 t = TREE_OPERAND (t, 0);
15919 break;
15920
15921 case tcc_binary:
15922 case tcc_comparison:
15923 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15924 t = TREE_OPERAND (t, 0);
15925 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15926 t = TREE_OPERAND (t, 1);
15927 else
15928 return t;
15929 break;
15930
15931 case tcc_expression:
15932 switch (TREE_CODE (t))
15933 {
15934 case COMPOUND_EXPR:
15935 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15936 return t;
15937 t = TREE_OPERAND (t, 0);
15938 break;
15939
15940 case COND_EXPR:
15941 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15942 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15943 return t;
15944 t = TREE_OPERAND (t, 0);
15945 break;
15946
15947 default:
15948 return t;
15949 }
15950 break;
15951
15952 default:
15953 return t;
15954 }
15955 }
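
/* Worked example: for an ignored expression "f () + 3" the addition is
   useless, so only "f ()" is kept; an ignored expression with no side
   effects at all collapses to integer_zero_node.  */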
15956
15957 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15958
15959 tree
15960 round_up_loc (location_t loc, tree value, unsigned int divisor)
15961 {
15962 tree div = NULL_TREE;
15963
15964 if (divisor == 1)
15965 return value;
15966
15967 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15968 have to do anything. Only do this when VALUE is not a constant;
15969 for a constant, this check is more expensive than simply
15970 performing the rounding. */
15971 if (TREE_CODE (value) != INTEGER_CST)
15972 {
15973 div = build_int_cst (TREE_TYPE (value), divisor);
15974
15975 if (multiple_of_p (TREE_TYPE (value), value, div))
15976 return value;
15977 }
15978
15979 /* If divisor is a power of two, simplify this to bit manipulation. */
15980 if (divisor == (divisor & -divisor))
15981 {
15982 if (TREE_CODE (value) == INTEGER_CST)
15983 {
15984 wide_int val = value;
15985 bool overflow_p;
15986
15987 if ((val & (divisor - 1)) == 0)
15988 return value;
15989
15990 overflow_p = TREE_OVERFLOW (value);
15991 val += divisor - 1;
15992 val &= - (int) divisor;
15993 if (val == 0)
15994 overflow_p = true;
15995
15996 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15997 }
15998 else
15999 {
16000 tree t;
16001
16002 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16003 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16004 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16005 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16006 }
16007 }
16008 else
16009 {
16010 if (!div)
16011 div = build_int_cst (TREE_TYPE (value), divisor);
16012 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16013 value = size_binop_loc (loc, MULT_EXPR, value, div);
16014 }
16015
16016 return value;
16017 }
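
/* Worked example: rounding the constant 21 up to a multiple of 8 takes
   the power-of-two path and computes (21 + 7) & -8 == 24; a divisor
   such as 6 goes through CEIL_DIV_EXPR and MULT_EXPR instead, giving
   ceil (21 / 6) * 6 == 24.  */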
16018
16019 /* Likewise, but round down. */
16020
16021 tree
16022 round_down_loc (location_t loc, tree value, int divisor)
16023 {
16024 tree div = NULL_TREE;
16025
16026 gcc_assert (divisor > 0);
16027 if (divisor == 1)
16028 return value;
16029
16030 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16031 have to do anything. Only do this when VALUE is not a constant;
16032 for a constant, this check is more expensive than simply
16033 performing the rounding. */
16034 if (TREE_CODE (value) != INTEGER_CST)
16035 {
16036 div = build_int_cst (TREE_TYPE (value), divisor);
16037
16038 if (multiple_of_p (TREE_TYPE (value), value, div))
16039 return value;
16040 }
16041
16042 /* If divisor is a power of two, simplify this to bit manipulation. */
16043 if (divisor == (divisor & -divisor))
16044 {
16045 tree t;
16046
16047 t = build_int_cst (TREE_TYPE (value), -divisor);
16048 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16049 }
16050 else
16051 {
16052 if (!div)
16053 div = build_int_cst (TREE_TYPE (value), divisor);
16054 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16055 value = size_binop_loc (loc, MULT_EXPR, value, div);
16056 }
16057
16058 return value;
16059 }
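
/* Worked example: rounding 21 down to a multiple of 8 computes
   21 & -8 == 16, while a non-power-of-two divisor such as 6 goes
   through FLOOR_DIV_EXPR and MULT_EXPR, giving (21 / 6) * 6 == 18.  */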
16060
16061 /* Returns a pointer to the base of the object addressed by EXP and
16062 extracts the offset of the access, storing the constant bit offset
16063 in *PBITPOS and any variable offset in *POFFSET. */
16064
16065 static tree
16066 split_address_to_core_and_offset (tree exp,
16067 HOST_WIDE_INT *pbitpos, tree *poffset)
16068 {
16069 tree core;
16070 machine_mode mode;
16071 int unsignedp, volatilep;
16072 HOST_WIDE_INT bitsize;
16073 location_t loc = EXPR_LOCATION (exp);
16074
16075 if (TREE_CODE (exp) == ADDR_EXPR)
16076 {
16077 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16078 poffset, &mode, &unsignedp, &volatilep,
16079 false);
16080 core = build_fold_addr_expr_loc (loc, core);
16081 }
16082 else
16083 {
16084 core = exp;
16085 *pbitpos = 0;
16086 *poffset = NULL_TREE;
16087 }
16088
16089 return core;
16090 }
16091
16092 /* Returns true if addresses of E1 and E2 differ by a constant, false
16093 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16094
16095 bool
16096 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16097 {
16098 tree core1, core2;
16099 HOST_WIDE_INT bitpos1, bitpos2;
16100 tree toffset1, toffset2, tdiff, type;
16101
16102 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16103 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16104
16105 if (bitpos1 % BITS_PER_UNIT != 0
16106 || bitpos2 % BITS_PER_UNIT != 0
16107 || !operand_equal_p (core1, core2, 0))
16108 return false;
16109
16110 if (toffset1 && toffset2)
16111 {
16112 type = TREE_TYPE (toffset1);
16113 if (type != TREE_TYPE (toffset2))
16114 toffset2 = fold_convert (type, toffset2);
16115
16116 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16117 if (!cst_and_fits_in_hwi (tdiff))
16118 return false;
16119
16120 *diff = int_cst_value (tdiff);
16121 }
16122 else if (toffset1 || toffset2)
16123 {
16124 /* If only one of the offsets is non-constant, the difference cannot
16125 be a constant. */
16126 return false;
16127 }
16128 else
16129 *diff = 0;
16130
16131 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16132 return true;
16133 }
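
/* Worked example: for "int a[10]", the addresses &a[7] and &a[2] share
   the core &a and differ only by constant offsets, so *DIFF is set to
   5 * sizeof (int) == 20 bytes and the function returns true.
   Comparing &a[i] with &a[2] fails because only one side has a
   non-constant offset.  */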
16134
16135 /* Simplify the floating point expression EXP when the sign of the
16136 result is not significant. Return NULL_TREE if no simplification
16137 is possible. */
16138
16139 tree
16140 fold_strip_sign_ops (tree exp)
16141 {
16142 tree arg0, arg1;
16143 location_t loc = EXPR_LOCATION (exp);
16144
16145 switch (TREE_CODE (exp))
16146 {
16147 case ABS_EXPR:
16148 case NEGATE_EXPR:
16149 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16150 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16151
16152 case MULT_EXPR:
16153 case RDIV_EXPR:
16154 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16155 return NULL_TREE;
16156 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16157 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16158 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16159 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16160 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16161 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16162 break;
16163
16164 case COMPOUND_EXPR:
16165 arg0 = TREE_OPERAND (exp, 0);
16166 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16167 if (arg1)
16168 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16169 break;
16170
16171 case COND_EXPR:
16172 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16173 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16174 if (arg0 || arg1)
16175 return fold_build3_loc (loc,
16176 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16177 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16178 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16179 break;
16180
16181 case CALL_EXPR:
16182 {
16183 const enum built_in_function fcode = builtin_mathfn_code (exp);
16184 switch (fcode)
16185 {
16186 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16187 /* Strip copysign function call, return the 1st argument. */
16188 arg0 = CALL_EXPR_ARG (exp, 0);
16189 arg1 = CALL_EXPR_ARG (exp, 1);
16190 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16191
16192 default:
16193 /* Strip sign ops from the argument of "odd" math functions. */
16194 if (negate_mathfn_p (fcode))
16195 {
16196 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16197 if (arg0)
16198 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16199 }
16200 break;
16201 }
16202 }
16203 break;
16204
16205 default:
16206 break;
16207 }
16208 return NULL_TREE;
16209 }
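
/* Worked example: when only the magnitude of the result matters, as in
   the argument of fabs, x * -y can be rewritten as x * y (provided
   sign-dependent rounding is not honored), and the COPYSIGN case
   reduces __builtin_copysign (x, y) to x while keeping any side
   effects of y via omit_one_operand_loc.  */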
16210
16211 /* Return OFF converted to a pointer offset type suitable as offset for
16212 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16213 tree
16214 convert_to_ptrofftype_loc (location_t loc, tree off)
16215 {
16216 return fold_convert_loc (loc, sizetype, off);
16217 }
16218
16219 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16220 tree
16221 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16222 {
16223 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16224 ptr, convert_to_ptrofftype_loc (loc, off));
16225 }
16226
16227 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16228 tree
16229 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16230 {
16231 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16232 ptr, size_int (off));
16233 }