gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
  24    @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
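
/* For illustration only (not part of the original file), a caller
   building a size calculation might write:

     tree four = size_int (4);
     tree eight = size_int (8);
     tree twelve = size_binop (PLUS_EXPR, four, eight);

   where all three trees have type `sizetype' and the addition is
   folded to the constant 12 at compile time.  */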
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "predict.h"
48 #include "tree.h"
49 #include "gimple.h"
50 #include "rtl.h"
51 #include "flags.h"
52 #include "alias.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
55 #include "calls.h"
56 #include "tree-iterator.h"
57 #include "realmpfr.h"
58 #include "insn-config.h"
59 #include "expmed.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "emit-rtl.h"
63 #include "varasm.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "tm_p.h"
67 #include "target.h"
68 #include "diagnostic-core.h"
69 #include "intl.h"
70 #include "langhooks.h"
71 #include "md5.h"
72 #include "internal-fn.h"
73 #include "tree-eh.h"
74 #include "gimplify.h"
75 #include "tree-dfa.h"
76 #include "builtins.h"
77 #include "cgraph.h"
78 #include "generic-match.h"
79 #include "optabs.h"
80
81 #ifndef LOAD_EXTEND_OP
82 #define LOAD_EXTEND_OP(M) UNKNOWN
83 #endif
84
85 /* Nonzero if we are folding constants inside an initializer; zero
86 otherwise. */
87 int folding_initializer = 0;
88
  89 /* The following constants represent a bit-based encoding of GCC's
90 comparison operators. This encoding simplifies transformations
91 on relational comparison operators, such as AND and OR. */
92 enum comparison_code {
93 COMPCODE_FALSE = 0,
94 COMPCODE_LT = 1,
95 COMPCODE_EQ = 2,
96 COMPCODE_LE = 3,
97 COMPCODE_GT = 4,
98 COMPCODE_LTGT = 5,
99 COMPCODE_GE = 6,
100 COMPCODE_ORD = 7,
101 COMPCODE_UNORD = 8,
102 COMPCODE_UNLT = 9,
103 COMPCODE_UNEQ = 10,
104 COMPCODE_UNLE = 11,
105 COMPCODE_UNGT = 12,
106 COMPCODE_NE = 13,
107 COMPCODE_UNGE = 14,
108 COMPCODE_TRUE = 15
109 };
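
/* A worked example of the encoding (illustrative, not in the original
   source): bit 0 means "less", bit 1 "equal", bit 2 "greater" and
   bit 3 "unordered", so ANDing and ORing relational comparisons is
   just bitwise arithmetic on their codes:

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ
     COMPCODE_LT | COMPCODE_GT == 1 | 4 == 5 == COMPCODE_LTGT  */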
110
111 static bool negate_mathfn_p (enum built_in_function);
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
116 static enum comparison_code comparison_to_compcode (enum tree_code);
117 static enum tree_code compcode_to_comparison (enum comparison_code);
118 static int operand_equal_for_comparison_p (tree, tree, tree);
119 static int twoval_comparison_p (tree, tree *, tree *, int *);
120 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
121 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
122 static tree make_bit_field_ref (location_t, tree, tree,
123 HOST_WIDE_INT, HOST_WIDE_INT, int);
124 static tree optimize_bit_field_compare (location_t, enum tree_code,
125 tree, tree, tree);
126 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
127 HOST_WIDE_INT *,
128 machine_mode *, int *, int *,
129 tree *, tree *);
130 static int simple_operand_p (const_tree);
131 static bool simple_operand_p_2 (tree);
132 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
133 static tree range_predecessor (tree);
134 static tree range_successor (tree);
135 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
136 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
137 static tree unextend (tree, int, int, tree);
138 static tree optimize_minmax_comparison (location_t, enum tree_code,
139 tree, tree, tree);
140 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
141 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
142 static tree fold_binary_op_with_conditional_arg (location_t,
143 enum tree_code, tree,
144 tree, tree,
145 tree, tree, int);
146 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
147 static bool reorder_operands_p (const_tree, const_tree);
148 static tree fold_negate_const (tree, tree);
149 static tree fold_not_const (const_tree, tree);
150 static tree fold_relational_const (enum tree_code, tree, tree, tree);
151 static tree fold_convert_const (enum tree_code, tree, tree);
152 static tree fold_view_convert_expr (tree, tree);
153 static bool vec_cst_ctor_to_array (tree, tree *);
154
155
156 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
157 Otherwise, return LOC. */
158
159 static location_t
160 expr_location_or (tree t, location_t loc)
161 {
162 location_t tloc = EXPR_LOCATION (t);
163 return tloc == UNKNOWN_LOCATION ? loc : tloc;
164 }
165
 166 /* Similar to protected_set_expr_location, but never modifies X in place;
 167    if the location can and needs to be set, X is unshared first.  */
168
169 static inline tree
170 protected_set_expr_location_unshare (tree x, location_t loc)
171 {
172 if (CAN_HAVE_LOCATION_P (x)
173 && EXPR_LOCATION (x) != loc
174 && !(TREE_CODE (x) == SAVE_EXPR
175 || TREE_CODE (x) == TARGET_EXPR
176 || TREE_CODE (x) == BIND_EXPR))
177 {
178 x = copy_node (x);
179 SET_EXPR_LOCATION (x, loc);
180 }
181 return x;
182 }
183 \f
184 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
185 division and returns the quotient. Otherwise returns
186 NULL_TREE. */
187
188 tree
189 div_if_zero_remainder (const_tree arg1, const_tree arg2)
190 {
191 widest_int quo;
192
193 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
194 SIGNED, &quo))
195 return wide_int_to_tree (TREE_TYPE (arg1), quo);
196
197 return NULL_TREE;
198 }
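
/* For example (illustrative only): with INTEGER_CST arguments 12 and 4
   this returns the INTEGER_CST 3; with 13 and 4 the remainder is
   nonzero and NULL_TREE is returned.  */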
199 \f
200 /* This is nonzero if we should defer warnings about undefined
201 overflow. This facility exists because these warnings are a
202 special case. The code to estimate loop iterations does not want
203 to issue any warnings, since it works with expressions which do not
204 occur in user code. Various bits of cleanup code call fold(), but
205 only use the result if it has certain characteristics (e.g., is a
206 constant); that code only wants to issue a warning if the result is
207 used. */
208
209 static int fold_deferring_overflow_warnings;
210
211 /* If a warning about undefined overflow is deferred, this is the
212 warning. Note that this may cause us to turn two warnings into
213 one, but that is fine since it is sufficient to only give one
214 warning per expression. */
215
216 static const char* fold_deferred_overflow_warning;
217
218 /* If a warning about undefined overflow is deferred, this is the
219 level at which the warning should be emitted. */
220
221 static enum warn_strict_overflow_code fold_deferred_overflow_code;
222
223 /* Start deferring overflow warnings. We could use a stack here to
224 permit nested calls, but at present it is not necessary. */
225
226 void
227 fold_defer_overflow_warnings (void)
228 {
229 ++fold_deferring_overflow_warnings;
230 }
231
232 /* Stop deferring overflow warnings. If there is a pending warning,
233 and ISSUE is true, then issue the warning if appropriate. STMT is
234 the statement with which the warning should be associated (used for
235 location information); STMT may be NULL. CODE is the level of the
236 warning--a warn_strict_overflow_code value. This function will use
237 the smaller of CODE and the deferred code when deciding whether to
 238    issue the warning.  CODE may be zero, in which case the deferred
 239    code is always used.  */
240
241 void
242 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
243 {
244 const char *warnmsg;
245 location_t locus;
246
247 gcc_assert (fold_deferring_overflow_warnings > 0);
248 --fold_deferring_overflow_warnings;
249 if (fold_deferring_overflow_warnings > 0)
250 {
251 if (fold_deferred_overflow_warning != NULL
252 && code != 0
253 && code < (int) fold_deferred_overflow_code)
254 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
255 return;
256 }
257
258 warnmsg = fold_deferred_overflow_warning;
259 fold_deferred_overflow_warning = NULL;
260
261 if (!issue || warnmsg == NULL)
262 return;
263
264 if (gimple_no_warning_p (stmt))
265 return;
266
267 /* Use the smallest code level when deciding to issue the
268 warning. */
269 if (code == 0 || code > (int) fold_deferred_overflow_code)
270 code = fold_deferred_overflow_code;
271
272 if (!issue_strict_overflow_warning (code))
273 return;
274
275 if (stmt == NULL)
276 locus = input_location;
277 else
278 locus = gimple_location (stmt);
279 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
280 }
281
282 /* Stop deferring overflow warnings, ignoring any deferred
283 warnings. */
284
285 void
286 fold_undefer_and_ignore_overflow_warnings (void)
287 {
288 fold_undefer_overflow_warnings (false, NULL, 0);
289 }
290
291 /* Whether we are deferring overflow warnings. */
292
293 bool
294 fold_deferring_overflow_warnings_p (void)
295 {
296 return fold_deferring_overflow_warnings > 0;
297 }
298
299 /* This is called when we fold something based on the fact that signed
300 overflow is undefined. */
301
302 static void
303 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
304 {
305 if (fold_deferring_overflow_warnings > 0)
306 {
307 if (fold_deferred_overflow_warning == NULL
308 || wc < fold_deferred_overflow_code)
309 {
310 fold_deferred_overflow_warning = gmsgid;
311 fold_deferred_overflow_code = wc;
312 }
313 }
314 else if (issue_strict_overflow_warning (wc))
315 warning (OPT_Wstrict_overflow, gmsgid);
316 }
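
/* A sketch of the intended usage (assumed, not from this file), where
   EXPR is some tree and STMT the associated statement or NULL:

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     fold_undefer_overflow_warnings (TREE_CODE (t) == INTEGER_CST,
                                     stmt, 0);

   i.e. the deferred -Wstrict-overflow warning is only issued if the
   folded result turned out to be a constant that will be used.  */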
317 \f
318 /* Return true if the built-in mathematical function specified by CODE
319 is odd, i.e. -f(x) == f(-x). */
320
321 static bool
322 negate_mathfn_p (enum built_in_function code)
323 {
324 switch (code)
325 {
326 CASE_FLT_FN (BUILT_IN_ASIN):
327 CASE_FLT_FN (BUILT_IN_ASINH):
328 CASE_FLT_FN (BUILT_IN_ATAN):
329 CASE_FLT_FN (BUILT_IN_ATANH):
330 CASE_FLT_FN (BUILT_IN_CASIN):
331 CASE_FLT_FN (BUILT_IN_CASINH):
332 CASE_FLT_FN (BUILT_IN_CATAN):
333 CASE_FLT_FN (BUILT_IN_CATANH):
334 CASE_FLT_FN (BUILT_IN_CBRT):
335 CASE_FLT_FN (BUILT_IN_CPROJ):
336 CASE_FLT_FN (BUILT_IN_CSIN):
337 CASE_FLT_FN (BUILT_IN_CSINH):
338 CASE_FLT_FN (BUILT_IN_CTAN):
339 CASE_FLT_FN (BUILT_IN_CTANH):
340 CASE_FLT_FN (BUILT_IN_ERF):
341 CASE_FLT_FN (BUILT_IN_LLROUND):
342 CASE_FLT_FN (BUILT_IN_LROUND):
343 CASE_FLT_FN (BUILT_IN_ROUND):
344 CASE_FLT_FN (BUILT_IN_SIN):
345 CASE_FLT_FN (BUILT_IN_SINH):
346 CASE_FLT_FN (BUILT_IN_TAN):
347 CASE_FLT_FN (BUILT_IN_TANH):
348 CASE_FLT_FN (BUILT_IN_TRUNC):
349 return true;
350
351 CASE_FLT_FN (BUILT_IN_LLRINT):
352 CASE_FLT_FN (BUILT_IN_LRINT):
353 CASE_FLT_FN (BUILT_IN_NEARBYINT):
354 CASE_FLT_FN (BUILT_IN_RINT):
355 return !flag_rounding_math;
356
357 default:
358 break;
359 }
360 return false;
361 }
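
/* For example, sin qualifies because -sin(x) == sin(-x) for all x,
   whereas cos is even (cos(-x) == cos(x)) and is deliberately absent
   from the list above.  */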
362
363 /* Check whether we may negate an integer constant T without causing
364 overflow. */
365
366 bool
367 may_negate_without_overflow_p (const_tree t)
368 {
369 tree type;
370
371 gcc_assert (TREE_CODE (t) == INTEGER_CST);
372
373 type = TREE_TYPE (t);
374 if (TYPE_UNSIGNED (type))
375 return false;
376
377 return !wi::only_sign_bit_p (t);
378 }
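
/* For example, in a signed 8-bit type the only constant this rejects
   is -128 (bit pattern 0x80, just the sign bit set), since
   -(-128) == 128 does not fit; every other value negates safely.  */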
379
380 /* Determine whether an expression T can be cheaply negated using
381 the function negate_expr without introducing undefined overflow. */
382
383 static bool
384 negate_expr_p (tree t)
385 {
386 tree type;
387
388 if (t == 0)
389 return false;
390
391 type = TREE_TYPE (t);
392
393 STRIP_SIGN_NOPS (t);
394 switch (TREE_CODE (t))
395 {
396 case INTEGER_CST:
397 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
398 return true;
399
400 /* Check that -CST will not overflow type. */
401 return may_negate_without_overflow_p (t);
402 case BIT_NOT_EXPR:
403 return (INTEGRAL_TYPE_P (type)
404 && TYPE_OVERFLOW_WRAPS (type));
405
406 case FIXED_CST:
407 return true;
408
409 case NEGATE_EXPR:
410 return !TYPE_OVERFLOW_SANITIZED (type);
411
412 case REAL_CST:
413 /* We want to canonicalize to positive real constants. Pretend
414 that only negative ones can be easily negated. */
415 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
416
417 case COMPLEX_CST:
418 return negate_expr_p (TREE_REALPART (t))
419 && negate_expr_p (TREE_IMAGPART (t));
420
421 case VECTOR_CST:
422 {
423 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
424 return true;
425
426 int count = TYPE_VECTOR_SUBPARTS (type), i;
427
428 for (i = 0; i < count; i++)
429 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
430 return false;
431
432 return true;
433 }
434
435 case COMPLEX_EXPR:
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
438
439 case CONJ_EXPR:
440 return negate_expr_p (TREE_OPERAND (t, 0));
441
442 case PLUS_EXPR:
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
444 || HONOR_SIGNED_ZEROS (element_mode (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && reorder_operands_p (TREE_OPERAND (t, 0),
459 TREE_OPERAND (t, 1));
460
461 case MULT_EXPR:
462 if (TYPE_UNSIGNED (TREE_TYPE (t)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 /* In general we can't negate A / B, because if A is INT_MIN and
477 B is 1, we may turn this into INT_MIN / -1 which is undefined
 478    and actually traps on some architectures.  But if overflow is
 479    undefined, we can negate: -(INT_MIN / 1) would itself be an
 480    overflow, which we may assume does not happen.  */
481 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
482 {
483 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
484 break;
 485 	  /* If overflow is undefined then we have to be careful because
 486 	     we ask whether it's ok to associate the negate with the
 487 	     division, which is not ok e.g. for -((a - b) / c): there
 488 	     (-(a - b)) / c may invoke undefined overflow by negating
 489 	     INT_MIN.  So do not use negate_expr_p here but open-code
 490 	     the two important cases.  */
491 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
492 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
493 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
494 return true;
495 }
496 else if (negate_expr_p (TREE_OPERAND (t, 0)))
497 return true;
498 return negate_expr_p (TREE_OPERAND (t, 1));
499
500 case NOP_EXPR:
501 /* Negate -((double)float) as (double)(-float). */
502 if (TREE_CODE (type) == REAL_TYPE)
503 {
504 tree tem = strip_float_extensions (t);
505 if (tem != t)
506 return negate_expr_p (tem);
507 }
508 break;
509
510 case CALL_EXPR:
511 /* Negate -f(x) as f(-x). */
512 if (negate_mathfn_p (builtin_mathfn_code (t)))
513 return negate_expr_p (CALL_EXPR_ARG (t, 0));
514 break;
515
516 case RSHIFT_EXPR:
517 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
518 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
519 {
520 tree op1 = TREE_OPERAND (t, 1);
521 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
522 return true;
523 }
524 break;
525
526 default:
527 break;
528 }
529 return false;
530 }
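
/* Illustrative examples (not from the original source): for signed int
   A, negate_expr_p holds for A + 5, since -(A + 5) can be rewritten as
   (-5) - A without introducing overflow, but it fails for an unsigned
   product A * B, which the MULT_EXPR case above rejects.  */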
531
 532 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
 533    no simplification is possible.
534 If negate_expr_p would return true for T, NULL_TREE will never be
535 returned. */
536
537 static tree
538 fold_negate_expr (location_t loc, tree t)
539 {
540 tree type = TREE_TYPE (t);
541 tree tem;
542
543 switch (TREE_CODE (t))
544 {
545 /* Convert - (~A) to A + 1. */
546 case BIT_NOT_EXPR:
547 if (INTEGRAL_TYPE_P (type))
548 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
549 build_one_cst (type));
550 break;
551
552 case INTEGER_CST:
553 tem = fold_negate_const (t, type);
554 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
555 || (ANY_INTEGRAL_TYPE_P (type)
556 && !TYPE_OVERFLOW_TRAPS (type)
557 && TYPE_OVERFLOW_WRAPS (type))
558 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
559 return tem;
560 break;
561
562 case REAL_CST:
563 tem = fold_negate_const (t, type);
564 return tem;
565
566 case FIXED_CST:
567 tem = fold_negate_const (t, type);
568 return tem;
569
570 case COMPLEX_CST:
571 {
572 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
573 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
574 if (rpart && ipart)
575 return build_complex (type, rpart, ipart);
576 }
577 break;
578
579 case VECTOR_CST:
580 {
581 int count = TYPE_VECTOR_SUBPARTS (type), i;
582 tree *elts = XALLOCAVEC (tree, count);
583
584 for (i = 0; i < count; i++)
585 {
586 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
587 if (elts[i] == NULL_TREE)
588 return NULL_TREE;
589 }
590
591 return build_vector (type, elts);
592 }
593
594 case COMPLEX_EXPR:
595 if (negate_expr_p (t))
596 return fold_build2_loc (loc, COMPLEX_EXPR, type,
597 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
598 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
599 break;
600
601 case CONJ_EXPR:
602 if (negate_expr_p (t))
603 return fold_build1_loc (loc, CONJ_EXPR, type,
604 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
605 break;
606
607 case NEGATE_EXPR:
608 if (!TYPE_OVERFLOW_SANITIZED (type))
609 return TREE_OPERAND (t, 0);
610 break;
611
612 case PLUS_EXPR:
613 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
614 && !HONOR_SIGNED_ZEROS (element_mode (type)))
615 {
616 /* -(A + B) -> (-B) - A. */
617 if (negate_expr_p (TREE_OPERAND (t, 1))
618 && reorder_operands_p (TREE_OPERAND (t, 0),
619 TREE_OPERAND (t, 1)))
620 {
621 tem = negate_expr (TREE_OPERAND (t, 1));
622 return fold_build2_loc (loc, MINUS_EXPR, type,
623 tem, TREE_OPERAND (t, 0));
624 }
625
626 /* -(A + B) -> (-A) - B. */
627 if (negate_expr_p (TREE_OPERAND (t, 0)))
628 {
629 tem = negate_expr (TREE_OPERAND (t, 0));
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 tem, TREE_OPERAND (t, 1));
632 }
633 }
634 break;
635
636 case MINUS_EXPR:
637 /* - (A - B) -> B - A */
638 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
639 && !HONOR_SIGNED_ZEROS (element_mode (type))
640 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
641 return fold_build2_loc (loc, MINUS_EXPR, type,
642 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
643 break;
644
645 case MULT_EXPR:
646 if (TYPE_UNSIGNED (type))
647 break;
648
649 /* Fall through. */
650
651 case RDIV_EXPR:
652 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
653 {
654 tem = TREE_OPERAND (t, 1);
655 if (negate_expr_p (tem))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 TREE_OPERAND (t, 0), negate_expr (tem));
658 tem = TREE_OPERAND (t, 0);
659 if (negate_expr_p (tem))
660 return fold_build2_loc (loc, TREE_CODE (t), type,
661 negate_expr (tem), TREE_OPERAND (t, 1));
662 }
663 break;
664
665 case TRUNC_DIV_EXPR:
666 case ROUND_DIV_EXPR:
667 case EXACT_DIV_EXPR:
668 /* In general we can't negate A / B, because if A is INT_MIN and
669 B is 1, we may turn this into INT_MIN / -1 which is undefined
 670 	 and actually traps on some architectures.  But if overflow is
 671 	 undefined, we can negate: -(INT_MIN / 1) would itself be an
 672 	 overflow, which we may assume does not happen.  */
673 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
674 {
675 const char * const warnmsg = G_("assuming signed overflow does not "
676 "occur when negating a division");
677 tem = TREE_OPERAND (t, 1);
678 if (negate_expr_p (tem))
679 {
680 if (INTEGRAL_TYPE_P (type)
681 && (TREE_CODE (tem) != INTEGER_CST
682 || integer_onep (tem)))
683 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
684 return fold_build2_loc (loc, TREE_CODE (t), type,
685 TREE_OPERAND (t, 0), negate_expr (tem));
686 }
 687 	  /* If overflow is undefined then we have to be careful because
 688 	     we ask whether it's ok to associate the negate with the
 689 	     division, which is not ok e.g. for -((a - b) / c): there
 690 	     (-(a - b)) / c may invoke undefined overflow by negating
 691 	     INT_MIN.  So do not use negate_expr_p here but open-code
 692 	     the two important cases.  */
693 tem = TREE_OPERAND (t, 0);
694 if ((INTEGRAL_TYPE_P (type)
695 && (TREE_CODE (tem) == NEGATE_EXPR
696 || (TREE_CODE (tem) == INTEGER_CST
697 && may_negate_without_overflow_p (tem))))
698 || !INTEGRAL_TYPE_P (type))
699 return fold_build2_loc (loc, TREE_CODE (t), type,
700 negate_expr (tem), TREE_OPERAND (t, 1));
701 }
702 break;
703
704 case NOP_EXPR:
705 /* Convert -((double)float) into (double)(-float). */
706 if (TREE_CODE (type) == REAL_TYPE)
707 {
708 tem = strip_float_extensions (t);
709 if (tem != t && negate_expr_p (tem))
710 return fold_convert_loc (loc, type, negate_expr (tem));
711 }
712 break;
713
714 case CALL_EXPR:
715 /* Negate -f(x) as f(-x). */
716 if (negate_mathfn_p (builtin_mathfn_code (t))
717 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
718 {
719 tree fndecl, arg;
720
721 fndecl = get_callee_fndecl (t);
722 arg = negate_expr (CALL_EXPR_ARG (t, 0));
723 return build_call_expr_loc (loc, fndecl, 1, arg);
724 }
725 break;
726
727 case RSHIFT_EXPR:
728 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
729 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
730 {
731 tree op1 = TREE_OPERAND (t, 1);
732 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
733 {
734 tree ntype = TYPE_UNSIGNED (type)
735 ? signed_type_for (type)
736 : unsigned_type_for (type);
737 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
738 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
739 return fold_convert_loc (loc, type, temp);
740 }
741 }
742 break;
743
744 default:
745 break;
746 }
747
748 return NULL_TREE;
749 }
750
 751 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
 752    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
 753    NULL_TREE is returned.  */
754
755 static tree
756 negate_expr (tree t)
757 {
758 tree type, tem;
759 location_t loc;
760
761 if (t == NULL_TREE)
762 return NULL_TREE;
763
764 loc = EXPR_LOCATION (t);
765 type = TREE_TYPE (t);
766 STRIP_SIGN_NOPS (t);
767
768 tem = fold_negate_expr (loc, t);
769 if (!tem)
770 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
771 return fold_convert_loc (loc, type, tem);
772 }
773 \f
 774 /* Split a tree IN into constant, literal and variable parts that could be
775 combined with CODE to make IN. "constant" means an expression with
776 TREE_CONSTANT but that isn't an actual constant. CODE must be a
777 commutative arithmetic operation. Store the constant part into *CONP,
778 the literal in *LITP and return the variable part. If a part isn't
779 present, set it to null. If the tree does not decompose in this way,
780 return the entire tree as the variable part and the other parts as null.
781
 782    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
 783    case, we negate an operand that was subtracted, except when it is a
 784    literal, in which case we use *MINUS_LITP instead.
785
786 If NEGATE_P is true, we are negating all of IN, again except a literal
787 for which we use *MINUS_LITP instead.
788
789 If IN is itself a literal or constant, return it as appropriate.
790
791 Note that we do not guarantee that any of the three values will be the
792 same type as IN, but they will have the same signedness and mode. */
793
794 static tree
795 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
796 tree *minus_litp, int negate_p)
797 {
798 tree var = 0;
799
800 *conp = 0;
801 *litp = 0;
802 *minus_litp = 0;
803
804 /* Strip any conversions that don't change the machine mode or signedness. */
805 STRIP_SIGN_NOPS (in);
806
807 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
808 || TREE_CODE (in) == FIXED_CST)
809 *litp = in;
810 else if (TREE_CODE (in) == code
811 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
812 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
813 /* We can associate addition and subtraction together (even
814 though the C standard doesn't say so) for integers because
815 the value is not affected. For reals, the value might be
816 affected, so we can't. */
817 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
818 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
819 {
820 tree op0 = TREE_OPERAND (in, 0);
821 tree op1 = TREE_OPERAND (in, 1);
822 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
823 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
824
825 /* First see if either of the operands is a literal, then a constant. */
826 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
827 || TREE_CODE (op0) == FIXED_CST)
828 *litp = op0, op0 = 0;
829 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
830 || TREE_CODE (op1) == FIXED_CST)
831 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
832
833 if (op0 != 0 && TREE_CONSTANT (op0))
834 *conp = op0, op0 = 0;
835 else if (op1 != 0 && TREE_CONSTANT (op1))
836 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
837
838 /* If we haven't dealt with either operand, this is not a case we can
839 decompose. Otherwise, VAR is either of the ones remaining, if any. */
840 if (op0 != 0 && op1 != 0)
841 var = in;
842 else if (op0 != 0)
843 var = op0;
844 else
845 var = op1, neg_var_p = neg1_p;
846
847 /* Now do any needed negations. */
848 if (neg_litp_p)
849 *minus_litp = *litp, *litp = 0;
850 if (neg_conp_p)
851 *conp = negate_expr (*conp);
852 if (neg_var_p)
853 var = negate_expr (var);
854 }
855 else if (TREE_CODE (in) == BIT_NOT_EXPR
856 && code == PLUS_EXPR)
857 {
858 /* -X - 1 is folded to ~X, undo that here. */
859 *minus_litp = build_one_cst (TREE_TYPE (in));
860 var = negate_expr (TREE_OPERAND (in, 0));
861 }
862 else if (TREE_CONSTANT (in))
863 *conp = in;
864 else
865 var = in;
866
867 if (negate_p)
868 {
869 if (*litp)
870 *minus_litp = *litp, *litp = 0;
871 else if (*minus_litp)
872 *litp = *minus_litp, *minus_litp = 0;
873 *conp = negate_expr (*conp);
874 var = negate_expr (var);
875 }
876
877 return var;
878 }
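
/* A worked example (illustrative only): splitting IN = X - 7 with
   CODE == PLUS_EXPR sets *MINUS_LITP to 7, leaves *LITP and *CONP
   null and returns X; splitting IN = X + 7 instead sets *LITP to 7
   and returns X.  */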
879
880 /* Re-associate trees split by the above function. T1 and T2 are
881 either expressions to associate or null. Return the new
882 expression, if any. LOC is the location of the new expression. If
883 we build an operation, do it in TYPE and with CODE. */
884
885 static tree
886 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
887 {
888 if (t1 == 0)
889 return t2;
890 else if (t2 == 0)
891 return t1;
892
893 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
894 try to fold this since we will have infinite recursion. But do
895 deal with any NEGATE_EXPRs. */
896 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
897 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
898 {
899 if (code == PLUS_EXPR)
900 {
901 if (TREE_CODE (t1) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t2),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t1, 0)));
906 else if (TREE_CODE (t2) == NEGATE_EXPR)
907 return build2_loc (loc, MINUS_EXPR, type,
908 fold_convert_loc (loc, type, t1),
909 fold_convert_loc (loc, type,
910 TREE_OPERAND (t2, 0)));
911 else if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
913 }
914 else if (code == MINUS_EXPR)
915 {
916 if (integer_zerop (t2))
917 return fold_convert_loc (loc, type, t1);
918 }
919
920 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
921 fold_convert_loc (loc, type, t2));
922 }
923
924 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
925 fold_convert_loc (loc, type, t2));
926 }
927 \f
928 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
929 for use in int_const_binop, size_binop and size_diffop. */
930
931 static bool
932 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
933 {
934 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
935 return false;
936 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
937 return false;
938
939 switch (code)
940 {
941 case LSHIFT_EXPR:
942 case RSHIFT_EXPR:
943 case LROTATE_EXPR:
944 case RROTATE_EXPR:
945 return true;
946
947 default:
948 break;
949 }
950
951 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
952 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
953 && TYPE_MODE (type1) == TYPE_MODE (type2);
954 }
955
956
957 /* Combine two integer constants ARG1 and ARG2 under operation CODE
958 to produce a new constant. Return NULL_TREE if we don't know how
959 to evaluate CODE at compile-time. */
960
961 static tree
962 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
963 int overflowable)
964 {
965 wide_int res;
966 tree t;
967 tree type = TREE_TYPE (arg1);
968 signop sign = TYPE_SIGN (type);
969 bool overflow = false;
970
971 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
972 TYPE_SIGN (TREE_TYPE (parg2)));
973
974 switch (code)
975 {
976 case BIT_IOR_EXPR:
977 res = wi::bit_or (arg1, arg2);
978 break;
979
980 case BIT_XOR_EXPR:
981 res = wi::bit_xor (arg1, arg2);
982 break;
983
984 case BIT_AND_EXPR:
985 res = wi::bit_and (arg1, arg2);
986 break;
987
988 case RSHIFT_EXPR:
989 case LSHIFT_EXPR:
990 if (wi::neg_p (arg2))
991 {
992 arg2 = -arg2;
993 if (code == RSHIFT_EXPR)
994 code = LSHIFT_EXPR;
995 else
996 code = RSHIFT_EXPR;
997 }
998
999 if (code == RSHIFT_EXPR)
1000 /* It's unclear from the C standard whether shifts can overflow.
1001 The following code ignores overflow; perhaps a C standard
1002 interpretation ruling is needed. */
1003 res = wi::rshift (arg1, arg2, sign);
1004 else
1005 res = wi::lshift (arg1, arg2);
1006 break;
1007
1008 case RROTATE_EXPR:
1009 case LROTATE_EXPR:
1010 if (wi::neg_p (arg2))
1011 {
1012 arg2 = -arg2;
1013 if (code == RROTATE_EXPR)
1014 code = LROTATE_EXPR;
1015 else
1016 code = RROTATE_EXPR;
1017 }
1018
1019 if (code == RROTATE_EXPR)
1020 res = wi::rrotate (arg1, arg2);
1021 else
1022 res = wi::lrotate (arg1, arg2);
1023 break;
1024
1025 case PLUS_EXPR:
1026 res = wi::add (arg1, arg2, sign, &overflow);
1027 break;
1028
1029 case MINUS_EXPR:
1030 res = wi::sub (arg1, arg2, sign, &overflow);
1031 break;
1032
1033 case MULT_EXPR:
1034 res = wi::mul (arg1, arg2, sign, &overflow);
1035 break;
1036
1037 case MULT_HIGHPART_EXPR:
1038 res = wi::mul_high (arg1, arg2, sign);
1039 break;
1040
1041 case TRUNC_DIV_EXPR:
1042 case EXACT_DIV_EXPR:
1043 if (arg2 == 0)
1044 return NULL_TREE;
1045 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1046 break;
1047
1048 case FLOOR_DIV_EXPR:
1049 if (arg2 == 0)
1050 return NULL_TREE;
1051 res = wi::div_floor (arg1, arg2, sign, &overflow);
1052 break;
1053
1054 case CEIL_DIV_EXPR:
1055 if (arg2 == 0)
1056 return NULL_TREE;
1057 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1058 break;
1059
1060 case ROUND_DIV_EXPR:
1061 if (arg2 == 0)
1062 return NULL_TREE;
1063 res = wi::div_round (arg1, arg2, sign, &overflow);
1064 break;
1065
1066 case TRUNC_MOD_EXPR:
1067 if (arg2 == 0)
1068 return NULL_TREE;
1069 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1070 break;
1071
1072 case FLOOR_MOD_EXPR:
1073 if (arg2 == 0)
1074 return NULL_TREE;
1075 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1076 break;
1077
1078 case CEIL_MOD_EXPR:
1079 if (arg2 == 0)
1080 return NULL_TREE;
1081 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1082 break;
1083
1084 case ROUND_MOD_EXPR:
1085 if (arg2 == 0)
1086 return NULL_TREE;
1087 res = wi::mod_round (arg1, arg2, sign, &overflow);
1088 break;
1089
1090 case MIN_EXPR:
1091 res = wi::min (arg1, arg2, sign);
1092 break;
1093
1094 case MAX_EXPR:
1095 res = wi::max (arg1, arg2, sign);
1096 break;
1097
1098 default:
1099 return NULL_TREE;
1100 }
1101
1102 t = force_fit_type (type, res, overflowable,
1103 (((sign == SIGNED || overflowable == -1)
1104 && overflow)
1105 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1106
1107 return t;
1108 }
1109
1110 tree
1111 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1112 {
1113 return int_const_binop_1 (code, arg1, arg2, 1);
1114 }
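
/* For example (illustrative only): adding two signed INTEGER_CSTs whose
   sum wraps yields a constant with TREE_OVERFLOW set, since the worker
   above is called with OVERFLOWABLE == 1 and a SIGNED sign; a wrapping
   unsigned addition is not flagged.  Compare size_binop_loc below,
   which passes -1 so that overflow is recorded even for unsigned
   sizetype calculations.  */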
1115
1116 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1117 constant. We assume ARG1 and ARG2 have the same data type, or at least
1118 are the same kind of constant and the same machine mode. Return zero if
1119 combining the constants is not allowed in the current operating mode. */
1120
1121 static tree
1122 const_binop (enum tree_code code, tree arg1, tree arg2)
1123 {
1124 /* Sanity check for the recursive cases. */
1125 if (!arg1 || !arg2)
1126 return NULL_TREE;
1127
1128 STRIP_NOPS (arg1);
1129 STRIP_NOPS (arg2);
1130
1131 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1132 {
1133 if (code == POINTER_PLUS_EXPR)
1134 return int_const_binop (PLUS_EXPR,
1135 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1136
1137 return int_const_binop (code, arg1, arg2);
1138 }
1139
1140 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1141 {
1142 machine_mode mode;
1143 REAL_VALUE_TYPE d1;
1144 REAL_VALUE_TYPE d2;
1145 REAL_VALUE_TYPE value;
1146 REAL_VALUE_TYPE result;
1147 bool inexact;
1148 tree t, type;
1149
1150 /* The following codes are handled by real_arithmetic. */
1151 switch (code)
1152 {
1153 case PLUS_EXPR:
1154 case MINUS_EXPR:
1155 case MULT_EXPR:
1156 case RDIV_EXPR:
1157 case MIN_EXPR:
1158 case MAX_EXPR:
1159 break;
1160
1161 default:
1162 return NULL_TREE;
1163 }
1164
1165 d1 = TREE_REAL_CST (arg1);
1166 d2 = TREE_REAL_CST (arg2);
1167
1168 type = TREE_TYPE (arg1);
1169 mode = TYPE_MODE (type);
1170
1171 /* Don't perform operation if we honor signaling NaNs and
1172 either operand is a NaN. */
1173 if (HONOR_SNANS (mode)
1174 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1175 return NULL_TREE;
1176
1177 /* Don't perform operation if it would raise a division
1178 by zero exception. */
1179 if (code == RDIV_EXPR
1180 && REAL_VALUES_EQUAL (d2, dconst0)
1181 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1182 return NULL_TREE;
1183
1184 /* If either operand is a NaN, just return it. Otherwise, set up
1185 for floating-point trap; we return an overflow. */
1186 if (REAL_VALUE_ISNAN (d1))
1187 return arg1;
1188 else if (REAL_VALUE_ISNAN (d2))
1189 return arg2;
1190
1191 inexact = real_arithmetic (&value, code, &d1, &d2);
1192 real_convert (&result, mode, &value);
1193
1194 	    /* Don't constant fold this floating point operation if the
1195 	       result has overflowed and flag_trapping_math is set.  */
1196 if (flag_trapping_math
1197 && MODE_HAS_INFINITIES (mode)
1198 && REAL_VALUE_ISINF (result)
1199 && !REAL_VALUE_ISINF (d1)
1200 && !REAL_VALUE_ISINF (d2))
1201 return NULL_TREE;
1202
1203 	    /* Don't constant fold this floating point operation if the
1204 	       result may depend upon the run-time rounding mode and
1205 flag_rounding_math is set, or if GCC's software emulation
1206 is unable to accurately represent the result. */
1207 if ((flag_rounding_math
1208 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1209 && (inexact || !real_identical (&result, &value)))
1210 return NULL_TREE;
1211
1212 t = build_real (type, result);
1213
1214 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1215 return t;
1216 }
1217
1218 if (TREE_CODE (arg1) == FIXED_CST)
1219 {
1220 FIXED_VALUE_TYPE f1;
1221 FIXED_VALUE_TYPE f2;
1222 FIXED_VALUE_TYPE result;
1223 tree t, type;
1224 int sat_p;
1225 bool overflow_p;
1226
1227 /* The following codes are handled by fixed_arithmetic. */
1228 switch (code)
1229 {
1230 case PLUS_EXPR:
1231 case MINUS_EXPR:
1232 case MULT_EXPR:
1233 case TRUNC_DIV_EXPR:
1234 if (TREE_CODE (arg2) != FIXED_CST)
1235 return NULL_TREE;
1236 f2 = TREE_FIXED_CST (arg2);
1237 break;
1238
1239 case LSHIFT_EXPR:
1240 case RSHIFT_EXPR:
1241 {
1242 if (TREE_CODE (arg2) != INTEGER_CST)
1243 return NULL_TREE;
1244 wide_int w2 = arg2;
1245 f2.data.high = w2.elt (1);
1246 f2.data.low = w2.elt (0);
1247 f2.mode = SImode;
1248 }
1249 break;
1250
1251 default:
1252 return NULL_TREE;
1253 }
1254
1255 f1 = TREE_FIXED_CST (arg1);
1256 type = TREE_TYPE (arg1);
1257 sat_p = TYPE_SATURATING (type);
1258 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1259 t = build_fixed (type, result);
1260 /* Propagate overflow flags. */
1261 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1262 TREE_OVERFLOW (t) = 1;
1263 return t;
1264 }
1265
1266 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1267 {
1268 tree type = TREE_TYPE (arg1);
1269 tree r1 = TREE_REALPART (arg1);
1270 tree i1 = TREE_IMAGPART (arg1);
1271 tree r2 = TREE_REALPART (arg2);
1272 tree i2 = TREE_IMAGPART (arg2);
1273 tree real, imag;
1274
1275 switch (code)
1276 {
1277 case PLUS_EXPR:
1278 case MINUS_EXPR:
1279 real = const_binop (code, r1, r2);
1280 imag = const_binop (code, i1, i2);
1281 break;
1282
1283 case MULT_EXPR:
1284 if (COMPLEX_FLOAT_TYPE_P (type))
1285 return do_mpc_arg2 (arg1, arg2, type,
1286 /* do_nonfinite= */ folding_initializer,
1287 mpc_mul);
1288
1289 real = const_binop (MINUS_EXPR,
1290 const_binop (MULT_EXPR, r1, r2),
1291 const_binop (MULT_EXPR, i1, i2));
1292 imag = const_binop (PLUS_EXPR,
1293 const_binop (MULT_EXPR, r1, i2),
1294 const_binop (MULT_EXPR, i1, r2));
1295 break;
1296
1297 case RDIV_EXPR:
1298 if (COMPLEX_FLOAT_TYPE_P (type))
1299 return do_mpc_arg2 (arg1, arg2, type,
1300 /* do_nonfinite= */ folding_initializer,
1301 mpc_div);
1302 /* Fallthru ... */
1303 case TRUNC_DIV_EXPR:
1304 case CEIL_DIV_EXPR:
1305 case FLOOR_DIV_EXPR:
1306 case ROUND_DIV_EXPR:
1307 if (flag_complex_method == 0)
1308 {
1309 /* Keep this algorithm in sync with
1310 tree-complex.c:expand_complex_div_straight().
1311
1312 Expand complex division to scalars, straightforward algorithm.
1313 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1314 t = br*br + bi*bi
1315 */
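	       /* A worked example (not from the original source):
	          (1 + 2i) / (3 + 4i): t = 3*3 + 4*4 = 25,
	          tr = (1*3 + 2*4)/25 = 11/25, ti = (2*3 - 1*4)/25 = 2/25.  */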
1316 tree magsquared
1317 = const_binop (PLUS_EXPR,
1318 const_binop (MULT_EXPR, r2, r2),
1319 const_binop (MULT_EXPR, i2, i2));
1320 tree t1
1321 = const_binop (PLUS_EXPR,
1322 const_binop (MULT_EXPR, r1, r2),
1323 const_binop (MULT_EXPR, i1, i2));
1324 tree t2
1325 = const_binop (MINUS_EXPR,
1326 const_binop (MULT_EXPR, i1, r2),
1327 const_binop (MULT_EXPR, r1, i2));
1328
1329 real = const_binop (code, t1, magsquared);
1330 imag = const_binop (code, t2, magsquared);
1331 }
1332 else
1333 {
1334 /* Keep this algorithm in sync with
1335 tree-complex.c:expand_complex_div_wide().
1336
1337 Expand complex division to scalars, modified algorithm to minimize
1338 overflow with wide input ranges. */
1339 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1340 fold_abs_const (r2, TREE_TYPE (type)),
1341 fold_abs_const (i2, TREE_TYPE (type)));
1342
1343 if (integer_nonzerop (compare))
1344 {
1345 /* In the TRUE branch, we compute
1346 ratio = br/bi;
1347 div = (br * ratio) + bi;
1348 tr = (ar * ratio) + ai;
1349 ti = (ai * ratio) - ar;
1350 tr = tr / div;
1351 ti = ti / div; */
1352 tree ratio = const_binop (code, r2, i2);
1353 tree div = const_binop (PLUS_EXPR, i2,
1354 const_binop (MULT_EXPR, r2, ratio));
1355 real = const_binop (MULT_EXPR, r1, ratio);
1356 real = const_binop (PLUS_EXPR, real, i1);
1357 real = const_binop (code, real, div);
1358
1359 imag = const_binop (MULT_EXPR, i1, ratio);
1360 imag = const_binop (MINUS_EXPR, imag, r1);
1361 imag = const_binop (code, imag, div);
1362 }
1363 else
1364 {
1365 	      /* In the FALSE branch, we compute
1366 	         ratio = bi/br;
1367 	         div = (bi * ratio) + br;
1368 	         tr = (ai * ratio) + ar;
1369 	         ti = ai - (ar * ratio);
1370 	         tr = tr / div;
1371 	         ti = ti / div;  */
1372 tree ratio = const_binop (code, i2, r2);
1373 tree div = const_binop (PLUS_EXPR, r2,
1374 const_binop (MULT_EXPR, i2, ratio));
1375
1376 real = const_binop (MULT_EXPR, i1, ratio);
1377 real = const_binop (PLUS_EXPR, real, r1);
1378 real = const_binop (code, real, div);
1379
1380 imag = const_binop (MULT_EXPR, r1, ratio);
1381 imag = const_binop (MINUS_EXPR, i1, imag);
1382 imag = const_binop (code, imag, div);
1383 }
1384 }
1385 break;
1386
1387 default:
1388 return NULL_TREE;
1389 }
1390
1391 if (real && imag)
1392 return build_complex (type, real, imag);
1393 }
1394
1395 if (TREE_CODE (arg1) == VECTOR_CST
1396 && TREE_CODE (arg2) == VECTOR_CST)
1397 {
1398 tree type = TREE_TYPE (arg1);
1399 int count = TYPE_VECTOR_SUBPARTS (type), i;
1400 tree *elts = XALLOCAVEC (tree, count);
1401
1402 for (i = 0; i < count; i++)
1403 {
1404 tree elem1 = VECTOR_CST_ELT (arg1, i);
1405 tree elem2 = VECTOR_CST_ELT (arg2, i);
1406
1407 elts[i] = const_binop (code, elem1, elem2);
1408
1409 	  /* It is possible that const_binop cannot handle the given
1410 	     code and returns NULL_TREE.  */
1411 if (elts[i] == NULL_TREE)
1412 return NULL_TREE;
1413 }
1414
1415 return build_vector (type, elts);
1416 }
1417
1418   /* Shifts allow a scalar shift amount for a vector.  */
1419 if (TREE_CODE (arg1) == VECTOR_CST
1420 && TREE_CODE (arg2) == INTEGER_CST)
1421 {
1422 tree type = TREE_TYPE (arg1);
1423 int count = TYPE_VECTOR_SUBPARTS (type), i;
1424 tree *elts = XALLOCAVEC (tree, count);
1425
1426 for (i = 0; i < count; i++)
1427 {
1428 tree elem1 = VECTOR_CST_ELT (arg1, i);
1429
1430 elts[i] = const_binop (code, elem1, arg2);
1431
1432 	  /* It is possible that const_binop cannot handle the given
1433 	     code and returns NULL_TREE.  */
1434 if (elts[i] == NULL_TREE)
1435 return NULL_TREE;
1436 }
1437
1438 return build_vector (type, elts);
1439 }
1440 return NULL_TREE;
1441 }
1442
1443 /* Overload that adds a TYPE parameter to be able to dispatch
1444 to fold_relational_const. */
1445
1446 tree
1447 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1448 {
1449 if (TREE_CODE_CLASS (code) == tcc_comparison)
1450 return fold_relational_const (code, type, arg1, arg2);
1451
1452   /* ???  Until we make the const_binop worker take the type of the
1453      result as an argument, put those cases that need it here.  */
1454 switch (code)
1455 {
1456 case COMPLEX_EXPR:
1457 if ((TREE_CODE (arg1) == REAL_CST
1458 && TREE_CODE (arg2) == REAL_CST)
1459 || (TREE_CODE (arg1) == INTEGER_CST
1460 && TREE_CODE (arg2) == INTEGER_CST))
1461 return build_complex (type, arg1, arg2);
1462 return NULL_TREE;
1463
1464 case VEC_PACK_TRUNC_EXPR:
1465 case VEC_PACK_FIX_TRUNC_EXPR:
1466 {
1467 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1468 tree *elts;
1469
1470 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1471 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1472 if (TREE_CODE (arg1) != VECTOR_CST
1473 || TREE_CODE (arg2) != VECTOR_CST)
1474 return NULL_TREE;
1475
1476 elts = XALLOCAVEC (tree, nelts);
1477 if (!vec_cst_ctor_to_array (arg1, elts)
1478 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1479 return NULL_TREE;
1480
1481 for (i = 0; i < nelts; i++)
1482 {
1483 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1484 ? NOP_EXPR : FIX_TRUNC_EXPR,
1485 TREE_TYPE (type), elts[i]);
1486 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1487 return NULL_TREE;
1488 }
1489
1490 return build_vector (type, elts);
1491 }
1492
1493 case VEC_WIDEN_MULT_LO_EXPR:
1494 case VEC_WIDEN_MULT_HI_EXPR:
1495 case VEC_WIDEN_MULT_EVEN_EXPR:
1496 case VEC_WIDEN_MULT_ODD_EXPR:
1497 {
1498 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1499 unsigned int out, ofs, scale;
1500 tree *elts;
1501
1502 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1503 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1504 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1505 return NULL_TREE;
1506
1507 elts = XALLOCAVEC (tree, nelts * 4);
1508 if (!vec_cst_ctor_to_array (arg1, elts)
1509 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1510 return NULL_TREE;
1511
1512 if (code == VEC_WIDEN_MULT_LO_EXPR)
1513 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1514 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1515 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1516 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1517 scale = 1, ofs = 0;
1518 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1519 scale = 1, ofs = 1;
1520
1521 for (out = 0; out < nelts; out++)
1522 {
1523 unsigned int in1 = (out << scale) + ofs;
1524 unsigned int in2 = in1 + nelts * 2;
1525 tree t1, t2;
1526
1527 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1528 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1529
1530 if (t1 == NULL_TREE || t2 == NULL_TREE)
1531 return NULL_TREE;
1532 elts[out] = const_binop (MULT_EXPR, t1, t2);
1533 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1534 return NULL_TREE;
1535 }
1536
1537 return build_vector (type, elts);
1538 }
1539
1540 default:;
1541 }
1542
1543 if (TREE_CODE_CLASS (code) != tcc_binary)
1544 return NULL_TREE;
1545
1546   /* Make sure type and arg1 have the same saturating flag.  */
1547 gcc_checking_assert (TYPE_SATURATING (type)
1548 == TYPE_SATURATING (TREE_TYPE (arg1)));
1549
1550 return const_binop (code, arg1, arg2);
1551 }
1552
1553 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1554    Return zero if computing the constant is not possible.  */
1555
1556 tree
1557 const_unop (enum tree_code code, tree type, tree arg0)
1558 {
1559 switch (code)
1560 {
1561 CASE_CONVERT:
1562 case FLOAT_EXPR:
1563 case FIX_TRUNC_EXPR:
1564 case FIXED_CONVERT_EXPR:
1565 return fold_convert_const (code, type, arg0);
1566
1567 case ADDR_SPACE_CONVERT_EXPR:
1568 if (integer_zerop (arg0))
1569 return fold_convert_const (code, type, arg0);
1570 break;
1571
1572 case VIEW_CONVERT_EXPR:
1573 return fold_view_convert_expr (type, arg0);
1574
1575 case NEGATE_EXPR:
1576 {
1577 /* Can't call fold_negate_const directly here as that doesn't
1578 handle all cases and we might not be able to negate some
1579 constants. */
1580 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1581 if (tem && CONSTANT_CLASS_P (tem))
1582 return tem;
1583 break;
1584 }
1585
1586 case ABS_EXPR:
1587 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1588 return fold_abs_const (arg0, type);
1589 break;
1590
1591 case CONJ_EXPR:
1592 if (TREE_CODE (arg0) == COMPLEX_CST)
1593 {
1594 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1595 TREE_TYPE (type));
1596 return build_complex (type, TREE_REALPART (arg0), ipart);
1597 }
1598 break;
1599
1600 case BIT_NOT_EXPR:
1601 if (TREE_CODE (arg0) == INTEGER_CST)
1602 return fold_not_const (arg0, type);
1603 /* Perform BIT_NOT_EXPR on each element individually. */
1604 else if (TREE_CODE (arg0) == VECTOR_CST)
1605 {
1606 tree *elements;
1607 tree elem;
1608 unsigned count = VECTOR_CST_NELTS (arg0), i;
1609
1610 elements = XALLOCAVEC (tree, count);
1611 for (i = 0; i < count; i++)
1612 {
1613 elem = VECTOR_CST_ELT (arg0, i);
1614 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1615 if (elem == NULL_TREE)
1616 break;
1617 elements[i] = elem;
1618 }
1619 if (i == count)
1620 return build_vector (type, elements);
1621 }
1622 break;
1623
1624 case TRUTH_NOT_EXPR:
1625 if (TREE_CODE (arg0) == INTEGER_CST)
1626 return constant_boolean_node (integer_zerop (arg0), type);
1627 break;
1628
1629 case REALPART_EXPR:
1630 if (TREE_CODE (arg0) == COMPLEX_CST)
1631 return fold_convert (type, TREE_REALPART (arg0));
1632 break;
1633
1634 case IMAGPART_EXPR:
1635 if (TREE_CODE (arg0) == COMPLEX_CST)
1636 return fold_convert (type, TREE_IMAGPART (arg0));
1637 break;
1638
1639 case VEC_UNPACK_LO_EXPR:
1640 case VEC_UNPACK_HI_EXPR:
1641 case VEC_UNPACK_FLOAT_LO_EXPR:
1642 case VEC_UNPACK_FLOAT_HI_EXPR:
1643 {
1644 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1645 tree *elts;
1646 enum tree_code subcode;
1647
1648 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1649 if (TREE_CODE (arg0) != VECTOR_CST)
1650 return NULL_TREE;
1651
1652 elts = XALLOCAVEC (tree, nelts * 2);
1653 if (!vec_cst_ctor_to_array (arg0, elts))
1654 return NULL_TREE;
1655
1656 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1657 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1658 elts += nelts;
1659
1660 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1661 subcode = NOP_EXPR;
1662 else
1663 subcode = FLOAT_EXPR;
1664
1665 for (i = 0; i < nelts; i++)
1666 {
1667 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1668 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1669 return NULL_TREE;
1670 }
1671
1672 return build_vector (type, elts);
1673 }
1674
1675 case REDUC_MIN_EXPR:
1676 case REDUC_MAX_EXPR:
1677 case REDUC_PLUS_EXPR:
1678 {
1679 unsigned int nelts, i;
1680 tree *elts;
1681 enum tree_code subcode;
1682
1683 if (TREE_CODE (arg0) != VECTOR_CST)
1684 return NULL_TREE;
1685 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1686
1687 elts = XALLOCAVEC (tree, nelts);
1688 if (!vec_cst_ctor_to_array (arg0, elts))
1689 return NULL_TREE;
1690
1691 switch (code)
1692 {
1693 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1694 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1695 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1696 default: gcc_unreachable ();
1697 }
1698
1699 for (i = 1; i < nelts; i++)
1700 {
1701 elts[0] = const_binop (subcode, elts[0], elts[i]);
1702 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1703 return NULL_TREE;
1704 }
1705
1706 return elts[0];
1707 }
1708
1709 default:
1710 break;
1711 }
1712
1713 return NULL_TREE;
1714 }
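
/* As an illustration (not from the original source): const_unop with
   REDUC_PLUS_EXPR applied to the constant vector {1, 2, 3, 4} folds
   PLUS_EXPR over the elements via const_binop and returns the scalar
   constant 10.  */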
1715
1716 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1717 indicates which particular sizetype to create. */
1718
1719 tree
1720 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1721 {
1722 return build_int_cst (sizetype_tab[(int) kind], number);
1723 }
1724 \f
1725 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1726 is a tree code. The type of the result is taken from the operands.
1727 Both must be equivalent integer types, ala int_binop_types_match_p.
1728 If the operands are constant, so is the result. */
1729
1730 tree
1731 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1732 {
1733 tree type = TREE_TYPE (arg0);
1734
1735 if (arg0 == error_mark_node || arg1 == error_mark_node)
1736 return error_mark_node;
1737
1738 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1739 TREE_TYPE (arg1)));
1740
1741 /* Handle the special case of two integer constants faster. */
1742 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1743 {
1744 /* And some specific cases even faster than that. */
1745 if (code == PLUS_EXPR)
1746 {
1747 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1748 return arg1;
1749 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1750 return arg0;
1751 }
1752 else if (code == MINUS_EXPR)
1753 {
1754 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1755 return arg0;
1756 }
1757 else if (code == MULT_EXPR)
1758 {
1759 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1760 return arg1;
1761 }
1762
1763 /* Handle general case of two integer constants. For sizetype
1764 constant calculations we always want to know about overflow,
1765 even in the unsigned case. */
1766 return int_const_binop_1 (code, arg0, arg1, -1);
1767 }
1768
1769 return fold_build2_loc (loc, code, type, arg0, arg1);
1770 }
1771
1772 /* Given two values, either both of sizetype or both of bitsizetype,
1773 compute the difference between the two values. Return the value
1774    in the signed type corresponding to the type of the operands.  */
1775
1776 tree
1777 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1778 {
1779 tree type = TREE_TYPE (arg0);
1780 tree ctype;
1781
1782 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1783 TREE_TYPE (arg1)));
1784
1785 /* If the type is already signed, just do the simple thing. */
1786 if (!TYPE_UNSIGNED (type))
1787 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1788
1789 if (type == sizetype)
1790 ctype = ssizetype;
1791 else if (type == bitsizetype)
1792 ctype = sbitsizetype;
1793 else
1794 ctype = signed_type_for (type);
1795
1796 /* If either operand is not a constant, do the conversions to the signed
1797 type and subtract. The hardware will do the right thing with any
1798 overflow in the subtraction. */
1799 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1800 return size_binop_loc (loc, MINUS_EXPR,
1801 fold_convert_loc (loc, ctype, arg0),
1802 fold_convert_loc (loc, ctype, arg1));
1803
1804 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1805 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1806 overflow) and negate (which can't either). Special-case a result
1807 of zero while we're here. */
1808 if (tree_int_cst_equal (arg0, arg1))
1809 return build_int_cst (ctype, 0);
1810 else if (tree_int_cst_lt (arg1, arg0))
1811 return fold_convert_loc (loc, ctype,
1812 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1813 else
1814 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1815 fold_convert_loc (loc, ctype,
1816 size_binop_loc (loc,
1817 MINUS_EXPR,
1818 arg1, arg0)));
1819 }
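
/* For example (illustrative only): size_diffop_loc on the sizetype
   constants 4 and 8 computes 8 - 4 == 4 first and then negates it in
   ssizetype, yielding -4 without ever forming an unsigned wraparound
   value.  */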
1820 \f
1821 /* A subroutine of fold_convert_const handling conversions of an
1822 INTEGER_CST to another integer type. */
1823
1824 static tree
1825 fold_convert_const_int_from_int (tree type, const_tree arg1)
1826 {
1827 /* Given an integer constant, make new constant with new type,
1828 appropriately sign-extended or truncated. Use widest_int
1829      so that any extension is done according to ARG1's type.  */
1830 return force_fit_type (type, wi::to_widest (arg1),
1831 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1832 TREE_OVERFLOW (arg1));
1833 }
1834
1835 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1836 to an integer type. */
1837
1838 static tree
1839 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1840 {
1841 bool overflow = false;
1842 tree t;
1843
1844 /* The following code implements the floating point to integer
1845      conversion rules required by the Java Language Specification:
1846      IEEE NaNs are mapped to zero and values that overflow
1847 the target precision saturate, i.e. values greater than
1848 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1849 are mapped to INT_MIN. These semantics are allowed by the
1850 C and C++ standards that simply state that the behavior of
1851 FP-to-integer conversion is unspecified upon overflow. */
1852
1853 wide_int val;
1854 REAL_VALUE_TYPE r;
1855 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1856
1857 switch (code)
1858 {
1859 case FIX_TRUNC_EXPR:
1860 real_trunc (&r, VOIDmode, &x);
1861 break;
1862
1863 default:
1864 gcc_unreachable ();
1865 }
1866
1867 /* If R is NaN, return zero and show we have an overflow. */
1868 if (REAL_VALUE_ISNAN (r))
1869 {
1870 overflow = true;
1871 val = wi::zero (TYPE_PRECISION (type));
1872 }
1873
1874 /* See if R is less than the lower bound or greater than the
1875 upper bound. */
1876
1877 if (! overflow)
1878 {
1879 tree lt = TYPE_MIN_VALUE (type);
1880 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1881 if (REAL_VALUES_LESS (r, l))
1882 {
1883 overflow = true;
1884 val = lt;
1885 }
1886 }
1887
1888 if (! overflow)
1889 {
1890 tree ut = TYPE_MAX_VALUE (type);
1891 if (ut)
1892 {
1893 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1894 if (REAL_VALUES_LESS (u, r))
1895 {
1896 overflow = true;
1897 val = ut;
1898 }
1899 }
1900 }
1901
1902 if (! overflow)
1903 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1904
1905 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1906 return t;
1907 }
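
/* Editorial worked examples (assuming a 32-bit signed TYPE; not in
   the original source):

     (int) 3.7    ->  3        FIX_TRUNC_EXPR truncates toward zero
     (int) -3.7   ->  -3
     (int) NaN    ->  0        TREE_OVERFLOW set
     (int) 1e30   ->  INT_MAX  saturates at TYPE_MAX_VALUE, overflow
     (int) -1e30  ->  INT_MIN  saturates at TYPE_MIN_VALUE, overflow  */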
1908
1909 /* A subroutine of fold_convert_const handling conversions of a
1910 FIXED_CST to an integer type. */
1911
1912 static tree
1913 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1914 {
1915 tree t;
1916 double_int temp, temp_trunc;
1917 unsigned int mode;
1918
1919 /* Right shift FIXED_CST to temp by fbit. */
1920 temp = TREE_FIXED_CST (arg1).data;
1921 mode = TREE_FIXED_CST (arg1).mode;
1922 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1923 {
1924 temp = temp.rshift (GET_MODE_FBIT (mode),
1925 HOST_BITS_PER_DOUBLE_INT,
1926 SIGNED_FIXED_POINT_MODE_P (mode));
1927
1928 /* Left shift temp to temp_trunc by fbit. */
1929 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1930 HOST_BITS_PER_DOUBLE_INT,
1931 SIGNED_FIXED_POINT_MODE_P (mode));
1932 }
1933 else
1934 {
1935 temp = double_int_zero;
1936 temp_trunc = double_int_zero;
1937 }
1938
1939 /* If FIXED_CST is negative, we need to round the value toward 0:
1940 if any fractional bits were shifted out, add 1 to TEMP. */
1941 if (SIGNED_FIXED_POINT_MODE_P (mode)
1942 && temp_trunc.is_negative ()
1943 && TREE_FIXED_CST (arg1).data != temp_trunc)
1944 temp += double_int_one;
1945
1946 /* Given a fixed-point constant, make new constant with new type,
1947 appropriately sign-extended or truncated. */
1948 t = force_fit_type (type, temp, -1,
1949 (temp.is_negative ()
1950 && (TYPE_UNSIGNED (type)
1951 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1952 | TREE_OVERFLOW (arg1));
1953
1954 return t;
1955 }
1956
1957 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1958 to another floating point type. */
1959
1960 static tree
1961 fold_convert_const_real_from_real (tree type, const_tree arg1)
1962 {
1963 REAL_VALUE_TYPE value;
1964 tree t;
1965
1966 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1967 t = build_real (type, value);
1968
1969 /* If converting an infinity or NAN to a representation that doesn't
1970 have one, set the overflow bit so that we can produce some kind of
1971 error message at the appropriate point if necessary. It's not the
1972 most user-friendly message, but it's better than nothing. */
1973 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1974 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1975 TREE_OVERFLOW (t) = 1;
1976 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1977 && !MODE_HAS_NANS (TYPE_MODE (type)))
1978 TREE_OVERFLOW (t) = 1;
1979 /* Regular overflow, conversion produced an infinity in a mode that
1980 can't represent them. */
1981 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1982 && REAL_VALUE_ISINF (value)
1983 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1984 TREE_OVERFLOW (t) = 1;
1985 else
1986 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1987 return t;
1988 }
1989
1990 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1991 to a floating point type. */
1992
1993 static tree
1994 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1995 {
1996 REAL_VALUE_TYPE value;
1997 tree t;
1998
1999 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2000 t = build_real (type, value);
2001
2002 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2003 return t;
2004 }
2005
2006 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2007 to another fixed-point type. */
2008
2009 static tree
2010 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2011 {
2012 FIXED_VALUE_TYPE value;
2013 tree t;
2014 bool overflow_p;
2015
2016 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2017 TYPE_SATURATING (type));
2018 t = build_fixed (type, value);
2019
2020 /* Propagate overflow flags. */
2021 if (overflow_p | TREE_OVERFLOW (arg1))
2022 TREE_OVERFLOW (t) = 1;
2023 return t;
2024 }
2025
2026 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2027 to a fixed-point type. */
2028
2029 static tree
2030 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2031 {
2032 FIXED_VALUE_TYPE value;
2033 tree t;
2034 bool overflow_p;
2035 double_int di;
2036
2037 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2038
2039 di.low = TREE_INT_CST_ELT (arg1, 0);
2040 if (TREE_INT_CST_NUNITS (arg1) == 1)
2041 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2042 else
2043 di.high = TREE_INT_CST_ELT (arg1, 1);
2044
2045 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2046 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2047 TYPE_SATURATING (type));
2048 t = build_fixed (type, value);
2049
2050 /* Propagate overflow flags. */
2051 if (overflow_p | TREE_OVERFLOW (arg1))
2052 TREE_OVERFLOW (t) = 1;
2053 return t;
2054 }
2055
2056 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2057 to a fixed-point type. */
2058
2059 static tree
2060 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2061 {
2062 FIXED_VALUE_TYPE value;
2063 tree t;
2064 bool overflow_p;
2065
2066 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2067 &TREE_REAL_CST (arg1),
2068 TYPE_SATURATING (type));
2069 t = build_fixed (type, value);
2070
2071 /* Propagate overflow flags. */
2072 if (overflow_p | TREE_OVERFLOW (arg1))
2073 TREE_OVERFLOW (t) = 1;
2074 return t;
2075 }
2076
2077 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2078 type TYPE. If no simplification can be done return NULL_TREE. */
2079
2080 static tree
2081 fold_convert_const (enum tree_code code, tree type, tree arg1)
2082 {
2083 if (TREE_TYPE (arg1) == type)
2084 return arg1;
2085
2086 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2087 || TREE_CODE (type) == OFFSET_TYPE)
2088 {
2089 if (TREE_CODE (arg1) == INTEGER_CST)
2090 return fold_convert_const_int_from_int (type, arg1);
2091 else if (TREE_CODE (arg1) == REAL_CST)
2092 return fold_convert_const_int_from_real (code, type, arg1);
2093 else if (TREE_CODE (arg1) == FIXED_CST)
2094 return fold_convert_const_int_from_fixed (type, arg1);
2095 }
2096 else if (TREE_CODE (type) == REAL_TYPE)
2097 {
2098 if (TREE_CODE (arg1) == INTEGER_CST)
2099 return build_real_from_int_cst (type, arg1);
2100 else if (TREE_CODE (arg1) == REAL_CST)
2101 return fold_convert_const_real_from_real (type, arg1);
2102 else if (TREE_CODE (arg1) == FIXED_CST)
2103 return fold_convert_const_real_from_fixed (type, arg1);
2104 }
2105 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2106 {
2107 if (TREE_CODE (arg1) == FIXED_CST)
2108 return fold_convert_const_fixed_from_fixed (type, arg1);
2109 else if (TREE_CODE (arg1) == INTEGER_CST)
2110 return fold_convert_const_fixed_from_int (type, arg1);
2111 else if (TREE_CODE (arg1) == REAL_CST)
2112 return fold_convert_const_fixed_from_real (type, arg1);
2113 }
2114 return NULL_TREE;
2115 }
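
/* Editorial examples of the dispatch above (hedged sketch with
   hypothetical constants):

     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, 2.5)
       -> fold_convert_const_int_from_real  ->  2
     fold_convert_const (FLOAT_EXPR, double_type_node, 3)
       -> build_real_from_int_cst           ->  3.0
     fold_convert_const (NOP_EXPR, integer_type_node, a COMPLEX_CST)
       -> NULL_TREE, no simplification  */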
2116
2117 /* Construct a vector of zero elements of vector type TYPE. */
2118
2119 static tree
2120 build_zero_vector (tree type)
2121 {
2122 tree t;
2123
2124 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2125 return build_vector_from_val (type, t);
2126 }
2127
2128 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2129
2130 bool
2131 fold_convertible_p (const_tree type, const_tree arg)
2132 {
2133 tree orig = TREE_TYPE (arg);
2134
2135 if (type == orig)
2136 return true;
2137
2138 if (TREE_CODE (arg) == ERROR_MARK
2139 || TREE_CODE (type) == ERROR_MARK
2140 || TREE_CODE (orig) == ERROR_MARK)
2141 return false;
2142
2143 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2144 return true;
2145
2146 switch (TREE_CODE (type))
2147 {
2148 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2149 case POINTER_TYPE: case REFERENCE_TYPE:
2150 case OFFSET_TYPE:
2151 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2152 || TREE_CODE (orig) == OFFSET_TYPE)
2153 return true;
2154 return (TREE_CODE (orig) == VECTOR_TYPE
2155 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2156
2157 case REAL_TYPE:
2158 case FIXED_POINT_TYPE:
2159 case COMPLEX_TYPE:
2160 case VECTOR_TYPE:
2161 case VOID_TYPE:
2162 return TREE_CODE (type) == TREE_CODE (orig);
2163
2164 default:
2165 return false;
2166 }
2167 }
2168
2169 /* Convert expression ARG to type TYPE. Used by the middle-end for
2170 simple conversions in preference to calling the front-end's convert. */
2171
2172 tree
2173 fold_convert_loc (location_t loc, tree type, tree arg)
2174 {
2175 tree orig = TREE_TYPE (arg);
2176 tree tem;
2177
2178 if (type == orig)
2179 return arg;
2180
2181 if (TREE_CODE (arg) == ERROR_MARK
2182 || TREE_CODE (type) == ERROR_MARK
2183 || TREE_CODE (orig) == ERROR_MARK)
2184 return error_mark_node;
2185
2186 switch (TREE_CODE (type))
2187 {
2188 case POINTER_TYPE:
2189 case REFERENCE_TYPE:
2190 /* Handle conversions between pointers to different address spaces. */
2191 if (POINTER_TYPE_P (orig)
2192 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2193 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2194 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2195 /* fall through */
2196
2197 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2198 case OFFSET_TYPE:
2199 if (TREE_CODE (arg) == INTEGER_CST)
2200 {
2201 tem = fold_convert_const (NOP_EXPR, type, arg);
2202 if (tem != NULL_TREE)
2203 return tem;
2204 }
2205 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2206 || TREE_CODE (orig) == OFFSET_TYPE)
2207 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2208 if (TREE_CODE (orig) == COMPLEX_TYPE)
2209 return fold_convert_loc (loc, type,
2210 fold_build1_loc (loc, REALPART_EXPR,
2211 TREE_TYPE (orig), arg));
2212 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2213 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2214 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2215
2216 case REAL_TYPE:
2217 if (TREE_CODE (arg) == INTEGER_CST)
2218 {
2219 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2220 if (tem != NULL_TREE)
2221 return tem;
2222 }
2223 else if (TREE_CODE (arg) == REAL_CST)
2224 {
2225 tem = fold_convert_const (NOP_EXPR, type, arg);
2226 if (tem != NULL_TREE)
2227 return tem;
2228 }
2229 else if (TREE_CODE (arg) == FIXED_CST)
2230 {
2231 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2232 if (tem != NULL_TREE)
2233 return tem;
2234 }
2235
2236 switch (TREE_CODE (orig))
2237 {
2238 case INTEGER_TYPE:
2239 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2240 case POINTER_TYPE: case REFERENCE_TYPE:
2241 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2242
2243 case REAL_TYPE:
2244 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2245
2246 case FIXED_POINT_TYPE:
2247 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2248
2249 case COMPLEX_TYPE:
2250 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2251 return fold_convert_loc (loc, type, tem);
2252
2253 default:
2254 gcc_unreachable ();
2255 }
2256
2257 case FIXED_POINT_TYPE:
2258 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2259 || TREE_CODE (arg) == REAL_CST)
2260 {
2261 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2262 if (tem != NULL_TREE)
2263 goto fold_convert_exit;
2264 }
2265
2266 switch (TREE_CODE (orig))
2267 {
2268 case FIXED_POINT_TYPE:
2269 case INTEGER_TYPE:
2270 case ENUMERAL_TYPE:
2271 case BOOLEAN_TYPE:
2272 case REAL_TYPE:
2273 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2274
2275 case COMPLEX_TYPE:
2276 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2277 return fold_convert_loc (loc, type, tem);
2278
2279 default:
2280 gcc_unreachable ();
2281 }
2282
2283 case COMPLEX_TYPE:
2284 switch (TREE_CODE (orig))
2285 {
2286 case INTEGER_TYPE:
2287 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2288 case POINTER_TYPE: case REFERENCE_TYPE:
2289 case REAL_TYPE:
2290 case FIXED_POINT_TYPE:
2291 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2292 fold_convert_loc (loc, TREE_TYPE (type), arg),
2293 fold_convert_loc (loc, TREE_TYPE (type),
2294 integer_zero_node));
2295 case COMPLEX_TYPE:
2296 {
2297 tree rpart, ipart;
2298
2299 if (TREE_CODE (arg) == COMPLEX_EXPR)
2300 {
2301 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2302 TREE_OPERAND (arg, 0));
2303 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2304 TREE_OPERAND (arg, 1));
2305 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2306 }
2307
2308 arg = save_expr (arg);
2309 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2310 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2311 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2312 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2313 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2314 }
2315
2316 default:
2317 gcc_unreachable ();
2318 }
2319
2320 case VECTOR_TYPE:
2321 if (integer_zerop (arg))
2322 return build_zero_vector (type);
2323 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2324 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2325 || TREE_CODE (orig) == VECTOR_TYPE);
2326 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2327
2328 case VOID_TYPE:
2329 tem = fold_ignored_result (arg);
2330 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2331
2332 default:
2333 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2334 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2335 gcc_unreachable ();
2336 }
2337 fold_convert_exit:
2338 protected_set_expr_location_unshare (tem, loc);
2339 return tem;
2340 }
2341 \f
2342 /* Return false if X can be assumed not to be an lvalue, true
2343 otherwise. */
2344
2345 static bool
2346 maybe_lvalue_p (const_tree x)
2347 {
2348 /* We only need to wrap lvalue tree codes. */
2349 switch (TREE_CODE (x))
2350 {
2351 case VAR_DECL:
2352 case PARM_DECL:
2353 case RESULT_DECL:
2354 case LABEL_DECL:
2355 case FUNCTION_DECL:
2356 case SSA_NAME:
2357
2358 case COMPONENT_REF:
2359 case MEM_REF:
2360 case INDIRECT_REF:
2361 case ARRAY_REF:
2362 case ARRAY_RANGE_REF:
2363 case BIT_FIELD_REF:
2364 case OBJ_TYPE_REF:
2365
2366 case REALPART_EXPR:
2367 case IMAGPART_EXPR:
2368 case PREINCREMENT_EXPR:
2369 case PREDECREMENT_EXPR:
2370 case SAVE_EXPR:
2371 case TRY_CATCH_EXPR:
2372 case WITH_CLEANUP_EXPR:
2373 case COMPOUND_EXPR:
2374 case MODIFY_EXPR:
2375 case TARGET_EXPR:
2376 case COND_EXPR:
2377 case BIND_EXPR:
2378 break;
2379
2380 default:
2381 /* Assume the worst for front-end tree codes. */
2382 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2383 break;
2384 return false;
2385 }
2386
2387 return true;
2388 }
2389
2390 /* Return an expr equal to X but certainly not valid as an lvalue. */
2391
2392 tree
2393 non_lvalue_loc (location_t loc, tree x)
2394 {
2395 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2396 us. */
2397 if (in_gimple_form)
2398 return x;
2399
2400 if (! maybe_lvalue_p (x))
2401 return x;
2402 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2403 }
2404
2405 /* When pedantic, return an expr equal to X but certainly not valid as a
2406 pedantic lvalue. Otherwise, return X. */
2407
2408 static tree
2409 pedantic_non_lvalue_loc (location_t loc, tree x)
2410 {
2411 return protected_set_expr_location_unshare (x, loc);
2412 }
2413 \f
2414 /* Given a tree comparison code, return the code that is the logical inverse.
2415 It is generally not safe to do this for floating-point comparisons, except
2416 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2417 ERROR_MARK in this case. */
2418
2419 enum tree_code
2420 invert_tree_comparison (enum tree_code code, bool honor_nans)
2421 {
2422 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2423 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2424 return ERROR_MARK;
2425
2426 switch (code)
2427 {
2428 case EQ_EXPR:
2429 return NE_EXPR;
2430 case NE_EXPR:
2431 return EQ_EXPR;
2432 case GT_EXPR:
2433 return honor_nans ? UNLE_EXPR : LE_EXPR;
2434 case GE_EXPR:
2435 return honor_nans ? UNLT_EXPR : LT_EXPR;
2436 case LT_EXPR:
2437 return honor_nans ? UNGE_EXPR : GE_EXPR;
2438 case LE_EXPR:
2439 return honor_nans ? UNGT_EXPR : GT_EXPR;
2440 case LTGT_EXPR:
2441 return UNEQ_EXPR;
2442 case UNEQ_EXPR:
2443 return LTGT_EXPR;
2444 case UNGT_EXPR:
2445 return LE_EXPR;
2446 case UNGE_EXPR:
2447 return LT_EXPR;
2448 case UNLT_EXPR:
2449 return GE_EXPR;
2450 case UNLE_EXPR:
2451 return GT_EXPR;
2452 case ORDERED_EXPR:
2453 return UNORDERED_EXPR;
2454 case UNORDERED_EXPR:
2455 return ORDERED_EXPR;
2456 default:
2457 gcc_unreachable ();
2458 }
2459 }
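
/* Editorial examples (not in the original source):

     invert_tree_comparison (LT_EXPR, false)  ->  GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ->  UNGE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ->  ERROR_MARK
       when flag_trapping_math is also set, since !(x < y) and
       x unge y differ in whether they can trap on a NaN operand.  */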
2460
2461 /* Similar, but return the comparison that results if the operands are
2462 swapped. This is safe for floating-point. */
2463
2464 enum tree_code
2465 swap_tree_comparison (enum tree_code code)
2466 {
2467 switch (code)
2468 {
2469 case EQ_EXPR:
2470 case NE_EXPR:
2471 case ORDERED_EXPR:
2472 case UNORDERED_EXPR:
2473 case LTGT_EXPR:
2474 case UNEQ_EXPR:
2475 return code;
2476 case GT_EXPR:
2477 return LT_EXPR;
2478 case GE_EXPR:
2479 return LE_EXPR;
2480 case LT_EXPR:
2481 return GT_EXPR;
2482 case LE_EXPR:
2483 return GE_EXPR;
2484 case UNGT_EXPR:
2485 return UNLT_EXPR;
2486 case UNGE_EXPR:
2487 return UNLE_EXPR;
2488 case UNLT_EXPR:
2489 return UNGT_EXPR;
2490 case UNLE_EXPR:
2491 return UNGE_EXPR;
2492 default:
2493 gcc_unreachable ();
2494 }
2495 }
2496
2497
2498 /* Convert a comparison tree code from an enum tree_code representation
2499 into a compcode bit-based encoding. This function is the inverse of
2500 compcode_to_comparison. */
2501
2502 static enum comparison_code
2503 comparison_to_compcode (enum tree_code code)
2504 {
2505 switch (code)
2506 {
2507 case LT_EXPR:
2508 return COMPCODE_LT;
2509 case EQ_EXPR:
2510 return COMPCODE_EQ;
2511 case LE_EXPR:
2512 return COMPCODE_LE;
2513 case GT_EXPR:
2514 return COMPCODE_GT;
2515 case NE_EXPR:
2516 return COMPCODE_NE;
2517 case GE_EXPR:
2518 return COMPCODE_GE;
2519 case ORDERED_EXPR:
2520 return COMPCODE_ORD;
2521 case UNORDERED_EXPR:
2522 return COMPCODE_UNORD;
2523 case UNLT_EXPR:
2524 return COMPCODE_UNLT;
2525 case UNEQ_EXPR:
2526 return COMPCODE_UNEQ;
2527 case UNLE_EXPR:
2528 return COMPCODE_UNLE;
2529 case UNGT_EXPR:
2530 return COMPCODE_UNGT;
2531 case LTGT_EXPR:
2532 return COMPCODE_LTGT;
2533 case UNGE_EXPR:
2534 return COMPCODE_UNGE;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540 /* Convert a compcode bit-based encoding of a comparison operator back
2541 to GCC's enum tree_code representation. This function is the
2542 inverse of comparison_to_compcode. */
2543
2544 static enum tree_code
2545 compcode_to_comparison (enum comparison_code code)
2546 {
2547 switch (code)
2548 {
2549 case COMPCODE_LT:
2550 return LT_EXPR;
2551 case COMPCODE_EQ:
2552 return EQ_EXPR;
2553 case COMPCODE_LE:
2554 return LE_EXPR;
2555 case COMPCODE_GT:
2556 return GT_EXPR;
2557 case COMPCODE_NE:
2558 return NE_EXPR;
2559 case COMPCODE_GE:
2560 return GE_EXPR;
2561 case COMPCODE_ORD:
2562 return ORDERED_EXPR;
2563 case COMPCODE_UNORD:
2564 return UNORDERED_EXPR;
2565 case COMPCODE_UNLT:
2566 return UNLT_EXPR;
2567 case COMPCODE_UNEQ:
2568 return UNEQ_EXPR;
2569 case COMPCODE_UNLE:
2570 return UNLE_EXPR;
2571 case COMPCODE_UNGT:
2572 return UNGT_EXPR;
2573 case COMPCODE_LTGT:
2574 return LTGT_EXPR;
2575 case COMPCODE_UNGE:
2576 return UNGE_EXPR;
2577 default:
2578 gcc_unreachable ();
2579 }
2580 }
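
/* Editorial illustration of the encoding (not in the original
   source): logical combination of comparisons becomes bitwise
   arithmetic.  For (a < b) || (a == b):

     comparison_to_compcode (LT_EXPR)  ==  COMPCODE_LT  (0001)
     comparison_to_compcode (EQ_EXPR)  ==  COMPCODE_EQ  (0010)
     COMPCODE_LT | COMPCODE_EQ         ==  COMPCODE_LE  (0011)

   and compcode_to_comparison (COMPCODE_LE) == LE_EXPR, i.e. a <= b.  */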
2581
2582 /* Return a tree for the comparison which is the combination of
2583 doing the AND or OR (depending on CODE) of the two operations LCODE
2584 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2585 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2586 if this makes the transformation invalid. */
2587
2588 tree
2589 combine_comparisons (location_t loc,
2590 enum tree_code code, enum tree_code lcode,
2591 enum tree_code rcode, tree truth_type,
2592 tree ll_arg, tree lr_arg)
2593 {
2594 bool honor_nans = HONOR_NANS (ll_arg);
2595 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2596 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2597 int compcode;
2598
2599 switch (code)
2600 {
2601 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2602 compcode = lcompcode & rcompcode;
2603 break;
2604
2605 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2606 compcode = lcompcode | rcompcode;
2607 break;
2608
2609 default:
2610 return NULL_TREE;
2611 }
2612
2613 if (!honor_nans)
2614 {
2615 /* Eliminate unordered comparisons, as well as LTGT and ORD
2616 which are not used unless the mode has NaNs. */
2617 compcode &= ~COMPCODE_UNORD;
2618 if (compcode == COMPCODE_LTGT)
2619 compcode = COMPCODE_NE;
2620 else if (compcode == COMPCODE_ORD)
2621 compcode = COMPCODE_TRUE;
2622 }
2623 else if (flag_trapping_math)
2624 {
2625 /* Check that the original operation and the optimized ones will trap
2626 under the same condition. */
2627 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2628 && (lcompcode != COMPCODE_EQ)
2629 && (lcompcode != COMPCODE_ORD);
2630 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2631 && (rcompcode != COMPCODE_EQ)
2632 && (rcompcode != COMPCODE_ORD);
2633 bool trap = (compcode & COMPCODE_UNORD) == 0
2634 && (compcode != COMPCODE_EQ)
2635 && (compcode != COMPCODE_ORD);
2636
2637 /* In a short-circuited boolean expression the LHS might be
2638 such that the RHS, if evaluated, will never trap. For
2639 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2640 if neither x nor y is NaN. (This is a mixed blessing: for
2641 example, the expression above will never trap, hence
2642 optimizing it to x < y would be invalid). */
2643 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2644 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2645 rtrap = false;
2646
2647 /* If the comparison was short-circuited, and only the RHS
2648 trapped, we may now generate a spurious trap. */
2649 if (rtrap && !ltrap
2650 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2651 return NULL_TREE;
2652
2653 /* If we changed the conditions that cause a trap, we lose. */
2654 if ((ltrap || rtrap) != trap)
2655 return NULL_TREE;
2656 }
2657
2658 if (compcode == COMPCODE_TRUE)
2659 return constant_boolean_node (true, truth_type);
2660 else if (compcode == COMPCODE_FALSE)
2661 return constant_boolean_node (false, truth_type);
2662 else
2663 {
2664 enum tree_code tcode;
2665
2666 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2667 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2668 }
2669 }
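
/* Editorial examples (hypothetical operands X and Y, NaNs not
   honored):

     (x < y) && (x == y)  ->  false   COMPCODE_LT & COMPCODE_EQ == 0
     (x < y) || (x == y)  ->  x <= y  COMPCODE_LT | COMPCODE_EQ
                                      == COMPCODE_LE

   When NaNs are honored and flag_trapping_math is set, the fold is
   additionally rejected (NULL_TREE) whenever it would change the set
   of operands that can trap.  */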
2670 \f
2671 /* Return nonzero if two operands (typically of the same tree node)
2672 are necessarily equal. If either argument has side-effects this
2673 function returns zero. FLAGS modifies behavior as follows:
2674
2675 If OEP_ONLY_CONST is set, only return nonzero for constants.
2676 This function tests whether the operands are indistinguishable;
2677 it does not test whether they are equal using C's == operation.
2678 The distinction is important for IEEE floating point, because
2679 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2680 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2681
2682 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2683 even though it may hold multiple values during a function.
2684 This is because a GCC tree node guarantees that nothing else is
2685 executed between the evaluation of its "operands" (which may often
2686 be evaluated in arbitrary order). Hence if the operands themselves
2687 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2688 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2689 unset means assuming isochronic (or instantaneous) tree equivalence.
2690 Unless comparing arbitrary expression trees, such as from different
2691 statements, this flag can usually be left unset.
2692
2693 If OEP_PURE_SAME is set, then pure functions with identical arguments
2694 are considered the same. It is used when the caller has other ways
2695 to ensure that global memory is unchanged in between. */
2696
2697 int
2698 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2699 {
2700 /* If either is ERROR_MARK, they aren't equal. */
2701 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2702 || TREE_TYPE (arg0) == error_mark_node
2703 || TREE_TYPE (arg1) == error_mark_node)
2704 return 0;
2705
2706 /* Similar, if either does not have a type (like a released SSA name),
2707 they aren't equal. */
2708 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2709 return 0;
2710
2711 /* Check equality of integer constants before bailing out due to
2712 precision differences. */
2713 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2714 return tree_int_cst_equal (arg0, arg1);
2715
2716 /* If both types don't have the same signedness, then we can't consider
2717 them equal. We must check this before the STRIP_NOPS calls
2718 because they may change the signedness of the arguments. As pointers
2719 strictly don't have a signedness, require either two pointers or
2720 two non-pointers as well. */
2721 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2722 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2723 return 0;
2724
2725 /* We cannot consider pointers to different address space equal. */
2726 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2727 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2728 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2729 return 0;
2730
2731 /* If both types don't have the same precision, then it is not safe
2732 to strip NOPs. */
2733 if (element_precision (TREE_TYPE (arg0))
2734 != element_precision (TREE_TYPE (arg1)))
2735 return 0;
2736
2737 STRIP_NOPS (arg0);
2738 STRIP_NOPS (arg1);
2739
2740 /* In case both args are comparisons but with different comparison
2741 code, try to swap the comparison operands of one arg to produce
2742 a match and compare that variant. */
2743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2744 && COMPARISON_CLASS_P (arg0)
2745 && COMPARISON_CLASS_P (arg1))
2746 {
2747 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2748
2749 if (TREE_CODE (arg0) == swap_code)
2750 return operand_equal_p (TREE_OPERAND (arg0, 0),
2751 TREE_OPERAND (arg1, 1), flags)
2752 && operand_equal_p (TREE_OPERAND (arg0, 1),
2753 TREE_OPERAND (arg1, 0), flags);
2754 }
2755
2756 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2757 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2758 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2759 return 0;
2760
2761 /* This is needed for conversions and for COMPONENT_REF.
2762 Might as well play it safe and always test this. */
2763 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2764 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2765 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2766 return 0;
2767
2768 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2769 We don't care about side effects in that case because the SAVE_EXPR
2770 takes care of that for us. In all other cases, two expressions are
2771 equal if they have no side effects. If we have two identical
2772 expressions with side effects that should be treated the same due
2773 to the only side effects being identical SAVE_EXPR's, that will
2774 be detected in the recursive calls below.
2775 If we are taking an invariant address of two identical objects
2776 they are necessarily equal as well. */
2777 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2778 && (TREE_CODE (arg0) == SAVE_EXPR
2779 || (flags & OEP_CONSTANT_ADDRESS_OF)
2780 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2781 return 1;
2782
2783 /* Next handle constant cases, those for which we can return 1 even
2784 if ONLY_CONST is set. */
2785 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2786 switch (TREE_CODE (arg0))
2787 {
2788 case INTEGER_CST:
2789 return tree_int_cst_equal (arg0, arg1);
2790
2791 case FIXED_CST:
2792 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2793 TREE_FIXED_CST (arg1));
2794
2795 case REAL_CST:
2796 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2797 TREE_REAL_CST (arg1)))
2798 return 1;
2799
2801 if (!HONOR_SIGNED_ZEROS (arg0))
2802 {
2803 /* If we do not distinguish between signed and unsigned zero,
2804 consider them equal. */
2805 if (real_zerop (arg0) && real_zerop (arg1))
2806 return 1;
2807 }
2808 return 0;
2809
2810 case VECTOR_CST:
2811 {
2812 unsigned i;
2813
2814 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2815 return 0;
2816
2817 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2818 {
2819 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2820 VECTOR_CST_ELT (arg1, i), flags))
2821 return 0;
2822 }
2823 return 1;
2824 }
2825
2826 case COMPLEX_CST:
2827 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2828 flags)
2829 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2830 flags));
2831
2832 case STRING_CST:
2833 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2834 && ! memcmp (TREE_STRING_POINTER (arg0),
2835 TREE_STRING_POINTER (arg1),
2836 TREE_STRING_LENGTH (arg0)));
2837
2838 case ADDR_EXPR:
2839 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2840 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2841 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2842 default:
2843 break;
2844 }
2845
2846 if (flags & OEP_ONLY_CONST)
2847 return 0;
2848
2849 /* Define macros to test an operand from arg0 and arg1 for equality and a
2850 variant that allows null and views null as being different from any
2851 non-null value. In the latter case, if either is null, then both
2852 must be; otherwise, do the normal comparison. */
2853 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2854 TREE_OPERAND (arg1, N), flags)
2855
2856 #define OP_SAME_WITH_NULL(N) \
2857 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2858 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2859
2860 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2861 {
2862 case tcc_unary:
2863 /* Two conversions are equal only if signedness and modes match. */
2864 switch (TREE_CODE (arg0))
2865 {
2866 CASE_CONVERT:
2867 case FIX_TRUNC_EXPR:
2868 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2869 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2870 return 0;
2871 break;
2872 default:
2873 break;
2874 }
2875
2876 return OP_SAME (0);
2877
2879 case tcc_comparison:
2880 case tcc_binary:
2881 if (OP_SAME (0) && OP_SAME (1))
2882 return 1;
2883
2884 /* For commutative ops, allow the other order. */
2885 return (commutative_tree_code (TREE_CODE (arg0))
2886 && operand_equal_p (TREE_OPERAND (arg0, 0),
2887 TREE_OPERAND (arg1, 1), flags)
2888 && operand_equal_p (TREE_OPERAND (arg0, 1),
2889 TREE_OPERAND (arg1, 0), flags));
2890
2891 case tcc_reference:
2892 /* If either of the pointer (or reference) expressions we are
2893 dereferencing contain a side effect, these cannot be equal,
2894 but their addresses can be. */
2895 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2896 && (TREE_SIDE_EFFECTS (arg0)
2897 || TREE_SIDE_EFFECTS (arg1)))
2898 return 0;
2899
2900 switch (TREE_CODE (arg0))
2901 {
2902 case INDIRECT_REF:
2903 if (!(flags & OEP_ADDRESS_OF)
2904 && (TYPE_ALIGN (TREE_TYPE (arg0))
2905 != TYPE_ALIGN (TREE_TYPE (arg1))))
2906 return 0;
2907 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2908 return OP_SAME (0);
2909
2910 case REALPART_EXPR:
2911 case IMAGPART_EXPR:
2912 return OP_SAME (0);
2913
2914 case TARGET_MEM_REF:
2915 case MEM_REF:
2916 /* Require equal access sizes, and similar pointer types.
2917 We can have incomplete types for array references of
2918 variable-sized arrays from the Fortran frontend
2919 though. Also verify the types are compatible. */
2920 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2921 || (TYPE_SIZE (TREE_TYPE (arg0))
2922 && TYPE_SIZE (TREE_TYPE (arg1))
2923 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2924 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2925 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2926 && ((flags & OEP_ADDRESS_OF)
2927 || (alias_ptr_types_compatible_p
2928 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2929 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2930 && (MR_DEPENDENCE_CLIQUE (arg0)
2931 == MR_DEPENDENCE_CLIQUE (arg1))
2932 && (MR_DEPENDENCE_BASE (arg0)
2933 == MR_DEPENDENCE_BASE (arg1))
2934 && (TYPE_ALIGN (TREE_TYPE (arg0))
2935 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2936 return 0;
2937 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2938 return (OP_SAME (0) && OP_SAME (1)
2939 /* TARGET_MEM_REFs require equal extra operands. */
2940 && (TREE_CODE (arg0) != TARGET_MEM_REF
2941 || (OP_SAME_WITH_NULL (2)
2942 && OP_SAME_WITH_NULL (3)
2943 && OP_SAME_WITH_NULL (4))));
2944
2945 case ARRAY_REF:
2946 case ARRAY_RANGE_REF:
2947 /* Operands 2 and 3 may be null.
2948 Compare the array index by value first if it is constant, as we
2949 may have operands of different types but the same value here. */
2950 if (!OP_SAME (0))
2951 return 0;
2952 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2953 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2954 TREE_OPERAND (arg1, 1))
2955 || OP_SAME (1))
2956 && OP_SAME_WITH_NULL (2)
2957 && OP_SAME_WITH_NULL (3));
2958
2959 case COMPONENT_REF:
2960 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2961 may be NULL when we're called to compare MEM_EXPRs. */
2962 if (!OP_SAME_WITH_NULL (0)
2963 || !OP_SAME (1))
2964 return 0;
2965 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2966 return OP_SAME_WITH_NULL (2);
2967
2968 case BIT_FIELD_REF:
2969 if (!OP_SAME (0))
2970 return 0;
2971 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2972 return OP_SAME (1) && OP_SAME (2);
2973
2974 default:
2975 return 0;
2976 }
2977
2978 case tcc_expression:
2979 switch (TREE_CODE (arg0))
2980 {
2981 case ADDR_EXPR:
2982 return operand_equal_p (TREE_OPERAND (arg0, 0),
2983 TREE_OPERAND (arg1, 0),
2984 flags | OEP_ADDRESS_OF);
2985
2986 case TRUTH_NOT_EXPR:
2987 return OP_SAME (0);
2988
2989 case TRUTH_ANDIF_EXPR:
2990 case TRUTH_ORIF_EXPR:
2991 return OP_SAME (0) && OP_SAME (1);
2992
2993 case FMA_EXPR:
2994 case WIDEN_MULT_PLUS_EXPR:
2995 case WIDEN_MULT_MINUS_EXPR:
2996 if (!OP_SAME (2))
2997 return 0;
2998 /* The multiplication operands are commutative. */
2999 /* FALLTHRU */
3000
3001 case TRUTH_AND_EXPR:
3002 case TRUTH_OR_EXPR:
3003 case TRUTH_XOR_EXPR:
3004 if (OP_SAME (0) && OP_SAME (1))
3005 return 1;
3006
3007 /* Otherwise take into account this is a commutative operation. */
3008 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3009 TREE_OPERAND (arg1, 1), flags)
3010 && operand_equal_p (TREE_OPERAND (arg0, 1),
3011 TREE_OPERAND (arg1, 0), flags));
3012
3013 case COND_EXPR:
3014 case VEC_COND_EXPR:
3015 case DOT_PROD_EXPR:
3016 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3017
3018 default:
3019 return 0;
3020 }
3021
3022 case tcc_vl_exp:
3023 switch (TREE_CODE (arg0))
3024 {
3025 case CALL_EXPR:
3026 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3027 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3028 /* Unless both CALL_EXPRs are internal calls or both are normal
3029 function calls, they are not equal. */
3030 return 0;
3031 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3032 {
3033 /* If the CALL_EXPRs call different internal functions, then they
3034 are not equal. */
3035 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3036 return 0;
3037 }
3038 else
3039 {
3040 /* If the CALL_EXPRs call different functions, then they are not
3041 equal. */
3042 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3043 flags))
3044 return 0;
3045 }
3046
3047 {
3048 unsigned int cef = call_expr_flags (arg0);
3049 if (flags & OEP_PURE_SAME)
3050 cef &= ECF_CONST | ECF_PURE;
3051 else
3052 cef &= ECF_CONST;
3053 if (!cef)
3054 return 0;
3055 }
3056
3057 /* Now see if all the arguments are the same. */
3058 {
3059 const_call_expr_arg_iterator iter0, iter1;
3060 const_tree a0, a1;
3061 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3062 a1 = first_const_call_expr_arg (arg1, &iter1);
3063 a0 && a1;
3064 a0 = next_const_call_expr_arg (&iter0),
3065 a1 = next_const_call_expr_arg (&iter1))
3066 if (! operand_equal_p (a0, a1, flags))
3067 return 0;
3068
3069 /* If we get here and both argument lists are exhausted
3070 then the CALL_EXPRs are equal. */
3071 return ! (a0 || a1);
3072 }
3073 default:
3074 return 0;
3075 }
3076
3077 case tcc_declaration:
3078 /* Consider __builtin_sqrt equal to sqrt. */
3079 return (TREE_CODE (arg0) == FUNCTION_DECL
3080 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3081 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3082 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3083
3084 default:
3085 return 0;
3086 }
3087
3088 #undef OP_SAME
3089 #undef OP_SAME_WITH_NULL
3090 }
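
/* Editorial examples of the semantics above (illustrative only):

     operand_equal_p (a + b, b + a, 0)  ->  1  commutativity is used
     operand_equal_p (f (), f (), 0)    ->  0  unless f is ECF_CONST
                                               (or ECF_PURE with
                                               OEP_PURE_SAME)
     operand_equal_p (-0.0, 0.0, 0)     ->  0  when signed zeros are
                                               honored, although the
                                               values compare equal.  */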
3091 \f
3092 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3093 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3094
3095 When in doubt, return 0. */
3096
3097 static int
3098 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3099 {
3100 int unsignedp1, unsignedpo;
3101 tree primarg0, primarg1, primother;
3102 unsigned int correct_width;
3103
3104 if (operand_equal_p (arg0, arg1, 0))
3105 return 1;
3106
3107 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3108 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3109 return 0;
3110
3111 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3112 and see if the inner values are the same. This removes any
3113 signedness comparison, which doesn't matter here. */
3114 primarg0 = arg0, primarg1 = arg1;
3115 STRIP_NOPS (primarg0);
3116 STRIP_NOPS (primarg1);
3117 if (operand_equal_p (primarg0, primarg1, 0))
3118 return 1;
3119
3120 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3121 actual comparison operand, ARG0.
3122
3123 First throw away any conversions to wider types
3124 already present in the operands. */
3125
3126 primarg1 = get_narrower (arg1, &unsignedp1);
3127 primother = get_narrower (other, &unsignedpo);
3128
3129 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3130 if (unsignedp1 == unsignedpo
3131 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3132 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3133 {
3134 tree type = TREE_TYPE (arg0);
3135
3136 /* Make sure the shorter operand is extended the right way
3137 to match the longer operand. */
3138 primarg1 = fold_convert (signed_or_unsigned_type_for
3139 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3140
3141 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3142 return 1;
3143 }
3144
3145 return 0;
3146 }
3147 \f
3148 /* See if ARG is an expression that is either a comparison or is performing
3149 arithmetic on comparisons. The comparisons must only be comparing
3150 two different values, which will be stored in *CVAL1 and *CVAL2; if
3151 they are nonzero it means that some operands have already been found.
3152 No variables may be used anywhere else in the expression except in the
3153 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3154 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3155
3156 If this is true, return 1. Otherwise, return zero. */
3157
3158 static int
3159 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3160 {
3161 enum tree_code code = TREE_CODE (arg);
3162 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3163
3164 /* We can handle some of the tcc_expression cases here. */
3165 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3166 tclass = tcc_unary;
3167 else if (tclass == tcc_expression
3168 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3169 || code == COMPOUND_EXPR))
3170 tclass = tcc_binary;
3171
3172 else if (tclass == tcc_expression && code == SAVE_EXPR
3173 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3174 {
3175 /* If we've already found a CVAL1 or CVAL2, this expression is
3176 too complex to handle. */
3177 if (*cval1 || *cval2)
3178 return 0;
3179
3180 tclass = tcc_unary;
3181 *save_p = 1;
3182 }
3183
3184 switch (tclass)
3185 {
3186 case tcc_unary:
3187 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3188
3189 case tcc_binary:
3190 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3191 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3192 cval1, cval2, save_p));
3193
3194 case tcc_constant:
3195 return 1;
3196
3197 case tcc_expression:
3198 if (code == COND_EXPR)
3199 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3200 cval1, cval2, save_p)
3201 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3202 cval1, cval2, save_p)
3203 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3204 cval1, cval2, save_p));
3205 return 0;
3206
3207 case tcc_comparison:
3208 /* First see if we can handle the first operand, then the second. For
3209 the second operand, we know *CVAL1 can't be zero. It must be that
3210 one side of the comparison is each of the values; test for the
3211 case where this isn't true by failing if the two operands
3212 are the same. */
3213
3214 if (operand_equal_p (TREE_OPERAND (arg, 0),
3215 TREE_OPERAND (arg, 1), 0))
3216 return 0;
3217
3218 if (*cval1 == 0)
3219 *cval1 = TREE_OPERAND (arg, 0);
3220 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3221 ;
3222 else if (*cval2 == 0)
3223 *cval2 = TREE_OPERAND (arg, 0);
3224 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3225 ;
3226 else
3227 return 0;
3228
3229 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3230 ;
3231 else if (*cval2 == 0)
3232 *cval2 = TREE_OPERAND (arg, 1);
3233 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3234 ;
3235 else
3236 return 0;
3237
3238 return 1;
3239
3240 default:
3241 return 0;
3242 }
3243 }
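
/* Editorial example (not in the original source): for
   ARG == (x < y) && (x == y) the walk above records *CVAL1 == x and
   *CVAL2 == y and returns 1; for (x < y) && (z < w) it returns 0,
   since more than two distinct values take part in the comparisons.  */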
3244 \f
3245 /* ARG is a tree that is known to contain just arithmetic operations and
3246 comparisons. Evaluate the operations in the tree substituting NEW0 for
3247 any occurrence of OLD0 as an operand of a comparison and likewise for
3248 NEW1 and OLD1. */
3249
3250 static tree
3251 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3252 tree old1, tree new1)
3253 {
3254 tree type = TREE_TYPE (arg);
3255 enum tree_code code = TREE_CODE (arg);
3256 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3257
3258 /* We can handle some of the tcc_expression cases here. */
3259 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3260 tclass = tcc_unary;
3261 else if (tclass == tcc_expression
3262 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3263 tclass = tcc_binary;
3264
3265 switch (tclass)
3266 {
3267 case tcc_unary:
3268 return fold_build1_loc (loc, code, type,
3269 eval_subst (loc, TREE_OPERAND (arg, 0),
3270 old0, new0, old1, new1));
3271
3272 case tcc_binary:
3273 return fold_build2_loc (loc, code, type,
3274 eval_subst (loc, TREE_OPERAND (arg, 0),
3275 old0, new0, old1, new1),
3276 eval_subst (loc, TREE_OPERAND (arg, 1),
3277 old0, new0, old1, new1));
3278
3279 case tcc_expression:
3280 switch (code)
3281 {
3282 case SAVE_EXPR:
3283 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3284 old1, new1);
3285
3286 case COMPOUND_EXPR:
3287 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3288 old1, new1);
3289
3290 case COND_EXPR:
3291 return fold_build3_loc (loc, code, type,
3292 eval_subst (loc, TREE_OPERAND (arg, 0),
3293 old0, new0, old1, new1),
3294 eval_subst (loc, TREE_OPERAND (arg, 1),
3295 old0, new0, old1, new1),
3296 eval_subst (loc, TREE_OPERAND (arg, 2),
3297 old0, new0, old1, new1));
3298 default:
3299 break;
3300 }
3301 /* Fall through - ??? */
3302
3303 case tcc_comparison:
3304 {
3305 tree arg0 = TREE_OPERAND (arg, 0);
3306 tree arg1 = TREE_OPERAND (arg, 1);
3307
3308 /* We need to check both for exact equality and tree equality. The
3309 former will be true if the operand has a side-effect. In that
3310 case, we know the operand occurred exactly once. */
3311
3312 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3313 arg0 = new0;
3314 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3315 arg0 = new1;
3316
3317 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3318 arg1 = new0;
3319 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3320 arg1 = new1;
3321
3322 return fold_build2_loc (loc, code, type, arg0, arg1);
3323 }
3324
3325 default:
3326 return arg;
3327 }
3328 }
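
/* Editorial example (hypothetical operands): with
   ARG == (x < y) || (y == z), OLD0/NEW0 == x/a and OLD1/NEW1 == y/b,
   the rebuild above yields (a < b) || (b == z).  */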
3329 \f
3330 /* Return a tree for the case when the result of an expression is RESULT
3331 converted to TYPE and OMITTED was previously an operand of the expression
3332 but is now not needed (e.g., we folded OMITTED * 0).
3333
3334 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3335 the conversion of RESULT to TYPE. */
3336
3337 tree
3338 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3339 {
3340 tree t = fold_convert_loc (loc, type, result);
3341
3342 /* If the resulting operand is an empty statement, just return the omitted
3343 statement cast to void. */
3344 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3345 return build1_loc (loc, NOP_EXPR, void_type_node,
3346 fold_ignored_result (omitted));
3347
3348 if (TREE_SIDE_EFFECTS (omitted))
3349 return build2_loc (loc, COMPOUND_EXPR, type,
3350 fold_ignored_result (omitted), t);
3351
3352 return non_lvalue_loc (loc, t);
3353 }
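
/* Editorial example (not in the original source): when folding
   x * 0 and X has side effects, a caller can use

     omit_one_operand_loc (loc, type, integer_zero_node, x)

   which builds the COMPOUND_EXPR (x, 0): X is still evaluated for
   its side effects, but the value of the expression is 0.  */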
3354
3355 /* Return a tree for the case when the result of an expression is RESULT
3356 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3357 of the expression but are now not needed.
3358
3359 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3360 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3361 evaluated before OMITTED2. Otherwise, if neither has side effects,
3362 just do the conversion of RESULT to TYPE. */
3363
3364 tree
3365 omit_two_operands_loc (location_t loc, tree type, tree result,
3366 tree omitted1, tree omitted2)
3367 {
3368 tree t = fold_convert_loc (loc, type, result);
3369
3370 if (TREE_SIDE_EFFECTS (omitted2))
3371 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3372 if (TREE_SIDE_EFFECTS (omitted1))
3373 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3374
3375 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3376 }
3377
3378 \f
3379 /* Return a simplified tree node for the truth-negation of ARG. This
3380 never alters ARG itself. We assume that ARG is an operation that
3381 returns a truth value (0 or 1).
3382
3383 FIXME: one would think we would fold the result, but it causes
3384 problems with the dominator optimizer. */
3385
3386 static tree
3387 fold_truth_not_expr (location_t loc, tree arg)
3388 {
3389 tree type = TREE_TYPE (arg);
3390 enum tree_code code = TREE_CODE (arg);
3391 location_t loc1, loc2;
3392
3393 /* If this is a comparison, we can simply invert it, except for
3394 floating-point non-equality comparisons, in which case we just
3395 enclose a TRUTH_NOT_EXPR around what we have. */
3396
3397 if (TREE_CODE_CLASS (code) == tcc_comparison)
3398 {
3399 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3400 if (FLOAT_TYPE_P (op_type)
3401 && flag_trapping_math
3402 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3403 && code != NE_EXPR && code != EQ_EXPR)
3404 return NULL_TREE;
3405
3406 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3407 if (code == ERROR_MARK)
3408 return NULL_TREE;
3409
3410 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3411 TREE_OPERAND (arg, 1));
3412 }
3413
3414 switch (code)
3415 {
3416 case INTEGER_CST:
3417 return constant_boolean_node (integer_zerop (arg), type);
3418
3419 case TRUTH_AND_EXPR:
3420 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3421 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3422 return build2_loc (loc, TRUTH_OR_EXPR, type,
3423 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3424 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3425
3426 case TRUTH_OR_EXPR:
3427 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3428 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3429 return build2_loc (loc, TRUTH_AND_EXPR, type,
3430 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3431 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3432
3433 case TRUTH_XOR_EXPR:
3434 /* Here we can invert either operand. We invert the first operand
3435 unless the second operand is a TRUTH_NOT_EXPR in which case our
3436 result is the XOR of the first operand with the inside of the
3437 negation of the second operand. */
3438
3439 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3440 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3441 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3442 else
3443 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3444 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3445 TREE_OPERAND (arg, 1));
3446
3447 case TRUTH_ANDIF_EXPR:
3448 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3449 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3450 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3451 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3452 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3453
3454 case TRUTH_ORIF_EXPR:
3455 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3456 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3457 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3458 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3459 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3460
3461 case TRUTH_NOT_EXPR:
3462 return TREE_OPERAND (arg, 0);
3463
3464 case COND_EXPR:
3465 {
3466 tree arg1 = TREE_OPERAND (arg, 1);
3467 tree arg2 = TREE_OPERAND (arg, 2);
3468
3469 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3470 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3471
3472 /* A COND_EXPR may have a throw as one operand, which
3473 then has void type. Just leave void operands
3474 as they are. */
3475 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3476 VOID_TYPE_P (TREE_TYPE (arg1))
3477 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3478 VOID_TYPE_P (TREE_TYPE (arg2))
3479 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3480 }
3481
3482 case COMPOUND_EXPR:
3483 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3484 return build2_loc (loc, COMPOUND_EXPR, type,
3485 TREE_OPERAND (arg, 0),
3486 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3487
3488 case NON_LVALUE_EXPR:
3489 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3490 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3491
3492 CASE_CONVERT:
3493 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3494 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3495
3496 /* ... fall through ... */
3497
3498 case FLOAT_EXPR:
3499 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3500 return build1_loc (loc, TREE_CODE (arg), type,
3501 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3502
3503 case BIT_AND_EXPR:
3504 if (!integer_onep (TREE_OPERAND (arg, 1)))
3505 return NULL_TREE;
3506 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3507
3508 case SAVE_EXPR:
3509 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3510
3511 case CLEANUP_POINT_EXPR:
3512 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3513 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3514 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3515
3516 default:
3517 return NULL_TREE;
3518 }
3519 }
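
/* Editorial examples of the negations above (hypothetical operands):

     !(a && b)  ->  !a || !b    De Morgan via TRUTH_AND_EXPR
     !(a < b)   ->  a >= b      integral operands
     !(a < b)   ->  NULL_TREE   floating point with flag_trapping_math
     !(!a)      ->  a           TRUTH_NOT_EXPR case  */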
3520
3521 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3522 assume that ARG is an operation that returns a truth value (0 or 1
3523 for scalars, 0 or -1 for vectors). Return the folded expression if
3524 folding is successful. Otherwise, return NULL_TREE. */
3525
3526 static tree
3527 fold_invert_truthvalue (location_t loc, tree arg)
3528 {
3529 tree type = TREE_TYPE (arg);
3530 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3531 ? BIT_NOT_EXPR
3532 : TRUTH_NOT_EXPR,
3533 type, arg);
3534 }
3535
3536 /* Return a simplified tree node for the truth-negation of ARG. This
3537 never alters ARG itself. We assume that ARG is an operation that
3538 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3539
3540 tree
3541 invert_truthvalue_loc (location_t loc, tree arg)
3542 {
3543 if (TREE_CODE (arg) == ERROR_MARK)
3544 return arg;
3545
3546 tree type = TREE_TYPE (arg);
3547 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3548 ? BIT_NOT_EXPR
3549 : TRUTH_NOT_EXPR,
3550 type, arg);
3551 }
3552
3553 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3554 operands are another bit-wise operation with a common input. If so,
3555 distribute the bit operations to save an operation and possibly two if
3556 constants are involved. For example, convert
3557 (A | B) & (A | C) into A | (B & C)
3558 Further simplification will occur if B and C are constants.
3559
3560 If this optimization cannot be done, 0 will be returned. */
3561
3562 static tree
3563 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3564 tree arg0, tree arg1)
3565 {
3566 tree common;
3567 tree left, right;
3568
3569 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3570 || TREE_CODE (arg0) == code
3571 || (TREE_CODE (arg0) != BIT_AND_EXPR
3572 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3573 return 0;
3574
3575 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3576 {
3577 common = TREE_OPERAND (arg0, 0);
3578 left = TREE_OPERAND (arg0, 1);
3579 right = TREE_OPERAND (arg1, 1);
3580 }
3581 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3582 {
3583 common = TREE_OPERAND (arg0, 0);
3584 left = TREE_OPERAND (arg0, 1);
3585 right = TREE_OPERAND (arg1, 0);
3586 }
3587 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3588 {
3589 common = TREE_OPERAND (arg0, 1);
3590 left = TREE_OPERAND (arg0, 0);
3591 right = TREE_OPERAND (arg1, 1);
3592 }
3593 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3594 {
3595 common = TREE_OPERAND (arg0, 1);
3596 left = TREE_OPERAND (arg0, 0);
3597 right = TREE_OPERAND (arg1, 0);
3598 }
3599 else
3600 return 0;
3601
3602 common = fold_convert_loc (loc, type, common);
3603 left = fold_convert_loc (loc, type, left);
3604 right = fold_convert_loc (loc, type, right);
3605 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3606 fold_build2_loc (loc, code, type, left, right));
3607 }
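
/* Editorial worked example: for (a | 0xf0) & (a | 0x0f) the code
   above finds COMMON == a, LEFT == 0xf0, RIGHT == 0x0f and builds
   a | (0xf0 & 0x0f); folding the constant operand then yields
   a | 0, i.e. a.  */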
3608
3609 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3610 with code CODE. This optimization is unsafe under strict IEEE semantics. */
3611 static tree
3612 distribute_real_division (location_t loc, enum tree_code code, tree type,
3613 tree arg0, tree arg1)
3614 {
3615 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3616 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3617
3618 /* (A / C) +- (B / C) -> (A +- B) / C. */
3619 if (mul0 == mul1
3620 && operand_equal_p (TREE_OPERAND (arg0, 1),
3621 TREE_OPERAND (arg1, 1), 0))
3622 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3623 fold_build2_loc (loc, code, type,
3624 TREE_OPERAND (arg0, 0),
3625 TREE_OPERAND (arg1, 0)),
3626 TREE_OPERAND (arg0, 1));
3627
3628 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3629 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3630 TREE_OPERAND (arg1, 0), 0)
3631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3632 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3633 {
3634 REAL_VALUE_TYPE r0, r1;
3635 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3636 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3637 if (!mul0)
3638 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3639 if (!mul1)
3640 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3641 real_arithmetic (&r0, code, &r0, &r1);
3642 return fold_build2_loc (loc, MULT_EXPR, type,
3643 TREE_OPERAND (arg0, 0),
3644 build_real (type, r0));
3645 }
3646
3647 return NULL_TREE;
3648 }
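
/* Editorial examples (valid only under unsafe math assumptions):

     (a / 4.0) + (b / 4.0)  ->  (a + b) / 4.0
     (a / 2.0) - (a / 4.0)  ->  a * (1.0/2.0 - 1.0/4.0) == a * 0.25  */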
3649 \f
3650 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3651 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3652
3653 static tree
3654 make_bit_field_ref (location_t loc, tree inner, tree type,
3655 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3656 {
3657 tree result, bftype;
3658
3659 if (bitpos == 0)
3660 {
3661 tree size = TYPE_SIZE (TREE_TYPE (inner));
3662 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3663 || POINTER_TYPE_P (TREE_TYPE (inner)))
3664 && tree_fits_shwi_p (size)
3665 && tree_to_shwi (size) == bitsize)
3666 return fold_convert_loc (loc, type, inner);
3667 }
3668
3669 bftype = type;
3670 if (TYPE_PRECISION (bftype) != bitsize
3671 || TYPE_UNSIGNED (bftype) == !unsignedp)
3672 bftype = build_nonstandard_integer_type (bitsize, 0);
3673
3674 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3675 size_int (bitsize), bitsize_int (bitpos));
3676
3677 if (bftype != type)
3678 result = fold_convert_loc (loc, type, result);
3679
3680 return result;
3681 }
3682
3683 /* Optimize a bit-field compare.
3684
3685 There are two cases: First is a compare against a constant and the
3686 second is a comparison of two items where the fields are at the same
3687 bit position relative to the start of a chunk (byte, halfword, word)
3688 large enough to contain it. In these cases we can avoid the shift
3689 implicit in bitfield extractions.
3690
3691 For constants, we emit a compare of the shifted constant with the
3692 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3693 compared. For two fields at the same position, we do the ANDs with the
3694 similar mask and compare the result of the ANDs.
3695
3696 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3697 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3698 are the left and right operands of the comparison, respectively.
3699
3700 If the optimization described above can be done, we return the resulting
3701 tree. Otherwise we return zero. */
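/* For example (illustrative): given struct s { unsigned f : 3; } x, the
   test x.f == 5 can be folded into a form like (w & mask) == (5 << shift),
   where w is a mode-sized load covering the bit-field, avoiding the usual
   extract-and-shift sequence. */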
3702
3703 static tree
3704 optimize_bit_field_compare (location_t loc, enum tree_code code,
3705 tree compare_type, tree lhs, tree rhs)
3706 {
3707 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3708 tree type = TREE_TYPE (lhs);
3709 tree unsigned_type;
3710 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3711 machine_mode lmode, rmode, nmode;
3712 int lunsignedp, runsignedp;
3713 int lvolatilep = 0, rvolatilep = 0;
3714 tree linner, rinner = NULL_TREE;
3715 tree mask;
3716 tree offset;
3717
3718 /* Get all the information about the extractions being done. If the bit size
3719 is the same as the size of the underlying object, we aren't doing an
3720 extraction at all and so can do nothing. We also don't want to
3721 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3722 then will no longer be able to replace it. */
3723 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3724 &lunsignedp, &lvolatilep, false);
3725 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3726 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3727 return 0;
3728
3729 if (!const_p)
3730 {
3731 /* If this is not a constant, we can only do something if bit positions,
3732 sizes, and signedness are the same. */
3733 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3734 &runsignedp, &rvolatilep, false);
3735
3736 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3737 || lunsignedp != runsignedp || offset != 0
3738 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3739 return 0;
3740 }
3741
3742 /* See if we can find a mode to refer to this field. We should be able to,
3743 but fail if we can't. */
3744 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3745 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3746 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3747 TYPE_ALIGN (TREE_TYPE (rinner))),
3748 word_mode, false);
3749 if (nmode == VOIDmode)
3750 return 0;
3751
3752 /* Get an unsigned type of the precision of this mode for the
3753 shifts below. */
3754 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3755
3756 /* Compute the bit position and size for the new reference and our offset
3757 within it. If the new reference is the same size as the original, we
3758 won't optimize anything, so return zero. */
3759 nbitsize = GET_MODE_BITSIZE (nmode);
3760 nbitpos = lbitpos & ~ (nbitsize - 1);
3761 lbitpos -= nbitpos;
3762 if (nbitsize == lbitsize)
3763 return 0;
3764
3765 if (BYTES_BIG_ENDIAN)
3766 lbitpos = nbitsize - lbitsize - lbitpos;
3767
3768 /* Make the mask to be used against the extracted field. */
3769 mask = build_int_cst_type (unsigned_type, -1);
3770 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3771 mask = const_binop (RSHIFT_EXPR, mask,
3772 size_int (nbitsize - lbitsize - lbitpos));
3773
3774 if (! const_p)
3775 /* If not comparing with constant, just rework the comparison
3776 and return. */
3777 return fold_build2_loc (loc, code, compare_type,
3778 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3779 make_bit_field_ref (loc, linner,
3780 unsigned_type,
3781 nbitsize, nbitpos,
3782 1),
3783 mask),
3784 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3785 make_bit_field_ref (loc, rinner,
3786 unsigned_type,
3787 nbitsize, nbitpos,
3788 1),
3789 mask));
3790
3791 /* Otherwise, we are handling the constant case. See if the constant is too
3792 big for the field. Warn and return a tree for 0 (false) if so. We do
3793 this not only for its own sake, but to avoid having to test for this
3794 error case below. If we didn't, we might generate wrong code.
3795
3796 For unsigned fields, the constant shifted right by the field length should
3797 be all zero. For signed fields, the high-order bits should agree with
3798 the sign bit. */
3799
3800 if (lunsignedp)
3801 {
3802 if (wi::lrshift (rhs, lbitsize) != 0)
3803 {
3804 warning (0, "comparison is always %d due to width of bit-field",
3805 code == NE_EXPR);
3806 return constant_boolean_node (code == NE_EXPR, compare_type);
3807 }
3808 }
3809 else
3810 {
3811 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3812 if (tem != 0 && tem != -1)
3813 {
3814 warning (0, "comparison is always %d due to width of bit-field",
3815 code == NE_EXPR);
3816 return constant_boolean_node (code == NE_EXPR, compare_type);
3817 }
3818 }
3819
3820 /* Single-bit compares should always be against zero. */
3821 if (lbitsize == 1 && ! integer_zerop (rhs))
3822 {
3823 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3824 rhs = build_int_cst (type, 0);
3825 }
3826
3827 /* Make a new bitfield reference, shift the constant over the
3828 appropriate number of bits and mask it with the computed mask
3829 (in case this was a signed field). */
3830 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3831
3832 rhs = const_binop (BIT_AND_EXPR,
3833 const_binop (LSHIFT_EXPR,
3834 fold_convert_loc (loc, unsigned_type, rhs),
3835 size_int (lbitpos)),
3836 mask);
3837
3838 lhs = build2_loc (loc, code, compare_type,
3839 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3840 return lhs;
3841 }
3842 \f
3843 /* Subroutine for fold_truth_andor_1: decode a field reference.
3844
3845 If EXP is a comparison reference, we return the innermost reference.
3846
3847 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3848 set to the starting bit number.
3849
3850 If the innermost field can be completely contained in a mode-sized
3851 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3852
3853 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3854 otherwise it is not changed.
3855
3856 *PUNSIGNEDP is set to the signedness of the field.
3857
3858 *PMASK is set to the mask used. This is either contained in a
3859 BIT_AND_EXPR or derived from the width of the field.
3860
3861 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3862
3863 Return 0 if this is not a component reference or is one that we can't
3864 do anything with. */
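/* For example (illustrative): for EXP == (x.f & 5), where x.f is an 8-bit
   field, the BIT_AND_EXPR is stripped, *PBITSIZE is set to 8, *PAND_MASK
   to 5, and *PMASK to 0xff & 5, i.e. 5. */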
3865
3866 static tree
3867 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3868 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3869 int *punsignedp, int *pvolatilep,
3870 tree *pmask, tree *pand_mask)
3871 {
3872 tree outer_type = 0;
3873 tree and_mask = 0;
3874 tree mask, inner, offset;
3875 tree unsigned_type;
3876 unsigned int precision;
3877
3878 /* All the optimizations using this function assume integer fields.
3879 There are problems with FP fields since the type_for_size call
3880 below can fail for, e.g., XFmode. */
3881 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3882 return 0;
3883
3884 /* We are interested in the bare arrangement of bits, so strip everything
3885 that doesn't affect the machine mode. However, record the type of the
3886 outermost expression if it may matter below. */
3887 if (CONVERT_EXPR_P (exp)
3888 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3889 outer_type = TREE_TYPE (exp);
3890 STRIP_NOPS (exp);
3891
3892 if (TREE_CODE (exp) == BIT_AND_EXPR)
3893 {
3894 and_mask = TREE_OPERAND (exp, 1);
3895 exp = TREE_OPERAND (exp, 0);
3896 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3897 if (TREE_CODE (and_mask) != INTEGER_CST)
3898 return 0;
3899 }
3900
3901 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3902 punsignedp, pvolatilep, false);
3903 if ((inner == exp && and_mask == 0)
3904 || *pbitsize < 0 || offset != 0
3905 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3906 return 0;
3907
3908 /* If the number of bits in the reference is the same as the bitsize of
3909 the outer type, then the outer type gives the signedness. Otherwise
3910 (in case of a small bitfield) the signedness is unchanged. */
3911 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3912 *punsignedp = TYPE_UNSIGNED (outer_type);
3913
3914 /* Compute the mask to access the bitfield. */
3915 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3916 precision = TYPE_PRECISION (unsigned_type);
3917
3918 mask = build_int_cst_type (unsigned_type, -1);
3919
3920 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3921 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3922
3923 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3924 if (and_mask != 0)
3925 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3926 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3927
3928 *pmask = mask;
3929 *pand_mask = and_mask;
3930 return inner;
3931 }
3932
3933 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3934 bit positions and the type of MASK is signed. */
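/* E.g. (illustrative) a MASK with value 7 in a signed 8-bit type satisfies
   all_ones_mask_p (mask, 3); the same value in an unsigned type does not,
   per the restriction described in the function body. */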
3935
3936 static int
3937 all_ones_mask_p (const_tree mask, unsigned int size)
3938 {
3939 tree type = TREE_TYPE (mask);
3940 unsigned int precision = TYPE_PRECISION (type);
3941
3942 /* If this function returns true when the type of the mask is
3943 UNSIGNED, then there will be errors. In particular see
3944 gcc.c-torture/execute/990326-1.c. There does not appear to be
3945 any documentation paper trail as to why this is so. But the pre
3946 wide-int worked with that restriction and it has been preserved
3947 here. */
3948 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3949 return false;
3950
3951 return wi::mask (size, false, precision) == mask;
3952 }
3953
3954 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3955 represents the sign bit of EXP's type. If EXP represents a sign
3956 or zero extension, also test VAL against the unextended type.
3957 The return value is the (sub)expression whose sign bit is VAL,
3958 or NULL_TREE otherwise. */
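/* For example (illustrative): assuming a 32-bit int, sign_bit_p returns
   EXP when VAL is INT_MIN (only bit 31 set), and it looks through a
   widening NOP_EXPR to test VAL against the narrower type as well. */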
3959
3960 tree
3961 sign_bit_p (tree exp, const_tree val)
3962 {
3963 int width;
3964 tree t;
3965
3966 /* Tree EXP must have an integral type. */
3967 t = TREE_TYPE (exp);
3968 if (! INTEGRAL_TYPE_P (t))
3969 return NULL_TREE;
3970
3971 /* Tree VAL must be an integer constant. */
3972 if (TREE_CODE (val) != INTEGER_CST
3973 || TREE_OVERFLOW (val))
3974 return NULL_TREE;
3975
3976 width = TYPE_PRECISION (t);
3977 if (wi::only_sign_bit_p (val, width))
3978 return exp;
3979
3980 /* Handle extension from a narrower type. */
3981 if (TREE_CODE (exp) == NOP_EXPR
3982 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3983 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3984
3985 return NULL_TREE;
3986 }
3987
3988 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3989 to be evaluated unconditionally. */
3990
3991 static int
3992 simple_operand_p (const_tree exp)
3993 {
3994 /* Strip any conversions that don't change the machine mode. */
3995 STRIP_NOPS (exp);
3996
3997 return (CONSTANT_CLASS_P (exp)
3998 || TREE_CODE (exp) == SSA_NAME
3999 || (DECL_P (exp)
4000 && ! TREE_ADDRESSABLE (exp)
4001 && ! TREE_THIS_VOLATILE (exp)
4002 && ! DECL_NONLOCAL (exp)
4003 /* Don't regard global variables as simple. They may be
4004 allocated in ways unknown to the compiler (shared memory,
4005 #pragma weak, etc). */
4006 && ! TREE_PUBLIC (exp)
4007 && ! DECL_EXTERNAL (exp)
4008 /* Weakrefs are not safe to be read, since they can be NULL.
4009 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4010 have DECL_WEAK flag set. */
4011 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4012 /* Loading a static variable is unduly expensive, but global
4013 registers aren't expensive. */
4014 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4015 }
4016
4017 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4018 to be evaluated unconditionally.
4019 In addition to simple_operand_p, we assume that comparisons, conversions,
4020 and logic-not operations are simple, if their operands are simple, too. */
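/* E.g. (illustrative) x == 0 is simple when x is a local, non-volatile,
   non-static variable, whereas *p == 0 is not, since *p is not itself a
   simple operand. */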
4021
4022 static bool
4023 simple_operand_p_2 (tree exp)
4024 {
4025 enum tree_code code;
4026
4027 if (TREE_SIDE_EFFECTS (exp)
4028 || tree_could_trap_p (exp))
4029 return false;
4030
4031 while (CONVERT_EXPR_P (exp))
4032 exp = TREE_OPERAND (exp, 0);
4033
4034 code = TREE_CODE (exp);
4035
4036 if (TREE_CODE_CLASS (code) == tcc_comparison)
4037 return (simple_operand_p (TREE_OPERAND (exp, 0))
4038 && simple_operand_p (TREE_OPERAND (exp, 1)));
4039
4040 if (code == TRUTH_NOT_EXPR)
4041 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4042
4043 return simple_operand_p (exp);
4044 }
4045
4046 \f
4047 /* The following functions are subroutines to fold_range_test and allow it to
4048 try to change a logical combination of comparisons into a range test.
4049
4050 For example, both
4051 X == 2 || X == 3 || X == 4 || X == 5
4052 and
4053 X >= 2 && X <= 5
4054 are converted to
4055 (unsigned) (X - 2) <= 3
4056
4057 We describe each set of comparisons as being either inside or outside
4058 a range, using a variable named like IN_P, and then describe the
4059 range with a lower and upper bound. If one of the bounds is omitted,
4060 it represents either the highest or lowest value of the type.
4061
4062 In the comments below, we represent a range by two numbers in brackets
4063 preceded by a "+" to designate being inside that range, or a "-" to
4064 designate being outside that range, so the condition can be inverted by
4065 flipping the prefix. An omitted bound is represented by a "-". For
4066 example, "- [-, 10]" means being outside the range starting at the lowest
4067 possible value and ending at 10, in other words, being greater than 10.
4068 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4069 always false.
4070
4071 We set up things so that the missing bounds are handled in a consistent
4072 manner so neither a missing bound nor "true" and "false" need to be
4073 handled using a special case. */
4074
4075 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4076 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4077 and UPPER1_P are nonzero if the respective argument is an upper bound
4078 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4079 must be specified for a comparison. ARG1 will be converted to ARG0's
4080 type if both are specified. */
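/* E.g. (illustrative) an omitted ARG0 that is an upper bound behaves as a
   value above every representable number, so range_binop (LT_EXPR, type,
   NULL_TREE, 1, arg1, 0) yields false for any finite ARG1. */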
4081
4082 static tree
4083 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4084 tree arg1, int upper1_p)
4085 {
4086 tree tem;
4087 int result;
4088 int sgn0, sgn1;
4089
4090 /* If neither arg represents infinity, do the normal operation.
4091 Else, if not a comparison, return infinity. Else handle the special
4092 comparison rules. Note that most of the cases below won't occur, but
4093 are handled for consistency. */
4094
4095 if (arg0 != 0 && arg1 != 0)
4096 {
4097 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4098 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4099 STRIP_NOPS (tem);
4100 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4101 }
4102
4103 if (TREE_CODE_CLASS (code) != tcc_comparison)
4104 return 0;
4105
4106 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4107 for neither. In real mathematics, we cannot assume open-ended ranges
4108 are the same. But this is computer arithmetic, where numbers are finite.
4109 We can therefore model each missing bound as a value Z greater than
4110 any representable number, which permits us to treat unbounded ranges
4111 as equal. */
4112 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4113 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4114 switch (code)
4115 {
4116 case EQ_EXPR:
4117 result = sgn0 == sgn1;
4118 break;
4119 case NE_EXPR:
4120 result = sgn0 != sgn1;
4121 break;
4122 case LT_EXPR:
4123 result = sgn0 < sgn1;
4124 break;
4125 case LE_EXPR:
4126 result = sgn0 <= sgn1;
4127 break;
4128 case GT_EXPR:
4129 result = sgn0 > sgn1;
4130 break;
4131 case GE_EXPR:
4132 result = sgn0 >= sgn1;
4133 break;
4134 default:
4135 gcc_unreachable ();
4136 }
4137
4138 return constant_boolean_node (result, type);
4139 }
4140 \f
4141 /* Helper routine for make_range. Perform one step for it, return
4142 new expression if the loop should continue or NULL_TREE if it should
4143 stop. */
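/* For example (illustrative): starting from the initial range "- [0, 0]"
   (i.e. EXP != 0), a step with CODE == GT_EXPR and ARG1 == 4 on an
   unsigned ARG0 refines this to "- [0, 4]", i.e. ARG0 > 4. */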
4144
4145 tree
4146 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4147 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4148 bool *strict_overflow_p)
4149 {
4150 tree arg0_type = TREE_TYPE (arg0);
4151 tree n_low, n_high, low = *p_low, high = *p_high;
4152 int in_p = *p_in_p, n_in_p;
4153
4154 switch (code)
4155 {
4156 case TRUTH_NOT_EXPR:
4157 /* We can only do something if the range is testing for zero. */
4158 if (low == NULL_TREE || high == NULL_TREE
4159 || ! integer_zerop (low) || ! integer_zerop (high))
4160 return NULL_TREE;
4161 *p_in_p = ! in_p;
4162 return arg0;
4163
4164 case EQ_EXPR: case NE_EXPR:
4165 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4166 /* We can only do something if the range is testing for zero
4167 and if the second operand is an integer constant. Note that
4168 saying something is "in" the range we make is done by
4169 complementing IN_P since it will set in the initial case of
4170 being not equal to zero; "out" is leaving it alone. */
4171 if (low == NULL_TREE || high == NULL_TREE
4172 || ! integer_zerop (low) || ! integer_zerop (high)
4173 || TREE_CODE (arg1) != INTEGER_CST)
4174 return NULL_TREE;
4175
4176 switch (code)
4177 {
4178 case NE_EXPR: /* - [c, c] */
4179 low = high = arg1;
4180 break;
4181 case EQ_EXPR: /* + [c, c] */
4182 in_p = ! in_p, low = high = arg1;
4183 break;
4184 case GT_EXPR: /* - [-, c] */
4185 low = 0, high = arg1;
4186 break;
4187 case GE_EXPR: /* + [c, -] */
4188 in_p = ! in_p, low = arg1, high = 0;
4189 break;
4190 case LT_EXPR: /* - [c, -] */
4191 low = arg1, high = 0;
4192 break;
4193 case LE_EXPR: /* + [-, c] */
4194 in_p = ! in_p, low = 0, high = arg1;
4195 break;
4196 default:
4197 gcc_unreachable ();
4198 }
4199
4200 /* If this is an unsigned comparison, we also know that EXP is
4201 greater than or equal to zero. We base the range tests we make
4202 on that fact, so we record it here so we can parse existing
4203 range tests. We test arg0_type since often the return type
4204 of, e.g. EQ_EXPR, is boolean. */
4205 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4206 {
4207 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4208 in_p, low, high, 1,
4209 build_int_cst (arg0_type, 0),
4210 NULL_TREE))
4211 return NULL_TREE;
4212
4213 in_p = n_in_p, low = n_low, high = n_high;
4214
4215 /* If the high bound is missing, but we have a nonzero low
4216 bound, reverse the range so it goes from zero to the low bound
4217 minus 1. */
4218 if (high == 0 && low && ! integer_zerop (low))
4219 {
4220 in_p = ! in_p;
4221 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4222 build_int_cst (TREE_TYPE (low), 1), 0);
4223 low = build_int_cst (arg0_type, 0);
4224 }
4225 }
4226
4227 *p_low = low;
4228 *p_high = high;
4229 *p_in_p = in_p;
4230 return arg0;
4231
4232 case NEGATE_EXPR:
4233 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4234 low and high are non-NULL, then normalize will DTRT. */
4235 if (!TYPE_UNSIGNED (arg0_type)
4236 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4237 {
4238 if (low == NULL_TREE)
4239 low = TYPE_MIN_VALUE (arg0_type);
4240 if (high == NULL_TREE)
4241 high = TYPE_MAX_VALUE (arg0_type);
4242 }
4243
4244 /* (-x) IN [a,b] -> x in [-b, -a] */
4245 n_low = range_binop (MINUS_EXPR, exp_type,
4246 build_int_cst (exp_type, 0),
4247 0, high, 1);
4248 n_high = range_binop (MINUS_EXPR, exp_type,
4249 build_int_cst (exp_type, 0),
4250 0, low, 0);
4251 if (n_high != 0 && TREE_OVERFLOW (n_high))
4252 return NULL_TREE;
4253 goto normalize;
4254
4255 case BIT_NOT_EXPR:
4256 /* ~ X -> -X - 1 */
4257 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4258 build_int_cst (exp_type, 1));
4259
4260 case PLUS_EXPR:
4261 case MINUS_EXPR:
4262 if (TREE_CODE (arg1) != INTEGER_CST)
4263 return NULL_TREE;
4264
4265 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4266 move a constant to the other side. */
4267 if (!TYPE_UNSIGNED (arg0_type)
4268 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4269 return NULL_TREE;
4270
4271 /* If EXP is signed, any overflow in the computation is undefined,
4272 so we don't worry about it so long as our computations on
4273 the bounds don't overflow. For unsigned, overflow is defined
4274 and this is exactly the right thing. */
4275 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4276 arg0_type, low, 0, arg1, 0);
4277 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4278 arg0_type, high, 1, arg1, 0);
4279 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4280 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4281 return NULL_TREE;
4282
4283 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4284 *strict_overflow_p = true;
4285
4286 normalize:
4287 /* Check for an unsigned range which has wrapped around the maximum
4288 value thus making n_high < n_low, and normalize it. */
4289 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4290 {
4291 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4292 build_int_cst (TREE_TYPE (n_high), 1), 0);
4293 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4294 build_int_cst (TREE_TYPE (n_low), 1), 0);
4295
4296 /* If the range is of the form +/- [ x+1, x ], we won't
4297 be able to normalize it. But then, it represents the
4298 whole range or the empty set, so make it
4299 +/- [ -, - ]. */
4300 if (tree_int_cst_equal (n_low, low)
4301 && tree_int_cst_equal (n_high, high))
4302 low = high = 0;
4303 else
4304 in_p = ! in_p;
4305 }
4306 else
4307 low = n_low, high = n_high;
4308
4309 *p_low = low;
4310 *p_high = high;
4311 *p_in_p = in_p;
4312 return arg0;
4313
4314 CASE_CONVERT:
4315 case NON_LVALUE_EXPR:
4316 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4317 return NULL_TREE;
4318
4319 if (! INTEGRAL_TYPE_P (arg0_type)
4320 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4321 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4322 return NULL_TREE;
4323
4324 n_low = low, n_high = high;
4325
4326 if (n_low != 0)
4327 n_low = fold_convert_loc (loc, arg0_type, n_low);
4328
4329 if (n_high != 0)
4330 n_high = fold_convert_loc (loc, arg0_type, n_high);
4331
4332 /* If we're converting ARG0 from an unsigned type to EXP's
4333 signed type, we will be doing the comparison as unsigned.
4334 The tests above have already verified that LOW and HIGH
4335 are both positive.
4336
4337 So we have to ensure that we will handle large unsigned
4338 values the same way that the current signed bounds treat
4339 negative values. */
4340
4341 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4342 {
4343 tree high_positive;
4344 tree equiv_type;
4345 /* For fixed-point modes, we need to pass the saturating flag
4346 as the 2nd parameter. */
4347 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4348 equiv_type
4349 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4350 TYPE_SATURATING (arg0_type));
4351 else
4352 equiv_type
4353 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4354
4355 /* A range without an upper bound is, naturally, unbounded.
4356 Since convert would have cropped a very large value, use
4357 the max value for the destination type. */
4358 high_positive
4359 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4360 : TYPE_MAX_VALUE (arg0_type);
4361
4362 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4363 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4364 fold_convert_loc (loc, arg0_type,
4365 high_positive),
4366 build_int_cst (arg0_type, 1));
4367
4368 /* If the low bound is specified, "and" the range with the
4369 range for which the original unsigned value will be
4370 positive. */
4371 if (low != 0)
4372 {
4373 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4374 1, fold_convert_loc (loc, arg0_type,
4375 integer_zero_node),
4376 high_positive))
4377 return NULL_TREE;
4378
4379 in_p = (n_in_p == in_p);
4380 }
4381 else
4382 {
4383 /* Otherwise, "or" the range with the range of the input
4384 that will be interpreted as negative. */
4385 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4386 1, fold_convert_loc (loc, arg0_type,
4387 integer_zero_node),
4388 high_positive))
4389 return NULL_TREE;
4390
4391 in_p = (in_p != n_in_p);
4392 }
4393 }
4394
4395 *p_low = n_low;
4396 *p_high = n_high;
4397 *p_in_p = in_p;
4398 return arg0;
4399
4400 default:
4401 return NULL_TREE;
4402 }
4403 }
4404
4405 /* Given EXP, a logical expression, set the range it is testing into
4406 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4407 actually being tested. *PLOW and *PHIGH will be made of the same
4408 type as the returned expression. If EXP is not a comparison, we
4409 will most likely not be returning a useful value and range. Set
4410 *STRICT_OVERFLOW_P to true if the return value is only valid
4411 because signed overflow is undefined; otherwise, do not change
4412 *STRICT_OVERFLOW_P. */
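/* For example (illustrative): for a signed X, EXP == (X >= 2) returns X
   with *PIN_P == 1 and range [2, -]; for an unsigned X the same test comes
   back as being outside [0, 1]. */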
4413
4414 tree
4415 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4416 bool *strict_overflow_p)
4417 {
4418 enum tree_code code;
4419 tree arg0, arg1 = NULL_TREE;
4420 tree exp_type, nexp;
4421 int in_p;
4422 tree low, high;
4423 location_t loc = EXPR_LOCATION (exp);
4424
4425 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4426 and see if we can refine the range. Some of the cases below may not
4427 happen, but it doesn't seem worth worrying about this. We keep
4428 iterating for as long as make_range_step can refine the range; once
4429 it returns NULL_TREE, we stop. */
4430
4431 in_p = 0;
4432 low = high = build_int_cst (TREE_TYPE (exp), 0);
4433
4434 while (1)
4435 {
4436 code = TREE_CODE (exp);
4437 exp_type = TREE_TYPE (exp);
4438 arg0 = NULL_TREE;
4439
4440 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4441 {
4442 if (TREE_OPERAND_LENGTH (exp) > 0)
4443 arg0 = TREE_OPERAND (exp, 0);
4444 if (TREE_CODE_CLASS (code) == tcc_binary
4445 || TREE_CODE_CLASS (code) == tcc_comparison
4446 || (TREE_CODE_CLASS (code) == tcc_expression
4447 && TREE_OPERAND_LENGTH (exp) > 1))
4448 arg1 = TREE_OPERAND (exp, 1);
4449 }
4450 if (arg0 == NULL_TREE)
4451 break;
4452
4453 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4454 &high, &in_p, strict_overflow_p);
4455 if (nexp == NULL_TREE)
4456 break;
4457 exp = nexp;
4458 }
4459
4460 /* If EXP is a constant, we can evaluate whether this is true or false. */
4461 if (TREE_CODE (exp) == INTEGER_CST)
4462 {
4463 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4464 exp, 0, low, 0))
4465 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4466 exp, 1, high, 1)));
4467 low = high = 0;
4468 exp = 0;
4469 }
4470
4471 *pin_p = in_p, *plow = low, *phigh = high;
4472 return exp;
4473 }
4474 \f
4475 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4476 type, TYPE, return an expression to test if EXP is in (or out of, depending
4477 on IN_P) the range. Return 0 if the test couldn't be created. */
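/* For example (illustrative): with IN_P == 1, LOW == 2 and HIGH == 5, the
   check for a signed int EXP is built as (unsigned) (EXP - 2) <= 3,
   matching the range-test example given earlier. */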
4478
4479 tree
4480 build_range_check (location_t loc, tree type, tree exp, int in_p,
4481 tree low, tree high)
4482 {
4483 tree etype = TREE_TYPE (exp), value;
4484
4485 /* Disable this optimization for function pointer expressions
4486 on targets that require function pointer canonicalization. */
4487 if (targetm.have_canonicalize_funcptr_for_compare ()
4488 && TREE_CODE (etype) == POINTER_TYPE
4489 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4490 return NULL_TREE;
4491
4492 if (! in_p)
4493 {
4494 value = build_range_check (loc, type, exp, 1, low, high);
4495 if (value != 0)
4496 return invert_truthvalue_loc (loc, value);
4497
4498 return 0;
4499 }
4500
4501 if (low == 0 && high == 0)
4502 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4503
4504 if (low == 0)
4505 return fold_build2_loc (loc, LE_EXPR, type, exp,
4506 fold_convert_loc (loc, etype, high));
4507
4508 if (high == 0)
4509 return fold_build2_loc (loc, GE_EXPR, type, exp,
4510 fold_convert_loc (loc, etype, low));
4511
4512 if (operand_equal_p (low, high, 0))
4513 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4514 fold_convert_loc (loc, etype, low));
4515
4516 if (integer_zerop (low))
4517 {
4518 if (! TYPE_UNSIGNED (etype))
4519 {
4520 etype = unsigned_type_for (etype);
4521 high = fold_convert_loc (loc, etype, high);
4522 exp = fold_convert_loc (loc, etype, exp);
4523 }
4524 return build_range_check (loc, type, exp, 1, 0, high);
4525 }
4526
4527 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4528 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4529 {
4530 int prec = TYPE_PRECISION (etype);
4531
4532 if (wi::mask (prec - 1, false, prec) == high)
4533 {
4534 if (TYPE_UNSIGNED (etype))
4535 {
4536 tree signed_etype = signed_type_for (etype);
4537 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4538 etype
4539 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4540 else
4541 etype = signed_etype;
4542 exp = fold_convert_loc (loc, etype, exp);
4543 }
4544 return fold_build2_loc (loc, GT_EXPR, type, exp,
4545 build_int_cst (etype, 0));
4546 }
4547 }
4548
4549 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4550 This requires wrap-around arithmetic for the type of the expression.
4551 First make sure that arithmetic in this type is valid, then make sure
4552 that it wraps around. */
4553 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4554 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4555 TYPE_UNSIGNED (etype));
4556
4557 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4558 {
4559 tree utype, minv, maxv;
4560
4561 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4562 for the type in question, as we rely on this here. */
4563 utype = unsigned_type_for (etype);
4564 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4565 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4566 build_int_cst (TREE_TYPE (maxv), 1), 1);
4567 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4568
4569 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4570 minv, 1, maxv, 1)))
4571 etype = utype;
4572 else
4573 return 0;
4574 }
4575
4576 high = fold_convert_loc (loc, etype, high);
4577 low = fold_convert_loc (loc, etype, low);
4578 exp = fold_convert_loc (loc, etype, exp);
4579
4580 value = const_binop (MINUS_EXPR, high, low);
4581
4583 if (POINTER_TYPE_P (etype))
4584 {
4585 if (value != 0 && !TREE_OVERFLOW (value))
4586 {
4587 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4588 return build_range_check (loc, type,
4589 fold_build_pointer_plus_loc (loc, exp, low),
4590 1, build_int_cst (etype, 0), value);
4591 }
4592 return 0;
4593 }
4594
4595 if (value != 0 && !TREE_OVERFLOW (value))
4596 return build_range_check (loc, type,
4597 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4598 1, build_int_cst (etype, 0), value);
4599
4600 return 0;
4601 }
4602 \f
4603 /* Return the predecessor of VAL in its type, handling the infinite case. */
4604
4605 static tree
4606 range_predecessor (tree val)
4607 {
4608 tree type = TREE_TYPE (val);
4609
4610 if (INTEGRAL_TYPE_P (type)
4611 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4612 return 0;
4613 else
4614 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4615 build_int_cst (TREE_TYPE (val), 1), 0);
4616 }
4617
4618 /* Return the successor of VAL in its type, handling the infinite case. */
4619
4620 static tree
4621 range_successor (tree val)
4622 {
4623 tree type = TREE_TYPE (val);
4624
4625 if (INTEGRAL_TYPE_P (type)
4626 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4627 return 0;
4628 else
4629 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4630 build_int_cst (TREE_TYPE (val), 1), 0);
4631 }
4632
4633 /* Given two ranges, see if we can merge them into one. Return 1 if we
4634 can, 0 if we can't. Set the output range into the specified parameters. */
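/* For example (illustrative): merging "+ [2, 5]" with "+ [4, 9]" (both
   included, as for an AND of the two tests) yields "+ [4, 5]", while
   merging "+ [2, 5]" with "- [2, 5]" yields "- [-, -]", i.e. always
   false. */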
4635
4636 bool
4637 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4638 tree high0, int in1_p, tree low1, tree high1)
4639 {
4640 int no_overlap;
4641 int subset;
4642 int temp;
4643 tree tem;
4644 int in_p;
4645 tree low, high;
4646 int lowequal = ((low0 == 0 && low1 == 0)
4647 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4648 low0, 0, low1, 0)));
4649 int highequal = ((high0 == 0 && high1 == 0)
4650 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4651 high0, 1, high1, 1)));
4652
4653 /* Make range 0 be the range that starts first, or ends last if they
4654 start at the same value. Swap them if it isn't. */
4655 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4656 low0, 0, low1, 0))
4657 || (lowequal
4658 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4659 high1, 1, high0, 1))))
4660 {
4661 temp = in0_p, in0_p = in1_p, in1_p = temp;
4662 tem = low0, low0 = low1, low1 = tem;
4663 tem = high0, high0 = high1, high1 = tem;
4664 }
4665
4666 /* Now flag two cases, whether the ranges are disjoint or whether the
4667 second range is totally subsumed in the first. Note that the tests
4668 below are simplified by the ones above. */
4669 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4670 high0, 1, low1, 0));
4671 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4672 high1, 1, high0, 1));
4673
4674 /* We now have four cases, depending on whether we are including or
4675 excluding the two ranges. */
4676 if (in0_p && in1_p)
4677 {
4678 /* If they don't overlap, the result is false. If the second range
4679 is a subset it is the result. Otherwise, the range is from the start
4680 of the second to the end of the first. */
4681 if (no_overlap)
4682 in_p = 0, low = high = 0;
4683 else if (subset)
4684 in_p = 1, low = low1, high = high1;
4685 else
4686 in_p = 1, low = low1, high = high0;
4687 }
4688
4689 else if (in0_p && ! in1_p)
4690 {
4691 /* If they don't overlap, the result is the first range. If they are
4692 equal, the result is false. If the second range is a subset of the
4693 first, and the ranges begin at the same place, we go from just after
4694 the end of the second range to the end of the first. If the second
4695 range is not a subset of the first, or if it is a subset and both
4696 ranges end at the same place, the range starts at the start of the
4697 first range and ends just before the second range.
4698 Otherwise, we can't describe this as a single range. */
4699 if (no_overlap)
4700 in_p = 1, low = low0, high = high0;
4701 else if (lowequal && highequal)
4702 in_p = 0, low = high = 0;
4703 else if (subset && lowequal)
4704 {
4705 low = range_successor (high1);
4706 high = high0;
4707 in_p = 1;
4708 if (low == 0)
4709 {
4710 /* We are in the weird situation where high0 > high1 but
4711 high1 has no successor. Punt. */
4712 return 0;
4713 }
4714 }
4715 else if (! subset || highequal)
4716 {
4717 low = low0;
4718 high = range_predecessor (low1);
4719 in_p = 1;
4720 if (high == 0)
4721 {
4722 /* low0 < low1 but low1 has no predecessor. Punt. */
4723 return 0;
4724 }
4725 }
4726 else
4727 return 0;
4728 }
4729
4730 else if (! in0_p && in1_p)
4731 {
4732 /* If they don't overlap, the result is the second range. If the second
4733 is a subset of the first, the result is false. Otherwise,
4734 the range starts just after the first range and ends at the
4735 end of the second. */
4736 if (no_overlap)
4737 in_p = 1, low = low1, high = high1;
4738 else if (subset || highequal)
4739 in_p = 0, low = high = 0;
4740 else
4741 {
4742 low = range_successor (high0);
4743 high = high1;
4744 in_p = 1;
4745 if (low == 0)
4746 {
4747 /* high1 > high0 but high0 has no successor. Punt. */
4748 return 0;
4749 }
4750 }
4751 }
4752
4753 else
4754 {
4755 /* The case where we are excluding both ranges. Here the complex case
4756 is if they don't overlap. In that case, the only time we have a
4757 range is if they are adjacent. If the second is a subset of the
4758 first, the result is the first. Otherwise, the range to exclude
4759 starts at the beginning of the first range and ends at the end of the
4760 second. */
4761 if (no_overlap)
4762 {
4763 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4764 range_successor (high0),
4765 1, low1, 0)))
4766 in_p = 0, low = low0, high = high1;
4767 else
4768 {
4769 /* Canonicalize - [min, x] into - [-, x]. */
4770 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4771 switch (TREE_CODE (TREE_TYPE (low0)))
4772 {
4773 case ENUMERAL_TYPE:
4774 if (TYPE_PRECISION (TREE_TYPE (low0))
4775 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4776 break;
4777 /* FALLTHROUGH */
4778 case INTEGER_TYPE:
4779 if (tree_int_cst_equal (low0,
4780 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4781 low0 = 0;
4782 break;
4783 case POINTER_TYPE:
4784 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4785 && integer_zerop (low0))
4786 low0 = 0;
4787 break;
4788 default:
4789 break;
4790 }
4791
4792 /* Canonicalize - [x, max] into - [x, -]. */
4793 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4794 switch (TREE_CODE (TREE_TYPE (high1)))
4795 {
4796 case ENUMERAL_TYPE:
4797 if (TYPE_PRECISION (TREE_TYPE (high1))
4798 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4799 break;
4800 /* FALLTHROUGH */
4801 case INTEGER_TYPE:
4802 if (tree_int_cst_equal (high1,
4803 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4804 high1 = 0;
4805 break;
4806 case POINTER_TYPE:
4807 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4808 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4809 high1, 1,
4810 build_int_cst (TREE_TYPE (high1), 1),
4811 1)))
4812 high1 = 0;
4813 break;
4814 default:
4815 break;
4816 }
4817
4818 /* The ranges might be also adjacent between the maximum and
4819 minimum values of the given type. For
4820 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4821 return + [x + 1, y - 1]. */
4822 if (low0 == 0 && high1 == 0)
4823 {
4824 low = range_successor (high0);
4825 high = range_predecessor (low1);
4826 if (low == 0 || high == 0)
4827 return 0;
4828
4829 in_p = 1;
4830 }
4831 else
4832 return 0;
4833 }
4834 }
4835 else if (subset)
4836 in_p = 0, low = low0, high = high0;
4837 else
4838 in_p = 0, low = low0, high = high1;
4839 }
4840
4841 *pin_p = in_p, *plow = low, *phigh = high;
4842 return 1;
4843 }
4844 \f
4845
4846 /* Subroutine of fold, looking inside expressions of the form
4847 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4848 of the COND_EXPR. This function is being used also to optimize
4849 A op B ? C : A, by reversing the comparison first.
4850
4851 Return a folded expression whose code is not a COND_EXPR
4852 anymore, or NULL_TREE if no folding opportunity is found. */
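/* For example (illustrative): x >= 0 ? x : -x is folded to ABS_EXPR <x>,
   and x < y ? x : y to a MIN_EXPR, subject to the signed-zero and NaN
   caveats spelled out below. */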
4853
4854 static tree
4855 fold_cond_expr_with_comparison (location_t loc, tree type,
4856 tree arg0, tree arg1, tree arg2)
4857 {
4858 enum tree_code comp_code = TREE_CODE (arg0);
4859 tree arg00 = TREE_OPERAND (arg0, 0);
4860 tree arg01 = TREE_OPERAND (arg0, 1);
4861 tree arg1_type = TREE_TYPE (arg1);
4862 tree tem;
4863
4864 STRIP_NOPS (arg1);
4865 STRIP_NOPS (arg2);
4866
4867 /* If we have A op 0 ? A : -A, consider applying the following
4868 transformations:
4869
4870 A == 0? A : -A same as -A
4871 A != 0? A : -A same as A
4872 A >= 0? A : -A same as abs (A)
4873 A > 0? A : -A same as abs (A)
4874 A <= 0? A : -A same as -abs (A)
4875 A < 0? A : -A same as -abs (A)
4876
4877 None of these transformations work for modes with signed
4878 zeros. If A is +/-0, the first two transformations will
4879 change the sign of the result (from +0 to -0, or vice
4880 versa). The last four will fix the sign of the result,
4881 even though the original expressions could be positive or
4882 negative, depending on the sign of A.
4883
4884 Note that all these transformations are correct if A is
4885 NaN, since the two alternatives (A and -A) are also NaNs. */
4886 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4887 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4888 ? real_zerop (arg01)
4889 : integer_zerop (arg01))
4890 && ((TREE_CODE (arg2) == NEGATE_EXPR
4891 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4892 /* In the case that A is of the form X-Y, '-A' (arg2) may
4893 have already been folded to Y-X, check for that. */
4894 || (TREE_CODE (arg1) == MINUS_EXPR
4895 && TREE_CODE (arg2) == MINUS_EXPR
4896 && operand_equal_p (TREE_OPERAND (arg1, 0),
4897 TREE_OPERAND (arg2, 1), 0)
4898 && operand_equal_p (TREE_OPERAND (arg1, 1),
4899 TREE_OPERAND (arg2, 0), 0))))
4900 switch (comp_code)
4901 {
4902 case EQ_EXPR:
4903 case UNEQ_EXPR:
4904 tem = fold_convert_loc (loc, arg1_type, arg1);
4905 return pedantic_non_lvalue_loc (loc,
4906 fold_convert_loc (loc, type,
4907 negate_expr (tem)));
4908 case NE_EXPR:
4909 case LTGT_EXPR:
4910 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4911 case UNGE_EXPR:
4912 case UNGT_EXPR:
4913 if (flag_trapping_math)
4914 break;
4915 /* Fall through. */
4916 case GE_EXPR:
4917 case GT_EXPR:
4918 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4919 arg1 = fold_convert_loc (loc, signed_type_for
4920 (TREE_TYPE (arg1)), arg1);
4921 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4922 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4923 case UNLE_EXPR:
4924 case UNLT_EXPR:
4925 if (flag_trapping_math)
4926 break;
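/* Fall through. */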
4927 case LE_EXPR:
4928 case LT_EXPR:
4929 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4930 arg1 = fold_convert_loc (loc, signed_type_for
4931 (TREE_TYPE (arg1)), arg1);
4932 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4933 return negate_expr (fold_convert_loc (loc, type, tem));
4934 default:
4935 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4936 break;
4937 }
4938
4939 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4940 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4941 both transformations are correct when A is NaN: A != 0
4942 is then true, and A == 0 is false. */
4943
4944 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4945 && integer_zerop (arg01) && integer_zerop (arg2))
4946 {
4947 if (comp_code == NE_EXPR)
4948 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4949 else if (comp_code == EQ_EXPR)
4950 return build_zero_cst (type);
4951 }
4952
4953 /* Try some transformations of A op B ? A : B.
4954
4955 A == B? A : B same as B
4956 A != B? A : B same as A
4957 A >= B? A : B same as max (A, B)
4958 A > B? A : B same as max (B, A)
4959 A <= B? A : B same as min (A, B)
4960 A < B? A : B same as min (B, A)
4961
4962 As above, these transformations don't work in the presence
4963 of signed zeros. For example, if A and B are zeros of
4964 opposite sign, the first two transformations will change
4965 the sign of the result. In the last four, the original
4966 expressions give different results for (A=+0, B=-0) and
4967 (A=-0, B=+0), but the transformed expressions do not.
4968
4969 The first two transformations are correct if either A or B
4970 is a NaN. In the first transformation, the condition will
4971 be false, and B will indeed be chosen. In the case of the
4972 second transformation, the condition A != B will be true,
4973 and A will be chosen.
4974
4975 The conversions to max() and min() are not correct if B is
4976 a number and A is not. The conditions in the original
4977 expressions will be false, so all four give B. The min()
4978 and max() versions would give a NaN instead. */
4979 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4980 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4981 /* Avoid these transformations if the COND_EXPR may be used
4982 as an lvalue in the C++ front-end. PR c++/19199. */
4983 && (in_gimple_form
4984 || VECTOR_TYPE_P (type)
4985 || (! lang_GNU_CXX ()
4986 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4987 || ! maybe_lvalue_p (arg1)
4988 || ! maybe_lvalue_p (arg2)))
4989 {
4990 tree comp_op0 = arg00;
4991 tree comp_op1 = arg01;
4992 tree comp_type = TREE_TYPE (comp_op0);
4993
4994 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4995 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4996 {
4997 comp_type = type;
4998 comp_op0 = arg1;
4999 comp_op1 = arg2;
5000 }
5001
5002 switch (comp_code)
5003 {
5004 case EQ_EXPR:
5005 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5006 case NE_EXPR:
5007 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5008 case LE_EXPR:
5009 case LT_EXPR:
5010 case UNLE_EXPR:
5011 case UNLT_EXPR:
5012 /* In C++ a ?: expression can be an lvalue, so put the
5013 operand which will be used if they are equal first
5014 so that we can convert this back to the
5015 corresponding COND_EXPR. */
5016 if (!HONOR_NANS (arg1))
5017 {
5018 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5019 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5020 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5021 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5022 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5023 comp_op1, comp_op0);
5024 return pedantic_non_lvalue_loc (loc,
5025 fold_convert_loc (loc, type, tem));
5026 }
5027 break;
5028 case GE_EXPR:
5029 case GT_EXPR:
5030 case UNGE_EXPR:
5031 case UNGT_EXPR:
5032 if (!HONOR_NANS (arg1))
5033 {
5034 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5035 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5036 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5037 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5038 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5039 comp_op1, comp_op0);
5040 return pedantic_non_lvalue_loc (loc,
5041 fold_convert_loc (loc, type, tem));
5042 }
5043 break;
5044 case UNEQ_EXPR:
5045 if (!HONOR_NANS (arg1))
5046 return pedantic_non_lvalue_loc (loc,
5047 fold_convert_loc (loc, type, arg2));
5048 break;
5049 case LTGT_EXPR:
5050 if (!HONOR_NANS (arg1))
5051 return pedantic_non_lvalue_loc (loc,
5052 fold_convert_loc (loc, type, arg1));
5053 break;
5054 default:
5055 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5056 break;
5057 }
5058 }
5059
5060 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5061 we might still be able to simplify this. For example,
5062 if C1 is one less or one more than C2, this might have started
5063 out as a MIN or MAX and been transformed by this function.
5064 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5065
5066 if (INTEGRAL_TYPE_P (type)
5067 && TREE_CODE (arg01) == INTEGER_CST
5068 && TREE_CODE (arg2) == INTEGER_CST)
5069 switch (comp_code)
5070 {
5071 case EQ_EXPR:
5072 if (TREE_CODE (arg1) == INTEGER_CST)
5073 break;
5074 /* We can replace A with C1 in this case. */
5075 arg1 = fold_convert_loc (loc, type, arg01);
5076 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5077
5078 case LT_EXPR:
5079 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5080 MIN_EXPR, to preserve the signedness of the comparison. */
5081 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5082 OEP_ONLY_CONST)
5083 && operand_equal_p (arg01,
5084 const_binop (PLUS_EXPR, arg2,
5085 build_int_cst (type, 1)),
5086 OEP_ONLY_CONST))
5087 {
5088 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5089 fold_convert_loc (loc, TREE_TYPE (arg00),
5090 arg2));
5091 return pedantic_non_lvalue_loc (loc,
5092 fold_convert_loc (loc, type, tem));
5093 }
5094 break;
5095
5096 case LE_EXPR:
5097 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5098 as above. */
5099 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5100 OEP_ONLY_CONST)
5101 && operand_equal_p (arg01,
5102 const_binop (MINUS_EXPR, arg2,
5103 build_int_cst (type, 1)),
5104 OEP_ONLY_CONST))
5105 {
5106 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5107 fold_convert_loc (loc, TREE_TYPE (arg00),
5108 arg2));
5109 return pedantic_non_lvalue_loc (loc,
5110 fold_convert_loc (loc, type, tem));
5111 }
5112 break;
5113
5114 case GT_EXPR:
5115 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5116 MAX_EXPR, to preserve the signedness of the comparison. */
5117 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5118 OEP_ONLY_CONST)
5119 && operand_equal_p (arg01,
5120 const_binop (MINUS_EXPR, arg2,
5121 build_int_cst (type, 1)),
5122 OEP_ONLY_CONST))
5123 {
5124 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5125 fold_convert_loc (loc, TREE_TYPE (arg00),
5126 arg2));
5127 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5128 }
5129 break;
5130
5131 case GE_EXPR:
5132 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5133 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5134 OEP_ONLY_CONST)
5135 && operand_equal_p (arg01,
5136 const_binop (PLUS_EXPR, arg2,
5137 build_int_cst (type, 1)),
5138 OEP_ONLY_CONST))
5139 {
5140 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5141 fold_convert_loc (loc, TREE_TYPE (arg00),
5142 arg2));
5143 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5144 }
5145 break;
5146 case NE_EXPR:
5147 break;
5148 default:
5149 gcc_unreachable ();
5150 }
5151
5152 return NULL_TREE;
5153 }
5154
5156 \f
5157 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5158 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5159 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5160 false) >= 2)
5161 #endif
5162
5163 /* EXP is some logical combination of boolean tests. See if we can
5164 merge it into some range test. Return the new tree if so. */
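/* For example (illustrative): for (X >= 2) && (X <= 5) the two ranges
   merge to "+ [2, 5]" and the whole test is rewritten as the single range
   check (unsigned) (X - 2) <= 3. */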
5165
5166 static tree
5167 fold_range_test (location_t loc, enum tree_code code, tree type,
5168 tree op0, tree op1)
5169 {
5170 int or_op = (code == TRUTH_ORIF_EXPR
5171 || code == TRUTH_OR_EXPR);
5172 int in0_p, in1_p, in_p;
5173 tree low0, low1, low, high0, high1, high;
5174 bool strict_overflow_p = false;
5175 tree tem, lhs, rhs;
5176 const char * const warnmsg = G_("assuming signed overflow does not occur "
5177 "when simplifying range test");
5178
5179 if (!INTEGRAL_TYPE_P (type))
5180 return 0;
5181
5182 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5183 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5184
5185 /* If this is an OR operation, invert both sides; we will invert
5186 again at the end. */
5187 if (or_op)
5188 in0_p = ! in0_p, in1_p = ! in1_p;
5189
5190 /* If both expressions are the same, if we can merge the ranges, and we
5191 can build the range test, return it or its inversion. If one of the
5192 ranges is always true or always false, consider it to be the same
5193 expression as the other. */
5194 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5195 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5196 in1_p, low1, high1)
5197 && 0 != (tem = (build_range_check (loc, type,
5198 lhs != 0 ? lhs
5199 : rhs != 0 ? rhs : integer_zero_node,
5200 in_p, low, high))))
5201 {
5202 if (strict_overflow_p)
5203 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5204 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5205 }
5206
5207 /* On machines where the branch cost is expensive, if this is a
5208 short-circuited branch and the underlying object on both sides
5209 is the same, make a non-short-circuit operation. */
5210 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5211 && lhs != 0 && rhs != 0
5212 && (code == TRUTH_ANDIF_EXPR
5213 || code == TRUTH_ORIF_EXPR)
5214 && operand_equal_p (lhs, rhs, 0))
5215 {
5216 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5217 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5218 which cases we can't do this. */
5219 if (simple_operand_p (lhs))
5220 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5221 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5222 type, op0, op1);
5223
5224 else if (!lang_hooks.decls.global_bindings_p ()
5225 && !CONTAINS_PLACEHOLDER_P (lhs))
5226 {
5227 tree common = save_expr (lhs);
5228
5229 if (0 != (lhs = build_range_check (loc, type, common,
5230 or_op ? ! in0_p : in0_p,
5231 low0, high0))
5232 && (0 != (rhs = build_range_check (loc, type, common,
5233 or_op ? ! in1_p : in1_p,
5234 low1, high1))))
5235 {
5236 if (strict_overflow_p)
5237 fold_overflow_warning (warnmsg,
5238 WARN_STRICT_OVERFLOW_COMPARISON);
5239 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5240 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5241 type, lhs, rhs);
5242 }
5243 }
5244 }
5245
5246 return 0;
5247 }
5248 \f
5249 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5250 bit value. Arrange things so the extra bits will be set to zero if and
5251 only if C is sign-extended to its full width. If MASK is nonzero,
5252 it is an INTEGER_CST that should be AND'ed with the extra bits. */
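/* For example (illustrative): with P == 3 in an 8-bit signed type,
   C == -3 (0xfd, the sign-extension of the 3-bit value 101) yields 0x05,
   whose extra bits are clear, while C == 5 (0x05) yields 0xfd, whose extra
   bits are set. */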
5253
5254 static tree
5255 unextend (tree c, int p, int unsignedp, tree mask)
5256 {
5257 tree type = TREE_TYPE (c);
5258 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5259 tree temp;
5260
5261 if (p == modesize || unsignedp)
5262 return c;
5263
5264 /* We work by getting just the sign bit into the low-order bit, then
5265 into the high-order bit, then sign-extend. We then XOR that value
5266 with C. */
5267 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5268
5269 /* We must use a signed type in order to get an arithmetic right shift.
5270 However, we must also avoid introducing accidental overflows, so that
5271 a subsequent call to integer_zerop will work. Hence we must
5272 do the type conversion here. At this point, the constant is either
5273 zero or one, and the conversion to a signed type can never overflow.
5274 We could get an overflow if this conversion is done anywhere else. */
5275 if (TYPE_UNSIGNED (type))
5276 temp = fold_convert (signed_type_for (type), temp);
5277
5278 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5279 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5280 if (mask != 0)
5281 temp = const_binop (BIT_AND_EXPR, temp,
5282 fold_convert (TREE_TYPE (c), mask));
5283 /* If necessary, convert the type back to match the type of C. */
5284 if (TYPE_UNSIGNED (type))
5285 temp = fold_convert (type, temp);
5286
5287 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5288 }
5289 \f
5290 /* For an expression that has the form
5291 (A && B) || ~B
5292 or
5293 (A || B) && ~B,
5294 we can drop one of the inner expressions and simplify to
5295 A || ~B
5296 or
5297 A && ~B
5298 LOC is the location of the resulting expression. OP is the inner
5299 logical operation; the left-hand side in the examples above, while CMPOP
5300 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5301 removing a condition that guards another, as in
5302 (A != NULL && A->...) || A == NULL
5303 which we must not transform. If RHS_ONLY is true, only eliminate the
5304 right-most operand of the inner logical operation. */
5305
5306 static tree
5307 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5308 bool rhs_only)
5309 {
5310 tree type = TREE_TYPE (cmpop);
5311 enum tree_code code = TREE_CODE (cmpop);
5312 enum tree_code truthop_code = TREE_CODE (op);
5313 tree lhs = TREE_OPERAND (op, 0);
5314 tree rhs = TREE_OPERAND (op, 1);
5315 tree orig_lhs = lhs, orig_rhs = rhs;
5316 enum tree_code rhs_code = TREE_CODE (rhs);
5317 enum tree_code lhs_code = TREE_CODE (lhs);
5318 enum tree_code inv_code;
5319
5320 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5321 return NULL_TREE;
5322
5323 if (TREE_CODE_CLASS (code) != tcc_comparison)
5324 return NULL_TREE;
5325
5326 if (rhs_code == truthop_code)
5327 {
5328 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5329 if (newrhs != NULL_TREE)
5330 {
5331 rhs = newrhs;
5332 rhs_code = TREE_CODE (rhs);
5333 }
5334 }
5335 if (lhs_code == truthop_code && !rhs_only)
5336 {
5337 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5338 if (newlhs != NULL_TREE)
5339 {
5340 lhs = newlhs;
5341 lhs_code = TREE_CODE (lhs);
5342 }
5343 }
5344
5345 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5346 if (inv_code == rhs_code
5347 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5348 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5349 return lhs;
5350 if (!rhs_only && inv_code == lhs_code
5351 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5352 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5353 return rhs;
5354 if (rhs != orig_rhs || lhs != orig_lhs)
5355 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5356 lhs, rhs);
5357 return NULL_TREE;
5358 }
5359
5360 /* Find ways of folding logical expressions of LHS and RHS:
5361 Try to merge two comparisons to the same innermost item.
5362 Look for range tests like "ch >= '0' && ch <= '9'".
5363 Look for combinations of simple terms on machines with expensive branches
5364 and evaluate the RHS unconditionally.
5365
5366 For example, if we have p->a == 2 && p->b == 4 and we can make an
5367 object large enough to span both A and B, we can do this with a comparison
5368 against the object ANDed with the mask.
5369
5370 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5371 operations to do this with one comparison.
5372
5373 We check for both normal comparisons and the BIT_AND_EXPRs made by
5374 this function and the one above.
5375
5376 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5377 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5378
5379 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5380 two operands.
5381
5382 We return the simplified tree or 0 if no optimization is possible. */
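/* An illustrative example (hypothetical layout): for a struct with two
   adjacent unsigned char fields a and b, p->a == 2 && p->b == 4 can be
   merged into a single 16-bit load of the word spanning both fields,
   masked and compared against the combined constant; on a
   little-endian target this is conceptually (w & 0xffff) == 0x0402.  */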
5383
5384 static tree
5385 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5386 tree lhs, tree rhs)
5387 {
5388 /* If this is the "or" of two comparisons, we can do something if
5389 the comparisons are NE_EXPR. If this is the "and", we can do something
5390 if the comparisons are EQ_EXPR. I.e.,
5391 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5392
5393 WANTED_CODE is this operation code. For single bit fields, we can
5394 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5395 comparison for one-bit fields. */
5396
5397 enum tree_code wanted_code;
5398 enum tree_code lcode, rcode;
5399 tree ll_arg, lr_arg, rl_arg, rr_arg;
5400 tree ll_inner, lr_inner, rl_inner, rr_inner;
5401 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5402 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5403 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5404 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5405 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5406 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5407 machine_mode lnmode, rnmode;
5408 tree ll_mask, lr_mask, rl_mask, rr_mask;
5409 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5410 tree l_const, r_const;
5411 tree lntype, rntype, result;
5412 HOST_WIDE_INT first_bit, end_bit;
5413 int volatilep;
5414
5415 /* Start by getting the comparison codes. Fail if anything is volatile.
5416 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5417 it were surrounded with a NE_EXPR. */
5418
5419 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5420 return 0;
5421
5422 lcode = TREE_CODE (lhs);
5423 rcode = TREE_CODE (rhs);
5424
5425 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5426 {
5427 lhs = build2 (NE_EXPR, truth_type, lhs,
5428 build_int_cst (TREE_TYPE (lhs), 0));
5429 lcode = NE_EXPR;
5430 }
5431
5432 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5433 {
5434 rhs = build2 (NE_EXPR, truth_type, rhs,
5435 build_int_cst (TREE_TYPE (rhs), 0));
5436 rcode = NE_EXPR;
5437 }
5438
5439 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5440 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5441 return 0;
5442
5443 ll_arg = TREE_OPERAND (lhs, 0);
5444 lr_arg = TREE_OPERAND (lhs, 1);
5445 rl_arg = TREE_OPERAND (rhs, 0);
5446 rr_arg = TREE_OPERAND (rhs, 1);
5447
5448 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5449 if (simple_operand_p (ll_arg)
5450 && simple_operand_p (lr_arg))
5451 {
5452 if (operand_equal_p (ll_arg, rl_arg, 0)
5453 && operand_equal_p (lr_arg, rr_arg, 0))
5454 {
5455 result = combine_comparisons (loc, code, lcode, rcode,
5456 truth_type, ll_arg, lr_arg);
5457 if (result)
5458 return result;
5459 }
5460 else if (operand_equal_p (ll_arg, rr_arg, 0)
5461 && operand_equal_p (lr_arg, rl_arg, 0))
5462 {
5463 result = combine_comparisons (loc, code, lcode,
5464 swap_tree_comparison (rcode),
5465 truth_type, ll_arg, lr_arg);
5466 if (result)
5467 return result;
5468 }
5469 }
5470
5471 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5472 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5473
5474 /* If the RHS can be evaluated unconditionally and its operands are
5475 simple, it wins to evaluate the RHS unconditionally on machines
5476 with expensive branches. In this case, this isn't a comparison
5477 that can be merged. */
5478
5479 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5480 false) >= 2
5481 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5482 && simple_operand_p (rl_arg)
5483 && simple_operand_p (rr_arg))
5484 {
5485 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5486 if (code == TRUTH_OR_EXPR
5487 && lcode == NE_EXPR && integer_zerop (lr_arg)
5488 && rcode == NE_EXPR && integer_zerop (rr_arg)
5489 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5490 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5491 return build2_loc (loc, NE_EXPR, truth_type,
5492 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5493 ll_arg, rl_arg),
5494 build_int_cst (TREE_TYPE (ll_arg), 0));
5495
5496 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5497 if (code == TRUTH_AND_EXPR
5498 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5499 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5500 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5501 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5502 return build2_loc (loc, EQ_EXPR, truth_type,
5503 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5504 ll_arg, rl_arg),
5505 build_int_cst (TREE_TYPE (ll_arg), 0));
5506 }
5507
5508 /* See if the comparisons can be merged. Then get all the parameters for
5509 each side. */
5510
5511 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5512 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5513 return 0;
5514
5515 volatilep = 0;
5516 ll_inner = decode_field_reference (loc, ll_arg,
5517 &ll_bitsize, &ll_bitpos, &ll_mode,
5518 &ll_unsignedp, &volatilep, &ll_mask,
5519 &ll_and_mask);
5520 lr_inner = decode_field_reference (loc, lr_arg,
5521 &lr_bitsize, &lr_bitpos, &lr_mode,
5522 &lr_unsignedp, &volatilep, &lr_mask,
5523 &lr_and_mask);
5524 rl_inner = decode_field_reference (loc, rl_arg,
5525 &rl_bitsize, &rl_bitpos, &rl_mode,
5526 &rl_unsignedp, &volatilep, &rl_mask,
5527 &rl_and_mask);
5528 rr_inner = decode_field_reference (loc, rr_arg,
5529 &rr_bitsize, &rr_bitpos, &rr_mode,
5530 &rr_unsignedp, &volatilep, &rr_mask,
5531 &rr_and_mask);
5532
5533 /* The inner operation on the lhs of each comparison must be the same
5534 if we are to be able to do anything.
5535 Then see if we have constants. If not, the same must be true for
5536 the rhs's. */
5537 if (volatilep || ll_inner == 0 || rl_inner == 0
5538 || ! operand_equal_p (ll_inner, rl_inner, 0))
5539 return 0;
5540
5541 if (TREE_CODE (lr_arg) == INTEGER_CST
5542 && TREE_CODE (rr_arg) == INTEGER_CST)
5543 l_const = lr_arg, r_const = rr_arg;
5544 else if (lr_inner == 0 || rr_inner == 0
5545 || ! operand_equal_p (lr_inner, rr_inner, 0))
5546 return 0;
5547 else
5548 l_const = r_const = 0;
5549
5550 /* If either comparison code is not correct for our logical operation,
5551 fail. However, we can convert a one-bit comparison against zero into
5552 the opposite comparison against that bit being set in the field. */
5553
5554 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5555 if (lcode != wanted_code)
5556 {
5557 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5558 {
5559 /* Make the left operand unsigned, since we are only interested
5560 in the value of one bit. Otherwise we are doing the wrong
5561 thing below. */
5562 ll_unsignedp = 1;
5563 l_const = ll_mask;
5564 }
5565 else
5566 return 0;
5567 }
5568
5569 /* This is analogous to the code for l_const above. */
5570 if (rcode != wanted_code)
5571 {
5572 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5573 {
5574 rl_unsignedp = 1;
5575 r_const = rl_mask;
5576 }
5577 else
5578 return 0;
5579 }
5580
5581 /* See if we can find a mode that contains both fields being compared on
5582 the left. If we can't, fail. Otherwise, update all constants and masks
5583 to be relative to a field of that size. */
5584 first_bit = MIN (ll_bitpos, rl_bitpos);
5585 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5586 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5587 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5588 volatilep);
5589 if (lnmode == VOIDmode)
5590 return 0;
5591
5592 lnbitsize = GET_MODE_BITSIZE (lnmode);
5593 lnbitpos = first_bit & ~ (lnbitsize - 1);
5594 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5595 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5596
5597 if (BYTES_BIG_ENDIAN)
5598 {
5599 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5600 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5601 }
5602
5603 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5604 size_int (xll_bitpos));
5605 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5606 size_int (xrl_bitpos));
5607
5608 if (l_const)
5609 {
5610 l_const = fold_convert_loc (loc, lntype, l_const);
5611 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5612 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5613 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5614 fold_build1_loc (loc, BIT_NOT_EXPR,
5615 lntype, ll_mask))))
5616 {
5617 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5618
5619 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5620 }
5621 }
5622 if (r_const)
5623 {
5624 r_const = fold_convert_loc (loc, lntype, r_const);
5625 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5626 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5627 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5628 fold_build1_loc (loc, BIT_NOT_EXPR,
5629 lntype, rl_mask))))
5630 {
5631 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5632
5633 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5634 }
5635 }
5636
5637 /* If the right sides are not constant, do the same for them. Also,
5638 disallow this optimization if a size or signedness mismatch occurs
5639 between the left and right sides. */
5640 if (l_const == 0)
5641 {
5642 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5643 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5644 /* Make sure the two fields on the right
5645 correspond to the left without being swapped. */
5646 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5647 return 0;
5648
5649 first_bit = MIN (lr_bitpos, rr_bitpos);
5650 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5651 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5652 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5653 volatilep);
5654 if (rnmode == VOIDmode)
5655 return 0;
5656
5657 rnbitsize = GET_MODE_BITSIZE (rnmode);
5658 rnbitpos = first_bit & ~ (rnbitsize - 1);
5659 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5660 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5661
5662 if (BYTES_BIG_ENDIAN)
5663 {
5664 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5665 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5666 }
5667
5668 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5669 rntype, lr_mask),
5670 size_int (xlr_bitpos));
5671 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5672 rntype, rr_mask),
5673 size_int (xrr_bitpos));
5674
5675 /* Make a mask that corresponds to both fields being compared.
5676 Do this for both items being compared. If the operands are the
5677 same size and the bits being compared are in the same position
5678 then we can do this by masking both and comparing the masked
5679 results. */
5680 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5681 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5682 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5683 {
5684 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5685 ll_unsignedp || rl_unsignedp);
5686 if (! all_ones_mask_p (ll_mask, lnbitsize))
5687 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5688
5689 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5690 lr_unsignedp || rr_unsignedp);
5691 if (! all_ones_mask_p (lr_mask, rnbitsize))
5692 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5693
5694 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5695 }
5696
5697 /* There is still another way we can do something: If both pairs of
5698 fields being compared are adjacent, we may be able to make a wider
5699 field containing them both.
5700
5701 Note that we still must mask the lhs/rhs expressions. Furthermore,
5702 the mask must be shifted to account for the shift done by
5703 make_bit_field_ref. */
5704 if ((ll_bitsize + ll_bitpos == rl_bitpos
5705 && lr_bitsize + lr_bitpos == rr_bitpos)
5706 || (ll_bitpos == rl_bitpos + rl_bitsize
5707 && lr_bitpos == rr_bitpos + rr_bitsize))
5708 {
5709 tree type;
5710
5711 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5712 ll_bitsize + rl_bitsize,
5713 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5714 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5715 lr_bitsize + rr_bitsize,
5716 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5717
5718 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5719 size_int (MIN (xll_bitpos, xrl_bitpos)));
5720 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5721 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5722
5723 /* Convert to the smaller type before masking out unwanted bits. */
5724 type = lntype;
5725 if (lntype != rntype)
5726 {
5727 if (lnbitsize > rnbitsize)
5728 {
5729 lhs = fold_convert_loc (loc, rntype, lhs);
5730 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5731 type = rntype;
5732 }
5733 else if (lnbitsize < rnbitsize)
5734 {
5735 rhs = fold_convert_loc (loc, lntype, rhs);
5736 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5737 type = lntype;
5738 }
5739 }
5740
5741 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5742 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5743
5744 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5745 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5746
5747 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5748 }
5749
5750 return 0;
5751 }
5752
5753 /* Handle the case of comparisons with constants. If there is something in
5754 common between the masks, those bits of the constants must be the same.
5755 If not, the combined condition is constant: always false for AND, always
5756 true for OR. Test for this to avoid generating incorrect code below. */
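/* An illustrative example (hypothetical values): in
   (x & 3) == 1 && (x & 5) == 4 the masks share bit 0 but the constants
   disagree there (1 vs. 0), so the conjunction can never hold and the
   whole test folds to false.  */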
5757 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5758 if (! integer_zerop (result)
5759 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5760 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5761 {
5762 if (wanted_code == NE_EXPR)
5763 {
5764 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5765 return constant_boolean_node (true, truth_type);
5766 }
5767 else
5768 {
5769 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5770 return constant_boolean_node (false, truth_type);
5771 }
5772 }
5773
5774 /* Construct the expression we will return. First get the component
5775 reference we will make. Unless the mask is all ones for the width of
5776 that field, perform the mask operation. Then compare with the
5777 merged constant. */
5778 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5779 ll_unsignedp || rl_unsignedp);
5780
5781 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5782 if (! all_ones_mask_p (ll_mask, lnbitsize))
5783 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5784
5785 return build2_loc (loc, wanted_code, truth_type, result,
5786 const_binop (BIT_IOR_EXPR, l_const, r_const));
5787 }
5788 \f
5789 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5790 constant. */
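/* Illustrative examples (hypothetical values): MAX (X, 4) > 4 becomes
   X > 4, MAX (X, 4) == 3 folds to false, and MIN (X, 4) == 4 becomes
   X >= 4.  */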
5791
5792 static tree
5793 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5794 tree op0, tree op1)
5795 {
5796 tree arg0 = op0;
5797 enum tree_code op_code;
5798 tree comp_const;
5799 tree minmax_const;
5800 int consts_equal, consts_lt;
5801 tree inner;
5802
5803 STRIP_SIGN_NOPS (arg0);
5804
5805 op_code = TREE_CODE (arg0);
5806 minmax_const = TREE_OPERAND (arg0, 1);
5807 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5808 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5809 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5810 inner = TREE_OPERAND (arg0, 0);
5811
5812 /* If something does not permit us to optimize, return the original tree. */
5813 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5814 || TREE_CODE (comp_const) != INTEGER_CST
5815 || TREE_OVERFLOW (comp_const)
5816 || TREE_CODE (minmax_const) != INTEGER_CST
5817 || TREE_OVERFLOW (minmax_const))
5818 return NULL_TREE;
5819
5820 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5821 and GT_EXPR, doing the rest with recursive calls using logical
5822 simplifications. */
5823 switch (code)
5824 {
5825 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5826 {
5827 tree tem
5828 = optimize_minmax_comparison (loc,
5829 invert_tree_comparison (code, false),
5830 type, op0, op1);
5831 if (tem)
5832 return invert_truthvalue_loc (loc, tem);
5833 return NULL_TREE;
5834 }
5835
5836 case GE_EXPR:
5837 return
5838 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5839 optimize_minmax_comparison
5840 (loc, EQ_EXPR, type, arg0, comp_const),
5841 optimize_minmax_comparison
5842 (loc, GT_EXPR, type, arg0, comp_const));
5843
5844 case EQ_EXPR:
5845 if (op_code == MAX_EXPR && consts_equal)
5846 /* MAX (X, 0) == 0 -> X <= 0 */
5847 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5848
5849 else if (op_code == MAX_EXPR && consts_lt)
5850 /* MAX (X, 0) == 5 -> X == 5 */
5851 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5852
5853 else if (op_code == MAX_EXPR)
5854 /* MAX (X, 0) == -1 -> false */
5855 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5856
5857 else if (consts_equal)
5858 /* MIN (X, 0) == 0 -> X >= 0 */
5859 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5860
5861 else if (consts_lt)
5862 /* MIN (X, 0) == 5 -> false */
5863 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5864
5865 else
5866 /* MIN (X, 0) == -1 -> X == -1 */
5867 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5868
5869 case GT_EXPR:
5870 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5871 /* MAX (X, 0) > 0 -> X > 0
5872 MAX (X, 0) > 5 -> X > 5 */
5873 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5874
5875 else if (op_code == MAX_EXPR)
5876 /* MAX (X, 0) > -1 -> true */
5877 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5878
5879 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5880 /* MIN (X, 0) > 0 -> false
5881 MIN (X, 0) > 5 -> false */
5882 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5883
5884 else
5885 /* MIN (X, 0) > -1 -> X > -1 */
5886 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5887
5888 default:
5889 return NULL_TREE;
5890 }
5891 }
5892 \f
5893 /* T is an integer expression that is being multiplied or divided by, or
5894 taken modulo, a constant C (CODE says which operation and what kind of
5895 divide or modulus). See if we can eliminate that operation by folding it with
5896 other operations already in T. WIDE_TYPE, if non-null, is a type that
5897 should be used for the computation if wider than our type.
5898
5899 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5900 (X * 2) + (Y * 4). We must, however, be assured that either the original
5901 expression would not overflow or that overflow is undefined for the type
5902 in the language in question.
5903
5904 If we return a non-null expression, it is an equivalent form of the
5905 original computation, but need not be in the original type.
5906
5907 We set *STRICT_OVERFLOW_P to true if the return value depends on
5908 signed overflow being undefined. Otherwise we do not change
5909 *STRICT_OVERFLOW_P. */
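/* An illustrative example (hypothetical expression): for a signed type
   with undefined overflow, (X * 6) / 3 cancels to X * 2 because the
   inner multiplier is a multiple of the divisor; *STRICT_OVERFLOW_P is
   set since X * 6 might have overflowed where X * 2 does not.  */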
5910
5911 static tree
5912 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5913 bool *strict_overflow_p)
5914 {
5915 /* To avoid exponential search depth, refuse to allow recursion past
5916 three levels. Beyond that (1) it's highly unlikely that we'll find
5917 something interesting and (2) we've probably processed it before
5918 when we built the inner expression. */
5919
5920 static int depth;
5921 tree ret;
5922
5923 if (depth > 3)
5924 return NULL;
5925
5926 depth++;
5927 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5928 depth--;
5929
5930 return ret;
5931 }
5932
5933 static tree
5934 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5935 bool *strict_overflow_p)
5936 {
5937 tree type = TREE_TYPE (t);
5938 enum tree_code tcode = TREE_CODE (t);
5939 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5940 > GET_MODE_SIZE (TYPE_MODE (type)))
5941 ? wide_type : type);
5942 tree t1, t2;
5943 int same_p = tcode == code;
5944 tree op0 = NULL_TREE, op1 = NULL_TREE;
5945 bool sub_strict_overflow_p;
5946
5947 /* Don't deal with constants of zero here; they confuse the code below. */
5948 if (integer_zerop (c))
5949 return NULL_TREE;
5950
5951 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5952 op0 = TREE_OPERAND (t, 0);
5953
5954 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5955 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5956
5957 /* Note that we need not handle conditional operations here since fold
5958 already handles those cases. So just do arithmetic here. */
5959 switch (tcode)
5960 {
5961 case INTEGER_CST:
5962 /* For a constant, we can always simplify if we are a multiply
5963 or (for divide and modulus) if it is a multiple of our constant. */
5964 if (code == MULT_EXPR
5965 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5966 return const_binop (code, fold_convert (ctype, t),
5967 fold_convert (ctype, c));
5968 break;
5969
5970 CASE_CONVERT: case NON_LVALUE_EXPR:
5971 /* If op0 is an expression ... */
5972 if ((COMPARISON_CLASS_P (op0)
5973 || UNARY_CLASS_P (op0)
5974 || BINARY_CLASS_P (op0)
5975 || VL_EXP_CLASS_P (op0)
5976 || EXPRESSION_CLASS_P (op0))
5977 /* ... and has wrapping overflow, and its type is smaller
5978 than ctype, then we cannot pass through as widening. */
5979 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5980 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5981 && (TYPE_PRECISION (ctype)
5982 > TYPE_PRECISION (TREE_TYPE (op0))))
5983 /* ... or this is a truncation (t is narrower than op0),
5984 then we cannot pass through this narrowing. */
5985 || (TYPE_PRECISION (type)
5986 < TYPE_PRECISION (TREE_TYPE (op0)))
5987 /* ... or signedness changes for division or modulus,
5988 then we cannot pass through this conversion. */
5989 || (code != MULT_EXPR
5990 && (TYPE_UNSIGNED (ctype)
5991 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5992 /* ... or has undefined overflow while the converted to
5993 type has not, we cannot do the operation in the inner type
5994 as that would introduce undefined overflow. */
5995 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5997 && !TYPE_OVERFLOW_UNDEFINED (type))))
5998 break;
5999
6000 /* Pass the constant down and see if we can make a simplification. If
6001 we can, replace this expression with the inner simplification for
6002 possible later conversion to our or some other type. */
6003 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6004 && TREE_CODE (t2) == INTEGER_CST
6005 && !TREE_OVERFLOW (t2)
6006 && (0 != (t1 = extract_muldiv (op0, t2, code,
6007 code == MULT_EXPR
6008 ? ctype : NULL_TREE,
6009 strict_overflow_p))))
6010 return t1;
6011 break;
6012
6013 case ABS_EXPR:
6014 /* If widening the type changes it from signed to unsigned, then we
6015 must avoid building ABS_EXPR itself as unsigned. */
6016 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6017 {
6018 tree cstype = (*signed_type_for) (ctype);
6019 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6020 != 0)
6021 {
6022 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6023 return fold_convert (ctype, t1);
6024 }
6025 break;
6026 }
6027 /* If the constant is negative, we cannot simplify this. */
6028 if (tree_int_cst_sgn (c) == -1)
6029 break;
6030 /* FALLTHROUGH */
6031 case NEGATE_EXPR:
6032 /* For division and modulus, type can't be unsigned, as e.g.
6033 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6034 For signed types, even with wrapping overflow, this is fine. */
6035 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6036 break;
6037 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6038 != 0)
6039 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6040 break;
6041
6042 case MIN_EXPR: case MAX_EXPR:
6043 /* If widening the type changes the signedness, then we can't perform
6044 this optimization as that changes the result. */
6045 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6046 break;
6047
6048 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6049 sub_strict_overflow_p = false;
6050 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6051 &sub_strict_overflow_p)) != 0
6052 && (t2 = extract_muldiv (op1, c, code, wide_type,
6053 &sub_strict_overflow_p)) != 0)
6054 {
6055 if (tree_int_cst_sgn (c) < 0)
6056 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6057 if (sub_strict_overflow_p)
6058 *strict_overflow_p = true;
6059 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6060 fold_convert (ctype, t2));
6061 }
6062 break;
6063
6064 case LSHIFT_EXPR: case RSHIFT_EXPR:
6065 /* If the second operand is constant, this is a multiplication
6066 or floor division, by a power of two, so we can treat it that
6067 way unless the multiplier or divisor overflows. Signed
6068 left-shift overflow is implementation-defined rather than
6069 undefined in C90, so do not convert signed left shift into
6070 multiplication. */
6071 if (TREE_CODE (op1) == INTEGER_CST
6072 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6073 /* const_binop may not detect overflow correctly,
6074 so check for it explicitly here. */
6075 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6076 && 0 != (t1 = fold_convert (ctype,
6077 const_binop (LSHIFT_EXPR,
6078 size_one_node,
6079 op1)))
6080 && !TREE_OVERFLOW (t1))
6081 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6082 ? MULT_EXPR : FLOOR_DIV_EXPR,
6083 ctype,
6084 fold_convert (ctype, op0),
6085 t1),
6086 c, code, wide_type, strict_overflow_p);
6087 break;
6088
6089 case PLUS_EXPR: case MINUS_EXPR:
6090 /* See if we can eliminate the operation on both sides. If we can, we
6091 can return a new PLUS or MINUS. If we can't, the only remaining
6092 cases where we can do anything are if the second operand is a
6093 constant. */
6094 sub_strict_overflow_p = false;
6095 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6096 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6097 if (t1 != 0 && t2 != 0
6098 && (code == MULT_EXPR
6099 /* If not multiplication, we can only do this if both operands
6100 are divisible by c. */
6101 || (multiple_of_p (ctype, op0, c)
6102 && multiple_of_p (ctype, op1, c))))
6103 {
6104 if (sub_strict_overflow_p)
6105 *strict_overflow_p = true;
6106 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6107 fold_convert (ctype, t2));
6108 }
6109
6110 /* If this was a subtraction, negate OP1 and set it to be an addition.
6111 This simplifies the logic below. */
6112 if (tcode == MINUS_EXPR)
6113 {
6114 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6115 /* If OP1 was not easily negatable, the constant may be OP0. */
6116 if (TREE_CODE (op0) == INTEGER_CST)
6117 {
6118 std::swap (op0, op1);
6119 std::swap (t1, t2);
6120 }
6121 }
6122
6123 if (TREE_CODE (op1) != INTEGER_CST)
6124 break;
6125
6126 /* If either OP1 or C is negative, this optimization is not safe for
6127 some of the division and remainder types, while for others we need
6128 to change the code.
6129 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6130 {
6131 if (code == CEIL_DIV_EXPR)
6132 code = FLOOR_DIV_EXPR;
6133 else if (code == FLOOR_DIV_EXPR)
6134 code = CEIL_DIV_EXPR;
6135 else if (code != MULT_EXPR
6136 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6137 break;
6138 }
6139
6140 /* If it's a multiply or a division/modulus operation of a multiple
6141 of our constant, do the operation and verify it doesn't overflow. */
6142 if (code == MULT_EXPR
6143 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6144 {
6145 op1 = const_binop (code, fold_convert (ctype, op1),
6146 fold_convert (ctype, c));
6147 /* We allow the constant to overflow with wrapping semantics. */
6148 if (op1 == 0
6149 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6150 break;
6151 }
6152 else
6153 break;
6154
6155 /* If we have an unsigned type, we cannot widen the operation since it
6156 will change the result if the original computation overflowed. */
6157 if (TYPE_UNSIGNED (ctype) && ctype != type)
6158 break;
6159
6160 /* If we were able to eliminate our operation from the first side,
6161 apply our operation to the second side and reform the PLUS. */
6162 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6163 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6164
6165 /* The last case is if we are a multiply. In that case, we can
6166 apply the distributive law to commute the multiply and addition
6167 if the multiplication of the constants doesn't overflow
6168 and overflow is defined. With undefined overflow
6169 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6170 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6171 return fold_build2 (tcode, ctype,
6172 fold_build2 (code, ctype,
6173 fold_convert (ctype, op0),
6174 fold_convert (ctype, c)),
6175 op1);
6176
6177 break;
6178
6179 case MULT_EXPR:
6180 /* We have a special case here if we are doing something like
6181 (C * 8) % 4 since we know that's zero. */
6182 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6183 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6184 /* If the multiplication can overflow we cannot optimize this. */
6185 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6186 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6187 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6188 {
6189 *strict_overflow_p = true;
6190 return omit_one_operand (type, integer_zero_node, op0);
6191 }
6192
6193 /* ... fall through ... */
6194
6195 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6196 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6197 /* If we can extract our operation from the LHS, do so and return a
6198 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6199 do something only if the second operand is a constant. */
6200 if (same_p
6201 && (t1 = extract_muldiv (op0, c, code, wide_type,
6202 strict_overflow_p)) != 0)
6203 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6204 fold_convert (ctype, op1));
6205 else if (tcode == MULT_EXPR && code == MULT_EXPR
6206 && (t1 = extract_muldiv (op1, c, code, wide_type,
6207 strict_overflow_p)) != 0)
6208 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6209 fold_convert (ctype, t1));
6210 else if (TREE_CODE (op1) != INTEGER_CST)
6211 return 0;
6212
6213 /* If these are the same operation types, we can associate them
6214 assuming no overflow. */
6215 if (tcode == code)
6216 {
6217 bool overflow_p = false;
6218 bool overflow_mul_p;
6219 signop sign = TYPE_SIGN (ctype);
6220 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6221 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6222 if (overflow_mul_p
6223 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6224 overflow_p = true;
6225 if (!overflow_p)
6226 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6227 wide_int_to_tree (ctype, mul));
6228 }
6229
6230 /* If these operations "cancel" each other, we have the main
6231 optimizations of this pass, which occur when either constant is a
6232 multiple of the other, in which case we replace this with either an
6233 operation of CODE or TCODE.
6234
6235 If we have an unsigned type, we cannot do this since it will change
6236 the result if the original computation overflowed. */
6237 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6238 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6239 || (tcode == MULT_EXPR
6240 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6241 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6242 && code != MULT_EXPR)))
6243 {
6244 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6245 {
6246 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6247 *strict_overflow_p = true;
6248 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6249 fold_convert (ctype,
6250 const_binop (TRUNC_DIV_EXPR,
6251 op1, c)));
6252 }
6253 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6254 {
6255 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6256 *strict_overflow_p = true;
6257 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6258 fold_convert (ctype,
6259 const_binop (TRUNC_DIV_EXPR,
6260 c, op1)));
6261 }
6262 }
6263 break;
6264
6265 default:
6266 break;
6267 }
6268
6269 return 0;
6270 }
6271 \f
6272 /* Return a node which has the indicated constant VALUE (either 0 or
6273 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6274 and is of the indicated TYPE. */
6275
6276 tree
6277 constant_boolean_node (bool value, tree type)
6278 {
6279 if (type == integer_type_node)
6280 return value ? integer_one_node : integer_zero_node;
6281 else if (type == boolean_type_node)
6282 return value ? boolean_true_node : boolean_false_node;
6283 else if (TREE_CODE (type) == VECTOR_TYPE)
6284 return build_vector_from_val (type,
6285 build_int_cst (TREE_TYPE (type),
6286 value ? -1 : 0));
6287 else
6288 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6289 }
6290
6291
6292 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6293 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6294 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6295 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6296 COND is the first argument to CODE; otherwise (as in the example
6297 given here), it is the second argument. TYPE is the type of the
6298 original expression. Return NULL_TREE if no simplification is
6299 possible. */
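/* An illustrative example (hypothetical operands): with CODE = PLUS_EXPR,
   ARG = 1 and COND = (b ? 2 : 3), the expression 1 + (b ? 2 : 3) is
   rewritten as b ? 3 : 4; the addition is pushed into both arms and
   each arm folds to a constant.  */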
6300
6301 static tree
6302 fold_binary_op_with_conditional_arg (location_t loc,
6303 enum tree_code code,
6304 tree type, tree op0, tree op1,
6305 tree cond, tree arg, int cond_first_p)
6306 {
6307 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6308 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6309 tree test, true_value, false_value;
6310 tree lhs = NULL_TREE;
6311 tree rhs = NULL_TREE;
6312 enum tree_code cond_code = COND_EXPR;
6313
6314 if (TREE_CODE (cond) == COND_EXPR
6315 || TREE_CODE (cond) == VEC_COND_EXPR)
6316 {
6317 test = TREE_OPERAND (cond, 0);
6318 true_value = TREE_OPERAND (cond, 1);
6319 false_value = TREE_OPERAND (cond, 2);
6320 /* If this operand throws an exception, then it does not make
6321 sense to try to perform a logical or arithmetic operation
6322 involving it. */
6323 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6324 lhs = true_value;
6325 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6326 rhs = false_value;
6327 }
6328 else
6329 {
6330 tree testtype = TREE_TYPE (cond);
6331 test = cond;
6332 true_value = constant_boolean_node (true, testtype);
6333 false_value = constant_boolean_node (false, testtype);
6334 }
6335
6336 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6337 cond_code = VEC_COND_EXPR;
6338
6339 /* This transformation is only worthwhile if we don't have to wrap ARG
6340 in a SAVE_EXPR and the operation can be simplified without recursing
6341 on at least one of the branches once it is pushed inside the COND_EXPR. */
6342 if (!TREE_CONSTANT (arg)
6343 && (TREE_SIDE_EFFECTS (arg)
6344 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6345 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6346 return NULL_TREE;
6347
6348 arg = fold_convert_loc (loc, arg_type, arg);
6349 if (lhs == 0)
6350 {
6351 true_value = fold_convert_loc (loc, cond_type, true_value);
6352 if (cond_first_p)
6353 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6354 else
6355 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6356 }
6357 if (rhs == 0)
6358 {
6359 false_value = fold_convert_loc (loc, cond_type, false_value);
6360 if (cond_first_p)
6361 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6362 else
6363 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6364 }
6365
6366 /* Check that we have simplified at least one of the branches. */
6367 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6368 return NULL_TREE;
6369
6370 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6371 }
6372
6373 \f
6374 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6375
6376 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6377 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6378 ADDEND is the same as X.
6379
6380 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6381 and finite. The problematic cases are when X is zero, and its mode
6382 has signed zeros. In the case of rounding towards -infinity,
6383 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6384 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6385
6386 bool
6387 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6388 {
6389 if (!real_zerop (addend))
6390 return false;
6391
6392 /* Don't allow the fold with -fsignaling-nans. */
6393 if (HONOR_SNANS (element_mode (type)))
6394 return false;
6395
6396 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6397 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6398 return true;
6399
6400 /* In a vector or complex, we would need to check the sign of all zeros. */
6401 if (TREE_CODE (addend) != REAL_CST)
6402 return false;
6403
6404 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6405 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6406 negate = !negate;
6407
6408 /* The mode has signed zeros, and we have to honor their sign.
6409 In this situation, there is only one case we can return true for.
6410 X - 0 is the same as X unless rounding towards -infinity is
6411 supported. */
6412 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6413 }
6414
6415 /* Subroutine of fold() that optimizes comparisons of a division by
6416 a nonzero integer constant against an integer constant, i.e.
6417 X/C1 op C2.
6418
6419 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6420 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6421 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6422
6423 The function returns the constant folded tree if a simplification
6424 can be made, and NULL_TREE otherwise. */
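/* An illustrative example (hypothetical values): for signed truncating
   division, X / 4 == 3 holds exactly when 12 <= X <= 15, so the
   comparison becomes a range check with lo = 12 and hi = 15; the
   TREE_OVERFLOW tests in the body catch products near the ends of the
   type's range.  */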
6425
6426 static tree
6427 fold_div_compare (location_t loc,
6428 enum tree_code code, tree type, tree arg0, tree arg1)
6429 {
6430 tree prod, tmp, hi, lo;
6431 tree arg00 = TREE_OPERAND (arg0, 0);
6432 tree arg01 = TREE_OPERAND (arg0, 1);
6433 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6434 bool neg_overflow = false;
6435 bool overflow;
6436
6437 /* We have to do this the hard way to detect unsigned overflow.
6438 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6439 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6440 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6442
6443 if (sign == UNSIGNED)
6444 {
6445 tmp = int_const_binop (MINUS_EXPR, arg01,
6446 build_int_cst (TREE_TYPE (arg01), 1));
6447 lo = prod;
6448
6449 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6450 val = wi::add (prod, tmp, sign, &overflow);
6451 hi = force_fit_type (TREE_TYPE (arg00), val,
6452 -1, overflow | TREE_OVERFLOW (prod));
6453 }
6454 else if (tree_int_cst_sgn (arg01) >= 0)
6455 {
6456 tmp = int_const_binop (MINUS_EXPR, arg01,
6457 build_int_cst (TREE_TYPE (arg01), 1));
6458 switch (tree_int_cst_sgn (arg1))
6459 {
6460 case -1:
6461 neg_overflow = true;
6462 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6463 hi = prod;
6464 break;
6465
6466 case 0:
6467 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6468 hi = tmp;
6469 break;
6470
6471 case 1:
6472 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6473 lo = prod;
6474 break;
6475
6476 default:
6477 gcc_unreachable ();
6478 }
6479 }
6480 else
6481 {
6482 /* A negative divisor reverses the relational operators. */
6483 code = swap_tree_comparison (code);
6484
6485 tmp = int_const_binop (PLUS_EXPR, arg01,
6486 build_int_cst (TREE_TYPE (arg01), 1));
6487 switch (tree_int_cst_sgn (arg1))
6488 {
6489 case -1:
6490 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6491 lo = prod;
6492 break;
6493
6494 case 0:
6495 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6496 lo = tmp;
6497 break;
6498
6499 case 1:
6500 neg_overflow = true;
6501 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6502 hi = prod;
6503 break;
6504
6505 default:
6506 gcc_unreachable ();
6507 }
6508 }
6509
6510 switch (code)
6511 {
6512 case EQ_EXPR:
6513 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6514 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6515 if (TREE_OVERFLOW (hi))
6516 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6517 if (TREE_OVERFLOW (lo))
6518 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6519 return build_range_check (loc, type, arg00, 1, lo, hi);
6520
6521 case NE_EXPR:
6522 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6523 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6524 if (TREE_OVERFLOW (hi))
6525 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6526 if (TREE_OVERFLOW (lo))
6527 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6528 return build_range_check (loc, type, arg00, 0, lo, hi);
6529
6530 case LT_EXPR:
6531 if (TREE_OVERFLOW (lo))
6532 {
6533 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6534 return omit_one_operand_loc (loc, type, tmp, arg00);
6535 }
6536 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6537
6538 case LE_EXPR:
6539 if (TREE_OVERFLOW (hi))
6540 {
6541 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6542 return omit_one_operand_loc (loc, type, tmp, arg00);
6543 }
6544 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6545
6546 case GT_EXPR:
6547 if (TREE_OVERFLOW (hi))
6548 {
6549 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6550 return omit_one_operand_loc (loc, type, tmp, arg00);
6551 }
6552 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6553
6554 case GE_EXPR:
6555 if (TREE_OVERFLOW (lo))
6556 {
6557 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6558 return omit_one_operand_loc (loc, type, tmp, arg00);
6559 }
6560 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6561
6562 default:
6563 break;
6564 }
6565
6566 return NULL_TREE;
6567 }
6568
6569
6570 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6571 equality/inequality test, then return a simplified form of the test
6572 using a sign test. Otherwise return NULL. TYPE is the desired
6573 result type. */
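/* An illustrative example (hypothetical 8-bit type): if x has type
   signed char, x & 0x80 isolates the sign bit, so (x & 0x80) != 0
   becomes x < 0 and (x & 0x80) == 0 becomes x >= 0.  */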
6574
6575 static tree
6576 fold_single_bit_test_into_sign_test (location_t loc,
6577 enum tree_code code, tree arg0, tree arg1,
6578 tree result_type)
6579 {
6580 /* If this is testing a single bit, we can optimize the test. */
6581 if ((code == NE_EXPR || code == EQ_EXPR)
6582 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6583 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6584 {
6585 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6586 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6587 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6588
6589 if (arg00 != NULL_TREE
6590 /* This is only a win if casting to a signed type is cheap,
6591 i.e. when arg00's type is not a partial mode. */
6592 && TYPE_PRECISION (TREE_TYPE (arg00))
6593 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6594 {
6595 tree stype = signed_type_for (TREE_TYPE (arg00));
6596 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6597 result_type,
6598 fold_convert_loc (loc, stype, arg00),
6599 build_int_cst (stype, 0));
6600 }
6601 }
6602
6603 return NULL_TREE;
6604 }
6605
6606 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6607 equality/inequality test, then return a simplified form of
6608 the test using shifts and logical operations. Otherwise return
6609 NULL. TYPE is the desired result type. */
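/* An illustrative example (hypothetical values): (x & 8) != 0 tests
   bit 3, so when the sign-bit form does not apply it is rewritten as
   ((x >> 3) & 1), and (x & 8) == 0 as (((x >> 3) ^ 1) & 1).  */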
6610
6611 tree
6612 fold_single_bit_test (location_t loc, enum tree_code code,
6613 tree arg0, tree arg1, tree result_type)
6614 {
6615 /* If this is testing a single bit, we can optimize the test. */
6616 if ((code == NE_EXPR || code == EQ_EXPR)
6617 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6618 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6619 {
6620 tree inner = TREE_OPERAND (arg0, 0);
6621 tree type = TREE_TYPE (arg0);
6622 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6623 machine_mode operand_mode = TYPE_MODE (type);
6624 int ops_unsigned;
6625 tree signed_type, unsigned_type, intermediate_type;
6626 tree tem, one;
6627
6628 /* First, see if we can fold the single bit test into a sign-bit
6629 test. */
6630 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6631 result_type);
6632 if (tem)
6633 return tem;
6634
6635 /* Otherwise we have (A & C) != 0 where C is a single bit,
6636 convert that into ((A >> C2) & 1), where C2 = log2(C).
6637 Similarly for (A & C) == 0. */
6638
6639 /* If INNER is a right shift of a constant and it plus BITNUM does
6640 not overflow, adjust BITNUM and INNER. */
6641 if (TREE_CODE (inner) == RSHIFT_EXPR
6642 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6643 && bitnum < TYPE_PRECISION (type)
6644 && wi::ltu_p (TREE_OPERAND (inner, 1),
6645 TYPE_PRECISION (type) - bitnum))
6646 {
6647 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6648 inner = TREE_OPERAND (inner, 0);
6649 }
6650
6651 /* If we are going to be able to omit the AND below, we must do our
6652 operations as unsigned. If we must use the AND, we have a choice.
6653 Normally unsigned is faster, but for some machines signed is. */
6654 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6655 && !flag_syntax_only) ? 0 : 1;
6656
6657 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6658 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6659 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6660 inner = fold_convert_loc (loc, intermediate_type, inner);
6661
6662 if (bitnum != 0)
6663 inner = build2 (RSHIFT_EXPR, intermediate_type,
6664 inner, size_int (bitnum));
6665
6666 one = build_int_cst (intermediate_type, 1);
6667
6668 if (code == EQ_EXPR)
6669 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6670
6671 /* Put the AND last so it can combine with more things. */
6672 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6673
6674 /* Make sure to return the proper type. */
6675 inner = fold_convert_loc (loc, result_type, inner);
6676
6677 return inner;
6678 }
6679 return NULL_TREE;
6680 }
6681
6682 /* Check whether we are allowed to reorder operands arg0 and arg1,
6683 such that the evaluation of arg1 occurs before arg0. */
6684
6685 static bool
6686 reorder_operands_p (const_tree arg0, const_tree arg1)
6687 {
6688 if (! flag_evaluation_order)
6689 return true;
6690 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6691 return true;
6692 return ! TREE_SIDE_EFFECTS (arg0)
6693 && ! TREE_SIDE_EFFECTS (arg1);
6694 }
6695
6696 /* Test whether it is preferable to swap two operands, ARG0 and
6697 ARG1, for example because ARG0 is an integer constant and ARG1
6698 isn't. If REORDER is true, only recommend swapping if we can
6699 evaluate the operands in reverse order. */
6700
6701 bool
6702 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6703 {
6704 if (CONSTANT_CLASS_P (arg1))
6705 return 0;
6706 if (CONSTANT_CLASS_P (arg0))
6707 return 1;
6708
6709 STRIP_NOPS (arg0);
6710 STRIP_NOPS (arg1);
6711
6712 if (TREE_CONSTANT (arg1))
6713 return 0;
6714 if (TREE_CONSTANT (arg0))
6715 return 1;
6716
6717 if (reorder && flag_evaluation_order
6718 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6719 return 0;
6720
6721 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6722 for commutative and comparison operators. Ensuring a canonical
6723 form allows the optimizers to find additional redundancies without
6724 having to explicitly check for both orderings. */
6725 if (TREE_CODE (arg0) == SSA_NAME
6726 && TREE_CODE (arg1) == SSA_NAME
6727 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6728 return 1;
6729
6730 /* Put SSA_NAMEs last. */
6731 if (TREE_CODE (arg1) == SSA_NAME)
6732 return 0;
6733 if (TREE_CODE (arg0) == SSA_NAME)
6734 return 1;
6735
6736 /* Put variables last. */
6737 if (DECL_P (arg1))
6738 return 0;
6739 if (DECL_P (arg0))
6740 return 1;
6741
6742 return 0;
6743 }
6744
6745 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6746 ARG0 is extended to a wider type. */
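/* An illustrative example (hypothetical types): if c has type unsigned
   char, (int) c < 300 can never be false because the widened value is
   at most 255, so it folds to constant true, while (int) c == 200 is
   narrowed to the direct unsigned char comparison c == 200.  */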
6747
6748 static tree
6749 fold_widened_comparison (location_t loc, enum tree_code code,
6750 tree type, tree arg0, tree arg1)
6751 {
6752 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6753 tree arg1_unw;
6754 tree shorter_type, outer_type;
6755 tree min, max;
6756 bool above, below;
6757
6758 if (arg0_unw == arg0)
6759 return NULL_TREE;
6760 shorter_type = TREE_TYPE (arg0_unw);
6761
6762 /* Disable this optimization if we're casting a function pointer
6763 type on targets that require function pointer canonicalization. */
6764 if (targetm.have_canonicalize_funcptr_for_compare ()
6765 && TREE_CODE (shorter_type) == POINTER_TYPE
6766 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6767 return NULL_TREE;
6768
6769 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6770 return NULL_TREE;
6771
6772 arg1_unw = get_unwidened (arg1, NULL_TREE);
6773
6774 /* If possible, express the comparison in the shorter mode. */
6775 if ((code == EQ_EXPR || code == NE_EXPR
6776 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6777 && (TREE_TYPE (arg1_unw) == shorter_type
6778 || ((TYPE_PRECISION (shorter_type)
6779 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6780 && (TYPE_UNSIGNED (shorter_type)
6781 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6782 || (TREE_CODE (arg1_unw) == INTEGER_CST
6783 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6784 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6785 && int_fits_type_p (arg1_unw, shorter_type))))
6786 return fold_build2_loc (loc, code, type, arg0_unw,
6787 fold_convert_loc (loc, shorter_type, arg1_unw));
6788
6789 if (TREE_CODE (arg1_unw) != INTEGER_CST
6790 || TREE_CODE (shorter_type) != INTEGER_TYPE
6791 || int_fits_type_p (arg1_unw, shorter_type))
6792 return NULL_TREE;
6793
6794 /* If we are comparing with an integer that does not fit into the range
6795 of the shorter type, the result is known. */
6796 outer_type = TREE_TYPE (arg1_unw);
6797 min = lower_bound_in_type (outer_type, shorter_type);
6798 max = upper_bound_in_type (outer_type, shorter_type);
6799
6800 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6801 max, arg1_unw));
6802 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6803 arg1_unw, min));
6804
6805 switch (code)
6806 {
6807 case EQ_EXPR:
6808 if (above || below)
6809 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6810 break;
6811
6812 case NE_EXPR:
6813 if (above || below)
6814 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6815 break;
6816
6817 case LT_EXPR:
6818 case LE_EXPR:
6819 if (above)
6820 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6821 else if (below)
6822 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
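/* FALLTHROUGH */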
6823
6824 case GT_EXPR:
6825 case GE_EXPR:
6826 if (above)
6827 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6828 else if (below)
6829 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
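/* FALLTHROUGH */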
6830
6831 default:
6832 break;
6833 }
6834
6835 return NULL_TREE;
6836 }
6837
6838 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6839 ARG0 just the signedness is changed. */
6840
6841 static tree
6842 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6843 tree arg0, tree arg1)
6844 {
6845 tree arg0_inner;
6846 tree inner_type, outer_type;
6847
6848 if (!CONVERT_EXPR_P (arg0))
6849 return NULL_TREE;
6850
6851 outer_type = TREE_TYPE (arg0);
6852 arg0_inner = TREE_OPERAND (arg0, 0);
6853 inner_type = TREE_TYPE (arg0_inner);
6854
6855 /* Disable this optimization if we're casting a function pointer
6856 type on targets that require function pointer canonicalization. */
6857 if (targetm.have_canonicalize_funcptr_for_compare ()
6858 && TREE_CODE (inner_type) == POINTER_TYPE
6859 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6860 return NULL_TREE;
6861
6862 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6863 return NULL_TREE;
6864
6865 if (TREE_CODE (arg1) != INTEGER_CST
6866 && !(CONVERT_EXPR_P (arg1)
6867 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6868 return NULL_TREE;
6869
6870 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6871 && code != NE_EXPR
6872 && code != EQ_EXPR)
6873 return NULL_TREE;
6874
6875 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6876 return NULL_TREE;
6877
6878 if (TREE_CODE (arg1) == INTEGER_CST)
6879 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6880 TREE_OVERFLOW (arg1));
6881 else
6882 arg1 = fold_convert_loc (loc, inner_type, arg1);
6883
6884 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6885 }
6886
6887
6888 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6889 means A >= Y && A != MAX, but in this case we know that
6890 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
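/* An illustrative example (hypothetical values): from A < X we know A
   is below the type's maximum, so A + 1 cannot wrap, and A + 1 > Y is
   equivalent to A >= Y; e.g. with A = 7, both 8 > Y and 7 >= Y accept
   exactly Y <= 7.  */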
6891
6892 static tree
6893 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6894 {
6895 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6896
6897 if (TREE_CODE (bound) == LT_EXPR)
6898 a = TREE_OPERAND (bound, 0);
6899 else if (TREE_CODE (bound) == GT_EXPR)
6900 a = TREE_OPERAND (bound, 1);
6901 else
6902 return NULL_TREE;
6903
6904 typea = TREE_TYPE (a);
6905 if (!INTEGRAL_TYPE_P (typea)
6906 && !POINTER_TYPE_P (typea))
6907 return NULL_TREE;
6908
6909 if (TREE_CODE (ineq) == LT_EXPR)
6910 {
6911 a1 = TREE_OPERAND (ineq, 1);
6912 y = TREE_OPERAND (ineq, 0);
6913 }
6914 else if (TREE_CODE (ineq) == GT_EXPR)
6915 {
6916 a1 = TREE_OPERAND (ineq, 0);
6917 y = TREE_OPERAND (ineq, 1);
6918 }
6919 else
6920 return NULL_TREE;
6921
6922 if (TREE_TYPE (a1) != typea)
6923 return NULL_TREE;
6924
6925 if (POINTER_TYPE_P (typea))
6926 {
6927 /* Convert the pointer types into integers before taking the difference. */
6928 tree ta = fold_convert_loc (loc, ssizetype, a);
6929 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6930 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6931 }
6932 else
6933 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6934
6935 if (!diff || !integer_onep (diff))
6936 return NULL_TREE;
6937
6938 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6939 }
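
/* For example, with BOUND "a < x" and INEQ "a + 1 > y", the difference
   (a + 1) - a folds to 1, so the non-sharp form "a >= y" is returned;
   the guard a < x rules out a == MAX, which is what normally prevents
   a + 1 > y from being rewritten this way.  */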
6940
6941 /* Fold a sum or difference of at least one multiplication.
6942 Returns the folded tree or NULL if no simplification could be made. */
6943
6944 static tree
6945 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6946 tree arg0, tree arg1)
6947 {
6948 tree arg00, arg01, arg10, arg11;
6949 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6950
6951 /* (A * C) +- (B * C) -> (A+-B) * C.
6952 (A * C) +- A -> A * (C+-1).
6953 We are most concerned about the case where C is a constant,
6954 but other combinations show up during loop reduction. Since
6955 it is not difficult, try all four possibilities. */
6956
6957 if (TREE_CODE (arg0) == MULT_EXPR)
6958 {
6959 arg00 = TREE_OPERAND (arg0, 0);
6960 arg01 = TREE_OPERAND (arg0, 1);
6961 }
6962 else if (TREE_CODE (arg0) == INTEGER_CST)
6963 {
6964 arg00 = build_one_cst (type);
6965 arg01 = arg0;
6966 }
6967 else
6968 {
6969 /* We cannot generate constant 1 for fract. */
6970 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6971 return NULL_TREE;
6972 arg00 = arg0;
6973 arg01 = build_one_cst (type);
6974 }
6975 if (TREE_CODE (arg1) == MULT_EXPR)
6976 {
6977 arg10 = TREE_OPERAND (arg1, 0);
6978 arg11 = TREE_OPERAND (arg1, 1);
6979 }
6980 else if (TREE_CODE (arg1) == INTEGER_CST)
6981 {
6982 arg10 = build_one_cst (type);
6983       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6984          the purpose of this canonicalization.  */
6985 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6986 && negate_expr_p (arg1)
6987 && code == PLUS_EXPR)
6988 {
6989 arg11 = negate_expr (arg1);
6990 code = MINUS_EXPR;
6991 }
6992 else
6993 arg11 = arg1;
6994 }
6995 else
6996 {
6997 /* We cannot generate constant 1 for fract. */
6998 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6999 return NULL_TREE;
7000 arg10 = arg1;
7001 arg11 = build_one_cst (type);
7002 }
7003 same = NULL_TREE;
7004
7005 if (operand_equal_p (arg01, arg11, 0))
7006 same = arg01, alt0 = arg00, alt1 = arg10;
7007 else if (operand_equal_p (arg00, arg10, 0))
7008 same = arg00, alt0 = arg01, alt1 = arg11;
7009 else if (operand_equal_p (arg00, arg11, 0))
7010 same = arg00, alt0 = arg01, alt1 = arg10;
7011 else if (operand_equal_p (arg01, arg10, 0))
7012 same = arg01, alt0 = arg00, alt1 = arg11;
7013
7014 /* No identical multiplicands; see if we can find a common
7015 power-of-two factor in non-power-of-two multiplies. This
7016 can help in multi-dimensional array access. */
7017 else if (tree_fits_shwi_p (arg01)
7018 && tree_fits_shwi_p (arg11))
7019 {
7020 HOST_WIDE_INT int01, int11, tmp;
7021 bool swap = false;
7022 tree maybe_same;
7023 int01 = tree_to_shwi (arg01);
7024 int11 = tree_to_shwi (arg11);
7025
7026 /* Move min of absolute values to int11. */
7027 if (absu_hwi (int01) < absu_hwi (int11))
7028 {
7029 tmp = int01, int01 = int11, int11 = tmp;
7030 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7031 maybe_same = arg01;
7032 swap = true;
7033 }
7034 else
7035 maybe_same = arg11;
7036
7037 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7038           /* The remainder should not be a constant, otherwise we
7039              end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7040              increase the number of multiplications necessary.  */
7041 && TREE_CODE (arg10) != INTEGER_CST)
7042 {
7043 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7044 build_int_cst (TREE_TYPE (arg00),
7045 int01 / int11));
7046 alt1 = arg10;
7047 same = maybe_same;
7048 if (swap)
7049 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7050 }
7051 }
7052
7053 if (same)
7054 return fold_build2_loc (loc, MULT_EXPR, type,
7055 fold_build2_loc (loc, code, type,
7056 fold_convert_loc (loc, type, alt0),
7057 fold_convert_loc (loc, type, alt1)),
7058 fold_convert_loc (loc, type, same));
7059
7060 return NULL_TREE;
7061 }
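
/* Illustrative instances of the folding above:

     a * c + b * c  ->  (a + b) * c      (identical multiplicand C)
     a * c - a      ->  (c - 1) * a      (A treated as A * 1)
     i * 4 + j * 2  ->  (i * 2 + j) * 2  (common power-of-two factor)  */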
7062
7063 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7066 upon failure. */
7067
7068 static int
7069 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7070 {
7071 tree type = TREE_TYPE (expr);
7072 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7073 int byte, offset, word, words;
7074 unsigned char value;
7075
7076 if ((off == -1 && total_bytes > len)
7077 || off >= total_bytes)
7078 return 0;
7079 if (off == -1)
7080 off = 0;
7081 words = total_bytes / UNITS_PER_WORD;
7082
7083 for (byte = 0; byte < total_bytes; byte++)
7084 {
7085 int bitpos = byte * BITS_PER_UNIT;
7086 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7087 number of bytes. */
7088 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7089
7090 if (total_bytes > UNITS_PER_WORD)
7091 {
7092 word = byte / UNITS_PER_WORD;
7093 if (WORDS_BIG_ENDIAN)
7094 word = (words - 1) - word;
7095 offset = word * UNITS_PER_WORD;
7096 if (BYTES_BIG_ENDIAN)
7097 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7098 else
7099 offset += byte % UNITS_PER_WORD;
7100 }
7101 else
7102 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7103 if (offset >= off
7104 && offset - off < len)
7105 ptr[offset - off] = value;
7106 }
7107 return MIN (len, total_bytes - off);
7108 }
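
/* The loop above is a target-order generalization of the usual
   byte-scatter idiom.  A minimal host-side sketch of the same idea
   (plain C, little-endian byte order, 8-bit bytes assumed; the name
   encode_le is hypothetical):

     #include <stdint.h>

     static void
     encode_le (uint64_t val, unsigned char *buf, int nbytes)
     {
       for (int byte = 0; byte < nbytes; byte++)
	 buf[byte] = (unsigned char) (val >> (byte * 8));
     }

   native_encode_int additionally permutes each byte's destination
   according to BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN.  */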
7109
7110
7111 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7112 specified by EXPR into the buffer PTR of length LEN bytes.
7113 Return the number of bytes placed in the buffer, or zero
7114 upon failure. */
7115
7116 static int
7117 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7118 {
7119 tree type = TREE_TYPE (expr);
7120 machine_mode mode = TYPE_MODE (type);
7121 int total_bytes = GET_MODE_SIZE (mode);
7122 FIXED_VALUE_TYPE value;
7123 tree i_value, i_type;
7124
7125 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7126 return 0;
7127
7128 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7129
7130   if (NULL_TREE == i_type
7131       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7132 return 0;
7133
7134 value = TREE_FIXED_CST (expr);
7135 i_value = double_int_to_tree (i_type, value.data);
7136
7137 return native_encode_int (i_value, ptr, len, off);
7138 }
7139
7140
7141 /* Subroutine of native_encode_expr. Encode the REAL_CST
7142 specified by EXPR into the buffer PTR of length LEN bytes.
7143 Return the number of bytes placed in the buffer, or zero
7144 upon failure. */
7145
7146 static int
7147 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7148 {
7149 tree type = TREE_TYPE (expr);
7150 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7151 int byte, offset, word, words, bitpos;
7152 unsigned char value;
7153
7154   /* There are always 32 bits in each long, no matter the size of
7155      the host's long.  We handle floating point representations with
7156      up to 192 bits.  */
7157 long tmp[6];
7158
7159 if ((off == -1 && total_bytes > len)
7160 || off >= total_bytes)
7161 return 0;
7162 if (off == -1)
7163 off = 0;
7164 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7165
7166 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7167
7168 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7169 bitpos += BITS_PER_UNIT)
7170 {
7171 byte = (bitpos / BITS_PER_UNIT) & 3;
7172 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7173
7174 if (UNITS_PER_WORD < 4)
7175 {
7176 word = byte / UNITS_PER_WORD;
7177 if (WORDS_BIG_ENDIAN)
7178 word = (words - 1) - word;
7179 offset = word * UNITS_PER_WORD;
7180 if (BYTES_BIG_ENDIAN)
7181 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7182 else
7183 offset += byte % UNITS_PER_WORD;
7184 }
7185 else
7186 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7187 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7188 if (offset >= off
7189 && offset - off < len)
7190 ptr[offset - off] = value;
7191 }
7192 return MIN (len, total_bytes - off);
7193 }
7194
7195 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7196 specified by EXPR into the buffer PTR of length LEN bytes.
7197 Return the number of bytes placed in the buffer, or zero
7198 upon failure. */
7199
7200 static int
7201 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7202 {
7203 int rsize, isize;
7204 tree part;
7205
7206 part = TREE_REALPART (expr);
7207 rsize = native_encode_expr (part, ptr, len, off);
7208 if (off == -1
7209 && rsize == 0)
7210 return 0;
7211 part = TREE_IMAGPART (expr);
7212 if (off != -1)
7213 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7214 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7215 if (off == -1
7216 && isize != rsize)
7217 return 0;
7218 return rsize + isize;
7219 }
7220
7221
7222 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7225 upon failure. */
7226
7227 static int
7228 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7229 {
7230 unsigned i, count;
7231 int size, offset;
7232 tree itype, elem;
7233
7234 offset = 0;
7235 count = VECTOR_CST_NELTS (expr);
7236 itype = TREE_TYPE (TREE_TYPE (expr));
7237 size = GET_MODE_SIZE (TYPE_MODE (itype));
7238 for (i = 0; i < count; i++)
7239 {
7240 if (off >= size)
7241 {
7242 off -= size;
7243 continue;
7244 }
7245 elem = VECTOR_CST_ELT (expr, i);
7246 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7247 if ((off == -1 && res != size)
7248 || res == 0)
7249 return 0;
7250 offset += res;
7251 if (offset >= len)
7252 return offset;
7253 if (off != -1)
7254 off = 0;
7255 }
7256 return offset;
7257 }
7258
7259
7260 /* Subroutine of native_encode_expr. Encode the STRING_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7264
7265 static int
7266 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7267 {
7268 tree type = TREE_TYPE (expr);
7269 HOST_WIDE_INT total_bytes;
7270
7271 if (TREE_CODE (type) != ARRAY_TYPE
7272 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7273 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7274 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7275 return 0;
7276 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7277 if ((off == -1 && total_bytes > len)
7278 || off >= total_bytes)
7279 return 0;
7280 if (off == -1)
7281 off = 0;
7282 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7283 {
7284 int written = 0;
7285 if (off < TREE_STRING_LENGTH (expr))
7286 {
7287 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7288 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7289 }
7290 memset (ptr + written, 0,
7291 MIN (total_bytes - written, len - written));
7292 }
7293 else
7294 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7295 return MIN (total_bytes - off, len);
7296 }
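
/* For example, a STRING_CST "ab" of type char[4] has total_bytes == 4
   but TREE_STRING_LENGTH == 3 (counting the trailing NUL), so the
   padding path above copies 'a', 'b', '\0' and zero-fills the fourth
   byte with the memset.  */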
7297
7298
7299 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7300 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7301 buffer PTR of length LEN bytes. If OFF is not -1 then start
7302 the encoding at byte offset OFF and encode at most LEN bytes.
7303 Return the number of bytes placed in the buffer, or zero upon failure. */
7304
7305 int
7306 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7307 {
7308 switch (TREE_CODE (expr))
7309 {
7310 case INTEGER_CST:
7311 return native_encode_int (expr, ptr, len, off);
7312
7313 case REAL_CST:
7314 return native_encode_real (expr, ptr, len, off);
7315
7316 case FIXED_CST:
7317 return native_encode_fixed (expr, ptr, len, off);
7318
7319 case COMPLEX_CST:
7320 return native_encode_complex (expr, ptr, len, off);
7321
7322 case VECTOR_CST:
7323 return native_encode_vector (expr, ptr, len, off);
7324
7325 case STRING_CST:
7326 return native_encode_string (expr, ptr, len, off);
7327
7328 default:
7329 return 0;
7330 }
7331 }
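
/* The typical use is a compile-time "bit cast": encode a constant into
   a target-order byte buffer, then reinterpret the bytes as another
   type, as fold_view_convert_expr below does.  A sketch (EXPR,
   NEW_TYPE and RESULT are placeholders):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len != 0)
       result = native_interpret_expr (new_type, buf, len);
*/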
7332
7333
7334 /* Subroutine of native_interpret_expr. Interpret the contents of
7335 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7336 If the buffer cannot be interpreted, return NULL_TREE. */
7337
7338 static tree
7339 native_interpret_int (tree type, const unsigned char *ptr, int len)
7340 {
7341 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7342
7343 if (total_bytes > len
7344 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7345 return NULL_TREE;
7346
7347 wide_int result = wi::from_buffer (ptr, total_bytes);
7348
7349 return wide_int_to_tree (type, result);
7350 }
7351
7352
7353 /* Subroutine of native_interpret_expr. Interpret the contents of
7354 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7355 If the buffer cannot be interpreted, return NULL_TREE. */
7356
7357 static tree
7358 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7359 {
7360 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7361 double_int result;
7362 FIXED_VALUE_TYPE fixed_value;
7363
7364 if (total_bytes > len
7365 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7366 return NULL_TREE;
7367
7368 result = double_int::from_buffer (ptr, total_bytes);
7369 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7370
7371 return build_fixed (type, fixed_value);
7372 }
7373
7374
7375 /* Subroutine of native_interpret_expr. Interpret the contents of
7376 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7377 If the buffer cannot be interpreted, return NULL_TREE. */
7378
7379 static tree
7380 native_interpret_real (tree type, const unsigned char *ptr, int len)
7381 {
7382 machine_mode mode = TYPE_MODE (type);
7383 int total_bytes = GET_MODE_SIZE (mode);
7384 int byte, offset, word, words, bitpos;
7385 unsigned char value;
7386   /* There are always 32 bits in each long, no matter the size of
7387      the host's long.  We handle floating point representations with
7388      up to 192 bits.  */
7389 REAL_VALUE_TYPE r;
7390 long tmp[6];
7391
7392 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7393 if (total_bytes > len || total_bytes > 24)
7394 return NULL_TREE;
7395 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7396
7397 memset (tmp, 0, sizeof (tmp));
7398 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7399 bitpos += BITS_PER_UNIT)
7400 {
7401 byte = (bitpos / BITS_PER_UNIT) & 3;
7402 if (UNITS_PER_WORD < 4)
7403 {
7404 word = byte / UNITS_PER_WORD;
7405 if (WORDS_BIG_ENDIAN)
7406 word = (words - 1) - word;
7407 offset = word * UNITS_PER_WORD;
7408 if (BYTES_BIG_ENDIAN)
7409 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7410 else
7411 offset += byte % UNITS_PER_WORD;
7412 }
7413 else
7414 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7415 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7416
7417 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7418 }
7419
7420 real_from_target (&r, tmp, mode);
7421 return build_real (type, r);
7422 }
7423
7424
7425 /* Subroutine of native_interpret_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7427 If the buffer cannot be interpreted, return NULL_TREE. */
7428
7429 static tree
7430 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7431 {
7432 tree etype, rpart, ipart;
7433 int size;
7434
7435 etype = TREE_TYPE (type);
7436 size = GET_MODE_SIZE (TYPE_MODE (etype));
7437 if (size * 2 > len)
7438 return NULL_TREE;
7439 rpart = native_interpret_expr (etype, ptr, size);
7440 if (!rpart)
7441 return NULL_TREE;
7442 ipart = native_interpret_expr (etype, ptr+size, size);
7443 if (!ipart)
7444 return NULL_TREE;
7445 return build_complex (type, rpart, ipart);
7446 }
7447
7448
7449 /* Subroutine of native_interpret_expr. Interpret the contents of
7450 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7451 If the buffer cannot be interpreted, return NULL_TREE. */
7452
7453 static tree
7454 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7455 {
7456 tree etype, elem;
7457 int i, size, count;
7458 tree *elements;
7459
7460 etype = TREE_TYPE (type);
7461 size = GET_MODE_SIZE (TYPE_MODE (etype));
7462 count = TYPE_VECTOR_SUBPARTS (type);
7463 if (size * count > len)
7464 return NULL_TREE;
7465
7466 elements = XALLOCAVEC (tree, count);
7467 for (i = count - 1; i >= 0; i--)
7468 {
7469 elem = native_interpret_expr (etype, ptr+(i*size), size);
7470 if (!elem)
7471 return NULL_TREE;
7472 elements[i] = elem;
7473 }
7474 return build_vector (type, elements);
7475 }
7476
7477
7478 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7479 the buffer PTR of length LEN as a constant of type TYPE. For
7480 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7481 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7482 return NULL_TREE. */
7483
7484 tree
7485 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7486 {
7487 switch (TREE_CODE (type))
7488 {
7489 case INTEGER_TYPE:
7490 case ENUMERAL_TYPE:
7491 case BOOLEAN_TYPE:
7492 case POINTER_TYPE:
7493 case REFERENCE_TYPE:
7494 return native_interpret_int (type, ptr, len);
7495
7496 case REAL_TYPE:
7497 return native_interpret_real (type, ptr, len);
7498
7499 case FIXED_POINT_TYPE:
7500 return native_interpret_fixed (type, ptr, len);
7501
7502 case COMPLEX_TYPE:
7503 return native_interpret_complex (type, ptr, len);
7504
7505 case VECTOR_TYPE:
7506 return native_interpret_vector (type, ptr, len);
7507
7508 default:
7509 return NULL_TREE;
7510 }
7511 }
7512
7513 /* Returns true if we can interpret the contents of a native encoding
7514 as TYPE. */
7515
7516 static bool
7517 can_native_interpret_type_p (tree type)
7518 {
7519 switch (TREE_CODE (type))
7520 {
7521 case INTEGER_TYPE:
7522 case ENUMERAL_TYPE:
7523 case BOOLEAN_TYPE:
7524 case POINTER_TYPE:
7525 case REFERENCE_TYPE:
7526 case FIXED_POINT_TYPE:
7527 case REAL_TYPE:
7528 case COMPLEX_TYPE:
7529 case VECTOR_TYPE:
7530 return true;
7531 default:
7532 return false;
7533 }
7534 }
7535
7536 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7537 TYPE at compile-time. If we're unable to perform the conversion
7538 return NULL_TREE. */
7539
7540 static tree
7541 fold_view_convert_expr (tree type, tree expr)
7542 {
7543 /* We support up to 512-bit values (for V8DFmode). */
7544 unsigned char buffer[64];
7545 int len;
7546
7547 /* Check that the host and target are sane. */
7548 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7549 return NULL_TREE;
7550
7551 len = native_encode_expr (expr, buffer, sizeof (buffer));
7552 if (len == 0)
7553 return NULL_TREE;
7554
7555 return native_interpret_expr (type, buffer, len);
7556 }
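
/* For instance, on a target with IEEE single-precision float and
   32-bit int, VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST as the
   bytes 3f 80 00 00 (in target order) and reinterprets them as the
   INTEGER_CST 0x3f800000.  */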
7557
7558 /* Build an expression for the address of T. Folds away INDIRECT_REF
7559 to avoid confusing the gimplify process. */
7560
7561 tree
7562 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7563 {
7564 /* The size of the object is not relevant when talking about its address. */
7565 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7566 t = TREE_OPERAND (t, 0);
7567
7568 if (TREE_CODE (t) == INDIRECT_REF)
7569 {
7570 t = TREE_OPERAND (t, 0);
7571
7572 if (TREE_TYPE (t) != ptrtype)
7573 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7574 }
7575 else if (TREE_CODE (t) == MEM_REF
7576 && integer_zerop (TREE_OPERAND (t, 1)))
7577 return TREE_OPERAND (t, 0);
7578 else if (TREE_CODE (t) == MEM_REF
7579 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7580 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7581 TREE_OPERAND (t, 0),
7582 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7583 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7584 {
7585 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7586
7587 if (TREE_TYPE (t) != ptrtype)
7588 t = fold_convert_loc (loc, ptrtype, t);
7589 }
7590 else
7591 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7592
7593 return t;
7594 }
7595
7596 /* Build an expression for the address of T. */
7597
7598 tree
7599 build_fold_addr_expr_loc (location_t loc, tree t)
7600 {
7601 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7602
7603 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7604 }
7605
7606 /* Fold a unary expression of code CODE and type TYPE with operand
7607 OP0. Return the folded expression if folding is successful.
7608 Otherwise, return NULL_TREE. */
7609
7610 tree
7611 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7612 {
7613 tree tem;
7614 tree arg0;
7615 enum tree_code_class kind = TREE_CODE_CLASS (code);
7616
7617 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7618 && TREE_CODE_LENGTH (code) == 1);
7619
7620 arg0 = op0;
7621 if (arg0)
7622 {
7623 if (CONVERT_EXPR_CODE_P (code)
7624 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7625 {
7626 /* Don't use STRIP_NOPS, because signedness of argument type
7627 matters. */
7628 STRIP_SIGN_NOPS (arg0);
7629 }
7630 else
7631 {
7632 /* Strip any conversions that don't change the mode. This
7633 is safe for every expression, except for a comparison
7634 expression because its signedness is derived from its
7635 operands.
7636
7637 Note that this is done as an internal manipulation within
7638 the constant folder, in order to find the simplest
7639 representation of the arguments so that their form can be
7640 	     studied.  In any case, the appropriate type conversions
7641 should be put back in the tree that will get out of the
7642 constant folder. */
7643 STRIP_NOPS (arg0);
7644 }
7645
7646 if (CONSTANT_CLASS_P (arg0))
7647 {
7648 tree tem = const_unop (code, type, arg0);
7649 if (tem)
7650 {
7651 if (TREE_TYPE (tem) != type)
7652 tem = fold_convert_loc (loc, type, tem);
7653 return tem;
7654 }
7655 }
7656 }
7657
7658 tem = generic_simplify (loc, code, type, op0);
7659 if (tem)
7660 return tem;
7661
7662 if (TREE_CODE_CLASS (code) == tcc_unary)
7663 {
7664 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7665 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7666 fold_build1_loc (loc, code, type,
7667 fold_convert_loc (loc, TREE_TYPE (op0),
7668 TREE_OPERAND (arg0, 1))));
7669 else if (TREE_CODE (arg0) == COND_EXPR)
7670 {
7671 tree arg01 = TREE_OPERAND (arg0, 1);
7672 tree arg02 = TREE_OPERAND (arg0, 2);
7673 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7674 arg01 = fold_build1_loc (loc, code, type,
7675 fold_convert_loc (loc,
7676 TREE_TYPE (op0), arg01));
7677 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7678 arg02 = fold_build1_loc (loc, code, type,
7679 fold_convert_loc (loc,
7680 TREE_TYPE (op0), arg02));
7681 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7682 arg01, arg02);
7683
7684 	  /* If this was a conversion, and all we did was to move it
7685 	     inside the COND_EXPR, bring it back out.  But leave it if
7686 it is a conversion from integer to integer and the
7687 result precision is no wider than a word since such a
7688 conversion is cheap and may be optimized away by combine,
7689 while it couldn't if it were outside the COND_EXPR. Then return
7690 so we don't get into an infinite recursion loop taking the
7691 conversion out and then back in. */
7692
7693 if ((CONVERT_EXPR_CODE_P (code)
7694 || code == NON_LVALUE_EXPR)
7695 && TREE_CODE (tem) == COND_EXPR
7696 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7697 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7698 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7699 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7700 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7701 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7702 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7703 && (INTEGRAL_TYPE_P
7704 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7705 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7706 || flag_syntax_only))
7707 tem = build1_loc (loc, code, type,
7708 build3 (COND_EXPR,
7709 TREE_TYPE (TREE_OPERAND
7710 (TREE_OPERAND (tem, 1), 0)),
7711 TREE_OPERAND (tem, 0),
7712 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7713 TREE_OPERAND (TREE_OPERAND (tem, 2),
7714 0)));
7715 return tem;
7716 }
7717 }
7718
7719 switch (code)
7720 {
7721 case NON_LVALUE_EXPR:
7722 if (!maybe_lvalue_p (op0))
7723 return fold_convert_loc (loc, type, op0);
7724 return NULL_TREE;
7725
7726 CASE_CONVERT:
7727 case FLOAT_EXPR:
7728 case FIX_TRUNC_EXPR:
7729 if (COMPARISON_CLASS_P (op0))
7730 {
7731 /* If we have (type) (a CMP b) and type is an integral type, return
7732 new expression involving the new type. Canonicalize
7733 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7734 non-integral type.
7735 	     Do not fold the result, as that would not simplify further;
7736 	     also, folding again results in recursions.  */
7737 if (TREE_CODE (type) == BOOLEAN_TYPE)
7738 return build2_loc (loc, TREE_CODE (op0), type,
7739 TREE_OPERAND (op0, 0),
7740 TREE_OPERAND (op0, 1));
7741 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7742 && TREE_CODE (type) != VECTOR_TYPE)
7743 return build3_loc (loc, COND_EXPR, type, op0,
7744 constant_boolean_node (true, type),
7745 constant_boolean_node (false, type));
7746 }
7747
7748 /* Handle (T *)&A.B.C for A being of type T and B and C
7749 living at offset zero. This occurs frequently in
7750 C++ upcasting and then accessing the base. */
7751 if (TREE_CODE (op0) == ADDR_EXPR
7752 && POINTER_TYPE_P (type)
7753 && handled_component_p (TREE_OPERAND (op0, 0)))
7754 {
7755 HOST_WIDE_INT bitsize, bitpos;
7756 tree offset;
7757 machine_mode mode;
7758 int unsignedp, volatilep;
7759 tree base = TREE_OPERAND (op0, 0);
7760 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7761 &mode, &unsignedp, &volatilep, false);
7762 /* If the reference was to a (constant) zero offset, we can use
7763 the address of the base if it has the same base type
7764 as the result type and the pointer type is unqualified. */
7765 if (! offset && bitpos == 0
7766 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7767 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7768 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7769 return fold_convert_loc (loc, type,
7770 build_fold_addr_expr_loc (loc, base));
7771 }
7772
7773 if (TREE_CODE (op0) == MODIFY_EXPR
7774 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7775 /* Detect assigning a bitfield. */
7776 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7777 && DECL_BIT_FIELD
7778 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7779 {
7780 /* Don't leave an assignment inside a conversion
7781 unless assigning a bitfield. */
7782 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7783 /* First do the assignment, then return converted constant. */
7784 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7785 TREE_NO_WARNING (tem) = 1;
7786 TREE_USED (tem) = 1;
7787 return tem;
7788 }
7789
7790       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7791          constant (if x has signed type, the sign bit cannot be set
7792          in c).  This folds the extension into the BIT_AND_EXPR.
7793 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7794 very likely don't have maximal range for their precision and this
7795 transformation effectively doesn't preserve non-maximal ranges. */
7796 if (TREE_CODE (type) == INTEGER_TYPE
7797 && TREE_CODE (op0) == BIT_AND_EXPR
7798 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7799 {
7800 tree and_expr = op0;
7801 tree and0 = TREE_OPERAND (and_expr, 0);
7802 tree and1 = TREE_OPERAND (and_expr, 1);
7803 int change = 0;
7804
7805 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7806 || (TYPE_PRECISION (type)
7807 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7808 change = 1;
7809 else if (TYPE_PRECISION (TREE_TYPE (and1))
7810 <= HOST_BITS_PER_WIDE_INT
7811 && tree_fits_uhwi_p (and1))
7812 {
7813 unsigned HOST_WIDE_INT cst;
7814
7815 cst = tree_to_uhwi (and1);
7816 cst &= HOST_WIDE_INT_M1U
7817 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7818 change = (cst == 0);
7819 if (change
7820 && !flag_syntax_only
7821 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7822 == ZERO_EXTEND))
7823 {
7824 tree uns = unsigned_type_for (TREE_TYPE (and0));
7825 and0 = fold_convert_loc (loc, uns, and0);
7826 and1 = fold_convert_loc (loc, uns, and1);
7827 }
7828 }
7829 if (change)
7830 {
7831 tem = force_fit_type (type, wi::to_widest (and1), 0,
7832 TREE_OVERFLOW (and1));
7833 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7834 fold_convert_loc (loc, type, and0), tem);
7835 }
7836 }
7837
7838 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7839 when one of the new casts will fold away. Conservatively we assume
7840 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7841 if (POINTER_TYPE_P (type)
7842 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7843 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7844 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7845 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7846 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7847 {
7848 tree arg00 = TREE_OPERAND (arg0, 0);
7849 tree arg01 = TREE_OPERAND (arg0, 1);
7850
7851 return fold_build_pointer_plus_loc
7852 (loc, fold_convert_loc (loc, type, arg00), arg01);
7853 }
7854
7855       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7856          of the same precision, and X has an integer type not narrower
7857          than T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7858 if (INTEGRAL_TYPE_P (type)
7859 && TREE_CODE (op0) == BIT_NOT_EXPR
7860 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7861 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7862 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7863 {
7864 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7865 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7866 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7867 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7868 fold_convert_loc (loc, type, tem));
7869 }
7870
7871 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7872 type of X and Y (integer types only). */
7873 if (INTEGRAL_TYPE_P (type)
7874 && TREE_CODE (op0) == MULT_EXPR
7875 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7876 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7877 {
7878 /* Be careful not to introduce new overflows. */
7879 tree mult_type;
7880 if (TYPE_OVERFLOW_WRAPS (type))
7881 mult_type = type;
7882 else
7883 mult_type = unsigned_type_for (type);
7884
7885 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7886 {
7887 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7888 fold_convert_loc (loc, mult_type,
7889 TREE_OPERAND (op0, 0)),
7890 fold_convert_loc (loc, mult_type,
7891 TREE_OPERAND (op0, 1)));
7892 return fold_convert_loc (loc, type, tem);
7893 }
7894 }
7895
7896 return NULL_TREE;
7897
7898 case VIEW_CONVERT_EXPR:
7899 if (TREE_CODE (op0) == MEM_REF)
7900 return fold_build2_loc (loc, MEM_REF, type,
7901 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7902
7903 return NULL_TREE;
7904
7905 case NEGATE_EXPR:
7906 tem = fold_negate_expr (loc, arg0);
7907 if (tem)
7908 return fold_convert_loc (loc, type, tem);
7909 return NULL_TREE;
7910
7911 case ABS_EXPR:
7912 /* Convert fabs((double)float) into (double)fabsf(float). */
7913 if (TREE_CODE (arg0) == NOP_EXPR
7914 && TREE_CODE (type) == REAL_TYPE)
7915 {
7916 tree targ0 = strip_float_extensions (arg0);
7917 if (targ0 != arg0)
7918 return fold_convert_loc (loc, type,
7919 fold_build1_loc (loc, ABS_EXPR,
7920 TREE_TYPE (targ0),
7921 targ0));
7922 }
7923
7924 /* Strip sign ops from argument. */
7925 if (TREE_CODE (type) == REAL_TYPE)
7926 {
7927 tem = fold_strip_sign_ops (arg0);
7928 if (tem)
7929 return fold_build1_loc (loc, ABS_EXPR, type,
7930 fold_convert_loc (loc, type, tem));
7931 }
7932 return NULL_TREE;
7933
7934 case CONJ_EXPR:
7935 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7936 return fold_convert_loc (loc, type, arg0);
7937 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7938 {
7939 tree itype = TREE_TYPE (type);
7940 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7941 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7942 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7943 negate_expr (ipart));
7944 }
7945 if (TREE_CODE (arg0) == CONJ_EXPR)
7946 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7947 return NULL_TREE;
7948
7949 case BIT_NOT_EXPR:
7950 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7951 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7952 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7953 fold_convert_loc (loc, type,
7954 TREE_OPERAND (arg0, 0)))))
7955 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7956 fold_convert_loc (loc, type,
7957 TREE_OPERAND (arg0, 1)));
7958 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7959 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7960 fold_convert_loc (loc, type,
7961 TREE_OPERAND (arg0, 1)))))
7962 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7963 fold_convert_loc (loc, type,
7964 TREE_OPERAND (arg0, 0)), tem);
7965
7966 return NULL_TREE;
7967
7968 case TRUTH_NOT_EXPR:
7969 /* Note that the operand of this must be an int
7970 and its values must be 0 or 1.
7971 ("true" is a fixed value perhaps depending on the language,
7972 but we don't handle values other than 1 correctly yet.) */
7973 tem = fold_truth_not_expr (loc, arg0);
7974 if (!tem)
7975 return NULL_TREE;
7976 return fold_convert_loc (loc, type, tem);
7977
7978 case REALPART_EXPR:
7979 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7980 return fold_convert_loc (loc, type, arg0);
7981 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7982 {
7983 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7984 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7985 fold_build1_loc (loc, REALPART_EXPR, itype,
7986 TREE_OPERAND (arg0, 0)),
7987 fold_build1_loc (loc, REALPART_EXPR, itype,
7988 TREE_OPERAND (arg0, 1)));
7989 return fold_convert_loc (loc, type, tem);
7990 }
7991 if (TREE_CODE (arg0) == CONJ_EXPR)
7992 {
7993 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7994 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7995 TREE_OPERAND (arg0, 0));
7996 return fold_convert_loc (loc, type, tem);
7997 }
7998 if (TREE_CODE (arg0) == CALL_EXPR)
7999 {
8000 tree fn = get_callee_fndecl (arg0);
8001 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8002 switch (DECL_FUNCTION_CODE (fn))
8003 {
8004 CASE_FLT_FN (BUILT_IN_CEXPI):
8005 fn = mathfn_built_in (type, BUILT_IN_COS);
8006 if (fn)
8007 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8008 break;
8009
8010 default:
8011 break;
8012 }
8013 }
8014 return NULL_TREE;
8015
8016 case IMAGPART_EXPR:
8017 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8018 return build_zero_cst (type);
8019 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8020 {
8021 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8022 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8023 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8024 TREE_OPERAND (arg0, 0)),
8025 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8026 TREE_OPERAND (arg0, 1)));
8027 return fold_convert_loc (loc, type, tem);
8028 }
8029 if (TREE_CODE (arg0) == CONJ_EXPR)
8030 {
8031 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8032 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8033 return fold_convert_loc (loc, type, negate_expr (tem));
8034 }
8035 if (TREE_CODE (arg0) == CALL_EXPR)
8036 {
8037 tree fn = get_callee_fndecl (arg0);
8038 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8039 switch (DECL_FUNCTION_CODE (fn))
8040 {
8041 CASE_FLT_FN (BUILT_IN_CEXPI):
8042 fn = mathfn_built_in (type, BUILT_IN_SIN);
8043 if (fn)
8044 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8045 break;
8046
8047 default:
8048 break;
8049 }
8050 }
8051 return NULL_TREE;
8052
8053 case INDIRECT_REF:
8054 /* Fold *&X to X if X is an lvalue. */
8055 if (TREE_CODE (op0) == ADDR_EXPR)
8056 {
8057 tree op00 = TREE_OPERAND (op0, 0);
8058 if ((TREE_CODE (op00) == VAR_DECL
8059 || TREE_CODE (op00) == PARM_DECL
8060 || TREE_CODE (op00) == RESULT_DECL)
8061 && !TREE_READONLY (op00))
8062 return op00;
8063 }
8064 return NULL_TREE;
8065
8066 default:
8067 return NULL_TREE;
8068 } /* switch (code) */
8069 }
8070
8071
8072 /* If the operation was a conversion, do _not_ mark a resulting constant
8073    with TREE_OVERFLOW if the original constant was not.  These conversions
8074    have implementation-defined behavior and retaining the TREE_OVERFLOW
8075    flag here would confuse later passes such as VRP.  */
8076 tree
8077 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8078 tree type, tree op0)
8079 {
8080 tree res = fold_unary_loc (loc, code, type, op0);
8081 if (res
8082 && TREE_CODE (res) == INTEGER_CST
8083 && TREE_CODE (op0) == INTEGER_CST
8084 && CONVERT_EXPR_CODE_P (code))
8085 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8086
8087 return res;
8088 }
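
/* For example, on a target with 32-bit int, folding (int) 4294967295u
   yields the INTEGER_CST -1.  The conversion is implementation-defined
   rather than undefined, so because the original constant did not have
   TREE_OVERFLOW set, the result must not have it either.  */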
8089
8090 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8091 operands OP0 and OP1. LOC is the location of the resulting expression.
8092    ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8093 Return the folded expression if folding is successful. Otherwise,
8094 return NULL_TREE. */
8095 static tree
8096 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8097 tree arg0, tree arg1, tree op0, tree op1)
8098 {
8099 tree tem;
8100
8101 /* We only do these simplifications if we are optimizing. */
8102 if (!optimize)
8103 return NULL_TREE;
8104
8105 /* Check for things like (A || B) && (A || C). We can convert this
8106 to A || (B && C). Note that either operator can be any of the four
8107 truth and/or operations and the transformation will still be
8108 valid. Also note that we only care about order for the
8109 ANDIF and ORIF operators. If B contains side effects, this
8110 might change the truth-value of A. */
8111 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8112 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8113 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8114 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8115 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8116 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8117 {
8118 tree a00 = TREE_OPERAND (arg0, 0);
8119 tree a01 = TREE_OPERAND (arg0, 1);
8120 tree a10 = TREE_OPERAND (arg1, 0);
8121 tree a11 = TREE_OPERAND (arg1, 1);
8122 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8123 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8124 && (code == TRUTH_AND_EXPR
8125 || code == TRUTH_OR_EXPR));
8126
8127 if (operand_equal_p (a00, a10, 0))
8128 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8129 fold_build2_loc (loc, code, type, a01, a11));
8130 else if (commutative && operand_equal_p (a00, a11, 0))
8131 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8132 fold_build2_loc (loc, code, type, a01, a10));
8133 else if (commutative && operand_equal_p (a01, a10, 0))
8134 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8135 fold_build2_loc (loc, code, type, a00, a11));
8136
8137       /* This case is tricky because we must either have commutative
8138 operators or else A10 must not have side-effects. */
8139
8140 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8141 && operand_equal_p (a01, a11, 0))
8142 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8143 fold_build2_loc (loc, code, type, a00, a10),
8144 a01);
8145 }
8146
8147 /* See if we can build a range comparison. */
8148 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8149 return tem;
8150
8151 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8152 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8153 {
8154 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8155 if (tem)
8156 return fold_build2_loc (loc, code, type, tem, arg1);
8157 }
8158
8159 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8160 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8161 {
8162 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8163 if (tem)
8164 return fold_build2_loc (loc, code, type, arg0, tem);
8165 }
8166
8167 /* Check for the possibility of merging component references. If our
8168 lhs is another similar operation, try to merge its rhs with our
8169 rhs. Then try to merge our lhs and rhs. */
8170 if (TREE_CODE (arg0) == code
8171 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8172 TREE_OPERAND (arg0, 1), arg1)))
8173 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8174
8175 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8176 return tem;
8177
8178 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8179 && (code == TRUTH_AND_EXPR
8180 || code == TRUTH_ANDIF_EXPR
8181 || code == TRUTH_OR_EXPR
8182 || code == TRUTH_ORIF_EXPR))
8183 {
8184 enum tree_code ncode, icode;
8185
8186 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8187 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8188 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8189
8190       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8191          or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8192          We don't want to pack more than two leaves into a non-IF
8193          AND/OR expression.
8194          If the tree code of the left-hand operand isn't an AND/OR-IF
8195          code and isn't equal to IF-CODE, we don't want to add the
8196          right-hand operand.  If the inner right-hand side of the
8197          left-hand operand has side-effects or isn't simple, we can't
8198          add to it, as otherwise we might destroy the if-sequence.  */
8199 if (TREE_CODE (arg0) == icode
8200 && simple_operand_p_2 (arg1)
8201           /* Needed for sequence points, to handle trapping and
8202              side-effects.  */
8203 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8204 {
8205 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8206 arg1);
8207 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8208 tem);
8209 }
8210       /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8211          or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8212 else if (TREE_CODE (arg1) == icode
8213 && simple_operand_p_2 (arg0)
8214           /* Needed for sequence points, to handle trapping and
8215              side-effects.  */
8216 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8217 {
8218 tem = fold_build2_loc (loc, ncode, type,
8219 arg0, TREE_OPERAND (arg1, 0));
8220 return fold_build2_loc (loc, icode, type, tem,
8221 TREE_OPERAND (arg1, 1));
8222 }
8223       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8224          into (A OR B).
8225          For sequence point consistency, we need to check for trapping
8226          and side-effects.  */
8227 else if (code == icode && simple_operand_p_2 (arg0)
8228 && simple_operand_p_2 (arg1))
8229 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8230 }
8231
8232 return NULL_TREE;
8233 }
8234
8235 /* Fold a binary expression of code CODE and type TYPE with operands
8236 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8237 Return the folded expression if folding is successful. Otherwise,
8238 return NULL_TREE. */
8239
8240 static tree
8241 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8242 {
8243 enum tree_code compl_code;
8244
8245 if (code == MIN_EXPR)
8246 compl_code = MAX_EXPR;
8247 else if (code == MAX_EXPR)
8248 compl_code = MIN_EXPR;
8249 else
8250 gcc_unreachable ();
8251
8252 /* MIN (MAX (a, b), b) == b. */
8253 if (TREE_CODE (op0) == compl_code
8254 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8255 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8256
8257 /* MIN (MAX (b, a), b) == b. */
8258 if (TREE_CODE (op0) == compl_code
8259 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8260 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8261 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8262
8263 /* MIN (a, MAX (a, b)) == a. */
8264 if (TREE_CODE (op1) == compl_code
8265 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8266 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8267 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8268
8269 /* MIN (a, MAX (b, a)) == a. */
8270 if (TREE_CODE (op1) == compl_code
8271 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8272 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8273 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8274
8275 return NULL_TREE;
8276 }
8277
8278 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8279 by changing CODE to reduce the magnitude of constants involved in
8280 ARG0 of the comparison.
8281 Returns a canonicalized comparison tree if a simplification was
8282 possible, otherwise returns NULL_TREE.
8283 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8284 valid if signed overflow is undefined. */
8285
8286 static tree
8287 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8288 tree arg0, tree arg1,
8289 bool *strict_overflow_p)
8290 {
8291 enum tree_code code0 = TREE_CODE (arg0);
8292 tree t, cst0 = NULL_TREE;
8293 int sgn0;
8294 bool swap = false;
8295
8296 /* Match A +- CST code arg1 and CST code arg1. We can change the
8297 first form only if overflow is undefined. */
8298 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8299 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8300 /* In principle pointers also have undefined overflow behavior,
8301 but that causes problems elsewhere. */
8302 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8303 && (code0 == MINUS_EXPR
8304 || code0 == PLUS_EXPR)
8305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8306 || code0 == INTEGER_CST))
8307 return NULL_TREE;
8308
8309 /* Identify the constant in arg0 and its sign. */
8310 if (code0 == INTEGER_CST)
8311 cst0 = arg0;
8312 else
8313 cst0 = TREE_OPERAND (arg0, 1);
8314 sgn0 = tree_int_cst_sgn (cst0);
8315
8316 /* Overflowed constants and zero will cause problems. */
8317 if (integer_zerop (cst0)
8318 || TREE_OVERFLOW (cst0))
8319 return NULL_TREE;
8320
8321 /* See if we can reduce the magnitude of the constant in
8322 arg0 by changing the comparison code. */
8323 if (code0 == INTEGER_CST)
8324 {
8325 /* CST <= arg1 -> CST-1 < arg1. */
8326 if (code == LE_EXPR && sgn0 == 1)
8327 code = LT_EXPR;
8328 /* -CST < arg1 -> -CST-1 <= arg1. */
8329 else if (code == LT_EXPR && sgn0 == -1)
8330 code = LE_EXPR;
8331 /* CST > arg1 -> CST-1 >= arg1. */
8332 else if (code == GT_EXPR && sgn0 == 1)
8333 code = GE_EXPR;
8334 /* -CST >= arg1 -> -CST-1 > arg1. */
8335 else if (code == GE_EXPR && sgn0 == -1)
8336 code = GT_EXPR;
8337 else
8338 return NULL_TREE;
8339 /* arg1 code' CST' might be more canonical. */
8340 swap = true;
8341 }
8342 else
8343 {
8344 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8345 if (code == LT_EXPR
8346 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8347 code = LE_EXPR;
8348 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8349 else if (code == GT_EXPR
8350 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8351 code = GE_EXPR;
8352 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8353 else if (code == LE_EXPR
8354 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8355 code = LT_EXPR;
8356 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8357 else if (code == GE_EXPR
8358 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8359 code = GT_EXPR;
8360 else
8361 return NULL_TREE;
8362 *strict_overflow_p = true;
8363 }
8364
8365   /* Now build the constant reduced in magnitude.  But not if that
8366      would produce one outside of its type's range.  */
8367 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8368 && ((sgn0 == 1
8369 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8370 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8371 || (sgn0 == -1
8372 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8373 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8374 /* We cannot swap the comparison here as that would cause us to
8375 endlessly recurse. */
8376 return NULL_TREE;
8377
8378 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8379 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8380 if (code0 != INTEGER_CST)
8381 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8382 t = fold_convert (TREE_TYPE (arg1), t);
8383
8384   /* If swapping might yield a more canonical form, do so.  */
8385 if (swap)
8386 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8387 else
8388 return fold_build2_loc (loc, code, type, t, arg1);
8389 }
8390
8391 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8392 overflow further. Try to decrease the magnitude of constants involved
8393 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8394 and put sole constants at the second argument position.
8395 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8396
8397 static tree
8398 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8399 tree arg0, tree arg1)
8400 {
8401 tree t;
8402 bool strict_overflow_p;
8403 const char * const warnmsg = G_("assuming signed overflow does not occur "
8404 "when reducing constant in comparison");
8405
8406 /* Try canonicalization by simplifying arg0. */
8407 strict_overflow_p = false;
8408 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8409 &strict_overflow_p);
8410 if (t)
8411 {
8412 if (strict_overflow_p)
8413 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8414 return t;
8415 }
8416
8417 /* Try canonicalization by simplifying arg1 using the swapped
8418 comparison. */
8419 code = swap_tree_comparison (code);
8420 strict_overflow_p = false;
8421 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8422 &strict_overflow_p);
8423 if (t && strict_overflow_p)
8424 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8425 return t;
8426 }
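
/* For instance, with signed X and undefined signed overflow:

     X + 2 <= Y   is canonicalized to   X + 1 < Y
     3 <= Y       is canonicalized to   Y > 2

   the first by the A +- CST rules above, the second by the
   sole-constant rules followed by the final swap.  */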
8427
8428 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8429    space.  This is used to avoid issuing overflow warnings for
8430    expressions like &p->x, which cannot wrap.  */
8431
8432 static bool
8433 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8434 {
8435 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8436 return true;
8437
8438 if (bitpos < 0)
8439 return true;
8440
8441 wide_int wi_offset;
8442 int precision = TYPE_PRECISION (TREE_TYPE (base));
8443 if (offset == NULL_TREE)
8444 wi_offset = wi::zero (precision);
8445 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8446 return true;
8447 else
8448 wi_offset = offset;
8449
8450 bool overflow;
8451 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8452 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8453 if (overflow)
8454 return true;
8455
8456 if (!wi::fits_uhwi_p (total))
8457 return true;
8458
8459 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8460 if (size <= 0)
8461 return true;
8462
8463 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8464 array. */
8465 if (TREE_CODE (base) == ADDR_EXPR)
8466 {
8467 HOST_WIDE_INT base_size;
8468
8469 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8470 if (base_size > 0 && size < base_size)
8471 size = base_size;
8472 }
8473
8474 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8475 }
8476
8477 /* Return the HOST_WIDE_INT least significant bits of T, an
8478    INTEGER_CST of sizetype kind.  This makes sure to properly
8479    sign-extend the constant.  */
8480
8481 static HOST_WIDE_INT
8482 size_low_cst (const_tree t)
8483 {
8484 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8485 int prec = TYPE_PRECISION (TREE_TYPE (t));
8486 if (prec < HOST_BITS_PER_WIDE_INT)
8487 return sext_hwi (w, prec);
8488 return w;
8489 }
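
/* E.g. on a 64-bit host, a 32-bit sizetype constant with all bits set
   is returned as (HOST_WIDE_INT) -1 rather than 0xffffffff, because
   sext_hwi sign-extends from the 32-bit precision.  */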
8490
8491 /* Subroutine of fold_binary. This routine performs all of the
8492 transformations that are common to the equality/inequality
8493 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8494    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8495    fold_binary should go through fold_binary itself.  Fold a
8496    comparison with tree code CODE and type TYPE with operands OP0
8497    and OP1.  Return the folded comparison or NULL_TREE.  */
8498
8499 static tree
8500 fold_comparison (location_t loc, enum tree_code code, tree type,
8501 tree op0, tree op1)
8502 {
8503 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8504 tree arg0, arg1, tem;
8505
8506 arg0 = op0;
8507 arg1 = op1;
8508
8509 STRIP_SIGN_NOPS (arg0);
8510 STRIP_SIGN_NOPS (arg1);
8511
8512 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8513 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8514 && (equality_code
8515 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8516 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8517 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8518 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8519 && TREE_CODE (arg1) == INTEGER_CST
8520 && !TREE_OVERFLOW (arg1))
8521 {
8522 const enum tree_code
8523 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8524 tree const1 = TREE_OPERAND (arg0, 1);
8525 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8526 tree variable = TREE_OPERAND (arg0, 0);
8527 tree new_const = int_const_binop (reverse_op, const2, const1);
8528
8529       /* If the constant operation overflowed, this can be
8530          simplified as a comparison against INT_MAX/INT_MIN.  */
8531 if (TREE_OVERFLOW (new_const)
8532 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8533 {
8534 int const1_sgn = tree_int_cst_sgn (const1);
8535 enum tree_code code2 = code;
8536
8537           /* Get the sign of the constant on the lhs as if the
8538              operation were VARIABLE + CONST1.  */
8539 if (TREE_CODE (arg0) == MINUS_EXPR)
8540 const1_sgn = -const1_sgn;
8541
8542 /* The sign of the constant determines if we overflowed
8543 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8544 Canonicalize to the INT_MIN overflow by swapping the comparison
8545 if necessary. */
8546 if (const1_sgn == -1)
8547 code2 = swap_tree_comparison (code);
8548
8549 /* We now can look at the canonicalized case
8550 VARIABLE + 1 CODE2 INT_MIN
8551 and decide on the result. */
8552 switch (code2)
8553 {
8554 case EQ_EXPR:
8555 case LT_EXPR:
8556 case LE_EXPR:
8557 return
8558 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8559
8560 case NE_EXPR:
8561 case GE_EXPR:
8562 case GT_EXPR:
8563 return
8564 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8565
8566 default:
8567 gcc_unreachable ();
8568 }
8569 }
8570 else
8571 {
8572 if (!equality_code)
8573 fold_overflow_warning ("assuming signed overflow does not occur "
8574 "when changing X +- C1 cmp C2 to "
8575 "X cmp C2 -+ C1",
8576 WARN_STRICT_OVERFLOW_COMPARISON);
8577 return fold_build2_loc (loc, code, type, variable, new_const);
8578 }
8579 }
8580
8581   /* Comparisons of pointers can be decomposed into a compile-time
8582      comparison of the base objects and the offsets into the object.
8583      This requires at least one operand being an ADDR_EXPR or a
8584      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8585 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8586 && (TREE_CODE (arg0) == ADDR_EXPR
8587 || TREE_CODE (arg1) == ADDR_EXPR
8588 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8589 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8590 {
8591 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8592 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8593 machine_mode mode;
8594 int volatilep, unsignedp;
8595 bool indirect_base0 = false, indirect_base1 = false;
8596
8597 /* Get base and offset for the access. Strip ADDR_EXPR for
8598 get_inner_reference, but put it back by stripping INDIRECT_REF
8599 off the base object if possible. indirect_baseN will be true
8600 if baseN is not an address but refers to the object itself. */
8601 base0 = arg0;
8602 if (TREE_CODE (arg0) == ADDR_EXPR)
8603 {
8604 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8605 &bitsize, &bitpos0, &offset0, &mode,
8606 &unsignedp, &volatilep, false);
8607 if (TREE_CODE (base0) == INDIRECT_REF)
8608 base0 = TREE_OPERAND (base0, 0);
8609 else
8610 indirect_base0 = true;
8611 }
8612 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8613 {
8614 base0 = TREE_OPERAND (arg0, 0);
8615 STRIP_SIGN_NOPS (base0);
8616 if (TREE_CODE (base0) == ADDR_EXPR)
8617 {
8618 base0 = TREE_OPERAND (base0, 0);
8619 indirect_base0 = true;
8620 }
8621 offset0 = TREE_OPERAND (arg0, 1);
8622 if (tree_fits_shwi_p (offset0))
8623 {
8624 HOST_WIDE_INT off = size_low_cst (offset0);
8625 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8626 * BITS_PER_UNIT)
8627 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8628 {
8629 bitpos0 = off * BITS_PER_UNIT;
8630 offset0 = NULL_TREE;
8631 }
8632 }
8633 }
8634
8635 base1 = arg1;
8636 if (TREE_CODE (arg1) == ADDR_EXPR)
8637 {
8638 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8639 &bitsize, &bitpos1, &offset1, &mode,
8640 &unsignedp, &volatilep, false);
8641 if (TREE_CODE (base1) == INDIRECT_REF)
8642 base1 = TREE_OPERAND (base1, 0);
8643 else
8644 indirect_base1 = true;
8645 }
8646 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8647 {
8648 base1 = TREE_OPERAND (arg1, 0);
8649 STRIP_SIGN_NOPS (base1);
8650 if (TREE_CODE (base1) == ADDR_EXPR)
8651 {
8652 base1 = TREE_OPERAND (base1, 0);
8653 indirect_base1 = true;
8654 }
8655 offset1 = TREE_OPERAND (arg1, 1);
8656 if (tree_fits_shwi_p (offset1))
8657 {
8658 HOST_WIDE_INT off = size_low_cst (offset1);
8659 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8660 * BITS_PER_UNIT)
8661 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8662 {
8663 bitpos1 = off * BITS_PER_UNIT;
8664 offset1 = NULL_TREE;
8665 }
8666 }
8667 }
8668
8669 /* A local variable can never be pointed to by
8670 the default SSA name of an incoming parameter. */
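/* E.g. in  void f (int *p) { int a; ... p == &a ... }  the default
   definition of P cannot point to the local A, so the equality folds
   to false (a hypothetical example, not from the original source).  */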
8671 if ((TREE_CODE (arg0) == ADDR_EXPR
8672 && indirect_base0
8673 && TREE_CODE (base0) == VAR_DECL
8674 && auto_var_in_fn_p (base0, current_function_decl)
8675 && !indirect_base1
8676 && TREE_CODE (base1) == SSA_NAME
8677 && SSA_NAME_IS_DEFAULT_DEF (base1)
8678 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8679 || (TREE_CODE (arg1) == ADDR_EXPR
8680 && indirect_base1
8681 && TREE_CODE (base1) == VAR_DECL
8682 && auto_var_in_fn_p (base1, current_function_decl)
8683 && !indirect_base0
8684 && TREE_CODE (base0) == SSA_NAME
8685 && SSA_NAME_IS_DEFAULT_DEF (base0)
8686 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8687 {
8688 if (code == NE_EXPR)
8689 return constant_boolean_node (1, type);
8690 else if (code == EQ_EXPR)
8691 return constant_boolean_node (0, type);
8692 }
8693 /* If we have equivalent bases we might be able to simplify. */
8694 else if (indirect_base0 == indirect_base1
8695 && operand_equal_p (base0, base1, 0))
8696 {
8697 /* We can fold this expression to a constant if the non-constant
8698 offset parts are equal. */
8699 if ((offset0 == offset1
8700 || (offset0 && offset1
8701 && operand_equal_p (offset0, offset1, 0)))
8702 && (code == EQ_EXPR
8703 || code == NE_EXPR
8704 || (indirect_base0 && DECL_P (base0))
8705 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8707 {
8708 if (!equality_code
8709 && bitpos0 != bitpos1
8710 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8711 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8712 fold_overflow_warning (("assuming pointer wraparound does not "
8713 "occur when comparing P +- C1 with "
8714 "P +- C2"),
8715 WARN_STRICT_OVERFLOW_CONDITIONAL);
8716
8717 switch (code)
8718 {
8719 case EQ_EXPR:
8720 return constant_boolean_node (bitpos0 == bitpos1, type);
8721 case NE_EXPR:
8722 return constant_boolean_node (bitpos0 != bitpos1, type);
8723 case LT_EXPR:
8724 return constant_boolean_node (bitpos0 < bitpos1, type);
8725 case LE_EXPR:
8726 return constant_boolean_node (bitpos0 <= bitpos1, type);
8727 case GE_EXPR:
8728 return constant_boolean_node (bitpos0 >= bitpos1, type);
8729 case GT_EXPR:
8730 return constant_boolean_node (bitpos0 > bitpos1, type);
8731 default:;
8732 }
8733 }
8734 /* We can simplify the comparison to a comparison of the variable
8735 offset parts if the constant offset parts are equal.
8736 Be careful to use signed sizetype here because otherwise we
8737 mess with array offsets in the wrong way. This is possible
8738 because pointer arithmetic is restricted to remain within an
8739 object and overflow on pointer differences is undefined as of
8740 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
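/* A sketch of this case (not from the original source): with equal
   bases and equal bit positions, P + I < P + J folds to the
   ssizetype comparison I < J.  */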
8741 else if (bitpos0 == bitpos1
8742 && (equality_code
8743 || (indirect_base0 && DECL_P (base0))
8744 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8745 {
8746 /* By converting to signed sizetype we cover middle-end pointer
8747 arithmetic which operates on unsigned pointer types of size
8748 type size and ARRAY_REF offsets which are properly sign or
8749 zero extended from their type in case it is narrower than
8750 sizetype. */
8751 if (offset0 == NULL_TREE)
8752 offset0 = build_int_cst (ssizetype, 0);
8753 else
8754 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8755 if (offset1 == NULL_TREE)
8756 offset1 = build_int_cst (ssizetype, 0);
8757 else
8758 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8759
8760 if (!equality_code
8761 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8762 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8763 fold_overflow_warning (("assuming pointer wraparound does not "
8764 "occur when comparing P +- C1 with "
8765 "P +- C2"),
8766 WARN_STRICT_OVERFLOW_COMPARISON);
8767
8768 return fold_build2_loc (loc, code, type, offset0, offset1);
8769 }
8770 }
8771 /* For non-equal bases we can simplify if they are addresses of
8772 declarations with different addresses. */
8773 else if (indirect_base0 && indirect_base1
8774 /* We know that !operand_equal_p (base0, base1, 0)
8775 because the if condition was false. But make
8776 sure the two decls are not the same. */
8777 && base0 != base1
8778 && TREE_CODE (arg0) == ADDR_EXPR
8779 && TREE_CODE (arg1) == ADDR_EXPR
8780 && DECL_P (base0)
8781 && DECL_P (base1)
8782 /* Watch for aliases. */
8783 && (!decl_in_symtab_p (base0)
8784 || !decl_in_symtab_p (base1)
8785 || !symtab_node::get_create (base0)->equal_address_to
8786 (symtab_node::get_create (base1))))
8787 {
8788 if (code == EQ_EXPR)
8789 return omit_two_operands_loc (loc, type, boolean_false_node,
8790 arg0, arg1);
8791 else if (code == NE_EXPR)
8792 return omit_two_operands_loc (loc, type, boolean_true_node,
8793 arg0, arg1);
8794 }
8795 /* For equal offsets we can simplify to a comparison of the
8796 base addresses. */
8797 else if (bitpos0 == bitpos1
8798 && (indirect_base0
8799 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8800 && (indirect_base1
8801 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8802 && ((offset0 == offset1)
8803 || (offset0 && offset1
8804 && operand_equal_p (offset0, offset1, 0))))
8805 {
8806 if (indirect_base0)
8807 base0 = build_fold_addr_expr_loc (loc, base0);
8808 if (indirect_base1)
8809 base1 = build_fold_addr_expr_loc (loc, base1);
8810 return fold_build2_loc (loc, code, type, base0, base1);
8811 }
8812 }
8813
8814 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8815 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8816 the resulting offset is smaller in absolute value than the
8817 original one and has the same sign. */
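/* For instance, X + 3 < Y + 5 becomes X < Y + 2: the combined
   constant 2 is smaller in absolute value than the original 5 and
   keeps its sign, so no new overflow can be introduced.  */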
8818 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8819 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8820 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8821 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8822 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8823 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8824 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8825 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8826 {
8827 tree const1 = TREE_OPERAND (arg0, 1);
8828 tree const2 = TREE_OPERAND (arg1, 1);
8829 tree variable1 = TREE_OPERAND (arg0, 0);
8830 tree variable2 = TREE_OPERAND (arg1, 0);
8831 tree cst;
8832 const char * const warnmsg = G_("assuming signed overflow does not "
8833 "occur when combining constants around "
8834 "a comparison");
8835
8836 /* Put the constant on the side where it doesn't overflow and is
8837 of lower absolute value and of the same sign as before. */
8838 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8839 ? MINUS_EXPR : PLUS_EXPR,
8840 const2, const1);
8841 if (!TREE_OVERFLOW (cst)
8842 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8843 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8844 {
8845 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8846 return fold_build2_loc (loc, code, type,
8847 variable1,
8848 fold_build2_loc (loc, TREE_CODE (arg1),
8849 TREE_TYPE (arg1),
8850 variable2, cst));
8851 }
8852
8853 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8854 ? MINUS_EXPR : PLUS_EXPR,
8855 const1, const2);
8856 if (!TREE_OVERFLOW (cst)
8857 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8858 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8859 {
8860 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8861 return fold_build2_loc (loc, code, type,
8862 fold_build2_loc (loc, TREE_CODE (arg0),
8863 TREE_TYPE (arg0),
8864 variable1, cst),
8865 variable2);
8866 }
8867 }
8868
8869 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8870 if (tem)
8871 return tem;
8872
8873 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8874 && CONVERT_EXPR_P (arg0))
8875 {
8876 /* If we are widening one operand of an integer comparison,
8877 see if the other operand is similarly being widened. Perhaps we
8878 can do the comparison in the narrower type. */
8879 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8880 if (tem)
8881 return tem;
8882
8883 /* Or if we are changing signedness. */
8884 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8885 if (tem)
8886 return tem;
8887 }
8888
8889 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8890 constant, we can simplify it. */
8891 if (TREE_CODE (arg1) == INTEGER_CST
8892 && (TREE_CODE (arg0) == MIN_EXPR
8893 || TREE_CODE (arg0) == MAX_EXPR)
8894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8895 {
8896 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8897 if (tem)
8898 return tem;
8899 }
8900
8901 /* If we are comparing an expression that just has comparisons
8902 of two integer values, arithmetic expressions of those comparisons,
8903 and constants, we can simplify it. There are only three cases
8904 to check: the two values can either be equal, the first can be
8905 greater, or the second can be greater. Fold the expression for
8906 those three values. Since each value must be 0 or 1, we have
8907 eight possibilities, each of which corresponds to the constant 0
8908 or 1 or one of the six possible comparisons.
8909
8910 This handles common cases like (a > b) == 0 but also handles
8911 expressions like ((x > y) - (y > x)) > 0, which supposedly
8912 occur in macroized code. */
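/* Worked example (not from the original source): for
   ((x > y) - (y > x)) > 0 the three orderings give 1 - 0, 0 - 0 and
   0 - 1, so (high, equal, low) evaluate to (1, 0, 0); mask 4 below
   selects GT_EXPR and the whole expression folds to x > y.  */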
8913
8914 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8915 {
8916 tree cval1 = 0, cval2 = 0;
8917 int save_p = 0;
8918
8919 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8920 /* Don't handle degenerate cases here; they should already
8921 have been handled anyway. */
8922 && cval1 != 0 && cval2 != 0
8923 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8924 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8925 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8926 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8927 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8928 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8929 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8930 {
8931 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8932 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8933
8934 /* We can't just pass T to eval_subst in case cval1 or cval2
8935 was the same as ARG1. */
8936
8937 tree high_result
8938 = fold_build2_loc (loc, code, type,
8939 eval_subst (loc, arg0, cval1, maxval,
8940 cval2, minval),
8941 arg1);
8942 tree equal_result
8943 = fold_build2_loc (loc, code, type,
8944 eval_subst (loc, arg0, cval1, maxval,
8945 cval2, maxval),
8946 arg1);
8947 tree low_result
8948 = fold_build2_loc (loc, code, type,
8949 eval_subst (loc, arg0, cval1, minval,
8950 cval2, maxval),
8951 arg1);
8952
8953 /* All three of these results should be 0 or 1. Confirm they are.
8954 Then use those values to select the proper code to use. */
8955
8956 if (TREE_CODE (high_result) == INTEGER_CST
8957 && TREE_CODE (equal_result) == INTEGER_CST
8958 && TREE_CODE (low_result) == INTEGER_CST)
8959 {
8960 /* Make a 3-bit mask with the high-order bit being the
8961 value for `>', the next for `=', and the low for `<'. */
8962 switch ((integer_onep (high_result) * 4)
8963 + (integer_onep (equal_result) * 2)
8964 + integer_onep (low_result))
8965 {
8966 case 0:
8967 /* Always false. */
8968 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8969 case 1:
8970 code = LT_EXPR;
8971 break;
8972 case 2:
8973 code = EQ_EXPR;
8974 break;
8975 case 3:
8976 code = LE_EXPR;
8977 break;
8978 case 4:
8979 code = GT_EXPR;
8980 break;
8981 case 5:
8982 code = NE_EXPR;
8983 break;
8984 case 6:
8985 code = GE_EXPR;
8986 break;
8987 case 7:
8988 /* Always true. */
8989 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8990 }
8991
8992 if (save_p)
8993 {
8994 tem = save_expr (build2 (code, type, cval1, cval2));
8995 SET_EXPR_LOCATION (tem, loc);
8996 return tem;
8997 }
8998 return fold_build2_loc (loc, code, type, cval1, cval2);
8999 }
9000 }
9001 }
9002
9003 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9004 into a single range test. */
9005 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9006 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9007 && TREE_CODE (arg1) == INTEGER_CST
9008 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9009 && !integer_zerop (TREE_OPERAND (arg0, 1))
9010 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9011 && !TREE_OVERFLOW (arg1))
9012 {
9013 tem = fold_div_compare (loc, code, type, arg0, arg1);
9014 if (tem != NULL_TREE)
9015 return tem;
9016 }
9017
9018 return NULL_TREE;
9019 }
9020
9021
9022 /* Subroutine of fold_binary. Optimize complex multiplications of the
9023 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9024 argument EXPR represents the expression "z" of type TYPE. */
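/* For example, z = 3 + 4i gives z * conj(z) = (3 + 4i)(3 - 4i)
   = 9 + 16 = 25 + 0i, i.e. realpart(z)^2 + imagpart(z)^2 with a zero
   imaginary part, which is the form the code below constructs.  */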
9025
9026 static tree
9027 fold_mult_zconjz (location_t loc, tree type, tree expr)
9028 {
9029 tree itype = TREE_TYPE (type);
9030 tree rpart, ipart, tem;
9031
9032 if (TREE_CODE (expr) == COMPLEX_EXPR)
9033 {
9034 rpart = TREE_OPERAND (expr, 0);
9035 ipart = TREE_OPERAND (expr, 1);
9036 }
9037 else if (TREE_CODE (expr) == COMPLEX_CST)
9038 {
9039 rpart = TREE_REALPART (expr);
9040 ipart = TREE_IMAGPART (expr);
9041 }
9042 else
9043 {
9044 expr = save_expr (expr);
9045 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9046 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9047 }
9048
9049 rpart = save_expr (rpart);
9050 ipart = save_expr (ipart);
9051 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9052 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9053 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9054 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9055 build_zero_cst (itype));
9056 }
9057
9058
9059 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9060 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9061
9062 static bool
9063 vec_cst_ctor_to_array (tree arg, tree *elts)
9064 {
9065 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9066
9067 if (TREE_CODE (arg) == VECTOR_CST)
9068 {
9069 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9070 elts[i] = VECTOR_CST_ELT (arg, i);
9071 }
9072 else if (TREE_CODE (arg) == CONSTRUCTOR)
9073 {
9074 constructor_elt *elt;
9075
9076 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9077 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9078 return false;
9079 else
9080 elts[i] = elt->value;
9081 }
9082 else
9083 return false;
9084 for (; i < nelts; i++)
9085 elts[i]
9086 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9087 return true;
9088 }
9089
9090 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9091 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9092 NULL_TREE otherwise. */
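/* For example (a hypothetical 4-element case): with
   sel = { 0, 4, 1, 5 } the result interleaves the low halves of ARG0
   and ARG1, i.e. { a0, b0, a1, b1 }.  */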
9093
9094 static tree
9095 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9096 {
9097 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9098 tree *elts;
9099 bool need_ctor = false;
9100
9101 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9102 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9103 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9104 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9105 return NULL_TREE;
9106
9107 elts = XALLOCAVEC (tree, nelts * 3);
9108 if (!vec_cst_ctor_to_array (arg0, elts)
9109 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9110 return NULL_TREE;
9111
9112 for (i = 0; i < nelts; i++)
9113 {
9114 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9115 need_ctor = true;
9116 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9117 }
9118
9119 if (need_ctor)
9120 {
9121 vec<constructor_elt, va_gc> *v;
9122 vec_alloc (v, nelts);
9123 for (i = 0; i < nelts; i++)
9124 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9125 return build_constructor (type, v);
9126 }
9127 else
9128 return build_vector (type, &elts[2 * nelts]);
9129 }
9130
9131 /* Try to fold a pointer difference of type TYPE between two address
9132 expressions of array references AREF0 and AREF1 using location LOC. Return a
9133 simplified expression for the difference or NULL_TREE. */
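/* For instance, for &a[i] - &a[j] the recursion below bottoms out at
   the equal bases and the result is (i - j) * sizeof (a[0]) expressed
   in TYPE (an illustrative sketch, not from the original source).  */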
9134
9135 static tree
9136 fold_addr_of_array_ref_difference (location_t loc, tree type,
9137 tree aref0, tree aref1)
9138 {
9139 tree base0 = TREE_OPERAND (aref0, 0);
9140 tree base1 = TREE_OPERAND (aref1, 0);
9141 tree base_offset = build_int_cst (type, 0);
9142
9143 /* If the bases are array references as well, recurse. If the bases
9144 are pointer indirections compute the difference of the pointers.
9145 If the bases are equal, we are set. */
9146 if ((TREE_CODE (base0) == ARRAY_REF
9147 && TREE_CODE (base1) == ARRAY_REF
9148 && (base_offset
9149 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9150 || (INDIRECT_REF_P (base0)
9151 && INDIRECT_REF_P (base1)
9152 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9153 TREE_OPERAND (base0, 0),
9154 TREE_OPERAND (base1, 0))))
9155 || operand_equal_p (base0, base1, 0))
9156 {
9157 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9158 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9159 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9160 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9161 return fold_build2_loc (loc, PLUS_EXPR, type,
9162 base_offset,
9163 fold_build2_loc (loc, MULT_EXPR, type,
9164 diff, esz));
9165 }
9166 return NULL_TREE;
9167 }
9168
9169 /* If the real or vector real constant CST of type TYPE has an exact
9170 inverse, return it, else return NULL. */
9171
9172 tree
9173 exact_inverse (tree type, tree cst)
9174 {
9175 REAL_VALUE_TYPE r;
9176 tree unit_type, *elts;
9177 machine_mode mode;
9178 unsigned vec_nelts, i;
9179
9180 switch (TREE_CODE (cst))
9181 {
9182 case REAL_CST:
9183 r = TREE_REAL_CST (cst);
9184
9185 if (exact_real_inverse (TYPE_MODE (type), &r))
9186 return build_real (type, r);
9187
9188 return NULL_TREE;
9189
9190 case VECTOR_CST:
9191 vec_nelts = VECTOR_CST_NELTS (cst);
9192 elts = XALLOCAVEC (tree, vec_nelts);
9193 unit_type = TREE_TYPE (type);
9194 mode = TYPE_MODE (unit_type);
9195
9196 for (i = 0; i < vec_nelts; i++)
9197 {
9198 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9199 if (!exact_real_inverse (mode, &r))
9200 return NULL_TREE;
9201 elts[i] = build_real (unit_type, r);
9202 }
9203
9204 return build_vector (type, elts);
9205
9206 default:
9207 return NULL_TREE;
9208 }
9209 }
9210
9211 /* Mask out the tz least significant bits of X of type TYPE where
9212 tz is the number of trailing zeroes in Y. */
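/* E.g. if Y is 24 (binary 11000), tz is 3 and the three low bits of
   X are cleared; if Y is odd, tz is 0 and X is returned unchanged.  */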
9213 static wide_int
9214 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9215 {
9216 int tz = wi::ctz (y);
9217 if (tz > 0)
9218 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9219 return x;
9220 }
9221
9222 /* Return true when T is an address and is known to be nonzero.
9223 For floating point we further ensure that T is not denormal.
9224 Similar logic is present in nonzero_address in rtlanal.h.
9225
9226 If the return value is based on the assumption that signed overflow
9227 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9228 change *STRICT_OVERFLOW_P. */
9229
9230 static bool
9231 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9232 {
9233 tree type = TREE_TYPE (t);
9234 enum tree_code code;
9235
9236 /* Doing something useful for floating point would need more work. */
9237 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9238 return false;
9239
9240 code = TREE_CODE (t);
9241 switch (TREE_CODE_CLASS (code))
9242 {
9243 case tcc_unary:
9244 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9245 strict_overflow_p);
9246 case tcc_binary:
9247 case tcc_comparison:
9248 return tree_binary_nonzero_warnv_p (code, type,
9249 TREE_OPERAND (t, 0),
9250 TREE_OPERAND (t, 1),
9251 strict_overflow_p);
9252 case tcc_constant:
9253 case tcc_declaration:
9254 case tcc_reference:
9255 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9256
9257 default:
9258 break;
9259 }
9260
9261 switch (code)
9262 {
9263 case TRUTH_NOT_EXPR:
9264 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9265 strict_overflow_p);
9266
9267 case TRUTH_AND_EXPR:
9268 case TRUTH_OR_EXPR:
9269 case TRUTH_XOR_EXPR:
9270 return tree_binary_nonzero_warnv_p (code, type,
9271 TREE_OPERAND (t, 0),
9272 TREE_OPERAND (t, 1),
9273 strict_overflow_p);
9274
9275 case COND_EXPR:
9276 case CONSTRUCTOR:
9277 case OBJ_TYPE_REF:
9278 case ASSERT_EXPR:
9279 case ADDR_EXPR:
9280 case WITH_SIZE_EXPR:
9281 case SSA_NAME:
9282 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9283
9284 case COMPOUND_EXPR:
9285 case MODIFY_EXPR:
9286 case BIND_EXPR:
9287 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9288 strict_overflow_p);
9289
9290 case SAVE_EXPR:
9291 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9292 strict_overflow_p);
9293
9294 case CALL_EXPR:
9295 {
9296 tree fndecl = get_callee_fndecl (t);
9297 if (!fndecl) return false;
9298 if (flag_delete_null_pointer_checks && !flag_check_new
9299 && DECL_IS_OPERATOR_NEW (fndecl)
9300 && !TREE_NOTHROW (fndecl))
9301 return true;
9302 if (flag_delete_null_pointer_checks
9303 && lookup_attribute ("returns_nonnull",
9304 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9305 return true;
9306 return alloca_call_p (t);
9307 }
9308
9309 default:
9310 break;
9311 }
9312 return false;
9313 }
9314
9315 /* Return true when T is an address and is known to be nonzero.
9316 Handle warnings about undefined signed overflow. */
9317
9318 static bool
9319 tree_expr_nonzero_p (tree t)
9320 {
9321 bool ret, strict_overflow_p;
9322
9323 strict_overflow_p = false;
9324 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9325 if (strict_overflow_p)
9326 fold_overflow_warning (("assuming signed overflow does not occur when "
9327 "determining that expression is always "
9328 "non-zero"),
9329 WARN_STRICT_OVERFLOW_MISC);
9330 return ret;
9331 }
9332
9333 /* Fold a binary expression of code CODE and type TYPE with operands
9334 OP0 and OP1. LOC is the location of the resulting expression.
9335 Return the folded expression if folding is successful. Otherwise,
9336 return NULL_TREE. */
9337
9338 tree
9339 fold_binary_loc (location_t loc,
9340 enum tree_code code, tree type, tree op0, tree op1)
9341 {
9342 enum tree_code_class kind = TREE_CODE_CLASS (code);
9343 tree arg0, arg1, tem;
9344 tree t1 = NULL_TREE;
9345 bool strict_overflow_p;
9346 unsigned int prec;
9347
9348 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9349 && TREE_CODE_LENGTH (code) == 2
9350 && op0 != NULL_TREE
9351 && op1 != NULL_TREE);
9352
9353 arg0 = op0;
9354 arg1 = op1;
9355
9356 /* Strip any conversions that don't change the mode. This is
9357 safe for every expression, except for a comparison expression
9358 because its signedness is derived from its operands. So, in
9359 the latter case, only strip conversions that don't change the
9360 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9361 preserved.
9362
9363 Note that this is done as an internal manipulation within the
9364 constant folder, in order to find the simplest representation
9365 of the arguments so that their form can be studied. In any
9366 cases, the appropriate type conversions should be put back in
9367 the tree that will get out of the constant folder. */
9368
9369 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9370 {
9371 STRIP_SIGN_NOPS (arg0);
9372 STRIP_SIGN_NOPS (arg1);
9373 }
9374 else
9375 {
9376 STRIP_NOPS (arg0);
9377 STRIP_NOPS (arg1);
9378 }
9379
9380 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9381 constant but we can't do arithmetic on them. */
9382 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9383 {
9384 tem = const_binop (code, type, arg0, arg1);
9385 if (tem != NULL_TREE)
9386 {
9387 if (TREE_TYPE (tem) != type)
9388 tem = fold_convert_loc (loc, type, tem);
9389 return tem;
9390 }
9391 }
9392
9393 /* If this is a commutative operation, and ARG0 is a constant, move it
9394 to ARG1 to reduce the number of tests below. */
9395 if (commutative_tree_code (code)
9396 && tree_swap_operands_p (arg0, arg1, true))
9397 return fold_build2_loc (loc, code, type, op1, op0);
9398
9399 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9400 to ARG1 to reduce the number of tests below. */
9401 if (kind == tcc_comparison
9402 && tree_swap_operands_p (arg0, arg1, true))
9403 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9404
9405 tem = generic_simplify (loc, code, type, op0, op1);
9406 if (tem)
9407 return tem;
9408
9409 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9410
9411 First check for cases where an arithmetic operation is applied to a
9412 compound, conditional, or comparison operation. Push the arithmetic
9413 operation inside the compound or conditional to see if any folding
9414 can then be done. Convert comparison to conditional for this purpose.
9415 This also optimizes non-constant cases that used to be done in
9416 expand_expr.
9417
9418 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9419 one of the operands is a comparison and the other is a comparison, a
9420 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9421 code below would make the expression more complex. Change it to a
9422 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9423 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9424
9425 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9426 || code == EQ_EXPR || code == NE_EXPR)
9427 && TREE_CODE (type) != VECTOR_TYPE
9428 && ((truth_value_p (TREE_CODE (arg0))
9429 && (truth_value_p (TREE_CODE (arg1))
9430 || (TREE_CODE (arg1) == BIT_AND_EXPR
9431 && integer_onep (TREE_OPERAND (arg1, 1)))))
9432 || (truth_value_p (TREE_CODE (arg1))
9433 && (truth_value_p (TREE_CODE (arg0))
9434 || (TREE_CODE (arg0) == BIT_AND_EXPR
9435 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9436 {
9437 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9438 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9439 : TRUTH_XOR_EXPR,
9440 boolean_type_node,
9441 fold_convert_loc (loc, boolean_type_node, arg0),
9442 fold_convert_loc (loc, boolean_type_node, arg1));
9443
9444 if (code == EQ_EXPR)
9445 tem = invert_truthvalue_loc (loc, tem);
9446
9447 return fold_convert_loc (loc, type, tem);
9448 }
9449
9450 if (TREE_CODE_CLASS (code) == tcc_binary
9451 || TREE_CODE_CLASS (code) == tcc_comparison)
9452 {
9453 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9454 {
9455 tem = fold_build2_loc (loc, code, type,
9456 fold_convert_loc (loc, TREE_TYPE (op0),
9457 TREE_OPERAND (arg0, 1)), op1);
9458 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9459 tem);
9460 }
9461 if (TREE_CODE (arg1) == COMPOUND_EXPR
9462 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9463 {
9464 tem = fold_build2_loc (loc, code, type, op0,
9465 fold_convert_loc (loc, TREE_TYPE (op1),
9466 TREE_OPERAND (arg1, 1)));
9467 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9468 tem);
9469 }
9470
9471 if (TREE_CODE (arg0) == COND_EXPR
9472 || TREE_CODE (arg0) == VEC_COND_EXPR
9473 || COMPARISON_CLASS_P (arg0))
9474 {
9475 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9476 arg0, arg1,
9477 /*cond_first_p=*/1);
9478 if (tem != NULL_TREE)
9479 return tem;
9480 }
9481
9482 if (TREE_CODE (arg1) == COND_EXPR
9483 || TREE_CODE (arg1) == VEC_COND_EXPR
9484 || COMPARISON_CLASS_P (arg1))
9485 {
9486 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9487 arg1, arg0,
9488 /*cond_first_p=*/0);
9489 if (tem != NULL_TREE)
9490 return tem;
9491 }
9492 }
9493
9494 switch (code)
9495 {
9496 case MEM_REF:
9497 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9498 if (TREE_CODE (arg0) == ADDR_EXPR
9499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9500 {
9501 tree iref = TREE_OPERAND (arg0, 0);
9502 return fold_build2 (MEM_REF, type,
9503 TREE_OPERAND (iref, 0),
9504 int_const_binop (PLUS_EXPR, arg1,
9505 TREE_OPERAND (iref, 1)));
9506 }
9507
9508 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9509 if (TREE_CODE (arg0) == ADDR_EXPR
9510 && handled_component_p (TREE_OPERAND (arg0, 0)))
9511 {
9512 tree base;
9513 HOST_WIDE_INT coffset;
9514 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9515 &coffset);
9516 if (!base)
9517 return NULL_TREE;
9518 return fold_build2 (MEM_REF, type,
9519 build_fold_addr_expr (base),
9520 int_const_binop (PLUS_EXPR, arg1,
9521 size_int (coffset)));
9522 }
9523
9524 return NULL_TREE;
9525
9526 case POINTER_PLUS_EXPR:
9527 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9528 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9529 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9530 return fold_convert_loc (loc, type,
9531 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9532 fold_convert_loc (loc, sizetype,
9533 arg1),
9534 fold_convert_loc (loc, sizetype,
9535 arg0)));
9536
9537 return NULL_TREE;
9538
9539 case PLUS_EXPR:
9540 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9541 {
9542 /* X + (X / CST) * -CST is X % CST. */
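/* E.g. X + (X / 4) * -4 is X - (X / 4) * 4, i.e. X % 4 for
   truncating division; the check below verifies that the two
   constants cancel.  */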
9543 if (TREE_CODE (arg1) == MULT_EXPR
9544 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9545 && operand_equal_p (arg0,
9546 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9547 {
9548 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9549 tree cst1 = TREE_OPERAND (arg1, 1);
9550 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9551 cst1, cst0);
9552 if (sum && integer_zerop (sum))
9553 return fold_convert_loc (loc, type,
9554 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9555 TREE_TYPE (arg0), arg0,
9556 cst0));
9557 }
9558 }
9559
9560 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9561 one. Make sure the type is not saturating and has the signedness of
9562 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9563 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9564 if ((TREE_CODE (arg0) == MULT_EXPR
9565 || TREE_CODE (arg1) == MULT_EXPR)
9566 && !TYPE_SATURATING (type)
9567 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9568 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9569 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9570 {
9571 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9572 if (tem)
9573 return tem;
9574 }
9575
9576 if (! FLOAT_TYPE_P (type))
9577 {
9578 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9579 with a constant, and the two constants have no bits in common,
9580 we should treat this as a BIT_IOR_EXPR since this may produce more
9581 simplifications. */
9582 if (TREE_CODE (arg0) == BIT_AND_EXPR
9583 && TREE_CODE (arg1) == BIT_AND_EXPR
9584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9585 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9586 && wi::bit_and (TREE_OPERAND (arg0, 1),
9587 TREE_OPERAND (arg1, 1)) == 0)
9588 {
9589 code = BIT_IOR_EXPR;
9590 goto bit_ior;
9591 }
9592
9593 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9594 (plus (plus (mult) (mult)) (foo)) so that we can
9595 take advantage of the factoring cases below. */
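/* E.g. (a * 4 + b) + c * 4 is rebuilt as (a * 4 + c * 4) + b so the
   two multiplications can be factored afterwards.  */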
9596 if (ANY_INTEGRAL_TYPE_P (type)
9597 && TYPE_OVERFLOW_WRAPS (type)
9598 && (((TREE_CODE (arg0) == PLUS_EXPR
9599 || TREE_CODE (arg0) == MINUS_EXPR)
9600 && TREE_CODE (arg1) == MULT_EXPR)
9601 || ((TREE_CODE (arg1) == PLUS_EXPR
9602 || TREE_CODE (arg1) == MINUS_EXPR)
9603 && TREE_CODE (arg0) == MULT_EXPR)))
9604 {
9605 tree parg0, parg1, parg, marg;
9606 enum tree_code pcode;
9607
9608 if (TREE_CODE (arg1) == MULT_EXPR)
9609 parg = arg0, marg = arg1;
9610 else
9611 parg = arg1, marg = arg0;
9612 pcode = TREE_CODE (parg);
9613 parg0 = TREE_OPERAND (parg, 0);
9614 parg1 = TREE_OPERAND (parg, 1);
9615 STRIP_NOPS (parg0);
9616 STRIP_NOPS (parg1);
9617
9618 if (TREE_CODE (parg0) == MULT_EXPR
9619 && TREE_CODE (parg1) != MULT_EXPR)
9620 return fold_build2_loc (loc, pcode, type,
9621 fold_build2_loc (loc, PLUS_EXPR, type,
9622 fold_convert_loc (loc, type,
9623 parg0),
9624 fold_convert_loc (loc, type,
9625 marg)),
9626 fold_convert_loc (loc, type, parg1));
9627 if (TREE_CODE (parg0) != MULT_EXPR
9628 && TREE_CODE (parg1) == MULT_EXPR)
9629 return
9630 fold_build2_loc (loc, PLUS_EXPR, type,
9631 fold_convert_loc (loc, type, parg0),
9632 fold_build2_loc (loc, pcode, type,
9633 fold_convert_loc (loc, type, marg),
9634 fold_convert_loc (loc, type,
9635 parg1)));
9636 }
9637 }
9638 else
9639 {
9640 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9641 to __complex__ ( x, y ). This is not the same for SNaNs or
9642 if signed zeros are involved. */
9643 if (!HONOR_SNANS (element_mode (arg0))
9644 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9645 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9646 {
9647 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9648 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9649 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9650 bool arg0rz = false, arg0iz = false;
9651 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9652 || (arg0i && (arg0iz = real_zerop (arg0i))))
9653 {
9654 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9655 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9656 if (arg0rz && arg1i && real_zerop (arg1i))
9657 {
9658 tree rp = arg1r ? arg1r
9659 : build1 (REALPART_EXPR, rtype, arg1);
9660 tree ip = arg0i ? arg0i
9661 : build1 (IMAGPART_EXPR, rtype, arg0);
9662 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9663 }
9664 else if (arg0iz && arg1r && real_zerop (arg1r))
9665 {
9666 tree rp = arg0r ? arg0r
9667 : build1 (REALPART_EXPR, rtype, arg0);
9668 tree ip = arg1i ? arg1i
9669 : build1 (IMAGPART_EXPR, rtype, arg1);
9670 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9671 }
9672 }
9673 }
9674
9675 if (flag_unsafe_math_optimizations
9676 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9677 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9678 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9679 return tem;
9680
9681 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9682 We associate floats only if the user has specified
9683 -fassociative-math. */
9684 if (flag_associative_math
9685 && TREE_CODE (arg1) == PLUS_EXPR
9686 && TREE_CODE (arg0) != MULT_EXPR)
9687 {
9688 tree tree10 = TREE_OPERAND (arg1, 0);
9689 tree tree11 = TREE_OPERAND (arg1, 1);
9690 if (TREE_CODE (tree11) == MULT_EXPR
9691 && TREE_CODE (tree10) == MULT_EXPR)
9692 {
9693 tree tree0;
9694 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9695 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9696 }
9697 }
9698 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9699 We associate floats only if the user has specified
9700 -fassociative-math. */
9701 if (flag_associative_math
9702 && TREE_CODE (arg0) == PLUS_EXPR
9703 && TREE_CODE (arg1) != MULT_EXPR)
9704 {
9705 tree tree00 = TREE_OPERAND (arg0, 0);
9706 tree tree01 = TREE_OPERAND (arg0, 1);
9707 if (TREE_CODE (tree01) == MULT_EXPR
9708 && TREE_CODE (tree00) == MULT_EXPR)
9709 {
9710 tree tree0;
9711 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9712 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9713 }
9714 }
9715 }
9716
9717 bit_rotate:
9718 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9719 is a rotate of A by C1 bits. */
9720 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9721 is a rotate of A by B bits. */
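/* For example, with a 32-bit unsigned A, both (A << 8) + (A >> 24)
   and (A << B) + (A >> (32 - B)) are recognized as rotates of A by
   the code below.  */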
9722 {
9723 enum tree_code code0, code1;
9724 tree rtype;
9725 code0 = TREE_CODE (arg0);
9726 code1 = TREE_CODE (arg1);
9727 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9728 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9729 && operand_equal_p (TREE_OPERAND (arg0, 0),
9730 TREE_OPERAND (arg1, 0), 0)
9731 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9732 TYPE_UNSIGNED (rtype))
9733 /* Only create rotates in complete modes. Other cases are not
9734 expanded properly. */
9735 && (element_precision (rtype)
9736 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9737 {
9738 tree tree01, tree11;
9739 enum tree_code code01, code11;
9740
9741 tree01 = TREE_OPERAND (arg0, 1);
9742 tree11 = TREE_OPERAND (arg1, 1);
9743 STRIP_NOPS (tree01);
9744 STRIP_NOPS (tree11);
9745 code01 = TREE_CODE (tree01);
9746 code11 = TREE_CODE (tree11);
9747 if (code01 == INTEGER_CST
9748 && code11 == INTEGER_CST
9749 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9750 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9751 {
9752 tem = build2_loc (loc, LROTATE_EXPR,
9753 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9754 TREE_OPERAND (arg0, 0),
9755 code0 == LSHIFT_EXPR
9756 ? TREE_OPERAND (arg0, 1)
9757 : TREE_OPERAND (arg1, 1));
9758 return fold_convert_loc (loc, type, tem);
9759 }
9760 else if (code11 == MINUS_EXPR)
9761 {
9762 tree tree110, tree111;
9763 tree110 = TREE_OPERAND (tree11, 0);
9764 tree111 = TREE_OPERAND (tree11, 1);
9765 STRIP_NOPS (tree110);
9766 STRIP_NOPS (tree111);
9767 if (TREE_CODE (tree110) == INTEGER_CST
9768 && 0 == compare_tree_int (tree110,
9769 element_precision
9770 (TREE_TYPE (TREE_OPERAND
9771 (arg0, 0))))
9772 && operand_equal_p (tree01, tree111, 0))
9773 return
9774 fold_convert_loc (loc, type,
9775 build2 ((code0 == LSHIFT_EXPR
9776 ? LROTATE_EXPR
9777 : RROTATE_EXPR),
9778 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9779 TREE_OPERAND (arg0, 0),
9780 TREE_OPERAND (arg0, 1)));
9781 }
9782 else if (code01 == MINUS_EXPR)
9783 {
9784 tree tree010, tree011;
9785 tree010 = TREE_OPERAND (tree01, 0);
9786 tree011 = TREE_OPERAND (tree01, 1);
9787 STRIP_NOPS (tree010);
9788 STRIP_NOPS (tree011);
9789 if (TREE_CODE (tree010) == INTEGER_CST
9790 && 0 == compare_tree_int (tree010,
9791 element_precision
9792 (TREE_TYPE (TREE_OPERAND
9793 (arg0, 0))))
9794 && operand_equal_p (tree11, tree011, 0))
9795 return fold_convert_loc
9796 (loc, type,
9797 build2 ((code0 != LSHIFT_EXPR
9798 ? LROTATE_EXPR
9799 : RROTATE_EXPR),
9800 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9801 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9802 }
9803 }
9804 }
9805
9806 associate:
9807 /* In most languages, we can't associate operations on floats through
9808 parentheses. Rather than remember where the parentheses were, we
9809 don't associate floats at all, unless the user has specified
9810 -fassociative-math.
9811 And, we need to make sure type is not saturating. */
9812
9813 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9814 && !TYPE_SATURATING (type))
9815 {
9816 tree var0, con0, lit0, minus_lit0;
9817 tree var1, con1, lit1, minus_lit1;
9818 tree atype = type;
9819 bool ok = true;
9820
9821 /* Split both trees into variables, constants, and literals. Then
9822 associate each group together, the constants with literals,
9823 then the result with variables. This increases the chances of
9824 literals being recombined later and of generating relocatable
9825 expressions for the sum of a constant and literal. */
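/* A sketch (not from the original source): for unsigned X and Y,
   (X + 3) - (Y - 5) splits into variables X, -Y and literals 3, 5;
   the literals associate to 8 and the expression is rebuilt as
   (X - Y) + 8.  */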
9826 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9827 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9828 code == MINUS_EXPR);
9829
9830 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9831 if (code == MINUS_EXPR)
9832 code = PLUS_EXPR;
9833
9834 /* With undefined overflow prefer doing association in a type
9835 which wraps on overflow, if that is one of the operand types. */
9836 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9837 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9838 {
9839 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9840 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9841 atype = TREE_TYPE (arg0);
9842 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9843 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9844 atype = TREE_TYPE (arg1);
9845 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9846 }
9847
9848 /* With undefined overflow we can only associate constants with one
9849 variable, and constants whose association doesn't overflow. */
9850 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9851 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9852 {
9853 if (var0 && var1)
9854 {
9855 tree tmp0 = var0;
9856 tree tmp1 = var1;
9857
9858 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9859 tmp0 = TREE_OPERAND (tmp0, 0);
9860 if (CONVERT_EXPR_P (tmp0)
9861 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9862 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9863 <= TYPE_PRECISION (atype)))
9864 tmp0 = TREE_OPERAND (tmp0, 0);
9865 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9866 tmp1 = TREE_OPERAND (tmp1, 0);
9867 if (CONVERT_EXPR_P (tmp1)
9868 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9869 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9870 <= TYPE_PRECISION (atype)))
9871 tmp1 = TREE_OPERAND (tmp1, 0);
9872 /* The only case we can still associate with two variables
9873 is if they are the same, modulo negation and bit-pattern
9874 preserving conversions. */
9875 if (!operand_equal_p (tmp0, tmp1, 0))
9876 ok = false;
9877 }
9878 }
9879
9880 /* Only do something if we found more than two objects. Otherwise,
9881 nothing has changed and we risk infinite recursion. */
9882 if (ok
9883 && (2 < ((var0 != 0) + (var1 != 0)
9884 + (con0 != 0) + (con1 != 0)
9885 + (lit0 != 0) + (lit1 != 0)
9886 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9887 {
9888 bool any_overflows = false;
9889 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9890 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9891 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9892 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9893 var0 = associate_trees (loc, var0, var1, code, atype);
9894 con0 = associate_trees (loc, con0, con1, code, atype);
9895 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9896 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9897 code, atype);
9898
9899 /* Preserve the MINUS_EXPR if the negative part of the literal is
9900 greater than the positive part. Otherwise, the multiplicative
9901 folding code (i.e. extract_muldiv) may be fooled in case
9902 unsigned constants are subtracted, like in the following
9903 example: ((X*2 + 4) - 8U)/2. */
9904 if (minus_lit0 && lit0)
9905 {
9906 if (TREE_CODE (lit0) == INTEGER_CST
9907 && TREE_CODE (minus_lit0) == INTEGER_CST
9908 && tree_int_cst_lt (lit0, minus_lit0))
9909 {
9910 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9911 MINUS_EXPR, atype);
9912 lit0 = 0;
9913 }
9914 else
9915 {
9916 lit0 = associate_trees (loc, lit0, minus_lit0,
9917 MINUS_EXPR, atype);
9918 minus_lit0 = 0;
9919 }
9920 }
9921
9922 /* Don't introduce overflows through reassociation. */
9923 if (!any_overflows
9924 && ((lit0 && TREE_OVERFLOW_P (lit0))
9925 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9926 return NULL_TREE;
9927
9928 if (minus_lit0)
9929 {
9930 if (con0 == 0)
9931 return
9932 fold_convert_loc (loc, type,
9933 associate_trees (loc, var0, minus_lit0,
9934 MINUS_EXPR, atype));
9935 else
9936 {
9937 con0 = associate_trees (loc, con0, minus_lit0,
9938 MINUS_EXPR, atype);
9939 return
9940 fold_convert_loc (loc, type,
9941 associate_trees (loc, var0, con0,
9942 PLUS_EXPR, atype));
9943 }
9944 }
9945
9946 con0 = associate_trees (loc, con0, lit0, code, atype);
9947 return
9948 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9949 code, atype));
9950 }
9951 }
9952
9953 return NULL_TREE;
9954
9955 case MINUS_EXPR:
9956 /* Pointer simplifications for subtraction, simple reassociations. */
9957 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9958 {
9959 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9960 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9961 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9962 {
9963 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9964 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9965 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9966 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9967 return fold_build2_loc (loc, PLUS_EXPR, type,
9968 fold_build2_loc (loc, MINUS_EXPR, type,
9969 arg00, arg10),
9970 fold_build2_loc (loc, MINUS_EXPR, type,
9971 arg01, arg11));
9972 }
9973 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9974 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9975 {
9976 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9977 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9978 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9979 fold_convert_loc (loc, type, arg1));
9980 if (tmp)
9981 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9982 }
9983 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9984 simplifies. */
9985 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9986 {
9987 tree arg10 = fold_convert_loc (loc, type,
9988 TREE_OPERAND (arg1, 0));
9989 tree arg11 = fold_convert_loc (loc, type,
9990 TREE_OPERAND (arg1, 1));
9991 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
9992 fold_convert_loc (loc, type, arg0),
9993 arg10);
9994 if (tmp)
9995 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
9996 }
9997 }
9998 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9999 if (TREE_CODE (arg0) == NEGATE_EXPR
10000 && negate_expr_p (arg1)
10001 && reorder_operands_p (arg0, arg1))
10002 return fold_build2_loc (loc, MINUS_EXPR, type,
10003 fold_convert_loc (loc, type,
10004 negate_expr (arg1)),
10005 fold_convert_loc (loc, type,
10006 TREE_OPERAND (arg0, 0)));
10007
10008 if (! FLOAT_TYPE_P (type))
10009 {
10010 /* Fold A - (A & B) into ~B & A. */
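/* E.g. with A = 0b1100 and B = 0b1010: A & B = 0b1000, so
   A - (A & B) = 0b0100 = ~B & A; the subtraction never borrows
   because A & B only has bits that are also set in A.  */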
10011 if (!TREE_SIDE_EFFECTS (arg0)
10012 && TREE_CODE (arg1) == BIT_AND_EXPR)
10013 {
10014 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10015 {
10016 tree arg10 = fold_convert_loc (loc, type,
10017 TREE_OPERAND (arg1, 0));
10018 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10019 fold_build1_loc (loc, BIT_NOT_EXPR,
10020 type, arg10),
10021 fold_convert_loc (loc, type, arg0));
10022 }
10023 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10024 {
10025 tree arg11 = fold_convert_loc (loc,
10026 type, TREE_OPERAND (arg1, 1));
10027 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10028 fold_build1_loc (loc, BIT_NOT_EXPR,
10029 type, arg11),
10030 fold_convert_loc (loc, type, arg0));
10031 }
10032 }
10033
10034 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10035 any power of 2 minus 1. */
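/* E.g. with B = 7 and A = 0b11010: (A & ~7) - (A & 7)
   = 0b11000 - 0b00010 = 0b10110, and likewise (A ^ 7) - 7
   = 0b11101 - 0b00111 = 0b10110.  */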
10036 if (TREE_CODE (arg0) == BIT_AND_EXPR
10037 && TREE_CODE (arg1) == BIT_AND_EXPR
10038 && operand_equal_p (TREE_OPERAND (arg0, 0),
10039 TREE_OPERAND (arg1, 0), 0))
10040 {
10041 tree mask0 = TREE_OPERAND (arg0, 1);
10042 tree mask1 = TREE_OPERAND (arg1, 1);
10043 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10044
10045 if (operand_equal_p (tem, mask1, 0))
10046 {
10047 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10048 TREE_OPERAND (arg0, 0), mask1);
10049 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10050 }
10051 }
10052 }
10053
10054 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10055 __complex__ ( x, -y ). This is not the same for SNaNs or if
10056 signed zeros are involved. */
10057 if (!HONOR_SNANS (element_mode (arg0))
10058 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10059 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10060 {
10061 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10062 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10063 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10064 bool arg0rz = false, arg0iz = false;
10065 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10066 || (arg0i && (arg0iz = real_zerop (arg0i))))
10067 {
10068 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10069 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10070 if (arg0rz && arg1i && real_zerop (arg1i))
10071 {
10072 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10073 arg1r ? arg1r
10074 : build1 (REALPART_EXPR, rtype, arg1));
10075 tree ip = arg0i ? arg0i
10076 : build1 (IMAGPART_EXPR, rtype, arg0);
10077 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10078 }
10079 else if (arg0iz && arg1r && real_zerop (arg1r))
10080 {
10081 tree rp = arg0r ? arg0r
10082 : build1 (REALPART_EXPR, rtype, arg0);
10083 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10084 arg1i ? arg1i
10085 : build1 (IMAGPART_EXPR, rtype, arg1));
10086 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10087 }
10088 }
10089 }
10090
10091 /* A - B -> A + (-B) if B is easily negatable. */
10092 if (negate_expr_p (arg1)
10093 && !TYPE_OVERFLOW_SANITIZED (type)
10094 && ((FLOAT_TYPE_P (type)
10095 /* Avoid this transformation if B is a positive REAL_CST. */
10096 && (TREE_CODE (arg1) != REAL_CST
10097 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10098 || INTEGRAL_TYPE_P (type)))
10099 return fold_build2_loc (loc, PLUS_EXPR, type,
10100 fold_convert_loc (loc, type, arg0),
10101 fold_convert_loc (loc, type,
10102 negate_expr (arg1)));
10103
10104 /* Fold &a[i] - &a[j] to i-j. */
10105 if (TREE_CODE (arg0) == ADDR_EXPR
10106 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10107 && TREE_CODE (arg1) == ADDR_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10109 {
10110 tree tem = fold_addr_of_array_ref_difference (loc, type,
10111 TREE_OPERAND (arg0, 0),
10112 TREE_OPERAND (arg1, 0));
10113 if (tem)
10114 return tem;
10115 }
10116
10117 if (FLOAT_TYPE_P (type)
10118 && flag_unsafe_math_optimizations
10119 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10120 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10121 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10122 return tem;
10123
10124 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10125 one. Make sure the type is not saturating and has the signedness of
10126 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10127 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10128 if ((TREE_CODE (arg0) == MULT_EXPR
10129 || TREE_CODE (arg1) == MULT_EXPR)
10130 && !TYPE_SATURATING (type)
10131 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10132 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10133 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10134 {
10135 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10136 if (tem)
10137 return tem;
10138 }
10139
10140 goto associate;
10141
10142 case MULT_EXPR:
10143 /* (-A) * (-B) -> A * B */
10144 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10145 return fold_build2_loc (loc, MULT_EXPR, type,
10146 fold_convert_loc (loc, type,
10147 TREE_OPERAND (arg0, 0)),
10148 fold_convert_loc (loc, type,
10149 negate_expr (arg1)));
10150 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10151 return fold_build2_loc (loc, MULT_EXPR, type,
10152 fold_convert_loc (loc, type,
10153 negate_expr (arg0)),
10154 fold_convert_loc (loc, type,
10155 TREE_OPERAND (arg1, 0)));
10156
10157 if (! FLOAT_TYPE_P (type))
10158 {
10159 /* Transform x * -C into -x * C if x is easily negatable. */
10160 if (TREE_CODE (arg1) == INTEGER_CST
10161 && tree_int_cst_sgn (arg1) == -1
10162 && negate_expr_p (arg0)
10163 && (tem = negate_expr (arg1)) != arg1
10164 && !TREE_OVERFLOW (tem))
10165 return fold_build2_loc (loc, MULT_EXPR, type,
10166 fold_convert_loc (loc, type,
10167 negate_expr (arg0)),
10168 tem);
10169
10170 /* (a * (1 << b)) is (a << b) */
10171 if (TREE_CODE (arg1) == LSHIFT_EXPR
10172 && integer_onep (TREE_OPERAND (arg1, 0)))
10173 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10174 TREE_OPERAND (arg1, 1));
10175 if (TREE_CODE (arg0) == LSHIFT_EXPR
10176 && integer_onep (TREE_OPERAND (arg0, 0)))
10177 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10178 TREE_OPERAND (arg0, 1));
10179
10180 /* (A + A) * C -> A * 2 * C */
10181 if (TREE_CODE (arg0) == PLUS_EXPR
10182 && TREE_CODE (arg1) == INTEGER_CST
10183 && operand_equal_p (TREE_OPERAND (arg0, 0),
10184 TREE_OPERAND (arg0, 1), 0))
10185 return fold_build2_loc (loc, MULT_EXPR, type,
10186 omit_one_operand_loc (loc, type,
10187 TREE_OPERAND (arg0, 0),
10188 TREE_OPERAND (arg0, 1)),
10189 fold_build2_loc (loc, MULT_EXPR, type,
10190 build_int_cst (type, 2), arg1));
10191
10192 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10193 sign-changing only. */
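/* E.g. (unsigned) (x /[ex] 4) * 4 folds to (unsigned) x: the exact
   division followed by the multiplication is an identity, and only
   the sign-changing conversion remains.  */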
10194 if (TREE_CODE (arg1) == INTEGER_CST
10195 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10196 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10197 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10198
10199 strict_overflow_p = false;
10200 if (TREE_CODE (arg1) == INTEGER_CST
10201 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10202 &strict_overflow_p)))
10203 {
10204 if (strict_overflow_p)
10205 fold_overflow_warning (("assuming signed overflow does not "
10206 "occur when simplifying "
10207 "multiplication"),
10208 WARN_STRICT_OVERFLOW_MISC);
10209 return fold_convert_loc (loc, type, tem);
10210 }
10211
10212 /* Optimize z * conj(z) for integer complex numbers. */
10213 if (TREE_CODE (arg0) == CONJ_EXPR
10214 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10215 return fold_mult_zconjz (loc, type, arg1);
10216 if (TREE_CODE (arg1) == CONJ_EXPR
10217 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10218 return fold_mult_zconjz (loc, type, arg0);
10219 }
10220 else
10221 {
10222 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10223 the result for floating point types due to rounding, so it is applied
10224 only if -fassociative-math was specified. */
10225 if (flag_associative_math
10226 && TREE_CODE (arg0) == RDIV_EXPR
10227 && TREE_CODE (arg1) == REAL_CST
10228 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10229 {
10230 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10231 arg1);
10232 if (tem)
10233 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10234 TREE_OPERAND (arg0, 1));
10235 }
10236
10237 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10238 if (operand_equal_p (arg0, arg1, 0))
10239 {
10240 tree tem = fold_strip_sign_ops (arg0);
10241 if (tem != NULL_TREE)
10242 {
10243 tem = fold_convert_loc (loc, type, tem);
10244 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10245 }
10246 }
10247
10248 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10249 This is not the same for NaNs or if signed zeros are
10250 involved. */
10251 if (!HONOR_NANS (arg0)
10252 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10253 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10254 && TREE_CODE (arg1) == COMPLEX_CST
10255 && real_zerop (TREE_REALPART (arg1)))
10256 {
10257 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10258 if (real_onep (TREE_IMAGPART (arg1)))
10259 return
10260 fold_build2_loc (loc, COMPLEX_EXPR, type,
10261 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10262 rtype, arg0)),
10263 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10264 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10265 return
10266 fold_build2_loc (loc, COMPLEX_EXPR, type,
10267 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10268 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10269 rtype, arg0)));
10270 }
10271
10272 /* Optimize z * conj(z) for floating point complex numbers.
10273 Guarded by flag_unsafe_math_optimizations as non-finite
10274 imaginary components don't produce scalar results. */
10275 if (flag_unsafe_math_optimizations
10276 && TREE_CODE (arg0) == CONJ_EXPR
10277 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10278 return fold_mult_zconjz (loc, type, arg1);
10279 if (flag_unsafe_math_optimizations
10280 && TREE_CODE (arg1) == CONJ_EXPR
10281 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10282 return fold_mult_zconjz (loc, type, arg0);
10283
10284 if (flag_unsafe_math_optimizations)
10285 {
10286 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10287 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10288
10289 /* Optimizations of root(...)*root(...). */
10290 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10291 {
10292 tree rootfn, arg;
10293 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10294 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10295
10296 /* Optimize sqrt(x)*sqrt(x) as x. */
10297 if (BUILTIN_SQRT_P (fcode0)
10298 && operand_equal_p (arg00, arg10, 0)
10299 && ! HONOR_SNANS (element_mode (type)))
10300 return arg00;
10301
10302 /* Optimize root(x)*root(y) as root(x*y). */
10303 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10304 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10305 return build_call_expr_loc (loc, rootfn, 1, arg);
10306 }
10307
10308 /* Optimize expN(x)*expN(y) as expN(x+y). */
10309 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10310 {
10311 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10312 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10313 CALL_EXPR_ARG (arg0, 0),
10314 CALL_EXPR_ARG (arg1, 0));
10315 return build_call_expr_loc (loc, expfn, 1, arg);
10316 }
10317
10318 /* Optimizations of pow(...)*pow(...). */
10319 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10320 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10321 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10322 {
10323 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10324 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10325 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10326 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10327
10328 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10329 if (operand_equal_p (arg01, arg11, 0))
10330 {
10331 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10332 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10333 arg00, arg10);
10334 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10335 }
10336
10337 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10338 if (operand_equal_p (arg00, arg10, 0))
10339 {
10340 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10341 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10342 arg01, arg11);
10343 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10344 }
10345 }
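	      /* Editor's illustration (not part of the build): both pow
		 rewrites above assume exact real arithmetic, e.g.
		     pow (2.0, 3.0) * pow (5.0, 3.0) == pow (10.0, 3.0) == 1000.0
		     pow (2.0, 3.0) * pow (2.0, 4.0) == pow (2.0, 7.0) == 128.0
		 Once rounding is honored the two sides can differ in the
		 last ulp, which is why this whole block is gated on
		 flag_unsafe_math_optimizations.  */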
10346
10347 /* Optimize tan(x)*cos(x) as sin(x). */
10348 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10349 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10350 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10351 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10352 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10353 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10354 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10355 CALL_EXPR_ARG (arg1, 0), 0))
10356 {
10357 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10358
10359 if (sinfn != NULL_TREE)
10360 return build_call_expr_loc (loc, sinfn, 1,
10361 CALL_EXPR_ARG (arg0, 0));
10362 }
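	      /* Editor's note: tan (x) == sin (x) / cos (x), so
		 tan (x) * cos (x) == sin (x) wherever cos (x) != 0; at a
		 zero of cos the left side is inf * 0 == NaN while sin (x)
		 is +-1, so this rewrite too is only licensed under
		 flag_unsafe_math_optimizations.  */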
10363
10364 /* Optimize x*pow(x,c) as pow(x,c+1). */
10365 if (fcode1 == BUILT_IN_POW
10366 || fcode1 == BUILT_IN_POWF
10367 || fcode1 == BUILT_IN_POWL)
10368 {
10369 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10370 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10371 if (TREE_CODE (arg11) == REAL_CST
10372 && !TREE_OVERFLOW (arg11)
10373 && operand_equal_p (arg0, arg10, 0))
10374 {
10375 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10376 REAL_VALUE_TYPE c;
10377 tree arg;
10378
10379 c = TREE_REAL_CST (arg11);
10380 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10381 arg = build_real (type, c);
10382 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10383 }
10384 }
10385
10386 /* Optimize pow(x,c)*x as pow(x,c+1). */
10387 if (fcode0 == BUILT_IN_POW
10388 || fcode0 == BUILT_IN_POWF
10389 || fcode0 == BUILT_IN_POWL)
10390 {
10391 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10392 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10393 if (TREE_CODE (arg01) == REAL_CST
10394 && !TREE_OVERFLOW (arg01)
10395 && operand_equal_p (arg1, arg00, 0))
10396 {
10397 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10398 REAL_VALUE_TYPE c;
10399 tree arg;
10400
10401 c = TREE_REAL_CST (arg01);
10402 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10403 arg = build_real (type, c);
10404 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10405 }
10406 }
10407
10408 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10409 if (!in_gimple_form
10410 && optimize
10411 && operand_equal_p (arg0, arg1, 0))
10412 {
10413 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10414
10415 if (powfn)
10416 {
10417 tree arg = build_real (type, dconst2);
10418 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10419 }
10420 }
10421 }
10422 }
10423 goto associate;
10424
10425 case BIT_IOR_EXPR:
10426 bit_ior:
10427 /* Canonicalize (X & C1) | C2. */
10428 if (TREE_CODE (arg0) == BIT_AND_EXPR
10429 && TREE_CODE (arg1) == INTEGER_CST
10430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10431 {
10432 int width = TYPE_PRECISION (type), w;
10433 wide_int c1 = TREE_OPERAND (arg0, 1);
10434 wide_int c2 = arg1;
10435
10436 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10437 if ((c1 & c2) == c1)
10438 return omit_one_operand_loc (loc, type, arg1,
10439 TREE_OPERAND (arg0, 0));
10440
10441 wide_int msk = wi::mask (width, false,
10442 TYPE_PRECISION (TREE_TYPE (arg1)));
10443
10444 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10445 if (msk.and_not (c1 | c2) == 0)
10446 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10447 TREE_OPERAND (arg0, 0), arg1);
10448
10449 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10450 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10451 mode which allows further optimizations. */
10452 c1 &= msk;
10453 c2 &= msk;
10454 wide_int c3 = c1.and_not (c2);
10455 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10456 {
10457 wide_int mask = wi::mask (w, false,
10458 TYPE_PRECISION (type));
10459 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10460 {
10461 c3 = mask;
10462 break;
10463 }
10464 }
10465
10466 if (c3 != c1)
10467 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10468 fold_build2_loc (loc, BIT_AND_EXPR, type,
10469 TREE_OPERAND (arg0, 0),
10470 wide_int_to_tree (type,
10471 c3)),
10472 arg1);
10473 }
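	  /* Editor's sketch of the three (X & C1) | C2 rewrites above, on
	     hypothetical 8-bit operands (illustration only):
		 (C1 & C2) == C1:  (x & 0x0f) | 0xff  -->  0xff, x still
				   evaluated for side effects
		 (C1 | C2) == ~0:  (x & 0xf0) | 0x0f  -->  x | 0x0f
		 minimized C1:	   (x & 0x3f) | 0x0f  -->  (x & 0x30) | 0x0f  */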
10474
10475 /* (X & ~Y) | (~X & Y) is X ^ Y */
10476 if (TREE_CODE (arg0) == BIT_AND_EXPR
10477 && TREE_CODE (arg1) == BIT_AND_EXPR)
10478 {
10479 tree a0, a1, l0, l1, n0, n1;
10480
10481 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10482 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10483
10484 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10485 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10486
10487 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10488 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10489
10490 if ((operand_equal_p (n0, a0, 0)
10491 && operand_equal_p (n1, a1, 0))
10492 || (operand_equal_p (n0, a1, 0)
10493 && operand_equal_p (n1, a0, 0)))
10494 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10495 }
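	  /* Editor's note: per bit, (x & ~y) | (~x & y) is 1 exactly when
	     the bits of x and y differ, which is the definition of x ^ y;
	     both operand orders are checked since the match is symmetric.  */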
10496
10497 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10498 if (t1 != NULL_TREE)
10499 return t1;
10500
10501 /* See if this can be simplified into a rotate first. If that
10502 is unsuccessful continue in the association code. */
10503 goto bit_rotate;
10504
10505 case BIT_XOR_EXPR:
10506 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10507 if (TREE_CODE (arg0) == BIT_AND_EXPR
10508 && INTEGRAL_TYPE_P (type)
10509 && integer_onep (TREE_OPERAND (arg0, 1))
10510 && integer_onep (arg1))
10511 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10512 build_zero_cst (TREE_TYPE (arg0)));
10513
10514 /* See if this can be simplified into a rotate first. If that
10515 is unsuccessful continue in the association code. */
10516 goto bit_rotate;
10517
10518 case BIT_AND_EXPR:
10519 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10520 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10521 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10522 || (TREE_CODE (arg0) == EQ_EXPR
10523 && integer_zerop (TREE_OPERAND (arg0, 1))))
10524 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10525 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10526
10527 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10528 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10529 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10530 || (TREE_CODE (arg1) == EQ_EXPR
10531 && integer_zerop (TREE_OPERAND (arg1, 1))))
10532 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10533 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10534
10535 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10536 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10537 && INTEGRAL_TYPE_P (type)
10538 && integer_onep (TREE_OPERAND (arg0, 1))
10539 && integer_onep (arg1))
10540 {
10541 tree tem2;
10542 tem = TREE_OPERAND (arg0, 0);
10543 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10544 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10545 tem, tem2);
10546 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10547 build_zero_cst (TREE_TYPE (tem)));
10548 }
10549 /* Fold ~X & 1 as (X & 1) == 0. */
10550 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10551 && INTEGRAL_TYPE_P (type)
10552 && integer_onep (arg1))
10553 {
10554 tree tem2;
10555 tem = TREE_OPERAND (arg0, 0);
10556 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10557 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10558 tem, tem2);
10559 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10560 build_zero_cst (TREE_TYPE (tem)));
10561 }
10562 /* Fold !X & 1 as X == 0. */
10563 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10564 && integer_onep (arg1))
10565 {
10566 tem = TREE_OPERAND (arg0, 0);
10567 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10568 build_zero_cst (TREE_TYPE (tem)));
10569 }
10570
10571 /* Fold (X ^ Y) & Y as ~X & Y. */
10572 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10573 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10574 {
10575 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10576 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10577 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10578 fold_convert_loc (loc, type, arg1));
10579 }
10580 /* Fold (X ^ Y) & X as ~Y & X. */
10581 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10582 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10583 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10584 {
10585 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10586 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10587 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10588 fold_convert_loc (loc, type, arg1));
10589 }
10590 /* Fold X & (X ^ Y) as X & ~Y. */
10591 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10592 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10593 {
10594 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10595 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10596 fold_convert_loc (loc, type, arg0),
10597 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10598 }
10599 /* Fold X & (Y ^ X) as ~Y & X. */
10600 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10601 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10602 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10603 {
10604 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10605 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10606 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10607 fold_convert_loc (loc, type, arg0));
10608 }
10609
10610 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10611 multiple of 1 << CST. */
10612 if (TREE_CODE (arg1) == INTEGER_CST)
10613 {
10614 wide_int cst1 = arg1;
10615 wide_int ncst1 = -cst1;
10616 if ((cst1 & ncst1) == ncst1
10617 && multiple_of_p (type, arg0,
10618 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10619 return fold_convert_loc (loc, type, arg0);
10620 }
10621
10622 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10623 bits from CST2. */
10624 if (TREE_CODE (arg1) == INTEGER_CST
10625 && TREE_CODE (arg0) == MULT_EXPR
10626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10627 {
10628 wide_int warg1 = arg1;
10629 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10630
10631 if (masked == 0)
10632 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10633 arg0, arg1);
10634 else if (masked != warg1)
10635 {
10636 /* Avoid the transform if arg1 is a mask of some
10637 mode which allows further optimizations. */
10638 int pop = wi::popcount (warg1);
10639 if (!(pop >= BITS_PER_UNIT
10640 && exact_log2 (pop) != -1
10641 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10642 return fold_build2_loc (loc, code, type, op0,
10643 wide_int_to_tree (type, masked));
10644 }
10645 }
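	  /* Editor's worked examples: x * 8 has three known trailing zero
	     bits, so (x * 8) & 7 folds to 0 and (x * 4) & 6 drops the dead
	     low bit to become (x * 4) & 4.  The popcount test skips masks
	     such as 0xff that are a whole mode's worth of ones, since later
	     passes can match those as zero-extensions.  */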
10646
10647 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10648 ((A & N) + B) & M -> (A + B) & M
10649 Similarly if (N & M) == 0,
10650 ((A | N) + B) & M -> (A + B) & M
10651 and for - instead of + (or unary - instead of +)
10652 and/or ^ instead of |.
10653 If B is constant and (B & M) == 0, fold into A & M. */
10654 if (TREE_CODE (arg1) == INTEGER_CST)
10655 {
10656 wide_int cst1 = arg1;
10657 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10658 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10659 && (TREE_CODE (arg0) == PLUS_EXPR
10660 || TREE_CODE (arg0) == MINUS_EXPR
10661 || TREE_CODE (arg0) == NEGATE_EXPR)
10662 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10663 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10664 {
10665 tree pmop[2];
10666 int which = 0;
10667 wide_int cst0;
10668
10669 /* Now we know that arg0 is (C + D) or (C - D) or
10670 -C and arg1 (M) is == (1LL << cst) - 1.
10671 Store C into PMOP[0] and D into PMOP[1]. */
10672 pmop[0] = TREE_OPERAND (arg0, 0);
10673 pmop[1] = NULL;
10674 if (TREE_CODE (arg0) != NEGATE_EXPR)
10675 {
10676 pmop[1] = TREE_OPERAND (arg0, 1);
10677 which = 1;
10678 }
10679
10680 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10681 which = -1;
10682
10683 for (; which >= 0; which--)
10684 switch (TREE_CODE (pmop[which]))
10685 {
10686 case BIT_AND_EXPR:
10687 case BIT_IOR_EXPR:
10688 case BIT_XOR_EXPR:
10689 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10690 != INTEGER_CST)
10691 break;
10692 cst0 = TREE_OPERAND (pmop[which], 1);
10693 cst0 &= cst1;
10694 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10695 {
10696 if (cst0 != cst1)
10697 break;
10698 }
10699 else if (cst0 != 0)
10700 break;
10701 /* If C or D is of the form (A & N) where
10702 (N & M) == M, or of the form (A | N) or
10703 (A ^ N) where (N & M) == 0, replace it with A. */
10704 pmop[which] = TREE_OPERAND (pmop[which], 0);
10705 break;
10706 case INTEGER_CST:
10707 /* If C or D is a N where (N & M) == 0, it can be
10708 omitted (assumed 0). */
10709 if ((TREE_CODE (arg0) == PLUS_EXPR
10710 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10711 && (cst1 & pmop[which]) == 0)
10712 pmop[which] = NULL;
10713 break;
10714 default:
10715 break;
10716 }
10717
10718 /* Only build anything new if we optimized one or both arguments
10719 above. */
10720 if (pmop[0] != TREE_OPERAND (arg0, 0)
10721 || (TREE_CODE (arg0) != NEGATE_EXPR
10722 && pmop[1] != TREE_OPERAND (arg0, 1)))
10723 {
10724 tree utype = TREE_TYPE (arg0);
10725 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10726 {
10727 /* Perform the operations in a type that has defined
10728 overflow behavior. */
10729 utype = unsigned_type_for (TREE_TYPE (arg0));
10730 if (pmop[0] != NULL)
10731 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10732 if (pmop[1] != NULL)
10733 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10734 }
10735
10736 if (TREE_CODE (arg0) == NEGATE_EXPR)
10737 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10738 else if (TREE_CODE (arg0) == PLUS_EXPR)
10739 {
10740 if (pmop[0] != NULL && pmop[1] != NULL)
10741 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10742 pmop[0], pmop[1]);
10743 else if (pmop[0] != NULL)
10744 tem = pmop[0];
10745 else if (pmop[1] != NULL)
10746 tem = pmop[1];
10747 else
10748 return build_int_cst (type, 0);
10749 }
10750 else if (pmop[0] == NULL)
10751 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10752 else
10753 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10754 pmop[0], pmop[1]);
10755 /* TEM is now the new binary +, - or unary - replacement. */
10756 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10757 fold_convert_loc (loc, utype, arg1));
10758 return fold_convert_loc (loc, type, tem);
10759 }
10760 }
10761 }
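	  /* Editor's sanity check for the M == (1LL << cst) - 1 rewrites
	     above; illustration only, never compiled.  Unsigned arithmetic
	     is used so that overflow wraps, as the transform assumes.  */
#if 0
	  unsigned a = 0xdeadbeefu, b = 0x12345678u;
	  /* (N & M) == M lets the AND be dropped.  */
	  gcc_assert ((((a & 0xffu) + b) & 0xffu) == ((a + b) & 0xffu));
	  /* (N & M) == 0 lets the OR be dropped.  */
	  gcc_assert ((((a | 0xffffff00u) + b) & 0xffu) == ((a + b) & 0xffu));
	  /* A constant B with (B & M) == 0 is omitted entirely.  */
	  gcc_assert (((a + 0x300u) & 0xffu) == (a & 0xffu));
#endif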
10762
10763 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10764 if (t1 != NULL_TREE)
10765 return t1;
10766 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10767 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10768 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10769 {
10770 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10771
10772 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10773 if (mask == -1)
10774 return
10775 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10776 }
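	  /* Editor's note: 0377 is 0xff, and converting from unsigned char
	     zero-extends, so every bit the mask could clear is already zero
	     and the AND is a no-op; the wide_int::from call above checks the
	     mask against the precision of the unconverted operand.  */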
10777
10778 goto associate;
10779
10780 case RDIV_EXPR:
10781 /* Don't touch a floating-point divide by zero unless the mode
10782 of the constant can represent infinity. */
10783 if (TREE_CODE (arg1) == REAL_CST
10784 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10785 && real_zerop (arg1))
10786 return NULL_TREE;
10787
10788 /* (-A) / (-B) -> A / B */
10789 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10790 return fold_build2_loc (loc, RDIV_EXPR, type,
10791 TREE_OPERAND (arg0, 0),
10792 negate_expr (arg1));
10793 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10794 return fold_build2_loc (loc, RDIV_EXPR, type,
10795 negate_expr (arg0),
10796 TREE_OPERAND (arg1, 0));
10797
10798 /* Convert A/B/C to A/(B*C). */
10799 if (flag_reciprocal_math
10800 && TREE_CODE (arg0) == RDIV_EXPR)
10801 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10802 fold_build2_loc (loc, MULT_EXPR, type,
10803 TREE_OPERAND (arg0, 1), arg1));
10804
10805 /* Convert A/(B/C) to (A/B)*C. */
10806 if (flag_reciprocal_math
10807 && TREE_CODE (arg1) == RDIV_EXPR)
10808 return fold_build2_loc (loc, MULT_EXPR, type,
10809 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10810 TREE_OPERAND (arg1, 0)),
10811 TREE_OPERAND (arg1, 1));
10812
10813 /* Convert C1/(X*C2) into (C1/C2)/X. */
10814 if (flag_reciprocal_math
10815 && TREE_CODE (arg1) == MULT_EXPR
10816 && TREE_CODE (arg0) == REAL_CST
10817 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10818 {
10819 tree tem = const_binop (RDIV_EXPR, arg0,
10820 TREE_OPERAND (arg1, 1));
10821 if (tem)
10822 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10823 TREE_OPERAND (arg1, 0));
10824 }
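      /* Editor's illustration: with -freciprocal-math the three rewrites
	 above trade divisions for multiplications, e.g.
	     6.0 / x / 3.0	-->  6.0 / (x * 3.0)
	     x / (y / 4.0)	-->  (x / y) * 4.0
	     6.0 / (x * 3.0)	-->  2.0 / x
	 The regrouping can change how the result rounds, hence the
	 flag_reciprocal_math guard.  */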
10825
10826 if (flag_unsafe_math_optimizations)
10827 {
10828 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10829 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10830
10831 /* Optimize sin(x)/cos(x) as tan(x). */
10832 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10833 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10834 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10835 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10836 CALL_EXPR_ARG (arg1, 0), 0))
10837 {
10838 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10839
10840 if (tanfn != NULL_TREE)
10841 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10842 }
10843
10844 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10845 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10846 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10847 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10848 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10849 CALL_EXPR_ARG (arg1, 0), 0))
10850 {
10851 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10852
10853 if (tanfn != NULL_TREE)
10854 {
10855 tree tmp = build_call_expr_loc (loc, tanfn, 1,
10856 CALL_EXPR_ARG (arg0, 0));
10857 return fold_build2_loc (loc, RDIV_EXPR, type,
10858 build_real (type, dconst1), tmp);
10859 }
10860 }
10861
10862 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10863 NaNs or Infinities. */
10864 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10865 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10866 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10867 {
10868 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10869 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10870
10871 if (! HONOR_NANS (arg00)
10872 && ! HONOR_INFINITIES (element_mode (arg00))
10873 && operand_equal_p (arg00, arg01, 0))
10874 {
10875 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10876
10877 if (cosfn != NULL_TREE)
10878 return build_call_expr_loc (loc, cosfn, 1, arg00);
10879 }
10880 }
10881
10882 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10883 NaNs or Infinities. */
10884 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10885 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10886 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10887 {
10888 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10889 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10890
10891 if (! HONOR_NANS (arg00)
10892 && ! HONOR_INFINITIES (element_mode (arg00))
10893 && operand_equal_p (arg00, arg01, 0))
10894 {
10895 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10896
10897 if (cosfn != NULL_TREE)
10898 {
10899 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
10900 return fold_build2_loc (loc, RDIV_EXPR, type,
10901 build_real (type, dconst1),
10902 tmp);
10903 }
10904 }
10905 }
10906
10907 /* Optimize pow(x,c)/x as pow(x,c-1). */
10908 if (fcode0 == BUILT_IN_POW
10909 || fcode0 == BUILT_IN_POWF
10910 || fcode0 == BUILT_IN_POWL)
10911 {
10912 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10913 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10914 if (TREE_CODE (arg01) == REAL_CST
10915 && !TREE_OVERFLOW (arg01)
10916 && operand_equal_p (arg1, arg00, 0))
10917 {
10918 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10919 REAL_VALUE_TYPE c;
10920 tree arg;
10921
10922 c = TREE_REAL_CST (arg01);
10923 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10924 arg = build_real (type, c);
10925 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10926 }
10927 }
10928
10929 /* Optimize a/root(b/c) into a*root(c/b). */
10930 if (BUILTIN_ROOT_P (fcode1))
10931 {
10932 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10933
10934 if (TREE_CODE (rootarg) == RDIV_EXPR)
10935 {
10936 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10937 tree b = TREE_OPERAND (rootarg, 0);
10938 tree c = TREE_OPERAND (rootarg, 1);
10939
10940 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
10941
10942 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
10943 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
10944 }
10945 }
10946
10947 /* Optimize x/expN(y) into x*expN(-y). */
10948 if (BUILTIN_EXPONENT_P (fcode1))
10949 {
10950 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10951 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10952 arg1 = build_call_expr_loc (loc,
10953 expfn, 1,
10954 fold_convert_loc (loc, type, arg));
10955 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10956 }
10957
10958 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10959 if (fcode1 == BUILT_IN_POW
10960 || fcode1 == BUILT_IN_POWF
10961 || fcode1 == BUILT_IN_POWL)
10962 {
10963 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10964 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10965 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10966 tree neg11 = fold_convert_loc (loc, type,
10967 negate_expr (arg11));
10968 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
10969 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10970 }
10971 }
10972 return NULL_TREE;
10973
10974 case TRUNC_DIV_EXPR:
10975 /* Optimize (X & (-A)) / A where A is a power of 2,
10976 to X >> log2(A) */
10977 if (TREE_CODE (arg0) == BIT_AND_EXPR
10978 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10979 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10980 {
10981 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10982 arg1, TREE_OPERAND (arg0, 1));
10983 if (sum && integer_zerop (sum)) {
10984 tree pow2 = build_int_cst (integer_type_node,
10985 wi::exact_log2 (arg1));
10986 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10987 TREE_OPERAND (arg0, 0), pow2);
10988 }
10989 }
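      /* Editor's worked example: for A == 8, X & -8 clears the low three
	 bits, so the quotient is exact and matches an arithmetic shift
	 even for negative values: X == -20 gives (X & -8) == -24, and
	 both -24 / 8 and -24 >> 3 are -3 (two's complement assumed).  */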
10990
10991 /* Fall through */
10992
10993 case FLOOR_DIV_EXPR:
10994 /* Simplify A / (B << N) where A and B are positive and B is
10995 a power of 2, to A >> (N + log2(B)). */
10996 strict_overflow_p = false;
10997 if (TREE_CODE (arg1) == LSHIFT_EXPR
10998 && (TYPE_UNSIGNED (type)
10999 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11000 {
11001 tree sval = TREE_OPERAND (arg1, 0);
11002 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11003 {
11004 tree sh_cnt = TREE_OPERAND (arg1, 1);
11005 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11006 wi::exact_log2 (sval));
11007
11008 if (strict_overflow_p)
11009 fold_overflow_warning (("assuming signed overflow does not "
11010 "occur when simplifying A / (B << N)"),
11011 WARN_STRICT_OVERFLOW_MISC);
11012
11013 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11014 sh_cnt, pow2);
11015 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11016 fold_convert_loc (loc, type, arg0), sh_cnt);
11017 }
11018 }
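      /* Editor's worked example: 100u / (4u << 2) == 100u / 16 == 6, and
	 N + log2 (B) == 2 + 2, so 100u >> 4 == 6 as well.  The
	 nonnegativity requirement matters because the identity fails for
	 negative dividends under truncating division: -1 / 16 == 0 but
	 -1 >> 4 == -1.  */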
11019
11020 /* Fall through */
11021
11022 case ROUND_DIV_EXPR:
11023 case CEIL_DIV_EXPR:
11024 case EXACT_DIV_EXPR:
11025 if (integer_zerop (arg1))
11026 return NULL_TREE;
11027
11028 /* Convert -A / -B to A / B when the type is signed and overflow is
11029 undefined. */
11030 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11031 && TREE_CODE (arg0) == NEGATE_EXPR
11032 && negate_expr_p (arg1))
11033 {
11034 if (INTEGRAL_TYPE_P (type))
11035 fold_overflow_warning (("assuming signed overflow does not occur "
11036 "when distributing negation across "
11037 "division"),
11038 WARN_STRICT_OVERFLOW_MISC);
11039 return fold_build2_loc (loc, code, type,
11040 fold_convert_loc (loc, type,
11041 TREE_OPERAND (arg0, 0)),
11042 fold_convert_loc (loc, type,
11043 negate_expr (arg1)));
11044 }
11045 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11046 && TREE_CODE (arg1) == NEGATE_EXPR
11047 && negate_expr_p (arg0))
11048 {
11049 if (INTEGRAL_TYPE_P (type))
11050 fold_overflow_warning (("assuming signed overflow does not occur "
11051 "when distributing negation across "
11052 "division"),
11053 WARN_STRICT_OVERFLOW_MISC);
11054 return fold_build2_loc (loc, code, type,
11055 fold_convert_loc (loc, type,
11056 negate_expr (arg0)),
11057 fold_convert_loc (loc, type,
11058 TREE_OPERAND (arg1, 0)));
11059 }
11060
11061 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11062 operation, EXACT_DIV_EXPR.
11063
11064 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11065 At one time others generated faster code; it's not clear if they do
11066 after the last round of changes to the DIV code in expmed.c. */
11067 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11068 && multiple_of_p (type, arg0, arg1))
11069 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11070
11071 strict_overflow_p = false;
11072 if (TREE_CODE (arg1) == INTEGER_CST
11073 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11074 &strict_overflow_p)))
11075 {
11076 if (strict_overflow_p)
11077 fold_overflow_warning (("assuming signed overflow does not occur "
11078 "when simplifying division"),
11079 WARN_STRICT_OVERFLOW_MISC);
11080 return fold_convert_loc (loc, type, tem);
11081 }
11082
11083 return NULL_TREE;
11084
11085 case CEIL_MOD_EXPR:
11086 case FLOOR_MOD_EXPR:
11087 case ROUND_MOD_EXPR:
11088 case TRUNC_MOD_EXPR:
11089 strict_overflow_p = false;
11090 if (TREE_CODE (arg1) == INTEGER_CST
11091 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11092 &strict_overflow_p)))
11093 {
11094 if (strict_overflow_p)
11095 fold_overflow_warning (("assuming signed overflow does not occur "
11096 "when simplifying modulus"),
11097 WARN_STRICT_OVERFLOW_MISC);
11098 return fold_convert_loc (loc, type, tem);
11099 }
11100
11101 return NULL_TREE;
11102
11103 case LROTATE_EXPR:
11104 case RROTATE_EXPR:
11105 case RSHIFT_EXPR:
11106 case LSHIFT_EXPR:
11107 /* Since a negative shift count is not well-defined,
11108 don't try to compute it in the compiler. */
11109 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11110 return NULL_TREE;
11111
11112 prec = element_precision (type);
11113
11114 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11115 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11116 && tree_to_uhwi (arg1) < prec
11117 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11118 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11119 {
11120 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11121 + tree_to_uhwi (arg1));
11122
11123 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11124 being well-defined. */
11125 if (low >= prec)
11126 {
11127 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11128 low = low % prec;
11129 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11130 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11131 TREE_OPERAND (arg0, 0));
11132 else
11133 low = prec - 1;
11134 }
11135
11136 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11137 build_int_cst (TREE_TYPE (arg1), low));
11138 }
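      /* Editor's worked examples for the count folding above, on a 32-bit
	 type: (x >> 3) >> 5 becomes x >> 8; rotate counts wrap, so
	 rotating x left by 20 twice becomes a rotate left by 8; and an
	 unsigned (x << 20) << 20 shifts every bit out, so it folds to zero
	 while still evaluating x for side effects.  */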
11139
11140 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11141 into x & ((unsigned)-1 >> c) for unsigned types. */
11142 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11143 || (TYPE_UNSIGNED (type)
11144 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11145 && tree_fits_uhwi_p (arg1)
11146 && tree_to_uhwi (arg1) < prec
11147 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11148 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11149 {
11150 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11151 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11152 tree lshift;
11153 tree arg00;
11154
11155 if (low0 == low1)
11156 {
11157 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11158
11159 lshift = build_minus_one_cst (type);
11160 lshift = const_binop (code, lshift, arg1);
11161
11162 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11163 }
11164 }
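      /* Editor's worked example: with c == 4 on a 32-bit type,
	 (x >> 4) << 4 == x & 0xfffffff0, and for unsigned x,
	 (x << 4) >> 4 == x & 0x0fffffff; the mask is simply -1 shifted
	 with the same code and count, built by the const_binop above.  */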
11165
11166 /* If we have a rotate of a bit operation with the rotate count and
11167 the second operand of the bit operation both constant,
11168 permute the two operations. */
11169 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11170 && (TREE_CODE (arg0) == BIT_AND_EXPR
11171 || TREE_CODE (arg0) == BIT_IOR_EXPR
11172 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11174 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11175 fold_build2_loc (loc, code, type,
11176 TREE_OPERAND (arg0, 0), arg1),
11177 fold_build2_loc (loc, code, type,
11178 TREE_OPERAND (arg0, 1), arg1));
11179
11180 /* Two consecutive rotates adding up to some integer
11181 multiple of the precision of the type can be ignored. */
11182 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11183 && TREE_CODE (arg0) == RROTATE_EXPR
11184 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11185 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11186 prec) == 0)
11187 return TREE_OPERAND (arg0, 0);
11188
11189 return NULL_TREE;
11190
11191 case MIN_EXPR:
11192 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11193 if (tem)
11194 return tem;
11195 goto associate;
11196
11197 case MAX_EXPR:
11198 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11199 if (tem)
11200 return tem;
11201 goto associate;
11202
11203 case TRUTH_ANDIF_EXPR:
11204 /* Note that the operands of this must be ints
11205 and their values must be 0 or 1.
11206 ("true" is a fixed value perhaps depending on the language.) */
11207 /* If first arg is constant zero, return it. */
11208 if (integer_zerop (arg0))
11209 return fold_convert_loc (loc, type, arg0);
11210 case TRUTH_AND_EXPR:
11211 /* If either arg is constant true, drop it. */
11212 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11213 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11214 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11215 /* Preserve sequence points. */
11216 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11217 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11218 /* If second arg is constant zero, result is zero, but first arg
11219 must be evaluated. */
11220 if (integer_zerop (arg1))
11221 return omit_one_operand_loc (loc, type, arg1, arg0);
11222 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11223 case will be handled here. */
11224 if (integer_zerop (arg0))
11225 return omit_one_operand_loc (loc, type, arg0, arg1);
11226
11227 /* !X && X is always false. */
11228 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11229 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11230 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11231 /* X && !X is always false. */
11232 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11233 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11234 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11235
11236 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11237 means A >= Y && A != MAX, but in this case we know that
11238 A < X <= MAX. */
11239
11240 if (!TREE_SIDE_EFFECTS (arg0)
11241 && !TREE_SIDE_EFFECTS (arg1))
11242 {
11243 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11244 if (tem && !operand_equal_p (tem, arg0, 0))
11245 return fold_build2_loc (loc, code, type, tem, arg1);
11246
11247 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11248 if (tem && !operand_equal_p (tem, arg1, 0))
11249 return fold_build2_loc (loc, code, type, arg0, tem);
11250 }
11251
11252 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11253 != NULL_TREE)
11254 return tem;
11255
11256 return NULL_TREE;
11257
11258 case TRUTH_ORIF_EXPR:
11259 /* Note that the operands of this must be ints
11260 and their values must be 0 or true.
11261 ("true" is a fixed value perhaps depending on the language.) */
11262 /* If first arg is constant true, return it. */
11263 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11264 return fold_convert_loc (loc, type, arg0);
11265 case TRUTH_OR_EXPR:
11266 /* If either arg is constant zero, drop it. */
11267 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11268 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11269 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11270 /* Preserve sequence points. */
11271 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11272 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11273 /* If second arg is constant true, result is true, but we must
11274 evaluate first arg. */
11275 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11276 return omit_one_operand_loc (loc, type, arg1, arg0);
11277 /* Likewise for first arg, but note this only occurs here for
11278 TRUTH_OR_EXPR. */
11279 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11280 return omit_one_operand_loc (loc, type, arg0, arg1);
11281
11282 /* !X || X is always true. */
11283 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11285 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11286 /* X || !X is always true. */
11287 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11288 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11289 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11290
11291 /* (X && !Y) || (!X && Y) is X ^ Y */
11292 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11293 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11294 {
11295 tree a0, a1, l0, l1, n0, n1;
11296
11297 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11298 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11299
11300 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11301 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11302
11303 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11304 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11305
11306 if ((operand_equal_p (n0, a0, 0)
11307 && operand_equal_p (n1, a1, 0))
11308 || (operand_equal_p (n0, a1, 0)
11309 && operand_equal_p (n1, a0, 0)))
11310 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11311 }
11312
11313 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11314 != NULL_TREE)
11315 return tem;
11316
11317 return NULL_TREE;
11318
11319 case TRUTH_XOR_EXPR:
11320 /* If the second arg is constant zero, drop it. */
11321 if (integer_zerop (arg1))
11322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11323 /* If the second arg is constant true, this is a logical inversion. */
11324 if (integer_onep (arg1))
11325 {
11326 tem = invert_truthvalue_loc (loc, arg0);
11327 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11328 }
11329 /* Identical arguments cancel to zero. */
11330 if (operand_equal_p (arg0, arg1, 0))
11331 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11332
11333 /* !X ^ X is always true. */
11334 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11335 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11336 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11337
11338 /* X ^ !X is always true. */
11339 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11340 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11341 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11342
11343 return NULL_TREE;
11344
11345 case EQ_EXPR:
11346 case NE_EXPR:
11347 STRIP_NOPS (arg0);
11348 STRIP_NOPS (arg1);
11349
11350 tem = fold_comparison (loc, code, type, op0, op1);
11351 if (tem != NULL_TREE)
11352 return tem;
11353
11354 /* bool_var != 0 becomes bool_var. */
11355 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11356 && code == NE_EXPR)
11357 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11358
11359 /* bool_var == 1 becomes bool_var. */
11360 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11361 && code == EQ_EXPR)
11362 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11363
11364 /* bool_var != 1 becomes !bool_var. */
11365 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11366 && code == NE_EXPR)
11367 return fold_convert_loc (loc, type,
11368 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11369 TREE_TYPE (arg0), arg0));
11370
11371 /* bool_var == 0 becomes !bool_var. */
11372 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11373 && code == EQ_EXPR)
11374 return fold_convert_loc (loc, type,
11375 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11376 TREE_TYPE (arg0), arg0));
11377
11378 /* !exp != 0 becomes !exp */
11379 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11380 && code == NE_EXPR)
11381 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11382
11383 /* If this is an equality comparison of the address of two non-weak,
11384 unaliased symbols neither of which is extern (since we do not
11385 have access to attributes for externs), then we know the result. */
11386 if (TREE_CODE (arg0) == ADDR_EXPR
11387 && DECL_P (TREE_OPERAND (arg0, 0))
11388 && TREE_CODE (arg1) == ADDR_EXPR
11389 && DECL_P (TREE_OPERAND (arg1, 0)))
11390 {
11391 int equal;
11392
11393 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
11394 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
11395 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
11396 ->equal_address_to (symtab_node::get_create
11397 (TREE_OPERAND (arg1, 0)));
11398 else
11399 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11400 if (equal != 2)
11401 return constant_boolean_node (equal
11402 ? code == EQ_EXPR : code != EQ_EXPR,
11403 type);
11404 }
11405
11406 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11407 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11408 && TREE_CODE (arg1) == INTEGER_CST
11409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11410 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11411 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
11412 fold_convert_loc (loc,
11413 TREE_TYPE (arg0),
11414 arg1),
11415 TREE_OPERAND (arg0, 1)));
11416
11417 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
11418 if ((TREE_CODE (arg0) == PLUS_EXPR
11419 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
11420 || TREE_CODE (arg0) == MINUS_EXPR)
11421 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11422 0)),
11423 arg1, 0)
11424 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11425 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11426 {
11427 tree val = TREE_OPERAND (arg0, 1);
11428 return omit_two_operands_loc (loc, type,
11429 fold_build2_loc (loc, code, type,
11430 val,
11431 build_int_cst (TREE_TYPE (val),
11432 0)),
11433 TREE_OPERAND (arg0, 0), arg1);
11434 }
11435
11436 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
11437 if (TREE_CODE (arg0) == MINUS_EXPR
11438 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
11439 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11440 1)),
11441 arg1, 0)
11442 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
11443 {
11444 return omit_two_operands_loc (loc, type,
11445 code == NE_EXPR
11446 ? boolean_true_node : boolean_false_node,
11447 TREE_OPERAND (arg0, 1), arg1);
11448 }
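      /* Editor's note on why oddness of C suffices: modulo 2**prec,
	 C - X == X is equivalent to C == 2 * X, and 2 * X is always even,
	 so an odd C can never compare equal; NE therefore folds to true
	 and EQ to false, with both operands still evaluated.  */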
11449
11450 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11451 if (TREE_CODE (arg0) == ABS_EXPR
11452 && (integer_zerop (arg1) || real_zerop (arg1)))
11453 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
11454
11455 /* If this is an EQ or NE comparison with zero and ARG0 is
11456 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11457 two operations, but the latter can be done in one less insn
11458 on machines that have only two-operand insns or on which a
11459 constant cannot be the first operand. */
11460 if (TREE_CODE (arg0) == BIT_AND_EXPR
11461 && integer_zerop (arg1))
11462 {
11463 tree arg00 = TREE_OPERAND (arg0, 0);
11464 tree arg01 = TREE_OPERAND (arg0, 1);
11465 if (TREE_CODE (arg00) == LSHIFT_EXPR
11466 && integer_onep (TREE_OPERAND (arg00, 0)))
11467 {
11468 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11469 arg01, TREE_OPERAND (arg00, 1));
11470 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11471 build_int_cst (TREE_TYPE (arg0), 1));
11472 return fold_build2_loc (loc, code, type,
11473 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11474 arg1);
11475 }
11476 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11477 && integer_onep (TREE_OPERAND (arg01, 0)))
11478 {
11479 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11480 arg00, TREE_OPERAND (arg01, 1));
11481 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11482 build_int_cst (TREE_TYPE (arg0), 1));
11483 return fold_build2_loc (loc, code, type,
11484 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11485 arg1);
11486 }
11487 }
11488
11489 /* If this is an NE or EQ comparison of zero against the result of a
11490 signed MOD operation whose second operand is a power of 2, make
11491 the MOD operation unsigned since it is simpler and equivalent. */
11492 if (integer_zerop (arg1)
11493 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11494 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11495 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11496 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11497 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11499 {
11500 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11501 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
11502 fold_convert_loc (loc, newtype,
11503 TREE_OPERAND (arg0, 0)),
11504 fold_convert_loc (loc, newtype,
11505 TREE_OPERAND (arg0, 1)));
11506
11507 return fold_build2_loc (loc, code, type, newmod,
11508 fold_convert_loc (loc, newtype, arg1));
11509 }
11510
11511 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11512 C1 is a valid shift constant, and C2 is a power of two, i.e.
11513 a single bit. */
11514 if (TREE_CODE (arg0) == BIT_AND_EXPR
11515 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11516 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11517 == INTEGER_CST
11518 && integer_pow2p (TREE_OPERAND (arg0, 1))
11519 && integer_zerop (arg1))
11520 {
11521 tree itype = TREE_TYPE (arg0);
11522 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11523 prec = TYPE_PRECISION (itype);
11524
11525 /* Check for a valid shift count. */
11526 if (wi::ltu_p (arg001, prec))
11527 {
11528 tree arg01 = TREE_OPERAND (arg0, 1);
11529 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11530 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11531 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11532 can be rewritten as (X & (C2 << C1)) != 0. */
11533 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11534 {
11535 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11536 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11537 return fold_build2_loc (loc, code, type, tem,
11538 fold_convert_loc (loc, itype, arg1));
11539 }
11540 /* Otherwise, for signed (arithmetic) shifts,
11541 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11542 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11543 else if (!TYPE_UNSIGNED (itype))
11544 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11545 arg000, build_int_cst (itype, 0));
11546 /* Otherwise, for unsigned (logical) shifts,
11547 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11548 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11549 else
11550 return omit_one_operand_loc (loc, type,
11551 code == EQ_EXPR ? integer_one_node
11552 : integer_zero_node,
11553 arg000);
11554 }
11555 }
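	  /* Editor's worked examples for the three outcomes above, with X
	     a 32-bit int: ((X >> 3) & 4) != 0 tests bit 5, so it becomes
	     (X & 0x20) != 0; in ((X >> 30) & 4) the constant C2 << C1
	     overflows, and an arithmetic shift makes that bit a copy of
	     the sign bit, so the test becomes X < 0; with X unsigned the
	     bit is always zero and the test folds to a constant.  */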
11556
11557 /* If we have (A & C) == C where C is a power of 2, convert this into
11558 (A & C) != 0. Similarly for NE_EXPR. */
11559 if (TREE_CODE (arg0) == BIT_AND_EXPR
11560 && integer_pow2p (TREE_OPERAND (arg0, 1))
11561 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11562 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11563 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
11564 integer_zero_node));
11565
11566 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11567 bit, then fold the expression into A < 0 or A >= 0. */
11568 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
11569 if (tem)
11570 return tem;
11571
11572 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11573 Similarly for NE_EXPR. */
11574 if (TREE_CODE (arg0) == BIT_AND_EXPR
11575 && TREE_CODE (arg1) == INTEGER_CST
11576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11577 {
11578 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
11579 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11580 TREE_OPERAND (arg0, 1));
11581 tree dandnotc
11582 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11583 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
11584 notc);
11585 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11586 if (integer_nonzerop (dandnotc))
11587 return omit_one_operand_loc (loc, type, rslt, arg0);
11588 }
11589
11590 /* If this is a comparison of a field, we may be able to simplify it. */
11591 if ((TREE_CODE (arg0) == COMPONENT_REF
11592 || TREE_CODE (arg0) == BIT_FIELD_REF)
11593 /* Handle the constant case even without -O
11594 to make sure the warnings are given. */
11595 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11596 {
11597 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11598 if (t1)
11599 return t1;
11600 }
11601
11602 /* Optimize comparisons of strlen vs zero to a compare of the
11603 first character of the string vs zero. To wit,
11604 strlen(ptr) == 0 => *ptr == 0
11605 strlen(ptr) != 0 => *ptr != 0
11606 Other cases should reduce to one of these two (or a constant)
11607 due to the return value of strlen being unsigned. */
11608 if (TREE_CODE (arg0) == CALL_EXPR
11609 && integer_zerop (arg1))
11610 {
11611 tree fndecl = get_callee_fndecl (arg0);
11612
11613 if (fndecl
11614 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11615 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11616 && call_expr_nargs (arg0) == 1
11617 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11618 {
11619 tree iref = build_fold_indirect_ref_loc (loc,
11620 CALL_EXPR_ARG (arg0, 0));
11621 return fold_build2_loc (loc, code, type, iref,
11622 build_int_cst (TREE_TYPE (iref), 0));
11623 }
11624 }
11625
11626 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11627 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11628 if (TREE_CODE (arg0) == RSHIFT_EXPR
11629 && integer_zerop (arg1)
11630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11631 {
11632 tree arg00 = TREE_OPERAND (arg0, 0);
11633 tree arg01 = TREE_OPERAND (arg0, 1);
11634 tree itype = TREE_TYPE (arg00);
11635 if (wi::eq_p (arg01, element_precision (itype) - 1))
11636 {
11637 if (TYPE_UNSIGNED (itype))
11638 {
11639 itype = signed_type_for (itype);
11640 arg00 = fold_convert_loc (loc, itype, arg00);
11641 }
11642 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11643 type, arg00, build_zero_cst (itype));
11644 }
11645 }
11646
11647 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11648 (X & C) == 0 when C is a single bit. */
11649 if (TREE_CODE (arg0) == BIT_AND_EXPR
11650 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11651 && integer_zerop (arg1)
11652 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11653 {
11654 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11655 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11656 TREE_OPERAND (arg0, 1));
11657 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11658 type, tem,
11659 fold_convert_loc (loc, TREE_TYPE (arg0),
11660 arg1));
11661 }
11662
11663 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11664 constant C is a power of two, i.e. a single bit. */
11665 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11666 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11667 && integer_zerop (arg1)
11668 && integer_pow2p (TREE_OPERAND (arg0, 1))
11669 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11670 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11671 {
11672 tree arg00 = TREE_OPERAND (arg0, 0);
11673 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11674 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11675 }
11676
11677 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11678 when C is a power of two, i.e. a single bit. */
11679 if (TREE_CODE (arg0) == BIT_AND_EXPR
11680 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11681 && integer_zerop (arg1)
11682 && integer_pow2p (TREE_OPERAND (arg0, 1))
11683 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11684 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11685 {
11686 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11687 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11688 arg000, TREE_OPERAND (arg0, 1));
11689 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11690 tem, build_int_cst (TREE_TYPE (tem), 0));
11691 }
11692
11693 if (integer_zerop (arg1)
11694 && tree_expr_nonzero_p (arg0))
11695 {
11696 tree res = constant_boolean_node (code == NE_EXPR, type);
11697 return omit_one_operand_loc (loc, type, res, arg0);
11698 }
11699
11700 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11701 if (TREE_CODE (arg0) == BIT_AND_EXPR
11702 && TREE_CODE (arg1) == BIT_AND_EXPR)
11703 {
11704 tree arg00 = TREE_OPERAND (arg0, 0);
11705 tree arg01 = TREE_OPERAND (arg0, 1);
11706 tree arg10 = TREE_OPERAND (arg1, 0);
11707 tree arg11 = TREE_OPERAND (arg1, 1);
11708 tree itype = TREE_TYPE (arg0);
11709
11710 if (operand_equal_p (arg01, arg11, 0))
11711 return fold_build2_loc (loc, code, type,
11712 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11713 fold_build2_loc (loc,
11714 BIT_XOR_EXPR, itype,
11715 arg00, arg10),
11716 arg01),
11717 build_zero_cst (itype));
11718
11719 if (operand_equal_p (arg01, arg10, 0))
11720 return fold_build2_loc (loc, code, type,
11721 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11722 fold_build2_loc (loc,
11723 BIT_XOR_EXPR, itype,
11724 arg00, arg11),
11725 arg01),
11726 build_zero_cst (itype));
11727
11728 if (operand_equal_p (arg00, arg11, 0))
11729 return fold_build2_loc (loc, code, type,
11730 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11731 fold_build2_loc (loc,
11732 BIT_XOR_EXPR, itype,
11733 arg01, arg10),
11734 arg00),
11735 build_zero_cst (itype));
11736
11737 if (operand_equal_p (arg00, arg10, 0))
11738 return fold_build2_loc (loc, code, type,
11739 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11740 fold_build2_loc (loc,
11741 BIT_XOR_EXPR, itype,
11742 arg01, arg11),
11743 arg00),
11744 build_zero_cst (itype));
11745 }
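      /* Editor's note: the four symmetric rewrites above rest on one
	 observation: X & C == Y & C exactly when X and Y agree on every
	 bit of C, i.e. ((X ^ Y) & C) == 0; the XOR isolates differing
	 bits and the AND keeps only the tested ones.  */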
11746
11747 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11748 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11749 {
11750 tree arg00 = TREE_OPERAND (arg0, 0);
11751 tree arg01 = TREE_OPERAND (arg0, 1);
11752 tree arg10 = TREE_OPERAND (arg1, 0);
11753 tree arg11 = TREE_OPERAND (arg1, 1);
11754 tree itype = TREE_TYPE (arg0);
11755
11756 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11757 operand_equal_p guarantees no side-effects so we don't need
11758 to use omit_one_operand on Z. */
11759 if (operand_equal_p (arg01, arg11, 0))
11760 return fold_build2_loc (loc, code, type, arg00,
11761 fold_convert_loc (loc, TREE_TYPE (arg00),
11762 arg10));
11763 if (operand_equal_p (arg01, arg10, 0))
11764 return fold_build2_loc (loc, code, type, arg00,
11765 fold_convert_loc (loc, TREE_TYPE (arg00),
11766 arg11));
11767 if (operand_equal_p (arg00, arg11, 0))
11768 return fold_build2_loc (loc, code, type, arg01,
11769 fold_convert_loc (loc, TREE_TYPE (arg01),
11770 arg10));
11771 if (operand_equal_p (arg00, arg10, 0))
11772 return fold_build2_loc (loc, code, type, arg01,
11773 fold_convert_loc (loc, TREE_TYPE (arg01),
11774 arg11));
11775
11776 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11777 if (TREE_CODE (arg01) == INTEGER_CST
11778 && TREE_CODE (arg11) == INTEGER_CST)
11779 {
11780 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11781 fold_convert_loc (loc, itype, arg11));
11782 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11783 return fold_build2_loc (loc, code, type, tem,
11784 fold_convert_loc (loc, itype, arg10));
11785 }
11786 }
11787
11788 /* Attempt to simplify equality/inequality comparisons of complex
11789 values. Only lower the comparison if the result is known or
11790 can be simplified to a single scalar comparison. */
11791 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11792 || TREE_CODE (arg0) == COMPLEX_CST)
11793 && (TREE_CODE (arg1) == COMPLEX_EXPR
11794 || TREE_CODE (arg1) == COMPLEX_CST))
11795 {
11796 tree real0, imag0, real1, imag1;
11797 tree rcond, icond;
11798
11799 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11800 {
11801 real0 = TREE_OPERAND (arg0, 0);
11802 imag0 = TREE_OPERAND (arg0, 1);
11803 }
11804 else
11805 {
11806 real0 = TREE_REALPART (arg0);
11807 imag0 = TREE_IMAGPART (arg0);
11808 }
11809
11810 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11811 {
11812 real1 = TREE_OPERAND (arg1, 0);
11813 imag1 = TREE_OPERAND (arg1, 1);
11814 }
11815 else
11816 {
11817 real1 = TREE_REALPART (arg1);
11818 imag1 = TREE_IMAGPART (arg1);
11819 }
11820
11821 rcond = fold_binary_loc (loc, code, type, real0, real1);
11822 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11823 {
11824 if (integer_zerop (rcond))
11825 {
11826 if (code == EQ_EXPR)
11827 return omit_two_operands_loc (loc, type, boolean_false_node,
11828 imag0, imag1);
11829 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11830 }
11831 else
11832 {
11833 if (code == NE_EXPR)
11834 return omit_two_operands_loc (loc, type, boolean_true_node,
11835 imag0, imag1);
11836 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11837 }
11838 }
11839
11840 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11841 if (icond && TREE_CODE (icond) == INTEGER_CST)
11842 {
11843 if (integer_zerop (icond))
11844 {
11845 if (code == EQ_EXPR)
11846 return omit_two_operands_loc (loc, type, boolean_false_node,
11847 real0, real1);
11848 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11849 }
11850 else
11851 {
11852 if (code == NE_EXPR)
11853 return omit_two_operands_loc (loc, type, boolean_true_node,
11854 real0, real1);
11855 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11856 }
11857 }
11858 }
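      /* For example, an EQ_EXPR of complex values whose real parts are
	 the constants 2 and 3 folds to false outright, while an NE_EXPR
	 whose real parts are both 2 reduces to the single scalar
	 comparison of the imaginary parts.  */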
11859
11860 return NULL_TREE;
11861
11862 case LT_EXPR:
11863 case GT_EXPR:
11864 case LE_EXPR:
11865 case GE_EXPR:
11866 tem = fold_comparison (loc, code, type, op0, op1);
11867 if (tem != NULL_TREE)
11868 return tem;
11869
11870 /* Transform comparisons of the form X +- C CMP X. */
11871 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11872 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11873 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11874 && !HONOR_SNANS (arg0))
11875 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11876 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11877 {
11878 tree arg01 = TREE_OPERAND (arg0, 1);
11879 enum tree_code code0 = TREE_CODE (arg0);
11880 int is_positive;
11881
11882 if (TREE_CODE (arg01) == REAL_CST)
11883 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11884 else
11885 is_positive = tree_int_cst_sgn (arg01);
11886
11887 /* (X - c) > X becomes false. */
11888 if (code == GT_EXPR
11889 && ((code0 == MINUS_EXPR && is_positive >= 0)
11890 || (code0 == PLUS_EXPR && is_positive <= 0)))
11891 {
11892 if (TREE_CODE (arg01) == INTEGER_CST
11893 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11894 fold_overflow_warning (("assuming signed overflow does not "
11895 "occur when assuming that (X - c) > X "
11896 "is always false"),
11897 WARN_STRICT_OVERFLOW_ALL);
11898 return constant_boolean_node (0, type);
11899 }
11900
11901 /* Likewise (X + c) < X becomes false. */
11902 if (code == LT_EXPR
11903 && ((code0 == PLUS_EXPR && is_positive >= 0)
11904 || (code0 == MINUS_EXPR && is_positive <= 0)))
11905 {
11906 if (TREE_CODE (arg01) == INTEGER_CST
11907 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11908 fold_overflow_warning (("assuming signed overflow does not "
11909 "occur when assuming that "
11910 "(X + c) < X is always false"),
11911 WARN_STRICT_OVERFLOW_ALL);
11912 return constant_boolean_node (0, type);
11913 }
11914
11915 /* Convert (X - c) <= X to true. */
11916 if (!HONOR_NANS (arg1)
11917 && code == LE_EXPR
11918 && ((code0 == MINUS_EXPR && is_positive >= 0)
11919 || (code0 == PLUS_EXPR && is_positive <= 0)))
11920 {
11921 if (TREE_CODE (arg01) == INTEGER_CST
11922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11923 fold_overflow_warning (("assuming signed overflow does not "
11924 "occur when assuming that "
11925 "(X - c) <= X is always true"),
11926 WARN_STRICT_OVERFLOW_ALL);
11927 return constant_boolean_node (1, type);
11928 }
11929
11930 /* Convert (X + c) >= X to true. */
11931 if (!HONOR_NANS (arg1)
11932 && code == GE_EXPR
11933 && ((code0 == PLUS_EXPR && is_positive >= 0)
11934 || (code0 == MINUS_EXPR && is_positive <= 0)))
11935 {
11936 if (TREE_CODE (arg01) == INTEGER_CST
11937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11938 fold_overflow_warning (("assuming signed overflow does not "
11939 "occur when assuming that "
11940 "(X + c) >= X is always true"),
11941 WARN_STRICT_OVERFLOW_ALL);
11942 return constant_boolean_node (1, type);
11943 }
11944
11945 if (TREE_CODE (arg01) == INTEGER_CST)
11946 {
11947 /* Convert X + c > X and X - c < X to true for integers. */
11948 if (code == GT_EXPR
11949 && ((code0 == PLUS_EXPR && is_positive > 0)
11950 || (code0 == MINUS_EXPR && is_positive < 0)))
11951 {
11952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11953 fold_overflow_warning (("assuming signed overflow does "
11954 "not occur when assuming that "
11955 "(X + c) > X is always true"),
11956 WARN_STRICT_OVERFLOW_ALL);
11957 return constant_boolean_node (1, type);
11958 }
11959
11960 if (code == LT_EXPR
11961 && ((code0 == MINUS_EXPR && is_positive > 0)
11962 || (code0 == PLUS_EXPR && is_positive < 0)))
11963 {
11964 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11965 fold_overflow_warning (("assuming signed overflow does "
11966 "not occur when assuming that "
11967 "(X - c) < X is always true"),
11968 WARN_STRICT_OVERFLOW_ALL);
11969 return constant_boolean_node (1, type);
11970 }
11971
11972 /* Convert X + c <= X and X - c >= X to false for integers. */
11973 if (code == LE_EXPR
11974 && ((code0 == PLUS_EXPR && is_positive > 0)
11975 || (code0 == MINUS_EXPR && is_positive < 0)))
11976 {
11977 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11978 fold_overflow_warning (("assuming signed overflow does "
11979 "not occur when assuming that "
11980 "(X + c) <= X is always false"),
11981 WARN_STRICT_OVERFLOW_ALL);
11982 return constant_boolean_node (0, type);
11983 }
11984
11985 if (code == GE_EXPR
11986 && ((code0 == MINUS_EXPR && is_positive > 0)
11987 || (code0 == PLUS_EXPR && is_positive < 0)))
11988 {
11989 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11990 fold_overflow_warning (("assuming signed overflow does "
11991 "not occur when assuming that "
11992 "(X - c) >= X is always false"),
11993 WARN_STRICT_OVERFLOW_ALL);
11994 return constant_boolean_node (0, type);
11995 }
11996 }
11997 }
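      /* For example, for signed int x, where overflow is undefined,
	 x + 1 > x folds to 1 and x - 1 >= x folds to 0; the
	 fold_overflow_warning calls above record that signed overflow
	 was assumed not to occur.  Neither transform applies to
	 unsigned types, where x + 1 wraps and x + 1 > x is false for
	 x == UINT_MAX.  */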
11998
11999 /* Comparisons with the highest or lowest possible integer of
12000 the specified precision will have known values. */
12001 {
12002 tree arg1_type = TREE_TYPE (arg1);
12003 unsigned int prec = TYPE_PRECISION (arg1_type);
12004
12005 if (TREE_CODE (arg1) == INTEGER_CST
12006 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12007 {
12008 wide_int max = wi::max_value (arg1_type);
12009 wide_int signed_max = wi::max_value (prec, SIGNED);
12010 wide_int min = wi::min_value (arg1_type);
12011
12012 if (wi::eq_p (arg1, max))
12013 switch (code)
12014 {
12015 case GT_EXPR:
12016 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12017
12018 case GE_EXPR:
12019 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12020
12021 case LE_EXPR:
12022 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12023
12024 case LT_EXPR:
12025 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12026
12027 /* The GE_EXPR and LT_EXPR cases above are not normally
12028 reached because of previous transformations. */
12029
12030 default:
12031 break;
12032 }
12033 else if (wi::eq_p (arg1, max - 1))
12034 switch (code)
12035 {
12036 case GT_EXPR:
12037 arg1 = const_binop (PLUS_EXPR, arg1,
12038 build_int_cst (TREE_TYPE (arg1), 1));
12039 return fold_build2_loc (loc, EQ_EXPR, type,
12040 fold_convert_loc (loc,
12041 TREE_TYPE (arg1), arg0),
12042 arg1);
12043 case LE_EXPR:
12044 arg1 = const_binop (PLUS_EXPR, arg1,
12045 build_int_cst (TREE_TYPE (arg1), 1));
12046 return fold_build2_loc (loc, NE_EXPR, type,
12047 fold_convert_loc (loc, TREE_TYPE (arg1),
12048 arg0),
12049 arg1);
12050 default:
12051 break;
12052 }
12053 else if (wi::eq_p (arg1, min))
12054 switch (code)
12055 {
12056 case LT_EXPR:
12057 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12058
12059 case LE_EXPR:
12060 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12061
12062 case GE_EXPR:
12063 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12064
12065 case GT_EXPR:
12066 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12067
12068 default:
12069 break;
12070 }
12071 else if (wi::eq_p (arg1, min + 1))
12072 switch (code)
12073 {
12074 case GE_EXPR:
12075 arg1 = const_binop (MINUS_EXPR, arg1,
12076 build_int_cst (TREE_TYPE (arg1), 1));
12077 return fold_build2_loc (loc, NE_EXPR, type,
12078 fold_convert_loc (loc,
12079 TREE_TYPE (arg1), arg0),
12080 arg1);
12081 case LT_EXPR:
12082 arg1 = const_binop (MINUS_EXPR, arg1,
12083 build_int_cst (TREE_TYPE (arg1), 1));
12084 return fold_build2_loc (loc, EQ_EXPR, type,
12085 fold_convert_loc (loc, TREE_TYPE (arg1),
12086 arg0),
12087 arg1);
12088 default:
12089 break;
12090 }
12091
12092 else if (wi::eq_p (arg1, signed_max)
12093 && TYPE_UNSIGNED (arg1_type)
12094 /* We will flip the signedness of the comparison operator
12095 associated with the mode of arg1, so the sign bit is
12096 specified by this mode. Check that arg1 is the signed
12097 max associated with this sign bit. */
12098 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12099 /* signed_type does not work on pointer types. */
12100 && INTEGRAL_TYPE_P (arg1_type))
12101 {
12102 /* The following case also applies to X < signed_max+1
12103		       and X >= signed_max+1 because of previous transformations.  */
12104 if (code == LE_EXPR || code == GT_EXPR)
12105 {
12106 tree st = signed_type_for (arg1_type);
12107 return fold_build2_loc (loc,
12108 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12109 type, fold_convert_loc (loc, st, arg0),
12110 build_int_cst (st, 0));
12111 }
12112 }
12113 }
12114 }
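      /* For example, for unsigned char x the bounds above give:
	   x > 255   -> false	      x <= 255  -> true
	   x >= 255  -> x == 255      x < 255   -> x != 255
	 and x > 254 becomes x == 255 via the max - 1 case.  For an
	 unsigned x of full mode precision, x > SIGNED_MAX is instead
	 rewritten as (signed) x < 0 by the signed_max case.  */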
12115
12116 /* If we are comparing an ABS_EXPR with a constant, we can
12117 convert all the cases into explicit comparisons, but they may
12118 well not be faster than doing the ABS and one comparison.
12119 But ABS (X) <= C is a range comparison, which becomes a subtraction
12120 and a comparison, and is probably faster. */
12121 if (code == LE_EXPR
12122 && TREE_CODE (arg1) == INTEGER_CST
12123 && TREE_CODE (arg0) == ABS_EXPR
12124 && ! TREE_SIDE_EFFECTS (arg0)
12125 && (0 != (tem = negate_expr (arg1)))
12126 && TREE_CODE (tem) == INTEGER_CST
12127 && !TREE_OVERFLOW (tem))
12128 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12129 build2 (GE_EXPR, type,
12130 TREE_OPERAND (arg0, 0), tem),
12131 build2 (LE_EXPR, type,
12132 TREE_OPERAND (arg0, 0), arg1));
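      /* For example, ABS (x) <= 5 becomes x >= -5 && x <= 5, the range
	 comparison noted above, which later folding can implement as a
	 single unsigned comparison of x + 5 against 10.  */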
12133
12134 /* Convert ABS_EXPR<x> >= 0 to true. */
12135 strict_overflow_p = false;
12136 if (code == GE_EXPR
12137 && (integer_zerop (arg1)
12138 || (! HONOR_NANS (arg0)
12139 && real_zerop (arg1)))
12140 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12141 {
12142 if (strict_overflow_p)
12143 fold_overflow_warning (("assuming signed overflow does not occur "
12144 "when simplifying comparison of "
12145 "absolute value and zero"),
12146 WARN_STRICT_OVERFLOW_CONDITIONAL);
12147 return omit_one_operand_loc (loc, type,
12148 constant_boolean_node (true, type),
12149 arg0);
12150 }
12151
12152 /* Convert ABS_EXPR<x> < 0 to false. */
12153 strict_overflow_p = false;
12154 if (code == LT_EXPR
12155 && (integer_zerop (arg1) || real_zerop (arg1))
12156 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12157 {
12158 if (strict_overflow_p)
12159 fold_overflow_warning (("assuming signed overflow does not occur "
12160 "when simplifying comparison of "
12161 "absolute value and zero"),
12162 WARN_STRICT_OVERFLOW_CONDITIONAL);
12163 return omit_one_operand_loc (loc, type,
12164 constant_boolean_node (false, type),
12165 arg0);
12166 }
12167
12168 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12169 and similarly for >= into !=. */
12170 if ((code == LT_EXPR || code == GE_EXPR)
12171 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12172 && TREE_CODE (arg1) == LSHIFT_EXPR
12173 && integer_onep (TREE_OPERAND (arg1, 0)))
12174 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12175 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12176 TREE_OPERAND (arg1, 1)),
12177 build_zero_cst (TREE_TYPE (arg0)));
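      /* For example, for unsigned x, x < (1 << y) becomes
	 (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0,
	 avoiding the runtime shift of the constant 1.  */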
12178
12179	      /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
12180		 otherwise Y might be >= # of bits in X's type and thus e.g.
12181		 (unsigned char) (1 << Y) for Y == 15 might be 0.
12182		 If the cast is widening, then 1 << Y should have unsigned type,
12183		 otherwise if Y is the number of bits in the signed shift type minus 1,
12184		 we can't optimize this.  E.g. (unsigned long long) (1 << Y)
12185		 for Y == 31 might be 0xffffffff80000000.  */
12186 if ((code == LT_EXPR || code == GE_EXPR)
12187 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12188 && CONVERT_EXPR_P (arg1)
12189 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12190 && (element_precision (TREE_TYPE (arg1))
12191 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12192 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12193 || (element_precision (TREE_TYPE (arg1))
12194 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12195 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12196 {
12197 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12198 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12199 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12200 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12201 build_zero_cst (TREE_TYPE (arg0)));
12202 }
12203
12204 return NULL_TREE;
12205
12206 case UNORDERED_EXPR:
12207 case ORDERED_EXPR:
12208 case UNLT_EXPR:
12209 case UNLE_EXPR:
12210 case UNGT_EXPR:
12211 case UNGE_EXPR:
12212 case UNEQ_EXPR:
12213 case LTGT_EXPR:
12214 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12215 {
12216 t1 = fold_relational_const (code, type, arg0, arg1);
12217 if (t1 != NULL_TREE)
12218 return t1;
12219 }
12220
12221 /* If the first operand is NaN, the result is constant. */
12222 if (TREE_CODE (arg0) == REAL_CST
12223 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12224 && (code != LTGT_EXPR || ! flag_trapping_math))
12225 {
12226 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12227 ? integer_zero_node
12228 : integer_one_node;
12229 return omit_one_operand_loc (loc, type, t1, arg1);
12230 }
12231
12232 /* If the second operand is NaN, the result is constant. */
12233 if (TREE_CODE (arg1) == REAL_CST
12234 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12235 && (code != LTGT_EXPR || ! flag_trapping_math))
12236 {
12237 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12238 ? integer_zero_node
12239 : integer_one_node;
12240 return omit_one_operand_loc (loc, type, t1, arg0);
12241 }
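      /* For example, with a NaN constant as either operand, UNLT_EXPR,
	 UNGE_EXPR, UNEQ_EXPR and UNORDERED_EXPR fold to 1, since they
	 are true on unordered operands, while ORDERED_EXPR folds to 0.
	 LTGT_EXPR folds to 0 only when -ftrapping-math is disabled, as
	 it may raise an exception on a NaN.  */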
12242
12243 /* Simplify unordered comparison of something with itself. */
12244 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12245 && operand_equal_p (arg0, arg1, 0))
12246 return constant_boolean_node (1, type);
12247
12248 if (code == LTGT_EXPR
12249 && !flag_trapping_math
12250 && operand_equal_p (arg0, arg1, 0))
12251 return constant_boolean_node (0, type);
12252
12253 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12254 {
12255 tree targ0 = strip_float_extensions (arg0);
12256 tree targ1 = strip_float_extensions (arg1);
12257 tree newtype = TREE_TYPE (targ0);
12258
12259 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12260 newtype = TREE_TYPE (targ1);
12261
12262 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12263 return fold_build2_loc (loc, code, type,
12264 fold_convert_loc (loc, newtype, targ0),
12265 fold_convert_loc (loc, newtype, targ1));
12266 }
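      /* For example, with float f1 and f2, (double) f1 < (double) f2
	 folds to f1 < f2: the widening conversions are exact and
	 order-preserving, so the narrower comparison is equivalent.  */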
12267
12268 return NULL_TREE;
12269
12270 case COMPOUND_EXPR:
12271 /* When pedantic, a compound expression can be neither an lvalue
12272 nor an integer constant expression. */
12273 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12274 return NULL_TREE;
12275	      /* Don't let (0, 0) be a null pointer constant.  */
12276 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12277 : fold_convert_loc (loc, type, arg1);
12278 return pedantic_non_lvalue_loc (loc, tem);
12279
12280 case ASSERT_EXPR:
12281 /* An ASSERT_EXPR should never be passed to fold_binary. */
12282 gcc_unreachable ();
12283
12284 default:
12285 return NULL_TREE;
12286 } /* switch (code) */
12287 }
12288
12289 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12290 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12291 of GOTO_EXPR. */
12292
12293 static tree
12294 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
12295 {
12296 switch (TREE_CODE (*tp))
12297 {
12298 case LABEL_EXPR:
12299 return *tp;
12300
12301 case GOTO_EXPR:
12302 *walk_subtrees = 0;
12303
12304 /* ... fall through ... */
12305
12306 default:
12307 return NULL_TREE;
12308 }
12309 }
12310
12311 /* Return whether the sub-tree ST contains a label which is accessible from
12312 outside the sub-tree. */
12313
12314 static bool
12315 contains_label_p (tree st)
12316 {
12317 return
12318	    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
12319 }
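
/* For example, when folding 0 ? <arm containing a label> : y, the dead
   arm cannot be discarded if it contains a label, since a goto
   elsewhere in the function may still jump into it.  */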
12320
12321 /* Fold a ternary expression of code CODE and type TYPE with operands
12322 OP0, OP1, and OP2. Return the folded expression if folding is
12323 successful. Otherwise, return NULL_TREE. */
12324
12325 tree
12326 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12327 tree op0, tree op1, tree op2)
12328 {
12329 tree tem;
12330 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12331 enum tree_code_class kind = TREE_CODE_CLASS (code);
12332
12333 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12334 && TREE_CODE_LENGTH (code) == 3);
12335
12336 /* If this is a commutative operation, and OP0 is a constant, move it
12337 to OP1 to reduce the number of tests below. */
12338 if (commutative_ternary_tree_code (code)
12339 && tree_swap_operands_p (op0, op1, true))
12340 return fold_build3_loc (loc, code, type, op1, op0, op2);
12341
12342 tem = generic_simplify (loc, code, type, op0, op1, op2);
12343 if (tem)
12344 return tem;
12345
12346 /* Strip any conversions that don't change the mode. This is safe
12347 for every expression, except for a comparison expression because
12348 its signedness is derived from its operands. So, in the latter
12349 case, only strip conversions that don't change the signedness.
12350
12351 Note that this is done as an internal manipulation within the
12352 constant folder, in order to find the simplest representation of
12353	     the arguments so that their form can be studied.  In any case,
12354 the appropriate type conversions should be put back in the tree
12355 that will get out of the constant folder. */
12356 if (op0)
12357 {
12358 arg0 = op0;
12359 STRIP_NOPS (arg0);
12360 }
12361
12362 if (op1)
12363 {
12364 arg1 = op1;
12365 STRIP_NOPS (arg1);
12366 }
12367
12368 if (op2)
12369 {
12370 arg2 = op2;
12371 STRIP_NOPS (arg2);
12372 }
12373
12374 switch (code)
12375 {
12376 case COMPONENT_REF:
12377 if (TREE_CODE (arg0) == CONSTRUCTOR
12378 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12379 {
12380 unsigned HOST_WIDE_INT idx;
12381 tree field, value;
12382 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12383 if (field == arg1)
12384 return value;
12385 }
12386 return NULL_TREE;
12387
12388 case COND_EXPR:
12389 case VEC_COND_EXPR:
12390 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12391 so all simple results must be passed through pedantic_non_lvalue. */
12392 if (TREE_CODE (arg0) == INTEGER_CST)
12393 {
12394 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12395 tem = integer_zerop (arg0) ? op2 : op1;
12396 /* Only optimize constant conditions when the selected branch
12397 has the same type as the COND_EXPR. This avoids optimizing
12398 away "c ? x : throw", where the throw has a void type.
12399	     Avoid throwing away the operand that contains a label.  */
12400 if ((!TREE_SIDE_EFFECTS (unused_op)
12401 || !contains_label_p (unused_op))
12402 && (! VOID_TYPE_P (TREE_TYPE (tem))
12403 || VOID_TYPE_P (type)))
12404 return pedantic_non_lvalue_loc (loc, tem);
12405 return NULL_TREE;
12406 }
12407 else if (TREE_CODE (arg0) == VECTOR_CST)
12408 {
12409 if ((TREE_CODE (arg1) == VECTOR_CST
12410 || TREE_CODE (arg1) == CONSTRUCTOR)
12411 && (TREE_CODE (arg2) == VECTOR_CST
12412 || TREE_CODE (arg2) == CONSTRUCTOR))
12413 {
12414 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
12415 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
12416 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
12417 for (i = 0; i < nelts; i++)
12418 {
12419 tree val = VECTOR_CST_ELT (arg0, i);
12420 if (integer_all_onesp (val))
12421 sel[i] = i;
12422 else if (integer_zerop (val))
12423 sel[i] = nelts + i;
12424 else /* Currently unreachable. */
12425 return NULL_TREE;
12426 }
12427 tree t = fold_vec_perm (type, arg1, arg2, sel);
12428 if (t != NULL_TREE)
12429 return t;
12430 }
12431 }
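      /* For example, a four-element constant mask { -1, 0, 0, -1 }
	 yields the selector { 0, 5, 6, 3 }: all-ones elements select
	 from arg1 and zero elements from arg2, whose elements start at
	 index nelts == 4 in the combined selector.  */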
12432
12433 /* If we have A op B ? A : C, we may be able to convert this to a
12434 simpler expression, depending on the operation and the values
12435 of B and C. Signed zeros prevent all of these transformations,
12436 for reasons given above each one.
12437
12438 Also try swapping the arguments and inverting the conditional. */
12439 if (COMPARISON_CLASS_P (arg0)
12440 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12441 arg1, TREE_OPERAND (arg0, 1))
12442 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
12443 {
12444 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12445 if (tem)
12446 return tem;
12447 }
12448
12449 if (COMPARISON_CLASS_P (arg0)
12450 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12451 op2,
12452 TREE_OPERAND (arg0, 1))
12453 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12454 {
12455 location_t loc0 = expr_location_or (arg0, loc);
12456 tem = fold_invert_truthvalue (loc0, arg0);
12457 if (tem && COMPARISON_CLASS_P (tem))
12458 {
12459 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12460 if (tem)
12461 return tem;
12462 }
12463 }
12464
12465 /* If the second operand is simpler than the third, swap them
12466 since that produces better jump optimization results. */
12467 if (truth_value_p (TREE_CODE (arg0))
12468 && tree_swap_operands_p (op1, op2, false))
12469 {
12470 location_t loc0 = expr_location_or (arg0, loc);
12471 /* See if this can be inverted. If it can't, possibly because
12472 it was a floating-point inequality comparison, don't do
12473 anything. */
12474 tem = fold_invert_truthvalue (loc0, arg0);
12475 if (tem)
12476 return fold_build3_loc (loc, code, type, tem, op2, op1);
12477 }
12478
12479 /* Convert A ? 1 : 0 to simply A. */
12480 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12481 : (integer_onep (op1)
12482 && !VECTOR_TYPE_P (type)))
12483 && integer_zerop (op2)
12484 /* If we try to convert OP0 to our type, the
12485 call to fold will try to move the conversion inside
12486 a COND, which will recurse. In that case, the COND_EXPR
12487 is probably the best choice, so leave it alone. */
12488 && type == TREE_TYPE (arg0))
12489 return pedantic_non_lvalue_loc (loc, arg0);
12490
12491 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12492 over COND_EXPR in cases such as floating point comparisons. */
12493 if (integer_zerop (op1)
12494 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
12495 : (integer_onep (op2)
12496 && !VECTOR_TYPE_P (type)))
12497 && truth_value_p (TREE_CODE (arg0)))
12498 return pedantic_non_lvalue_loc (loc,
12499 fold_convert_loc (loc, type,
12500 invert_truthvalue_loc (loc,
12501 arg0)));
12502
12503 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12504 if (TREE_CODE (arg0) == LT_EXPR
12505 && integer_zerop (TREE_OPERAND (arg0, 1))
12506 && integer_zerop (op2)
12507 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12508 {
12509 /* sign_bit_p looks through both zero and sign extensions,
12510 but for this optimization only sign extensions are
12511 usable. */
12512 tree tem2 = TREE_OPERAND (arg0, 0);
12513 while (tem != tem2)
12514 {
12515 if (TREE_CODE (tem2) != NOP_EXPR
12516 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12517 {
12518 tem = NULL_TREE;
12519 break;
12520 }
12521 tem2 = TREE_OPERAND (tem2, 0);
12522 }
12523 /* sign_bit_p only checks ARG1 bits within A's precision.
12524 If <sign bit of A> has wider type than A, bits outside
12525 of A's precision in <sign bit of A> need to be checked.
12526	     If they are all 0, this optimization needs to be done
12527	     in unsigned A's type; if they are all 1, in signed A's type;
12528	     otherwise this can't be done.  */
12529 if (tem
12530 && TYPE_PRECISION (TREE_TYPE (tem))
12531 < TYPE_PRECISION (TREE_TYPE (arg1))
12532 && TYPE_PRECISION (TREE_TYPE (tem))
12533 < TYPE_PRECISION (type))
12534 {
12535 int inner_width, outer_width;
12536 tree tem_type;
12537
12538 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12539 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12540 if (outer_width > TYPE_PRECISION (type))
12541 outer_width = TYPE_PRECISION (type);
12542
12543 wide_int mask = wi::shifted_mask
12544 (inner_width, outer_width - inner_width, false,
12545 TYPE_PRECISION (TREE_TYPE (arg1)));
12546
12547 wide_int common = mask & arg1;
12548 if (common == mask)
12549 {
12550 tem_type = signed_type_for (TREE_TYPE (tem));
12551 tem = fold_convert_loc (loc, tem_type, tem);
12552 }
12553 else if (common == 0)
12554 {
12555 tem_type = unsigned_type_for (TREE_TYPE (tem));
12556 tem = fold_convert_loc (loc, tem_type, tem);
12557 }
12558 else
12559 tem = NULL;
12560 }
12561
12562 if (tem)
12563 return
12564 fold_convert_loc (loc, type,
12565 fold_build2_loc (loc, BIT_AND_EXPR,
12566 TREE_TYPE (tem), tem,
12567 fold_convert_loc (loc,
12568 TREE_TYPE (tem),
12569 arg1)));
12570 }
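      /* For example, for 32-bit int x and a mask constant with only the
	 sign bit set, x < 0 ? mask : 0 is rewritten as x & mask, with
	 the conversions above choosing a signedness in which the bits
	 of the mask beyond x's precision come out right.  */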
12571
12572 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12573 already handled above. */
12574 if (TREE_CODE (arg0) == BIT_AND_EXPR
12575 && integer_onep (TREE_OPERAND (arg0, 1))
12576 && integer_zerop (op2)
12577 && integer_pow2p (arg1))
12578 {
12579 tree tem = TREE_OPERAND (arg0, 0);
12580 STRIP_NOPS (tem);
12581 if (TREE_CODE (tem) == RSHIFT_EXPR
12582 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12583	  && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
12584	      == tree_to_uhwi (TREE_OPERAND (tem, 1))))
12585 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12586 TREE_OPERAND (tem, 0), arg1);
12587 }
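      /* For example, ((a >> 3) & 1) ? 8 : 0 becomes a & 8, since
	 tree_log2 (8) == 3 matches the shift count.  */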
12588
12589 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12590 is probably obsolete because the first operand should be a
12591 truth value (that's why we have the two cases above), but let's
12592 leave it in until we can confirm this for all front-ends. */
12593 if (integer_zerop (op2)
12594 && TREE_CODE (arg0) == NE_EXPR
12595 && integer_zerop (TREE_OPERAND (arg0, 1))
12596 && integer_pow2p (arg1)
12597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12599 arg1, OEP_ONLY_CONST))
12600 return pedantic_non_lvalue_loc (loc,
12601 fold_convert_loc (loc, type,
12602 TREE_OPERAND (arg0, 0)));
12603
12604 /* Disable the transformations below for vectors, since
12605 fold_binary_op_with_conditional_arg may undo them immediately,
12606 yielding an infinite loop. */
12607 if (code == VEC_COND_EXPR)
12608 return NULL_TREE;
12609
12610 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12611 if (integer_zerop (op2)
12612 && truth_value_p (TREE_CODE (arg0))
12613 && truth_value_p (TREE_CODE (arg1))
12614 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12615 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12616 : TRUTH_ANDIF_EXPR,
12617 type, fold_convert_loc (loc, type, arg0), arg1);
12618
12619 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12620	      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12621 && truth_value_p (TREE_CODE (arg0))
12622 && truth_value_p (TREE_CODE (arg1))
12623 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12624 {
12625 location_t loc0 = expr_location_or (arg0, loc);
12626 /* Only perform transformation if ARG0 is easily inverted. */
12627 tem = fold_invert_truthvalue (loc0, arg0);
12628 if (tem)
12629 return fold_build2_loc (loc, code == VEC_COND_EXPR
12630 ? BIT_IOR_EXPR
12631 : TRUTH_ORIF_EXPR,
12632 type, fold_convert_loc (loc, type, tem),
12633 arg1);
12634 }
12635
12636 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12637 if (integer_zerop (arg1)
12638 && truth_value_p (TREE_CODE (arg0))
12639 && truth_value_p (TREE_CODE (op2))
12640 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12641 {
12642 location_t loc0 = expr_location_or (arg0, loc);
12643 /* Only perform transformation if ARG0 is easily inverted. */
12644 tem = fold_invert_truthvalue (loc0, arg0);
12645 if (tem)
12646 return fold_build2_loc (loc, code == VEC_COND_EXPR
12647 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12648 type, fold_convert_loc (loc, type, tem),
12649 op2);
12650 }
12651
12652 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12653	      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12654 && truth_value_p (TREE_CODE (arg0))
12655 && truth_value_p (TREE_CODE (op2))
12656 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12657 return fold_build2_loc (loc, code == VEC_COND_EXPR
12658 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12659 type, fold_convert_loc (loc, type, arg0), op2);
12660
12661 return NULL_TREE;
12662
12663 case CALL_EXPR:
12664 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12665 of fold_ternary on them. */
12666 gcc_unreachable ();
12667
12668 case BIT_FIELD_REF:
12669 if ((TREE_CODE (arg0) == VECTOR_CST
12670 || (TREE_CODE (arg0) == CONSTRUCTOR
12671 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
12672 && (type == TREE_TYPE (TREE_TYPE (arg0))
12673 || (TREE_CODE (type) == VECTOR_TYPE
12674 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
12675 {
12676 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12677 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12678 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12679 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12680
12681 if (n != 0
12682 && (idx % width) == 0
12683 && (n % width) == 0
12684 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12685 {
12686 idx = idx / width;
12687 n = n / width;
12688
12689 if (TREE_CODE (arg0) == VECTOR_CST)
12690 {
12691 if (n == 1)
12692 return VECTOR_CST_ELT (arg0, idx);
12693
12694 tree *vals = XALLOCAVEC (tree, n);
12695 for (unsigned i = 0; i < n; ++i)
12696 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
12697 return build_vector (type, vals);
12698 }
12699
12700 /* Constructor elements can be subvectors. */
12701 unsigned HOST_WIDE_INT k = 1;
12702 if (CONSTRUCTOR_NELTS (arg0) != 0)
12703 {
12704 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
12705 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
12706 k = TYPE_VECTOR_SUBPARTS (cons_elem);
12707 }
12708
12709 /* We keep an exact subset of the constructor elements. */
12710 if ((idx % k) == 0 && (n % k) == 0)
12711 {
12712 if (CONSTRUCTOR_NELTS (arg0) == 0)
12713 return build_constructor (type, NULL);
12714 idx /= k;
12715 n /= k;
12716 if (n == 1)
12717 {
12718 if (idx < CONSTRUCTOR_NELTS (arg0))
12719 return CONSTRUCTOR_ELT (arg0, idx)->value;
12720 return build_zero_cst (type);
12721 }
12722
12723 vec<constructor_elt, va_gc> *vals;
12724 vec_alloc (vals, n);
12725 for (unsigned i = 0;
12726 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
12727 ++i)
12728 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
12729 CONSTRUCTOR_ELT
12730 (arg0, idx + i)->value);
12731 return build_constructor (type, vals);
12732 }
12733 /* The bitfield references a single constructor element. */
12734 else if (idx + n <= (idx / k + 1) * k)
12735 {
12736 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
12737 return build_zero_cst (type);
12738 else if (n == k)
12739 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
12740 else
12741 return fold_build3_loc (loc, code, type,
12742 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
12743 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
12744 }
12745 }
12746 }
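	      /* For example, a 64-bit extract at bit offset 64 from a
		 vector of four 32-bit elements selects elements 2 and
		 3: after dividing by the element width, idx == 2 and
		 n == 2.  */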
12747
12748	      /* A bit-field-ref that references the full argument can be stripped.  */
12749 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12750 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
12751 && integer_zerop (op2))
12752 return fold_convert_loc (loc, type, arg0);
12753
12754 /* On constants we can use native encode/interpret to constant
12755 fold (nearly) all BIT_FIELD_REFs. */
12756 if (CONSTANT_CLASS_P (arg0)
12757 && can_native_interpret_type_p (type)
12758 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
12759	      /* This limitation should not be necessary; we just need to
12760 round this up to mode size. */
12761 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
12762 /* Need bit-shifting of the buffer to relax the following. */
12763 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
12764 {
12765 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12766 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12767 unsigned HOST_WIDE_INT clen;
12768 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
12769 /* ??? We cannot tell native_encode_expr to start at
12770 some random byte only. So limit us to a reasonable amount
12771 of work. */
12772 if (clen <= 4096)
12773 {
12774 unsigned char *b = XALLOCAVEC (unsigned char, clen);
12775 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
12776 if (len > 0
12777 && len * BITS_PER_UNIT >= bitpos + bitsize)
12778 {
12779 tree v = native_interpret_expr (type,
12780 b + bitpos / BITS_PER_UNIT,
12781 bitsize / BITS_PER_UNIT);
12782 if (v)
12783 return v;
12784 }
12785 }
12786 }
12787
12788 return NULL_TREE;
12789
12790 case FMA_EXPR:
12791 /* For integers we can decompose the FMA if possible. */
12792 if (TREE_CODE (arg0) == INTEGER_CST
12793 && TREE_CODE (arg1) == INTEGER_CST)
12794 return fold_build2_loc (loc, PLUS_EXPR, type,
12795 const_binop (MULT_EXPR, arg0, arg1), arg2);
12796 if (integer_zerop (arg2))
12797 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12798
12799 return fold_fma (loc, type, arg0, arg1, arg2);
12800
12801 case VEC_PERM_EXPR:
12802 if (TREE_CODE (arg2) == VECTOR_CST)
12803 {
12804 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
12805 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
12806 unsigned char *sel2 = sel + nelts;
12807 bool need_mask_canon = false;
12808 bool need_mask_canon2 = false;
12809 bool all_in_vec0 = true;
12810 bool all_in_vec1 = true;
12811 bool maybe_identity = true;
12812 bool single_arg = (op0 == op1);
12813 bool changed = false;
12814
12815 mask2 = 2 * nelts - 1;
12816 mask = single_arg ? (nelts - 1) : mask2;
12817 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
12818 for (i = 0; i < nelts; i++)
12819 {
12820 tree val = VECTOR_CST_ELT (arg2, i);
12821 if (TREE_CODE (val) != INTEGER_CST)
12822 return NULL_TREE;
12823
12824 /* Make sure that the perm value is in an acceptable
12825 range. */
12826 wide_int t = val;
12827 need_mask_canon |= wi::gtu_p (t, mask);
12828 need_mask_canon2 |= wi::gtu_p (t, mask2);
12829 sel[i] = t.to_uhwi () & mask;
12830 sel2[i] = t.to_uhwi () & mask2;
12831
12832 if (sel[i] < nelts)
12833 all_in_vec1 = false;
12834 else
12835 all_in_vec0 = false;
12836
12837	      if ((sel[i] & (nelts - 1)) != i)
12838 maybe_identity = false;
12839 }
12840
12841 if (maybe_identity)
12842 {
12843 if (all_in_vec0)
12844 return op0;
12845 if (all_in_vec1)
12846 return op1;
12847 }
12848
12849 if (all_in_vec0)
12850 op1 = op0;
12851 else if (all_in_vec1)
12852 {
12853 op0 = op1;
12854 for (i = 0; i < nelts; i++)
12855 sel[i] -= nelts;
12856 need_mask_canon = true;
12857 }
12858
12859 if ((TREE_CODE (op0) == VECTOR_CST
12860 || TREE_CODE (op0) == CONSTRUCTOR)
12861 && (TREE_CODE (op1) == VECTOR_CST
12862 || TREE_CODE (op1) == CONSTRUCTOR))
12863 {
12864 tree t = fold_vec_perm (type, op0, op1, sel);
12865 if (t != NULL_TREE)
12866 return t;
12867 }
12868
12869 if (op0 == op1 && !single_arg)
12870 changed = true;
12871
12872 /* Some targets are deficient and fail to expand a single
12873 argument permutation while still allowing an equivalent
12874 2-argument version. */
12875 if (need_mask_canon && arg2 == op2
12876 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12877 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12878 {
12879 need_mask_canon = need_mask_canon2;
12880 sel = sel2;
12881 }
12882
12883 if (need_mask_canon && arg2 == op2)
12884 {
12885 tree *tsel = XALLOCAVEC (tree, nelts);
12886 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12887 for (i = 0; i < nelts; i++)
12888 tsel[i] = build_int_cst (eltype, sel[i]);
12889 op2 = build_vector (TREE_TYPE (arg2), tsel);
12890 changed = true;
12891 }
12892
12893 if (changed)
12894 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12895 }
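      /* For example, with nelts == 2 and distinct operands, a selector
	 of { 3, 0 } picks element 1 of op1 and element 0 of op0;
	 selector values are canonicalized into [0, 2 * nelts - 1].  */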
12896 return NULL_TREE;
12897
12898 default:
12899 return NULL_TREE;
12900 } /* switch (code) */
12901 }
12902
12903 /* Perform constant folding and related simplification of EXPR.
12904 The related simplifications include x*1 => x, x*0 => 0, etc.,
12905 and application of the associative law.
12906 NOP_EXPR conversions may be removed freely (as long as we
12907 are careful not to change the type of the overall expression).
12908 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12909 but we can constant-fold them if they have constant operands. */
12910
12911 #ifdef ENABLE_FOLD_CHECKING
12912 # define fold(x) fold_1 (x)
12913 static tree fold_1 (tree);
12914 static
12915 #endif
12916 tree
12917 fold (tree expr)
12918 {
12919 const tree t = expr;
12920 enum tree_code code = TREE_CODE (t);
12921 enum tree_code_class kind = TREE_CODE_CLASS (code);
12922 tree tem;
12923 location_t loc = EXPR_LOCATION (expr);
12924
12925 /* Return right away if a constant. */
12926 if (kind == tcc_constant)
12927 return t;
12928
12929 /* CALL_EXPR-like objects with variable numbers of operands are
12930 treated specially. */
12931 if (kind == tcc_vl_exp)
12932 {
12933 if (code == CALL_EXPR)
12934 {
12935 tem = fold_call_expr (loc, expr, false);
12936 return tem ? tem : expr;
12937 }
12938 return expr;
12939 }
12940
12941 if (IS_EXPR_CODE_CLASS (kind))
12942 {
12943 tree type = TREE_TYPE (t);
12944 tree op0, op1, op2;
12945
12946 switch (TREE_CODE_LENGTH (code))
12947 {
12948 case 1:
12949 op0 = TREE_OPERAND (t, 0);
12950 tem = fold_unary_loc (loc, code, type, op0);
12951 return tem ? tem : expr;
12952 case 2:
12953 op0 = TREE_OPERAND (t, 0);
12954 op1 = TREE_OPERAND (t, 1);
12955 tem = fold_binary_loc (loc, code, type, op0, op1);
12956 return tem ? tem : expr;
12957 case 3:
12958 op0 = TREE_OPERAND (t, 0);
12959 op1 = TREE_OPERAND (t, 1);
12960 op2 = TREE_OPERAND (t, 2);
12961 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12962 return tem ? tem : expr;
12963 default:
12964 break;
12965 }
12966 }
12967
12968 switch (code)
12969 {
12970 case ARRAY_REF:
12971 {
12972 tree op0 = TREE_OPERAND (t, 0);
12973 tree op1 = TREE_OPERAND (t, 1);
12974
12975 if (TREE_CODE (op1) == INTEGER_CST
12976 && TREE_CODE (op0) == CONSTRUCTOR
12977 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12978 {
12979 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12980 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12981 unsigned HOST_WIDE_INT begin = 0;
12982
12983 /* Find a matching index by means of a binary search. */
12984 while (begin != end)
12985 {
12986 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12987 tree index = (*elts)[middle].index;
12988
12989 if (TREE_CODE (index) == INTEGER_CST
12990 && tree_int_cst_lt (index, op1))
12991 begin = middle + 1;
12992 else if (TREE_CODE (index) == INTEGER_CST
12993 && tree_int_cst_lt (op1, index))
12994 end = middle;
12995 else if (TREE_CODE (index) == RANGE_EXPR
12996 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12997 begin = middle + 1;
12998 else if (TREE_CODE (index) == RANGE_EXPR
12999 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13000 end = middle;
13001 else
13002 return (*elts)[middle].value;
13003 }
13004 }
13005
13006 return t;
13007 }
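    /* For example, an ARRAY_REF with constant index into a large
       CONSTRUCTOR { [0] = a, [2] = b, [5] = c } is resolved by the
       binary search above rather than a linear scan; RANGE_EXPR
       indices are matched against both bounds of their range.  */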
13008
13009 /* Return a VECTOR_CST if possible. */
13010 case CONSTRUCTOR:
13011 {
13012 tree type = TREE_TYPE (t);
13013 if (TREE_CODE (type) != VECTOR_TYPE)
13014 return t;
13015
13016 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13017 unsigned HOST_WIDE_INT idx, pos = 0;
13018 tree value;
13019
13020 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13021 {
13022 if (!CONSTANT_CLASS_P (value))
13023 return t;
13024 if (TREE_CODE (value) == VECTOR_CST)
13025 {
13026 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13027 vec[pos++] = VECTOR_CST_ELT (value, i);
13028 }
13029 else
13030 vec[pos++] = value;
13031 }
13032 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13033 vec[pos] = build_zero_cst (TREE_TYPE (type));
13034
13035 return build_vector (type, vec);
13036 }
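    /* For example, a CONSTRUCTOR { 1, 2 } for a four-element integer
       vector type folds to the VECTOR_CST { 1, 2, 0, 0 }: missing
       trailing elements are filled with zero.  */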
13037
13038 case CONST_DECL:
13039 return fold (DECL_INITIAL (t));
13040
13041 default:
13042 return t;
13043 } /* switch (code) */
13044 }
13045
13046 #ifdef ENABLE_FOLD_CHECKING
13047 #undef fold
13048
13049 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13050 hash_table<nofree_ptr_hash<const tree_node> > *);
13051 static void fold_check_failed (const_tree, const_tree);
13052 void print_fold_checksum (const_tree);
13053
13054	/* When --enable-checking=fold is in effect, compute a digest of expr
13055	   before and after the actual fold call, to verify that fold did not
13056	   accidentally change the original expr.  */
13057
13058 tree
13059 fold (tree expr)
13060 {
13061 tree ret;
13062 struct md5_ctx ctx;
13063 unsigned char checksum_before[16], checksum_after[16];
13064 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13065
13066 md5_init_ctx (&ctx);
13067 fold_checksum_tree (expr, &ctx, &ht);
13068 md5_finish_ctx (&ctx, checksum_before);
13069 ht.empty ();
13070
13071 ret = fold_1 (expr);
13072
13073 md5_init_ctx (&ctx);
13074 fold_checksum_tree (expr, &ctx, &ht);
13075 md5_finish_ctx (&ctx, checksum_after);
13076
13077 if (memcmp (checksum_before, checksum_after, 16))
13078 fold_check_failed (expr, ret);
13079
13080 return ret;
13081 }
13082
13083 void
13084 print_fold_checksum (const_tree expr)
13085 {
13086 struct md5_ctx ctx;
13087 unsigned char checksum[16], cnt;
13088 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13089
13090 md5_init_ctx (&ctx);
13091 fold_checksum_tree (expr, &ctx, &ht);
13092 md5_finish_ctx (&ctx, checksum);
13093 for (cnt = 0; cnt < 16; ++cnt)
13094 fprintf (stderr, "%02x", checksum[cnt]);
13095 putc ('\n', stderr);
13096 }
13097
13098 static void
13099 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13100 {
13101 internal_error ("fold check: original tree changed by fold");
13102 }
13103
13104 static void
13105 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13106 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13107 {
13108 const tree_node **slot;
13109 enum tree_code code;
13110 union tree_node buf;
13111 int i, len;
13112
13113 recursive_label:
13114 if (expr == NULL)
13115 return;
13116 slot = ht->find_slot (expr, INSERT);
13117 if (*slot != NULL)
13118 return;
13119 *slot = expr;
13120 code = TREE_CODE (expr);
13121 if (TREE_CODE_CLASS (code) == tcc_declaration
13122 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13123 {
13124 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13125 memcpy ((char *) &buf, expr, tree_size (expr));
13126 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13127 buf.decl_with_vis.symtab_node = NULL;
13128 expr = (tree) &buf;
13129 }
13130 else if (TREE_CODE_CLASS (code) == tcc_type
13131 && (TYPE_POINTER_TO (expr)
13132 || TYPE_REFERENCE_TO (expr)
13133 || TYPE_CACHED_VALUES_P (expr)
13134 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13135 || TYPE_NEXT_VARIANT (expr)))
13136 {
13137 /* Allow these fields to be modified. */
13138 tree tmp;
13139 memcpy ((char *) &buf, expr, tree_size (expr));
13140 expr = tmp = (tree) &buf;
13141 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13142 TYPE_POINTER_TO (tmp) = NULL;
13143 TYPE_REFERENCE_TO (tmp) = NULL;
13144 TYPE_NEXT_VARIANT (tmp) = NULL;
13145 if (TYPE_CACHED_VALUES_P (tmp))
13146 {
13147 TYPE_CACHED_VALUES_P (tmp) = 0;
13148 TYPE_CACHED_VALUES (tmp) = NULL;
13149 }
13150 }
13151 md5_process_bytes (expr, tree_size (expr), ctx);
13152 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13153 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13154 if (TREE_CODE_CLASS (code) != tcc_type
13155 && TREE_CODE_CLASS (code) != tcc_declaration
13156 && code != TREE_LIST
13157 && code != SSA_NAME
13158 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13159 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13160 switch (TREE_CODE_CLASS (code))
13161 {
13162 case tcc_constant:
13163 switch (code)
13164 {
13165 case STRING_CST:
13166 md5_process_bytes (TREE_STRING_POINTER (expr),
13167 TREE_STRING_LENGTH (expr), ctx);
13168 break;
13169 case COMPLEX_CST:
13170 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13171 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13172 break;
13173 case VECTOR_CST:
13174 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
13175 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
13176 break;
13177 default:
13178 break;
13179 }
13180 break;
13181 case tcc_exceptional:
13182 switch (code)
13183 {
13184 case TREE_LIST:
13185 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13186 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13187 expr = TREE_CHAIN (expr);
13188 goto recursive_label;
13189 break;
13190 case TREE_VEC:
13191 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13192 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13193 break;
13194 default:
13195 break;
13196 }
13197 break;
13198 case tcc_expression:
13199 case tcc_reference:
13200 case tcc_comparison:
13201 case tcc_unary:
13202 case tcc_binary:
13203 case tcc_statement:
13204 case tcc_vl_exp:
13205 len = TREE_OPERAND_LENGTH (expr);
13206 for (i = 0; i < len; ++i)
13207 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13208 break;
13209 case tcc_declaration:
13210 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13211 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13212 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13213 {
13214 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13215 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13216 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13217 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13218 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13219 }
13220
13221 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13222 {
13223 if (TREE_CODE (expr) == FUNCTION_DECL)
13224 {
13225 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13226 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13227 }
13228 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13229 }
13230 break;
13231 case tcc_type:
13232 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13233 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13234 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13235 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13236 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13237 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13238 if (INTEGRAL_TYPE_P (expr)
13239 || SCALAR_FLOAT_TYPE_P (expr))
13240 {
13241 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13242 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13243 }
13244 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13245 if (TREE_CODE (expr) == RECORD_TYPE
13246 || TREE_CODE (expr) == UNION_TYPE
13247 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13248 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13249 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13250 break;
13251 default:
13252 break;
13253 }
13254 }
13255
13256 /* Helper function for outputting the checksum of a tree T. When
13257 debugging with gdb, you can "define mynext" to be "next" followed
13258 by "call debug_fold_checksum (op0)", then just trace down till the
13259 outputs differ. */
13260
13261 DEBUG_FUNCTION void
13262 debug_fold_checksum (const_tree t)
13263 {
13264 int i;
13265 unsigned char checksum[16];
13266 struct md5_ctx ctx;
13267 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13268
13269 md5_init_ctx (&ctx);
13270 fold_checksum_tree (t, &ctx, &ht);
13271 md5_finish_ctx (&ctx, checksum);
13272 ht.empty ();
13273
13274 for (i = 0; i < 16; i++)
13275 fprintf (stderr, "%d ", checksum[i]);
13276
13277 fprintf (stderr, "\n");
13278 }
13279
13280 #endif
13281
13282 /* Fold a unary tree expression with code CODE of type TYPE with an
13283 operand OP0. LOC is the location of the resulting expression.
13284 Return a folded expression if successful. Otherwise, return a tree
13285 expression with code CODE of type TYPE with an operand OP0. */
13286
13287 tree
13288 fold_build1_stat_loc (location_t loc,
13289 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13290 {
13291 tree tem;
13292 #ifdef ENABLE_FOLD_CHECKING
13293 unsigned char checksum_before[16], checksum_after[16];
13294 struct md5_ctx ctx;
13295 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13296
13297 md5_init_ctx (&ctx);
13298 fold_checksum_tree (op0, &ctx, &ht);
13299 md5_finish_ctx (&ctx, checksum_before);
13300 ht.empty ();
13301 #endif
13302
13303 tem = fold_unary_loc (loc, code, type, op0);
13304 if (!tem)
13305 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13306
13307 #ifdef ENABLE_FOLD_CHECKING
13308 md5_init_ctx (&ctx);
13309 fold_checksum_tree (op0, &ctx, &ht);
13310 md5_finish_ctx (&ctx, checksum_after);
13311
13312 if (memcmp (checksum_before, checksum_after, 16))
13313 fold_check_failed (op0, tem);
13314 #endif
13315 return tem;
13316 }
13317
13318 /* Fold a binary tree expression with code CODE of type TYPE with
13319 operands OP0 and OP1. LOC is the location of the resulting
13320 expression. Return a folded expression if successful. Otherwise,
13321 return a tree expression with code CODE of type TYPE with operands
13322 OP0 and OP1. */
13323
13324 tree
13325 fold_build2_stat_loc (location_t loc,
13326 enum tree_code code, tree type, tree op0, tree op1
13327 MEM_STAT_DECL)
13328 {
13329 tree tem;
13330 #ifdef ENABLE_FOLD_CHECKING
13331 unsigned char checksum_before_op0[16],
13332 checksum_before_op1[16],
13333 checksum_after_op0[16],
13334 checksum_after_op1[16];
13335 struct md5_ctx ctx;
13336 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13337
13338 md5_init_ctx (&ctx);
13339 fold_checksum_tree (op0, &ctx, &ht);
13340 md5_finish_ctx (&ctx, checksum_before_op0);
13341 ht.empty ();
13342
13343 md5_init_ctx (&ctx);
13344 fold_checksum_tree (op1, &ctx, &ht);
13345 md5_finish_ctx (&ctx, checksum_before_op1);
13346 ht.empty ();
13347 #endif
13348
13349 tem = fold_binary_loc (loc, code, type, op0, op1);
13350 if (!tem)
13351 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13352
13353 #ifdef ENABLE_FOLD_CHECKING
13354 md5_init_ctx (&ctx);
13355 fold_checksum_tree (op0, &ctx, &ht);
13356 md5_finish_ctx (&ctx, checksum_after_op0);
13357 ht.empty ();
13358
13359 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13360 fold_check_failed (op0, tem);
13361
13362 md5_init_ctx (&ctx);
13363 fold_checksum_tree (op1, &ctx, &ht);
13364 md5_finish_ctx (&ctx, checksum_after_op1);
13365
13366 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13367 fold_check_failed (op1, tem);
13368 #endif
13369 return tem;
13370 }
13371
13372 /* Fold a ternary tree expression with code CODE of type TYPE with
13373 operands OP0, OP1, and OP2. Return a folded expression if
13374 successful. Otherwise, return a tree expression with code CODE of
13375 type TYPE with operands OP0, OP1, and OP2. */
13376
13377 tree
13378 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13379 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13380 {
13381 tree tem;
13382 #ifdef ENABLE_FOLD_CHECKING
13383 unsigned char checksum_before_op0[16],
13384 checksum_before_op1[16],
13385 checksum_before_op2[16],
13386 checksum_after_op0[16],
13387 checksum_after_op1[16],
13388 checksum_after_op2[16];
13389 struct md5_ctx ctx;
13390 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13391
13392 md5_init_ctx (&ctx);
13393 fold_checksum_tree (op0, &ctx, &ht);
13394 md5_finish_ctx (&ctx, checksum_before_op0);
13395 ht.empty ();
13396
13397 md5_init_ctx (&ctx);
13398 fold_checksum_tree (op1, &ctx, &ht);
13399 md5_finish_ctx (&ctx, checksum_before_op1);
13400 ht.empty ();
13401
13402 md5_init_ctx (&ctx);
13403 fold_checksum_tree (op2, &ctx, &ht);
13404 md5_finish_ctx (&ctx, checksum_before_op2);
13405 ht.empty ();
13406 #endif
13407
13408 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13409 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13410 if (!tem)
13411 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13412
13413 #ifdef ENABLE_FOLD_CHECKING
13414 md5_init_ctx (&ctx);
13415 fold_checksum_tree (op0, &ctx, &ht);
13416 md5_finish_ctx (&ctx, checksum_after_op0);
13417 ht.empty ();
13418
13419 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13420 fold_check_failed (op0, tem);
13421
13422 md5_init_ctx (&ctx);
13423 fold_checksum_tree (op1, &ctx, &ht);
13424 md5_finish_ctx (&ctx, checksum_after_op1);
13425 ht.empty ();
13426
13427 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13428 fold_check_failed (op1, tem);
13429
13430 md5_init_ctx (&ctx);
13431 fold_checksum_tree (op2, &ctx, &ht);
13432 md5_finish_ctx (&ctx, checksum_after_op2);
13433
13434 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13435 fold_check_failed (op2, tem);
13436 #endif
13437 return tem;
13438 }
13439
13440	/* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
13441	   arguments in ARGARRAY, and a null static chain.
13442 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13443 of type TYPE from the given operands as constructed by build_call_array. */
13444
13445 tree
13446 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13447 int nargs, tree *argarray)
13448 {
13449 tree tem;
13450 #ifdef ENABLE_FOLD_CHECKING
13451 unsigned char checksum_before_fn[16],
13452 checksum_before_arglist[16],
13453 checksum_after_fn[16],
13454 checksum_after_arglist[16];
13455 struct md5_ctx ctx;
13456 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13457 int i;
13458
13459 md5_init_ctx (&ctx);
13460 fold_checksum_tree (fn, &ctx, &ht);
13461 md5_finish_ctx (&ctx, checksum_before_fn);
13462 ht.empty ();
13463
13464 md5_init_ctx (&ctx);
13465 for (i = 0; i < nargs; i++)
13466 fold_checksum_tree (argarray[i], &ctx, &ht);
13467 md5_finish_ctx (&ctx, checksum_before_arglist);
13468 ht.empty ();
13469 #endif
13470
13471 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13472 if (!tem)
13473 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13474
13475 #ifdef ENABLE_FOLD_CHECKING
13476 md5_init_ctx (&ctx);
13477 fold_checksum_tree (fn, &ctx, &ht);
13478 md5_finish_ctx (&ctx, checksum_after_fn);
13479 ht.empty ();
13480
13481 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13482 fold_check_failed (fn, tem);
13483
13484 md5_init_ctx (&ctx);
13485 for (i = 0; i < nargs; i++)
13486 fold_checksum_tree (argarray[i], &ctx, &ht);
13487 md5_finish_ctx (&ctx, checksum_after_arglist);
13488
13489 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13490 fold_check_failed (NULL_TREE, tem);
13491 #endif
13492 return tem;
13493 }
13494
13495 /* Perform constant folding and related simplification of initializer
13496 expression EXPR. These behave identically to "fold_buildN" but ignore
13497 potential run-time traps and exceptions that fold must preserve. */
13498
13499 #define START_FOLD_INIT \
13500 int saved_signaling_nans = flag_signaling_nans;\
13501 int saved_trapping_math = flag_trapping_math;\
13502 int saved_rounding_math = flag_rounding_math;\
13503 int saved_trapv = flag_trapv;\
13504 int saved_folding_initializer = folding_initializer;\
13505 flag_signaling_nans = 0;\
13506 flag_trapping_math = 0;\
13507 flag_rounding_math = 0;\
13508 flag_trapv = 0;\
13509 folding_initializer = 1;
13510
13511 #define END_FOLD_INIT \
13512 flag_signaling_nans = saved_signaling_nans;\
13513 flag_trapping_math = saved_trapping_math;\
13514 flag_rounding_math = saved_rounding_math;\
13515 flag_trapv = saved_trapv;\
13516 folding_initializer = saved_folding_initializer;
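
/* For example, the initializer of "static double d = 1.0 / 3.0;" must
   fold to a constant even when -frounding-math or -ftrapping-math is
   in effect, so these flags are saved and cleared around the
   fold_buildN call.  */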
13517
13518 tree
13519 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13520 tree type, tree op)
13521 {
13522 tree result;
13523 START_FOLD_INIT;
13524
13525 result = fold_build1_loc (loc, code, type, op);
13526
13527 END_FOLD_INIT;
13528 return result;
13529 }
13530
13531 tree
13532 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13533 tree type, tree op0, tree op1)
13534 {
13535 tree result;
13536 START_FOLD_INIT;
13537
13538 result = fold_build2_loc (loc, code, type, op0, op1);
13539
13540 END_FOLD_INIT;
13541 return result;
13542 }
13543
13544 tree
13545 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13546 int nargs, tree *argarray)
13547 {
13548 tree result;
13549 START_FOLD_INIT;
13550
13551 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13552
13553 END_FOLD_INIT;
13554 return result;
13555 }
13556
13557 #undef START_FOLD_INIT
13558 #undef END_FOLD_INIT
13559
13560	/* Determine if the first argument is a multiple of the second argument.
13561	   Return 0 if it is not, or if we cannot easily determine it to be.
13562
13563 An example of the sort of thing we care about (at this point; this routine
13564 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13565 fold cases do now) is discovering that
13566
13567 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13568
13569 is a multiple of
13570
13571 SAVE_EXPR (J * 8)
13572
13573 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13574
13575 This code also handles discovering that
13576
13577 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13578
13579 is a multiple of 8 so we don't have to worry about dealing with a
13580 possible remainder.
13581
13582 Note that we *look* inside a SAVE_EXPR only to determine how it was
13583 calculated; it is not safe for fold to do much of anything else with the
13584 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13585 at run time. For example, the latter example above *cannot* be implemented
13586 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13587 evaluation time of the original SAVE_EXPR is not necessarily the same at
13588 the time the new expression is evaluated. The only optimization of this
13589 sort that would be valid is changing
13590
13591 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13592
13593 divided by 8 to
13594
13595 SAVE_EXPR (I) * SAVE_EXPR (J)
13596
13597 (where the same SAVE_EXPR (J) is used in the original and the
13598 transformed version). */
13599
13600 int
13601 multiple_of_p (tree type, const_tree top, const_tree bottom)
13602 {
13603 if (operand_equal_p (top, bottom, 0))
13604 return 1;
13605
13606 if (TREE_CODE (type) != INTEGER_TYPE)
13607 return 0;
13608
13609 switch (TREE_CODE (top))
13610 {
13611 case BIT_AND_EXPR:
13612       /* If BOTTOM is a power of two and either operand of the AND is a
13613          multiple of BOTTOM, then TOP is a multiple of BOTTOM too.  */
13614 if (!integer_pow2p (bottom))
13615 return 0;
13616 /* FALLTHRU */
13617
13618 case MULT_EXPR:
13619 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13620 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13621
13622 case PLUS_EXPR:
13623 case MINUS_EXPR:
13624 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13625 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13626
13627 case LSHIFT_EXPR:
13628 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13629 {
13630 tree op1, t1;
13631
13632 op1 = TREE_OPERAND (top, 1);
13633 /* const_binop may not detect overflow correctly,
13634 so check for it explicitly here. */
13635 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
13636 && 0 != (t1 = fold_convert (type,
13637 const_binop (LSHIFT_EXPR,
13638 size_one_node,
13639 op1)))
13640 && !TREE_OVERFLOW (t1))
13641 return multiple_of_p (type, t1, bottom);
13642 }
13643 return 0;
13644
13645 case NOP_EXPR:
13646 /* Can't handle conversions from non-integral or wider integral type. */
13647 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13648 || (TYPE_PRECISION (type)
13649 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13650 return 0;
13651
13652       /* ... fall through ...  */
13653
13654 case SAVE_EXPR:
13655 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13656
13657 case COND_EXPR:
13658 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13659 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13660
13661 case INTEGER_CST:
13662 if (TREE_CODE (bottom) != INTEGER_CST
13663 || integer_zerop (bottom)
13664 || (TYPE_UNSIGNED (type)
13665 && (tree_int_cst_sgn (top) < 0
13666 || tree_int_cst_sgn (bottom) < 0)))
13667 return 0;
13668 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13669 SIGNED);
13670
13671 default:
13672 return 0;
13673 }
13674 }
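
/* Illustrative sketch (a hypothetical helper, unused by GCC itself):
   test whether the size expression N * 8 is a multiple of 4, e.g. when
   checking that a computed object size stays suitably aligned.  */

static bool ATTRIBUTE_UNUSED
example_size_multiple_of_4 (tree n)
{
  tree size = fold_build2 (MULT_EXPR, sizetype,
			   fold_convert (sizetype, n),
			   build_int_cst (sizetype, 8));
  /* The MULT_EXPR case above only needs one factor to be a multiple.  */
  return multiple_of_p (sizetype, size, build_int_cst (sizetype, 4)) != 0;
}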
13675
13676 /* Return true if CODE or TYPE is known to be non-negative. */
13677
13678 static bool
13679 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13680 {
13681 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13682 && truth_value_p (code))
13683 	/* Truth values evaluate to 0 or 1, which is nonnegative unless we
13684            have a signed:1 type (where the values are -1 and 0).  */
13685 return true;
13686 return false;
13687 }
13688
13689 /* Return true if (CODE OP0) is known to be non-negative. If the return
13690 value is based on the assumption that signed overflow is undefined,
13691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13692 *STRICT_OVERFLOW_P. */
13693
13694 bool
13695 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13696 bool *strict_overflow_p)
13697 {
13698 if (TYPE_UNSIGNED (type))
13699 return true;
13700
13701 switch (code)
13702 {
13703 case ABS_EXPR:
13704       /* We can't return true when wrapping overflow is in effect,
13705          because then ABS_EXPR<INT_MIN> = INT_MIN.  */
13706 if (!ANY_INTEGRAL_TYPE_P (type))
13707 return true;
13708 if (TYPE_OVERFLOW_UNDEFINED (type))
13709 {
13710 *strict_overflow_p = true;
13711 return true;
13712 }
13713 break;
13714
13715 case NON_LVALUE_EXPR:
13716 case FLOAT_EXPR:
13717 case FIX_TRUNC_EXPR:
13718 return tree_expr_nonnegative_warnv_p (op0,
13719 strict_overflow_p);
13720
13721 CASE_CONVERT:
13722 {
13723 tree inner_type = TREE_TYPE (op0);
13724 tree outer_type = type;
13725
13726 if (TREE_CODE (outer_type) == REAL_TYPE)
13727 {
13728 if (TREE_CODE (inner_type) == REAL_TYPE)
13729 return tree_expr_nonnegative_warnv_p (op0,
13730 strict_overflow_p);
13731 if (INTEGRAL_TYPE_P (inner_type))
13732 {
13733 if (TYPE_UNSIGNED (inner_type))
13734 return true;
13735 return tree_expr_nonnegative_warnv_p (op0,
13736 strict_overflow_p);
13737 }
13738 }
13739 else if (INTEGRAL_TYPE_P (outer_type))
13740 {
13741 if (TREE_CODE (inner_type) == REAL_TYPE)
13742 return tree_expr_nonnegative_warnv_p (op0,
13743 strict_overflow_p);
13744 if (INTEGRAL_TYPE_P (inner_type))
13745 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13746 && TYPE_UNSIGNED (inner_type);
13747 }
13748 }
13749 break;
13750
13751 default:
13752 return tree_simple_nonnegative_warnv_p (code, type);
13753 }
13754
13755   /* We don't know the sign of the result, so be conservative and return false.  */
13756 return false;
13757 }
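
/* For example, (int) (unsigned short) x is known non-negative by the
   CASE_CONVERT arm above: the inner type is unsigned and strictly
   narrower than the signed result type, so the zero-extended value can
   never set the sign bit.  */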
13758
13759 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13760 value is based on the assumption that signed overflow is undefined,
13761 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13762 *STRICT_OVERFLOW_P. */
13763
13764 bool
13765 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13766 tree op1, bool *strict_overflow_p)
13767 {
13768 if (TYPE_UNSIGNED (type))
13769 return true;
13770
13771 switch (code)
13772 {
13773 case POINTER_PLUS_EXPR:
13774 case PLUS_EXPR:
13775 if (FLOAT_TYPE_P (type))
13776 return (tree_expr_nonnegative_warnv_p (op0,
13777 strict_overflow_p)
13778 && tree_expr_nonnegative_warnv_p (op1,
13779 strict_overflow_p));
13780
13781 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13782 both unsigned and at least 2 bits shorter than the result. */
13783 if (TREE_CODE (type) == INTEGER_TYPE
13784 && TREE_CODE (op0) == NOP_EXPR
13785 && TREE_CODE (op1) == NOP_EXPR)
13786 {
13787 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13788 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13789 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13790 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13791 {
13792 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13793 TYPE_PRECISION (inner2)) + 1;
13794 return prec < TYPE_PRECISION (type);
13795 }
13796 }
13797 break;
13798
13799 case MULT_EXPR:
13800 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13801 {
13802 /* x * x is always non-negative for floating point x
13803 or without overflow. */
13804 if (operand_equal_p (op0, op1, 0)
13805 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
13806 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
13807 {
13808 if (ANY_INTEGRAL_TYPE_P (type)
13809 && TYPE_OVERFLOW_UNDEFINED (type))
13810 *strict_overflow_p = true;
13811 return true;
13812 }
13813 }
13814
13815       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
13816 	 unsigned and the sum of their precisions is less than the result's.  */
13817 if (TREE_CODE (type) == INTEGER_TYPE
13818 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13819 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13820 {
13821 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13822 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13823 : TREE_TYPE (op0);
13824 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13825 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13826 : TREE_TYPE (op1);
13827
13828 bool unsigned0 = TYPE_UNSIGNED (inner0);
13829 bool unsigned1 = TYPE_UNSIGNED (inner1);
13830
13831 if (TREE_CODE (op0) == INTEGER_CST)
13832 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13833
13834 if (TREE_CODE (op1) == INTEGER_CST)
13835 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13836
13837 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13838 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13839 {
13840 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13841 ? tree_int_cst_min_precision (op0, UNSIGNED)
13842 : TYPE_PRECISION (inner0);
13843
13844 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13845 ? tree_int_cst_min_precision (op1, UNSIGNED)
13846 : TYPE_PRECISION (inner1);
13847
13848 return precision0 + precision1 < TYPE_PRECISION (type);
13849 }
13850 }
13851 return false;
13852
13853 case BIT_AND_EXPR:
13854 case MAX_EXPR:
13855 return (tree_expr_nonnegative_warnv_p (op0,
13856 strict_overflow_p)
13857 || tree_expr_nonnegative_warnv_p (op1,
13858 strict_overflow_p));
13859
13860 case BIT_IOR_EXPR:
13861 case BIT_XOR_EXPR:
13862 case MIN_EXPR:
13863 case RDIV_EXPR:
13864 case TRUNC_DIV_EXPR:
13865 case CEIL_DIV_EXPR:
13866 case FLOOR_DIV_EXPR:
13867 case ROUND_DIV_EXPR:
13868 return (tree_expr_nonnegative_warnv_p (op0,
13869 strict_overflow_p)
13870 && tree_expr_nonnegative_warnv_p (op1,
13871 strict_overflow_p));
13872
13873 case TRUNC_MOD_EXPR:
13874 case CEIL_MOD_EXPR:
13875 case FLOOR_MOD_EXPR:
13876 case ROUND_MOD_EXPR:
13877 return tree_expr_nonnegative_warnv_p (op0,
13878 strict_overflow_p);
13879 default:
13880 return tree_simple_nonnegative_warnv_p (code, type);
13881 }
13882
13883   /* We don't know the sign of the result, so be conservative and return false.  */
13884 return false;
13885 }
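
/* A concrete instance of the zero-extension reasoning in the MULT_EXPR
   case above: on a target with 8-bit unsigned char and 32-bit int,
   (int) a * (int) b for unsigned char a and b needs at most 8 + 8 = 16
   value bits, which stays below the sign bit of the result, so the
   product is known non-negative.  */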
13886
13887 /* Return true if T is known to be non-negative. If the return
13888 value is based on the assumption that signed overflow is undefined,
13889 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13890 *STRICT_OVERFLOW_P. */
13891
13892 bool
13893 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13894 {
13895 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13896 return true;
13897
13898 switch (TREE_CODE (t))
13899 {
13900 case INTEGER_CST:
13901 return tree_int_cst_sgn (t) >= 0;
13902
13903 case REAL_CST:
13904 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13905
13906 case FIXED_CST:
13907 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13908
13909 case COND_EXPR:
13910 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13911 strict_overflow_p)
13912 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13913 strict_overflow_p));
13914 default:
13915 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13916 TREE_TYPE (t));
13917 }
13918 /* We don't know sign of `t', so be conservative and return false. */
13919 return false;
13920 }
13921
13922 /* Return true if T is known to be non-negative. If the return
13923 value is based on the assumption that signed overflow is undefined,
13924 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13925 *STRICT_OVERFLOW_P. */
13926
13927 bool
13928 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
13929 tree arg0, tree arg1, bool *strict_overflow_p)
13930 {
13931 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13932 switch (DECL_FUNCTION_CODE (fndecl))
13933 {
13934 CASE_FLT_FN (BUILT_IN_ACOS):
13935 CASE_FLT_FN (BUILT_IN_ACOSH):
13936 CASE_FLT_FN (BUILT_IN_CABS):
13937 CASE_FLT_FN (BUILT_IN_COSH):
13938 CASE_FLT_FN (BUILT_IN_ERFC):
13939 CASE_FLT_FN (BUILT_IN_EXP):
13940 CASE_FLT_FN (BUILT_IN_EXP10):
13941 CASE_FLT_FN (BUILT_IN_EXP2):
13942 CASE_FLT_FN (BUILT_IN_FABS):
13943 CASE_FLT_FN (BUILT_IN_FDIM):
13944 CASE_FLT_FN (BUILT_IN_HYPOT):
13945 CASE_FLT_FN (BUILT_IN_POW10):
13946 CASE_INT_FN (BUILT_IN_FFS):
13947 CASE_INT_FN (BUILT_IN_PARITY):
13948 CASE_INT_FN (BUILT_IN_POPCOUNT):
13949 CASE_INT_FN (BUILT_IN_CLZ):
13950 CASE_INT_FN (BUILT_IN_CLRSB):
13951 case BUILT_IN_BSWAP32:
13952 case BUILT_IN_BSWAP64:
13953 /* Always true. */
13954 return true;
13955
13956 CASE_FLT_FN (BUILT_IN_SQRT):
13957 /* sqrt(-0.0) is -0.0. */
13958 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13959 return true;
13960 return tree_expr_nonnegative_warnv_p (arg0,
13961 strict_overflow_p);
13962
13963 CASE_FLT_FN (BUILT_IN_ASINH):
13964 CASE_FLT_FN (BUILT_IN_ATAN):
13965 CASE_FLT_FN (BUILT_IN_ATANH):
13966 CASE_FLT_FN (BUILT_IN_CBRT):
13967 CASE_FLT_FN (BUILT_IN_CEIL):
13968 CASE_FLT_FN (BUILT_IN_ERF):
13969 CASE_FLT_FN (BUILT_IN_EXPM1):
13970 CASE_FLT_FN (BUILT_IN_FLOOR):
13971 CASE_FLT_FN (BUILT_IN_FMOD):
13972 CASE_FLT_FN (BUILT_IN_FREXP):
13973 CASE_FLT_FN (BUILT_IN_ICEIL):
13974 CASE_FLT_FN (BUILT_IN_IFLOOR):
13975 CASE_FLT_FN (BUILT_IN_IRINT):
13976 CASE_FLT_FN (BUILT_IN_IROUND):
13977 CASE_FLT_FN (BUILT_IN_LCEIL):
13978 CASE_FLT_FN (BUILT_IN_LDEXP):
13979 CASE_FLT_FN (BUILT_IN_LFLOOR):
13980 CASE_FLT_FN (BUILT_IN_LLCEIL):
13981 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13982 CASE_FLT_FN (BUILT_IN_LLRINT):
13983 CASE_FLT_FN (BUILT_IN_LLROUND):
13984 CASE_FLT_FN (BUILT_IN_LRINT):
13985 CASE_FLT_FN (BUILT_IN_LROUND):
13986 CASE_FLT_FN (BUILT_IN_MODF):
13987 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13988 CASE_FLT_FN (BUILT_IN_RINT):
13989 CASE_FLT_FN (BUILT_IN_ROUND):
13990 CASE_FLT_FN (BUILT_IN_SCALB):
13991 CASE_FLT_FN (BUILT_IN_SCALBLN):
13992 CASE_FLT_FN (BUILT_IN_SCALBN):
13993 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13994 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13995 CASE_FLT_FN (BUILT_IN_SINH):
13996 CASE_FLT_FN (BUILT_IN_TANH):
13997 CASE_FLT_FN (BUILT_IN_TRUNC):
13998 /* True if the 1st argument is nonnegative. */
13999 return tree_expr_nonnegative_warnv_p (arg0,
14000 strict_overflow_p);
14001
14002 CASE_FLT_FN (BUILT_IN_FMAX):
14003       /* True if either the 1st or the 2nd argument is nonnegative.  */
14004 return (tree_expr_nonnegative_warnv_p (arg0,
14005 strict_overflow_p)
14006 || (tree_expr_nonnegative_warnv_p (arg1,
14007 strict_overflow_p)));
14008
14009 CASE_FLT_FN (BUILT_IN_FMIN):
14010       /* True if both the 1st and the 2nd arguments are nonnegative.  */
14011 return (tree_expr_nonnegative_warnv_p (arg0,
14012 strict_overflow_p)
14013 && (tree_expr_nonnegative_warnv_p (arg1,
14014 strict_overflow_p)));
14015
14016 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14017 /* True if the 2nd argument is nonnegative. */
14018 return tree_expr_nonnegative_warnv_p (arg1,
14019 strict_overflow_p);
14020
14021 CASE_FLT_FN (BUILT_IN_POWI):
14022 /* True if the 1st argument is nonnegative or the second
14023 argument is an even integer. */
14024 if (TREE_CODE (arg1) == INTEGER_CST
14025 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14026 return true;
14027 return tree_expr_nonnegative_warnv_p (arg0,
14028 strict_overflow_p);
14029
14030 CASE_FLT_FN (BUILT_IN_POW):
14031 /* True if the 1st argument is nonnegative or the second
14032 	 argument is an even integer-valued real.  */
14033 if (TREE_CODE (arg1) == REAL_CST)
14034 {
14035 REAL_VALUE_TYPE c;
14036 HOST_WIDE_INT n;
14037
14038 c = TREE_REAL_CST (arg1);
14039 n = real_to_integer (&c);
14040 if ((n & 1) == 0)
14041 {
14042 REAL_VALUE_TYPE cint;
14043 real_from_integer (&cint, VOIDmode, n, SIGNED);
14044 if (real_identical (&c, &cint))
14045 return true;
14046 }
14047 }
14048 return tree_expr_nonnegative_warnv_p (arg0,
14049 strict_overflow_p);
14050
14051 default:
14052 break;
14053 }
14054 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14055 type);
14056 }
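
/* Example of the BUILT_IN_POW case above: pow (x, 2.0) is known
   non-negative for any x because the exponent is an even
   integer-valued REAL_CST, whereas pow (x, 3.0) falls back to asking
   whether x itself is non-negative.  */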
14057
14058 /* Return true if T is known to be non-negative. If the return
14059 value is based on the assumption that signed overflow is undefined,
14060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14061 *STRICT_OVERFLOW_P. */
14062
14063 static bool
14064 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14065 {
14066 enum tree_code code = TREE_CODE (t);
14067 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14068 return true;
14069
14070 switch (code)
14071 {
14072 case TARGET_EXPR:
14073 {
14074 tree temp = TARGET_EXPR_SLOT (t);
14075 t = TARGET_EXPR_INITIAL (t);
14076
14077 /* If the initializer is non-void, then it's a normal expression
14078 that will be assigned to the slot. */
14079 if (!VOID_TYPE_P (t))
14080 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14081
14082 /* Otherwise, the initializer sets the slot in some way. One common
14083 way is an assignment statement at the end of the initializer. */
14084 while (1)
14085 {
14086 if (TREE_CODE (t) == BIND_EXPR)
14087 t = expr_last (BIND_EXPR_BODY (t));
14088 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14089 || TREE_CODE (t) == TRY_CATCH_EXPR)
14090 t = expr_last (TREE_OPERAND (t, 0));
14091 else if (TREE_CODE (t) == STATEMENT_LIST)
14092 t = expr_last (t);
14093 else
14094 break;
14095 }
14096 if (TREE_CODE (t) == MODIFY_EXPR
14097 && TREE_OPERAND (t, 0) == temp)
14098 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14099 strict_overflow_p);
14100
14101 return false;
14102 }
14103
14104 case CALL_EXPR:
14105 {
14106 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14107 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14108
14109 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14110 get_callee_fndecl (t),
14111 arg0,
14112 arg1,
14113 strict_overflow_p);
14114 }
14115 case COMPOUND_EXPR:
14116 case MODIFY_EXPR:
14117 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14118 strict_overflow_p);
14119 case BIND_EXPR:
14120 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14121 strict_overflow_p);
14122 case SAVE_EXPR:
14123 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14124 strict_overflow_p);
14125
14126 default:
14127 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14128 TREE_TYPE (t));
14129 }
14130
14131 /* We don't know sign of `t', so be conservative and return false. */
14132 return false;
14133 }
14134
14135 /* Return true if T is known to be non-negative. If the return
14136 value is based on the assumption that signed overflow is undefined,
14137 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14138 *STRICT_OVERFLOW_P. */
14139
14140 bool
14141 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14142 {
14143 enum tree_code code;
14144 if (t == error_mark_node)
14145 return false;
14146
14147 code = TREE_CODE (t);
14148 switch (TREE_CODE_CLASS (code))
14149 {
14150 case tcc_binary:
14151 case tcc_comparison:
14152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14153 TREE_TYPE (t),
14154 TREE_OPERAND (t, 0),
14155 TREE_OPERAND (t, 1),
14156 strict_overflow_p);
14157
14158 case tcc_unary:
14159 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14160 TREE_TYPE (t),
14161 TREE_OPERAND (t, 0),
14162 strict_overflow_p);
14163
14164 case tcc_constant:
14165 case tcc_declaration:
14166 case tcc_reference:
14167 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14168
14169 default:
14170 break;
14171 }
14172
14173 switch (code)
14174 {
14175 case TRUTH_AND_EXPR:
14176 case TRUTH_OR_EXPR:
14177 case TRUTH_XOR_EXPR:
14178 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14179 TREE_TYPE (t),
14180 TREE_OPERAND (t, 0),
14181 TREE_OPERAND (t, 1),
14182 strict_overflow_p);
14183 case TRUTH_NOT_EXPR:
14184 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14185 TREE_TYPE (t),
14186 TREE_OPERAND (t, 0),
14187 strict_overflow_p);
14188
14189 case COND_EXPR:
14190 case CONSTRUCTOR:
14191 case OBJ_TYPE_REF:
14192 case ASSERT_EXPR:
14193 case ADDR_EXPR:
14194 case WITH_SIZE_EXPR:
14195 case SSA_NAME:
14196 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14197
14198 default:
14199 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14200 }
14201 }
14202
14203 /* Return true if `t' is known to be non-negative. Handle warnings
14204 about undefined signed overflow. */
14205
14206 bool
14207 tree_expr_nonnegative_p (tree t)
14208 {
14209 bool ret, strict_overflow_p;
14210
14211 strict_overflow_p = false;
14212 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14213 if (strict_overflow_p)
14214 fold_overflow_warning (("assuming signed overflow does not occur when "
14215 "determining that expression is always "
14216 "non-negative"),
14217 WARN_STRICT_OVERFLOW_MISC);
14218 return ret;
14219 }
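
/* A hypothetical client sketch, mirroring what the folder does for
   ABS_EXPR: drop an ABS when the operand is provably non-negative.  */

static tree ATTRIBUTE_UNUSED
example_strip_abs (location_t loc, tree arg)
{
  if (tree_expr_nonnegative_p (arg))
    /* abs (x) == x when x >= 0, so the ABS_EXPR is redundant.  */
    return arg;
  return fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg), arg);
}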
14220
14221
14222 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14223    For floating point we further ensure that the value is not denormal.
14224    Similar logic is present in nonzero_address_p in rtlanal.c.
14225
14226 If the return value is based on the assumption that signed overflow
14227 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14228 change *STRICT_OVERFLOW_P. */
14229
14230 bool
14231 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14232 bool *strict_overflow_p)
14233 {
14234 switch (code)
14235 {
14236 case ABS_EXPR:
14237 return tree_expr_nonzero_warnv_p (op0,
14238 strict_overflow_p);
14239
14240 case NOP_EXPR:
14241 {
14242 tree inner_type = TREE_TYPE (op0);
14243 tree outer_type = type;
14244
14245 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14246 && tree_expr_nonzero_warnv_p (op0,
14247 strict_overflow_p));
14248 }
14249 break;
14250
14251 case NON_LVALUE_EXPR:
14252 return tree_expr_nonzero_warnv_p (op0,
14253 strict_overflow_p);
14254
14255 default:
14256 break;
14257 }
14258
14259 return false;
14260 }
14261
14262 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14263    For floating point we further ensure that the value is not denormal.
14264    Similar logic is present in nonzero_address_p in rtlanal.c.
14265
14266 If the return value is based on the assumption that signed overflow
14267 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14268 change *STRICT_OVERFLOW_P. */
14269
14270 bool
14271 tree_binary_nonzero_warnv_p (enum tree_code code,
14272 tree type,
14273 tree op0,
14274 tree op1, bool *strict_overflow_p)
14275 {
14276 bool sub_strict_overflow_p;
14277 switch (code)
14278 {
14279 case POINTER_PLUS_EXPR:
14280 case PLUS_EXPR:
14281 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14282 {
14283 	  /* In the presence of negative values it is hard
14284 	     to say anything definite.  */
14285 sub_strict_overflow_p = false;
14286 if (!tree_expr_nonnegative_warnv_p (op0,
14287 &sub_strict_overflow_p)
14288 || !tree_expr_nonnegative_warnv_p (op1,
14289 &sub_strict_overflow_p))
14290 return false;
14291 	  /* One of the operands must be positive and the other non-negative.  */
14292 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
14293 	     overflows, on a two's-complement machine the sum of two
14294 	     nonnegative numbers can never wrap around to zero.  */
14295 return (tree_expr_nonzero_warnv_p (op0,
14296 strict_overflow_p)
14297 || tree_expr_nonzero_warnv_p (op1,
14298 strict_overflow_p));
14299 }
14300 break;
14301
14302 case MULT_EXPR:
14303 if (TYPE_OVERFLOW_UNDEFINED (type))
14304 {
14305 if (tree_expr_nonzero_warnv_p (op0,
14306 strict_overflow_p)
14307 && tree_expr_nonzero_warnv_p (op1,
14308 strict_overflow_p))
14309 {
14310 *strict_overflow_p = true;
14311 return true;
14312 }
14313 }
14314 break;
14315
14316 case MIN_EXPR:
14317 sub_strict_overflow_p = false;
14318 if (tree_expr_nonzero_warnv_p (op0,
14319 &sub_strict_overflow_p)
14320 && tree_expr_nonzero_warnv_p (op1,
14321 &sub_strict_overflow_p))
14322 	{
14323 	  if (sub_strict_overflow_p)
14324 	    *strict_overflow_p = true;
	  /* Both operands are nonzero and MIN is one of them, so the
	     MIN itself is nonzero.  */
	  return true;
14325 	}
14326 break;
14327
14328 case MAX_EXPR:
14329 sub_strict_overflow_p = false;
14330 if (tree_expr_nonzero_warnv_p (op0,
14331 &sub_strict_overflow_p))
14332 {
14333 if (sub_strict_overflow_p)
14334 *strict_overflow_p = true;
14335
14336 /* When both operands are nonzero, then MAX must be too. */
14337 if (tree_expr_nonzero_warnv_p (op1,
14338 strict_overflow_p))
14339 return true;
14340
14341 /* MAX where operand 0 is positive is positive. */
14342 return tree_expr_nonnegative_warnv_p (op0,
14343 strict_overflow_p);
14344 }
14345 /* MAX where operand 1 is positive is positive. */
14346 else if (tree_expr_nonzero_warnv_p (op1,
14347 &sub_strict_overflow_p)
14348 && tree_expr_nonnegative_warnv_p (op1,
14349 &sub_strict_overflow_p))
14350 {
14351 if (sub_strict_overflow_p)
14352 *strict_overflow_p = true;
14353 return true;
14354 }
14355 break;
14356
14357 case BIT_IOR_EXPR:
14358 return (tree_expr_nonzero_warnv_p (op1,
14359 strict_overflow_p)
14360 || tree_expr_nonzero_warnv_p (op0,
14361 strict_overflow_p));
14362
14363 default:
14364 break;
14365 }
14366
14367 return false;
14368 }
14369
14370 /* Return true when T is an address and is known to be nonzero.
14371 For floating point we further ensure that T is not denormal.
14372    Similar logic is present in nonzero_address_p in rtlanal.c.
14373
14374 If the return value is based on the assumption that signed overflow
14375 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14376 change *STRICT_OVERFLOW_P. */
14377
14378 bool
14379 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14380 {
14381 bool sub_strict_overflow_p;
14382 switch (TREE_CODE (t))
14383 {
14384 case INTEGER_CST:
14385 return !integer_zerop (t);
14386
14387 case ADDR_EXPR:
14388 {
14389 tree base = TREE_OPERAND (t, 0);
14390
14391 if (!DECL_P (base))
14392 base = get_base_address (base);
14393
14394 if (!base)
14395 return false;
14396
14397 	/* For objects in the symbol table, check whether we know they are non-zero.
14398 	   Don't decide anything for variables and functions before the symtab is
14399 	   built; it is quite possible that they will be declared weak later.  */
14400 if (DECL_P (base) && decl_in_symtab_p (base))
14401 {
14402 struct symtab_node *symbol;
14403
14404 symbol = symtab_node::get_create (base);
14405 if (symbol)
14406 return symbol->nonzero_address ();
14407 else
14408 return false;
14409 }
14410
14411 /* Function local objects are never NULL. */
14412 if (DECL_P (base)
14413 && (DECL_CONTEXT (base)
14414 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14415 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
14416 return true;
14417
14418 /* Constants are never weak. */
14419 if (CONSTANT_CLASS_P (base))
14420 return true;
14421
14422 return false;
14423 }
14424
14425 case COND_EXPR:
14426 sub_strict_overflow_p = false;
14427 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14428 &sub_strict_overflow_p)
14429 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14430 &sub_strict_overflow_p))
14431 {
14432 if (sub_strict_overflow_p)
14433 *strict_overflow_p = true;
14434 return true;
14435 }
14436 break;
14437
14438 default:
14439 break;
14440 }
14441 return false;
14442 }
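
/* For instance, given "int f (void) { int x; return &x != 0; }", the
   ADDR_EXPR case above proves &x nonzero because x is an auto variable
   of the enclosing function, so the comparison folds to 1.  The
   address of a symbol that might later be declared weak is left
   alone.  */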
14443
14444 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14445 attempt to fold the expression to a constant without modifying TYPE,
14446 OP0 or OP1.
14447
14448 If the expression could be simplified to a constant, then return
14449 the constant. If the expression would not be simplified to a
14450 constant, then return NULL_TREE. */
14451
14452 tree
14453 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14454 {
14455 tree tem = fold_binary (code, type, op0, op1);
14456 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14457 }
14458
14459 /* Given the components of a unary expression CODE, TYPE and OP0,
14460 attempt to fold the expression to a constant without modifying
14461 TYPE or OP0.
14462
14463 If the expression could be simplified to a constant, then return
14464 the constant. If the expression would not be simplified to a
14465 constant, then return NULL_TREE. */
14466
14467 tree
14468 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14469 {
14470 tree tem = fold_unary (code, type, op0);
14471 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14472 }
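
/* A minimal sketch (hypothetical helper): fold "2 + 3" down to the
   INTEGER_CST 5, or get NULL_TREE back if the operands do not simplify
   to a constant.  */

static tree ATTRIBUTE_UNUSED
example_fold_constant_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}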
14473
14474 /* If EXP represents referencing an element in a constant string
14475 (either via pointer arithmetic or array indexing), return the
14476 tree representing the value accessed, otherwise return NULL. */
14477
14478 tree
14479 fold_read_from_constant_string (tree exp)
14480 {
14481 if ((TREE_CODE (exp) == INDIRECT_REF
14482 || TREE_CODE (exp) == ARRAY_REF)
14483 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14484 {
14485 tree exp1 = TREE_OPERAND (exp, 0);
14486 tree index;
14487 tree string;
14488 location_t loc = EXPR_LOCATION (exp);
14489
14490 if (TREE_CODE (exp) == INDIRECT_REF)
14491 string = string_constant (exp1, &index);
14492 else
14493 {
14494 tree low_bound = array_ref_low_bound (exp);
14495 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14496
14497 	  /* Optimize the special case of a zero lower bound.
14498 
14499 	     We convert the low_bound to sizetype to avoid some problems
14500 	     with constant folding.  (E.g. suppose the lower bound is 1,
14501 	     and its mode is QI.  Without the conversion, (ARRAY
14502 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14503 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14504 if (! integer_zerop (low_bound))
14505 index = size_diffop_loc (loc, index,
14506 fold_convert_loc (loc, sizetype, low_bound));
14507
14508 string = exp1;
14509 }
14510
14511 if (string
14512 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14513 && TREE_CODE (string) == STRING_CST
14514 && TREE_CODE (index) == INTEGER_CST
14515 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14516 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14517 == MODE_INT)
14518 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14519 return build_int_cst_type (TREE_TYPE (exp),
14520 (TREE_STRING_POINTER (string)
14521 [TREE_INT_CST_LOW (index)]));
14522 }
14523 return NULL;
14524 }
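
/* Example: for the GENERIC form of "abc"[1], an ARRAY_REF of a
   STRING_CST with a constant in-bounds index and a one-byte integer
   element type, this returns the INTEGER_CST 98 ('b'); a variable or
   out-of-range index yields NULL.  */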
14525
14526 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14527 an integer constant, real, or fixed-point constant.
14528
14529 TYPE is the type of the result. */
14530
14531 static tree
14532 fold_negate_const (tree arg0, tree type)
14533 {
14534 tree t = NULL_TREE;
14535
14536 switch (TREE_CODE (arg0))
14537 {
14538 case INTEGER_CST:
14539 {
14540 bool overflow;
14541 wide_int val = wi::neg (arg0, &overflow);
14542 t = force_fit_type (type, val, 1,
14543 (overflow | TREE_OVERFLOW (arg0))
14544 && !TYPE_UNSIGNED (type));
14545 break;
14546 }
14547
14548 case REAL_CST:
14549 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14550 break;
14551
14552 case FIXED_CST:
14553 {
14554 FIXED_VALUE_TYPE f;
14555 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14556 &(TREE_FIXED_CST (arg0)), NULL,
14557 TYPE_SATURATING (type));
14558 t = build_fixed (type, f);
14559 /* Propagate overflow flags. */
14560 if (overflow_p | TREE_OVERFLOW (arg0))
14561 TREE_OVERFLOW (t) = 1;
14562 break;
14563 }
14564
14565 default:
14566 gcc_unreachable ();
14567 }
14568
14569 return t;
14570 }
14571
14572 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14573 an integer constant or real constant.
14574
14575 TYPE is the type of the result. */
14576
14577 tree
14578 fold_abs_const (tree arg0, tree type)
14579 {
14580 tree t = NULL_TREE;
14581
14582 switch (TREE_CODE (arg0))
14583 {
14584 case INTEGER_CST:
14585 {
14586 /* If the value is unsigned or non-negative, then the absolute value
14587 is the same as the ordinary value. */
14588 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
14589 t = arg0;
14590
14591 /* If the value is negative, then the absolute value is
14592 its negation. */
14593 else
14594 {
14595 bool overflow;
14596 wide_int val = wi::neg (arg0, &overflow);
14597 t = force_fit_type (type, val, -1,
14598 overflow | TREE_OVERFLOW (arg0));
14599 }
14600 }
14601 break;
14602
14603 case REAL_CST:
14604 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14605 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14606 else
14607 t = arg0;
14608 break;
14609
14610 default:
14611 gcc_unreachable ();
14612 }
14613
14614 return t;
14615 }
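
/* Note the INT_MIN corner case above: negating the INTEGER_CST INT_MIN
   wraps back to INT_MIN, and force_fit_type then sets TREE_OVERFLOW on
   the result so callers can tell the absolute value did not fit.  */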
14616
14617 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14618 constant. TYPE is the type of the result. */
14619
14620 static tree
14621 fold_not_const (const_tree arg0, tree type)
14622 {
14623 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14624
14625 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
14626 }
14627
14628 /* Given CODE, a relational operator, the target type, TYPE and two
14629 constant operands OP0 and OP1, return the result of the
14630 relational operation. If the result is not a compile time
14631 constant, then return NULL_TREE. */
14632
14633 static tree
14634 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14635 {
14636 int result, invert;
14637
14638 /* From here on, the only cases we handle are when the result is
14639 known to be a constant. */
14640
14641 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14642 {
14643 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14644 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14645
14646 /* Handle the cases where either operand is a NaN. */
14647 if (real_isnan (c0) || real_isnan (c1))
14648 {
14649 switch (code)
14650 {
14651 case EQ_EXPR:
14652 case ORDERED_EXPR:
14653 result = 0;
14654 break;
14655
14656 case NE_EXPR:
14657 case UNORDERED_EXPR:
14658 case UNLT_EXPR:
14659 case UNLE_EXPR:
14660 case UNGT_EXPR:
14661 case UNGE_EXPR:
14662 case UNEQ_EXPR:
14663 result = 1;
14664 break;
14665
14666 case LT_EXPR:
14667 case LE_EXPR:
14668 case GT_EXPR:
14669 case GE_EXPR:
14670 case LTGT_EXPR:
14671 if (flag_trapping_math)
14672 return NULL_TREE;
14673 result = 0;
14674 break;
14675
14676 default:
14677 gcc_unreachable ();
14678 }
14679
14680 return constant_boolean_node (result, type);
14681 }
14682
14683 return constant_boolean_node (real_compare (code, c0, c1), type);
14684 }
14685
14686 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14687 {
14688 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14689 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14690 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14691 }
14692
14693 /* Handle equality/inequality of complex constants. */
14694 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14695 {
14696 tree rcond = fold_relational_const (code, type,
14697 TREE_REALPART (op0),
14698 TREE_REALPART (op1));
14699 tree icond = fold_relational_const (code, type,
14700 TREE_IMAGPART (op0),
14701 TREE_IMAGPART (op1));
14702 if (code == EQ_EXPR)
14703 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14704 else if (code == NE_EXPR)
14705 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14706 else
14707 return NULL_TREE;
14708 }
14709
14710 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14711 {
14712 unsigned count = VECTOR_CST_NELTS (op0);
14713 tree *elts = XALLOCAVEC (tree, count);
14714 gcc_assert (VECTOR_CST_NELTS (op1) == count
14715 && TYPE_VECTOR_SUBPARTS (type) == count);
14716
14717 for (unsigned i = 0; i < count; i++)
14718 {
14719 tree elem_type = TREE_TYPE (type);
14720 tree elem0 = VECTOR_CST_ELT (op0, i);
14721 tree elem1 = VECTOR_CST_ELT (op1, i);
14722
14723 tree tem = fold_relational_const (code, elem_type,
14724 elem0, elem1);
14725
14726 if (tem == NULL_TREE)
14727 return NULL_TREE;
14728
14729 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14730 }
14731
14732 return build_vector (type, elts);
14733 }
14734
14735 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14736
14737 To compute GT, swap the arguments and do LT.
14738 To compute GE, do LT and invert the result.
14739 To compute LE, swap the arguments, do LT and invert the result.
14740 To compute NE, do EQ and invert the result.
14741
14742 Therefore, the code below must handle only EQ and LT. */
14743
14744 if (code == LE_EXPR || code == GT_EXPR)
14745 {
14746 std::swap (op0, op1);
14747 code = swap_tree_comparison (code);
14748 }
14749
14750   /* Note that it is safe to invert for real values here because we
14751      have already handled the one case in which it matters.  */
14752
14753 invert = 0;
14754 if (code == NE_EXPR || code == GE_EXPR)
14755 {
14756 invert = 1;
14757 code = invert_tree_comparison (code, false);
14758 }
14759
14760   /* Compute a result for LT or EQ if the arguments permit;
14761      otherwise return NULL_TREE.  */
14762 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14763 {
14764 if (code == EQ_EXPR)
14765 result = tree_int_cst_equal (op0, op1);
14766 else
14767 result = tree_int_cst_lt (op0, op1);
14768 }
14769 else
14770 return NULL_TREE;
14771
14772 if (invert)
14773 result ^= 1;
14774 return constant_boolean_node (result, type);
14775 }
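
/* Example of the NaN handling above: with -ftrapping-math (the
   default), "1.0 < __builtin_nan ("")" is not folded here, because the
   signalling LT comparison must be able to raise FE_INVALID at run
   time; the quiet UNLT_EXPR form folds straight to 1.  */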
14776
14777 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14778 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14779 itself. */
14780
14781 tree
14782 fold_build_cleanup_point_expr (tree type, tree expr)
14783 {
14784 /* If the expression does not have side effects then we don't have to wrap
14785 it with a cleanup point expression. */
14786 if (!TREE_SIDE_EFFECTS (expr))
14787 return expr;
14788
14789   /* If the expression is a RETURN_EXPR, check whether the expression it
14790      returns, or the right-hand side of the MODIFY_EXPR it contains, has
14791      side effects.  If either of them has none, we don't need to wrap the
14792      expression in a cleanup point expression.  Note we don't check the
14793      left-hand side of the MODIFY_EXPR because it should always be the return decl.  */
14794 if (TREE_CODE (expr) == RETURN_EXPR)
14795 {
14796 tree op = TREE_OPERAND (expr, 0);
14797 if (!op || !TREE_SIDE_EFFECTS (op))
14798 return expr;
14799 op = TREE_OPERAND (op, 1);
14800 if (!TREE_SIDE_EFFECTS (op))
14801 return expr;
14802 }
14803
14804 return build1 (CLEANUP_POINT_EXPR, type, expr);
14805 }
14806
14807 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14808 of an indirection through OP0, or NULL_TREE if no simplification is
14809 possible. */
14810
14811 tree
14812 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14813 {
14814 tree sub = op0;
14815 tree subtype;
14816
14817 STRIP_NOPS (sub);
14818 subtype = TREE_TYPE (sub);
14819 if (!POINTER_TYPE_P (subtype))
14820 return NULL_TREE;
14821
14822 if (TREE_CODE (sub) == ADDR_EXPR)
14823 {
14824 tree op = TREE_OPERAND (sub, 0);
14825 tree optype = TREE_TYPE (op);
14826 /* *&CONST_DECL -> to the value of the const decl. */
14827 if (TREE_CODE (op) == CONST_DECL)
14828 return DECL_INITIAL (op);
14829 /* *&p => p; make sure to handle *&"str"[cst] here. */
14830 if (type == optype)
14831 {
14832 tree fop = fold_read_from_constant_string (op);
14833 if (fop)
14834 return fop;
14835 else
14836 return op;
14837 }
14838 /* *(foo *)&fooarray => fooarray[0] */
14839 else if (TREE_CODE (optype) == ARRAY_TYPE
14840 && type == TREE_TYPE (optype)
14841 && (!in_gimple_form
14842 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14843 {
14844 tree type_domain = TYPE_DOMAIN (optype);
14845 tree min_val = size_zero_node;
14846 if (type_domain && TYPE_MIN_VALUE (type_domain))
14847 min_val = TYPE_MIN_VALUE (type_domain);
14848 if (in_gimple_form
14849 && TREE_CODE (min_val) != INTEGER_CST)
14850 return NULL_TREE;
14851 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14852 NULL_TREE, NULL_TREE);
14853 }
14854 /* *(foo *)&complexfoo => __real__ complexfoo */
14855 else if (TREE_CODE (optype) == COMPLEX_TYPE
14856 && type == TREE_TYPE (optype))
14857 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14858 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14859 else if (TREE_CODE (optype) == VECTOR_TYPE
14860 && type == TREE_TYPE (optype))
14861 {
14862 tree part_width = TYPE_SIZE (type);
14863 tree index = bitsize_int (0);
14864 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14865 }
14866 }
14867
14868 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14869 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14870 {
14871 tree op00 = TREE_OPERAND (sub, 0);
14872 tree op01 = TREE_OPERAND (sub, 1);
14873
14874 STRIP_NOPS (op00);
14875 if (TREE_CODE (op00) == ADDR_EXPR)
14876 {
14877 tree op00type;
14878 op00 = TREE_OPERAND (op00, 0);
14879 op00type = TREE_TYPE (op00);
14880
14881 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14882 if (TREE_CODE (op00type) == VECTOR_TYPE
14883 && type == TREE_TYPE (op00type))
14884 {
14885 HOST_WIDE_INT offset = tree_to_shwi (op01);
14886 tree part_width = TYPE_SIZE (type);
14887 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14888 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14889 tree index = bitsize_int (indexi);
14890
14891 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14892 return fold_build3_loc (loc,
14893 BIT_FIELD_REF, type, op00,
14894 part_width, index);
14895
14896 }
14897 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14898 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14899 && type == TREE_TYPE (op00type))
14900 {
14901 tree size = TYPE_SIZE_UNIT (type);
14902 if (tree_int_cst_equal (size, op01))
14903 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14904 }
14905 /* ((foo *)&fooarray)[1] => fooarray[1] */
14906 else if (TREE_CODE (op00type) == ARRAY_TYPE
14907 && type == TREE_TYPE (op00type))
14908 {
14909 tree type_domain = TYPE_DOMAIN (op00type);
14910 tree min_val = size_zero_node;
14911 if (type_domain && TYPE_MIN_VALUE (type_domain))
14912 min_val = TYPE_MIN_VALUE (type_domain);
14913 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14914 TYPE_SIZE_UNIT (type));
14915 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14916 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14917 NULL_TREE, NULL_TREE);
14918 }
14919 }
14920 }
14921
14922 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14923 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14924 && type == TREE_TYPE (TREE_TYPE (subtype))
14925 && (!in_gimple_form
14926 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14927 {
14928 tree type_domain;
14929 tree min_val = size_zero_node;
14930 sub = build_fold_indirect_ref_loc (loc, sub);
14931 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14932 if (type_domain && TYPE_MIN_VALUE (type_domain))
14933 min_val = TYPE_MIN_VALUE (type_domain);
14934 if (in_gimple_form
14935 && TREE_CODE (min_val) != INTEGER_CST)
14936 return NULL_TREE;
14937 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14938 NULL_TREE);
14939 }
14940
14941 return NULL_TREE;
14942 }
14943
14944 /* Builds an expression for an indirection through T, simplifying some
14945 cases. */
14946
14947 tree
14948 build_fold_indirect_ref_loc (location_t loc, tree t)
14949 {
14950 tree type = TREE_TYPE (TREE_TYPE (t));
14951 tree sub = fold_indirect_ref_1 (loc, type, t);
14952
14953 if (sub)
14954 return sub;
14955
14956 return build1_loc (loc, INDIRECT_REF, type, t);
14957 }
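
/* A hypothetical usage sketch: dereference a freshly taken address and
   let the folder collapse *&x back to x instead of emitting an
   INDIRECT_REF node.  */

static tree ATTRIBUTE_UNUSED
example_deref_of_addr (location_t loc, tree var)
{
  tree addr = build_fold_addr_expr_loc (loc, var);
  /* fold_indirect_ref_1 recognizes the ADDR_EXPR and returns VAR.  */
  return build_fold_indirect_ref_loc (loc, addr);
}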
14958
14959 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14960
14961 tree
14962 fold_indirect_ref_loc (location_t loc, tree t)
14963 {
14964 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14965
14966 if (sub)
14967 return sub;
14968 else
14969 return t;
14970 }
14971
14972 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14973    whose result is ignored.  The type of the returned tree need not be
14974    the same as that of the original expression.  */
14975
14976 tree
14977 fold_ignored_result (tree t)
14978 {
14979 if (!TREE_SIDE_EFFECTS (t))
14980 return integer_zero_node;
14981
14982 for (;;)
14983 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14984 {
14985 case tcc_unary:
14986 t = TREE_OPERAND (t, 0);
14987 break;
14988
14989 case tcc_binary:
14990 case tcc_comparison:
14991 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14992 t = TREE_OPERAND (t, 0);
14993 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14994 t = TREE_OPERAND (t, 1);
14995 else
14996 return t;
14997 break;
14998
14999 case tcc_expression:
15000 switch (TREE_CODE (t))
15001 {
15002 case COMPOUND_EXPR:
15003 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15004 return t;
15005 t = TREE_OPERAND (t, 0);
15006 break;
15007
15008 case COND_EXPR:
15009 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15010 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15011 return t;
15012 t = TREE_OPERAND (t, 0);
15013 break;
15014
15015 default:
15016 return t;
15017 }
15018 break;
15019
15020 default:
15021 return t;
15022 }
15023 }
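
/* Example: for "(void) (x + f ())" this returns just the call "f ()";
   the addition and the read of x are dropped because they have no side
   effects, while the call must be preserved.  */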
15024
15025 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15026
15027 tree
15028 round_up_loc (location_t loc, tree value, unsigned int divisor)
15029 {
15030 tree div = NULL_TREE;
15031
15032 if (divisor == 1)
15033 return value;
15034
15035   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15036      have to do anything.  Only do this test when VALUE is not a
15037      constant, because for a constant the test is more expensive than
15038      just doing the rounding.  */
15039 if (TREE_CODE (value) != INTEGER_CST)
15040 {
15041 div = build_int_cst (TREE_TYPE (value), divisor);
15042
15043 if (multiple_of_p (TREE_TYPE (value), value, div))
15044 return value;
15045 }
15046
15047 /* If divisor is a power of two, simplify this to bit manipulation. */
15048 if (divisor == (divisor & -divisor))
15049 {
15050 if (TREE_CODE (value) == INTEGER_CST)
15051 {
15052 wide_int val = value;
15053 bool overflow_p;
15054
15055 if ((val & (divisor - 1)) == 0)
15056 return value;
15057
15058 overflow_p = TREE_OVERFLOW (value);
15059 val += divisor - 1;
15060 val &= - (int) divisor;
15061 if (val == 0)
15062 overflow_p = true;
15063
15064 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15065 }
15066 else
15067 {
15068 tree t;
15069
15070 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15071 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15072 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15073 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15074 }
15075 }
15076 else
15077 {
15078 if (!div)
15079 div = build_int_cst (TREE_TYPE (value), divisor);
15080 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15081 value = size_binop_loc (loc, MULT_EXPR, value, div);
15082 }
15083
15084 return value;
15085 }
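
/* Example: round_up_loc (loc, size, 8) on a non-constant SIZE emits
   (size + 7) & -8 via the bit trick above, because 8 is a power of
   two; a divisor such as 12 instead falls back to CEIL_DIV_EXPR
   followed by MULT_EXPR.  */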
15086
15087 /* Likewise, but round down. */
15088
15089 tree
15090 round_down_loc (location_t loc, tree value, int divisor)
15091 {
15092 tree div = NULL_TREE;
15093
15094 gcc_assert (divisor > 0);
15095 if (divisor == 1)
15096 return value;
15097
15098   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15099      have to do anything.  Only do this test when VALUE is not a
15100      constant, because for a constant the test is more expensive than
15101      just doing the rounding.  */
15102 if (TREE_CODE (value) != INTEGER_CST)
15103 {
15104 div = build_int_cst (TREE_TYPE (value), divisor);
15105
15106 if (multiple_of_p (TREE_TYPE (value), value, div))
15107 return value;
15108 }
15109
15110 /* If divisor is a power of two, simplify this to bit manipulation. */
15111 if (divisor == (divisor & -divisor))
15112 {
15113 tree t;
15114
15115 t = build_int_cst (TREE_TYPE (value), -divisor);
15116 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15117 }
15118 else
15119 {
15120 if (!div)
15121 div = build_int_cst (TREE_TYPE (value), divisor);
15122 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15123 value = size_binop_loc (loc, MULT_EXPR, value, div);
15124 }
15125
15126 return value;
15127 }
15128
15129 /* Return a pointer to the base of the object addressed by EXP and
15130    extract information about the offset of the access, storing it in
15131    *PBITPOS and *POFFSET.  */
15132
15133 static tree
15134 split_address_to_core_and_offset (tree exp,
15135 HOST_WIDE_INT *pbitpos, tree *poffset)
15136 {
15137 tree core;
15138 machine_mode mode;
15139 int unsignedp, volatilep;
15140 HOST_WIDE_INT bitsize;
15141 location_t loc = EXPR_LOCATION (exp);
15142
15143 if (TREE_CODE (exp) == ADDR_EXPR)
15144 {
15145 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15146 poffset, &mode, &unsignedp, &volatilep,
15147 false);
15148 core = build_fold_addr_expr_loc (loc, core);
15149 }
15150 else
15151 {
15152 core = exp;
15153 *pbitpos = 0;
15154 *poffset = NULL_TREE;
15155 }
15156
15157 return core;
15158 }
15159
15160 /* Returns true if addresses of E1 and E2 differ by a constant, false
15161 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15162
15163 bool
15164 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15165 {
15166 tree core1, core2;
15167 HOST_WIDE_INT bitpos1, bitpos2;
15168 tree toffset1, toffset2, tdiff, type;
15169
15170 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15171 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15172
15173 if (bitpos1 % BITS_PER_UNIT != 0
15174 || bitpos2 % BITS_PER_UNIT != 0
15175 || !operand_equal_p (core1, core2, 0))
15176 return false;
15177
15178 if (toffset1 && toffset2)
15179 {
15180 type = TREE_TYPE (toffset1);
15181 if (type != TREE_TYPE (toffset2))
15182 toffset2 = fold_convert (type, toffset2);
15183
15184 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15185 if (!cst_and_fits_in_hwi (tdiff))
15186 return false;
15187
15188 *diff = int_cst_value (tdiff);
15189 }
15190 else if (toffset1 || toffset2)
15191 {
15192 /* If only one of the offsets is non-constant, the difference cannot
15193 be a constant. */
15194 return false;
15195 }
15196 else
15197 *diff = 0;
15198
15199 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15200 return true;
15201 }
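
/* Example: for "&a[4]" and "&a[1]" the cores compare equal and the
   offsets are constant, so this stores the byte difference
   3 * sizeof (*a) in *DIFF and returns true; unrelated bases, or a
   symbolic offset on only one side, make it return false.  */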
15202
15203 /* Simplify the floating point expression EXP when the sign of the
15204 result is not significant. Return NULL_TREE if no simplification
15205 is possible. */
15206
15207 tree
15208 fold_strip_sign_ops (tree exp)
15209 {
15210 tree arg0, arg1;
15211 location_t loc = EXPR_LOCATION (exp);
15212
15213 switch (TREE_CODE (exp))
15214 {
15215 case ABS_EXPR:
15216 case NEGATE_EXPR:
15217 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15218 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15219
15220 case MULT_EXPR:
15221 case RDIV_EXPR:
15222 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
15223 return NULL_TREE;
15224 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15225 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15226 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15227 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15228 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15229 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15230 break;
15231
15232 case COMPOUND_EXPR:
15233 arg0 = TREE_OPERAND (exp, 0);
15234 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15235 if (arg1)
15236 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15237 break;
15238
15239 case COND_EXPR:
15240 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15241 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15242 if (arg0 || arg1)
15243 return fold_build3_loc (loc,
15244 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15245 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15246 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15247 break;
15248
15249 case CALL_EXPR:
15250 {
15251 const enum built_in_function fcode = builtin_mathfn_code (exp);
15252 switch (fcode)
15253 {
15254 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15255 /* Strip copysign function call, return the 1st argument. */
15256 arg0 = CALL_EXPR_ARG (exp, 0);
15257 arg1 = CALL_EXPR_ARG (exp, 1);
15258 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15259
15260 default:
15261 /* Strip sign ops from the argument of "odd" math functions. */
15262 if (negate_mathfn_p (fcode))
15263 {
15264 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15265 if (arg0)
15266 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15267 }
15268 break;
15269 }
15270 }
15271 break;
15272
15273 default:
15274 break;
15275 }
15276 return NULL_TREE;
15277 }
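
/* Example: when only the magnitude of the result matters, say for the
   argument of fabs, "-x * y" is rewritten to "x * y" and
   "copysign (x, y)" to plain "x"; under -frounding-math the MULT_EXPR
   case bails out, because rounding can depend on operand signs.  */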
15278
15279 /* Return OFF converted to a pointer offset type suitable as offset for
15280 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15281 tree
15282 convert_to_ptrofftype_loc (location_t loc, tree off)
15283 {
15284 return fold_convert_loc (loc, sizetype, off);
15285 }
15286
15287 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15288 tree
15289 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15290 {
15291 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15292 ptr, convert_to_ptrofftype_loc (loc, off));
15293 }
15294
15295 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15296 tree
15297 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15298 {
15299 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15300 ptr, size_int (off));
15301 }