/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carry out the exact
   division and return the quotient.  Otherwise return
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
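
/* Illustration (not part of the original source): for INTEGER_CST
   operands with values 12 and 4 this returns a constant 3, while for
   13 and 4 the remainder is nonzero and the result is NULL_TREE.  */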
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
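
/* A sketch of how the deferral machinery above is typically used;
   illustrative only and not part of the original source.  EXPR, STMT
   and USED are hypothetical names:

     fold_defer_overflow_warnings ();
     tree folded = fold (EXPR);
     bool USED = ...;  /+ did we keep the folded result? +/
     fold_undefer_overflow_warnings (USED, STMT, 0);

   Any message recorded by fold_overflow_warning in between is emitted
   at most once, and only if USED is true.  */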
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
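
/* For example (illustration, not in the original source): sin is odd,
   since sin(-x) == -sin(x), so fold may rewrite -sin(x) as sin(-x);
   cos is even and therefore deliberately absent from the list above.  */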

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
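
/* Illustration of the division hazard above (not part of the original
   source): negating A / B by negating B would turn INT_MIN / 1 into
   INT_MIN / -1, which overflows and traps on some targets.  Negating
   the dividend instead is only safe when it is known not to be
   INT_MIN, e.g. because it is a NEGATE_EXPR or a constant accepted by
   may_negate_without_overflow_p, which is exactly what the open-coded
   checks above test.  */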

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.  If negate_expr_p would return true
   for T, NULL_TREE will never be returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which
   case NULL_TREE is returned.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "Constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
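
/* Worked example for split_tree (illustration, not in the original
   source): for IN = x - 3 and CODE == PLUS_EXPR, the MINUS_EXPR is
   decomposed with the subtracted literal routed to *MINUS_LITP, so
   the result is var = x, *litp = 0, *minus_litp = 3, *conp = 0.  */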

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
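
/* For illustration (not part of the original source): with 32-bit int
   constants, int_const_binop (PLUS_EXPR, INT_MAX, 1) yields a constant
   whose TREE_OVERFLOW flag is set by force_fit_type, while the same
   addition on unsigned int, UINT_MAX + 1, simply wraps to 0 with no
   overflow flag, matching the SIGNED/UNSIGNED handling above.  */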

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
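
/* Note on the wide-range complex division above (illustration, not in
   the original source): it is essentially Smith's algorithm, dividing
   through by the larger-magnitude component of the divisor first so
   that the intermediate ratio has magnitude at most 1.  For
   b = 1e300 + 1e-300i in double, the straightforward br*br + bi*bi
   would overflow to infinity, while ratio = bi/br underflows
   harmlessly and div = br + bi*ratio stays representable.  */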

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
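
/* For example (illustration, not in the original source): sizetype is
   unsigned, so the result comes back in ssizetype; size_diffop_loc
   (loc, size_int (4), size_int (8)) therefore folds to the ssizetype
   constant -4 rather than to a huge unsigned value.  */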
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */
1839
1840 static tree
1841 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1842 {
1843 bool overflow = false;
1844 tree t;
1845
1846 /* The following code implements the floating point to integer
1847 conversion rules required by the Java Language Specification,
1848 that IEEE NaNs are mapped to zero and values that overflow
1849 the target precision saturate, i.e. values greater than
1850 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1851 are mapped to INT_MIN. These semantics are allowed by the
1852 C and C++ standards that simply state that the behavior of
1853 FP-to-integer conversion is unspecified upon overflow. */
1854
1855 wide_int val;
1856 REAL_VALUE_TYPE r;
1857 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1858
1859 switch (code)
1860 {
1861 case FIX_TRUNC_EXPR:
1862 real_trunc (&r, VOIDmode, &x);
1863 break;
1864
1865 default:
1866 gcc_unreachable ();
1867 }
1868
1869 /* If R is NaN, return zero and show we have an overflow. */
1870 if (REAL_VALUE_ISNAN (r))
1871 {
1872 overflow = true;
1873 val = wi::zero (TYPE_PRECISION (type));
1874 }
1875
1876 /* See if R is less than the lower bound or greater than the
1877 upper bound. */
1878
1879 if (! overflow)
1880 {
1881 tree lt = TYPE_MIN_VALUE (type);
1882 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1883 if (REAL_VALUES_LESS (r, l))
1884 {
1885 overflow = true;
1886 val = lt;
1887 }
1888 }
1889
1890 if (! overflow)
1891 {
1892 tree ut = TYPE_MAX_VALUE (type);
1893 if (ut)
1894 {
1895 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1896 if (REAL_VALUES_LESS (u, r))
1897 {
1898 overflow = true;
1899 val = ut;
1900 }
1901 }
1902 }
1903
1904 if (! overflow)
1905 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1906
1907 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1908 return t;
1909 }
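
/* Editorial examples of the saturating semantics above (illustrative
   only, assuming a 32-bit signed TYPE):

     FIX_TRUNC of  3.9  ->  3                (truncation toward zero)
     FIX_TRUNC of -3.9  -> -3
     FIX_TRUNC of  NaN  ->  0                with TREE_OVERFLOW set
     FIX_TRUNC of 1e30  ->  TYPE_MAX_VALUE   with TREE_OVERFLOW set.  */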
1910
1911 /* A subroutine of fold_convert_const handling conversions of a
1912 FIXED_CST to an integer type. */
1913
1914 static tree
1915 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1916 {
1917 tree t;
1918 double_int temp, temp_trunc;
1919 unsigned int mode;
1920
1921 /* Right shift FIXED_CST by fbit to produce temp. */
1922 temp = TREE_FIXED_CST (arg1).data;
1923 mode = TREE_FIXED_CST (arg1).mode;
1924 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1925 {
1926 temp = temp.rshift (GET_MODE_FBIT (mode),
1927 HOST_BITS_PER_DOUBLE_INT,
1928 SIGNED_FIXED_POINT_MODE_P (mode));
1929
1930 /* Left shift temp by fbit to produce temp_trunc. */
1931 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1932 HOST_BITS_PER_DOUBLE_INT,
1933 SIGNED_FIXED_POINT_MODE_P (mode));
1934 }
1935 else
1936 {
1937 temp = double_int_zero;
1938 temp_trunc = double_int_zero;
1939 }
1940
1941 /* If FIXED_CST is negative, we need to round the value toward 0:
1942 if the fractional bits are nonzero, add 1 to temp. */
1943 if (SIGNED_FIXED_POINT_MODE_P (mode)
1944 && temp_trunc.is_negative ()
1945 && TREE_FIXED_CST (arg1).data != temp_trunc)
1946 temp += double_int_one;
1947
1948 /* Given a fixed-point constant, make new constant with new type,
1949 appropriately sign-extended or truncated. */
1950 t = force_fit_type (type, temp, -1,
1951 (temp.is_negative ()
1952 && (TYPE_UNSIGNED (type)
1953 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1954 | TREE_OVERFLOW (arg1));
1955
1956 return t;
1957 }
1958
1959 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1960 to another floating point type. */
1961
1962 static tree
1963 fold_convert_const_real_from_real (tree type, const_tree arg1)
1964 {
1965 REAL_VALUE_TYPE value;
1966 tree t;
1967
1968 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1969 t = build_real (type, value);
1970
1971 /* If converting an infinity or NAN to a representation that doesn't
1972 have one, set the overflow bit so that we can produce some kind of
1973 error message at the appropriate point if necessary. It's not the
1974 most user-friendly message, but it's better than nothing. */
1975 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1976 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1977 TREE_OVERFLOW (t) = 1;
1978 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1979 && !MODE_HAS_NANS (TYPE_MODE (type)))
1980 TREE_OVERFLOW (t) = 1;
1981 /* Regular overflow, conversion produced an infinity in a mode that
1982 can't represent them. */
1983 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1984 && REAL_VALUE_ISINF (value)
1985 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1986 TREE_OVERFLOW (t) = 1;
1987 else
1988 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1989 return t;
1990 }
1991
1992 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1993 to a floating point type. */
1994
1995 static tree
1996 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1997 {
1998 REAL_VALUE_TYPE value;
1999 tree t;
2000
2001 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2002 t = build_real (type, value);
2003
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 return t;
2006 }
2007
2008 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2009 to another fixed-point type. */
2010
2011 static tree
2012 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2013 {
2014 FIXED_VALUE_TYPE value;
2015 tree t;
2016 bool overflow_p;
2017
2018 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2019 TYPE_SATURATING (type));
2020 t = build_fixed (type, value);
2021
2022 /* Propagate overflow flags. */
2023 if (overflow_p | TREE_OVERFLOW (arg1))
2024 TREE_OVERFLOW (t) = 1;
2025 return t;
2026 }
2027
2028 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2029 to a fixed-point type. */
2030
2031 static tree
2032 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2033 {
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037 double_int di;
2038
2039 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2040
2041 di.low = TREE_INT_CST_ELT (arg1, 0);
2042 if (TREE_INT_CST_NUNITS (arg1) == 1)
2043 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2044 else
2045 di.high = TREE_INT_CST_ELT (arg1, 1);
2046
2047 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2048 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2049 TYPE_SATURATING (type));
2050 t = build_fixed (type, value);
2051
2052 /* Propagate overflow flags. */
2053 if (overflow_p | TREE_OVERFLOW (arg1))
2054 TREE_OVERFLOW (t) = 1;
2055 return t;
2056 }
2057
2058 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2059 to a fixed-point type. */
2060
2061 static tree
2062 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2063 {
2064 FIXED_VALUE_TYPE value;
2065 tree t;
2066 bool overflow_p;
2067
2068 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2069 &TREE_REAL_CST (arg1),
2070 TYPE_SATURATING (type));
2071 t = build_fixed (type, value);
2072
2073 /* Propagate overflow flags. */
2074 if (overflow_p | TREE_OVERFLOW (arg1))
2075 TREE_OVERFLOW (t) = 1;
2076 return t;
2077 }
2078
2079 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2080 type TYPE. If no simplification can be done return NULL_TREE. */
2081
2082 static tree
2083 fold_convert_const (enum tree_code code, tree type, tree arg1)
2084 {
2085 if (TREE_TYPE (arg1) == type)
2086 return arg1;
2087
2088 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2089 || TREE_CODE (type) == OFFSET_TYPE)
2090 {
2091 if (TREE_CODE (arg1) == INTEGER_CST)
2092 return fold_convert_const_int_from_int (type, arg1);
2093 else if (TREE_CODE (arg1) == REAL_CST)
2094 return fold_convert_const_int_from_real (code, type, arg1);
2095 else if (TREE_CODE (arg1) == FIXED_CST)
2096 return fold_convert_const_int_from_fixed (type, arg1);
2097 }
2098 else if (TREE_CODE (type) == REAL_TYPE)
2099 {
2100 if (TREE_CODE (arg1) == INTEGER_CST)
2101 return build_real_from_int_cst (type, arg1);
2102 else if (TREE_CODE (arg1) == REAL_CST)
2103 return fold_convert_const_real_from_real (type, arg1);
2104 else if (TREE_CODE (arg1) == FIXED_CST)
2105 return fold_convert_const_real_from_fixed (type, arg1);
2106 }
2107 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2108 {
2109 if (TREE_CODE (arg1) == FIXED_CST)
2110 return fold_convert_const_fixed_from_fixed (type, arg1);
2111 else if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_fixed_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_fixed_from_real (type, arg1);
2115 }
2116 return NULL_TREE;
2117 }
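
/* Editorial usage sketch (not GCC code; X is hypothetical):

     tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, x);

   yields the INTEGER_CST 2 when X is the REAL_CST 2.5, and NULL_TREE
   whenever X is not a constant one of the subroutines above handles.  */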
2118
2119 /* Construct a vector of zero elements of vector type TYPE. */
2120
2121 static tree
2122 build_zero_vector (tree type)
2123 {
2124 tree t;
2125
2126 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2127 return build_vector_from_val (type, t);
2128 }
2129
2130 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2131
2132 bool
2133 fold_convertible_p (const_tree type, const_tree arg)
2134 {
2135 tree orig = TREE_TYPE (arg);
2136
2137 if (type == orig)
2138 return true;
2139
2140 if (TREE_CODE (arg) == ERROR_MARK
2141 || TREE_CODE (type) == ERROR_MARK
2142 || TREE_CODE (orig) == ERROR_MARK)
2143 return false;
2144
2145 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2146 return true;
2147
2148 switch (TREE_CODE (type))
2149 {
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2152 case OFFSET_TYPE:
2153 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2154 || TREE_CODE (orig) == OFFSET_TYPE)
2155 return true;
2156 return (TREE_CODE (orig) == VECTOR_TYPE
2157 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2158
2159 case REAL_TYPE:
2160 case FIXED_POINT_TYPE:
2161 case COMPLEX_TYPE:
2162 case VECTOR_TYPE:
2163 case VOID_TYPE:
2164 return TREE_CODE (type) == TREE_CODE (orig);
2165
2166 default:
2167 return false;
2168 }
2169 }
2170
2171 /* Convert expression ARG to type TYPE. Used by the middle-end for
2172 simple conversions in preference to calling the front-end's convert. */
2173
2174 tree
2175 fold_convert_loc (location_t loc, tree type, tree arg)
2176 {
2177 tree orig = TREE_TYPE (arg);
2178 tree tem;
2179
2180 if (type == orig)
2181 return arg;
2182
2183 if (TREE_CODE (arg) == ERROR_MARK
2184 || TREE_CODE (type) == ERROR_MARK
2185 || TREE_CODE (orig) == ERROR_MARK)
2186 return error_mark_node;
2187
2188 switch (TREE_CODE (type))
2189 {
2190 case POINTER_TYPE:
2191 case REFERENCE_TYPE:
2192 /* Handle conversions between pointers to different address spaces. */
2193 if (POINTER_TYPE_P (orig)
2194 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2195 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2196 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2197 /* fall through */
2198
2199 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2200 case OFFSET_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2202 {
2203 tem = fold_convert_const (NOP_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2205 return tem;
2206 }
2207 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE)
2209 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2210 if (TREE_CODE (orig) == COMPLEX_TYPE)
2211 return fold_convert_loc (loc, type,
2212 fold_build1_loc (loc, REALPART_EXPR,
2213 TREE_TYPE (orig), arg));
2214 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2215 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2216 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2217
2218 case REAL_TYPE:
2219 if (TREE_CODE (arg) == INTEGER_CST)
2220 {
2221 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2222 if (tem != NULL_TREE)
2223 return tem;
2224 }
2225 else if (TREE_CODE (arg) == REAL_CST)
2226 {
2227 tem = fold_convert_const (NOP_EXPR, type, arg);
2228 if (tem != NULL_TREE)
2229 return tem;
2230 }
2231 else if (TREE_CODE (arg) == FIXED_CST)
2232 {
2233 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2235 return tem;
2236 }
2237
2238 switch (TREE_CODE (orig))
2239 {
2240 case INTEGER_TYPE:
2241 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2242 case POINTER_TYPE: case REFERENCE_TYPE:
2243 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2244
2245 case REAL_TYPE:
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2247
2248 case FIXED_POINT_TYPE:
2249 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2250
2251 case COMPLEX_TYPE:
2252 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert_loc (loc, type, tem);
2254
2255 default:
2256 gcc_unreachable ();
2257 }
2258
2259 case FIXED_POINT_TYPE:
2260 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2261 || TREE_CODE (arg) == REAL_CST)
2262 {
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 goto fold_convert_exit;
2266 }
2267
2268 switch (TREE_CODE (orig))
2269 {
2270 case FIXED_POINT_TYPE:
2271 case INTEGER_TYPE:
2272 case ENUMERAL_TYPE:
2273 case BOOLEAN_TYPE:
2274 case REAL_TYPE:
2275 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2276
2277 case COMPLEX_TYPE:
2278 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2279 return fold_convert_loc (loc, type, tem);
2280
2281 default:
2282 gcc_unreachable ();
2283 }
2284
2285 case COMPLEX_TYPE:
2286 switch (TREE_CODE (orig))
2287 {
2288 case INTEGER_TYPE:
2289 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2290 case POINTER_TYPE: case REFERENCE_TYPE:
2291 case REAL_TYPE:
2292 case FIXED_POINT_TYPE:
2293 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2294 fold_convert_loc (loc, TREE_TYPE (type), arg),
2295 fold_convert_loc (loc, TREE_TYPE (type),
2296 integer_zero_node));
2297 case COMPLEX_TYPE:
2298 {
2299 tree rpart, ipart;
2300
2301 if (TREE_CODE (arg) == COMPLEX_EXPR)
2302 {
2303 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2304 TREE_OPERAND (arg, 0));
2305 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 1));
2307 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2308 }
2309
2310 arg = save_expr (arg);
2311 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2312 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2313 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2314 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2316 }
2317
2318 default:
2319 gcc_unreachable ();
2320 }
2321
2322 case VECTOR_TYPE:
2323 if (integer_zerop (arg))
2324 return build_zero_vector (type);
2325 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2326 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2327 || TREE_CODE (orig) == VECTOR_TYPE);
2328 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2329
2330 case VOID_TYPE:
2331 tem = fold_ignored_result (arg);
2332 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2333
2334 default:
2335 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2336 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2337 gcc_unreachable ();
2338 }
2339 fold_convert_exit:
2340 protected_set_expr_location_unshare (tem, loc);
2341 return tem;
2342 }
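
/* Editorial example (an illustrative sketch, not GCC code): converting
   a scalar to a complex type pairs the converted value with a zero
   imaginary part, so for an int-valued expression I,

     fold_convert_loc (loc, complex_double_type_node, i)

   builds COMPLEX_EXPR <(double) i, 0.0>.  */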
2343 \f
2344 /* Return false if expr can be assumed not to be an lvalue, true
2345 otherwise. */
2346
2347 static bool
2348 maybe_lvalue_p (const_tree x)
2349 {
2350 /* We only need to wrap lvalue tree codes. */
2351 switch (TREE_CODE (x))
2352 {
2353 case VAR_DECL:
2354 case PARM_DECL:
2355 case RESULT_DECL:
2356 case LABEL_DECL:
2357 case FUNCTION_DECL:
2358 case SSA_NAME:
2359
2360 case COMPONENT_REF:
2361 case MEM_REF:
2362 case INDIRECT_REF:
2363 case ARRAY_REF:
2364 case ARRAY_RANGE_REF:
2365 case BIT_FIELD_REF:
2366 case OBJ_TYPE_REF:
2367
2368 case REALPART_EXPR:
2369 case IMAGPART_EXPR:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2372 case SAVE_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2375 case COMPOUND_EXPR:
2376 case MODIFY_EXPR:
2377 case TARGET_EXPR:
2378 case COND_EXPR:
2379 case BIND_EXPR:
2380 break;
2381
2382 default:
2383 /* Assume the worst for front-end tree codes. */
2384 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2385 break;
2386 return false;
2387 }
2388
2389 return true;
2390 }
2391
2392 /* Return an expr equal to X but certainly not valid as an lvalue. */
2393
2394 tree
2395 non_lvalue_loc (location_t loc, tree x)
2396 {
2397 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2398 us. */
2399 if (in_gimple_form)
2400 return x;
2401
2402 if (! maybe_lvalue_p (x))
2403 return x;
2404 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2405 }
2406
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2409
2410 static tree
2411 pedantic_non_lvalue_loc (location_t loc, tree x)
2412 {
2413 return protected_set_expr_location_unshare (x, loc);
2414 }
2415 \f
2416 /* Given a tree comparison code, return the code that is the logical inverse.
2417 It is generally not safe to do this for floating-point comparisons, except
2418 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2419 ERROR_MARK in this case. */
2420
2421 enum tree_code
2422 invert_tree_comparison (enum tree_code code, bool honor_nans)
2423 {
2424 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2425 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2426 return ERROR_MARK;
2427
2428 switch (code)
2429 {
2430 case EQ_EXPR:
2431 return NE_EXPR;
2432 case NE_EXPR:
2433 return EQ_EXPR;
2434 case GT_EXPR:
2435 return honor_nans ? UNLE_EXPR : LE_EXPR;
2436 case GE_EXPR:
2437 return honor_nans ? UNLT_EXPR : LT_EXPR;
2438 case LT_EXPR:
2439 return honor_nans ? UNGE_EXPR : GE_EXPR;
2440 case LE_EXPR:
2441 return honor_nans ? UNGT_EXPR : GT_EXPR;
2442 case LTGT_EXPR:
2443 return UNEQ_EXPR;
2444 case UNEQ_EXPR:
2445 return LTGT_EXPR;
2446 case UNGT_EXPR:
2447 return LE_EXPR;
2448 case UNGE_EXPR:
2449 return LT_EXPR;
2450 case UNLT_EXPR:
2451 return GE_EXPR;
2452 case UNLE_EXPR:
2453 return GT_EXPR;
2454 case ORDERED_EXPR:
2455 return UNORDERED_EXPR;
2456 case UNORDERED_EXPR:
2457 return ORDERED_EXPR;
2458 default:
2459 gcc_unreachable ();
2460 }
2461 }
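
/* Editorial examples (illustrative only): when flag_trapping_math is
   clear,

     invert_tree_comparison (GT_EXPR, false) == LE_EXPR
     invert_tree_comparison (GT_EXPR, true)  == UNLE_EXPR

   whereas with both HONOR_NANS and flag_trapping_math in effect the
   GT_EXPR case returns ERROR_MARK, because GT traps on a NaN operand
   while its inverse UNLE does not.  */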
2462
2463 /* Similar, but return the comparison that results if the operands are
2464 swapped. This is safe for floating-point. */
2465
2466 enum tree_code
2467 swap_tree_comparison (enum tree_code code)
2468 {
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 case NE_EXPR:
2473 case ORDERED_EXPR:
2474 case UNORDERED_EXPR:
2475 case LTGT_EXPR:
2476 case UNEQ_EXPR:
2477 return code;
2478 case GT_EXPR:
2479 return LT_EXPR;
2480 case GE_EXPR:
2481 return LE_EXPR;
2482 case LT_EXPR:
2483 return GT_EXPR;
2484 case LE_EXPR:
2485 return GE_EXPR;
2486 case UNGT_EXPR:
2487 return UNLT_EXPR;
2488 case UNGE_EXPR:
2489 return UNLE_EXPR;
2490 case UNLT_EXPR:
2491 return UNGT_EXPR;
2492 case UNLE_EXPR:
2493 return UNGE_EXPR;
2494 default:
2495 gcc_unreachable ();
2496 }
2497 }
2498
2499
2500 /* Convert a comparison tree code from an enum tree_code representation
2501 into a compcode bit-based encoding. This function is the inverse of
2502 compcode_to_comparison. */
2503
2504 static enum comparison_code
2505 comparison_to_compcode (enum tree_code code)
2506 {
2507 switch (code)
2508 {
2509 case LT_EXPR:
2510 return COMPCODE_LT;
2511 case EQ_EXPR:
2512 return COMPCODE_EQ;
2513 case LE_EXPR:
2514 return COMPCODE_LE;
2515 case GT_EXPR:
2516 return COMPCODE_GT;
2517 case NE_EXPR:
2518 return COMPCODE_NE;
2519 case GE_EXPR:
2520 return COMPCODE_GE;
2521 case ORDERED_EXPR:
2522 return COMPCODE_ORD;
2523 case UNORDERED_EXPR:
2524 return COMPCODE_UNORD;
2525 case UNLT_EXPR:
2526 return COMPCODE_UNLT;
2527 case UNEQ_EXPR:
2528 return COMPCODE_UNEQ;
2529 case UNLE_EXPR:
2530 return COMPCODE_UNLE;
2531 case UNGT_EXPR:
2532 return COMPCODE_UNGT;
2533 case LTGT_EXPR:
2534 return COMPCODE_LTGT;
2535 case UNGE_EXPR:
2536 return COMPCODE_UNGE;
2537 default:
2538 gcc_unreachable ();
2539 }
2540 }
2541
2542 /* Convert a compcode bit-based encoding of a comparison operator back
2543 to GCC's enum tree_code representation. This function is the
2544 inverse of comparison_to_compcode. */
2545
2546 static enum tree_code
2547 compcode_to_comparison (enum comparison_code code)
2548 {
2549 switch (code)
2550 {
2551 case COMPCODE_LT:
2552 return LT_EXPR;
2553 case COMPCODE_EQ:
2554 return EQ_EXPR;
2555 case COMPCODE_LE:
2556 return LE_EXPR;
2557 case COMPCODE_GT:
2558 return GT_EXPR;
2559 case COMPCODE_NE:
2560 return NE_EXPR;
2561 case COMPCODE_GE:
2562 return GE_EXPR;
2563 case COMPCODE_ORD:
2564 return ORDERED_EXPR;
2565 case COMPCODE_UNORD:
2566 return UNORDERED_EXPR;
2567 case COMPCODE_UNLT:
2568 return UNLT_EXPR;
2569 case COMPCODE_UNEQ:
2570 return UNEQ_EXPR;
2571 case COMPCODE_UNLE:
2572 return UNLE_EXPR;
2573 case COMPCODE_UNGT:
2574 return UNGT_EXPR;
2575 case COMPCODE_LTGT:
2576 return LTGT_EXPR;
2577 case COMPCODE_UNGE:
2578 return UNGE_EXPR;
2579 default:
2580 gcc_unreachable ();
2581 }
2582 }
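
/* Editorial note with a worked example (not part of the original
   sources): the encoding makes LT, EQ, GT and UNORD independent bits,
   so compound codes are plain bitwise combinations:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT
     COMPCODE_EQ   == COMPCODE_LE & COMPCODE_GE

   This is what lets combine_comparisons below merge two comparisons
   with a single AND or OR of their compcodes.  */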
2583
2584 /* Return a tree for the comparison which is the combination of
2585 doing the AND or OR (depending on CODE) of the two operations LCODE
2586 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2587 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2588 if this makes the transformation invalid. */
2589
2590 tree
2591 combine_comparisons (location_t loc,
2592 enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2595 {
2596 bool honor_nans = HONOR_NANS (ll_arg);
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 int compcode;
2600
2601 switch (code)
2602 {
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2606
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2610
2611 default:
2612 return NULL_TREE;
2613 }
2614
2615 if (!honor_nans)
2616 {
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2624 }
2625 else if (flag_trapping_math)
2626 {
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2638
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2648
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2654
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2658 }
2659
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2665 {
2666 enum tree_code tcode;
2667
2668 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2669 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2670 }
2671 }
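
/* Editorial examples (illustrative only): for integer operands X and Y,
   where no NaNs are involved,

     (x < y) && (x == y)  ->  compcode 1 & 2 == 0  ->  constant false
     (x < y) || (x == y)  ->  compcode 1 | 2 == 3  ->  x <= y

   For floating point, the NaN and trapping checks above may force a
   NULL_TREE result instead.  */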
2672 \f
2673 /* Return nonzero if two operands (typically of the same tree node)
2674 are necessarily equal. If either argument has side-effects this
2675 function returns zero. FLAGS modifies behavior as follows:
2676
2677 If OEP_ONLY_CONST is set, only return nonzero for constants.
2678 This function tests whether the operands are indistinguishable;
2679 it does not test whether they are equal using C's == operation.
2680 The distinction is important for IEEE floating point, because
2681 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2682 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2683
2684 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2685 even though it may hold multiple values during a function.
2686 This is because a GCC tree node guarantees that nothing else is
2687 executed between the evaluation of its "operands" (which may often
2688 be evaluated in arbitrary order). Hence if the operands themselves
2689 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2690 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2691 unset means assuming isochronic (or instantaneous) tree equivalence.
2692 Unless comparing arbitrary expression trees, such as from different
2693 statements, this flag can usually be left unset.
2694
2695 If OEP_PURE_SAME is set, then pure functions with identical arguments
2696 are considered the same. It is used when the caller has other ways
2697 to ensure that global memory is unchanged in between. */
2698
2699 int
2700 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2701 {
2702 /* If either is ERROR_MARK, they aren't equal. */
2703 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2704 || TREE_TYPE (arg0) == error_mark_node
2705 || TREE_TYPE (arg1) == error_mark_node)
2706 return 0;
2707
2708 /* Similar, if either does not have a type (like a released SSA name),
2709 they aren't equal. */
2710 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2711 return 0;
2712
2713 /* Check equality of integer constants before bailing out due to
2714 precision differences. */
2715 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2716 return tree_int_cst_equal (arg0, arg1);
2717
2718 /* If both types don't have the same signedness, then we can't consider
2719 them equal. We must check this before the STRIP_NOPS calls
2720 because they may change the signedness of the arguments. As pointers
2721 strictly don't have a signedness, require either two pointers or
2722 two non-pointers as well. */
2723 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2724 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2725 return 0;
2726
2727 /* We cannot consider pointers to different address space equal. */
2728 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2729 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2730 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2731 return 0;
2732
2733 /* If both types don't have the same precision, then it is not safe
2734 to strip NOPs. */
2735 if (element_precision (TREE_TYPE (arg0))
2736 != element_precision (TREE_TYPE (arg1)))
2737 return 0;
2738
2739 STRIP_NOPS (arg0);
2740 STRIP_NOPS (arg1);
2741
2742 /* In case both args are comparisons but with different comparison
2743 code, try to swap the comparison operands of one arg to produce
2744 a match and compare that variant. */
2745 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2746 && COMPARISON_CLASS_P (arg0)
2747 && COMPARISON_CLASS_P (arg1))
2748 {
2749 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2750
2751 if (TREE_CODE (arg0) == swap_code)
2752 return operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags);
2756 }
2757
2758 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2759 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2760 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2761 return 0;
2762
2763 /* This is needed for conversions and for COMPONENT_REF.
2764 Might as well play it safe and always test this. */
2765 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2766 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2767 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2768 return 0;
2769
2770 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2771 We don't care about side effects in that case because the SAVE_EXPR
2772 takes care of that for us. In all other cases, two expressions are
2773 equal if they have no side effects. If we have two identical
2774 expressions with side effects that should be treated the same due
2775 to the only side effects being identical SAVE_EXPR's, that will
2776 be detected in the recursive calls below.
2777 If we are taking an invariant address of two identical objects
2778 they are necessarily equal as well. */
2779 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2780 && (TREE_CODE (arg0) == SAVE_EXPR
2781 || (flags & OEP_CONSTANT_ADDRESS_OF)
2782 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2783 return 1;
2784
2785 /* Next handle constant cases, those for which we can return 1 even
2786 if ONLY_CONST is set. */
2787 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2788 switch (TREE_CODE (arg0))
2789 {
2790 case INTEGER_CST:
2791 return tree_int_cst_equal (arg0, arg1);
2792
2793 case FIXED_CST:
2794 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2795 TREE_FIXED_CST (arg1));
2796
2797 case REAL_CST:
2798 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2799 TREE_REAL_CST (arg1)))
2800 return 1;
2801
2802
2803 if (!HONOR_SIGNED_ZEROS (arg0))
2804 {
2805 /* If we do not distinguish between signed and unsigned zero,
2806 consider them equal. */
2807 if (real_zerop (arg0) && real_zerop (arg1))
2808 return 1;
2809 }
2810 return 0;
2811
2812 case VECTOR_CST:
2813 {
2814 unsigned i;
2815
2816 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2817 return 0;
2818
2819 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2820 {
2821 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2822 VECTOR_CST_ELT (arg1, i), flags))
2823 return 0;
2824 }
2825 return 1;
2826 }
2827
2828 case COMPLEX_CST:
2829 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2830 flags)
2831 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2832 flags));
2833
2834 case STRING_CST:
2835 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2836 && ! memcmp (TREE_STRING_POINTER (arg0),
2837 TREE_STRING_POINTER (arg1),
2838 TREE_STRING_LENGTH (arg0)));
2839
2840 case ADDR_EXPR:
2841 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2842 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2843 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2844 default:
2845 break;
2846 }
2847
2848 if (flags & OEP_ONLY_CONST)
2849 return 0;
2850
2851 /* Define macros to test an operand from arg0 and arg1 for equality and a
2852 variant that allows null and views null as being different from any
2853 non-null value. In the latter case, if either is null, then both
2854 must be; otherwise, do the normal comparison. */
2855 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2856 TREE_OPERAND (arg1, N), flags)
2857
2858 #define OP_SAME_WITH_NULL(N) \
2859 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2860 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2861
2862 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2863 {
2864 case tcc_unary:
2865 /* Two conversions are equal only if signedness and modes match. */
2866 switch (TREE_CODE (arg0))
2867 {
2868 CASE_CONVERT:
2869 case FIX_TRUNC_EXPR:
2870 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2871 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2872 return 0;
2873 break;
2874 default:
2875 break;
2876 }
2877
2878 return OP_SAME (0);
2879
2880
2881 case tcc_comparison:
2882 case tcc_binary:
2883 if (OP_SAME (0) && OP_SAME (1))
2884 return 1;
2885
2886 /* For commutative ops, allow the other order. */
2887 return (commutative_tree_code (TREE_CODE (arg0))
2888 && operand_equal_p (TREE_OPERAND (arg0, 0),
2889 TREE_OPERAND (arg1, 1), flags)
2890 && operand_equal_p (TREE_OPERAND (arg0, 1),
2891 TREE_OPERAND (arg1, 0), flags));
2892
2893 case tcc_reference:
2894 /* If either of the pointer (or reference) expressions we are
2895 dereferencing contain a side effect, these cannot be equal,
2896 but their addresses can be. */
2897 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2898 && (TREE_SIDE_EFFECTS (arg0)
2899 || TREE_SIDE_EFFECTS (arg1)))
2900 return 0;
2901
2902 switch (TREE_CODE (arg0))
2903 {
2904 case INDIRECT_REF:
2905 if (!(flags & OEP_ADDRESS_OF)
2906 && (TYPE_ALIGN (TREE_TYPE (arg0))
2907 != TYPE_ALIGN (TREE_TYPE (arg1))))
2908 return 0;
2909 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2910 return OP_SAME (0);
2911
2912 case REALPART_EXPR:
2913 case IMAGPART_EXPR:
2914 return OP_SAME (0);
2915
2916 case TARGET_MEM_REF:
2917 case MEM_REF:
2918 /* Require equal access sizes, and similar pointer types.
2919 We can have incomplete types for array references of
2920 variable-sized arrays from the Fortran frontend
2921 though. Also verify the types are compatible. */
2922 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2923 || (TYPE_SIZE (TREE_TYPE (arg0))
2924 && TYPE_SIZE (TREE_TYPE (arg1))
2925 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2926 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2927 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2928 && ((flags & OEP_ADDRESS_OF)
2929 || (alias_ptr_types_compatible_p
2930 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2931 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2932 && (MR_DEPENDENCE_CLIQUE (arg0)
2933 == MR_DEPENDENCE_CLIQUE (arg1))
2934 && (MR_DEPENDENCE_BASE (arg0)
2935 == MR_DEPENDENCE_BASE (arg1))
2936 && (TYPE_ALIGN (TREE_TYPE (arg0))
2937 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2938 return 0;
2939 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2940 return (OP_SAME (0) && OP_SAME (1)
2941 /* TARGET_MEM_REFs require equal extra operands. */
2942 && (TREE_CODE (arg0) != TARGET_MEM_REF
2943 || (OP_SAME_WITH_NULL (2)
2944 && OP_SAME_WITH_NULL (3)
2945 && OP_SAME_WITH_NULL (4))));
2946
2947 case ARRAY_REF:
2948 case ARRAY_RANGE_REF:
2949 /* Operands 2 and 3 may be null.
2950 Compare the array index by value if it is constant first as we
2951 may have different types but same value here. */
2952 if (!OP_SAME (0))
2953 return 0;
2954 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2955 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2956 TREE_OPERAND (arg1, 1))
2957 || OP_SAME (1))
2958 && OP_SAME_WITH_NULL (2)
2959 && OP_SAME_WITH_NULL (3));
2960
2961 case COMPONENT_REF:
2962 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2963 may be NULL when we're called to compare MEM_EXPRs. */
2964 if (!OP_SAME_WITH_NULL (0)
2965 || !OP_SAME (1))
2966 return 0;
2967 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2968 return OP_SAME_WITH_NULL (2);
2969
2970 case BIT_FIELD_REF:
2971 if (!OP_SAME (0))
2972 return 0;
2973 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2974 return OP_SAME (1) && OP_SAME (2);
2975
2976 default:
2977 return 0;
2978 }
2979
2980 case tcc_expression:
2981 switch (TREE_CODE (arg0))
2982 {
2983 case ADDR_EXPR:
2984 return operand_equal_p (TREE_OPERAND (arg0, 0),
2985 TREE_OPERAND (arg1, 0),
2986 flags | OEP_ADDRESS_OF);
2987
2988 case TRUTH_NOT_EXPR:
2989 return OP_SAME (0);
2990
2991 case TRUTH_ANDIF_EXPR:
2992 case TRUTH_ORIF_EXPR:
2993 return OP_SAME (0) && OP_SAME (1);
2994
2995 case FMA_EXPR:
2996 case WIDEN_MULT_PLUS_EXPR:
2997 case WIDEN_MULT_MINUS_EXPR:
2998 if (!OP_SAME (2))
2999 return 0;
3000 /* The multiplication operands are commutative. */
3001 /* FALLTHRU */
3002
3003 case TRUTH_AND_EXPR:
3004 case TRUTH_OR_EXPR:
3005 case TRUTH_XOR_EXPR:
3006 if (OP_SAME (0) && OP_SAME (1))
3007 return 1;
3008
3009 /* Otherwise take into account this is a commutative operation. */
3010 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3011 TREE_OPERAND (arg1, 1), flags)
3012 && operand_equal_p (TREE_OPERAND (arg0, 1),
3013 TREE_OPERAND (arg1, 0), flags));
3014
3015 case COND_EXPR:
3016 case VEC_COND_EXPR:
3017 case DOT_PROD_EXPR:
3018 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3019
3020 default:
3021 return 0;
3022 }
3023
3024 case tcc_vl_exp:
3025 switch (TREE_CODE (arg0))
3026 {
3027 case CALL_EXPR:
3028 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3029 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3030 /* If one CALL_EXPR is internal and the other is a normal function
3031 call, then they are not equal. */
3032 return 0;
3033 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3034 {
3035 /* If the CALL_EXPRs call different internal functions, then they
3036 are not equal. */
3037 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3038 return 0;
3039 }
3040 else
3041 {
3042 /* If the CALL_EXPRs call different functions, then they are not
3043 equal. */
3044 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3045 flags))
3046 return 0;
3047 }
3048
3049 {
3050 unsigned int cef = call_expr_flags (arg0);
3051 if (flags & OEP_PURE_SAME)
3052 cef &= ECF_CONST | ECF_PURE;
3053 else
3054 cef &= ECF_CONST;
3055 if (!cef)
3056 return 0;
3057 }
3058
3059 /* Now see if all the arguments are the same. */
3060 {
3061 const_call_expr_arg_iterator iter0, iter1;
3062 const_tree a0, a1;
3063 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3064 a1 = first_const_call_expr_arg (arg1, &iter1);
3065 a0 && a1;
3066 a0 = next_const_call_expr_arg (&iter0),
3067 a1 = next_const_call_expr_arg (&iter1))
3068 if (! operand_equal_p (a0, a1, flags))
3069 return 0;
3070
3071 /* If we get here and both argument lists are exhausted
3072 then the CALL_EXPRs are equal. */
3073 return ! (a0 || a1);
3074 }
3075 default:
3076 return 0;
3077 }
3078
3079 case tcc_declaration:
3080 /* Consider __builtin_sqrt equal to sqrt. */
3081 return (TREE_CODE (arg0) == FUNCTION_DECL
3082 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3083 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3084 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3085
3086 default:
3087 return 0;
3088 }
3089
3090 #undef OP_SAME
3091 #undef OP_SAME_WITH_NULL
3092 }
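
/* Editorial examples of the semantics above (illustrative only, with
   hypothetical operands):

     operand_equal_p (a + b, b + a, 0)  ->  1  (PLUS_EXPR is commutative)
     operand_equal_p (f (), f (), 0)    ->  0  (side effects, unless F is
                                                const/pure and the right
                                                OEP flags are set)

   and REAL_CSTs for -0.0 and 0.0 compare unequal whenever the type
   honors signed zeros.  */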
3093 \f
3094 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3095 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3096
3097 When in doubt, return 0. */
3098
3099 static int
3100 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3101 {
3102 int unsignedp1, unsignedpo;
3103 tree primarg0, primarg1, primother;
3104 unsigned int correct_width;
3105
3106 if (operand_equal_p (arg0, arg1, 0))
3107 return 1;
3108
3109 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3110 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3111 return 0;
3112
3113 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3114 and see if the inner values are the same. This removes any
3115 signedness comparison, which doesn't matter here. */
3116 primarg0 = arg0, primarg1 = arg1;
3117 STRIP_NOPS (primarg0);
3118 STRIP_NOPS (primarg1);
3119 if (operand_equal_p (primarg0, primarg1, 0))
3120 return 1;
3121
3122 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3123 actual comparison operand, ARG0.
3124
3125 First throw away any conversions to wider types
3126 already present in the operands. */
3127
3128 primarg1 = get_narrower (arg1, &unsignedp1);
3129 primother = get_narrower (other, &unsignedpo);
3130
3131 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3132 if (unsignedp1 == unsignedpo
3133 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3134 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3135 {
3136 tree type = TREE_TYPE (arg0);
3137
3138 /* Make sure the shorter operand is extended the right way
3139 to match the longer operand. */
3140 primarg1 = fold_convert (signed_or_unsigned_type_for
3141 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3142
3143 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3144 return 1;
3145 }
3146
3147 return 0;
3148 }
3149 \f
3150 /* See if ARG is an expression that is either a comparison or is performing
3151 arithmetic on comparisons. The comparisons must only be comparing
3152 two different values, which will be stored in *CVAL1 and *CVAL2; if
3153 they are nonzero it means that some operands have already been found.
3154 No variables may be used anywhere else in the expression except in the
3155 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3156 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3157
3158 If this is true, return 1. Otherwise, return zero. */
3159
3160 static int
3161 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3162 {
3163 enum tree_code code = TREE_CODE (arg);
3164 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3165
3166 /* We can handle some of the tcc_expression cases here. */
3167 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3168 tclass = tcc_unary;
3169 else if (tclass == tcc_expression
3170 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3171 || code == COMPOUND_EXPR))
3172 tclass = tcc_binary;
3173
3174 else if (tclass == tcc_expression && code == SAVE_EXPR
3175 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3176 {
3177 /* If we've already found a CVAL1 or CVAL2, this expression is
3178 too complex to handle. */
3179 if (*cval1 || *cval2)
3180 return 0;
3181
3182 tclass = tcc_unary;
3183 *save_p = 1;
3184 }
3185
3186 switch (tclass)
3187 {
3188 case tcc_unary:
3189 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3190
3191 case tcc_binary:
3192 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3193 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3194 cval1, cval2, save_p));
3195
3196 case tcc_constant:
3197 return 1;
3198
3199 case tcc_expression:
3200 if (code == COND_EXPR)
3201 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3202 cval1, cval2, save_p)
3203 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3204 cval1, cval2, save_p)
3205 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3206 cval1, cval2, save_p));
3207 return 0;
3208
3209 case tcc_comparison:
3210 /* First see if we can handle the first operand, then the second. For
3211 the second operand, we know *CVAL1 can't be zero. It must be that
3212 one side of the comparison is each of the values; test for the
3213 case where this isn't true by failing if the two operands
3214 are the same. */
3215
3216 if (operand_equal_p (TREE_OPERAND (arg, 0),
3217 TREE_OPERAND (arg, 1), 0))
3218 return 0;
3219
3220 if (*cval1 == 0)
3221 *cval1 = TREE_OPERAND (arg, 0);
3222 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3223 ;
3224 else if (*cval2 == 0)
3225 *cval2 = TREE_OPERAND (arg, 0);
3226 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3227 ;
3228 else
3229 return 0;
3230
3231 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3232 ;
3233 else if (*cval2 == 0)
3234 *cval2 = TREE_OPERAND (arg, 1);
3235 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3236 ;
3237 else
3238 return 0;
3239
3240 return 1;
3241
3242 default:
3243 return 0;
3244 }
3245 }
3246 \f
3247 /* ARG is a tree that is known to contain just arithmetic operations and
3248 comparisons. Evaluate the operations in the tree substituting NEW0 for
3249 any occurrence of OLD0 as an operand of a comparison and likewise for
3250 NEW1 and OLD1. */
3251
3252 static tree
3253 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3254 tree old1, tree new1)
3255 {
3256 tree type = TREE_TYPE (arg);
3257 enum tree_code code = TREE_CODE (arg);
3258 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3259
3260 /* We can handle some of the tcc_expression cases here. */
3261 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3262 tclass = tcc_unary;
3263 else if (tclass == tcc_expression
3264 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3265 tclass = tcc_binary;
3266
3267 switch (tclass)
3268 {
3269 case tcc_unary:
3270 return fold_build1_loc (loc, code, type,
3271 eval_subst (loc, TREE_OPERAND (arg, 0),
3272 old0, new0, old1, new1));
3273
3274 case tcc_binary:
3275 return fold_build2_loc (loc, code, type,
3276 eval_subst (loc, TREE_OPERAND (arg, 0),
3277 old0, new0, old1, new1),
3278 eval_subst (loc, TREE_OPERAND (arg, 1),
3279 old0, new0, old1, new1));
3280
3281 case tcc_expression:
3282 switch (code)
3283 {
3284 case SAVE_EXPR:
3285 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3286 old1, new1);
3287
3288 case COMPOUND_EXPR:
3289 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3290 old1, new1);
3291
3292 case COND_EXPR:
3293 return fold_build3_loc (loc, code, type,
3294 eval_subst (loc, TREE_OPERAND (arg, 0),
3295 old0, new0, old1, new1),
3296 eval_subst (loc, TREE_OPERAND (arg, 1),
3297 old0, new0, old1, new1),
3298 eval_subst (loc, TREE_OPERAND (arg, 2),
3299 old0, new0, old1, new1));
3300 default:
3301 break;
3302 }
3303 /* Fall through - ??? */
3304
3305 case tcc_comparison:
3306 {
3307 tree arg0 = TREE_OPERAND (arg, 0);
3308 tree arg1 = TREE_OPERAND (arg, 1);
3309
3310 /* We need to check both for exact equality and tree equality. The
3311 former will be true if the operand has a side-effect. In that
3312 case, we know the operand occurred exactly once. */
3313
3314 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3315 arg0 = new0;
3316 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3317 arg0 = new1;
3318
3319 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3320 arg1 = new0;
3321 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3322 arg1 = new1;
3323
3324 return fold_build2_loc (loc, code, type, arg0, arg1);
3325 }
3326
3327 default:
3328 return arg;
3329 }
3330 }
3331 \f
3332 /* Return a tree for the case when the result of an expression is RESULT
3333 converted to TYPE and OMITTED was previously an operand of the expression
3334 but is now not needed (e.g., we folded OMITTED * 0).
3335
3336 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3337 the conversion of RESULT to TYPE. */
3338
3339 tree
3340 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3341 {
3342 tree t = fold_convert_loc (loc, type, result);
3343
3344 /* If the resulting operand is an empty statement, just return the omitted
3345 statement cast to void. */
3346 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3347 return build1_loc (loc, NOP_EXPR, void_type_node,
3348 fold_ignored_result (omitted));
3349
3350 if (TREE_SIDE_EFFECTS (omitted))
3351 return build2_loc (loc, COMPOUND_EXPR, type,
3352 fold_ignored_result (omitted), t);
3353
3354 return non_lvalue_loc (loc, t);
3355 }
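
/* Editorial example (an illustrative sketch, not GCC code): when
   folding CALL * 0 where CALL has side effects, the call cannot simply
   be dropped, so

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   produces COMPOUND_EXPR <call, 0>, which evaluates the call for its
   effects and yields zero.  */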
3356
3357 /* Return a tree for the case when the result of an expression is RESULT
3358 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3359 of the expression but are now not needed.
3360
3361 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3362 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3363 evaluated before OMITTED2. Otherwise, if neither has side effects,
3364 just do the conversion of RESULT to TYPE. */
3365
3366 tree
3367 omit_two_operands_loc (location_t loc, tree type, tree result,
3368 tree omitted1, tree omitted2)
3369 {
3370 tree t = fold_convert_loc (loc, type, result);
3371
3372 if (TREE_SIDE_EFFECTS (omitted2))
3373 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3374 if (TREE_SIDE_EFFECTS (omitted1))
3375 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3376
3377 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3378 }
3379
3380 \f
3381 /* Return a simplified tree node for the truth-negation of ARG. This
3382 never alters ARG itself. We assume that ARG is an operation that
3383 returns a truth value (0 or 1).
3384
3385 FIXME: one would think we would fold the result, but it causes
3386 problems with the dominator optimizer. */
3387
3388 static tree
3389 fold_truth_not_expr (location_t loc, tree arg)
3390 {
3391 tree type = TREE_TYPE (arg);
3392 enum tree_code code = TREE_CODE (arg);
3393 location_t loc1, loc2;
3394
3395 /* If this is a comparison, we can simply invert it, except for
3396 floating-point non-equality comparisons, in which case we just
3397 enclose a TRUTH_NOT_EXPR around what we have. */
3398
3399 if (TREE_CODE_CLASS (code) == tcc_comparison)
3400 {
3401 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3402 if (FLOAT_TYPE_P (op_type)
3403 && flag_trapping_math
3404 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3405 && code != NE_EXPR && code != EQ_EXPR)
3406 return NULL_TREE;
3407
3408 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3409 if (code == ERROR_MARK)
3410 return NULL_TREE;
3411
3412 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3413 TREE_OPERAND (arg, 1));
3414 }
3415
3416 switch (code)
3417 {
3418 case INTEGER_CST:
3419 return constant_boolean_node (integer_zerop (arg), type);
3420
3421 case TRUTH_AND_EXPR:
3422 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3423 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3424 return build2_loc (loc, TRUTH_OR_EXPR, type,
3425 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3426 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3427
3428 case TRUTH_OR_EXPR:
3429 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3430 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3431 return build2_loc (loc, TRUTH_AND_EXPR, type,
3432 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3433 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3434
3435 case TRUTH_XOR_EXPR:
3436 /* Here we can invert either operand. We invert the first operand
3437 unless the second operand is a TRUTH_NOT_EXPR in which case our
3438 result is the XOR of the first operand with the inside of the
3439 negation of the second operand. */
3440
3441 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3442 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3443 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3444 else
3445 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3446 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3447 TREE_OPERAND (arg, 1));
3448
3449 case TRUTH_ANDIF_EXPR:
3450 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3451 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3452 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3453 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3454 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3455
3456 case TRUTH_ORIF_EXPR:
3457 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3458 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3459 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3460 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3461 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3462
3463 case TRUTH_NOT_EXPR:
3464 return TREE_OPERAND (arg, 0);
3465
3466 case COND_EXPR:
3467 {
3468 tree arg1 = TREE_OPERAND (arg, 1);
3469 tree arg2 = TREE_OPERAND (arg, 2);
3470
3471 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3472 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3473
3474 /* A COND_EXPR may have a throw as one operand, which
3475 then has void type. Just leave void operands
3476 as they are. */
3477 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3478 VOID_TYPE_P (TREE_TYPE (arg1))
3479 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3480 VOID_TYPE_P (TREE_TYPE (arg2))
3481 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3482 }
3483
3484 case COMPOUND_EXPR:
3485 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3486 return build2_loc (loc, COMPOUND_EXPR, type,
3487 TREE_OPERAND (arg, 0),
3488 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3489
3490 case NON_LVALUE_EXPR:
3491 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3492 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3493
3494 CASE_CONVERT:
3495 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3496 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3497
3498 /* ... fall through ... */
3499
3500 case FLOAT_EXPR:
3501 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3502 return build1_loc (loc, TREE_CODE (arg), type,
3503 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3504
3505 case BIT_AND_EXPR:
3506 if (!integer_onep (TREE_OPERAND (arg, 1)))
3507 return NULL_TREE;
3508 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3509
3510 case SAVE_EXPR:
3511 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3512
3513 case CLEANUP_POINT_EXPR:
3514 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3515 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3516 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3517
3518 default:
3519 return NULL_TREE;
3520 }
3521 }
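
/* Editorial examples of fold_truth_not_expr (illustrative only):

     !(a && b)  ->  !a || !b          (De Morgan, TRUTH_AND_EXPR case)
     !(x < y)   ->  x >= y            for integer X and Y
     !(p & 1)   ->  (p & 1) == 0      (the BIT_AND_EXPR case)

   Floating-point orderings whose inversion could change trapping
   behavior yield NULL_TREE instead.  */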
3522
3523 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3524 assume that ARG is an operation that returns a truth value (0 or 1
3525 for scalars, 0 or -1 for vectors). Return the folded expression if
3526 folding is successful. Otherwise, return NULL_TREE. */
3527
3528 static tree
3529 fold_invert_truthvalue (location_t loc, tree arg)
3530 {
3531 tree type = TREE_TYPE (arg);
3532 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3533 ? BIT_NOT_EXPR
3534 : TRUTH_NOT_EXPR,
3535 type, arg);
3536 }
3537
3538 /* Return a simplified tree node for the truth-negation of ARG. This
3539 never alters ARG itself. We assume that ARG is an operation that
3540 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3541
3542 tree
3543 invert_truthvalue_loc (location_t loc, tree arg)
3544 {
3545 if (TREE_CODE (arg) == ERROR_MARK)
3546 return arg;
3547
3548 tree type = TREE_TYPE (arg);
3549 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3550 ? BIT_NOT_EXPR
3551 : TRUTH_NOT_EXPR,
3552 type, arg);
3553 }
3554
3555 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3556 operands are another bit-wise operation with a common input. If so,
3557 distribute the bit operations to save an operation and possibly two if
3558 constants are involved. For example, convert
3559 (A | B) & (A | C) into A | (B & C)
3560 Further simplification will occur if B and C are constants.
3561
3562 If this optimization cannot be done, 0 will be returned. */
3563
3564 static tree
3565 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3566 tree arg0, tree arg1)
3567 {
3568 tree common;
3569 tree left, right;
3570
3571 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3572 || TREE_CODE (arg0) == code
3573 || (TREE_CODE (arg0) != BIT_AND_EXPR
3574 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3575 return 0;
3576
3577 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3578 {
3579 common = TREE_OPERAND (arg0, 0);
3580 left = TREE_OPERAND (arg0, 1);
3581 right = TREE_OPERAND (arg1, 1);
3582 }
3583 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3584 {
3585 common = TREE_OPERAND (arg0, 0);
3586 left = TREE_OPERAND (arg0, 1);
3587 right = TREE_OPERAND (arg1, 0);
3588 }
3589 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3590 {
3591 common = TREE_OPERAND (arg0, 1);
3592 left = TREE_OPERAND (arg0, 0);
3593 right = TREE_OPERAND (arg1, 1);
3594 }
3595 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3596 {
3597 common = TREE_OPERAND (arg0, 1);
3598 left = TREE_OPERAND (arg0, 0);
3599 right = TREE_OPERAND (arg1, 0);
3600 }
3601 else
3602 return 0;
3603
3604 common = fold_convert_loc (loc, type, common);
3605 left = fold_convert_loc (loc, type, left);
3606 right = fold_convert_loc (loc, type, right);
3607 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3608 fold_build2_loc (loc, code, type, left, right));
3609 }
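
/* Editorial example (illustrative only): with constants involved the
   inner operation folds away entirely,

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   saving one bitwise operation outright and a second via the constant
   fold.  */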
3610
3611 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3612 simplify a binary operation with code CODE. This optimization is unsafe. */
3613 static tree
3614 distribute_real_division (location_t loc, enum tree_code code, tree type,
3615 tree arg0, tree arg1)
3616 {
3617 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3618 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3619
3620 /* (A / C) +- (B / C) -> (A +- B) / C. */
3621 if (mul0 == mul1
3622 && operand_equal_p (TREE_OPERAND (arg0, 1),
3623 TREE_OPERAND (arg1, 1), 0))
3624 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3625 fold_build2_loc (loc, code, type,
3626 TREE_OPERAND (arg0, 0),
3627 TREE_OPERAND (arg1, 0)),
3628 TREE_OPERAND (arg0, 1));
3629
3630 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3631 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3632 TREE_OPERAND (arg1, 0), 0)
3633 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3634 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3635 {
3636 REAL_VALUE_TYPE r0, r1;
3637 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3638 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3639 if (!mul0)
3640 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3641 if (!mul1)
3642 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3643 real_arithmetic (&r0, code, &r0, &r1);
3644 return fold_build2_loc (loc, MULT_EXPR, type,
3645 TREE_OPERAND (arg0, 0),
3646 build_real (type, r0));
3647 }
3648
3649 return NULL_TREE;
3650 }
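
/* Editorial examples of distribute_real_division (illustrative only):

     (a / 2.0) - (b / 2.0)  ->  (a - b) / 2.0
     (a / 2.0) + (a / 4.0)  ->  a * 0.75

   Both rewrites can change rounding and overflow behavior, hence the
   "unsafe" label above; callers are expected to guard this with the
   unsafe-math flags.  */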
3651 \f
3652 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3653 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3654
3655 static tree
3656 make_bit_field_ref (location_t loc, tree inner, tree type,
3657 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3658 {
3659 tree result, bftype;
3660
3661 if (bitpos == 0)
3662 {
3663 tree size = TYPE_SIZE (TREE_TYPE (inner));
3664 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3665 || POINTER_TYPE_P (TREE_TYPE (inner)))
3666 && tree_fits_shwi_p (size)
3667 && tree_to_shwi (size) == bitsize)
3668 return fold_convert_loc (loc, type, inner);
3669 }
3670
3671 bftype = type;
3672 if (TYPE_PRECISION (bftype) != bitsize
3673 || TYPE_UNSIGNED (bftype) == !unsignedp)
3674 bftype = build_nonstandard_integer_type (bitsize, 0);
3675
3676 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3677 size_int (bitsize), bitsize_int (bitpos));
3678
3679 if (bftype != type)
3680 result = fold_convert_loc (loc, type, result);
3681
3682 return result;
3683 }
3684
3685 /* Optimize a bit-field compare.
3686
3687 There are two cases: First is a compare against a constant and the
3688 second is a comparison of two items where the fields are at the same
3689 bit position relative to the start of a chunk (byte, halfword, word)
3690 large enough to contain it. In these cases we can avoid the shift
3691 implicit in bitfield extractions.
3692
3693 For constants, we emit a compare of the shifted constant with the
3694 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3695 compared. For two fields at the same position, we do the ANDs with the
3696 similar mask and compare the result of the ANDs.
3697
3698 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3699 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3700 are the left and right operands of the comparison, respectively.
3701
3702 If the optimization described above can be done, we return the resulting
3703 tree. Otherwise we return zero. */
3704
3705 static tree
3706 optimize_bit_field_compare (location_t loc, enum tree_code code,
3707 tree compare_type, tree lhs, tree rhs)
3708 {
3709 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3710 tree type = TREE_TYPE (lhs);
3711 tree unsigned_type;
3712 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3713 machine_mode lmode, rmode, nmode;
3714 int lunsignedp, runsignedp;
3715 int lvolatilep = 0, rvolatilep = 0;
3716 tree linner, rinner = NULL_TREE;
3717 tree mask;
3718 tree offset;
3719
3720 /* Get all the information about the extractions being done. If the bit size
3721 is the same as the size of the underlying object, we aren't doing an
3722 extraction at all and so can do nothing. We also don't want to
3723 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3724 then will no longer be able to replace it. */
3725 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3726 &lunsignedp, &lvolatilep, false);
3727 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3728 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3729 return 0;
3730
3731 if (!const_p)
3732 {
3733 /* If this is not a constant, we can only do something if bit positions,
3734 sizes, and signedness are the same. */
3735 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3736 &runsignedp, &rvolatilep, false);
3737
3738 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3739 || lunsignedp != runsignedp || offset != 0
3740 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3741 return 0;
3742 }
3743
3744 /* See if we can find a mode to refer to this field. We should be able to,
3745 but fail if we can't. */
3746 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3747 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3748 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3749 TYPE_ALIGN (TREE_TYPE (rinner))),
3750 word_mode, false);
3751 if (nmode == VOIDmode)
3752 return 0;
3753
3754 /* Set signed and unsigned types of the precision of this mode for the
3755 shifts below. */
3756 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3757
3758 /* Compute the bit position and size for the new reference and our offset
3759 within it. If the new reference is the same size as the original, we
3760 won't optimize anything, so return zero. */
3761 nbitsize = GET_MODE_BITSIZE (nmode);
3762 nbitpos = lbitpos & ~ (nbitsize - 1);
3763 lbitpos -= nbitpos;
3764 if (nbitsize == lbitsize)
3765 return 0;
3766
3767 if (BYTES_BIG_ENDIAN)
3768 lbitpos = nbitsize - lbitsize - lbitpos;
3769
3770 /* Make the mask to be used against the extracted field. */
3771 mask = build_int_cst_type (unsigned_type, -1);
3772 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3773 mask = const_binop (RSHIFT_EXPR, mask,
3774 size_int (nbitsize - lbitsize - lbitpos));
3775
3776 if (! const_p)
3777 /* If not comparing with constant, just rework the comparison
3778 and return. */
3779 return fold_build2_loc (loc, code, compare_type,
3780 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3781 make_bit_field_ref (loc, linner,
3782 unsigned_type,
3783 nbitsize, nbitpos,
3784 1),
3785 mask),
3786 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3787 make_bit_field_ref (loc, rinner,
3788 unsigned_type,
3789 nbitsize, nbitpos,
3790 1),
3791 mask));
3792
3793 /* Otherwise, we are handling the constant case. See if the constant is too
3794 big for the field. Warn and return a tree for 0 (false) if so. We do
3795 this not only for its own sake, but to avoid having to test for this
3796 error case below. If we didn't, we might generate wrong code.
3797
3798 For unsigned fields, the constant shifted right by the field length should
3799 be all zero. For signed fields, the high-order bits should agree with
3800 the sign bit. */
3801
3802 if (lunsignedp)
3803 {
3804 if (wi::lrshift (rhs, lbitsize) != 0)
3805 {
3806 warning (0, "comparison is always %d due to width of bit-field",
3807 code == NE_EXPR);
3808 return constant_boolean_node (code == NE_EXPR, compare_type);
3809 }
3810 }
3811 else
3812 {
3813 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3814 if (tem != 0 && tem != -1)
3815 {
3816 warning (0, "comparison is always %d due to width of bit-field",
3817 code == NE_EXPR);
3818 return constant_boolean_node (code == NE_EXPR, compare_type);
3819 }
3820 }
3821
3822 /* Single-bit compares should always be against zero. */
3823 if (lbitsize == 1 && ! integer_zerop (rhs))
3824 {
3825 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3826 rhs = build_int_cst (type, 0);
3827 }
3828
3829 /* Make a new bitfield reference, shift the constant over the
3830 appropriate number of bits and mask it with the computed mask
3831 (in case this was a signed field). If we changed it, make a new one. */
3832 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3833
3834 rhs = const_binop (BIT_AND_EXPR,
3835 const_binop (LSHIFT_EXPR,
3836 fold_convert_loc (loc, unsigned_type, rhs),
3837 size_int (lbitpos)),
3838 mask);
3839
3840 lhs = build2_loc (loc, code, compare_type,
3841 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3842 return lhs;
3843 }
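
/* Editorial sketch (not part of GCC): a minimal standalone illustration of
   the transformation above, on a made-up field position (four bits
   starting at bit 3 of a 32-bit word).  The fold compares the masked word
   in place instead of shifting the field down first.  */

static int
bit_field_compare_example (unsigned int word)
{
  /* Extract-then-compare, as a naive bit-field read would do.  */
  int extracted = ((word >> 3) & 0xf) == 5;
  /* Mask in place and compare against the shifted constant, as the fold
     produces: no shift of WORD is needed.  */
  int masked = (word & (0xfu << 3)) == (5u << 3);
  return extracted == masked;          /* Always 1.  */
}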
3844 \f
3845 /* Subroutine for fold_truth_andor_1: decode a field reference.
3846
3847 If EXP is a comparison reference, we return the innermost reference.
3848
3849 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3850 set to the starting bit number.
3851
3852 If the innermost field can be completely contained in a mode-sized
3853 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3854
3855 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3856 otherwise it is not changed.
3857
3858 *PUNSIGNEDP is set to the signedness of the field.
3859
3860 *PMASK is set to the mask used. This is either contained in a
3861 BIT_AND_EXPR or derived from the width of the field.
3862
3863 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3864
3865 Return 0 if this is not a component reference or is one that we can't
3866 do anything with. */
3867
3868 static tree
3869 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3870 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3871 int *punsignedp, int *pvolatilep,
3872 tree *pmask, tree *pand_mask)
3873 {
3874 tree outer_type = 0;
3875 tree and_mask = 0;
3876 tree mask, inner, offset;
3877 tree unsigned_type;
3878 unsigned int precision;
3879
3880 /* All the optimizations using this function assume integer fields.
3881 There are problems with FP fields since the type_for_size call
3882 below can fail for, e.g., XFmode. */
3883 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3884 return 0;
3885
3886 /* We are interested in the bare arrangement of bits, so strip everything
3887 that doesn't affect the machine mode. However, record the type of the
3888 outermost expression if it may matter below. */
3889 if (CONVERT_EXPR_P (exp)
3890 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3891 outer_type = TREE_TYPE (exp);
3892 STRIP_NOPS (exp);
3893
3894 if (TREE_CODE (exp) == BIT_AND_EXPR)
3895 {
3896 and_mask = TREE_OPERAND (exp, 1);
3897 exp = TREE_OPERAND (exp, 0);
3898 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3899 if (TREE_CODE (and_mask) != INTEGER_CST)
3900 return 0;
3901 }
3902
3903 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3904 punsignedp, pvolatilep, false);
3905 if ((inner == exp && and_mask == 0)
3906 || *pbitsize < 0 || offset != 0
3907 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3908 return 0;
3909
3910 /* If the number of bits in the reference is the same as the bitsize of
3911 the outer type, then the outer type gives the signedness. Otherwise
3912 (in case of a small bitfield) the signedness is unchanged. */
3913 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3914 *punsignedp = TYPE_UNSIGNED (outer_type);
3915
3916 /* Compute the mask to access the bitfield. */
3917 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3918 precision = TYPE_PRECISION (unsigned_type);
3919
3920 mask = build_int_cst_type (unsigned_type, -1);
3921
3922 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3923 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3924
3925 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3926 if (and_mask != 0)
3927 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3928 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3929
3930 *pmask = mask;
3931 *pand_mask = and_mask;
3932 return inner;
3933 }
3934
3935 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3936 bit positions and the type of MASK is signed. */
3937
3938 static int
3939 all_ones_mask_p (const_tree mask, unsigned int size)
3940 {
3941 tree type = TREE_TYPE (mask);
3942 unsigned int precision = TYPE_PRECISION (type);
3943
3944 /* If this function returns true when the type of the mask is
3945 UNSIGNED, then there will be errors. In particular see
3946 gcc.c-torture/execute/990326-1.c. There does not appear to be
3947 any documentation paper trail as to why this is so. But the pre
3948 wide-int worked with that restriction and it has been preserved
3949 here. */
3950 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3951 return false;
3952
3953 return wi::mask (size, false, precision) == mask;
3954 }
3955
3956 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3957 represents the sign bit of EXP's type. If EXP represents a sign
3958 or zero extension, also test VAL against the unextended type.
3959 The return value is the (sub)expression whose sign bit is VAL,
3960 or NULL_TREE otherwise. */
3961
3962 tree
3963 sign_bit_p (tree exp, const_tree val)
3964 {
3965 int width;
3966 tree t;
3967
3968 /* Tree EXP must have an integral type. */
3969 t = TREE_TYPE (exp);
3970 if (! INTEGRAL_TYPE_P (t))
3971 return NULL_TREE;
3972
3973 /* Tree VAL must be an integer constant. */
3974 if (TREE_CODE (val) != INTEGER_CST
3975 || TREE_OVERFLOW (val))
3976 return NULL_TREE;
3977
3978 width = TYPE_PRECISION (t);
3979 if (wi::only_sign_bit_p (val, width))
3980 return exp;
3981
3982 /* Handle extension from a narrower type. */
3983 if (TREE_CODE (exp) == NOP_EXPR
3984 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3985 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3986
3987 return NULL_TREE;
3988 }
3989
3990 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3991 to be evaluated unconditionally. */
3992
3993 static int
3994 simple_operand_p (const_tree exp)
3995 {
3996 /* Strip any conversions that don't change the machine mode. */
3997 STRIP_NOPS (exp);
3998
3999 return (CONSTANT_CLASS_P (exp)
4000 || TREE_CODE (exp) == SSA_NAME
4001 || (DECL_P (exp)
4002 && ! TREE_ADDRESSABLE (exp)
4003 && ! TREE_THIS_VOLATILE (exp)
4004 && ! DECL_NONLOCAL (exp)
4005 /* Don't regard global variables as simple. They may be
4006 allocated in ways unknown to the compiler (shared memory,
4007 #pragma weak, etc). */
4008 && ! TREE_PUBLIC (exp)
4009 && ! DECL_EXTERNAL (exp)
4010 /* Weakrefs are not safe to be read, since they can be NULL.
4011 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4012 have DECL_WEAK flag set. */
4013 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4014 /* Loading a static variable is unduly expensive, but global
4015 registers aren't expensive. */
4016 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4017 }
4018
4019 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4020 to be evaluated unconditionally.
4021 In addition to simple_operand_p, we assume that comparisons, conversions,
4022 and logic-not operations are simple, if their operands are simple, too. */
4023
4024 static bool
4025 simple_operand_p_2 (tree exp)
4026 {
4027 enum tree_code code;
4028
4029 if (TREE_SIDE_EFFECTS (exp)
4030 || tree_could_trap_p (exp))
4031 return false;
4032
4033 while (CONVERT_EXPR_P (exp))
4034 exp = TREE_OPERAND (exp, 0);
4035
4036 code = TREE_CODE (exp);
4037
4038 if (TREE_CODE_CLASS (code) == tcc_comparison)
4039 return (simple_operand_p (TREE_OPERAND (exp, 0))
4040 && simple_operand_p (TREE_OPERAND (exp, 1)));
4041
4042 if (code == TRUTH_NOT_EXPR)
4043 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4044
4045 return simple_operand_p (exp);
4046 }
4047
4048 \f
4049 /* The following functions are subroutines to fold_range_test and allow it to
4050 try to change a logical combination of comparisons into a range test.
4051
4052 For example, both
4053 X == 2 || X == 3 || X == 4 || X == 5
4054 and
4055 X >= 2 && X <= 5
4056 are converted to
4057 (unsigned) (X - 2) <= 3
4058
4059 We describe each set of comparisons as being either inside or outside
4060 a range, using a variable named like IN_P, and then describe the
4061 range with a lower and upper bound. If one of the bounds is omitted,
4062 it represents either the highest or lowest value of the type.
4063
4064 In the comments below, we represent a range by two numbers in brackets
4065 preceded by a "+" to designate being inside that range, or a "-" to
4066 designate being outside that range, so the condition can be inverted by
4067 flipping the prefix. An omitted bound is represented by a "-". For
4068 example, "- [-, 10]" means being outside the range starting at the lowest
4069 possible value and ending at 10, in other words, being greater than 10.
4070 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4071 always false.
4072
4073 We set up things so that the missing bounds are handled in a consistent
4074 manner so neither a missing bound nor "true" and "false" need to be
4075 handled using a special case. */
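
/* Editorial sketch (not part of GCC): a minimal standalone illustration of
   the range-test transformation described above, on plain ints.  The cast
   is applied before the subtraction so the example itself has no signed
   overflow; the value computed is the same as (unsigned) (X - 2) <= 3.  */

static int
range_test_example (int x)
{
  int by_cases = (x == 2 || x == 3 || x == 4 || x == 5);
  int by_range = ((unsigned) x - 2 <= 3);
  return by_cases == by_range;         /* Always 1.  */
}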
4076
4077 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4078 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4079 and UPPER1_P are nonzero if the respective argument is an upper bound
4080 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4081 must be specified for a comparison. ARG1 will be converted to ARG0's
4082 type if both are specified. */
4083
4084 static tree
4085 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4086 tree arg1, int upper1_p)
4087 {
4088 tree tem;
4089 int result;
4090 int sgn0, sgn1;
4091
4092 /* If neither arg represents infinity, do the normal operation.
4093 Else, if not a comparison, return infinity. Else handle the special
4094 comparison rules. Note that most of the cases below won't occur, but
4095 are handled for consistency. */
4096
4097 if (arg0 != 0 && arg1 != 0)
4098 {
4099 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4100 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4101 STRIP_NOPS (tem);
4102 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4103 }
4104
4105 if (TREE_CODE_CLASS (code) != tcc_comparison)
4106 return 0;
4107
4108 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4109 for neither. In real maths, we cannot assume open ended ranges are
4110 the same. But, this is computer arithmetic, where numbers are finite.
4111 We can therefore make the transformation of any unbounded range with
4112 the value Z, Z being greater than any representable number. This permits
4113 us to treat unbounded ranges as equal. */
4114 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4115 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4116 switch (code)
4117 {
4118 case EQ_EXPR:
4119 result = sgn0 == sgn1;
4120 break;
4121 case NE_EXPR:
4122 result = sgn0 != sgn1;
4123 break;
4124 case LT_EXPR:
4125 result = sgn0 < sgn1;
4126 break;
4127 case LE_EXPR:
4128 result = sgn0 <= sgn1;
4129 break;
4130 case GT_EXPR:
4131 result = sgn0 > sgn1;
4132 break;
4133 case GE_EXPR:
4134 result = sgn0 >= sgn1;
4135 break;
4136 default:
4137 gcc_unreachable ();
4138 }
4139
4140 return constant_boolean_node (result, type);
4141 }
4142 \f
4143 /* Helper routine for make_range. Perform one step for it, return
4144 new expression if the loop should continue or NULL_TREE if it should
4145 stop. */
4146
4147 tree
4148 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4149 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4150 bool *strict_overflow_p)
4151 {
4152 tree arg0_type = TREE_TYPE (arg0);
4153 tree n_low, n_high, low = *p_low, high = *p_high;
4154 int in_p = *p_in_p, n_in_p;
4155
4156 switch (code)
4157 {
4158 case TRUTH_NOT_EXPR:
4159 /* We can only do something if the range is testing for zero. */
4160 if (low == NULL_TREE || high == NULL_TREE
4161 || ! integer_zerop (low) || ! integer_zerop (high))
4162 return NULL_TREE;
4163 *p_in_p = ! in_p;
4164 return arg0;
4165
4166 case EQ_EXPR: case NE_EXPR:
4167 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4168 /* We can only do something if the range is testing for zero
4169 and if the second operand is an integer constant. Note that
4170 saying something is "in" the range we make is done by
4171 complementing IN_P, since the initial setup puts us in the case of
4172 being not equal to zero; "out" is leaving it alone.
4173 if (low == NULL_TREE || high == NULL_TREE
4174 || ! integer_zerop (low) || ! integer_zerop (high)
4175 || TREE_CODE (arg1) != INTEGER_CST)
4176 return NULL_TREE;
4177
4178 switch (code)
4179 {
4180 case NE_EXPR: /* - [c, c] */
4181 low = high = arg1;
4182 break;
4183 case EQ_EXPR: /* + [c, c] */
4184 in_p = ! in_p, low = high = arg1;
4185 break;
4186 case GT_EXPR: /* - [-, c] */
4187 low = 0, high = arg1;
4188 break;
4189 case GE_EXPR: /* + [c, -] */
4190 in_p = ! in_p, low = arg1, high = 0;
4191 break;
4192 case LT_EXPR: /* - [c, -] */
4193 low = arg1, high = 0;
4194 break;
4195 case LE_EXPR: /* + [-, c] */
4196 in_p = ! in_p, low = 0, high = arg1;
4197 break;
4198 default:
4199 gcc_unreachable ();
4200 }
4201
4202 /* If this is an unsigned comparison, we also know that EXP is
4203 greater than or equal to zero. We base the range tests we make
4204 on that fact, so we record it here so we can parse existing
4205 range tests. We test arg0_type since often the return type
4206 of, e.g. EQ_EXPR, is boolean. */
4207 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4208 {
4209 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4210 in_p, low, high, 1,
4211 build_int_cst (arg0_type, 0),
4212 NULL_TREE))
4213 return NULL_TREE;
4214
4215 in_p = n_in_p, low = n_low, high = n_high;
4216
4217 /* If the high bound is missing, but we have a nonzero low
4218 bound, reverse the range so it goes from zero to the low bound
4219 minus 1. */
4220 if (high == 0 && low && ! integer_zerop (low))
4221 {
4222 in_p = ! in_p;
4223 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4224 build_int_cst (TREE_TYPE (low), 1), 0);
4225 low = build_int_cst (arg0_type, 0);
4226 }
4227 }
4228
4229 *p_low = low;
4230 *p_high = high;
4231 *p_in_p = in_p;
4232 return arg0;
4233
4234 case NEGATE_EXPR:
4235 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4236 low and high are non-NULL, then normalize will DTRT. */
4237 if (!TYPE_UNSIGNED (arg0_type)
4238 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4239 {
4240 if (low == NULL_TREE)
4241 low = TYPE_MIN_VALUE (arg0_type);
4242 if (high == NULL_TREE)
4243 high = TYPE_MAX_VALUE (arg0_type);
4244 }
4245
4246 /* (-x) IN [a,b] -> x in [-b, -a] */
4247 n_low = range_binop (MINUS_EXPR, exp_type,
4248 build_int_cst (exp_type, 0),
4249 0, high, 1);
4250 n_high = range_binop (MINUS_EXPR, exp_type,
4251 build_int_cst (exp_type, 0),
4252 0, low, 0);
4253 if (n_high != 0 && TREE_OVERFLOW (n_high))
4254 return NULL_TREE;
4255 goto normalize;
4256
4257 case BIT_NOT_EXPR:
4258 /* ~ X -> -X - 1 */
4259 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4260 build_int_cst (exp_type, 1));
4261
4262 case PLUS_EXPR:
4263 case MINUS_EXPR:
4264 if (TREE_CODE (arg1) != INTEGER_CST)
4265 return NULL_TREE;
4266
4267 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4268 move a constant to the other side. */
4269 if (!TYPE_UNSIGNED (arg0_type)
4270 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4271 return NULL_TREE;
4272
4273 /* If EXP is signed, any overflow in the computation is undefined,
4274 so we don't worry about it so long as our computations on
4275 the bounds don't overflow. For unsigned, overflow is defined
4276 and this is exactly the right thing. */
4277 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4278 arg0_type, low, 0, arg1, 0);
4279 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4280 arg0_type, high, 1, arg1, 0);
4281 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4282 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4283 return NULL_TREE;
4284
4285 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4286 *strict_overflow_p = true;
4287
4288 normalize:
4289 /* Check for an unsigned range which has wrapped around the maximum
4290 value thus making n_high < n_low, and normalize it. */
4291 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4292 {
4293 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4294 build_int_cst (TREE_TYPE (n_high), 1), 0);
4295 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4296 build_int_cst (TREE_TYPE (n_low), 1), 0);
4297
4298 /* If the range is of the form +/- [ x+1, x ], we won't
4299 be able to normalize it. But then, it represents the
4300 whole range or the empty set, so make it
4301 +/- [ -, - ]. */
4302 if (tree_int_cst_equal (n_low, low)
4303 && tree_int_cst_equal (n_high, high))
4304 low = high = 0;
4305 else
4306 in_p = ! in_p;
4307 }
4308 else
4309 low = n_low, high = n_high;
4310
4311 *p_low = low;
4312 *p_high = high;
4313 *p_in_p = in_p;
4314 return arg0;
4315
4316 CASE_CONVERT:
4317 case NON_LVALUE_EXPR:
4318 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4319 return NULL_TREE;
4320
4321 if (! INTEGRAL_TYPE_P (arg0_type)
4322 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4323 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4324 return NULL_TREE;
4325
4326 n_low = low, n_high = high;
4327
4328 if (n_low != 0)
4329 n_low = fold_convert_loc (loc, arg0_type, n_low);
4330
4331 if (n_high != 0)
4332 n_high = fold_convert_loc (loc, arg0_type, n_high);
4333
4334 /* If we're converting arg0 from an unsigned type to exp's
4335 signed type, we will be doing the comparison as unsigned.
4336 The tests above have already verified that LOW and HIGH
4337 are both positive.
4338
4339 So we have to ensure that we will handle large unsigned
4340 values the same way that the current signed bounds treat
4341 negative values. */
4342
4343 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4344 {
4345 tree high_positive;
4346 tree equiv_type;
4347 /* For fixed-point modes, we need to pass the saturating flag
4348 as the 2nd parameter. */
4349 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4350 equiv_type
4351 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4352 TYPE_SATURATING (arg0_type));
4353 else
4354 equiv_type
4355 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4356
4357 /* A range without an upper bound is, naturally, unbounded.
4358 Since convert would have cropped a very large value, use
4359 the max value for the destination type. */
4360 high_positive
4361 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4362 : TYPE_MAX_VALUE (arg0_type);
4363
4364 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4365 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4366 fold_convert_loc (loc, arg0_type,
4367 high_positive),
4368 build_int_cst (arg0_type, 1));
4369
4370 /* If the low bound is specified, "and" the range with the
4371 range for which the original unsigned value will be
4372 positive. */
4373 if (low != 0)
4374 {
4375 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4376 1, fold_convert_loc (loc, arg0_type,
4377 integer_zero_node),
4378 high_positive))
4379 return NULL_TREE;
4380
4381 in_p = (n_in_p == in_p);
4382 }
4383 else
4384 {
4385 /* Otherwise, "or" the range with the range of the input
4386 that will be interpreted as negative. */
4387 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4388 1, fold_convert_loc (loc, arg0_type,
4389 integer_zero_node),
4390 high_positive))
4391 return NULL_TREE;
4392
4393 in_p = (in_p != n_in_p);
4394 }
4395 }
4396
4397 *p_low = n_low;
4398 *p_high = n_high;
4399 *p_in_p = in_p;
4400 return arg0;
4401
4402 default:
4403 return NULL_TREE;
4404 }
4405 }
4406
4407 /* Given EXP, a logical expression, set the range it is testing into
4408 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4409 actually being tested. *PLOW and *PHIGH will be made of the same
4410 type as the returned expression. If EXP is not a comparison, we
4411 will most likely not be returning a useful value and range. Set
4412 *STRICT_OVERFLOW_P to true if the return value is only valid
4413 because signed overflow is undefined; otherwise, do not change
4414 *STRICT_OVERFLOW_P. */
4415
4416 tree
4417 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4418 bool *strict_overflow_p)
4419 {
4420 enum tree_code code;
4421 tree arg0, arg1 = NULL_TREE;
4422 tree exp_type, nexp;
4423 int in_p;
4424 tree low, high;
4425 location_t loc = EXPR_LOCATION (exp);
4426
4427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4428 and see if we can refine the range. Some of the cases below may not
4429 happen, but it doesn't seem worth worrying about this. We loop as
4430 long as make_range_step can refine the range, and stop as soon as
4431 it returns NULL_TREE. */
4432
4433 in_p = 0;
4434 low = high = build_int_cst (TREE_TYPE (exp), 0);
4435
4436 while (1)
4437 {
4438 code = TREE_CODE (exp);
4439 exp_type = TREE_TYPE (exp);
4440 arg0 = NULL_TREE;
4441
4442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4443 {
4444 if (TREE_OPERAND_LENGTH (exp) > 0)
4445 arg0 = TREE_OPERAND (exp, 0);
4446 if (TREE_CODE_CLASS (code) == tcc_binary
4447 || TREE_CODE_CLASS (code) == tcc_comparison
4448 || (TREE_CODE_CLASS (code) == tcc_expression
4449 && TREE_OPERAND_LENGTH (exp) > 1))
4450 arg1 = TREE_OPERAND (exp, 1);
4451 }
4452 if (arg0 == NULL_TREE)
4453 break;
4454
4455 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4456 &high, &in_p, strict_overflow_p);
4457 if (nexp == NULL_TREE)
4458 break;
4459 exp = nexp;
4460 }
4461
4462 /* If EXP is a constant, we can evaluate whether this is true or false. */
4463 if (TREE_CODE (exp) == INTEGER_CST)
4464 {
4465 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4466 exp, 0, low, 0))
4467 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4468 exp, 1, high, 1)));
4469 low = high = 0;
4470 exp = 0;
4471 }
4472
4473 *pin_p = in_p, *plow = low, *phigh = high;
4474 return exp;
4475 }
4476 \f
4477 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4478 type, TYPE, return an expression to test if EXP is in (or out of, depending
4479 on IN_P) the range. Return 0 if the test couldn't be created. */
4480
4481 tree
4482 build_range_check (location_t loc, tree type, tree exp, int in_p,
4483 tree low, tree high)
4484 {
4485 tree etype = TREE_TYPE (exp), value;
4486
4487 /* Disable this optimization for function pointer expressions
4488 on targets that require function pointer canonicalization. */
4489 if (targetm.have_canonicalize_funcptr_for_compare ()
4490 && TREE_CODE (etype) == POINTER_TYPE
4491 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4492 return NULL_TREE;
4493
4494 if (! in_p)
4495 {
4496 value = build_range_check (loc, type, exp, 1, low, high);
4497 if (value != 0)
4498 return invert_truthvalue_loc (loc, value);
4499
4500 return 0;
4501 }
4502
4503 if (low == 0 && high == 0)
4504 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4505
4506 if (low == 0)
4507 return fold_build2_loc (loc, LE_EXPR, type, exp,
4508 fold_convert_loc (loc, etype, high));
4509
4510 if (high == 0)
4511 return fold_build2_loc (loc, GE_EXPR, type, exp,
4512 fold_convert_loc (loc, etype, low));
4513
4514 if (operand_equal_p (low, high, 0))
4515 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4516 fold_convert_loc (loc, etype, low));
4517
4518 if (integer_zerop (low))
4519 {
4520 if (! TYPE_UNSIGNED (etype))
4521 {
4522 etype = unsigned_type_for (etype);
4523 high = fold_convert_loc (loc, etype, high);
4524 exp = fold_convert_loc (loc, etype, exp);
4525 }
4526 return build_range_check (loc, type, exp, 1, 0, high);
4527 }
4528
4529 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4530 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4531 {
4532 int prec = TYPE_PRECISION (etype);
4533
4534 if (wi::mask (prec - 1, false, prec) == high)
4535 {
4536 if (TYPE_UNSIGNED (etype))
4537 {
4538 tree signed_etype = signed_type_for (etype);
4539 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4540 etype
4541 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4542 else
4543 etype = signed_etype;
4544 exp = fold_convert_loc (loc, etype, exp);
4545 }
4546 return fold_build2_loc (loc, GT_EXPR, type, exp,
4547 build_int_cst (etype, 0));
4548 }
4549 }
4550
4551 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4552 This requires wrap-around arithmetic for the type of the expression.
4553 First make sure that arithmetic in this type is valid, then make sure
4554 that it wraps around. */
4555 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4556 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4557 TYPE_UNSIGNED (etype));
4558
4559 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4560 {
4561 tree utype, minv, maxv;
4562
4563 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4564 for the type in question, as we rely on this here. */
4565 utype = unsigned_type_for (etype);
4566 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4567 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4568 build_int_cst (TREE_TYPE (maxv), 1), 1);
4569 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4570
4571 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4572 minv, 1, maxv, 1)))
4573 etype = utype;
4574 else
4575 return 0;
4576 }
4577
4578 high = fold_convert_loc (loc, etype, high);
4579 low = fold_convert_loc (loc, etype, low);
4580 exp = fold_convert_loc (loc, etype, exp);
4581
4582 value = const_binop (MINUS_EXPR, high, low);
4583
4584
4585 if (POINTER_TYPE_P (etype))
4586 {
4587 if (value != 0 && !TREE_OVERFLOW (value))
4588 {
4589 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4590 return build_range_check (loc, type,
4591 fold_build_pointer_plus_loc (loc, exp, low),
4592 1, build_int_cst (etype, 0), value);
4593 }
4594 return 0;
4595 }
4596
4597 if (value != 0 && !TREE_OVERFLOW (value))
4598 return build_range_check (loc, type,
4599 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4600 1, build_int_cst (etype, 0), value);
4601
4602 return 0;
4603 }
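
/* Editorial sketch (not part of GCC): a minimal standalone illustration of
   the (c>=1) && (c<=127) special case above, assuming the usual
   two's-complement conversion to signed char.  */

static int
signed_range_check_example (unsigned char c)
{
  int two_compares = (c >= 1 && c <= 127);
  int one_compare = ((signed char) c > 0);
  return two_compares == one_compare;  /* Always 1.  */
}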
4604 \f
4605 /* Return the predecessor of VAL in its type, handling the infinite case. */
4606
4607 static tree
4608 range_predecessor (tree val)
4609 {
4610 tree type = TREE_TYPE (val);
4611
4612 if (INTEGRAL_TYPE_P (type)
4613 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4614 return 0;
4615 else
4616 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4617 build_int_cst (TREE_TYPE (val), 1), 0);
4618 }
4619
4620 /* Return the successor of VAL in its type, handling the infinite case. */
4621
4622 static tree
4623 range_successor (tree val)
4624 {
4625 tree type = TREE_TYPE (val);
4626
4627 if (INTEGRAL_TYPE_P (type)
4628 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4629 return 0;
4630 else
4631 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4632 build_int_cst (TREE_TYPE (val), 1), 0);
4633 }
4634
4635 /* Given two ranges, see if we can merge them into one. Return 1 if we
4636 can, 0 if we can't. Set the output range into the specified parameters. */
4637
4638 bool
4639 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4640 tree high0, int in1_p, tree low1, tree high1)
4641 {
4642 int no_overlap;
4643 int subset;
4644 int temp;
4645 tree tem;
4646 int in_p;
4647 tree low, high;
4648 int lowequal = ((low0 == 0 && low1 == 0)
4649 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4650 low0, 0, low1, 0)));
4651 int highequal = ((high0 == 0 && high1 == 0)
4652 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4653 high0, 1, high1, 1)));
4654
4655 /* Make range 0 be the range that starts first, or ends last if they
4656 start at the same value. Swap them if it isn't. */
4657 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4658 low0, 0, low1, 0))
4659 || (lowequal
4660 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4661 high1, 1, high0, 1))))
4662 {
4663 temp = in0_p, in0_p = in1_p, in1_p = temp;
4664 tem = low0, low0 = low1, low1 = tem;
4665 tem = high0, high0 = high1, high1 = tem;
4666 }
4667
4668 /* Now flag two cases, whether the ranges are disjoint or whether the
4669 second range is totally subsumed in the first. Note that the tests
4670 below are simplified by the ones above. */
4671 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4672 high0, 1, low1, 0));
4673 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4674 high1, 1, high0, 1));
4675
4676 /* We now have four cases, depending on whether we are including or
4677 excluding the two ranges. */
4678 if (in0_p && in1_p)
4679 {
4680 /* If they don't overlap, the result is false. If the second range
4681 is a subset it is the result. Otherwise, the range is from the start
4682 of the second to the end of the first. */
4683 if (no_overlap)
4684 in_p = 0, low = high = 0;
4685 else if (subset)
4686 in_p = 1, low = low1, high = high1;
4687 else
4688 in_p = 1, low = low1, high = high0;
4689 }
4690
4691 else if (in0_p && ! in1_p)
4692 {
4693 /* If they don't overlap, the result is the first range. If they are
4694 equal, the result is false. If the second range is a subset of the
4695 first, and the ranges begin at the same place, we go from just after
4696 the end of the second range to the end of the first. If the second
4697 range is not a subset of the first, or if it is a subset and both
4698 ranges end at the same place, the range starts at the start of the
4699 first range and ends just before the second range.
4700 Otherwise, we can't describe this as a single range. */
4701 if (no_overlap)
4702 in_p = 1, low = low0, high = high0;
4703 else if (lowequal && highequal)
4704 in_p = 0, low = high = 0;
4705 else if (subset && lowequal)
4706 {
4707 low = range_successor (high1);
4708 high = high0;
4709 in_p = 1;
4710 if (low == 0)
4711 {
4712 /* We are in the weird situation where high0 > high1 but
4713 high1 has no successor. Punt. */
4714 return 0;
4715 }
4716 }
4717 else if (! subset || highequal)
4718 {
4719 low = low0;
4720 high = range_predecessor (low1);
4721 in_p = 1;
4722 if (high == 0)
4723 {
4724 /* low0 < low1 but low1 has no predecessor. Punt. */
4725 return 0;
4726 }
4727 }
4728 else
4729 return 0;
4730 }
4731
4732 else if (! in0_p && in1_p)
4733 {
4734 /* If they don't overlap, the result is the second range. If the second
4735 is a subset of the first, the result is false. Otherwise,
4736 the range starts just after the first range and ends at the
4737 end of the second. */
4738 if (no_overlap)
4739 in_p = 1, low = low1, high = high1;
4740 else if (subset || highequal)
4741 in_p = 0, low = high = 0;
4742 else
4743 {
4744 low = range_successor (high0);
4745 high = high1;
4746 in_p = 1;
4747 if (low == 0)
4748 {
4749 /* high1 > high0 but high0 has no successor. Punt. */
4750 return 0;
4751 }
4752 }
4753 }
4754
4755 else
4756 {
4757 /* The case where we are excluding both ranges. Here the complex case
4758 is if they don't overlap. In that case, the only time we have a
4759 range is if they are adjacent. If the second is a subset of the
4760 first, the result is the first. Otherwise, the range to exclude
4761 starts at the beginning of the first range and ends at the end of the
4762 second. */
4763 if (no_overlap)
4764 {
4765 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4766 range_successor (high0),
4767 1, low1, 0)))
4768 in_p = 0, low = low0, high = high1;
4769 else
4770 {
4771 /* Canonicalize - [min, x] into - [-, x]. */
4772 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4773 switch (TREE_CODE (TREE_TYPE (low0)))
4774 {
4775 case ENUMERAL_TYPE:
4776 if (TYPE_PRECISION (TREE_TYPE (low0))
4777 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4778 break;
4779 /* FALLTHROUGH */
4780 case INTEGER_TYPE:
4781 if (tree_int_cst_equal (low0,
4782 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4783 low0 = 0;
4784 break;
4785 case POINTER_TYPE:
4786 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4787 && integer_zerop (low0))
4788 low0 = 0;
4789 break;
4790 default:
4791 break;
4792 }
4793
4794 /* Canonicalize - [x, max] into - [x, -]. */
4795 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4796 switch (TREE_CODE (TREE_TYPE (high1)))
4797 {
4798 case ENUMERAL_TYPE:
4799 if (TYPE_PRECISION (TREE_TYPE (high1))
4800 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4801 break;
4802 /* FALLTHROUGH */
4803 case INTEGER_TYPE:
4804 if (tree_int_cst_equal (high1,
4805 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4806 high1 = 0;
4807 break;
4808 case POINTER_TYPE:
4809 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4810 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4811 high1, 1,
4812 build_int_cst (TREE_TYPE (high1), 1),
4813 1)))
4814 high1 = 0;
4815 break;
4816 default:
4817 break;
4818 }
4819
4820 /* The ranges might be also adjacent between the maximum and
4821 minimum values of the given type. For
4822 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4823 return + [x + 1, y - 1]. */
4824 if (low0 == 0 && high1 == 0)
4825 {
4826 low = range_successor (high0);
4827 high = range_predecessor (low1);
4828 if (low == 0 || high == 0)
4829 return 0;
4830
4831 in_p = 1;
4832 }
4833 else
4834 return 0;
4835 }
4836 }
4837 else if (subset)
4838 in_p = 0, low = low0, high = high0;
4839 else
4840 in_p = 0, low = low0, high = high1;
4841 }
4842
4843 *pin_p = in_p, *plow = low, *phigh = high;
4844 return 1;
4845 }
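
/* Editorial sketch (not part of GCC): a minimal standalone illustration of
   the "in0_p && in1_p" case of merge_ranges.  ANDing two overlapping
   inclusive ranges yields their intersection, here + [5, 9].  */

static int
merge_ranges_example (int x)
{
  int two_ranges = (x >= 2 && x <= 9) && (x >= 5 && x <= 12);
  int merged = (x >= 5 && x <= 9);
  return two_ranges == merged;         /* Always 1.  */
}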
4846 \f
4847
4848 /* Subroutine of fold, looking inside expressions of the form
4849 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4850 of the COND_EXPR. This function is being used also to optimize
4851 A op B ? C : A, by reversing the comparison first.
4852
4853 Return a folded expression whose code is not a COND_EXPR
4854 anymore, or NULL_TREE if no folding opportunity is found. */
4855
4856 static tree
4857 fold_cond_expr_with_comparison (location_t loc, tree type,
4858 tree arg0, tree arg1, tree arg2)
4859 {
4860 enum tree_code comp_code = TREE_CODE (arg0);
4861 tree arg00 = TREE_OPERAND (arg0, 0);
4862 tree arg01 = TREE_OPERAND (arg0, 1);
4863 tree arg1_type = TREE_TYPE (arg1);
4864 tree tem;
4865
4866 STRIP_NOPS (arg1);
4867 STRIP_NOPS (arg2);
4868
4869 /* If we have A op 0 ? A : -A, consider applying the following
4870 transformations:
4871
4872 A == 0? A : -A same as -A
4873 A != 0? A : -A same as A
4874 A >= 0? A : -A same as abs (A)
4875 A > 0? A : -A same as abs (A)
4876 A <= 0? A : -A same as -abs (A)
4877 A < 0? A : -A same as -abs (A)
4878
4879 None of these transformations work for modes with signed
4880 zeros. If A is +/-0, the first two transformations will
4881 change the sign of the result (from +0 to -0, or vice
4882 versa). The last four will fix the sign of the result,
4883 even though the original expressions could be positive or
4884 negative, depending on the sign of A.
4885
4886 Note that all these transformations are correct if A is
4887 NaN, since the two alternatives (A and -A) are also NaNs. */
4888 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4889 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4890 ? real_zerop (arg01)
4891 : integer_zerop (arg01))
4892 && ((TREE_CODE (arg2) == NEGATE_EXPR
4893 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4894 /* In the case that A is of the form X-Y, '-A' (arg2) may
4895 have already been folded to Y-X, check for that. */
4896 || (TREE_CODE (arg1) == MINUS_EXPR
4897 && TREE_CODE (arg2) == MINUS_EXPR
4898 && operand_equal_p (TREE_OPERAND (arg1, 0),
4899 TREE_OPERAND (arg2, 1), 0)
4900 && operand_equal_p (TREE_OPERAND (arg1, 1),
4901 TREE_OPERAND (arg2, 0), 0))))
4902 switch (comp_code)
4903 {
4904 case EQ_EXPR:
4905 case UNEQ_EXPR:
4906 tem = fold_convert_loc (loc, arg1_type, arg1);
4907 return pedantic_non_lvalue_loc (loc,
4908 fold_convert_loc (loc, type,
4909 negate_expr (tem)));
4910 case NE_EXPR:
4911 case LTGT_EXPR:
4912 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4913 case UNGE_EXPR:
4914 case UNGT_EXPR:
4915 if (flag_trapping_math)
4916 break;
4917 /* Fall through. */
4918 case GE_EXPR:
4919 case GT_EXPR:
4920 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4921 arg1 = fold_convert_loc (loc, signed_type_for
4922 (TREE_TYPE (arg1)), arg1);
4923 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4924 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4925 case UNLE_EXPR:
4926 case UNLT_EXPR:
4927 if (flag_trapping_math)
4928 break;
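	  /* Fall through. */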
4929 case LE_EXPR:
4930 case LT_EXPR:
4931 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4932 arg1 = fold_convert_loc (loc, signed_type_for
4933 (TREE_TYPE (arg1)), arg1);
4934 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4935 return negate_expr (fold_convert_loc (loc, type, tem));
4936 default:
4937 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4938 break;
4939 }
4940
4941 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4942 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4943 both transformations are correct when A is NaN: A != 0
4944 is then true, and A == 0 is false. */
4945
4946 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4947 && integer_zerop (arg01) && integer_zerop (arg2))
4948 {
4949 if (comp_code == NE_EXPR)
4950 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4951 else if (comp_code == EQ_EXPR)
4952 return build_zero_cst (type);
4953 }
4954
4955 /* Try some transformations of A op B ? A : B.
4956
4957 A == B? A : B same as B
4958 A != B? A : B same as A
4959 A >= B? A : B same as max (A, B)
4960 A > B? A : B same as max (B, A)
4961 A <= B? A : B same as min (A, B)
4962 A < B? A : B same as min (B, A)
4963
4964 As above, these transformations don't work in the presence
4965 of signed zeros. For example, if A and B are zeros of
4966 opposite sign, the first two transformations will change
4967 the sign of the result. In the last four, the original
4968 expressions give different results for (A=+0, B=-0) and
4969 (A=-0, B=+0), but the transformed expressions do not.
4970
4971 The first two transformations are correct if either A or B
4972 is a NaN. In the first transformation, the condition will
4973 be false, and B will indeed be chosen. In the case of the
4974 second transformation, the condition A != B will be true,
4975 and A will be chosen.
4976
4977 The conversions to max() and min() are not correct if B is
4978 a number and A is not. The conditions in the original
4979 expressions will be false, so all four give B. The min()
4980 and max() versions would give a NaN instead. */
4981 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4982 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4983 /* Avoid these transformations if the COND_EXPR may be used
4984 as an lvalue in the C++ front-end. PR c++/19199. */
4985 && (in_gimple_form
4986 || VECTOR_TYPE_P (type)
4987 || (! lang_GNU_CXX ()
4988 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4989 || ! maybe_lvalue_p (arg1)
4990 || ! maybe_lvalue_p (arg2)))
4991 {
4992 tree comp_op0 = arg00;
4993 tree comp_op1 = arg01;
4994 tree comp_type = TREE_TYPE (comp_op0);
4995
4996 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4997 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4998 {
4999 comp_type = type;
5000 comp_op0 = arg1;
5001 comp_op1 = arg2;
5002 }
5003
5004 switch (comp_code)
5005 {
5006 case EQ_EXPR:
5007 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5008 case NE_EXPR:
5009 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5010 case LE_EXPR:
5011 case LT_EXPR:
5012 case UNLE_EXPR:
5013 case UNLT_EXPR:
5014 /* In C++ a ?: expression can be an lvalue, so put the
5015 operand which will be used if they are equal first
5016 so that we can convert this back to the
5017 corresponding COND_EXPR. */
5018 if (!HONOR_NANS (arg1))
5019 {
5020 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5021 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5022 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5023 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5024 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5025 comp_op1, comp_op0);
5026 return pedantic_non_lvalue_loc (loc,
5027 fold_convert_loc (loc, type, tem));
5028 }
5029 break;
5030 case GE_EXPR:
5031 case GT_EXPR:
5032 case UNGE_EXPR:
5033 case UNGT_EXPR:
5034 if (!HONOR_NANS (arg1))
5035 {
5036 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5037 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5038 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5039 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5040 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5041 comp_op1, comp_op0);
5042 return pedantic_non_lvalue_loc (loc,
5043 fold_convert_loc (loc, type, tem));
5044 }
5045 break;
5046 case UNEQ_EXPR:
5047 if (!HONOR_NANS (arg1))
5048 return pedantic_non_lvalue_loc (loc,
5049 fold_convert_loc (loc, type, arg2));
5050 break;
5051 case LTGT_EXPR:
5052 if (!HONOR_NANS (arg1))
5053 return pedantic_non_lvalue_loc (loc,
5054 fold_convert_loc (loc, type, arg1));
5055 break;
5056 default:
5057 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5058 break;
5059 }
5060 }
5061
5062 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5063 we might still be able to simplify this. For example,
5064 if C1 is one less or one more than C2, this might have started
5065 out as a MIN or MAX and been transformed by this function.
5066 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5067
5068 if (INTEGRAL_TYPE_P (type)
5069 && TREE_CODE (arg01) == INTEGER_CST
5070 && TREE_CODE (arg2) == INTEGER_CST)
5071 switch (comp_code)
5072 {
5073 case EQ_EXPR:
5074 if (TREE_CODE (arg1) == INTEGER_CST)
5075 break;
5076 /* We can replace A with C1 in this case. */
5077 arg1 = fold_convert_loc (loc, type, arg01);
5078 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5079
5080 case LT_EXPR:
5081 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5082 MIN_EXPR, to preserve the signedness of the comparison. */
5083 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5084 OEP_ONLY_CONST)
5085 && operand_equal_p (arg01,
5086 const_binop (PLUS_EXPR, arg2,
5087 build_int_cst (type, 1)),
5088 OEP_ONLY_CONST))
5089 {
5090 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5091 fold_convert_loc (loc, TREE_TYPE (arg00),
5092 arg2));
5093 return pedantic_non_lvalue_loc (loc,
5094 fold_convert_loc (loc, type, tem));
5095 }
5096 break;
5097
5098 case LE_EXPR:
5099 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5100 as above. */
5101 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5102 OEP_ONLY_CONST)
5103 && operand_equal_p (arg01,
5104 const_binop (MINUS_EXPR, arg2,
5105 build_int_cst (type, 1)),
5106 OEP_ONLY_CONST))
5107 {
5108 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5109 fold_convert_loc (loc, TREE_TYPE (arg00),
5110 arg2));
5111 return pedantic_non_lvalue_loc (loc,
5112 fold_convert_loc (loc, type, tem));
5113 }
5114 break;
5115
5116 case GT_EXPR:
5117 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5118 MAX_EXPR, to preserve the signedness of the comparison. */
5119 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5120 OEP_ONLY_CONST)
5121 && operand_equal_p (arg01,
5122 const_binop (MINUS_EXPR, arg2,
5123 build_int_cst (type, 1)),
5124 OEP_ONLY_CONST))
5125 {
5126 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5127 fold_convert_loc (loc, TREE_TYPE (arg00),
5128 arg2));
5129 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5130 }
5131 break;
5132
5133 case GE_EXPR:
5134 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5135 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5136 OEP_ONLY_CONST)
5137 && operand_equal_p (arg01,
5138 const_binop (PLUS_EXPR, arg2,
5139 build_int_cst (type, 1)),
5140 OEP_ONLY_CONST))
5141 {
5142 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5143 fold_convert_loc (loc, TREE_TYPE (arg00),
5144 arg2));
5145 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5146 }
5147 break;
5148 case NE_EXPR:
5149 break;
5150 default:
5151 gcc_unreachable ();
5152 }
5153
5154 return NULL_TREE;
5155 }
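
/* Editorial sketch (not part of GCC): a minimal standalone illustration of
   the A op B ? A : B folds above, on ints where the NaN and signed-zero
   caveats do not apply; the conditionals are exactly MIN and MAX.  */

static int
cond_min_max_example (int a, int b)
{
  int min_ab = (a < b ? a : b);        /* Folded to MIN_EXPR.  */
  int max_ab = (a > b ? a : b);        /* Folded to MAX_EXPR.  */
  /* Both sides add the same two operands, so they always agree.  */
  return min_ab + max_ab == a + b;     /* Always 1.  */
}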
5156
5157
5158 \f
5159 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5160 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5161 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5162 false) >= 2)
5163 #endif
5164
5165 /* EXP is some logical combination of boolean tests. See if we can
5166 merge it into some range test. Return the new tree if so. */
5167
5168 static tree
5169 fold_range_test (location_t loc, enum tree_code code, tree type,
5170 tree op0, tree op1)
5171 {
5172 int or_op = (code == TRUTH_ORIF_EXPR
5173 || code == TRUTH_OR_EXPR);
5174 int in0_p, in1_p, in_p;
5175 tree low0, low1, low, high0, high1, high;
5176 bool strict_overflow_p = false;
5177 tree tem, lhs, rhs;
5178 const char * const warnmsg = G_("assuming signed overflow does not occur "
5179 "when simplifying range test");
5180
5181 if (!INTEGRAL_TYPE_P (type))
5182 return 0;
5183
5184 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5185 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5186
5187 /* If this is an OR operation, invert both sides; we will invert
5188 again at the end. */
5189 if (or_op)
5190 in0_p = ! in0_p, in1_p = ! in1_p;
5191
5192 /* If both expressions are the same, if we can merge the ranges, and we
5193 can build the range test, return it, inverted if this is an OR
5194 operation. If one of the ranges is always true or always false,
5195 consider it to be the same expression as the other. */
5196 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5197 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5198 in1_p, low1, high1)
5199 && 0 != (tem = (build_range_check (loc, type,
5200 lhs != 0 ? lhs
5201 : rhs != 0 ? rhs : integer_zero_node,
5202 in_p, low, high))))
5203 {
5204 if (strict_overflow_p)
5205 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5206 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5207 }
5208
5209 /* On machines where branches are expensive, if this is a
5210 short-circuited branch and the underlying object on both sides
5211 is the same, make a non-short-circuit operation. */
5212 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5213 && lhs != 0 && rhs != 0
5214 && (code == TRUTH_ANDIF_EXPR
5215 || code == TRUTH_ORIF_EXPR)
5216 && operand_equal_p (lhs, rhs, 0))
5217 {
5218 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5219 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5220 which cases we can't do this. */
5221 if (simple_operand_p (lhs))
5222 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5223 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5224 type, op0, op1);
5225
5226 else if (!lang_hooks.decls.global_bindings_p ()
5227 && !CONTAINS_PLACEHOLDER_P (lhs))
5228 {
5229 tree common = save_expr (lhs);
5230
5231 if (0 != (lhs = build_range_check (loc, type, common,
5232 or_op ? ! in0_p : in0_p,
5233 low0, high0))
5234 && (0 != (rhs = build_range_check (loc, type, common,
5235 or_op ? ! in1_p : in1_p,
5236 low1, high1))))
5237 {
5238 if (strict_overflow_p)
5239 fold_overflow_warning (warnmsg,
5240 WARN_STRICT_OVERFLOW_COMPARISON);
5241 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5242 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5243 type, lhs, rhs);
5244 }
5245 }
5246 }
5247
5248 return 0;
5249 }
5250 \f
5251 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5252 bit value. Arrange things so the extra bits will be set to zero if and
5253 only if C is sign-extended to its full width. If MASK is nonzero,
5254 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5255
5256 static tree
5257 unextend (tree c, int p, int unsignedp, tree mask)
5258 {
5259 tree type = TREE_TYPE (c);
5260 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5261 tree temp;
5262
5263 if (p == modesize || unsignedp)
5264 return c;
5265
5266 /* We work by getting just the sign bit into the low-order bit, then
5267 into the high-order bit, then sign-extend. We then XOR that value
5268 with C. */
5269 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5270
5271 /* We must use a signed type in order to get an arithmetic right shift.
5272 However, we must also avoid introducing accidental overflows, so that
5273 a subsequent call to integer_zerop will work. Hence we must
5274 do the type conversion here. At this point, the constant is either
5275 zero or one, and the conversion to a signed type can never overflow.
5276 We could get an overflow if this conversion is done anywhere else. */
5277 if (TYPE_UNSIGNED (type))
5278 temp = fold_convert (signed_type_for (type), temp);
5279
5280 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5281 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5282 if (mask != 0)
5283 temp = const_binop (BIT_AND_EXPR, temp,
5284 fold_convert (TREE_TYPE (c), mask));
5285 /* If necessary, convert the type back to match the type of C. */
5286 if (TYPE_UNSIGNED (type))
5287 temp = fold_convert (type, temp);
5288
5289 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5290 }
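
/* Editorial sketch (not part of GCC): a minimal standalone illustration of
   unextend's shift/XOR trick for a 32-bit word and an 8-bit field
   (P == 8), using the usual two's-complement cast-and-arithmetic-shift
   idiom.  The extra bits of the result are zero if and only if C was
   sign-extended from 8 bits.  */

static unsigned int
unextend_example (unsigned int c)
{
  unsigned int sign = (c >> 7) & 1;    /* Sign bit of the 8-bit field.  */
  /* Smear the sign bit across bits 8..31: shift it to the top bit, then
     arithmetic-shift down by MODESIZE - P - 1 == 23.  */
  unsigned int ext = (unsigned int) ((int) (sign << 31) >> 23);
  return c ^ ext;  /* 0xffffff80 -> 0x80, but 0x00000080 -> 0xffffff80.  */
}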
5291 \f
5292 /* For an expression that has the form
5293 (A && B) || ~B
5294 or
5295 (A || B) && ~B,
5296 we can drop one of the inner expressions and simplify to
5297 A || ~B
5298 or
5299 A && ~B
5300 LOC is the location of the resulting expression. OP is the inner
5301 logical operation, the left-hand side in the examples above, while CMPOP
5302 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5303 removing a condition that guards another, as in
5304 (A != NULL && A->...) || A == NULL
5305 which we must not transform. If RHS_ONLY is true, only eliminate the
5306 right-most operand of the inner logical operation. */
5307
5308 static tree
5309 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5310 bool rhs_only)
5311 {
5312 tree type = TREE_TYPE (cmpop);
5313 enum tree_code code = TREE_CODE (cmpop);
5314 enum tree_code truthop_code = TREE_CODE (op);
5315 tree lhs = TREE_OPERAND (op, 0);
5316 tree rhs = TREE_OPERAND (op, 1);
5317 tree orig_lhs = lhs, orig_rhs = rhs;
5318 enum tree_code rhs_code = TREE_CODE (rhs);
5319 enum tree_code lhs_code = TREE_CODE (lhs);
5320 enum tree_code inv_code;
5321
5322 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5323 return NULL_TREE;
5324
5325 if (TREE_CODE_CLASS (code) != tcc_comparison)
5326 return NULL_TREE;
5327
5328 if (rhs_code == truthop_code)
5329 {
5330 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5331 if (newrhs != NULL_TREE)
5332 {
5333 rhs = newrhs;
5334 rhs_code = TREE_CODE (rhs);
5335 }
5336 }
5337 if (lhs_code == truthop_code && !rhs_only)
5338 {
5339 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5340 if (newlhs != NULL_TREE)
5341 {
5342 lhs = newlhs;
5343 lhs_code = TREE_CODE (lhs);
5344 }
5345 }
5346
5347 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5348 if (inv_code == rhs_code
5349 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5350 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5351 return lhs;
5352 if (!rhs_only && inv_code == lhs_code
5353 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5354 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5355 return rhs;
5356 if (rhs != orig_rhs || lhs != orig_lhs)
5357 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5358 lhs, rhs);
5359 return NULL_TREE;
5360 }
5361
5362 /* Find ways of folding logical expressions of LHS and RHS:
5363 Try to merge two comparisons to the same innermost item.
5364 Look for range tests like "ch >= '0' && ch <= '9'".
5365 Look for combinations of simple terms on machines with expensive branches
5366 and evaluate the RHS unconditionally.
5367
5368 For example, if we have p->a == 2 && p->b == 4 and we can make an
5369 object large enough to span both A and B, we can do this with a comparison
5370 against the object ANDed with a mask.
5371
5372 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5373 operations to do this with one comparison.
5374
5375 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5376 function and the one above.
5377
5378 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5379 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5380
5381 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5382 two operands.
5383
5384 We return the simplified tree or 0 if no optimization is possible. */
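/* Worked example (illustrative; assumes a little-endian layout where A
occupies bits 0-3 and B bits 4-7 of one byte): given
struct { unsigned a : 4; unsigned b : 4; } *p, the test
"p->a == 2 && p->b == 4" can be merged into a single load and compare,
conceptually ((*(unsigned char *) p) & 0xff) == 0x42. */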
5385
5386 static tree
5387 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5388 tree lhs, tree rhs)
5389 {
5390 /* If this is the "or" of two comparisons, we can do something if
5391 the comparisons are NE_EXPR. If this is the "and", we can do something
5392 if the comparisons are EQ_EXPR. I.e.,
5393 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5394
5395 WANTED_CODE is this operation code. For single bit fields, we can
5396 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5397 comparison for one-bit fields. */
5398
5399 enum tree_code wanted_code;
5400 enum tree_code lcode, rcode;
5401 tree ll_arg, lr_arg, rl_arg, rr_arg;
5402 tree ll_inner, lr_inner, rl_inner, rr_inner;
5403 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5404 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5405 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5406 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5407 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5408 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5409 machine_mode lnmode, rnmode;
5410 tree ll_mask, lr_mask, rl_mask, rr_mask;
5411 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5412 tree l_const, r_const;
5413 tree lntype, rntype, result;
5414 HOST_WIDE_INT first_bit, end_bit;
5415 int volatilep;
5416
5417 /* Start by getting the comparison codes. Fail if anything is volatile.
5418 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5419 it were surrounded with a NE_EXPR. */
5420
5421 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5422 return 0;
5423
5424 lcode = TREE_CODE (lhs);
5425 rcode = TREE_CODE (rhs);
5426
5427 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5428 {
5429 lhs = build2 (NE_EXPR, truth_type, lhs,
5430 build_int_cst (TREE_TYPE (lhs), 0));
5431 lcode = NE_EXPR;
5432 }
5433
5434 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5435 {
5436 rhs = build2 (NE_EXPR, truth_type, rhs,
5437 build_int_cst (TREE_TYPE (rhs), 0));
5438 rcode = NE_EXPR;
5439 }
5440
5441 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5442 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5443 return 0;
5444
5445 ll_arg = TREE_OPERAND (lhs, 0);
5446 lr_arg = TREE_OPERAND (lhs, 1);
5447 rl_arg = TREE_OPERAND (rhs, 0);
5448 rr_arg = TREE_OPERAND (rhs, 1);
5449
5450 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5451 if (simple_operand_p (ll_arg)
5452 && simple_operand_p (lr_arg))
5453 {
5454 if (operand_equal_p (ll_arg, rl_arg, 0)
5455 && operand_equal_p (lr_arg, rr_arg, 0))
5456 {
5457 result = combine_comparisons (loc, code, lcode, rcode,
5458 truth_type, ll_arg, lr_arg);
5459 if (result)
5460 return result;
5461 }
5462 else if (operand_equal_p (ll_arg, rr_arg, 0)
5463 && operand_equal_p (lr_arg, rl_arg, 0))
5464 {
5465 result = combine_comparisons (loc, code, lcode,
5466 swap_tree_comparison (rcode),
5467 truth_type, ll_arg, lr_arg);
5468 if (result)
5469 return result;
5470 }
5471 }
5472
5473 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5474 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5475
5476 /* If the RHS can be evaluated unconditionally and its operands are
5477 simple, it wins to evaluate the RHS unconditionally on machines
5478 with expensive branches. In this case, this isn't a comparison
5479 that can be merged. */
5480
5481 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5482 false) >= 2
5483 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5484 && simple_operand_p (rl_arg)
5485 && simple_operand_p (rr_arg))
5486 {
5487 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5488 if (code == TRUTH_OR_EXPR
5489 && lcode == NE_EXPR && integer_zerop (lr_arg)
5490 && rcode == NE_EXPR && integer_zerop (rr_arg)
5491 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5492 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5493 return build2_loc (loc, NE_EXPR, truth_type,
5494 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5495 ll_arg, rl_arg),
5496 build_int_cst (TREE_TYPE (ll_arg), 0));
5497
5498 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5499 if (code == TRUTH_AND_EXPR
5500 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5501 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5502 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5503 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5504 return build2_loc (loc, EQ_EXPR, truth_type,
5505 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5506 ll_arg, rl_arg),
5507 build_int_cst (TREE_TYPE (ll_arg), 0));
5508 }
5509
5510 /* See if the comparisons can be merged. Then get all the parameters for
5511 each side. */
5512
5513 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5514 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5515 return 0;
5516
5517 volatilep = 0;
5518 ll_inner = decode_field_reference (loc, ll_arg,
5519 &ll_bitsize, &ll_bitpos, &ll_mode,
5520 &ll_unsignedp, &volatilep, &ll_mask,
5521 &ll_and_mask);
5522 lr_inner = decode_field_reference (loc, lr_arg,
5523 &lr_bitsize, &lr_bitpos, &lr_mode,
5524 &lr_unsignedp, &volatilep, &lr_mask,
5525 &lr_and_mask);
5526 rl_inner = decode_field_reference (loc, rl_arg,
5527 &rl_bitsize, &rl_bitpos, &rl_mode,
5528 &rl_unsignedp, &volatilep, &rl_mask,
5529 &rl_and_mask);
5530 rr_inner = decode_field_reference (loc, rr_arg,
5531 &rr_bitsize, &rr_bitpos, &rr_mode,
5532 &rr_unsignedp, &volatilep, &rr_mask,
5533 &rr_and_mask);
5534
5535 /* The inner operation on the lhs of each comparison must be the
5536 same if we are to be able to do anything.
5537 Then see if we have constants. If not, the same must be true for
5538 the rhs's. */
5539 if (volatilep || ll_inner == 0 || rl_inner == 0
5540 || ! operand_equal_p (ll_inner, rl_inner, 0))
5541 return 0;
5542
5543 if (TREE_CODE (lr_arg) == INTEGER_CST
5544 && TREE_CODE (rr_arg) == INTEGER_CST)
5545 l_const = lr_arg, r_const = rr_arg;
5546 else if (lr_inner == 0 || rr_inner == 0
5547 || ! operand_equal_p (lr_inner, rr_inner, 0))
5548 return 0;
5549 else
5550 l_const = r_const = 0;
5551
5552 /* If either comparison code is not correct for our logical operation,
5553 fail. However, we can convert a one-bit comparison against zero into
5554 the opposite comparison against that bit being set in the field. */
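/* E.g. under TRUTH_AND_EXPR the wanted code is EQ_EXPR, but a test such as
(x & 4) != 0 can still participate: it is rewritten as the equivalent
(x & 4) == 4 by using the mask itself as the constant. */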
5555
5556 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5557 if (lcode != wanted_code)
5558 {
5559 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5560 {
5561 /* Make the left operand unsigned, since we are only interested
5562 in the value of one bit. Otherwise we are doing the wrong
5563 thing below. */
5564 ll_unsignedp = 1;
5565 l_const = ll_mask;
5566 }
5567 else
5568 return 0;
5569 }
5570
5571 /* This is analogous to the code for l_const above. */
5572 if (rcode != wanted_code)
5573 {
5574 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5575 {
5576 rl_unsignedp = 1;
5577 r_const = rl_mask;
5578 }
5579 else
5580 return 0;
5581 }
5582
5583 /* See if we can find a mode that contains both fields being compared on
5584 the left. If we can't, fail. Otherwise, update all constants and masks
5585 to be relative to a field of that size. */
5586 first_bit = MIN (ll_bitpos, rl_bitpos);
5587 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5588 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5589 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5590 volatilep);
5591 if (lnmode == VOIDmode)
5592 return 0;
5593
5594 lnbitsize = GET_MODE_BITSIZE (lnmode);
5595 lnbitpos = first_bit & ~ (lnbitsize - 1);
5596 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5597 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5598
5599 if (BYTES_BIG_ENDIAN)
5600 {
5601 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5602 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5603 }
5604
5605 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5606 size_int (xll_bitpos));
5607 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5608 size_int (xrl_bitpos));
5609
5610 if (l_const)
5611 {
5612 l_const = fold_convert_loc (loc, lntype, l_const);
5613 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5614 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5615 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5616 fold_build1_loc (loc, BIT_NOT_EXPR,
5617 lntype, ll_mask))))
5618 {
5619 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5620
5621 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5622 }
5623 }
5624 if (r_const)
5625 {
5626 r_const = fold_convert_loc (loc, lntype, r_const);
5627 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5628 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5629 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5630 fold_build1_loc (loc, BIT_NOT_EXPR,
5631 lntype, rl_mask))))
5632 {
5633 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5634
5635 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5636 }
5637 }
5638
5639 /* If the right sides are not constant, do the same for them. Also,
5640 disallow this optimization if a size or signedness mismatch occurs
5641 between the left and right sides. */
5642 if (l_const == 0)
5643 {
5644 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5645 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5646 /* Make sure the two fields on the right
5647 correspond to the left without being swapped. */
5648 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5649 return 0;
5650
5651 first_bit = MIN (lr_bitpos, rr_bitpos);
5652 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5653 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5654 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5655 volatilep);
5656 if (rnmode == VOIDmode)
5657 return 0;
5658
5659 rnbitsize = GET_MODE_BITSIZE (rnmode);
5660 rnbitpos = first_bit & ~ (rnbitsize - 1);
5661 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5662 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5663
5664 if (BYTES_BIG_ENDIAN)
5665 {
5666 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5667 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5668 }
5669
5670 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5671 rntype, lr_mask),
5672 size_int (xlr_bitpos));
5673 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5674 rntype, rr_mask),
5675 size_int (xrr_bitpos));
5676
5677 /* Make a mask that corresponds to both fields being compared.
5678 Do this for both items being compared. If the operands are the
5679 same size and the bits being compared are in the same position
5680 then we can do this by masking both and comparing the masked
5681 results. */
5682 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5683 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5684 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5685 {
5686 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5687 ll_unsignedp || rl_unsignedp);
5688 if (! all_ones_mask_p (ll_mask, lnbitsize))
5689 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5690
5691 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5692 lr_unsignedp || rr_unsignedp);
5693 if (! all_ones_mask_p (lr_mask, rnbitsize))
5694 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5695
5696 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5697 }
5698
5699 /* There is still another way we can do something: If both pairs of
5700 fields being compared are adjacent, we may be able to make a wider
5701 field containing them both.
5702
5703 Note that we still must mask the lhs/rhs expressions. Furthermore,
5704 the mask must be shifted to account for the shift done by
5705 make_bit_field_ref. */
5706 if ((ll_bitsize + ll_bitpos == rl_bitpos
5707 && lr_bitsize + lr_bitpos == rr_bitpos)
5708 || (ll_bitpos == rl_bitpos + rl_bitsize
5709 && lr_bitpos == rr_bitpos + rr_bitsize))
5710 {
5711 tree type;
5712
5713 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5714 ll_bitsize + rl_bitsize,
5715 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5716 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5717 lr_bitsize + rr_bitsize,
5718 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5719
5720 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5721 size_int (MIN (xll_bitpos, xrl_bitpos)));
5722 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5723 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5724
5725 /* Convert to the smaller type before masking out unwanted bits. */
5726 type = lntype;
5727 if (lntype != rntype)
5728 {
5729 if (lnbitsize > rnbitsize)
5730 {
5731 lhs = fold_convert_loc (loc, rntype, lhs);
5732 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5733 type = rntype;
5734 }
5735 else if (lnbitsize < rnbitsize)
5736 {
5737 rhs = fold_convert_loc (loc, lntype, rhs);
5738 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5739 type = lntype;
5740 }
5741 }
5742
5743 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5744 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5745
5746 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5747 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5748
5749 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5750 }
5751
5752 return 0;
5753 }
5754
5755 /* Handle the case of comparisons with constants. If there is something in
5756 common between the masks, those bits of the constants must be the same.
5757 If not, the condition is always false. Test for this to avoid generating
5758 incorrect code below. */
5759 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5760 if (! integer_zerop (result)
5761 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5762 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5763 {
5764 if (wanted_code == NE_EXPR)
5765 {
5766 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5767 return constant_boolean_node (true, truth_type);
5768 }
5769 else
5770 {
5771 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5772 return constant_boolean_node (false, truth_type);
5773 }
5774 }
5775
5776 /* Construct the expression we will return. First get the component
5777 reference we will make. Unless the mask is all ones the width of
5778 that field, perform the mask operation. Then compare with the
5779 merged constant. */
5780 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5781 ll_unsignedp || rl_unsignedp);
5782
5783 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5784 if (! all_ones_mask_p (ll_mask, lnbitsize))
5785 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5786
5787 return build2_loc (loc, wanted_code, truth_type, result,
5788 const_binop (BIT_IOR_EXPR, l_const, r_const));
5789 }
5790 \f
5791 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5792 constant. */
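/* For illustration: MAX (x, 3) == 4 folds to x == 4, and a GE_EXPR such as
MAX (x, 3) >= 4 is decomposed below into
MAX (x, 3) == 4 || MAX (x, 3) > 4, which folds to x == 4 || x > 4. */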
5793
5794 static tree
5795 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5796 tree op0, tree op1)
5797 {
5798 tree arg0 = op0;
5799 enum tree_code op_code;
5800 tree comp_const;
5801 tree minmax_const;
5802 int consts_equal, consts_lt;
5803 tree inner;
5804
5805 STRIP_SIGN_NOPS (arg0);
5806
5807 op_code = TREE_CODE (arg0);
5808 minmax_const = TREE_OPERAND (arg0, 1);
5809 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5810 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5811 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5812 inner = TREE_OPERAND (arg0, 0);
5813
5814 /* If something does not permit us to optimize, return the original tree. */
5815 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5816 || TREE_CODE (comp_const) != INTEGER_CST
5817 || TREE_OVERFLOW (comp_const)
5818 || TREE_CODE (minmax_const) != INTEGER_CST
5819 || TREE_OVERFLOW (minmax_const))
5820 return NULL_TREE;
5821
5822 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5823 and GT_EXPR, doing the rest with recursive calls using logical
5824 simplifications. */
5825 switch (code)
5826 {
5827 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5828 {
5829 tree tem
5830 = optimize_minmax_comparison (loc,
5831 invert_tree_comparison (code, false),
5832 type, op0, op1);
5833 if (tem)
5834 return invert_truthvalue_loc (loc, tem);
5835 return NULL_TREE;
5836 }
5837
5838 case GE_EXPR:
5839 return
5840 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5841 optimize_minmax_comparison
5842 (loc, EQ_EXPR, type, arg0, comp_const),
5843 optimize_minmax_comparison
5844 (loc, GT_EXPR, type, arg0, comp_const));
5845
5846 case EQ_EXPR:
5847 if (op_code == MAX_EXPR && consts_equal)
5848 /* MAX (X, 0) == 0 -> X <= 0 */
5849 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5850
5851 else if (op_code == MAX_EXPR && consts_lt)
5852 /* MAX (X, 0) == 5 -> X == 5 */
5853 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5854
5855 else if (op_code == MAX_EXPR)
5856 /* MAX (X, 0) == -1 -> false */
5857 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5858
5859 else if (consts_equal)
5860 /* MIN (X, 0) == 0 -> X >= 0 */
5861 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5862
5863 else if (consts_lt)
5864 /* MIN (X, 0) == 5 -> false */
5865 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5866
5867 else
5868 /* MIN (X, 0) == -1 -> X == -1 */
5869 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5870
5871 case GT_EXPR:
5872 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5873 /* MAX (X, 0) > 0 -> X > 0
5874 MAX (X, 0) > 5 -> X > 5 */
5875 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5876
5877 else if (op_code == MAX_EXPR)
5878 /* MAX (X, 0) > -1 -> true */
5879 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5880
5881 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5882 /* MIN (X, 0) > 0 -> false
5883 MIN (X, 0) > 5 -> false */
5884 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5885
5886 else
5887 /* MIN (X, 0) > -1 -> X > -1 */
5888 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5889
5890 default:
5891 return NULL_TREE;
5892 }
5893 }
5894 \f
5895 /* T is an integer expression that is being multiplied, divided, or taken a
5896 modulus (CODE says which and what kind of divide or modulus) by a
5897 constant C. See if we can eliminate that operation by folding it with
5898 other operations already in T. WIDE_TYPE, if non-null, is a type that
5899 should be used for the computation if wider than our type.
5900
5901 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5902 (X * 2) + (Y * 4). We must, however, be assured that either the original
5903 expression would not overflow or that overflow is undefined for the type
5904 in the language in question.
5905
5906 If we return a non-null expression, it is an equivalent form of the
5907 original computation, but need not be in the original type.
5908
5909 We set *STRICT_OVERFLOW_P to true if the return value depends on
5910 signed overflow being undefined. Otherwise we do not change
5911 *STRICT_OVERFLOW_P. */
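/* A second illustration: for signed X, (X * 8) / 4 folds to X * 2. Since
the result is only valid when the multiplication cannot wrap, the fold
sets *STRICT_OVERFLOW_P to record its reliance on signed overflow being
undefined. */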
5912
5913 static tree
5914 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5915 bool *strict_overflow_p)
5916 {
5917 /* To avoid exponential search depth, refuse to allow recursion past
5918 three levels. Beyond that (1) it's highly unlikely that we'll find
5919 something interesting and (2) we've probably processed it before
5920 when we built the inner expression. */
5921
5922 static int depth;
5923 tree ret;
5924
5925 if (depth > 3)
5926 return NULL;
5927
5928 depth++;
5929 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5930 depth--;
5931
5932 return ret;
5933 }
5934
5935 static tree
5936 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5937 bool *strict_overflow_p)
5938 {
5939 tree type = TREE_TYPE (t);
5940 enum tree_code tcode = TREE_CODE (t);
5941 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5942 > GET_MODE_SIZE (TYPE_MODE (type)))
5943 ? wide_type : type);
5944 tree t1, t2;
5945 int same_p = tcode == code;
5946 tree op0 = NULL_TREE, op1 = NULL_TREE;
5947 bool sub_strict_overflow_p;
5948
5949 /* Don't deal with constants of zero here; they confuse the code below. */
5950 if (integer_zerop (c))
5951 return NULL_TREE;
5952
5953 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5954 op0 = TREE_OPERAND (t, 0);
5955
5956 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5957 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5958
5959 /* Note that we need not handle conditional operations here since fold
5960 already handles those cases. So just do arithmetic here. */
5961 switch (tcode)
5962 {
5963 case INTEGER_CST:
5964 /* For a constant, we can always simplify if we are a multiply
5965 or (for divide and modulus) if it is a multiple of our constant. */
5966 if (code == MULT_EXPR
5967 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5968 return const_binop (code, fold_convert (ctype, t),
5969 fold_convert (ctype, c));
5970 break;
5971
5972 CASE_CONVERT: case NON_LVALUE_EXPR:
5973 /* If op0 is an expression ... */
5974 if ((COMPARISON_CLASS_P (op0)
5975 || UNARY_CLASS_P (op0)
5976 || BINARY_CLASS_P (op0)
5977 || VL_EXP_CLASS_P (op0)
5978 || EXPRESSION_CLASS_P (op0))
5979 /* ... and has wrapping overflow, and its type is smaller
5980 than ctype, then we cannot pass through as widening. */
5981 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5982 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5983 && (TYPE_PRECISION (ctype)
5984 > TYPE_PRECISION (TREE_TYPE (op0))))
5985 /* ... or this is a truncation (t is narrower than op0),
5986 then we cannot pass through this narrowing. */
5987 || (TYPE_PRECISION (type)
5988 < TYPE_PRECISION (TREE_TYPE (op0)))
5989 /* ... or signedness changes for division or modulus,
5990 then we cannot pass through this conversion. */
5991 || (code != MULT_EXPR
5992 && (TYPE_UNSIGNED (ctype)
5993 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5994 /* ... or has undefined overflow while the converted to
5995 type has not, we cannot do the operation in the inner type
5996 as that would introduce undefined overflow. */
5997 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5999 && !TYPE_OVERFLOW_UNDEFINED (type))))
6000 break;
6001
6002 /* Pass the constant down and see if we can make a simplification. If
6003 we can, replace this expression with the inner simplification for
6004 possible later conversion to our or some other type. */
6005 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6006 && TREE_CODE (t2) == INTEGER_CST
6007 && !TREE_OVERFLOW (t2)
6008 && (0 != (t1 = extract_muldiv (op0, t2, code,
6009 code == MULT_EXPR
6010 ? ctype : NULL_TREE,
6011 strict_overflow_p))))
6012 return t1;
6013 break;
6014
6015 case ABS_EXPR:
6016 /* If widening the type changes it from signed to unsigned, then we
6017 must avoid building ABS_EXPR itself as unsigned. */
6018 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6019 {
6020 tree cstype = (*signed_type_for) (ctype);
6021 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6022 != 0)
6023 {
6024 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6025 return fold_convert (ctype, t1);
6026 }
6027 break;
6028 }
6029 /* If the constant is negative, we cannot simplify this. */
6030 if (tree_int_cst_sgn (c) == -1)
6031 break;
6032 /* FALLTHROUGH */
6033 case NEGATE_EXPR:
6034 /* For division and modulus, type can't be unsigned, as e.g.
6035 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6036 For signed types, even with wrapping overflow, this is fine. */
6037 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6038 break;
6039 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6040 != 0)
6041 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6042 break;
6043
6044 case MIN_EXPR: case MAX_EXPR:
6045 /* If widening the type changes the signedness, then we can't perform
6046 this optimization as that changes the result. */
6047 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6048 break;
6049
6050 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6051 sub_strict_overflow_p = false;
6052 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6053 &sub_strict_overflow_p)) != 0
6054 && (t2 = extract_muldiv (op1, c, code, wide_type,
6055 &sub_strict_overflow_p)) != 0)
6056 {
6057 if (tree_int_cst_sgn (c) < 0)
6058 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6059 if (sub_strict_overflow_p)
6060 *strict_overflow_p = true;
6061 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6062 fold_convert (ctype, t2));
6063 }
6064 break;
6065
6066 case LSHIFT_EXPR: case RSHIFT_EXPR:
6067 /* If the second operand is constant, this is a multiplication
6068 or floor division by a power of two, so we can treat it that
6069 way unless the multiplier or divisor overflows. Signed
6070 left-shift overflow is implementation-defined rather than
6071 undefined in C90, so do not convert signed left shift into
6072 multiplication. */
6073 if (TREE_CODE (op1) == INTEGER_CST
6074 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6075 /* const_binop may not detect overflow correctly,
6076 so check for it explicitly here. */
6077 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6078 && 0 != (t1 = fold_convert (ctype,
6079 const_binop (LSHIFT_EXPR,
6080 size_one_node,
6081 op1)))
6082 && !TREE_OVERFLOW (t1))
6083 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6084 ? MULT_EXPR : FLOOR_DIV_EXPR,
6085 ctype,
6086 fold_convert (ctype, op0),
6087 t1),
6088 c, code, wide_type, strict_overflow_p);
6089 break;
6090
6091 case PLUS_EXPR: case MINUS_EXPR:
6092 /* See if we can eliminate the operation on both sides. If we can, we
6093 can return a new PLUS or MINUS. If we can't, the only remaining
6094 cases where we can do anything are if the second operand is a
6095 constant. */
6096 sub_strict_overflow_p = false;
6097 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6098 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6099 if (t1 != 0 && t2 != 0
6100 && (code == MULT_EXPR
6101 /* If not multiplication, we can only do this if both operands
6102 are divisible by c. */
6103 || (multiple_of_p (ctype, op0, c)
6104 && multiple_of_p (ctype, op1, c))))
6105 {
6106 if (sub_strict_overflow_p)
6107 *strict_overflow_p = true;
6108 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6109 fold_convert (ctype, t2));
6110 }
6111
6112 /* If this was a subtraction, negate OP1 and turn it into an addition.
6113 This simplifies the logic below. */
6114 if (tcode == MINUS_EXPR)
6115 {
6116 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6117 /* If OP1 was not easily negatable, the constant may be OP0. */
6118 if (TREE_CODE (op0) == INTEGER_CST)
6119 {
6120 std::swap (op0, op1);
6121 std::swap (t1, t2);
6122 }
6123 }
6124
6125 if (TREE_CODE (op1) != INTEGER_CST)
6126 break;
6127
6128 /* If either OP1 or C are negative, this optimization is not safe for
6129 some of the division and remainder types while for others we need
6130 to change the code. */
6131 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6132 {
6133 if (code == CEIL_DIV_EXPR)
6134 code = FLOOR_DIV_EXPR;
6135 else if (code == FLOOR_DIV_EXPR)
6136 code = CEIL_DIV_EXPR;
6137 else if (code != MULT_EXPR
6138 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6139 break;
6140 }
6141
6142 /* If it's a multiply or a division/modulus operation of a multiple
6143 of our constant, do the operation and verify it doesn't overflow. */
6144 if (code == MULT_EXPR
6145 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6146 {
6147 op1 = const_binop (code, fold_convert (ctype, op1),
6148 fold_convert (ctype, c));
6149 /* We allow the constant to overflow with wrapping semantics. */
6150 if (op1 == 0
6151 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6152 break;
6153 }
6154 else
6155 break;
6156
6157 /* If we have an unsigned type, we cannot widen the operation since it
6158 will change the result if the original computation overflowed. */
6159 if (TYPE_UNSIGNED (ctype) && ctype != type)
6160 break;
6161
6162 /* If we were able to eliminate our operation from the first side,
6163 apply our operation to the second side and reform the PLUS. */
6164 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6165 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6166
6167 /* The last case is if we are a multiply. In that case, we can
6168 apply the distributive law to commute the multiply and addition
6169 if the multiplication of the constants doesn't overflow
6170 and overflow is defined. With undefined overflow
6171 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6172 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6173 return fold_build2 (tcode, ctype,
6174 fold_build2 (code, ctype,
6175 fold_convert (ctype, op0),
6176 fold_convert (ctype, c)),
6177 op1);
6178
6179 break;
6180
6181 case MULT_EXPR:
6182 /* We have a special case here if we are doing something like
6183 (C * 8) % 4 since we know that's zero. */
6184 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6185 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6186 /* If the multiplication can overflow we cannot optimize this. */
6187 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6188 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6189 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6190 {
6191 *strict_overflow_p = true;
6192 return omit_one_operand (type, integer_zero_node, op0);
6193 }
6194
6195 /* ... fall through ... */
6196
6197 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6198 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6199 /* If we can extract our operation from the LHS, do so and return a
6200 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6201 do something only if the second operand is a constant. */
6202 if (same_p
6203 && (t1 = extract_muldiv (op0, c, code, wide_type,
6204 strict_overflow_p)) != 0)
6205 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6206 fold_convert (ctype, op1));
6207 else if (tcode == MULT_EXPR && code == MULT_EXPR
6208 && (t1 = extract_muldiv (op1, c, code, wide_type,
6209 strict_overflow_p)) != 0)
6210 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6211 fold_convert (ctype, t1));
6212 else if (TREE_CODE (op1) != INTEGER_CST)
6213 return 0;
6214
6215 /* If these are the same operation types, we can associate them
6216 assuming no overflow. */
6217 if (tcode == code)
6218 {
6219 bool overflow_p = false;
6220 bool overflow_mul_p;
6221 signop sign = TYPE_SIGN (ctype);
6222 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6223 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6224 if (overflow_mul_p
6225 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6226 overflow_p = true;
6227 if (!overflow_p)
6228 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6229 wide_int_to_tree (ctype, mul));
6230 }
6231
6232 /* If these operations "cancel" each other, we have the main
6233 optimizations of this pass, which occur when either constant is a
6234 multiple of the other, in which case we replace this with an
6235 operation of either CODE or TCODE.
6236
6237 If we have an unsigned type, we cannot do this since it will change
6238 the result if the original computation overflowed. */
6239 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6240 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6241 || (tcode == MULT_EXPR
6242 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6243 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6244 && code != MULT_EXPR)))
6245 {
6246 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6247 {
6248 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6249 *strict_overflow_p = true;
6250 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6251 fold_convert (ctype,
6252 const_binop (TRUNC_DIV_EXPR,
6253 op1, c)));
6254 }
6255 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6256 {
6257 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6258 *strict_overflow_p = true;
6259 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6260 fold_convert (ctype,
6261 const_binop (TRUNC_DIV_EXPR,
6262 c, op1)));
6263 }
6264 }
6265 break;
6266
6267 default:
6268 break;
6269 }
6270
6271 return 0;
6272 }
6273 \f
6274 /* Return a node which has the indicated constant VALUE (either 0 or
6275 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6276 and is of the indicated TYPE. */
6277
6278 tree
6279 constant_boolean_node (bool value, tree type)
6280 {
6281 if (type == integer_type_node)
6282 return value ? integer_one_node : integer_zero_node;
6283 else if (type == boolean_type_node)
6284 return value ? boolean_true_node : boolean_false_node;
6285 else if (TREE_CODE (type) == VECTOR_TYPE)
6286 return build_vector_from_val (type,
6287 build_int_cst (TREE_TYPE (type),
6288 value ? -1 : 0));
6289 else
6290 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6291 }
6292
6293
6294 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6295 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6296 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6297 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6298 COND is the first argument to CODE; otherwise (as in the example
6299 given here), it is the second argument. TYPE is the type of the
6300 original expression. Return NULL_TREE if no simplification is
6301 possible. */
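/* For instance, 2 + (b ? 3 : 4) becomes b ? 5 : 6: the constant operand is
distributed into both arms, each of which then folds. */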
6302
6303 static tree
6304 fold_binary_op_with_conditional_arg (location_t loc,
6305 enum tree_code code,
6306 tree type, tree op0, tree op1,
6307 tree cond, tree arg, int cond_first_p)
6308 {
6309 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6310 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6311 tree test, true_value, false_value;
6312 tree lhs = NULL_TREE;
6313 tree rhs = NULL_TREE;
6314 enum tree_code cond_code = COND_EXPR;
6315
6316 if (TREE_CODE (cond) == COND_EXPR
6317 || TREE_CODE (cond) == VEC_COND_EXPR)
6318 {
6319 test = TREE_OPERAND (cond, 0);
6320 true_value = TREE_OPERAND (cond, 1);
6321 false_value = TREE_OPERAND (cond, 2);
6322 /* If this operand is an expression that throws (its type is void),
6323 it does not make sense to try to perform a logical or arithmetic
6324 operation involving it. */
6325 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6326 lhs = true_value;
6327 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6328 rhs = false_value;
6329 }
6330 else
6331 {
6332 tree testtype = TREE_TYPE (cond);
6333 test = cond;
6334 true_value = constant_boolean_node (true, testtype);
6335 false_value = constant_boolean_node (false, testtype);
6336 }
6337
6338 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6339 cond_code = VEC_COND_EXPR;
6340
6341 /* This transformation is only worthwhile if we don't have to wrap ARG
6342 in a SAVE_EXPR and the operation can be simplified without recursing
6343 on at least one of the branches once it's pushed inside the COND_EXPR. */
6344 if (!TREE_CONSTANT (arg)
6345 && (TREE_SIDE_EFFECTS (arg)
6346 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6347 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6348 return NULL_TREE;
6349
6350 arg = fold_convert_loc (loc, arg_type, arg);
6351 if (lhs == 0)
6352 {
6353 true_value = fold_convert_loc (loc, cond_type, true_value);
6354 if (cond_first_p)
6355 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6356 else
6357 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6358 }
6359 if (rhs == 0)
6360 {
6361 false_value = fold_convert_loc (loc, cond_type, false_value);
6362 if (cond_first_p)
6363 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6364 else
6365 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6366 }
6367
6368 /* Check that we have simplified at least one of the branches. */
6369 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6370 return NULL_TREE;
6371
6372 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6373 }
6374
6375 \f
6376 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6377
6378 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6379 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6380 ADDEND is the same as X.
6381
6382 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6383 and finite. The problematic cases are when X is zero, and its mode
6384 has signed zeros. In the case of rounding towards -infinity,
6385 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6386 modes, X + 0 is not the same as X because -0 + 0 is 0. */
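/* Consequently, with the default flags x - 0.0 folds to x while x + 0.0
does not, because (-0.0) + 0.0 is +0.0; both folds become valid once
signed zeros are ignored (e.g. with -fno-signed-zeros). */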
6387
6388 bool
6389 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6390 {
6391 if (!real_zerop (addend))
6392 return false;
6393
6394 /* Don't allow the fold with -fsignaling-nans. */
6395 if (HONOR_SNANS (element_mode (type)))
6396 return false;
6397
6398 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6399 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6400 return true;
6401
6402 /* In a vector or complex, we would need to check the sign of all zeros. */
6403 if (TREE_CODE (addend) != REAL_CST)
6404 return false;
6405
6406 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6407 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6408 negate = !negate;
6409
6410 /* The mode has signed zeros, and we have to honor their sign.
6411 In this situation, there is only one case we can return true for.
6412 X - 0 is the same as X unless rounding towards -infinity is
6413 supported. */
6414 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6415 }
6416
6417 /* Subroutine of fold() that optimizes comparisons of a division by
6418 a nonzero integer constant against an integer constant, i.e.
6419 X/C1 op C2.
6420
6421 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6422 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6423 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6424
6425 The function returns the constant folded tree if a simplification
6426 can be made, and NULL_TREE otherwise. */
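/* For illustration, with unsigned X: X / 4 == 2 becomes a range check for
8 <= X && X <= 11, and X / 4 > 2 becomes X > 11. */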
6427
6428 static tree
6429 fold_div_compare (location_t loc,
6430 enum tree_code code, tree type, tree arg0, tree arg1)
6431 {
6432 tree prod, tmp, hi, lo;
6433 tree arg00 = TREE_OPERAND (arg0, 0);
6434 tree arg01 = TREE_OPERAND (arg0, 1);
6435 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6436 bool neg_overflow = false;
6437 bool overflow;
6438
6439 /* We have to do this the hard way to detect unsigned overflow.
6440 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6441 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6442 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6443 neg_overflow = false;
6444
6445 if (sign == UNSIGNED)
6446 {
6447 tmp = int_const_binop (MINUS_EXPR, arg01,
6448 build_int_cst (TREE_TYPE (arg01), 1));
6449 lo = prod;
6450
6451 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6452 val = wi::add (prod, tmp, sign, &overflow);
6453 hi = force_fit_type (TREE_TYPE (arg00), val,
6454 -1, overflow | TREE_OVERFLOW (prod));
6455 }
6456 else if (tree_int_cst_sgn (arg01) >= 0)
6457 {
6458 tmp = int_const_binop (MINUS_EXPR, arg01,
6459 build_int_cst (TREE_TYPE (arg01), 1));
6460 switch (tree_int_cst_sgn (arg1))
6461 {
6462 case -1:
6463 neg_overflow = true;
6464 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6465 hi = prod;
6466 break;
6467
6468 case 0:
6469 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6470 hi = tmp;
6471 break;
6472
6473 case 1:
6474 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6475 lo = prod;
6476 break;
6477
6478 default:
6479 gcc_unreachable ();
6480 }
6481 }
6482 else
6483 {
6484 /* A negative divisor reverses the relational operators. */
6485 code = swap_tree_comparison (code);
6486
6487 tmp = int_const_binop (PLUS_EXPR, arg01,
6488 build_int_cst (TREE_TYPE (arg01), 1));
6489 switch (tree_int_cst_sgn (arg1))
6490 {
6491 case -1:
6492 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6493 lo = prod;
6494 break;
6495
6496 case 0:
6497 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6498 lo = tmp;
6499 break;
6500
6501 case 1:
6502 neg_overflow = true;
6503 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6504 hi = prod;
6505 break;
6506
6507 default:
6508 gcc_unreachable ();
6509 }
6510 }
6511
6512 switch (code)
6513 {
6514 case EQ_EXPR:
6515 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6516 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6517 if (TREE_OVERFLOW (hi))
6518 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6519 if (TREE_OVERFLOW (lo))
6520 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6521 return build_range_check (loc, type, arg00, 1, lo, hi);
6522
6523 case NE_EXPR:
6524 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6525 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6526 if (TREE_OVERFLOW (hi))
6527 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6528 if (TREE_OVERFLOW (lo))
6529 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6530 return build_range_check (loc, type, arg00, 0, lo, hi);
6531
6532 case LT_EXPR:
6533 if (TREE_OVERFLOW (lo))
6534 {
6535 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6536 return omit_one_operand_loc (loc, type, tmp, arg00);
6537 }
6538 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6539
6540 case LE_EXPR:
6541 if (TREE_OVERFLOW (hi))
6542 {
6543 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6544 return omit_one_operand_loc (loc, type, tmp, arg00);
6545 }
6546 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6547
6548 case GT_EXPR:
6549 if (TREE_OVERFLOW (hi))
6550 {
6551 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6552 return omit_one_operand_loc (loc, type, tmp, arg00);
6553 }
6554 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6555
6556 case GE_EXPR:
6557 if (TREE_OVERFLOW (lo))
6558 {
6559 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6560 return omit_one_operand_loc (loc, type, tmp, arg00);
6561 }
6562 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6563
6564 default:
6565 break;
6566 }
6567
6568 return NULL_TREE;
6569 }
6570
6571
6572 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6573 equality/inequality test, then return a simplified form of the test
6574 using a sign test. Otherwise return NULL. TYPE is the desired
6575 result type. */
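/* For illustration (with types chosen for the example): if X has an 8-bit
unsigned type whose sign bit is 0x80, then (X & 0x80) != 0 becomes
(signed char) X < 0 and (X & 0x80) == 0 becomes (signed char) X >= 0. */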
6576
6577 static tree
6578 fold_single_bit_test_into_sign_test (location_t loc,
6579 enum tree_code code, tree arg0, tree arg1,
6580 tree result_type)
6581 {
6582 /* If this is testing a single bit, we can optimize the test. */
6583 if ((code == NE_EXPR || code == EQ_EXPR)
6584 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6585 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6586 {
6587 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6588 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6589 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6590
6591 if (arg00 != NULL_TREE
6592 /* This is only a win if casting to a signed type is cheap,
6593 i.e. when arg00's type is not a partial mode. */
6594 && TYPE_PRECISION (TREE_TYPE (arg00))
6595 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6596 {
6597 tree stype = signed_type_for (TREE_TYPE (arg00));
6598 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6599 result_type,
6600 fold_convert_loc (loc, stype, arg00),
6601 build_int_cst (stype, 0));
6602 }
6603 }
6604
6605 return NULL_TREE;
6606 }
6607
6608 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6609 equality/inequality test, then return a simplified form of
6610 the test using shifts and logical operations. Otherwise return
6611 NULL. TYPE is the desired result type. */
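/* For illustration: (X & 8) != 0 becomes ((X >> 3) & 1), and (X & 8) == 0
becomes (((X >> 3) ^ 1) & 1), modulo the type conversions performed
below. */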
6612
6613 tree
6614 fold_single_bit_test (location_t loc, enum tree_code code,
6615 tree arg0, tree arg1, tree result_type)
6616 {
6617 /* If this is testing a single bit, we can optimize the test. */
6618 if ((code == NE_EXPR || code == EQ_EXPR)
6619 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6620 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6621 {
6622 tree inner = TREE_OPERAND (arg0, 0);
6623 tree type = TREE_TYPE (arg0);
6624 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6625 machine_mode operand_mode = TYPE_MODE (type);
6626 int ops_unsigned;
6627 tree signed_type, unsigned_type, intermediate_type;
6628 tree tem, one;
6629
6630 /* First, see if we can fold the single bit test into a sign-bit
6631 test. */
6632 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6633 result_type);
6634 if (tem)
6635 return tem;
6636
6637 /* Otherwise we have (A & C) != 0 where C is a single bit,
6638 convert that into ((A >> C2) & 1), where C2 = log2(C).
6639 Similarly for (A & C) == 0. */
6640
6641 /* If INNER is a right shift of a constant and it plus BITNUM does
6642 not overflow, adjust BITNUM and INNER. */
6643 if (TREE_CODE (inner) == RSHIFT_EXPR
6644 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6645 && bitnum < TYPE_PRECISION (type)
6646 && wi::ltu_p (TREE_OPERAND (inner, 1),
6647 TYPE_PRECISION (type) - bitnum))
6648 {
6649 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6650 inner = TREE_OPERAND (inner, 0);
6651 }
6652
6653 /* If we are going to be able to omit the AND below, we must do our
6654 operations as unsigned. If we must use the AND, we have a choice.
6655 Normally unsigned is faster, but for some machines signed is. */
6656 #ifdef LOAD_EXTEND_OP
6657 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6658 && !flag_syntax_only) ? 0 : 1;
6659 #else
6660 ops_unsigned = 1;
6661 #endif
6662
6663 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6664 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6665 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6666 inner = fold_convert_loc (loc, intermediate_type, inner);
6667
6668 if (bitnum != 0)
6669 inner = build2 (RSHIFT_EXPR, intermediate_type,
6670 inner, size_int (bitnum));
6671
6672 one = build_int_cst (intermediate_type, 1);
6673
6674 if (code == EQ_EXPR)
6675 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6676
6677 /* Put the AND last so it can combine with more things. */
6678 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6679
6680 /* Make sure to return the proper type. */
6681 inner = fold_convert_loc (loc, result_type, inner);
6682
6683 return inner;
6684 }
6685 return NULL_TREE;
6686 }
6687
6688 /* Check whether we are allowed to reorder operands arg0 and arg1,
6689 such that the evaluation of arg1 occurs before arg0. */
6690
6691 static bool
6692 reorder_operands_p (const_tree arg0, const_tree arg1)
6693 {
6694 if (! flag_evaluation_order)
6695 return true;
6696 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6697 return true;
6698 return ! TREE_SIDE_EFFECTS (arg0)
6699 && ! TREE_SIDE_EFFECTS (arg1);
6700 }
6701
6702 /* Test whether it is preferable to swap two operands, ARG0 and
6703 ARG1, for example because ARG0 is an integer constant and ARG1
6704 isn't. If REORDER is true, only recommend swapping if we can
6705 evaluate the operands in reverse order. */
6706
6707 bool
6708 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6709 {
6710 if (CONSTANT_CLASS_P (arg1))
6711 return 0;
6712 if (CONSTANT_CLASS_P (arg0))
6713 return 1;
6714
6715 STRIP_NOPS (arg0);
6716 STRIP_NOPS (arg1);
6717
6718 if (TREE_CONSTANT (arg1))
6719 return 0;
6720 if (TREE_CONSTANT (arg0))
6721 return 1;
6722
6723 if (reorder && flag_evaluation_order
6724 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6725 return 0;
6726
6727 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6728 for commutative and comparison operators. Ensuring a canonical
6729 form allows the optimizers to find additional redundancies without
6730 having to explicitly check for both orderings. */
6731 if (TREE_CODE (arg0) == SSA_NAME
6732 && TREE_CODE (arg1) == SSA_NAME
6733 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6734 return 1;
6735
6736 /* Put SSA_NAMEs last. */
6737 if (TREE_CODE (arg1) == SSA_NAME)
6738 return 0;
6739 if (TREE_CODE (arg0) == SSA_NAME)
6740 return 1;
6741
6742 /* Put variables last. */
6743 if (DECL_P (arg1))
6744 return 0;
6745 if (DECL_P (arg0))
6746 return 1;
6747
6748 return 0;
6749 }
6750
6751 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6752 ARG0 is extended to a wider type. */
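/* For illustration, with short S: (int) S == 100000 is known to be false
because 100000 does not fit in short, while (int) S == 12 can be carried
out in the narrower type as S == 12. */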
6753
6754 static tree
6755 fold_widened_comparison (location_t loc, enum tree_code code,
6756 tree type, tree arg0, tree arg1)
6757 {
6758 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6759 tree arg1_unw;
6760 tree shorter_type, outer_type;
6761 tree min, max;
6762 bool above, below;
6763
6764 if (arg0_unw == arg0)
6765 return NULL_TREE;
6766 shorter_type = TREE_TYPE (arg0_unw);
6767
6768 /* Disable this optimization if we're casting a function pointer
6769 type on targets that require function pointer canonicalization. */
6770 if (targetm.have_canonicalize_funcptr_for_compare ()
6771 && TREE_CODE (shorter_type) == POINTER_TYPE
6772 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6773 return NULL_TREE;
6774
6775 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6776 return NULL_TREE;
6777
6778 arg1_unw = get_unwidened (arg1, NULL_TREE);
6779
6780 /* If possible, express the comparison in the shorter mode. */
6781 if ((code == EQ_EXPR || code == NE_EXPR
6782 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6783 && (TREE_TYPE (arg1_unw) == shorter_type
6784 || ((TYPE_PRECISION (shorter_type)
6785 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6786 && (TYPE_UNSIGNED (shorter_type)
6787 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6788 || (TREE_CODE (arg1_unw) == INTEGER_CST
6789 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6790 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6791 && int_fits_type_p (arg1_unw, shorter_type))))
6792 return fold_build2_loc (loc, code, type, arg0_unw,
6793 fold_convert_loc (loc, shorter_type, arg1_unw));
6794
6795 if (TREE_CODE (arg1_unw) != INTEGER_CST
6796 || TREE_CODE (shorter_type) != INTEGER_TYPE
6797 || !int_fits_type_p (arg1_unw, shorter_type))
6798 return NULL_TREE;
6799
6800 /* If we are comparing with an integer that does not fit into the range
6801 of the shorter type, the result is known. */
6802 outer_type = TREE_TYPE (arg1_unw);
6803 min = lower_bound_in_type (outer_type, shorter_type);
6804 max = upper_bound_in_type (outer_type, shorter_type);
6805
6806 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6807 max, arg1_unw));
6808 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6809 arg1_unw, min));
6810
6811 switch (code)
6812 {
6813 case EQ_EXPR:
6814 if (above || below)
6815 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6816 break;
6817
6818 case NE_EXPR:
6819 if (above || below)
6820 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6821 break;
6822
6823 case LT_EXPR:
6824 case LE_EXPR:
6825 if (above)
6826 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6827 else if (below)
6828 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6829 /* FALLTHROUGH */
6830 case GT_EXPR:
6831 case GE_EXPR:
6832 if (above)
6833 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6834 else if (below)
6835 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6836 /* FALLTHROUGH */
6837 default:
6838 break;
6839 }
6840
6841 return NULL_TREE;
6842 }
6843
6844 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6845 ARG0 just the signedness is changed. */
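/* For illustration: with unsigned int U, (int) U == 5 can be rewritten as
U == 5U, since only the signedness of the operand changes and equality
is insensitive to it. */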
6846
6847 static tree
6848 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6849 tree arg0, tree arg1)
6850 {
6851 tree arg0_inner;
6852 tree inner_type, outer_type;
6853
6854 if (!CONVERT_EXPR_P (arg0))
6855 return NULL_TREE;
6856
6857 outer_type = TREE_TYPE (arg0);
6858 arg0_inner = TREE_OPERAND (arg0, 0);
6859 inner_type = TREE_TYPE (arg0_inner);
6860
6861 /* Disable this optimization if we're casting a function pointer
6862 type on targets that require function pointer canonicalization. */
6863 if (targetm.have_canonicalize_funcptr_for_compare ()
6864 && TREE_CODE (inner_type) == POINTER_TYPE
6865 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6866 return NULL_TREE;
6867
6868 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6869 return NULL_TREE;
6870
6871 if (TREE_CODE (arg1) != INTEGER_CST
6872 && !(CONVERT_EXPR_P (arg1)
6873 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6874 return NULL_TREE;
6875
6876 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6877 && code != NE_EXPR
6878 && code != EQ_EXPR)
6879 return NULL_TREE;
6880
6881 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6882 return NULL_TREE;
6883
6884 if (TREE_CODE (arg1) == INTEGER_CST)
6885 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6886 TREE_OVERFLOW (arg1));
6887 else
6888 arg1 = fold_convert_loc (loc, inner_type, arg1);
6889
6890 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6891 }
6892
6893
6894 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6895 means A >= Y && A != MAX, but in this case we know that
6896 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
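/* For illustration: i < n && i + 1 > j folds to i < n && i >= j, because
the first conjunct guarantees that i + 1 does not wrap past the type's
maximum value. */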
6897
6898 static tree
6899 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6900 {
6901 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6902
6903 if (TREE_CODE (bound) == LT_EXPR)
6904 a = TREE_OPERAND (bound, 0);
6905 else if (TREE_CODE (bound) == GT_EXPR)
6906 a = TREE_OPERAND (bound, 1);
6907 else
6908 return NULL_TREE;
6909
6910 typea = TREE_TYPE (a);
6911 if (!INTEGRAL_TYPE_P (typea)
6912 && !POINTER_TYPE_P (typea))
6913 return NULL_TREE;
6914
6915 if (TREE_CODE (ineq) == LT_EXPR)
6916 {
6917 a1 = TREE_OPERAND (ineq, 1);
6918 y = TREE_OPERAND (ineq, 0);
6919 }
6920 else if (TREE_CODE (ineq) == GT_EXPR)
6921 {
6922 a1 = TREE_OPERAND (ineq, 0);
6923 y = TREE_OPERAND (ineq, 1);
6924 }
6925 else
6926 return NULL_TREE;
6927
6928 if (TREE_TYPE (a1) != typea)
6929 return NULL_TREE;
6930
6931 if (POINTER_TYPE_P (typea))
6932 {
6933 /* Convert the pointers to a signed integer type before taking the difference. */
6934 tree ta = fold_convert_loc (loc, ssizetype, a);
6935 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6936 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6937 }
6938 else
6939 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6940
6941 if (!diff || !integer_onep (diff))
6942 return NULL_TREE;
6943
6944 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6945 }
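
/* Illustrative sketch (not part of the original file, #if 0'd): how a
   caller might exercise the routine above.  Operand names and the use
   of UNKNOWN_LOCATION are assumptions for the example.  */
#if 0
static tree
example_nonsharp_ineq (tree a, tree x, tree y)
{
  tree type = TREE_TYPE (a);
  /* BOUND is a < x; INEQ is a + 1 > y.  Since a < x <= MAX, a + 1
     cannot wrap, so INEQ may be rewritten to the non-sharp a >= y.  */
  tree bound = fold_build2 (LT_EXPR, boolean_type_node, a, x);
  tree a1 = fold_build2 (PLUS_EXPR, type, a, build_int_cst (type, 1));
  tree ineq = fold_build2 (GT_EXPR, boolean_type_node, a1, y);
  return fold_to_nonsharp_ineq_using_bound (UNKNOWN_LOCATION, ineq, bound);
}
#endif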
6946
6947 /* Fold a sum or difference of at least one multiplication.
6948 Returns the folded tree or NULL if no simplification could be made. */
6949
6950 static tree
6951 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6952 tree arg0, tree arg1)
6953 {
6954 tree arg00, arg01, arg10, arg11;
6955 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6956
6957 /* (A * C) +- (B * C) -> (A+-B) * C.
6958 (A * C) +- A -> A * (C+-1).
6959 We are most concerned about the case where C is a constant,
6960 but other combinations show up during loop reduction. Since
6961 it is not difficult, try all four possibilities. */
6962
6963 if (TREE_CODE (arg0) == MULT_EXPR)
6964 {
6965 arg00 = TREE_OPERAND (arg0, 0);
6966 arg01 = TREE_OPERAND (arg0, 1);
6967 }
6968 else if (TREE_CODE (arg0) == INTEGER_CST)
6969 {
6970 arg00 = build_one_cst (type);
6971 arg01 = arg0;
6972 }
6973 else
6974 {
6975 /* We cannot generate constant 1 for fract. */
6976 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6977 return NULL_TREE;
6978 arg00 = arg0;
6979 arg01 = build_one_cst (type);
6980 }
6981 if (TREE_CODE (arg1) == MULT_EXPR)
6982 {
6983 arg10 = TREE_OPERAND (arg1, 0);
6984 arg11 = TREE_OPERAND (arg1, 1);
6985 }
6986 else if (TREE_CODE (arg1) == INTEGER_CST)
6987 {
6988 arg10 = build_one_cst (type);
6989 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6990 the purpose of this canonicalization. */
6991 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6992 && negate_expr_p (arg1)
6993 && code == PLUS_EXPR)
6994 {
6995 arg11 = negate_expr (arg1);
6996 code = MINUS_EXPR;
6997 }
6998 else
6999 arg11 = arg1;
7000 }
7001 else
7002 {
7003 /* We cannot generate constant 1 for fract. */
7004 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7005 return NULL_TREE;
7006 arg10 = arg1;
7007 arg11 = build_one_cst (type);
7008 }
7009 same = NULL_TREE;
7010
7011 if (operand_equal_p (arg01, arg11, 0))
7012 same = arg01, alt0 = arg00, alt1 = arg10;
7013 else if (operand_equal_p (arg00, arg10, 0))
7014 same = arg00, alt0 = arg01, alt1 = arg11;
7015 else if (operand_equal_p (arg00, arg11, 0))
7016 same = arg00, alt0 = arg01, alt1 = arg10;
7017 else if (operand_equal_p (arg01, arg10, 0))
7018 same = arg01, alt0 = arg00, alt1 = arg11;
7019
7020 /* No identical multiplicands; see if we can find a common
7021 power-of-two factor in non-power-of-two multiplies. This
7022 can help in multi-dimensional array access. */
7023 else if (tree_fits_shwi_p (arg01)
7024 && tree_fits_shwi_p (arg11))
7025 {
7026 HOST_WIDE_INT int01, int11, tmp;
7027 bool swap = false;
7028 tree maybe_same;
7029 int01 = tree_to_shwi (arg01);
7030 int11 = tree_to_shwi (arg11);
7031
7032 /* Move min of absolute values to int11. */
7033 if (absu_hwi (int01) < absu_hwi (int11))
7034 {
7035 tmp = int01, int01 = int11, int11 = tmp;
7036 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7037 maybe_same = arg01;
7038 swap = true;
7039 }
7040 else
7041 maybe_same = arg11;
7042
7043 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7044 /* The remainder should not be a constant, otherwise we
7045 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7046 increase the number of multiplications necessary. */
7047 && TREE_CODE (arg10) != INTEGER_CST)
7048 {
7049 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7050 build_int_cst (TREE_TYPE (arg00),
7051 int01 / int11));
7052 alt1 = arg10;
7053 same = maybe_same;
7054 if (swap)
7055 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7056 }
7057 }
7058
7059 if (same)
7060 return fold_build2_loc (loc, MULT_EXPR, type,
7061 fold_build2_loc (loc, code, type,
7062 fold_convert_loc (loc, type, alt0),
7063 fold_convert_loc (loc, type, alt1)),
7064 fold_convert_loc (loc, type, same));
7065
7066 return NULL_TREE;
7067 }
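
/* Illustrative sketch (not part of the original file, #if 0'd): the two
   shapes handled above, with operand names as assumptions:

     a * 4 + b * 4  ->  (a + b) * 4       identical multiplicands
     i * 8 + j * 4  ->  (i * 2 + j) * 4   common power-of-two factor  */
#if 0
static tree
example_plusminus_mult (tree a, tree b)
{
  tree type = TREE_TYPE (a);
  tree a4 = fold_build2 (MULT_EXPR, type, a, build_int_cst (type, 4));
  tree b4 = fold_build2 (MULT_EXPR, type, b, build_int_cst (type, 4));
  /* Expected to fold to (a + b) * 4.  */
  return fold_plusminus_mult_expr (UNKNOWN_LOCATION, PLUS_EXPR, type,
				   a4, b4);
}
#endif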
7068
7069 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7070 specified by EXPR into the buffer PTR of length LEN bytes.
7071 Return the number of bytes placed in the buffer, or zero
7072 upon failure. */
7073
7074 static int
7075 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7076 {
7077 tree type = TREE_TYPE (expr);
7078 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7079 int byte, offset, word, words;
7080 unsigned char value;
7081
7082 if ((off == -1 && total_bytes > len)
7083 || off >= total_bytes)
7084 return 0;
7085 if (off == -1)
7086 off = 0;
7087 words = total_bytes / UNITS_PER_WORD;
7088
7089 for (byte = 0; byte < total_bytes; byte++)
7090 {
7091 int bitpos = byte * BITS_PER_UNIT;
7092 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7093 number of bytes. */
7094 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7095
7096 if (total_bytes > UNITS_PER_WORD)
7097 {
7098 word = byte / UNITS_PER_WORD;
7099 if (WORDS_BIG_ENDIAN)
7100 word = (words - 1) - word;
7101 offset = word * UNITS_PER_WORD;
7102 if (BYTES_BIG_ENDIAN)
7103 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7104 else
7105 offset += byte % UNITS_PER_WORD;
7106 }
7107 else
7108 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7109 if (offset >= off
7110 && offset - off < len)
7111 ptr[offset - off] = value;
7112 }
7113 return MIN (len, total_bytes - off);
7114 }
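
/* Illustrative sketch (not part of the original file, #if 0'd): encode
   the 32-bit constant 0x01020304.  On a little-endian target the buffer
   receives { 04, 03, 02, 01 }; on a big-endian target { 01, 02, 03, 04 }.
   The assumption of a 32-bit integer_type_node is ours.  */
#if 0
static int
example_encode_int (unsigned char buf[4])
{
  tree cst = build_int_cst (integer_type_node, 0x01020304);
  /* OFF == -1 requests the whole constant starting at byte 0.  */
  return native_encode_int (cst, buf, 4, -1);
}
#endif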
7115
7116
7117 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7118 specified by EXPR into the buffer PTR of length LEN bytes.
7119 Return the number of bytes placed in the buffer, or zero
7120 upon failure. */
7121
7122 static int
7123 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7124 {
7125 tree type = TREE_TYPE (expr);
7126 machine_mode mode = TYPE_MODE (type);
7127 int total_bytes = GET_MODE_SIZE (mode);
7128 FIXED_VALUE_TYPE value;
7129 tree i_value, i_type;
7130
7131 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7132 return 0;
7133
7134 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7135
7136 if (NULL_TREE == i_type
7137 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7138 return 0;
7139
7140 value = TREE_FIXED_CST (expr);
7141 i_value = double_int_to_tree (i_type, value.data);
7142
7143 return native_encode_int (i_value, ptr, len, off);
7144 }
7145
7146
7147 /* Subroutine of native_encode_expr. Encode the REAL_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7151
7152 static int
7153 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7154 {
7155 tree type = TREE_TYPE (expr);
7156 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7157 int byte, offset, word, words, bitpos;
7158 unsigned char value;
7159
7160 /* There are always 32 bits in each long, no matter the size of
7161 the host's long. We handle floating point representations with
7162 up to 192 bits. */
7163 long tmp[6];
7164
7165 if ((off == -1 && total_bytes > len)
7166 || off >= total_bytes)
7167 return 0;
7168 if (off == -1)
7169 off = 0;
7170 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7171
7172 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7173
7174 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7175 bitpos += BITS_PER_UNIT)
7176 {
7177 byte = (bitpos / BITS_PER_UNIT) & 3;
7178 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7179
7180 if (UNITS_PER_WORD < 4)
7181 {
7182 word = byte / UNITS_PER_WORD;
7183 if (WORDS_BIG_ENDIAN)
7184 word = (words - 1) - word;
7185 offset = word * UNITS_PER_WORD;
7186 if (BYTES_BIG_ENDIAN)
7187 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7188 else
7189 offset += byte % UNITS_PER_WORD;
7190 }
7191 else
7192 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7193 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7194 if (offset >= off
7195 && offset - off < len)
7196 ptr[offset - off] = value;
7197 }
7198 return MIN (len, total_bytes - off);
7199 }
7200
7201 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7202 specified by EXPR into the buffer PTR of length LEN bytes.
7203 Return the number of bytes placed in the buffer, or zero
7204 upon failure. */
7205
7206 static int
7207 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7208 {
7209 int rsize, isize;
7210 tree part;
7211
7212 part = TREE_REALPART (expr);
7213 rsize = native_encode_expr (part, ptr, len, off);
7214 if (off == -1
7215 && rsize == 0)
7216 return 0;
7217 part = TREE_IMAGPART (expr);
7218 if (off != -1)
7219 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7220 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7221 if (off == -1
7222 && isize != rsize)
7223 return 0;
7224 return rsize + isize;
7225 }
7226
7227
7228 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7229 specified by EXPR into the buffer PTR of length LEN bytes.
7230 Return the number of bytes placed in the buffer, or zero
7231 upon failure. */
7232
7233 static int
7234 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7235 {
7236 unsigned i, count;
7237 int size, offset;
7238 tree itype, elem;
7239
7240 offset = 0;
7241 count = VECTOR_CST_NELTS (expr);
7242 itype = TREE_TYPE (TREE_TYPE (expr));
7243 size = GET_MODE_SIZE (TYPE_MODE (itype));
7244 for (i = 0; i < count; i++)
7245 {
7246 if (off >= size)
7247 {
7248 off -= size;
7249 continue;
7250 }
7251 elem = VECTOR_CST_ELT (expr, i);
7252 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7253 if ((off == -1 && res != size)
7254 || res == 0)
7255 return 0;
7256 offset += res;
7257 if (offset >= len)
7258 return offset;
7259 if (off != -1)
7260 off = 0;
7261 }
7262 return offset;
7263 }
7264
7265
7266 /* Subroutine of native_encode_expr. Encode the STRING_CST
7267 specified by EXPR into the buffer PTR of length LEN bytes.
7268 Return the number of bytes placed in the buffer, or zero
7269 upon failure. */
7270
7271 static int
7272 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7273 {
7274 tree type = TREE_TYPE (expr);
7275 HOST_WIDE_INT total_bytes;
7276
7277 if (TREE_CODE (type) != ARRAY_TYPE
7278 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7279 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7280 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7281 return 0;
7282 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7283 if ((off == -1 && total_bytes > len)
7284 || off >= total_bytes)
7285 return 0;
7286 if (off == -1)
7287 off = 0;
7288 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7289 {
7290 int written = 0;
7291 if (off < TREE_STRING_LENGTH (expr))
7292 {
7293 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7294 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7295 }
7296 memset (ptr + written, 0,
7297 MIN (total_bytes - written, len - written));
7298 }
7299 else
7300 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7301 return MIN (total_bytes - off, len);
7302 }
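
/* Illustrative sketch (not part of the original file, #if 0'd): a
   STRING_CST shorter than its array type is zero-padded, so encoding a
   "ab" literal of type char[4] yields { 'a', 'b', 0, 0 }.  STR is
   assumed to be such a STRING_CST.  */
#if 0
static int
example_encode_string (tree str, unsigned char buf[4])
{
  return native_encode_string (str, buf, 4, -1);
}
#endif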
7303
7304
7305 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7306 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7307 buffer PTR of length LEN bytes. If OFF is not -1 then start
7308 the encoding at byte offset OFF and encode at most LEN bytes.
7309 Return the number of bytes placed in the buffer, or zero upon failure. */
7310
7311 int
7312 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7313 {
7314 switch (TREE_CODE (expr))
7315 {
7316 case INTEGER_CST:
7317 return native_encode_int (expr, ptr, len, off);
7318
7319 case REAL_CST:
7320 return native_encode_real (expr, ptr, len, off);
7321
7322 case FIXED_CST:
7323 return native_encode_fixed (expr, ptr, len, off);
7324
7325 case COMPLEX_CST:
7326 return native_encode_complex (expr, ptr, len, off);
7327
7328 case VECTOR_CST:
7329 return native_encode_vector (expr, ptr, len, off);
7330
7331 case STRING_CST:
7332 return native_encode_string (expr, ptr, len, off);
7333
7334 default:
7335 return 0;
7336 }
7337 }
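
/* Illustrative sketch (not part of the original file, #if 0'd): a
   round-trip through the target byte image, the usual pairing of
   native_encode_expr with native_interpret_expr below.  The buffer
   size is an assumption; a zero return means the encode failed.  */
#if 0
static tree
example_roundtrip (tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  if (len == 0)
    return NULL_TREE;
  return native_interpret_expr (TREE_TYPE (cst), buf, len);
}
#endif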
7338
7339
7340 /* Subroutine of native_interpret_expr. Interpret the contents of
7341 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7342 If the buffer cannot be interpreted, return NULL_TREE. */
7343
7344 static tree
7345 native_interpret_int (tree type, const unsigned char *ptr, int len)
7346 {
7347 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348
7349 if (total_bytes > len
7350 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7351 return NULL_TREE;
7352
7353 wide_int result = wi::from_buffer (ptr, total_bytes);
7354
7355 return wide_int_to_tree (type, result);
7356 }
7357
7358
7359 /* Subroutine of native_interpret_expr. Interpret the contents of
7360 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7361 If the buffer cannot be interpreted, return NULL_TREE. */
7362
7363 static tree
7364 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7365 {
7366 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7367 double_int result;
7368 FIXED_VALUE_TYPE fixed_value;
7369
7370 if (total_bytes > len
7371 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7372 return NULL_TREE;
7373
7374 result = double_int::from_buffer (ptr, total_bytes);
7375 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7376
7377 return build_fixed (type, fixed_value);
7378 }
7379
7380
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7384
7385 static tree
7386 native_interpret_real (tree type, const unsigned char *ptr, int len)
7387 {
7388 machine_mode mode = TYPE_MODE (type);
7389 int total_bytes = GET_MODE_SIZE (mode);
7390 int byte, offset, word, words, bitpos;
7391 unsigned char value;
7392 /* There are always 32 bits in each long, no matter the size of
7393 the host's long. We handle floating point representations with
7394 up to 192 bits. */
7395 REAL_VALUE_TYPE r;
7396 long tmp[6];
7397
7398 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7399 if (total_bytes > len || total_bytes > 24)
7400 return NULL_TREE;
7401 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7402
7403 memset (tmp, 0, sizeof (tmp));
7404 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7405 bitpos += BITS_PER_UNIT)
7406 {
7407 byte = (bitpos / BITS_PER_UNIT) & 3;
7408 if (UNITS_PER_WORD < 4)
7409 {
7410 word = byte / UNITS_PER_WORD;
7411 if (WORDS_BIG_ENDIAN)
7412 word = (words - 1) - word;
7413 offset = word * UNITS_PER_WORD;
7414 if (BYTES_BIG_ENDIAN)
7415 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7416 else
7417 offset += byte % UNITS_PER_WORD;
7418 }
7419 else
7420 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7421 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7422
7423 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7424 }
7425
7426 real_from_target (&r, tmp, mode);
7427 return build_real (type, r);
7428 }
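
/* Illustrative sketch (not part of the original file, #if 0'd):
   interpret four little-endian bytes as an IEEE single precision 1.0f
   (0x3f800000).  Both the byte order and the float format of the
   target are assumptions here.  */
#if 0
static tree
example_interpret_real (void)
{
  static const unsigned char bytes[4] = { 0x00, 0x00, 0x80, 0x3f };
  return native_interpret_real (float_type_node, bytes, 4);
}
#endif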
7429
7430
7431 /* Subroutine of native_interpret_expr. Interpret the contents of
7432 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7433 If the buffer cannot be interpreted, return NULL_TREE. */
7434
7435 static tree
7436 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7437 {
7438 tree etype, rpart, ipart;
7439 int size;
7440
7441 etype = TREE_TYPE (type);
7442 size = GET_MODE_SIZE (TYPE_MODE (etype));
7443 if (size * 2 > len)
7444 return NULL_TREE;
7445 rpart = native_interpret_expr (etype, ptr, size);
7446 if (!rpart)
7447 return NULL_TREE;
7448 ipart = native_interpret_expr (etype, ptr+size, size);
7449 if (!ipart)
7450 return NULL_TREE;
7451 return build_complex (type, rpart, ipart);
7452 }
7453
7454
7455 /* Subroutine of native_interpret_expr. Interpret the contents of
7456 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7457 If the buffer cannot be interpreted, return NULL_TREE. */
7458
7459 static tree
7460 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7461 {
7462 tree etype, elem;
7463 int i, size, count;
7464 tree *elements;
7465
7466 etype = TREE_TYPE (type);
7467 size = GET_MODE_SIZE (TYPE_MODE (etype));
7468 count = TYPE_VECTOR_SUBPARTS (type);
7469 if (size * count > len)
7470 return NULL_TREE;
7471
7472 elements = XALLOCAVEC (tree, count);
7473 for (i = count - 1; i >= 0; i--)
7474 {
7475 elem = native_interpret_expr (etype, ptr+(i*size), size);
7476 if (!elem)
7477 return NULL_TREE;
7478 elements[i] = elem;
7479 }
7480 return build_vector (type, elements);
7481 }
7482
7483
7484 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a constant of type TYPE. For
7486 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7487 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7488 return NULL_TREE. */
7489
7490 tree
7491 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7492 {
7493 switch (TREE_CODE (type))
7494 {
7495 case INTEGER_TYPE:
7496 case ENUMERAL_TYPE:
7497 case BOOLEAN_TYPE:
7498 case POINTER_TYPE:
7499 case REFERENCE_TYPE:
7500 return native_interpret_int (type, ptr, len);
7501
7502 case REAL_TYPE:
7503 return native_interpret_real (type, ptr, len);
7504
7505 case FIXED_POINT_TYPE:
7506 return native_interpret_fixed (type, ptr, len);
7507
7508 case COMPLEX_TYPE:
7509 return native_interpret_complex (type, ptr, len);
7510
7511 case VECTOR_TYPE:
7512 return native_interpret_vector (type, ptr, len);
7513
7514 default:
7515 return NULL_TREE;
7516 }
7517 }
7518
7519 /* Returns true if we can interpret the contents of a native encoding
7520 as TYPE. */
7521
7522 static bool
7523 can_native_interpret_type_p (tree type)
7524 {
7525 switch (TREE_CODE (type))
7526 {
7527 case INTEGER_TYPE:
7528 case ENUMERAL_TYPE:
7529 case BOOLEAN_TYPE:
7530 case POINTER_TYPE:
7531 case REFERENCE_TYPE:
7532 case FIXED_POINT_TYPE:
7533 case REAL_TYPE:
7534 case COMPLEX_TYPE:
7535 case VECTOR_TYPE:
7536 return true;
7537 default:
7538 return false;
7539 }
7540 }
7541
7542 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7543 TYPE at compile-time. If we're unable to perform the conversion
7544 return NULL_TREE. */
7545
7546 static tree
7547 fold_view_convert_expr (tree type, tree expr)
7548 {
7549 /* We support up to 512-bit values (for V8DFmode). */
7550 unsigned char buffer[64];
7551 int len;
7552
7553 /* Check that the host and target are sane. */
7554 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7555 return NULL_TREE;
7556
7557 len = native_encode_expr (expr, buffer, sizeof (buffer));
7558 if (len == 0)
7559 return NULL_TREE;
7560
7561 return native_interpret_expr (type, buffer, len);
7562 }
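
/* Illustrative sketch (not part of the original file, #if 0'd): fold
   VIEW_CONVERT_EXPR<float>(0x3f800000) to 1.0f at compile time,
   assuming a 32-bit integer_type_node and IEEE single precision on
   the target.  */
#if 0
static tree
example_view_convert (void)
{
  tree bits = build_int_cst (integer_type_node, 0x3f800000);
  return fold_view_convert_expr (float_type_node, bits);
}
#endif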
7563
7564 /* Build an expression for the address of T. Folds away INDIRECT_REF
7565 to avoid confusing the gimplify process. */
7566
7567 tree
7568 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7569 {
7570 /* The size of the object is not relevant when talking about its address. */
7571 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7572 t = TREE_OPERAND (t, 0);
7573
7574 if (TREE_CODE (t) == INDIRECT_REF)
7575 {
7576 t = TREE_OPERAND (t, 0);
7577
7578 if (TREE_TYPE (t) != ptrtype)
7579 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7580 }
7581 else if (TREE_CODE (t) == MEM_REF
7582 && integer_zerop (TREE_OPERAND (t, 1)))
7583 return TREE_OPERAND (t, 0);
7584 else if (TREE_CODE (t) == MEM_REF
7585 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7586 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7587 TREE_OPERAND (t, 0),
7588 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7589 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7590 {
7591 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7592
7593 if (TREE_TYPE (t) != ptrtype)
7594 t = fold_convert_loc (loc, ptrtype, t);
7595 }
7596 else
7597 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7598
7599 return t;
7600 }
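
/* Illustrative sketch (not part of the original file, #if 0'd): taking
   the address of *p folds back to p itself rather than building
   ADDR_EXPR<INDIRECT_REF<p>>.  */
#if 0
static tree
example_fold_addr (tree p)
{
  tree deref = build_fold_indirect_ref (p);
  return build_fold_addr_expr (deref);
}
#endif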
7601
7602 /* Build an expression for the address of T. */
7603
7604 tree
7605 build_fold_addr_expr_loc (location_t loc, tree t)
7606 {
7607 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7608
7609 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7610 }
7611
7612 /* Fold a unary expression of code CODE and type TYPE with operand
7613 OP0. Return the folded expression if folding is successful.
7614 Otherwise, return NULL_TREE. */
7615
7616 tree
7617 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7618 {
7619 tree tem;
7620 tree arg0;
7621 enum tree_code_class kind = TREE_CODE_CLASS (code);
7622
7623 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7624 && TREE_CODE_LENGTH (code) == 1);
7625
7626 arg0 = op0;
7627 if (arg0)
7628 {
7629 if (CONVERT_EXPR_CODE_P (code)
7630 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7631 {
7632 /* Don't use STRIP_NOPS, because signedness of argument type
7633 matters. */
7634 STRIP_SIGN_NOPS (arg0);
7635 }
7636 else
7637 {
7638 /* Strip any conversions that don't change the mode. This
7639 is safe for every expression, except for a comparison
7640 expression because its signedness is derived from its
7641 operands.
7642
7643 Note that this is done as an internal manipulation within
7644 the constant folder, in order to find the simplest
7645 representation of the arguments so that their form can be
7646 studied. In any case, the appropriate type conversions
7647 should be put back in the tree that will get out of the
7648 constant folder. */
7649 STRIP_NOPS (arg0);
7650 }
7651
7652 if (CONSTANT_CLASS_P (arg0))
7653 {
7654 tree tem = const_unop (code, type, arg0);
7655 if (tem)
7656 {
7657 if (TREE_TYPE (tem) != type)
7658 tem = fold_convert_loc (loc, type, tem);
7659 return tem;
7660 }
7661 }
7662 }
7663
7664 tem = generic_simplify (loc, code, type, op0);
7665 if (tem)
7666 return tem;
7667
7668 if (TREE_CODE_CLASS (code) == tcc_unary)
7669 {
7670 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7671 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7672 fold_build1_loc (loc, code, type,
7673 fold_convert_loc (loc, TREE_TYPE (op0),
7674 TREE_OPERAND (arg0, 1))));
7675 else if (TREE_CODE (arg0) == COND_EXPR)
7676 {
7677 tree arg01 = TREE_OPERAND (arg0, 1);
7678 tree arg02 = TREE_OPERAND (arg0, 2);
7679 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7680 arg01 = fold_build1_loc (loc, code, type,
7681 fold_convert_loc (loc,
7682 TREE_TYPE (op0), arg01));
7683 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7684 arg02 = fold_build1_loc (loc, code, type,
7685 fold_convert_loc (loc,
7686 TREE_TYPE (op0), arg02));
7687 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7688 arg01, arg02);
7689
7690 /* If this was a conversion, and all we did was to move it
7691 inside the COND_EXPR, bring it back out. But leave it if
7692 it is a conversion from integer to integer and the
7693 result precision is no wider than a word since such a
7694 conversion is cheap and may be optimized away by combine,
7695 while it couldn't if it were outside the COND_EXPR. Then return
7696 so we don't get into an infinite recursion loop taking the
7697 conversion out and then back in. */
7698
7699 if ((CONVERT_EXPR_CODE_P (code)
7700 || code == NON_LVALUE_EXPR)
7701 && TREE_CODE (tem) == COND_EXPR
7702 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7703 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7704 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7705 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7706 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7707 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7708 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7709 && (INTEGRAL_TYPE_P
7710 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7711 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7712 || flag_syntax_only))
7713 tem = build1_loc (loc, code, type,
7714 build3 (COND_EXPR,
7715 TREE_TYPE (TREE_OPERAND
7716 (TREE_OPERAND (tem, 1), 0)),
7717 TREE_OPERAND (tem, 0),
7718 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7719 TREE_OPERAND (TREE_OPERAND (tem, 2),
7720 0)));
7721 return tem;
7722 }
7723 }
7724
7725 switch (code)
7726 {
7727 case NON_LVALUE_EXPR:
7728 if (!maybe_lvalue_p (op0))
7729 return fold_convert_loc (loc, type, op0);
7730 return NULL_TREE;
7731
7732 CASE_CONVERT:
7733 case FLOAT_EXPR:
7734 case FIX_TRUNC_EXPR:
7735 if (COMPARISON_CLASS_P (op0))
7736 {
7737 /* If we have (type) (a CMP b) and type is an integral type, return
7738 new expression involving the new type. Canonicalize
7739 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7740 non-integral type.
7741 Do not fold the result, as that would not simplify further;
7742 folding it again would only result in endless recursion. */
7743 if (TREE_CODE (type) == BOOLEAN_TYPE)
7744 return build2_loc (loc, TREE_CODE (op0), type,
7745 TREE_OPERAND (op0, 0),
7746 TREE_OPERAND (op0, 1));
7747 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7748 && TREE_CODE (type) != VECTOR_TYPE)
7749 return build3_loc (loc, COND_EXPR, type, op0,
7750 constant_boolean_node (true, type),
7751 constant_boolean_node (false, type));
7752 }
7753
7754 /* Handle (T *)&A.B.C for A being of type T and B and C
7755 living at offset zero. This occurs frequently in
7756 C++ upcasting and then accessing the base. */
7757 if (TREE_CODE (op0) == ADDR_EXPR
7758 && POINTER_TYPE_P (type)
7759 && handled_component_p (TREE_OPERAND (op0, 0)))
7760 {
7761 HOST_WIDE_INT bitsize, bitpos;
7762 tree offset;
7763 machine_mode mode;
7764 int unsignedp, volatilep;
7765 tree base = TREE_OPERAND (op0, 0);
7766 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7767 &mode, &unsignedp, &volatilep, false);
7768 /* If the reference was to a (constant) zero offset, we can use
7769 the address of the base if it has the same base type
7770 as the result type and the pointer type is unqualified. */
7771 if (! offset && bitpos == 0
7772 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7773 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7774 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7775 return fold_convert_loc (loc, type,
7776 build_fold_addr_expr_loc (loc, base));
7777 }
7778
7779 if (TREE_CODE (op0) == MODIFY_EXPR
7780 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7781 /* Detect assigning a bitfield. */
7782 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7783 && DECL_BIT_FIELD
7784 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7785 {
7786 /* Don't leave an assignment inside a conversion
7787 unless assigning a bitfield. */
7788 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7789 /* First do the assignment, then return converted constant. */
7790 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7791 TREE_NO_WARNING (tem) = 1;
7792 TREE_USED (tem) = 1;
7793 return tem;
7794 }
7795
7796 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7797 constants (if x has signed type, the sign bit cannot be set
7798 in c). This folds extension into the BIT_AND_EXPR.
7799 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7800 very likely don't have maximal range for their precision and this
7801 transformation effectively doesn't preserve non-maximal ranges. */
7802 if (TREE_CODE (type) == INTEGER_TYPE
7803 && TREE_CODE (op0) == BIT_AND_EXPR
7804 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7805 {
7806 tree and_expr = op0;
7807 tree and0 = TREE_OPERAND (and_expr, 0);
7808 tree and1 = TREE_OPERAND (and_expr, 1);
7809 int change = 0;
7810
7811 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7812 || (TYPE_PRECISION (type)
7813 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7814 change = 1;
7815 else if (TYPE_PRECISION (TREE_TYPE (and1))
7816 <= HOST_BITS_PER_WIDE_INT
7817 && tree_fits_uhwi_p (and1))
7818 {
7819 unsigned HOST_WIDE_INT cst;
7820
7821 cst = tree_to_uhwi (and1);
7822 cst &= HOST_WIDE_INT_M1U
7823 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7824 change = (cst == 0);
7825 #ifdef LOAD_EXTEND_OP
7826 if (change
7827 && !flag_syntax_only
7828 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7829 == ZERO_EXTEND))
7830 {
7831 tree uns = unsigned_type_for (TREE_TYPE (and0));
7832 and0 = fold_convert_loc (loc, uns, and0);
7833 and1 = fold_convert_loc (loc, uns, and1);
7834 }
7835 #endif
7836 }
7837 if (change)
7838 {
7839 tem = force_fit_type (type, wi::to_widest (and1), 0,
7840 TREE_OVERFLOW (and1));
7841 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7842 fold_convert_loc (loc, type, and0), tem);
7843 }
7844 }
7845
7846 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7847 when one of the new casts will fold away. Conservatively we assume
7848 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7849 if (POINTER_TYPE_P (type)
7850 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7851 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7852 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7853 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7854 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7855 {
7856 tree arg00 = TREE_OPERAND (arg0, 0);
7857 tree arg01 = TREE_OPERAND (arg0, 1);
7858
7859 return fold_build_pointer_plus_loc
7860 (loc, fold_convert_loc (loc, type, arg00), arg01);
7861 }
7862
7863 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7864 of the same precision, and X is an integer type not narrower than
7865 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7866 if (INTEGRAL_TYPE_P (type)
7867 && TREE_CODE (op0) == BIT_NOT_EXPR
7868 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7869 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7870 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7871 {
7872 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7873 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7874 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7875 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7876 fold_convert_loc (loc, type, tem));
7877 }
7878
7879 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7880 type of X and Y (integer types only). */
7881 if (INTEGRAL_TYPE_P (type)
7882 && TREE_CODE (op0) == MULT_EXPR
7883 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7884 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7885 {
7886 /* Be careful not to introduce new overflows. */
7887 tree mult_type;
7888 if (TYPE_OVERFLOW_WRAPS (type))
7889 mult_type = type;
7890 else
7891 mult_type = unsigned_type_for (type);
7892
7893 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7894 {
7895 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7896 fold_convert_loc (loc, mult_type,
7897 TREE_OPERAND (op0, 0)),
7898 fold_convert_loc (loc, mult_type,
7899 TREE_OPERAND (op0, 1)));
7900 return fold_convert_loc (loc, type, tem);
7901 }
7902 }
7903
7904 return NULL_TREE;
7905
7906 case VIEW_CONVERT_EXPR:
7907 if (TREE_CODE (op0) == MEM_REF)
7908 return fold_build2_loc (loc, MEM_REF, type,
7909 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7910
7911 return NULL_TREE;
7912
7913 case NEGATE_EXPR:
7914 tem = fold_negate_expr (loc, arg0);
7915 if (tem)
7916 return fold_convert_loc (loc, type, tem);
7917 return NULL_TREE;
7918
7919 case ABS_EXPR:
7920 /* Convert fabs((double)float) into (double)fabsf(float). */
7921 if (TREE_CODE (arg0) == NOP_EXPR
7922 && TREE_CODE (type) == REAL_TYPE)
7923 {
7924 tree targ0 = strip_float_extensions (arg0);
7925 if (targ0 != arg0)
7926 return fold_convert_loc (loc, type,
7927 fold_build1_loc (loc, ABS_EXPR,
7928 TREE_TYPE (targ0),
7929 targ0));
7930 }
7931
7932 /* Strip sign ops from argument. */
7933 if (TREE_CODE (type) == REAL_TYPE)
7934 {
7935 tem = fold_strip_sign_ops (arg0);
7936 if (tem)
7937 return fold_build1_loc (loc, ABS_EXPR, type,
7938 fold_convert_loc (loc, type, tem));
7939 }
7940 return NULL_TREE;
7941
7942 case CONJ_EXPR:
7943 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7944 return fold_convert_loc (loc, type, arg0);
7945 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7946 {
7947 tree itype = TREE_TYPE (type);
7948 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7949 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7950 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7951 negate_expr (ipart));
7952 }
7953 if (TREE_CODE (arg0) == CONJ_EXPR)
7954 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7955 return NULL_TREE;
7956
7957 case BIT_NOT_EXPR:
7958 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7959 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7960 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7961 fold_convert_loc (loc, type,
7962 TREE_OPERAND (arg0, 0)))))
7963 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7964 fold_convert_loc (loc, type,
7965 TREE_OPERAND (arg0, 1)));
7966 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7967 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7968 fold_convert_loc (loc, type,
7969 TREE_OPERAND (arg0, 1)))))
7970 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7971 fold_convert_loc (loc, type,
7972 TREE_OPERAND (arg0, 0)), tem);
7973
7974 return NULL_TREE;
7975
7976 case TRUTH_NOT_EXPR:
7977 /* Note that the operand of this must be an int
7978 and its values must be 0 or 1.
7979 ("true" is a fixed value perhaps depending on the language,
7980 but we don't handle values other than 1 correctly yet.) */
7981 tem = fold_truth_not_expr (loc, arg0);
7982 if (!tem)
7983 return NULL_TREE;
7984 return fold_convert_loc (loc, type, tem);
7985
7986 case REALPART_EXPR:
7987 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7988 return fold_convert_loc (loc, type, arg0);
7989 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7990 {
7991 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7992 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7993 fold_build1_loc (loc, REALPART_EXPR, itype,
7994 TREE_OPERAND (arg0, 0)),
7995 fold_build1_loc (loc, REALPART_EXPR, itype,
7996 TREE_OPERAND (arg0, 1)));
7997 return fold_convert_loc (loc, type, tem);
7998 }
7999 if (TREE_CODE (arg0) == CONJ_EXPR)
8000 {
8001 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8002 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8003 TREE_OPERAND (arg0, 0));
8004 return fold_convert_loc (loc, type, tem);
8005 }
8006 if (TREE_CODE (arg0) == CALL_EXPR)
8007 {
8008 tree fn = get_callee_fndecl (arg0);
8009 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8010 switch (DECL_FUNCTION_CODE (fn))
8011 {
8012 CASE_FLT_FN (BUILT_IN_CEXPI):
8013 fn = mathfn_built_in (type, BUILT_IN_COS);
8014 if (fn)
8015 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8016 break;
8017
8018 default:
8019 break;
8020 }
8021 }
8022 return NULL_TREE;
8023
8024 case IMAGPART_EXPR:
8025 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8026 return build_zero_cst (type);
8027 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8028 {
8029 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8030 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8031 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8032 TREE_OPERAND (arg0, 0)),
8033 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8034 TREE_OPERAND (arg0, 1)));
8035 return fold_convert_loc (loc, type, tem);
8036 }
8037 if (TREE_CODE (arg0) == CONJ_EXPR)
8038 {
8039 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8040 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8041 return fold_convert_loc (loc, type, negate_expr (tem));
8042 }
8043 if (TREE_CODE (arg0) == CALL_EXPR)
8044 {
8045 tree fn = get_callee_fndecl (arg0);
8046 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8047 switch (DECL_FUNCTION_CODE (fn))
8048 {
8049 CASE_FLT_FN (BUILT_IN_CEXPI):
8050 fn = mathfn_built_in (type, BUILT_IN_SIN);
8051 if (fn)
8052 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8053 break;
8054
8055 default:
8056 break;
8057 }
8058 }
8059 return NULL_TREE;
8060
8061 case INDIRECT_REF:
8062 /* Fold *&X to X if X is an lvalue. */
8063 if (TREE_CODE (op0) == ADDR_EXPR)
8064 {
8065 tree op00 = TREE_OPERAND (op0, 0);
8066 if ((TREE_CODE (op00) == VAR_DECL
8067 || TREE_CODE (op00) == PARM_DECL
8068 || TREE_CODE (op00) == RESULT_DECL)
8069 && !TREE_READONLY (op00))
8070 return op00;
8071 }
8072 return NULL_TREE;
8073
8074 default:
8075 return NULL_TREE;
8076 } /* switch (code) */
8077 }
8078
8079
8080 /* If the operation was a conversion, do _not_ mark a resulting constant
8081 with TREE_OVERFLOW if the original constant was not. These conversions
8082 have implementation defined behavior and retaining the TREE_OVERFLOW
8083 flag here would confuse later passes such as VRP. */
8084 tree
8085 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8086 tree type, tree op0)
8087 {
8088 tree res = fold_unary_loc (loc, code, type, op0);
8089 if (res
8090 && TREE_CODE (res) == INTEGER_CST
8091 && TREE_CODE (op0) == INTEGER_CST
8092 && CONVERT_EXPR_CODE_P (code))
8093 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8094
8095 return res;
8096 }
8097
8098 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8099 operands OP0 and OP1. LOC is the location of the resulting expression.
8100 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8101 Return the folded expression if folding is successful. Otherwise,
8102 return NULL_TREE. */
8103 static tree
8104 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8105 tree arg0, tree arg1, tree op0, tree op1)
8106 {
8107 tree tem;
8108
8109 /* We only do these simplifications if we are optimizing. */
8110 if (!optimize)
8111 return NULL_TREE;
8112
8113 /* Check for things like (A || B) && (A || C). We can convert this
8114 to A || (B && C). Note that either operator can be any of the four
8115 truth and/or operations and the transformation will still be
8116 valid. Also note that we only care about order for the
8117 ANDIF and ORIF operators. If B contains side effects, this
8118 might change the truth-value of A. */
8119 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8120 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8121 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8122 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8123 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8124 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8125 {
8126 tree a00 = TREE_OPERAND (arg0, 0);
8127 tree a01 = TREE_OPERAND (arg0, 1);
8128 tree a10 = TREE_OPERAND (arg1, 0);
8129 tree a11 = TREE_OPERAND (arg1, 1);
8130 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8131 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8132 && (code == TRUTH_AND_EXPR
8133 || code == TRUTH_OR_EXPR));
8134
8135 if (operand_equal_p (a00, a10, 0))
8136 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8137 fold_build2_loc (loc, code, type, a01, a11));
8138 else if (commutative && operand_equal_p (a00, a11, 0))
8139 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8140 fold_build2_loc (loc, code, type, a01, a10));
8141 else if (commutative && operand_equal_p (a01, a10, 0))
8142 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8143 fold_build2_loc (loc, code, type, a00, a11));
8144
8145 /* This case is tricky because we must either have commutative
8146 operators or else A10 must not have side-effects. */
8147
8148 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8149 && operand_equal_p (a01, a11, 0))
8150 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8151 fold_build2_loc (loc, code, type, a00, a10),
8152 a01);
8153 }
8154
8155 /* See if we can build a range comparison. */
8156 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8157 return tem;
8158
8159 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8160 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8161 {
8162 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8163 if (tem)
8164 return fold_build2_loc (loc, code, type, tem, arg1);
8165 }
8166
8167 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8168 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8169 {
8170 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8171 if (tem)
8172 return fold_build2_loc (loc, code, type, arg0, tem);
8173 }
8174
8175 /* Check for the possibility of merging component references. If our
8176 lhs is another similar operation, try to merge its rhs with our
8177 rhs. Then try to merge our lhs and rhs. */
8178 if (TREE_CODE (arg0) == code
8179 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8180 TREE_OPERAND (arg0, 1), arg1)))
8181 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8182
8183 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8184 return tem;
8185
8186 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8187 && (code == TRUTH_AND_EXPR
8188 || code == TRUTH_ANDIF_EXPR
8189 || code == TRUTH_OR_EXPR
8190 || code == TRUTH_ORIF_EXPR))
8191 {
8192 enum tree_code ncode, icode;
8193
8194 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8195 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8196 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8197
8198 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8199 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8200 We don't want to pack more than two leaves into a non-IF
8201 AND/OR expression.
8202 If the tree code of the left-hand operand isn't an AND/OR-IF
8203 code and isn't equal to IF-CODE, then we don't want to add the
8204 right-hand operand. If the inner right-hand side of the
8205 left-hand operand has side-effects, or isn't simple, then we
8206 can't add to it, as otherwise we might destroy the if-sequence. */
8207 if (TREE_CODE (arg0) == icode
8208 && simple_operand_p_2 (arg1)
8209 /* Needed for sequence points to handle trapping, and
8210 side-effects. */
8211 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8212 {
8213 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8214 arg1);
8215 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8216 tem);
8217 }
8218 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8219 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8220 else if (TREE_CODE (arg1) == icode
8221 && simple_operand_p_2 (arg0)
8222 /* Needed for sequence points to handle trapping, and
8223 side-effects. */
8224 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8225 {
8226 tem = fold_build2_loc (loc, ncode, type,
8227 arg0, TREE_OPERAND (arg1, 0));
8228 return fold_build2_loc (loc, icode, type, tem,
8229 TREE_OPERAND (arg1, 1));
8230 }
8231 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8232 into (A OR B).
8233 For sequence point consistency, we need to check for trapping,
8234 and side-effects. */
8235 else if (code == icode && simple_operand_p_2 (arg0)
8236 && simple_operand_p_2 (arg1))
8237 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8238 }
8239
8240 return NULL_TREE;
8241 }
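
/* Illustrative sketch (not part of the original file, #if 0'd): the
   distribution performed above, (a || b) && (a || c) -> a || (b && c),
   valid when b has no side effects.  Operand names are assumptions.  */
#if 0
static tree
example_truth_andor (tree a, tree b, tree c)
{
  tree t = boolean_type_node;
  tree ab = fold_build2 (TRUTH_ORIF_EXPR, t, a, b);
  tree ac = fold_build2 (TRUTH_ORIF_EXPR, t, a, c);
  return fold_build2 (TRUTH_ANDIF_EXPR, t, ab, ac);
}
#endif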
8242
8243 /* Fold a binary expression of code CODE and type TYPE with operands
8244 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8245 Return the folded expression if folding is successful. Otherwise,
8246 return NULL_TREE. */
8247
8248 static tree
8249 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8250 {
8251 enum tree_code compl_code;
8252
8253 if (code == MIN_EXPR)
8254 compl_code = MAX_EXPR;
8255 else if (code == MAX_EXPR)
8256 compl_code = MIN_EXPR;
8257 else
8258 gcc_unreachable ();
8259
8260 /* MIN (MAX (a, b), b) == b. */
8261 if (TREE_CODE (op0) == compl_code
8262 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8263 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8264
8265 /* MIN (MAX (b, a), b) == b. */
8266 if (TREE_CODE (op0) == compl_code
8267 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8268 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8269 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8270
8271 /* MIN (a, MAX (a, b)) == a. */
8272 if (TREE_CODE (op1) == compl_code
8273 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8274 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8275 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8276
8277 /* MIN (a, MAX (b, a)) == a. */
8278 if (TREE_CODE (op1) == compl_code
8279 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8280 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8281 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8282
8283 return NULL_TREE;
8284 }
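
/* Illustrative sketch (not part of the original file, #if 0'd):
   MIN (MAX (a, b), b) folds to b by the first rule above.  Operand
   names are assumptions.  */
#if 0
static tree
example_minmax (tree a, tree b)
{
  tree type = TREE_TYPE (a);
  tree mx = fold_build2 (MAX_EXPR, type, a, b);
  return fold_minmax (UNKNOWN_LOCATION, MIN_EXPR, type, mx, b);
}
#endif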
8285
8286 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8287 by changing CODE to reduce the magnitude of constants involved in
8288 ARG0 of the comparison.
8289 Returns a canonicalized comparison tree if a simplification was
8290 possible, otherwise returns NULL_TREE.
8291 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8292 valid if signed overflow is undefined. */
8293
8294 static tree
8295 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8296 tree arg0, tree arg1,
8297 bool *strict_overflow_p)
8298 {
8299 enum tree_code code0 = TREE_CODE (arg0);
8300 tree t, cst0 = NULL_TREE;
8301 int sgn0;
8302 bool swap = false;
8303
8304 /* Match A +- CST code arg1 and CST code arg1. We can change the
8305 first form only if overflow is undefined. */
8306 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8307 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8308 /* In principle pointers also have undefined overflow behavior,
8309 but that causes problems elsewhere. */
8310 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8311 && (code0 == MINUS_EXPR
8312 || code0 == PLUS_EXPR)
8313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8314 || code0 == INTEGER_CST))
8315 return NULL_TREE;
8316
8317 /* Identify the constant in arg0 and its sign. */
8318 if (code0 == INTEGER_CST)
8319 cst0 = arg0;
8320 else
8321 cst0 = TREE_OPERAND (arg0, 1);
8322 sgn0 = tree_int_cst_sgn (cst0);
8323
8324 /* Overflowed constants and zero will cause problems. */
8325 if (integer_zerop (cst0)
8326 || TREE_OVERFLOW (cst0))
8327 return NULL_TREE;
8328
8329 /* See if we can reduce the magnitude of the constant in
8330 arg0 by changing the comparison code. */
8331 if (code0 == INTEGER_CST)
8332 {
8333 /* CST <= arg1 -> CST-1 < arg1. */
8334 if (code == LE_EXPR && sgn0 == 1)
8335 code = LT_EXPR;
8336 /* -CST < arg1 -> -CST-1 <= arg1. */
8337 else if (code == LT_EXPR && sgn0 == -1)
8338 code = LE_EXPR;
8339 /* CST > arg1 -> CST-1 >= arg1. */
8340 else if (code == GT_EXPR && sgn0 == 1)
8341 code = GE_EXPR;
8342 /* -CST >= arg1 -> -CST-1 > arg1. */
8343 else if (code == GE_EXPR && sgn0 == -1)
8344 code = GT_EXPR;
8345 else
8346 return NULL_TREE;
8347 /* arg1 code' CST' might be more canonical. */
8348 swap = true;
8349 }
8350 else
8351 {
8352 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8353 if (code == LT_EXPR
8354 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8355 code = LE_EXPR;
8356 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8357 else if (code == GT_EXPR
8358 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8359 code = GE_EXPR;
8360 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8361 else if (code == LE_EXPR
8362 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8363 code = LT_EXPR;
8364 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8365 else if (code == GE_EXPR
8366 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8367 code = GT_EXPR;
8368 else
8369 return NULL_TREE;
8370 *strict_overflow_p = true;
8371 }
8372
8373 /* Now build the constant reduced in magnitude. But not if that
8374 would produce one outside of its type's range. */
8375 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8376 && ((sgn0 == 1
8377 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8378 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8379 || (sgn0 == -1
8380 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8381 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8382 /* We cannot swap the comparison here as that would cause us to
8383 endlessly recurse. */
8384 return NULL_TREE;
8385
8386 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8387 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8388 if (code0 != INTEGER_CST)
8389 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8390 t = fold_convert (TREE_TYPE (arg1), t);
8391
8392 /* If swapping might yield a more canonical form, do so. */
8393 if (swap)
8394 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8395 else
8396 return fold_build2_loc (loc, code, type, t, arg1);
8397 }
8398
8399 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8400 overflow further. Try to decrease the magnitude of constants involved
8401 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8402 and put sole constants at the second argument position.
8403 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8404
8405 static tree
8406 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8407 tree arg0, tree arg1)
8408 {
8409 tree t;
8410 bool strict_overflow_p;
8411 const char * const warnmsg = G_("assuming signed overflow does not occur "
8412 "when reducing constant in comparison");
8413
8414 /* Try canonicalization by simplifying arg0. */
8415 strict_overflow_p = false;
8416 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8417 &strict_overflow_p);
8418 if (t)
8419 {
8420 if (strict_overflow_p)
8421 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8422 return t;
8423 }
8424
8425 /* Try canonicalization by simplifying arg1 using the swapped
8426 comparison. */
8427 code = swap_tree_comparison (code);
8428 strict_overflow_p = false;
8429 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8430 &strict_overflow_p);
8431 if (t && strict_overflow_p)
8432 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8433 return t;
8434 }
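
/* Illustrative sketch (not part of the original file, #if 0'd): with
   undefined signed overflow, "x + 2 <= y" canonicalizes via the
   routines above to "x + 1 < y", shrinking the constant's magnitude.
   Operand names are assumptions.  */
#if 0
static tree
example_canonicalize (tree x, tree y)
{
  tree type = TREE_TYPE (x);
  tree lhs = fold_build2 (PLUS_EXPR, type, x, build_int_cst (type, 2));
  return maybe_canonicalize_comparison (UNKNOWN_LOCATION, LE_EXPR,
					boolean_type_node, lhs, y);
}
#endif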
8435
8436 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8437 space. This is used to avoid issuing overflow warnings for
8438 expressions like &p->x which cannot wrap. */
8439
8440 static bool
8441 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8442 {
8443 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8444 return true;
8445
8446 if (bitpos < 0)
8447 return true;
8448
8449 wide_int wi_offset;
8450 int precision = TYPE_PRECISION (TREE_TYPE (base));
8451 if (offset == NULL_TREE)
8452 wi_offset = wi::zero (precision);
8453 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8454 return true;
8455 else
8456 wi_offset = offset;
8457
8458 bool overflow;
8459 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8460 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8461 if (overflow)
8462 return true;
8463
8464 if (!wi::fits_uhwi_p (total))
8465 return true;
8466
8467 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8468 if (size <= 0)
8469 return true;
8470
8471 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8472 array. */
8473 if (TREE_CODE (base) == ADDR_EXPR)
8474 {
8475 HOST_WIDE_INT base_size;
8476
8477 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8478 if (base_size > 0 && size < base_size)
8479 size = base_size;
8480 }
8481
8482 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8483 }
8484
8485 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8486 kind INTEGER_CST. This makes sure to properly sign-extend the
8487 constant. */
8488
8489 static HOST_WIDE_INT
8490 size_low_cst (const_tree t)
8491 {
8492 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8493 int prec = TYPE_PRECISION (TREE_TYPE (t));
8494 if (prec < HOST_BITS_PER_WIDE_INT)
8495 return sext_hwi (w, prec);
8496 return w;
8497 }
8498
8499 /* Subroutine of fold_binary. This routine performs all of the
8500 transformations that are common to the equality/inequality
8501 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8502 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8503 fold_binary should call fold_binary. Fold a comparison with
8504 tree code CODE and type TYPE with operands OP0 and OP1. Return
8505 the folded comparison or NULL_TREE. */
8506
8507 static tree
8508 fold_comparison (location_t loc, enum tree_code code, tree type,
8509 tree op0, tree op1)
8510 {
8511 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8512 tree arg0, arg1, tem;
8513
8514 arg0 = op0;
8515 arg1 = op1;
8516
8517 STRIP_SIGN_NOPS (arg0);
8518 STRIP_SIGN_NOPS (arg1);
8519
8520 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8521 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8522 && (equality_code
8523 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8524 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8526 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8527 && TREE_CODE (arg1) == INTEGER_CST
8528 && !TREE_OVERFLOW (arg1))
8529 {
8530 const enum tree_code
8531 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8532 tree const1 = TREE_OPERAND (arg0, 1);
8533 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8534 tree variable = TREE_OPERAND (arg0, 0);
8535 tree new_const = int_const_binop (reverse_op, const2, const1);
8536
8537 /* If the constant operation overflowed this can be
8538 simplified as a comparison against INT_MAX/INT_MIN. */
8539 if (TREE_OVERFLOW (new_const)
8540 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8541 {
8542 int const1_sgn = tree_int_cst_sgn (const1);
8543 enum tree_code code2 = code;
8544
8545 /* Get the sign of the constant on the lhs if the
8546 operation were VARIABLE + CONST1. */
8547 if (TREE_CODE (arg0) == MINUS_EXPR)
8548 const1_sgn = -const1_sgn;
8549
8550 /* The sign of the constant determines if we overflowed
8551 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8552 Canonicalize to the INT_MIN overflow by swapping the comparison
8553 if necessary. */
8554 if (const1_sgn == -1)
8555 code2 = swap_tree_comparison (code);
8556
8557 /* We now can look at the canonicalized case
8558 VARIABLE + 1 CODE2 INT_MIN
8559 and decide on the result. */
8560 switch (code2)
8561 {
8562 case EQ_EXPR:
8563 case LT_EXPR:
8564 case LE_EXPR:
8565 return
8566 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8567
8568 case NE_EXPR:
8569 case GE_EXPR:
8570 case GT_EXPR:
8571 return
8572 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8573
8574 default:
8575 gcc_unreachable ();
8576 }
8577 }
8578 else
8579 {
8580 if (!equality_code)
8581 fold_overflow_warning ("assuming signed overflow does not occur "
8582 "when changing X +- C1 cmp C2 to "
8583 "X cmp C2 -+ C1",
8584 WARN_STRICT_OVERFLOW_COMPARISON);
8585 return fold_build2_loc (loc, code, type, variable, new_const);
8586 }
8587 }
8588
8589 /* For comparisons of pointers we can decompose them into a compile-time
8590 comparison of the base objects and the offsets into the object.
8591 This requires at least one operand being an ADDR_EXPR or a
8592 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8593 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8594 && (TREE_CODE (arg0) == ADDR_EXPR
8595 || TREE_CODE (arg1) == ADDR_EXPR
8596 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8597 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8598 {
8599 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8600 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8601 machine_mode mode;
8602 int volatilep, unsignedp;
8603 bool indirect_base0 = false, indirect_base1 = false;
8604
8605 /* Get base and offset for the access. Strip ADDR_EXPR for
8606 get_inner_reference, but put it back by stripping INDIRECT_REF
8607 off the base object if possible. indirect_baseN will be true
8608 if baseN is not an address but refers to the object itself. */
8609 base0 = arg0;
8610 if (TREE_CODE (arg0) == ADDR_EXPR)
8611 {
8612 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8613 &bitsize, &bitpos0, &offset0, &mode,
8614 &unsignedp, &volatilep, false);
8615 if (TREE_CODE (base0) == INDIRECT_REF)
8616 base0 = TREE_OPERAND (base0, 0);
8617 else
8618 indirect_base0 = true;
8619 }
8620 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8621 {
8622 base0 = TREE_OPERAND (arg0, 0);
8623 STRIP_SIGN_NOPS (base0);
8624 if (TREE_CODE (base0) == ADDR_EXPR)
8625 {
8626 base0 = TREE_OPERAND (base0, 0);
8627 indirect_base0 = true;
8628 }
8629 offset0 = TREE_OPERAND (arg0, 1);
8630 if (tree_fits_shwi_p (offset0))
8631 {
8632 HOST_WIDE_INT off = size_low_cst (offset0);
8633 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8634 * BITS_PER_UNIT)
8635 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8636 {
8637 bitpos0 = off * BITS_PER_UNIT;
8638 offset0 = NULL_TREE;
8639 }
8640 }
8641 }
8642
8643 base1 = arg1;
8644 if (TREE_CODE (arg1) == ADDR_EXPR)
8645 {
8646 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8647 &bitsize, &bitpos1, &offset1, &mode,
8648 &unsignedp, &volatilep, false);
8649 if (TREE_CODE (base1) == INDIRECT_REF)
8650 base1 = TREE_OPERAND (base1, 0);
8651 else
8652 indirect_base1 = true;
8653 }
8654 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8655 {
8656 base1 = TREE_OPERAND (arg1, 0);
8657 STRIP_SIGN_NOPS (base1);
8658 if (TREE_CODE (base1) == ADDR_EXPR)
8659 {
8660 base1 = TREE_OPERAND (base1, 0);
8661 indirect_base1 = true;
8662 }
8663 offset1 = TREE_OPERAND (arg1, 1);
8664 if (tree_fits_shwi_p (offset1))
8665 {
8666 HOST_WIDE_INT off = size_low_cst (offset1);
8667 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8668 * BITS_PER_UNIT)
8669 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8670 {
8671 bitpos1 = off * BITS_PER_UNIT;
8672 offset1 = NULL_TREE;
8673 }
8674 }
8675 }
8676
8677 /* A local variable can never be pointed to by
8678 the default SSA name of an incoming parameter. */
8679 if ((TREE_CODE (arg0) == ADDR_EXPR
8680 && indirect_base0
8681 && TREE_CODE (base0) == VAR_DECL
8682 && auto_var_in_fn_p (base0, current_function_decl)
8683 && !indirect_base1
8684 && TREE_CODE (base1) == SSA_NAME
8685 && SSA_NAME_IS_DEFAULT_DEF (base1)
8686 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8687 || (TREE_CODE (arg1) == ADDR_EXPR
8688 && indirect_base1
8689 && TREE_CODE (base1) == VAR_DECL
8690 && auto_var_in_fn_p (base1, current_function_decl)
8691 && !indirect_base0
8692 && TREE_CODE (base0) == SSA_NAME
8693 && SSA_NAME_IS_DEFAULT_DEF (base0)
8694 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8695 {
8696 if (code == NE_EXPR)
8697 return constant_boolean_node (1, type);
8698 else if (code == EQ_EXPR)
8699 return constant_boolean_node (0, type);
8700 }
8701 /* If we have equivalent bases we might be able to simplify. */
8702 else if (indirect_base0 == indirect_base1
8703 && operand_equal_p (base0, base1, 0))
8704 {
8705 /* We can fold this expression to a constant if the non-constant
8706 offset parts are equal. */
8707 if ((offset0 == offset1
8708 || (offset0 && offset1
8709 && operand_equal_p (offset0, offset1, 0)))
8710 && (code == EQ_EXPR
8711 || code == NE_EXPR
8712 || (indirect_base0 && DECL_P (base0))
8713 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8714
8715 {
8716 if (!equality_code
8717 && bitpos0 != bitpos1
8718 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8719 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8720 fold_overflow_warning (("assuming pointer wraparound does not "
8721 "occur when comparing P +- C1 with "
8722 "P +- C2"),
8723 WARN_STRICT_OVERFLOW_CONDITIONAL);
8724
8725 switch (code)
8726 {
8727 case EQ_EXPR:
8728 return constant_boolean_node (bitpos0 == bitpos1, type);
8729 case NE_EXPR:
8730 return constant_boolean_node (bitpos0 != bitpos1, type);
8731 case LT_EXPR:
8732 return constant_boolean_node (bitpos0 < bitpos1, type);
8733 case LE_EXPR:
8734 return constant_boolean_node (bitpos0 <= bitpos1, type);
8735 case GE_EXPR:
8736 return constant_boolean_node (bitpos0 >= bitpos1, type);
8737 case GT_EXPR:
8738 return constant_boolean_node (bitpos0 > bitpos1, type);
8739 default:;
8740 }
8741 }
8742 /* We can simplify the comparison to a comparison of the variable
8743 offset parts if the constant offset parts are equal.
8744 Be careful to use signed sizetype here because otherwise we
8745 mess with array offsets in the wrong way. This is possible
8746 because pointer arithmetic is restricted to remain within an
8747 object and overflow on pointer differences is undefined as of
8748 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8749 else if (bitpos0 == bitpos1
8750 && (equality_code
8751 || (indirect_base0 && DECL_P (base0))
8752 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8753 {
8754 /* By converting to signed sizetype we cover middle-end pointer
8755 arithmetic which operates on unsigned pointer types of size
8756 type size and ARRAY_REF offsets which are properly sign or
8757 zero extended from their type in case it is narrower than
8758 sizetype. */
8759 if (offset0 == NULL_TREE)
8760 offset0 = build_int_cst (ssizetype, 0);
8761 else
8762 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8763 if (offset1 == NULL_TREE)
8764 offset1 = build_int_cst (ssizetype, 0);
8765 else
8766 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8767
8768 if (!equality_code
8769 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8770 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8771 fold_overflow_warning (("assuming pointer wraparound does not "
8772 "occur when comparing P +- C1 with "
8773 "P +- C2"),
8774 WARN_STRICT_OVERFLOW_COMPARISON);
8775
8776 return fold_build2_loc (loc, code, type, offset0, offset1);
8777 }
8778 }
8779 /* For non-equal bases we can simplify if they are addresses
8780 of declarations with different addresses. */
8781 else if (indirect_base0 && indirect_base1
8782 /* We know that !operand_equal_p (base0, base1, 0)
8783 because the if condition was false. But make
8784 sure two decls are not the same. */
8785 && base0 != base1
8786 && TREE_CODE (arg0) == ADDR_EXPR
8787 && TREE_CODE (arg1) == ADDR_EXPR
8788 && DECL_P (base0)
8789 && DECL_P (base1)
8790 /* Watch for aliases. */
8791 && (!decl_in_symtab_p (base0)
8792 || !decl_in_symtab_p (base1)
8793 || !symtab_node::get_create (base0)->equal_address_to
8794 (symtab_node::get_create (base1))))
8795 {
8796 if (code == EQ_EXPR)
8797 return omit_two_operands_loc (loc, type, boolean_false_node,
8798 arg0, arg1);
8799 else if (code == NE_EXPR)
8800 return omit_two_operands_loc (loc, type, boolean_true_node,
8801 arg0, arg1);
8802 }
8803 /* For equal offsets we can simplify to a comparison of the
8804 base addresses. */
8805 else if (bitpos0 == bitpos1
8806 && (indirect_base0
8807 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8808 && (indirect_base1
8809 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8810 && ((offset0 == offset1)
8811 || (offset0 && offset1
8812 && operand_equal_p (offset0, offset1, 0))))
8813 {
8814 if (indirect_base0)
8815 base0 = build_fold_addr_expr_loc (loc, base0);
8816 if (indirect_base1)
8817 base1 = build_fold_addr_expr_loc (loc, base1);
8818 return fold_build2_loc (loc, code, type, base0, base1);
8819 }
8820 }
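 /* For example, "&a[1] < &a[3]" decomposes to the common base "a" with
    constant bit positions 1 * s and 3 * s, s being the element size in
    bits, and folds to true; "&x == &y" for two distinct declarations
    whose addresses provably differ folds to false. */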
8821
8822 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8823 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8824 the resulting offset is smaller in absolute value than the
8825 original one and has the same sign. */
8826 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8827 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8828 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8829 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8830 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8831 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8832 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8833 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8834 {
8835 tree const1 = TREE_OPERAND (arg0, 1);
8836 tree const2 = TREE_OPERAND (arg1, 1);
8837 tree variable1 = TREE_OPERAND (arg0, 0);
8838 tree variable2 = TREE_OPERAND (arg1, 0);
8839 tree cst;
8840 const char * const warnmsg = G_("assuming signed overflow does not "
8841 "occur when combining constants around "
8842 "a comparison");
8843
8844 /* Put the constant on the side where it doesn't overflow and has
8845 lower absolute value and the same sign as before. */
8846 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8847 ? MINUS_EXPR : PLUS_EXPR,
8848 const2, const1);
8849 if (!TREE_OVERFLOW (cst)
8850 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8851 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8852 {
8853 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8854 return fold_build2_loc (loc, code, type,
8855 variable1,
8856 fold_build2_loc (loc, TREE_CODE (arg1),
8857 TREE_TYPE (arg1),
8858 variable2, cst));
8859 }
8860
8861 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8862 ? MINUS_EXPR : PLUS_EXPR,
8863 const1, const2);
8864 if (!TREE_OVERFLOW (cst)
8865 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8866 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8867 {
8868 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8869 return fold_build2_loc (loc, code, type,
8870 fold_build2_loc (loc, TREE_CODE (arg0),
8871 TREE_TYPE (arg0),
8872 variable1, cst),
8873 variable2);
8874 }
8875 }
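 /* For example, with signed x and y, "x + 5 < y + 10" combines the
    constants into 10 - 5 = 5 and becomes "x < y + 5"; the new constant
    is smaller in absolute value than the old one and has the same sign,
    so no new overflow can be introduced. */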
8876
8877 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8878 if (tem)
8879 return tem;
8880
8881 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8882 && CONVERT_EXPR_P (arg0))
8883 {
8884 /* If we are widening one operand of an integer comparison,
8885 see if the other operand is similarly being widened. Perhaps we
8886 can do the comparison in the narrower type. */
8887 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8888 if (tem)
8889 return tem;
8890
8891 /* Or if we are changing signedness. */
8892 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8893 if (tem)
8894 return tem;
8895 }
8896
8897 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8898 constant, we can simplify it. */
8899 if (TREE_CODE (arg1) == INTEGER_CST
8900 && (TREE_CODE (arg0) == MIN_EXPR
8901 || TREE_CODE (arg0) == MAX_EXPR)
8902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8903 {
8904 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8905 if (tem)
8906 return tem;
8907 }
8908
8909 /* If we are comparing an expression that just has comparisons
8910 of two integer values, arithmetic expressions of those comparisons,
8911 and constants, we can simplify it. There are only three cases
8912 to check: the two values can either be equal, the first can be
8913 greater, or the second can be greater. Fold the expression for
8914 those three values. Since each value must be 0 or 1, we have
8915 eight possibilities, each of which corresponds to the constant 0
8916 or 1 or one of the six possible comparisons.
8917
8918 This handles common cases like (a > b) == 0 but also handles
8919 expressions like ((x > y) - (y > x)) > 0, which supposedly
8920 occur in macroized code. */
8921
8922 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8923 {
8924 tree cval1 = 0, cval2 = 0;
8925 int save_p = 0;
8926
8927 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8928 /* Don't handle degenerate cases here; they should already
8929 have been handled anyway. */
8930 && cval1 != 0 && cval2 != 0
8931 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8932 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8933 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8934 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8935 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8936 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8937 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8938 {
8939 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8940 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8941
8942 /* We can't just pass T to eval_subst in case cval1 or cval2
8943 was the same as ARG1. */
8944
8945 tree high_result
8946 = fold_build2_loc (loc, code, type,
8947 eval_subst (loc, arg0, cval1, maxval,
8948 cval2, minval),
8949 arg1);
8950 tree equal_result
8951 = fold_build2_loc (loc, code, type,
8952 eval_subst (loc, arg0, cval1, maxval,
8953 cval2, maxval),
8954 arg1);
8955 tree low_result
8956 = fold_build2_loc (loc, code, type,
8957 eval_subst (loc, arg0, cval1, minval,
8958 cval2, maxval),
8959 arg1);
8960
8961 /* All three of these results should be 0 or 1. Confirm they are.
8962 Then use those values to select the proper code to use. */
8963
8964 if (TREE_CODE (high_result) == INTEGER_CST
8965 && TREE_CODE (equal_result) == INTEGER_CST
8966 && TREE_CODE (low_result) == INTEGER_CST)
8967 {
8968 /* Make a 3-bit mask with the high-order bit being the
8969 value for `>', the next for '=', and the low for '<'. */
8970 switch ((integer_onep (high_result) * 4)
8971 + (integer_onep (equal_result) * 2)
8972 + integer_onep (low_result))
8973 {
8974 case 0:
8975 /* Always false. */
8976 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8977 case 1:
8978 code = LT_EXPR;
8979 break;
8980 case 2:
8981 code = EQ_EXPR;
8982 break;
8983 case 3:
8984 code = LE_EXPR;
8985 break;
8986 case 4:
8987 code = GT_EXPR;
8988 break;
8989 case 5:
8990 code = NE_EXPR;
8991 break;
8992 case 6:
8993 code = GE_EXPR;
8994 break;
8995 case 7:
8996 /* Always true. */
8997 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8998 }
8999
9000 if (save_p)
9001 {
9002 tem = save_expr (build2 (code, type, cval1, cval2));
9003 SET_EXPR_LOCATION (tem, loc);
9004 return tem;
9005 }
9006 return fold_build2_loc (loc, code, type, cval1, cval2);
9007 }
9008 }
9009 }
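 /* For example, for "(a > b) == 0", substituting (max,min), (max,max)
    and (min,max) for (a,b) yields high_result = 0, equal_result = 1 and
    low_result = 1, i.e. the mask 3, which selects LE_EXPR, so the whole
    expression folds to "a <= b". */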
9010
9011 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9012 into a single range test. */
9013 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9014 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9015 && TREE_CODE (arg1) == INTEGER_CST
9016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9017 && !integer_zerop (TREE_OPERAND (arg0, 1))
9018 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9019 && !TREE_OVERFLOW (arg1))
9020 {
9021 tem = fold_div_compare (loc, code, type, arg0, arg1);
9022 if (tem != NULL_TREE)
9023 return tem;
9024 }
9025
9026 return NULL_TREE;
9027 }
9028
9029
9030 /* Subroutine of fold_binary. Optimize complex multiplications of the
9031 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9032 argument EXPR represents the expression "z" of type TYPE. */
9033
9034 static tree
9035 fold_mult_zconjz (location_t loc, tree type, tree expr)
9036 {
9037 tree itype = TREE_TYPE (type);
9038 tree rpart, ipart, tem;
9039
9040 if (TREE_CODE (expr) == COMPLEX_EXPR)
9041 {
9042 rpart = TREE_OPERAND (expr, 0);
9043 ipart = TREE_OPERAND (expr, 1);
9044 }
9045 else if (TREE_CODE (expr) == COMPLEX_CST)
9046 {
9047 rpart = TREE_REALPART (expr);
9048 ipart = TREE_IMAGPART (expr);
9049 }
9050 else
9051 {
9052 expr = save_expr (expr);
9053 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9054 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9055 }
9056
9057 rpart = save_expr (rpart);
9058 ipart = save_expr (ipart);
9059 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9060 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9061 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9062 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9063 build_zero_cst (itype));
9064 }
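 /* For example, with z = a + b*i this computes
    (a + b*i) * (a - b*i) = a*a + b*b and returns the complex value
    __complex__ (a*a + b*b, 0). */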
9065
9066
9067 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9068 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9069
9070 static bool
9071 vec_cst_ctor_to_array (tree arg, tree *elts)
9072 {
9073 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9074
9075 if (TREE_CODE (arg) == VECTOR_CST)
9076 {
9077 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9078 elts[i] = VECTOR_CST_ELT (arg, i);
9079 }
9080 else if (TREE_CODE (arg) == CONSTRUCTOR)
9081 {
9082 constructor_elt *elt;
9083
9084 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9085 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9086 return false;
9087 else
9088 elts[i] = elt->value;
9089 }
9090 else
9091 return false;
9092 for (; i < nelts; i++)
9093 elts[i]
9094 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9095 return true;
9096 }
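 /* For example, the CONSTRUCTOR { 1, x } for a four-element vector
    fills elts[0] and elts[1] and pads elts[2] and elts[3] with zero;
    a CONSTRUCTOR containing a vector-typed element is rejected. */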
9097
9098 /* Attempt to fold a vector permutation of the ARG0 and ARG1 vectors
9099 using the selector SEL. Return the folded VECTOR_CST or CONSTRUCTOR
9100 if successful, NULL_TREE otherwise. */
9101
9102 static tree
9103 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9104 {
9105 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9106 tree *elts;
9107 bool need_ctor = false;
9108
9109 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9110 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9111 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9112 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9113 return NULL_TREE;
9114
9115 elts = XALLOCAVEC (tree, nelts * 3);
9116 if (!vec_cst_ctor_to_array (arg0, elts)
9117 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9118 return NULL_TREE;
9119
9120 for (i = 0; i < nelts; i++)
9121 {
9122 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9123 need_ctor = true;
9124 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9125 }
9126
9127 if (need_ctor)
9128 {
9129 vec<constructor_elt, va_gc> *v;
9130 vec_alloc (v, nelts);
9131 for (i = 0; i < nelts; i++)
9132 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9133 return build_constructor (type, v);
9134 }
9135 else
9136 return build_vector (type, &elts[2 * nelts]);
9137 }
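 /* For example, with arg0 = { 1, 2, 3, 4 }, arg1 = { 5, 6, 7, 8 } and
    sel = { 0, 5, 2, 7 }, selector values 0-3 pick from ARG0 and 4-7
    from ARG1, so the result is { 1, 6, 3, 8 }. */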
9138
9139 /* Try to fold a pointer difference of type TYPE between two address
9140 expressions of array references AREF0 and AREF1 using location LOC.
9141 Return a simplified expression for the difference or NULL_TREE. */
9142
9143 static tree
9144 fold_addr_of_array_ref_difference (location_t loc, tree type,
9145 tree aref0, tree aref1)
9146 {
9147 tree base0 = TREE_OPERAND (aref0, 0);
9148 tree base1 = TREE_OPERAND (aref1, 0);
9149 tree base_offset = build_int_cst (type, 0);
9150
9151 /* If the bases are array references as well, recurse. If the bases
9152 are pointer indirections compute the difference of the pointers.
9153 If the bases are equal, we are set. */
9154 if ((TREE_CODE (base0) == ARRAY_REF
9155 && TREE_CODE (base1) == ARRAY_REF
9156 && (base_offset
9157 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9158 || (INDIRECT_REF_P (base0)
9159 && INDIRECT_REF_P (base1)
9160 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9161 TREE_OPERAND (base0, 0),
9162 TREE_OPERAND (base1, 0))))
9163 || operand_equal_p (base0, base1, 0))
9164 {
9165 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9166 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9167 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9168 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9169 return fold_build2_loc (loc, PLUS_EXPR, type,
9170 base_offset,
9171 fold_build2_loc (loc, MULT_EXPR, type,
9172 diff, esz));
9173 }
9174 return NULL_TREE;
9175 }
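 /* For example, "&a[i] - &a[j]" has equal bases, so the byte difference
    is (i - j) * sizeof (a[0]); for nested references such as
    "&a[i][k] - &a[j][l]" the difference of the outer bases a[i] and
    a[j] is computed by the recursive call. */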
9176
9177 /* If the real or vector real constant CST of type TYPE has an exact
9178 inverse, return it, else return NULL. */
9179
9180 tree
9181 exact_inverse (tree type, tree cst)
9182 {
9183 REAL_VALUE_TYPE r;
9184 tree unit_type, *elts;
9185 machine_mode mode;
9186 unsigned vec_nelts, i;
9187
9188 switch (TREE_CODE (cst))
9189 {
9190 case REAL_CST:
9191 r = TREE_REAL_CST (cst);
9192
9193 if (exact_real_inverse (TYPE_MODE (type), &r))
9194 return build_real (type, r);
9195
9196 return NULL_TREE;
9197
9198 case VECTOR_CST:
9199 vec_nelts = VECTOR_CST_NELTS (cst);
9200 elts = XALLOCAVEC (tree, vec_nelts);
9201 unit_type = TREE_TYPE (type);
9202 mode = TYPE_MODE (unit_type);
9203
9204 for (i = 0; i < vec_nelts; i++)
9205 {
9206 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9207 if (!exact_real_inverse (mode, &r))
9208 return NULL_TREE;
9209 elts[i] = build_real (unit_type, r);
9210 }
9211
9212 return build_vector (type, elts);
9213
9214 default:
9215 return NULL_TREE;
9216 }
9217 }
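 /* For example, 4.0 yields 0.25, which is exactly representable in
    binary floating point, while 3.0 yields NULL_TREE because 1/3 is
    not; for a VECTOR_CST such as { 2.0, 8.0 } every element needs an
    exact inverse, here giving { 0.5, 0.125 }. */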
9218
9219 /* Mask out the tz least significant bits of X of type TYPE where
9220 tz is the number of trailing zeroes in Y. */
9221 static wide_int
9222 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9223 {
9224 int tz = wi::ctz (y);
9225 if (tz > 0)
9226 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9227 return x;
9228 }
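 /* For example, with y = 12 (binary 1100, two trailing zeroes) and
    x = 0b10111, the two least significant bits of x are cleared and
    the result is 0b10100. */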
9229
9230 /* Return true when T is an expression of integral or pointer type
9231 that is known to be nonzero. Similar logic is present in
9232 nonzero_address in rtlanal.h.
9233
9234 If the return value is based on the assumption that signed overflow
9235 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9236 change *STRICT_OVERFLOW_P. */
9237
9238 static bool
9239 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9240 {
9241 tree type = TREE_TYPE (t);
9242 enum tree_code code;
9243
9244 /* Doing something useful for floating point would need more work. */
9245 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9246 return false;
9247
9248 code = TREE_CODE (t);
9249 switch (TREE_CODE_CLASS (code))
9250 {
9251 case tcc_unary:
9252 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9253 strict_overflow_p);
9254 case tcc_binary:
9255 case tcc_comparison:
9256 return tree_binary_nonzero_warnv_p (code, type,
9257 TREE_OPERAND (t, 0),
9258 TREE_OPERAND (t, 1),
9259 strict_overflow_p);
9260 case tcc_constant:
9261 case tcc_declaration:
9262 case tcc_reference:
9263 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9264
9265 default:
9266 break;
9267 }
9268
9269 switch (code)
9270 {
9271 case TRUTH_NOT_EXPR:
9272 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9273 strict_overflow_p);
9274
9275 case TRUTH_AND_EXPR:
9276 case TRUTH_OR_EXPR:
9277 case TRUTH_XOR_EXPR:
9278 return tree_binary_nonzero_warnv_p (code, type,
9279 TREE_OPERAND (t, 0),
9280 TREE_OPERAND (t, 1),
9281 strict_overflow_p);
9282
9283 case COND_EXPR:
9284 case CONSTRUCTOR:
9285 case OBJ_TYPE_REF:
9286 case ASSERT_EXPR:
9287 case ADDR_EXPR:
9288 case WITH_SIZE_EXPR:
9289 case SSA_NAME:
9290 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9291
9292 case COMPOUND_EXPR:
9293 case MODIFY_EXPR:
9294 case BIND_EXPR:
9295 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9296 strict_overflow_p);
9297
9298 case SAVE_EXPR:
9299 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9300 strict_overflow_p);
9301
9302 case CALL_EXPR:
9303 {
9304 tree fndecl = get_callee_fndecl (t);
9305 if (!fndecl) return false;
9306 if (flag_delete_null_pointer_checks && !flag_check_new
9307 && DECL_IS_OPERATOR_NEW (fndecl)
9308 && !TREE_NOTHROW (fndecl))
9309 return true;
9310 if (flag_delete_null_pointer_checks
9311 && lookup_attribute ("returns_nonnull",
9312 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9313 return true;
9314 return alloca_call_p (t);
9315 }
9316
9317 default:
9318 break;
9319 }
9320 return false;
9321 }
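 /* For example, a call to "operator new" that is not nothrow counts as
    nonzero under -fdelete-null-pointer-checks without -fcheck-new, as
    does a call to any function carrying the "returns_nonnull"
    attribute. */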
9322
9323 /* Return true when T is an address and is known to be nonzero.
9324 Handle warnings about undefined signed overflow. */
9325
9326 static bool
9327 tree_expr_nonzero_p (tree t)
9328 {
9329 bool ret, strict_overflow_p;
9330
9331 strict_overflow_p = false;
9332 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9333 if (strict_overflow_p)
9334 fold_overflow_warning (("assuming signed overflow does not occur when "
9335 "determining that expression is always "
9336 "non-zero"),
9337 WARN_STRICT_OVERFLOW_MISC);
9338 return ret;
9339 }
9340
9341 /* Fold a binary expression of code CODE and type TYPE with operands
9342 OP0 and OP1. LOC is the location of the resulting expression.
9343 Return the folded expression if folding is successful. Otherwise,
9344 return NULL_TREE. */
9345
9346 tree
9347 fold_binary_loc (location_t loc,
9348 enum tree_code code, tree type, tree op0, tree op1)
9349 {
9350 enum tree_code_class kind = TREE_CODE_CLASS (code);
9351 tree arg0, arg1, tem;
9352 tree t1 = NULL_TREE;
9353 bool strict_overflow_p;
9354 unsigned int prec;
9355
9356 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9357 && TREE_CODE_LENGTH (code) == 2
9358 && op0 != NULL_TREE
9359 && op1 != NULL_TREE);
9360
9361 arg0 = op0;
9362 arg1 = op1;
9363
9364 /* Strip any conversions that don't change the mode. This is
9365 safe for every expression, except for a comparison expression
9366 because its signedness is derived from its operands. So, in
9367 the latter case, only strip conversions that don't change the
9368 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9369 preserved.
9370
9371 Note that this is done as an internal manipulation within the
9372 constant folder, in order to find the simplest representation
9373 of the arguments so that their form can be studied. In any
9374 case, the appropriate type conversions should be put back in
9375 the tree that will get out of the constant folder. */
9376
9377 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9378 {
9379 STRIP_SIGN_NOPS (arg0);
9380 STRIP_SIGN_NOPS (arg1);
9381 }
9382 else
9383 {
9384 STRIP_NOPS (arg0);
9385 STRIP_NOPS (arg1);
9386 }
9387
9388 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9389 constant but we can't do arithmetic on them. */
9390 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9391 {
9392 tem = const_binop (code, type, arg0, arg1);
9393 if (tem != NULL_TREE)
9394 {
9395 if (TREE_TYPE (tem) != type)
9396 tem = fold_convert_loc (loc, type, tem);
9397 return tem;
9398 }
9399 }
9400
9401 /* If this is a commutative operation, and ARG0 is a constant, move it
9402 to ARG1 to reduce the number of tests below. */
9403 if (commutative_tree_code (code)
9404 && tree_swap_operands_p (arg0, arg1, true))
9405 return fold_build2_loc (loc, code, type, op1, op0);
9406
9407 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9408 to ARG1 to reduce the number of tests below. */
9409 if (kind == tcc_comparison
9410 && tree_swap_operands_p (arg0, arg1, true))
9411 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9412
9413 tem = generic_simplify (loc, code, type, op0, op1);
9414 if (tem)
9415 return tem;
9416
9417 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9418
9419 First check for cases where an arithmetic operation is applied to a
9420 compound, conditional, or comparison operation. Push the arithmetic
9421 operation inside the compound or conditional to see if any folding
9422 can then be done. Convert comparison to conditional for this purpose.
9423 This also optimizes non-constant cases that used to be done in
9424 expand_expr.
9425
9426 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9427 where one of the operands is a comparison and the other is a comparison, a
9428 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9429 code below would make the expression more complex. Change it to a
9430 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9431 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9432
9433 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9434 || code == EQ_EXPR || code == NE_EXPR)
9435 && TREE_CODE (type) != VECTOR_TYPE
9436 && ((truth_value_p (TREE_CODE (arg0))
9437 && (truth_value_p (TREE_CODE (arg1))
9438 || (TREE_CODE (arg1) == BIT_AND_EXPR
9439 && integer_onep (TREE_OPERAND (arg1, 1)))))
9440 || (truth_value_p (TREE_CODE (arg1))
9441 && (truth_value_p (TREE_CODE (arg0))
9442 || (TREE_CODE (arg0) == BIT_AND_EXPR
9443 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9444 {
9445 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9446 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9447 : TRUTH_XOR_EXPR,
9448 boolean_type_node,
9449 fold_convert_loc (loc, boolean_type_node, arg0),
9450 fold_convert_loc (loc, boolean_type_node, arg1));
9451
9452 if (code == EQ_EXPR)
9453 tem = invert_truthvalue_loc (loc, tem);
9454
9455 return fold_convert_loc (loc, type, tem);
9456 }
9457
9458 if (TREE_CODE_CLASS (code) == tcc_binary
9459 || TREE_CODE_CLASS (code) == tcc_comparison)
9460 {
9461 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9462 {
9463 tem = fold_build2_loc (loc, code, type,
9464 fold_convert_loc (loc, TREE_TYPE (op0),
9465 TREE_OPERAND (arg0, 1)), op1);
9466 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9467 tem);
9468 }
9469 if (TREE_CODE (arg1) == COMPOUND_EXPR
9470 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9471 {
9472 tem = fold_build2_loc (loc, code, type, op0,
9473 fold_convert_loc (loc, TREE_TYPE (op1),
9474 TREE_OPERAND (arg1, 1)));
9475 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9476 tem);
9477 }
9478
9479 if (TREE_CODE (arg0) == COND_EXPR
9480 || TREE_CODE (arg0) == VEC_COND_EXPR
9481 || COMPARISON_CLASS_P (arg0))
9482 {
9483 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9484 arg0, arg1,
9485 /*cond_first_p=*/1);
9486 if (tem != NULL_TREE)
9487 return tem;
9488 }
9489
9490 if (TREE_CODE (arg1) == COND_EXPR
9491 || TREE_CODE (arg1) == VEC_COND_EXPR
9492 || COMPARISON_CLASS_P (arg1))
9493 {
9494 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9495 arg1, arg0,
9496 /*cond_first_p=*/0);
9497 if (tem != NULL_TREE)
9498 return tem;
9499 }
9500 }
9501
9502 switch (code)
9503 {
9504 case MEM_REF:
9505 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9506 if (TREE_CODE (arg0) == ADDR_EXPR
9507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9508 {
9509 tree iref = TREE_OPERAND (arg0, 0);
9510 return fold_build2 (MEM_REF, type,
9511 TREE_OPERAND (iref, 0),
9512 int_const_binop (PLUS_EXPR, arg1,
9513 TREE_OPERAND (iref, 1)));
9514 }
9515
9516 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9517 if (TREE_CODE (arg0) == ADDR_EXPR
9518 && handled_component_p (TREE_OPERAND (arg0, 0)))
9519 {
9520 tree base;
9521 HOST_WIDE_INT coffset;
9522 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9523 &coffset);
9524 if (!base)
9525 return NULL_TREE;
9526 return fold_build2 (MEM_REF, type,
9527 build_fold_addr_expr (base),
9528 int_const_binop (PLUS_EXPR, arg1,
9529 size_int (coffset)));
9530 }
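 /* For example, "MEM[&MEM[p, 4], 8]" folds to "MEM[p, 12]", and
    "MEM[&s.f, 8]" with field f at byte offset 16 within s folds to
    "MEM[&s, 24]". */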
9531
9532 return NULL_TREE;
9533
9534 case POINTER_PLUS_EXPR:
9535 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9536 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9537 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9538 return fold_convert_loc (loc, type,
9539 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9540 fold_convert_loc (loc, sizetype,
9541 arg1),
9542 fold_convert_loc (loc, sizetype,
9543 arg0)));
9544
9545 return NULL_TREE;
9546
9547 case PLUS_EXPR:
9548 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9549 {
9550 /* X + (X / CST) * -CST is X % CST. */
9551 if (TREE_CODE (arg1) == MULT_EXPR
9552 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9553 && operand_equal_p (arg0,
9554 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9555 {
9556 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9557 tree cst1 = TREE_OPERAND (arg1, 1);
9558 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9559 cst1, cst0);
9560 if (sum && integer_zerop (sum))
9561 return fold_convert_loc (loc, type,
9562 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9563 TREE_TYPE (arg0), arg0,
9564 cst0));
9565 }
9566 }
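 /* For example, in "x + (x / 16) * -16" the constants sum to zero, so
    the expression folds to "x % 16", the remainder matching the
    truncating division. */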
9567
9568 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9569 one. Make sure the type is not saturating and has the signedness of
9570 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9571 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9572 if ((TREE_CODE (arg0) == MULT_EXPR
9573 || TREE_CODE (arg1) == MULT_EXPR)
9574 && !TYPE_SATURATING (type)
9575 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9576 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9577 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9578 {
9579 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9580 if (tem)
9581 return tem;
9582 }
9583
9584 if (! FLOAT_TYPE_P (type))
9585 {
9586 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9587 with a constant, and the two constants have no bits in common,
9588 we should treat this as a BIT_IOR_EXPR since this may produce more
9589 simplifications. */
9590 if (TREE_CODE (arg0) == BIT_AND_EXPR
9591 && TREE_CODE (arg1) == BIT_AND_EXPR
9592 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9593 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9594 && wi::bit_and (TREE_OPERAND (arg0, 1),
9595 TREE_OPERAND (arg1, 1)) == 0)
9596 {
9597 code = BIT_IOR_EXPR;
9598 goto bit_ior;
9599 }
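 /* For example, in "(x & 0xF0) + (y & 0x0F)" the masks share no bits,
    so no carry can propagate between the operands and the addition is
    rewritten as "(x & 0xF0) | (y & 0x0F)". */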
9600
9601 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9602 (plus (plus (mult) (mult)) (foo)) so that we can
9603 take advantage of the factoring cases below. */
9604 if (ANY_INTEGRAL_TYPE_P (type)
9605 && TYPE_OVERFLOW_WRAPS (type)
9606 && (((TREE_CODE (arg0) == PLUS_EXPR
9607 || TREE_CODE (arg0) == MINUS_EXPR)
9608 && TREE_CODE (arg1) == MULT_EXPR)
9609 || ((TREE_CODE (arg1) == PLUS_EXPR
9610 || TREE_CODE (arg1) == MINUS_EXPR)
9611 && TREE_CODE (arg0) == MULT_EXPR)))
9612 {
9613 tree parg0, parg1, parg, marg;
9614 enum tree_code pcode;
9615
9616 if (TREE_CODE (arg1) == MULT_EXPR)
9617 parg = arg0, marg = arg1;
9618 else
9619 parg = arg1, marg = arg0;
9620 pcode = TREE_CODE (parg);
9621 parg0 = TREE_OPERAND (parg, 0);
9622 parg1 = TREE_OPERAND (parg, 1);
9623 STRIP_NOPS (parg0);
9624 STRIP_NOPS (parg1);
9625
9626 if (TREE_CODE (parg0) == MULT_EXPR
9627 && TREE_CODE (parg1) != MULT_EXPR)
9628 return fold_build2_loc (loc, pcode, type,
9629 fold_build2_loc (loc, PLUS_EXPR, type,
9630 fold_convert_loc (loc, type,
9631 parg0),
9632 fold_convert_loc (loc, type,
9633 marg)),
9634 fold_convert_loc (loc, type, parg1));
9635 if (TREE_CODE (parg0) != MULT_EXPR
9636 && TREE_CODE (parg1) == MULT_EXPR)
9637 return
9638 fold_build2_loc (loc, PLUS_EXPR, type,
9639 fold_convert_loc (loc, type, parg0),
9640 fold_build2_loc (loc, pcode, type,
9641 fold_convert_loc (loc, type, marg),
9642 fold_convert_loc (loc, type,
9643 parg1)));
9644 }
9645 }
9646 else
9647 {
9648 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9649 to __complex__ ( x, y ). This is not the same for SNaNs or
9650 if signed zeros are involved. */
9651 if (!HONOR_SNANS (element_mode (arg0))
9652 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9653 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9654 {
9655 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9656 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9657 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9658 bool arg0rz = false, arg0iz = false;
9659 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9660 || (arg0i && (arg0iz = real_zerop (arg0i))))
9661 {
9662 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9663 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9664 if (arg0rz && arg1i && real_zerop (arg1i))
9665 {
9666 tree rp = arg1r ? arg1r
9667 : build1 (REALPART_EXPR, rtype, arg1);
9668 tree ip = arg0i ? arg0i
9669 : build1 (IMAGPART_EXPR, rtype, arg0);
9670 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9671 }
9672 else if (arg0iz && arg1r && real_zerop (arg1r))
9673 {
9674 tree rp = arg0r ? arg0r
9675 : build1 (REALPART_EXPR, rtype, arg0);
9676 tree ip = arg1i ? arg1i
9677 : build1 (IMAGPART_EXPR, rtype, arg1);
9678 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9679 }
9680 }
9681 }
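 /* For example, "__complex__ (x, 0) + __complex__ (0, y)" folds to
    "__complex__ (x, y)"; with SNaNs or signed zeros the dropped
    additions would be observable, hence the guards above. */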
9682
9683 if (flag_unsafe_math_optimizations
9684 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9685 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9686 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9687 return tem;
9688
9689 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9690 We associate floats only if the user has specified
9691 -fassociative-math. */
9692 if (flag_associative_math
9693 && TREE_CODE (arg1) == PLUS_EXPR
9694 && TREE_CODE (arg0) != MULT_EXPR)
9695 {
9696 tree tree10 = TREE_OPERAND (arg1, 0);
9697 tree tree11 = TREE_OPERAND (arg1, 1);
9698 if (TREE_CODE (tree11) == MULT_EXPR
9699 && TREE_CODE (tree10) == MULT_EXPR)
9700 {
9701 tree tree0;
9702 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9703 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9704 }
9705 }
9706 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9707 We associate floats only if the user has specified
9708 -fassociative-math. */
9709 if (flag_associative_math
9710 && TREE_CODE (arg0) == PLUS_EXPR
9711 && TREE_CODE (arg1) != MULT_EXPR)
9712 {
9713 tree tree00 = TREE_OPERAND (arg0, 0);
9714 tree tree01 = TREE_OPERAND (arg0, 1);
9715 if (TREE_CODE (tree01) == MULT_EXPR
9716 && TREE_CODE (tree00) == MULT_EXPR)
9717 {
9718 tree tree0;
9719 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9720 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9721 }
9722 }
9723 }
9724
9725 bit_rotate:
9726 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9727 is a rotate of A by C1 bits. */
9728 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9729 is a rotate of A by B bits. */
9730 {
9731 enum tree_code code0, code1;
9732 tree rtype;
9733 code0 = TREE_CODE (arg0);
9734 code1 = TREE_CODE (arg1);
9735 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9736 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9737 && operand_equal_p (TREE_OPERAND (arg0, 0),
9738 TREE_OPERAND (arg1, 0), 0)
9739 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9740 TYPE_UNSIGNED (rtype))
9741 /* Only create rotates in complete modes. Other cases are not
9742 expanded properly. */
9743 && (element_precision (rtype)
9744 == element_precision (TYPE_MODE (rtype))))
9745 {
9746 tree tree01, tree11;
9747 enum tree_code code01, code11;
9748
9749 tree01 = TREE_OPERAND (arg0, 1);
9750 tree11 = TREE_OPERAND (arg1, 1);
9751 STRIP_NOPS (tree01);
9752 STRIP_NOPS (tree11);
9753 code01 = TREE_CODE (tree01);
9754 code11 = TREE_CODE (tree11);
9755 if (code01 == INTEGER_CST
9756 && code11 == INTEGER_CST
9757 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9758 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9759 {
9760 tem = build2_loc (loc, LROTATE_EXPR,
9761 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9762 TREE_OPERAND (arg0, 0),
9763 code0 == LSHIFT_EXPR
9764 ? TREE_OPERAND (arg0, 1)
9765 : TREE_OPERAND (arg1, 1));
9766 return fold_convert_loc (loc, type, tem);
9767 }
9768 else if (code11 == MINUS_EXPR)
9769 {
9770 tree tree110, tree111;
9771 tree110 = TREE_OPERAND (tree11, 0);
9772 tree111 = TREE_OPERAND (tree11, 1);
9773 STRIP_NOPS (tree110);
9774 STRIP_NOPS (tree111);
9775 if (TREE_CODE (tree110) == INTEGER_CST
9776 && 0 == compare_tree_int (tree110,
9777 element_precision
9778 (TREE_TYPE (TREE_OPERAND
9779 (arg0, 0))))
9780 && operand_equal_p (tree01, tree111, 0))
9781 return
9782 fold_convert_loc (loc, type,
9783 build2 ((code0 == LSHIFT_EXPR
9784 ? LROTATE_EXPR
9785 : RROTATE_EXPR),
9786 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9787 TREE_OPERAND (arg0, 0),
9788 TREE_OPERAND (arg0, 1)));
9789 }
9790 else if (code01 == MINUS_EXPR)
9791 {
9792 tree tree010, tree011;
9793 tree010 = TREE_OPERAND (tree01, 0);
9794 tree011 = TREE_OPERAND (tree01, 1);
9795 STRIP_NOPS (tree010);
9796 STRIP_NOPS (tree011);
9797 if (TREE_CODE (tree010) == INTEGER_CST
9798 && 0 == compare_tree_int (tree010,
9799 element_precision
9800 (TREE_TYPE (TREE_OPERAND
9801 (arg0, 0))))
9802 && operand_equal_p (tree11, tree011, 0))
9803 return fold_convert_loc
9804 (loc, type,
9805 build2 ((code0 != LSHIFT_EXPR
9806 ? LROTATE_EXPR
9807 : RROTATE_EXPR),
9808 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9809 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9810 }
9811 }
9812 }
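 /* For example, for unsigned 32-bit x, "(x << 3) + (x >> 29)" matches
    the constant case (3 + 29 == 32) and becomes a left rotate of x by
    3, while "(x << b) + (x >> (32 - b))" is caught by the MINUS_EXPR
    cases. */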
9813
9814 associate:
9815 /* In most languages, we can't associate operations on floats through
9816 parentheses. Rather than remember where the parentheses were, we
9817 don't associate floats at all, unless the user has specified
9818 -fassociative-math.
9819 And, we need to make sure type is not saturating. */
9820
9821 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9822 && !TYPE_SATURATING (type))
9823 {
9824 tree var0, con0, lit0, minus_lit0;
9825 tree var1, con1, lit1, minus_lit1;
9826 tree atype = type;
9827 bool ok = true;
9828
9829 /* Split both trees into variables, constants, and literals. Then
9830 associate each group together, the constants with literals,
9831 then the result with variables. This increases the chances of
9832 literals being recombined later and of generating relocatable
9833 expressions for the sum of a constant and literal. */
9834 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9835 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9836 code == MINUS_EXPR);
9837
9838 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9839 if (code == MINUS_EXPR)
9840 code = PLUS_EXPR;
9841
9842 /* With undefined overflow prefer doing association in a type
9843 which wraps on overflow, if that is one of the operand types. */
9844 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9845 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9846 {
9847 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9848 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9849 atype = TREE_TYPE (arg0);
9850 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9851 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9852 atype = TREE_TYPE (arg1);
9853 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9854 }
9855
9856 /* With undefined overflow we can only associate constants with one
9857 variable, and constants whose association doesn't overflow. */
9858 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9859 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9860 {
9861 if (var0 && var1)
9862 {
9863 tree tmp0 = var0;
9864 tree tmp1 = var1;
9865
9866 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9867 tmp0 = TREE_OPERAND (tmp0, 0);
9868 if (CONVERT_EXPR_P (tmp0)
9869 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9870 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9871 <= TYPE_PRECISION (atype)))
9872 tmp0 = TREE_OPERAND (tmp0, 0);
9873 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9874 tmp1 = TREE_OPERAND (tmp1, 0);
9875 if (CONVERT_EXPR_P (tmp1)
9876 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9877 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9878 <= TYPE_PRECISION (atype)))
9879 tmp1 = TREE_OPERAND (tmp1, 0);
9880 /* The only case we can still associate with two variables
9881 is if they are the same, modulo negation and bit-pattern
9882 preserving conversions. */
9883 if (!operand_equal_p (tmp0, tmp1, 0))
9884 ok = false;
9885 }
9886 }
9887
9888 /* Only do something if we found more than two objects. Otherwise,
9889 nothing has changed and we risk infinite recursion. */
9890 if (ok
9891 && (2 < ((var0 != 0) + (var1 != 0)
9892 + (con0 != 0) + (con1 != 0)
9893 + (lit0 != 0) + (lit1 != 0)
9894 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9895 {
9896 bool any_overflows = false;
9897 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9898 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9899 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9900 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9901 var0 = associate_trees (loc, var0, var1, code, atype);
9902 con0 = associate_trees (loc, con0, con1, code, atype);
9903 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9904 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9905 code, atype);
9906
9907 /* Preserve the MINUS_EXPR if the negative part of the literal is
9908 greater than the positive part. Otherwise, the multiplicative
9909 folding code (i.e. extract_muldiv) may be fooled in case
9910 unsigned constants are subtracted, like in the following
9911 example: ((X*2 + 4) - 8U)/2. */
9912 if (minus_lit0 && lit0)
9913 {
9914 if (TREE_CODE (lit0) == INTEGER_CST
9915 && TREE_CODE (minus_lit0) == INTEGER_CST
9916 && tree_int_cst_lt (lit0, minus_lit0))
9917 {
9918 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9919 MINUS_EXPR, atype);
9920 lit0 = 0;
9921 }
9922 else
9923 {
9924 lit0 = associate_trees (loc, lit0, minus_lit0,
9925 MINUS_EXPR, atype);
9926 minus_lit0 = 0;
9927 }
9928 }
9929
9930 /* Don't introduce overflows through reassociation. */
9931 if (!any_overflows
9932 && ((lit0 && TREE_OVERFLOW_P (lit0))
9933 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9934 return NULL_TREE;
9935
9936 if (minus_lit0)
9937 {
9938 if (con0 == 0)
9939 return
9940 fold_convert_loc (loc, type,
9941 associate_trees (loc, var0, minus_lit0,
9942 MINUS_EXPR, atype));
9943 else
9944 {
9945 con0 = associate_trees (loc, con0, minus_lit0,
9946 MINUS_EXPR, atype);
9947 return
9948 fold_convert_loc (loc, type,
9949 associate_trees (loc, var0, con0,
9950 PLUS_EXPR, atype));
9951 }
9952 }
9953
9954 con0 = associate_trees (loc, con0, lit0, code, atype);
9955 return
9956 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9957 code, atype));
9958 }
9959 }
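 /* For example, with a wrapping unsigned type, "(x + 1) + (y + 2)"
    splits into the variables x, y and the literals 1, 2 and is rebuilt
    as "(x + y) + 3", leaving a single literal for later folds. */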
9960
9961 return NULL_TREE;
9962
9963 case MINUS_EXPR:
9964 /* Pointer simplifications for subtraction, simple reassociations. */
9965 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9966 {
9967 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9968 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9969 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9970 {
9971 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9972 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9973 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9974 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9975 return fold_build2_loc (loc, PLUS_EXPR, type,
9976 fold_build2_loc (loc, MINUS_EXPR, type,
9977 arg00, arg10),
9978 fold_build2_loc (loc, MINUS_EXPR, type,
9979 arg01, arg11));
9980 }
9981 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9982 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9983 {
9984 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9985 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9986 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9987 fold_convert_loc (loc, type, arg1));
9988 if (tmp)
9989 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9990 }
9991 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9992 simplifies. */
9993 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9994 {
9995 tree arg10 = fold_convert_loc (loc, type,
9996 TREE_OPERAND (arg1, 0));
9997 tree arg11 = fold_convert_loc (loc, type,
9998 TREE_OPERAND (arg1, 1));
9999 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10000 fold_convert_loc (loc, type, arg0),
10001 arg10);
10002 if (tmp)
10003 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10004 }
10005 }
10006 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10007 if (TREE_CODE (arg0) == NEGATE_EXPR
10008 && negate_expr_p (arg1)
10009 && reorder_operands_p (arg0, arg1))
10010 return fold_build2_loc (loc, MINUS_EXPR, type,
10011 fold_convert_loc (loc, type,
10012 negate_expr (arg1)),
10013 fold_convert_loc (loc, type,
10014 TREE_OPERAND (arg0, 0)));
10015
10016 if (! FLOAT_TYPE_P (type))
10017 {
10018 /* Fold A - (A & B) into ~B & A. */
10019 if (!TREE_SIDE_EFFECTS (arg0)
10020 && TREE_CODE (arg1) == BIT_AND_EXPR)
10021 {
10022 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10023 {
10024 tree arg10 = fold_convert_loc (loc, type,
10025 TREE_OPERAND (arg1, 0));
10026 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10027 fold_build1_loc (loc, BIT_NOT_EXPR,
10028 type, arg10),
10029 fold_convert_loc (loc, type, arg0));
10030 }
10031 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10032 {
10033 tree arg11 = fold_convert_loc (loc,
10034 type, TREE_OPERAND (arg1, 1));
10035 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10036 fold_build1_loc (loc, BIT_NOT_EXPR,
10037 type, arg11),
10038 fold_convert_loc (loc, type, arg0));
10039 }
10040 }
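 /* For example, in "a - (a & b)" the subtrahend has bits only where a
    does, so the subtraction merely clears those bits, which is exactly
    "~b & a". */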
10041
10042 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10043 any power of 2 minus 1. */
10044 if (TREE_CODE (arg0) == BIT_AND_EXPR
10045 && TREE_CODE (arg1) == BIT_AND_EXPR
10046 && operand_equal_p (TREE_OPERAND (arg0, 0),
10047 TREE_OPERAND (arg1, 0), 0))
10048 {
10049 tree mask0 = TREE_OPERAND (arg0, 1);
10050 tree mask1 = TREE_OPERAND (arg1, 1);
10051 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10052
10053 if (operand_equal_p (tem, mask1, 0))
10054 {
10055 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10056 TREE_OPERAND (arg0, 0), mask1);
10057 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10058 }
10059 }
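 /* For example, with B = 7: writing a = h + l, where l = a & 7 and
    h = a & ~7, the left-hand side is h - l, and "(a ^ 7) - 7" is
    h + (7 - l) - 7, the same value. */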
10060 }
10061
10062 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10063 __complex__ ( x, -y ). This is not the same for SNaNs or if
10064 signed zeros are involved. */
10065 if (!HONOR_SNANS (element_mode (arg0))
10066 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10067 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10068 {
10069 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10070 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10071 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10072 bool arg0rz = false, arg0iz = false;
10073 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10074 || (arg0i && (arg0iz = real_zerop (arg0i))))
10075 {
10076 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10077 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10078 if (arg0rz && arg1i && real_zerop (arg1i))
10079 {
10080 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10081 arg1r ? arg1r
10082 : build1 (REALPART_EXPR, rtype, arg1));
10083 tree ip = arg0i ? arg0i
10084 : build1 (IMAGPART_EXPR, rtype, arg0);
10085 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10086 }
10087 else if (arg0iz && arg1r && real_zerop (arg1r))
10088 {
10089 tree rp = arg0r ? arg0r
10090 : build1 (REALPART_EXPR, rtype, arg0);
10091 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10092 arg1i ? arg1i
10093 : build1 (IMAGPART_EXPR, rtype, arg1));
10094 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10095 }
10096 }
10097 }
10098
10099 /* A - B -> A + (-B) if B is easily negatable. */
10100 if (negate_expr_p (arg1)
10101 && !TYPE_OVERFLOW_SANITIZED (type)
10102 && ((FLOAT_TYPE_P (type)
10103 /* Avoid this transformation if B is a positive REAL_CST. */
10104 && (TREE_CODE (arg1) != REAL_CST
10105 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10106 || INTEGRAL_TYPE_P (type)))
10107 return fold_build2_loc (loc, PLUS_EXPR, type,
10108 fold_convert_loc (loc, type, arg0),
10109 fold_convert_loc (loc, type,
10110 negate_expr (arg1)));
10111
10112 /* Fold &a[i] - &a[j] to i-j. */
10113 if (TREE_CODE (arg0) == ADDR_EXPR
10114 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10115 && TREE_CODE (arg1) == ADDR_EXPR
10116 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10117 {
10118 tree tem = fold_addr_of_array_ref_difference (loc, type,
10119 TREE_OPERAND (arg0, 0),
10120 TREE_OPERAND (arg1, 0));
10121 if (tem)
10122 return tem;
10123 }
10124
10125 if (FLOAT_TYPE_P (type)
10126 && flag_unsafe_math_optimizations
10127 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10128 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10129 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10130 return tem;
10131
10132 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10133 one. Make sure the type is not saturating and has the signedness of
10134 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10135 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10136 if ((TREE_CODE (arg0) == MULT_EXPR
10137 || TREE_CODE (arg1) == MULT_EXPR)
10138 && !TYPE_SATURATING (type)
10139 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10140 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10141 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10142 {
10143 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10144 if (tem)
10145 return tem;
10146 }
10147
10148 goto associate;
10149
10150 case MULT_EXPR:
10151 /* (-A) * (-B) -> A * B */
10152 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10153 return fold_build2_loc (loc, MULT_EXPR, type,
10154 fold_convert_loc (loc, type,
10155 TREE_OPERAND (arg0, 0)),
10156 fold_convert_loc (loc, type,
10157 negate_expr (arg1)));
10158 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10159 return fold_build2_loc (loc, MULT_EXPR, type,
10160 fold_convert_loc (loc, type,
10161 negate_expr (arg0)),
10162 fold_convert_loc (loc, type,
10163 TREE_OPERAND (arg1, 0)));
10164
10165 if (! FLOAT_TYPE_P (type))
10166 {
10167 /* Transform x * -C into -x * C if x is easily negatable. */
10168 if (TREE_CODE (arg1) == INTEGER_CST
10169 && tree_int_cst_sgn (arg1) == -1
10170 && negate_expr_p (arg0)
10171 && (tem = negate_expr (arg1)) != arg1
10172 && !TREE_OVERFLOW (tem))
10173 return fold_build2_loc (loc, MULT_EXPR, type,
10174 fold_convert_loc (loc, type,
10175 negate_expr (arg0)),
10176 tem);
10177
10178 /* (a * (1 << b)) is (a << b) */
10179 if (TREE_CODE (arg1) == LSHIFT_EXPR
10180 && integer_onep (TREE_OPERAND (arg1, 0)))
10181 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10182 TREE_OPERAND (arg1, 1));
10183 if (TREE_CODE (arg0) == LSHIFT_EXPR
10184 && integer_onep (TREE_OPERAND (arg0, 0)))
10185 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10186 TREE_OPERAND (arg0, 1));
10187
10188 /* (A + A) * C -> A * 2 * C */
10189 if (TREE_CODE (arg0) == PLUS_EXPR
10190 && TREE_CODE (arg1) == INTEGER_CST
10191 && operand_equal_p (TREE_OPERAND (arg0, 0),
10192 TREE_OPERAND (arg0, 1), 0))
10193 return fold_build2_loc (loc, MULT_EXPR, type,
10194 omit_one_operand_loc (loc, type,
10195 TREE_OPERAND (arg0, 0),
10196 TREE_OPERAND (arg0, 1)),
10197 fold_build2_loc (loc, MULT_EXPR, type,
10198 build_int_cst (type, 2), arg1));
10199
10200 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10201 sign-changing only. */
10202 if (TREE_CODE (arg1) == INTEGER_CST
10203 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10204 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10205 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10206
10207 strict_overflow_p = false;
10208 if (TREE_CODE (arg1) == INTEGER_CST
10209 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10210 &strict_overflow_p)))
10211 {
10212 if (strict_overflow_p)
10213 fold_overflow_warning (("assuming signed overflow does not "
10214 "occur when simplifying "
10215 "multiplication"),
10216 WARN_STRICT_OVERFLOW_MISC);
10217 return fold_convert_loc (loc, type, tem);
10218 }
10219
10220 /* Optimize z * conj(z) for integer complex numbers. */
10221 if (TREE_CODE (arg0) == CONJ_EXPR
10222 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10223 return fold_mult_zconjz (loc, type, arg1);
10224 if (TREE_CODE (arg1) == CONJ_EXPR
10225 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10226 return fold_mult_zconjz (loc, type, arg0);
10227 }
10228 else
10229 {
10230 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10231 the result for floating point types due to rounding, so it is applied
10232 only if -fassociative-math was specified. */
10233 if (flag_associative_math
10234 && TREE_CODE (arg0) == RDIV_EXPR
10235 && TREE_CODE (arg1) == REAL_CST
10236 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10237 {
10238 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10239 arg1);
10240 if (tem)
10241 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10242 TREE_OPERAND (arg0, 1));
10243 }
10244
10245 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10246 if (operand_equal_p (arg0, arg1, 0))
10247 {
10248 tree tem = fold_strip_sign_ops (arg0);
10249 if (tem != NULL_TREE)
10250 {
10251 tem = fold_convert_loc (loc, type, tem);
10252 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10253 }
10254 }
10255
10256 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10257 This is not the same for NaNs or if signed zeros are
10258 involved. */
10259 if (!HONOR_NANS (arg0)
10260 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10261 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10262 && TREE_CODE (arg1) == COMPLEX_CST
10263 && real_zerop (TREE_REALPART (arg1)))
10264 {
10265 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10266 if (real_onep (TREE_IMAGPART (arg1)))
10267 return
10268 fold_build2_loc (loc, COMPLEX_EXPR, type,
10269 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10270 rtype, arg0)),
10271 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10272 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10273 return
10274 fold_build2_loc (loc, COMPLEX_EXPR, type,
10275 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10276 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10277 rtype, arg0)));
10278 }
10279
10280 /* Optimize z * conj(z) for floating point complex numbers.
10281 Guarded by flag_unsafe_math_optimizations as non-finite
10282 imaginary components don't produce scalar results. */
10283 if (flag_unsafe_math_optimizations
10284 && TREE_CODE (arg0) == CONJ_EXPR
10285 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10286 return fold_mult_zconjz (loc, type, arg1);
10287 if (flag_unsafe_math_optimizations
10288 && TREE_CODE (arg1) == CONJ_EXPR
10289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10290 return fold_mult_zconjz (loc, type, arg0);
10291
10292 if (flag_unsafe_math_optimizations)
10293 {
10294 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10295 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10296
10297 /* Optimizations of root(...)*root(...). */
10298 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10299 {
10300 tree rootfn, arg;
10301 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10302 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10303
10304 /* Optimize sqrt(x)*sqrt(x) as x. */
10305 if (BUILTIN_SQRT_P (fcode0)
10306 && operand_equal_p (arg00, arg10, 0)
10307 && ! HONOR_SNANS (element_mode (type)))
10308 return arg00;
10309
10310 /* Optimize root(x)*root(y) as root(x*y). */
10311 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10312 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10313 return build_call_expr_loc (loc, rootfn, 1, arg);
10314 }
10315
10316 /* Optimize expN(x)*expN(y) as expN(x+y). */
10317 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10318 {
10319 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10320 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10321 CALL_EXPR_ARG (arg0, 0),
10322 CALL_EXPR_ARG (arg1, 0));
10323 return build_call_expr_loc (loc, expfn, 1, arg);
10324 }
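/* E.g. exp (x) * exp (y) becomes exp (x + y): one call instead of
   two, but with different rounding, hence the unsafe-math guard.  */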
10325
10326 /* Optimizations of pow(...)*pow(...). */
10327 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10328 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10329 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10330 {
10331 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10332 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10333 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10334 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10335
10336 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10337 if (operand_equal_p (arg01, arg11, 0))
10338 {
10339 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10340 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10341 arg00, arg10);
10342 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10343 }
10344
10345 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10346 if (operand_equal_p (arg00, arg10, 0))
10347 {
10348 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10349 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10350 arg01, arg11);
10351 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10352 }
10353 }
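/* For illustration: pow (x, 3.0) * pow (y, 3.0) becomes
   pow (x * y, 3.0), and pow (x, 2.0) * pow (x, 3.0) becomes
   pow (x, 5.0).  */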
10354
10355 /* Optimize tan(x)*cos(x) as sin(x). */
10356 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10357 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10358 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10359 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10360 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10361 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10362 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10363 CALL_EXPR_ARG (arg1, 0), 0))
10364 {
10365 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10366
10367 if (sinfn != NULL_TREE)
10368 return build_call_expr_loc (loc, sinfn, 1,
10369 CALL_EXPR_ARG (arg0, 0));
10370 }
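/* Since tan (x) == sin (x) / cos (x), the product collapses to
   sin (x); results differ where cos (x) == 0, which is why this
   sits under flag_unsafe_math_optimizations.  */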
10371
10372 /* Optimize x*pow(x,c) as pow(x,c+1). */
10373 if (fcode1 == BUILT_IN_POW
10374 || fcode1 == BUILT_IN_POWF
10375 || fcode1 == BUILT_IN_POWL)
10376 {
10377 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10378 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10379 if (TREE_CODE (arg11) == REAL_CST
10380 && !TREE_OVERFLOW (arg11)
10381 && operand_equal_p (arg0, arg10, 0))
10382 {
10383 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10384 REAL_VALUE_TYPE c;
10385 tree arg;
10386
10387 c = TREE_REAL_CST (arg11);
10388 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10389 arg = build_real (type, c);
10390 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10391 }
10392 }
10393
10394 /* Optimize pow(x,c)*x as pow(x,c+1). */
10395 if (fcode0 == BUILT_IN_POW
10396 || fcode0 == BUILT_IN_POWF
10397 || fcode0 == BUILT_IN_POWL)
10398 {
10399 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10400 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10401 if (TREE_CODE (arg01) == REAL_CST
10402 && !TREE_OVERFLOW (arg01)
10403 && operand_equal_p (arg1, arg00, 0))
10404 {
10405 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10406 REAL_VALUE_TYPE c;
10407 tree arg;
10408
10409 c = TREE_REAL_CST (arg01);
10410 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10411 arg = build_real (type, c);
10412 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10413 }
10414 }
10415
10416 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10417 if (!in_gimple_form
10418 && optimize
10419 && operand_equal_p (arg0, arg1, 0))
10420 {
10421 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10422
10423 if (powfn)
10424 {
10425 tree arg = build_real (type, dconst2);
10426 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10427 }
10428 }
10429 }
10430 }
10431 goto associate;
10432
10433 case BIT_IOR_EXPR:
10434 bit_ior:
10435 /* Canonicalize (X & C1) | C2. */
10436 if (TREE_CODE (arg0) == BIT_AND_EXPR
10437 && TREE_CODE (arg1) == INTEGER_CST
10438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10439 {
10440 int width = TYPE_PRECISION (type), w;
10441 wide_int c1 = TREE_OPERAND (arg0, 1);
10442 wide_int c2 = arg1;
10443
10444 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10445 if ((c1 & c2) == c1)
10446 return omit_one_operand_loc (loc, type, arg1,
10447 TREE_OPERAND (arg0, 0));
10448
10449 wide_int msk = wi::mask (width, false,
10450 TYPE_PRECISION (TREE_TYPE (arg1)));
10451
10452 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10453 if (msk.and_not (c1 | c2) == 0)
10454 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10455 TREE_OPERAND (arg0, 0), arg1);
10456
10457 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10458 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10459 mode which allows further optimizations. */
10460 c1 &= msk;
10461 c2 &= msk;
10462 wide_int c3 = c1.and_not (c2);
10463 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10464 {
10465 wide_int mask = wi::mask (w, false,
10466 TYPE_PRECISION (type));
10467 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10468 {
10469 c3 = mask;
10470 break;
10471 }
10472 }
10473
10474 if (c3 != c1)
10475 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10476 fold_build2_loc (loc, BIT_AND_EXPR, type,
10477 TREE_OPERAND (arg0, 0),
10478 wide_int_to_tree (type,
10479 c3)),
10480 arg1);
10481 }
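/* Worked example, with 16-bit constants chosen for illustration:
   (X & 0x00ff) | 0x0fff drops to plain 0x0fff ((C1 & C2) == C1);
   (X & 0xff00) | 0x00ff becomes X | 0x00ff ((C1 | C2) == ~0); and
   (X & 0x0ff0) | 0x00f0 narrows the mask, giving
   (X & 0x0f00) | 0x00f0.  */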
10482
10483 /* (X & ~Y) | (~X & Y) is X ^ Y */
10484 if (TREE_CODE (arg0) == BIT_AND_EXPR
10485 && TREE_CODE (arg1) == BIT_AND_EXPR)
10486 {
10487 tree a0, a1, l0, l1, n0, n1;
10488
10489 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10490 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10491
10492 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10493 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10494
10495 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10496 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10497
10498 if ((operand_equal_p (n0, a0, 0)
10499 && operand_equal_p (n1, a1, 0))
10500 || (operand_equal_p (n0, a1, 0)
10501 && operand_equal_p (n1, a0, 0)))
10502 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10503 }
10504
10505 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10506 if (t1 != NULL_TREE)
10507 return t1;
10508
10509 /* See if this can be simplified into a rotate first. If that
10510 is unsuccessful continue in the association code. */
10511 goto bit_rotate;
10512
10513 case BIT_XOR_EXPR:
10514 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10515 if (TREE_CODE (arg0) == BIT_AND_EXPR
10516 && INTEGRAL_TYPE_P (type)
10517 && integer_onep (TREE_OPERAND (arg0, 1))
10518 && integer_onep (arg1))
10519 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10520 build_zero_cst (TREE_TYPE (arg0)));
10521
10522 /* See if this can be simplified into a rotate first. If that
10523 is unsuccessful continue in the association code. */
10524 goto bit_rotate;
10525
10526 case BIT_AND_EXPR:
10527 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10528 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10529 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10530 || (TREE_CODE (arg0) == EQ_EXPR
10531 && integer_zerop (TREE_OPERAND (arg0, 1))))
10532 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10533 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10534
10535 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10536 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10537 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10538 || (TREE_CODE (arg1) == EQ_EXPR
10539 && integer_zerop (TREE_OPERAND (arg1, 1))))
10540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10541 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10542
10543 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10544 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10545 && INTEGRAL_TYPE_P (type)
10546 && integer_onep (TREE_OPERAND (arg0, 1))
10547 && integer_onep (arg1))
10548 {
10549 tree tem2;
10550 tem = TREE_OPERAND (arg0, 0);
10551 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10552 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10553 tem, tem2);
10554 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10555 build_zero_cst (TREE_TYPE (tem)));
10556 }
10557 /* Fold ~X & 1 as (X & 1) == 0. */
10558 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10559 && INTEGRAL_TYPE_P (type)
10560 && integer_onep (arg1))
10561 {
10562 tree tem2;
10563 tem = TREE_OPERAND (arg0, 0);
10564 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10565 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10566 tem, tem2);
10567 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10568 build_zero_cst (TREE_TYPE (tem)));
10569 }
10570 /* Fold !X & 1 as X == 0. */
10571 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10572 && integer_onep (arg1))
10573 {
10574 tem = TREE_OPERAND (arg0, 0);
10575 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10576 build_zero_cst (TREE_TYPE (tem)));
10577 }
10578
10579 /* Fold (X ^ Y) & Y as ~X & Y. */
10580 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10581 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10582 {
10583 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10584 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10585 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10586 fold_convert_loc (loc, type, arg1));
10587 }
10588 /* Fold (X ^ Y) & X as ~Y & X. */
10589 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10590 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10591 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10592 {
10593 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10594 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10595 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10596 fold_convert_loc (loc, type, arg1));
10597 }
10598 /* Fold X & (X ^ Y) as X & ~Y. */
10599 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10600 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10601 {
10602 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10603 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10604 fold_convert_loc (loc, type, arg0),
10605 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10606 }
10607 /* Fold X & (Y ^ X) as ~Y & X. */
10608 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10609 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10610 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10611 {
10612 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10613 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10614 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10615 fold_convert_loc (loc, type, arg0));
10616 }
10617
10618 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10619 multiple of 1 << CST. */
10620 if (TREE_CODE (arg1) == INTEGER_CST)
10621 {
10622 wide_int cst1 = arg1;
10623 wide_int ncst1 = -cst1;
10624 if ((cst1 & ncst1) == ncst1
10625 && multiple_of_p (type, arg0,
10626 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10627 return fold_convert_loc (loc, type, arg0);
10628 }
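/* E.g. (x * 12) & -4 folds to x * 12: the product is always a
   multiple of 4, so the bits the mask would clear are already 0.  */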
10629
10630 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10631 bits from CST2. */
10632 if (TREE_CODE (arg1) == INTEGER_CST
10633 && TREE_CODE (arg0) == MULT_EXPR
10634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10635 {
10636 wide_int warg1 = arg1;
10637 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10638
10639 if (masked == 0)
10640 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10641 arg0, arg1);
10642 else if (masked != warg1)
10643 {
10644 /* Avoid the transform if arg1 is a mask of some
10645 mode which allows further optimizations. */
10646 int pop = wi::popcount (warg1);
10647 if (!(pop >= BITS_PER_UNIT
10648 && exact_log2 (pop) != -1
10649 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10650 return fold_build2_loc (loc, code, type, op0,
10651 wide_int_to_tree (type, masked));
10652 }
10653 }
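/* E.g. (x * 8) & 7 folds to 0 outright, while (x * 4) & 7 keeps
   only the bit the product can actually set and becomes
   (x * 4) & 4.  */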
10654
10655 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10656 ((A & N) + B) & M -> (A + B) & M
10657 Similarly if (N & M) == 0,
10658 ((A | N) + B) & M -> (A + B) & M
10659 and for - instead of + (or unary - instead of +)
10660 and/or ^ instead of |.
10661 If B is constant and (B & M) == 0, fold into A & M. */
10662 if (TREE_CODE (arg1) == INTEGER_CST)
10663 {
10664 wide_int cst1 = arg1;
10665 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10666 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10667 && (TREE_CODE (arg0) == PLUS_EXPR
10668 || TREE_CODE (arg0) == MINUS_EXPR
10669 || TREE_CODE (arg0) == NEGATE_EXPR)
10670 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10671 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10672 {
10673 tree pmop[2];
10674 int which = 0;
10675 wide_int cst0;
10676
10677 /* Now we know that arg0 is (C + D) or (C - D) or
10678 -C and arg1 (M) is == (1LL << cst) - 1.
10679 Store C into PMOP[0] and D into PMOP[1]. */
10680 pmop[0] = TREE_OPERAND (arg0, 0);
10681 pmop[1] = NULL;
10682 if (TREE_CODE (arg0) != NEGATE_EXPR)
10683 {
10684 pmop[1] = TREE_OPERAND (arg0, 1);
10685 which = 1;
10686 }
10687
10688 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10689 which = -1;
10690
10691 for (; which >= 0; which--)
10692 switch (TREE_CODE (pmop[which]))
10693 {
10694 case BIT_AND_EXPR:
10695 case BIT_IOR_EXPR:
10696 case BIT_XOR_EXPR:
10697 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10698 != INTEGER_CST)
10699 break;
10700 cst0 = TREE_OPERAND (pmop[which], 1);
10701 cst0 &= cst1;
10702 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10703 {
10704 if (cst0 != cst1)
10705 break;
10706 }
10707 else if (cst0 != 0)
10708 break;
10709 /* If C or D is of the form (A & N) where
10710 (N & M) == M, or of the form (A | N) or
10711 (A ^ N) where (N & M) == 0, replace it with A. */
10712 pmop[which] = TREE_OPERAND (pmop[which], 0);
10713 break;
10714 case INTEGER_CST:
10715 /* If C or D is a N where (N & M) == 0, it can be
10716 omitted (assumed 0). */
10717 if ((TREE_CODE (arg0) == PLUS_EXPR
10718 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10719 && (cst1 & pmop[which]) == 0)
10720 pmop[which] = NULL;
10721 break;
10722 default:
10723 break;
10724 }
10725
10726 /* Only build anything new if we optimized one or both arguments
10727 above. */
10728 if (pmop[0] != TREE_OPERAND (arg0, 0)
10729 || (TREE_CODE (arg0) != NEGATE_EXPR
10730 && pmop[1] != TREE_OPERAND (arg0, 1)))
10731 {
10732 tree utype = TREE_TYPE (arg0);
10733 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10734 {
10735 /* Perform the operations in a type that has defined
10736 overflow behavior. */
10737 utype = unsigned_type_for (TREE_TYPE (arg0));
10738 if (pmop[0] != NULL)
10739 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10740 if (pmop[1] != NULL)
10741 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10742 }
10743
10744 if (TREE_CODE (arg0) == NEGATE_EXPR)
10745 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10746 else if (TREE_CODE (arg0) == PLUS_EXPR)
10747 {
10748 if (pmop[0] != NULL && pmop[1] != NULL)
10749 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10750 pmop[0], pmop[1]);
10751 else if (pmop[0] != NULL)
10752 tem = pmop[0];
10753 else if (pmop[1] != NULL)
10754 tem = pmop[1];
10755 else
10756 return build_int_cst (type, 0);
10757 }
10758 else if (pmop[0] == NULL)
10759 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10760 else
10761 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10762 pmop[0], pmop[1]);
10763 /* TEM is now the new binary +, - or unary - replacement. */
10764 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10765 fold_convert_loc (loc, utype, arg1));
10766 return fold_convert_loc (loc, type, tem);
10767 }
10768 }
10769 }
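/* Worked example with M == 0xff: ((a & 0xfff) + b) & 0xff becomes
   (a + b) & 0xff, since the inner mask leaves the low eight bits of
   a intact and carries only propagate upward; likewise
   ((a | 0x100) + b) & 0xff drops the IOR because (N & M) == 0.  */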
10770
10771 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10772 if (t1 != NULL_TREE)
10773 return t1;
10774 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10775 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10776 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10777 {
10778 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10779
10780 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10781 if (mask == -1)
10782 return
10783 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10784 }
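/* 0377 is 255, a full eight-bit mask, so the AND cannot clear any
   bit produced by zero-extending an unsigned char.  */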
10785
10786 goto associate;
10787
10788 case RDIV_EXPR:
10789 /* Don't touch a floating-point divide by zero unless the mode
10790 of the constant can represent infinity. */
10791 if (TREE_CODE (arg1) == REAL_CST
10792 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10793 && real_zerop (arg1))
10794 return NULL_TREE;
10795
10796 /* (-A) / (-B) -> A / B */
10797 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10798 return fold_build2_loc (loc, RDIV_EXPR, type,
10799 TREE_OPERAND (arg0, 0),
10800 negate_expr (arg1));
10801 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10802 return fold_build2_loc (loc, RDIV_EXPR, type,
10803 negate_expr (arg0),
10804 TREE_OPERAND (arg1, 0));
10805
10806 /* Convert A/B/C to A/(B*C). */
10807 if (flag_reciprocal_math
10808 && TREE_CODE (arg0) == RDIV_EXPR)
10809 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10810 fold_build2_loc (loc, MULT_EXPR, type,
10811 TREE_OPERAND (arg0, 1), arg1));
10812
10813 /* Convert A/(B/C) to (A/B)*C. */
10814 if (flag_reciprocal_math
10815 && TREE_CODE (arg1) == RDIV_EXPR)
10816 return fold_build2_loc (loc, MULT_EXPR, type,
10817 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10818 TREE_OPERAND (arg1, 0)),
10819 TREE_OPERAND (arg1, 1));
10820
10821 /* Convert C1/(X*C2) into (C1/C2)/X. */
10822 if (flag_reciprocal_math
10823 && TREE_CODE (arg1) == MULT_EXPR
10824 && TREE_CODE (arg0) == REAL_CST
10825 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10826 {
10827 tree tem = const_binop (RDIV_EXPR, arg0,
10828 TREE_OPERAND (arg1, 1));
10829 if (tem)
10830 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10831 TREE_OPERAND (arg1, 0));
10832 }
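/* E.g. under -freciprocal-math, 6.0 / (x * 2.0) becomes 3.0 / x.  */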
10833
10834 if (flag_unsafe_math_optimizations)
10835 {
10836 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10837 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10838
10839 /* Optimize sin(x)/cos(x) as tan(x). */
10840 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10841 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10842 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10843 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10844 CALL_EXPR_ARG (arg1, 0), 0))
10845 {
10846 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10847
10848 if (tanfn != NULL_TREE)
10849 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10850 }
10851
10852 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10853 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10854 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10855 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10856 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10857 CALL_EXPR_ARG (arg1, 0), 0))
10858 {
10859 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10860
10861 if (tanfn != NULL_TREE)
10862 {
10863 tree tmp = build_call_expr_loc (loc, tanfn, 1,
10864 CALL_EXPR_ARG (arg0, 0));
10865 return fold_build2_loc (loc, RDIV_EXPR, type,
10866 build_real (type, dconst1), tmp);
10867 }
10868 }
10869
10870 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10871 NaNs or Infinities. */
10872 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10873 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10874 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10875 {
10876 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10877 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10878
10879 if (! HONOR_NANS (arg00)
10880 && ! HONOR_INFINITIES (element_mode (arg00))
10881 && operand_equal_p (arg00, arg01, 0))
10882 {
10883 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10884
10885 if (cosfn != NULL_TREE)
10886 return build_call_expr_loc (loc, cosfn, 1, arg00);
10887 }
10888 }
10889
10890 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10891 NaNs or Infinities. */
10892 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10893 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10894 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10895 {
10896 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10897 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10898
10899 if (! HONOR_NANS (arg00)
10900 && ! HONOR_INFINITIES (element_mode (arg00))
10901 && operand_equal_p (arg00, arg01, 0))
10902 {
10903 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10904
10905 if (cosfn != NULL_TREE)
10906 {
10907 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
10908 return fold_build2_loc (loc, RDIV_EXPR, type,
10909 build_real (type, dconst1),
10910 tmp);
10911 }
10912 }
10913 }
10914
10915 /* Optimize pow(x,c)/x as pow(x,c-1). */
10916 if (fcode0 == BUILT_IN_POW
10917 || fcode0 == BUILT_IN_POWF
10918 || fcode0 == BUILT_IN_POWL)
10919 {
10920 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10921 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10922 if (TREE_CODE (arg01) == REAL_CST
10923 && !TREE_OVERFLOW (arg01)
10924 && operand_equal_p (arg1, arg00, 0))
10925 {
10926 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10927 REAL_VALUE_TYPE c;
10928 tree arg;
10929
10930 c = TREE_REAL_CST (arg01);
10931 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10932 arg = build_real (type, c);
10933 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10934 }
10935 }
10936
10937 /* Optimize a/root(b/c) into a*root(c/b). */
10938 if (BUILTIN_ROOT_P (fcode1))
10939 {
10940 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10941
10942 if (TREE_CODE (rootarg) == RDIV_EXPR)
10943 {
10944 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10945 tree b = TREE_OPERAND (rootarg, 0);
10946 tree c = TREE_OPERAND (rootarg, 1);
10947
10948 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
10949
10950 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
10951 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
10952 }
10953 }
10954
10955 /* Optimize x/expN(y) into x*expN(-y). */
10956 if (BUILTIN_EXPONENT_P (fcode1))
10957 {
10958 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10959 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10960 arg1 = build_call_expr_loc (loc,
10961 expfn, 1,
10962 fold_convert_loc (loc, type, arg));
10963 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10964 }
10965
10966 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10967 if (fcode1 == BUILT_IN_POW
10968 || fcode1 == BUILT_IN_POWF
10969 || fcode1 == BUILT_IN_POWL)
10970 {
10971 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10972 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10973 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10974 tree neg11 = fold_convert_loc (loc, type,
10975 negate_expr (arg11));
10976 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
10977 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10978 }
10979 }
10980 return NULL_TREE;
10981
10982 case TRUNC_DIV_EXPR:
10983 /* Optimize (X & (-A)) / A where A is a power of 2,
10984 to X >> log2(A) */
10985 if (TREE_CODE (arg0) == BIT_AND_EXPR
10986 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10987 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10988 {
10989 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10990 arg1, TREE_OPERAND (arg0, 1));
10991 if (sum && integer_zerop (sum)) {
10992 tree pow2 = build_int_cst (integer_type_node,
10993 wi::exact_log2 (arg1));
10994 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10995 TREE_OPERAND (arg0, 0), pow2);
10996 }
10997 }
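/* E.g. (x & -8) / 8 becomes x >> 3: the AND makes the dividend an
   exact multiple of 8, so truncating division agrees with the
   arithmetic shift even for negative x.  */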
10998
10999 /* Fall through */
11000
11001 case FLOOR_DIV_EXPR:
11002 /* Simplify A / (B << N) where A and B are positive and B is
11003 a power of 2, to A >> (N + log2(B)). */
11004 strict_overflow_p = false;
11005 if (TREE_CODE (arg1) == LSHIFT_EXPR
11006 && (TYPE_UNSIGNED (type)
11007 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11008 {
11009 tree sval = TREE_OPERAND (arg1, 0);
11010 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11011 {
11012 tree sh_cnt = TREE_OPERAND (arg1, 1);
11013 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11014 wi::exact_log2 (sval));
11015
11016 if (strict_overflow_p)
11017 fold_overflow_warning (("assuming signed overflow does not "
11018 "occur when simplifying A / (B << N)"),
11019 WARN_STRICT_OVERFLOW_MISC);
11020
11021 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11022 sh_cnt, pow2);
11023 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11024 fold_convert_loc (loc, type, arg0), sh_cnt);
11025 }
11026 }
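/* E.g. with unsigned operands, a / (4 << n) becomes a >> (n + 2).  */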
11027
11028 /* Fall through */
11029
11030 case ROUND_DIV_EXPR:
11031 case CEIL_DIV_EXPR:
11032 case EXACT_DIV_EXPR:
11033 if (integer_zerop (arg1))
11034 return NULL_TREE;
11035
11036 /* Convert -A / -B to A / B when the type is signed and overflow is
11037 undefined. */
11038 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11039 && TREE_CODE (arg0) == NEGATE_EXPR
11040 && negate_expr_p (arg1))
11041 {
11042 if (INTEGRAL_TYPE_P (type))
11043 fold_overflow_warning (("assuming signed overflow does not occur "
11044 "when distributing negation across "
11045 "division"),
11046 WARN_STRICT_OVERFLOW_MISC);
11047 return fold_build2_loc (loc, code, type,
11048 fold_convert_loc (loc, type,
11049 TREE_OPERAND (arg0, 0)),
11050 fold_convert_loc (loc, type,
11051 negate_expr (arg1)));
11052 }
11053 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11054 && TREE_CODE (arg1) == NEGATE_EXPR
11055 && negate_expr_p (arg0))
11056 {
11057 if (INTEGRAL_TYPE_P (type))
11058 fold_overflow_warning (("assuming signed overflow does not occur "
11059 "when distributing negation across "
11060 "division"),
11061 WARN_STRICT_OVERFLOW_MISC);
11062 return fold_build2_loc (loc, code, type,
11063 fold_convert_loc (loc, type,
11064 negate_expr (arg0)),
11065 fold_convert_loc (loc, type,
11066 TREE_OPERAND (arg1, 0)));
11067 }
11068
11069 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11070 operation, EXACT_DIV_EXPR.
11071
11072 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11073 At one time others generated faster code; it's not clear whether they
11074 still do after the last round of changes to the DIV code in expmed.c. */
11075 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11076 && multiple_of_p (type, arg0, arg1))
11077 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11078
11079 strict_overflow_p = false;
11080 if (TREE_CODE (arg1) == INTEGER_CST
11081 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11082 &strict_overflow_p)))
11083 {
11084 if (strict_overflow_p)
11085 fold_overflow_warning (("assuming signed overflow does not occur "
11086 "when simplifying division"),
11087 WARN_STRICT_OVERFLOW_MISC);
11088 return fold_convert_loc (loc, type, tem);
11089 }
11090
11091 return NULL_TREE;
11092
11093 case CEIL_MOD_EXPR:
11094 case FLOOR_MOD_EXPR:
11095 case ROUND_MOD_EXPR:
11096 case TRUNC_MOD_EXPR:
11097 strict_overflow_p = false;
11098 if (TREE_CODE (arg1) == INTEGER_CST
11099 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11100 &strict_overflow_p)))
11101 {
11102 if (strict_overflow_p)
11103 fold_overflow_warning (("assuming signed overflow does not occur "
11104 "when simplifying modulus"),
11105 WARN_STRICT_OVERFLOW_MISC);
11106 return fold_convert_loc (loc, type, tem);
11107 }
11108
11109 return NULL_TREE;
11110
11111 case LROTATE_EXPR:
11112 case RROTATE_EXPR:
11113 case RSHIFT_EXPR:
11114 case LSHIFT_EXPR:
11115 /* Since a negative shift count is not well-defined,
11116 don't try to compute it in the compiler. */
11117 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11118 return NULL_TREE;
11119
11120 prec = element_precision (type);
11121
11122 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11123 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11124 && tree_to_uhwi (arg1) < prec
11125 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11126 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11127 {
11128 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11129 + tree_to_uhwi (arg1));
11130
11131 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11132 being well defined. */
11133 if (low >= prec)
11134 {
11135 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11136 low = low % prec;
11137 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11138 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11139 TREE_OPERAND (arg0, 0));
11140 else
11141 low = prec - 1;
11142 }
11143
11144 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11145 build_int_cst (TREE_TYPE (arg1), low));
11146 }
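/* E.g. (x >> 2) >> 3 becomes x >> 5.  For 32-bit x, (x >> 30) >> 5
   overshoots the precision and yields 0 for unsigned x or x >> 31
   for signed x, while rotate counts simply wrap modulo the
   precision.  */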
11147
11148 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11149 into x & ((unsigned)-1 >> c) for unsigned types. */
11150 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11151 || (TYPE_UNSIGNED (type)
11152 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11153 && tree_fits_uhwi_p (arg1)
11154 && tree_to_uhwi (arg1) < prec
11155 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11156 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11157 {
11158 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11159 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11160 tree lshift;
11161 tree arg00;
11162
11163 if (low0 == low1)
11164 {
11165 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11166
11167 lshift = build_minus_one_cst (type);
11168 lshift = const_binop (code, lshift, arg1);
11169
11170 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11171 }
11172 }
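/* E.g. (x >> 4) << 4 becomes x & -16, clearing the low four bits
   with a single AND.  */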
11173
11174 /* If we have a rotate of a bit operation with the rotate count and
11175 the second operand of the bit operation both constant,
11176 permute the two operations. */
11177 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11178 && (TREE_CODE (arg0) == BIT_AND_EXPR
11179 || TREE_CODE (arg0) == BIT_IOR_EXPR
11180 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11181 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11182 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11183 fold_build2_loc (loc, code, type,
11184 TREE_OPERAND (arg0, 0), arg1),
11185 fold_build2_loc (loc, code, type,
11186 TREE_OPERAND (arg0, 1), arg1));
11187
11188 /* Two consecutive rotates adding up to some integer
11189 multiple of the precision of the type can be ignored. */
11190 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11191 && TREE_CODE (arg0) == RROTATE_EXPR
11192 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11193 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11194 prec) == 0)
11195 return TREE_OPERAND (arg0, 0);
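/* E.g. rotating a 32-bit value right by 13 and then by 19 is the
   identity, as 13 + 19 == 32.  */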
11196
11197 return NULL_TREE;
11198
11199 case MIN_EXPR:
11200 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11201 if (tem)
11202 return tem;
11203 goto associate;
11204
11205 case MAX_EXPR:
11206 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11207 if (tem)
11208 return tem;
11209 goto associate;
11210
11211 case TRUTH_ANDIF_EXPR:
11212 /* Note that the operands of this must be ints
11213 and their values must be 0 or 1.
11214 ("true" is a fixed value perhaps depending on the language.) */
11215 /* If first arg is constant zero, return it. */
11216 if (integer_zerop (arg0))
11217 return fold_convert_loc (loc, type, arg0);
11218 case TRUTH_AND_EXPR:
11219 /* If either arg is constant true, drop it. */
11220 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11221 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11222 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11223 /* Preserve sequence points. */
11224 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11225 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11226 /* If second arg is constant zero, result is zero, but first arg
11227 must be evaluated. */
11228 if (integer_zerop (arg1))
11229 return omit_one_operand_loc (loc, type, arg1, arg0);
11230 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11231 case will be handled here. */
11232 if (integer_zerop (arg0))
11233 return omit_one_operand_loc (loc, type, arg0, arg1);
11234
11235 /* !X && X is always false. */
11236 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11237 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11238 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11239 /* X && !X is always false. */
11240 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11241 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11242 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11243
11244 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11245 means A >= Y && A != MAX, but in this case we know that
11246 A < X <= MAX. */
11247
11248 if (!TREE_SIDE_EFFECTS (arg0)
11249 && !TREE_SIDE_EFFECTS (arg1))
11250 {
11251 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11252 if (tem && !operand_equal_p (tem, arg0, 0))
11253 return fold_build2_loc (loc, code, type, tem, arg1);
11254
11255 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11256 if (tem && !operand_equal_p (tem, arg1, 0))
11257 return fold_build2_loc (loc, code, type, arg0, tem);
11258 }
11259
11260 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11261 != NULL_TREE)
11262 return tem;
11263
11264 return NULL_TREE;
11265
11266 case TRUTH_ORIF_EXPR:
11267 /* Note that the operands of this must be ints
11268 and their values must be 0 or true.
11269 ("true" is a fixed value perhaps depending on the language.) */
11270 /* If first arg is constant true, return it. */
11271 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11272 return fold_convert_loc (loc, type, arg0);
11273 case TRUTH_OR_EXPR:
11274 /* If either arg is constant zero, drop it. */
11275 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11276 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11277 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11278 /* Preserve sequence points. */
11279 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11280 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11281 /* If second arg is constant true, result is true, but we must
11282 evaluate first arg. */
11283 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11284 return omit_one_operand_loc (loc, type, arg1, arg0);
11285 /* Likewise for first arg, but note this only occurs here for
11286 TRUTH_OR_EXPR. */
11287 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11288 return omit_one_operand_loc (loc, type, arg0, arg1);
11289
11290 /* !X || X is always true. */
11291 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11292 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11293 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11294 /* X || !X is always true. */
11295 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11296 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11297 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11298
11299 /* (X && !Y) || (!X && Y) is X ^ Y */
11300 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11301 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11302 {
11303 tree a0, a1, l0, l1, n0, n1;
11304
11305 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11306 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11307
11308 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11309 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11310
11311 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11312 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11313
11314 if ((operand_equal_p (n0, a0, 0)
11315 && operand_equal_p (n1, a1, 0))
11316 || (operand_equal_p (n0, a1, 0)
11317 && operand_equal_p (n1, a0, 0)))
11318 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11319 }
11320
11321 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11322 != NULL_TREE)
11323 return tem;
11324
11325 return NULL_TREE;
11326
11327 case TRUTH_XOR_EXPR:
11328 /* If the second arg is constant zero, drop it. */
11329 if (integer_zerop (arg1))
11330 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11331 /* If the second arg is constant true, this is a logical inversion. */
11332 if (integer_onep (arg1))
11333 {
11334 tem = invert_truthvalue_loc (loc, arg0);
11335 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11336 }
11337 /* Identical arguments cancel to zero. */
11338 if (operand_equal_p (arg0, arg1, 0))
11339 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11340
11341 /* !X ^ X is always true. */
11342 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11343 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11344 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11345
11346 /* X ^ !X is always true. */
11347 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11348 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11349 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11350
11351 return NULL_TREE;
11352
11353 case EQ_EXPR:
11354 case NE_EXPR:
11355 STRIP_NOPS (arg0);
11356 STRIP_NOPS (arg1);
11357
11358 tem = fold_comparison (loc, code, type, op0, op1);
11359 if (tem != NULL_TREE)
11360 return tem;
11361
11362 /* bool_var != 0 becomes bool_var. */
11363 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11364 && code == NE_EXPR)
11365 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11366
11367 /* bool_var == 1 becomes bool_var. */
11368 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11369 && code == EQ_EXPR)
11370 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11371
11372 /* bool_var != 1 becomes !bool_var. */
11373 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11374 && code == NE_EXPR)
11375 return fold_convert_loc (loc, type,
11376 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11377 TREE_TYPE (arg0), arg0));
11378
11379 /* bool_var == 0 becomes !bool_var. */
11380 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11381 && code == EQ_EXPR)
11382 return fold_convert_loc (loc, type,
11383 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11384 TREE_TYPE (arg0), arg0));
11385
11386 /* !exp != 0 becomes !exp */
11387 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11388 && code == NE_EXPR)
11389 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11390
11391 /* If this is an equality comparison of the address of two non-weak,
11392 unaliased symbols neither of which is extern (since we do not
11393 have access to attributes for externs), then we know the result. */
11394 if (TREE_CODE (arg0) == ADDR_EXPR
11395 && DECL_P (TREE_OPERAND (arg0, 0))
11396 && TREE_CODE (arg1) == ADDR_EXPR
11397 && DECL_P (TREE_OPERAND (arg1, 0)))
11398 {
11399 int equal;
11400
11401 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
11402 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
11403 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
11404 ->equal_address_to (symtab_node::get_create
11405 (TREE_OPERAND (arg1, 0)));
11406 else
11407 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11408 if (equal != 2)
11409 return constant_boolean_node (equal
11410 ? code == EQ_EXPR : code != EQ_EXPR,
11411 type);
11412 }
11413
11414 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11415 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11416 && TREE_CODE (arg1) == INTEGER_CST
11417 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11418 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11419 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
11420 fold_convert_loc (loc,
11421 TREE_TYPE (arg0),
11422 arg1),
11423 TREE_OPERAND (arg0, 1)));
11424
11425 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
11426 if ((TREE_CODE (arg0) == PLUS_EXPR
11427 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
11428 || TREE_CODE (arg0) == MINUS_EXPR)
11429 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11430 0)),
11431 arg1, 0)
11432 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11433 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11434 {
11435 tree val = TREE_OPERAND (arg0, 1);
11436 return omit_two_operands_loc (loc, type,
11437 fold_build2_loc (loc, code, type,
11438 val,
11439 build_int_cst (TREE_TYPE (val),
11440 0)),
11441 TREE_OPERAND (arg0, 0), arg1);
11442 }
11443
11444 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
11445 if (TREE_CODE (arg0) == MINUS_EXPR
11446 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
11447 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11448 1)),
11449 arg1, 0)
11450 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
11451 {
11452 return omit_two_operands_loc (loc, type,
11453 code == NE_EXPR
11454 ? boolean_true_node : boolean_false_node,
11455 TREE_OPERAND (arg0, 1), arg1);
11456 }
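/* The low-bit check above tests that C is odd: 7 - x == x would
   need 2*x == 7, which no integer satisfies, so == folds to false
   and != to true.  */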
11457
11458 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11459 if (TREE_CODE (arg0) == ABS_EXPR
11460 && (integer_zerop (arg1) || real_zerop (arg1)))
11461 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
11462
11463 /* If this is an EQ or NE comparison with zero and ARG0 is
11464 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11465 two operations, but the latter can be done in one less insn
11466 on machines that have only two-operand insns or on which a
11467 constant cannot be the first operand. */
11468 if (TREE_CODE (arg0) == BIT_AND_EXPR
11469 && integer_zerop (arg1))
11470 {
11471 tree arg00 = TREE_OPERAND (arg0, 0);
11472 tree arg01 = TREE_OPERAND (arg0, 1);
11473 if (TREE_CODE (arg00) == LSHIFT_EXPR
11474 && integer_onep (TREE_OPERAND (arg00, 0)))
11475 {
11476 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11477 arg01, TREE_OPERAND (arg00, 1));
11478 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11479 build_int_cst (TREE_TYPE (arg0), 1));
11480 return fold_build2_loc (loc, code, type,
11481 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11482 arg1);
11483 }
11484 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11485 && integer_onep (TREE_OPERAND (arg01, 0)))
11486 {
11487 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11488 arg00, TREE_OPERAND (arg01, 1));
11489 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11490 build_int_cst (TREE_TYPE (arg0), 1));
11491 return fold_build2_loc (loc, code, type,
11492 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11493 arg1);
11494 }
11495 }
11496
11497 /* If this is an NE or EQ comparison of zero against the result of a
11498 signed MOD operation whose second operand is a power of 2, make
11499 the MOD operation unsigned since it is simpler and equivalent. */
11500 if (integer_zerop (arg1)
11501 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11502 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11503 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11504 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11505 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11506 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11507 {
11508 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11509 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
11510 fold_convert_loc (loc, newtype,
11511 TREE_OPERAND (arg0, 0)),
11512 fold_convert_loc (loc, newtype,
11513 TREE_OPERAND (arg0, 1)));
11514
11515 return fold_build2_loc (loc, code, type, newmod,
11516 fold_convert_loc (loc, newtype, arg1));
11517 }
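/* E.g. x % 16 == 0 for signed x becomes (unsigned) x % 16 == 0,
   where the remainder is a simple mask, avoiding the sign fixups a
   signed modulus needs.  */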
11518
11519 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11520 C1 is a valid shift constant, and C2 is a power of two, i.e.
11521 a single bit. */
11522 if (TREE_CODE (arg0) == BIT_AND_EXPR
11523 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11524 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11525 == INTEGER_CST
11526 && integer_pow2p (TREE_OPERAND (arg0, 1))
11527 && integer_zerop (arg1))
11528 {
11529 tree itype = TREE_TYPE (arg0);
11530 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11531 prec = TYPE_PRECISION (itype);
11532
11533 /* Check for a valid shift count. */
11534 if (wi::ltu_p (arg001, prec))
11535 {
11536 tree arg01 = TREE_OPERAND (arg0, 1);
11537 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11538 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11539 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11540 can be rewritten as (X & (C2 << C1)) != 0. */
11541 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11542 {
11543 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11544 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11545 return fold_build2_loc (loc, code, type, tem,
11546 fold_convert_loc (loc, itype, arg1));
11547 }
11548 /* Otherwise, for signed (arithmetic) shifts,
11549 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11550 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11551 else if (!TYPE_UNSIGNED (itype))
11552 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11553 arg000, build_int_cst (itype, 0));
11554 /* Otherwise, for unsigned (logical) shifts,
11555 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11556 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11557 else
11558 return omit_one_operand_loc (loc, type,
11559 code == EQ_EXPR ? integer_one_node
11560 : integer_zero_node,
11561 arg000);
11562 }
11563 }
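/* E.g. ((x >> 3) & 4) != 0 becomes (x & 32) != 0, since 4 << 3
   still fits in the type; for signed 32-bit x, ((x >> 31) & 2) != 0
   instead becomes x < 0.  */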
11564
11565 /* If we have (A & C) == C where C is a power of 2, convert this into
11566 (A & C) != 0. Similarly for NE_EXPR. */
11567 if (TREE_CODE (arg0) == BIT_AND_EXPR
11568 && integer_pow2p (TREE_OPERAND (arg0, 1))
11569 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11570 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11571 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
11572 integer_zero_node));
11573
11574 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11575 bit, then fold the expression into A < 0 or A >= 0. */
11576 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
11577 if (tem)
11578 return tem;
11579
11580 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11581 Similarly for NE_EXPR. */
11582 if (TREE_CODE (arg0) == BIT_AND_EXPR
11583 && TREE_CODE (arg1) == INTEGER_CST
11584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11585 {
11586 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
11587 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11588 TREE_OPERAND (arg0, 1));
11589 tree dandnotc
11590 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11591 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
11592 notc);
11593 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11594 if (integer_nonzerop (dandnotc))
11595 return omit_one_operand_loc (loc, type, rslt, arg0);
11596 }
11597
11598 /* If this is a comparison of a field, we may be able to simplify it. */
11599 if ((TREE_CODE (arg0) == COMPONENT_REF
11600 || TREE_CODE (arg0) == BIT_FIELD_REF)
11601 /* Handle the constant case even without -O
11602 to make sure the warnings are given. */
11603 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11604 {
11605 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11606 if (t1)
11607 return t1;
11608 }
11609
11610 /* Optimize comparisons of strlen vs zero to a compare of the
11611 first character of the string vs zero. To wit,
11612 strlen(ptr) == 0 => *ptr == 0
11613 strlen(ptr) != 0 => *ptr != 0
11614 Other cases should reduce to one of these two (or a constant)
11615 due to the return value of strlen being unsigned. */
11616 if (TREE_CODE (arg0) == CALL_EXPR
11617 && integer_zerop (arg1))
11618 {
11619 tree fndecl = get_callee_fndecl (arg0);
11620
11621 if (fndecl
11622 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11623 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11624 && call_expr_nargs (arg0) == 1
11625 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11626 {
11627 tree iref = build_fold_indirect_ref_loc (loc,
11628 CALL_EXPR_ARG (arg0, 0));
11629 return fold_build2_loc (loc, code, type, iref,
11630 build_int_cst (TREE_TYPE (iref), 0));
11631 }
11632 }
11633
11634 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11635 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11636 if (TREE_CODE (arg0) == RSHIFT_EXPR
11637 && integer_zerop (arg1)
11638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11639 {
11640 tree arg00 = TREE_OPERAND (arg0, 0);
11641 tree arg01 = TREE_OPERAND (arg0, 1);
11642 tree itype = TREE_TYPE (arg00);
11643 if (wi::eq_p (arg01, element_precision (itype) - 1))
11644 {
11645 if (TYPE_UNSIGNED (itype))
11646 {
11647 itype = signed_type_for (itype);
11648 arg00 = fold_convert_loc (loc, itype, arg00);
11649 }
11650 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11651 type, arg00, build_zero_cst (itype));
11652 }
11653 }
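/* E.g. for 32-bit x, (x >> 31) != 0 becomes x < 0, converting an
   unsigned x to the signed type first so the sign test applies.  */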
11654
11655 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11656 (X & C) == 0 when C is a single bit. */
11657 if (TREE_CODE (arg0) == BIT_AND_EXPR
11658 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11659 && integer_zerop (arg1)
11660 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11661 {
11662 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11663 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11664 TREE_OPERAND (arg0, 1));
11665 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11666 type, tem,
11667 fold_convert_loc (loc, TREE_TYPE (arg0),
11668 arg1));
11669 }
11670
11671 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11672 constant C is a power of two, i.e. a single bit. */
11673 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11674 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11675 && integer_zerop (arg1)
11676 && integer_pow2p (TREE_OPERAND (arg0, 1))
11677 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11678 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11679 {
11680 tree arg00 = TREE_OPERAND (arg0, 0);
11681 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11682 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11683 }
11684
11685 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11686 when C is a power of two, i.e. a single bit. */
11687 if (TREE_CODE (arg0) == BIT_AND_EXPR
11688 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11689 && integer_zerop (arg1)
11690 && integer_pow2p (TREE_OPERAND (arg0, 1))
11691 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11692 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11693 {
11694 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11695 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11696 arg000, TREE_OPERAND (arg0, 1));
11697 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11698 tem, build_int_cst (TREE_TYPE (tem), 0));
11699 }
11700
11701 if (integer_zerop (arg1)
11702 && tree_expr_nonzero_p (arg0))
11703 {
11704 tree res = constant_boolean_node (code == NE_EXPR, type);
11705 return omit_one_operand_loc (loc, type, res, arg0);
11706 }
11707
11708 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11709 if (TREE_CODE (arg0) == BIT_AND_EXPR
11710 && TREE_CODE (arg1) == BIT_AND_EXPR)
11711 {
11712 tree arg00 = TREE_OPERAND (arg0, 0);
11713 tree arg01 = TREE_OPERAND (arg0, 1);
11714 tree arg10 = TREE_OPERAND (arg1, 0);
11715 tree arg11 = TREE_OPERAND (arg1, 1);
11716 tree itype = TREE_TYPE (arg0);
11717
11718 if (operand_equal_p (arg01, arg11, 0))
11719 return fold_build2_loc (loc, code, type,
11720 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11721 fold_build2_loc (loc,
11722 BIT_XOR_EXPR, itype,
11723 arg00, arg10),
11724 arg01),
11725 build_zero_cst (itype));
11726
11727 if (operand_equal_p (arg01, arg10, 0))
11728 return fold_build2_loc (loc, code, type,
11729 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11730 fold_build2_loc (loc,
11731 BIT_XOR_EXPR, itype,
11732 arg00, arg11),
11733 arg01),
11734 build_zero_cst (itype));
11735
11736 if (operand_equal_p (arg00, arg11, 0))
11737 return fold_build2_loc (loc, code, type,
11738 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11739 fold_build2_loc (loc,
11740 BIT_XOR_EXPR, itype,
11741 arg01, arg10),
11742 arg00),
11743 build_zero_cst (itype));
11744
11745 if (operand_equal_p (arg00, arg10, 0))
11746 return fold_build2_loc (loc, code, type,
11747 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11748 fold_build2_loc (loc,
11749 BIT_XOR_EXPR, itype,
11750 arg01, arg11),
11751 arg00),
11752 build_zero_cst (itype));
11753 }
11754
11755 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11756 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11757 {
11758 tree arg00 = TREE_OPERAND (arg0, 0);
11759 tree arg01 = TREE_OPERAND (arg0, 1);
11760 tree arg10 = TREE_OPERAND (arg1, 0);
11761 tree arg11 = TREE_OPERAND (arg1, 1);
11762 tree itype = TREE_TYPE (arg0);
11763
11764 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11765 operand_equal_p guarantees no side-effects so we don't need
11766 to use omit_one_operand on Z. */
11767 if (operand_equal_p (arg01, arg11, 0))
11768 return fold_build2_loc (loc, code, type, arg00,
11769 fold_convert_loc (loc, TREE_TYPE (arg00),
11770 arg10));
11771 if (operand_equal_p (arg01, arg10, 0))
11772 return fold_build2_loc (loc, code, type, arg00,
11773 fold_convert_loc (loc, TREE_TYPE (arg00),
11774 arg11));
11775 if (operand_equal_p (arg00, arg11, 0))
11776 return fold_build2_loc (loc, code, type, arg01,
11777 fold_convert_loc (loc, TREE_TYPE (arg01),
11778 arg10));
11779 if (operand_equal_p (arg00, arg10, 0))
11780 return fold_build2_loc (loc, code, type, arg01,
11781 fold_convert_loc (loc, TREE_TYPE (arg01),
11782 arg11));
11783
11784 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11785 if (TREE_CODE (arg01) == INTEGER_CST
11786 && TREE_CODE (arg11) == INTEGER_CST)
11787 {
11788 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11789 fold_convert_loc (loc, itype, arg11));
11790 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11791 return fold_build2_loc (loc, code, type, tem,
11792 fold_convert_loc (loc, itype, arg10));
11793 }
11794 }
11795
11796 /* Attempt to simplify equality/inequality comparisons of complex
11797 values. Only lower the comparison if the result is known or
11798 can be simplified to a single scalar comparison. */
11799 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11800 || TREE_CODE (arg0) == COMPLEX_CST)
11801 && (TREE_CODE (arg1) == COMPLEX_EXPR
11802 || TREE_CODE (arg1) == COMPLEX_CST))
11803 {
11804 tree real0, imag0, real1, imag1;
11805 tree rcond, icond;
11806
11807 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11808 {
11809 real0 = TREE_OPERAND (arg0, 0);
11810 imag0 = TREE_OPERAND (arg0, 1);
11811 }
11812 else
11813 {
11814 real0 = TREE_REALPART (arg0);
11815 imag0 = TREE_IMAGPART (arg0);
11816 }
11817
11818 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11819 {
11820 real1 = TREE_OPERAND (arg1, 0);
11821 imag1 = TREE_OPERAND (arg1, 1);
11822 }
11823 else
11824 {
11825 real1 = TREE_REALPART (arg1);
11826 imag1 = TREE_IMAGPART (arg1);
11827 }
11828
11829 rcond = fold_binary_loc (loc, code, type, real0, real1);
11830 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11831 {
11832 if (integer_zerop (rcond))
11833 {
11834 if (code == EQ_EXPR)
11835 return omit_two_operands_loc (loc, type, boolean_false_node,
11836 imag0, imag1);
11837 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11838 }
11839 else
11840 {
11841 if (code == NE_EXPR)
11842 return omit_two_operands_loc (loc, type, boolean_true_node,
11843 imag0, imag1);
11844 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11845 }
11846 }
11847
11848 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11849 if (icond && TREE_CODE (icond) == INTEGER_CST)
11850 {
11851 if (integer_zerop (icond))
11852 {
11853 if (code == EQ_EXPR)
11854 return omit_two_operands_loc (loc, type, boolean_false_node,
11855 real0, real1);
11856 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11857 }
11858 else
11859 {
11860 if (code == NE_EXPR)
11861 return omit_two_operands_loc (loc, type, boolean_true_node,
11862 real0, real1);
11863 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11864 }
11865 }
11866 }
11867
11868 return NULL_TREE;
11869
11870 case LT_EXPR:
11871 case GT_EXPR:
11872 case LE_EXPR:
11873 case GE_EXPR:
11874 tem = fold_comparison (loc, code, type, op0, op1);
11875 if (tem != NULL_TREE)
11876 return tem;
11877
11878 /* Transform comparisons of the form X +- C CMP X. */
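/* E.g. when signed overflow is undefined, X + 1 > X folds to true
   and X - 1 > X folds to false, each with a strict-overflow warning
   if one was requested.  */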
11879 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11880 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11881 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11882 && !HONOR_SNANS (arg0))
11883 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11884 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11885 {
11886 tree arg01 = TREE_OPERAND (arg0, 1);
11887 enum tree_code code0 = TREE_CODE (arg0);
11888 int is_positive;
11889
11890 if (TREE_CODE (arg01) == REAL_CST)
11891 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11892 else
11893 is_positive = tree_int_cst_sgn (arg01);
11894
11895 /* (X - c) > X becomes false. */
11896 if (code == GT_EXPR
11897 && ((code0 == MINUS_EXPR && is_positive >= 0)
11898 || (code0 == PLUS_EXPR && is_positive <= 0)))
11899 {
11900 if (TREE_CODE (arg01) == INTEGER_CST
11901 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11902 fold_overflow_warning (("assuming signed overflow does not "
11903 "occur when assuming that (X - c) > X "
11904 "is always false"),
11905 WARN_STRICT_OVERFLOW_ALL);
11906 return constant_boolean_node (0, type);
11907 }
11908
11909 /* Likewise (X + c) < X becomes false. */
11910 if (code == LT_EXPR
11911 && ((code0 == PLUS_EXPR && is_positive >= 0)
11912 || (code0 == MINUS_EXPR && is_positive <= 0)))
11913 {
11914 if (TREE_CODE (arg01) == INTEGER_CST
11915 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11916 fold_overflow_warning (("assuming signed overflow does not "
11917 "occur when assuming that "
11918 "(X + c) < X is always false"),
11919 WARN_STRICT_OVERFLOW_ALL);
11920 return constant_boolean_node (0, type);
11921 }
11922
11923 /* Convert (X - c) <= X to true. */
11924 if (!HONOR_NANS (arg1)
11925 && code == LE_EXPR
11926 && ((code0 == MINUS_EXPR && is_positive >= 0)
11927 || (code0 == PLUS_EXPR && is_positive <= 0)))
11928 {
11929 if (TREE_CODE (arg01) == INTEGER_CST
11930 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11931 fold_overflow_warning (("assuming signed overflow does not "
11932 "occur when assuming that "
11933 "(X - c) <= X is always true"),
11934 WARN_STRICT_OVERFLOW_ALL);
11935 return constant_boolean_node (1, type);
11936 }
11937
11938 /* Convert (X + c) >= X to true. */
11939 if (!HONOR_NANS (arg1)
11940 && code == GE_EXPR
11941 && ((code0 == PLUS_EXPR && is_positive >= 0)
11942 || (code0 == MINUS_EXPR && is_positive <= 0)))
11943 {
11944 if (TREE_CODE (arg01) == INTEGER_CST
11945 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11946 fold_overflow_warning (("assuming signed overflow does not "
11947 "occur when assuming that "
11948 "(X + c) >= X is always true"),
11949 WARN_STRICT_OVERFLOW_ALL);
11950 return constant_boolean_node (1, type);
11951 }
11952
11953 if (TREE_CODE (arg01) == INTEGER_CST)
11954 {
11955 /* Convert X + c > X and X - c < X to true for integers. */
11956 if (code == GT_EXPR
11957 && ((code0 == PLUS_EXPR && is_positive > 0)
11958 || (code0 == MINUS_EXPR && is_positive < 0)))
11959 {
11960 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11961 fold_overflow_warning (("assuming signed overflow does "
11962 "not occur when assuming that "
11963 "(X + c) > X is always true"),
11964 WARN_STRICT_OVERFLOW_ALL);
11965 return constant_boolean_node (1, type);
11966 }
11967
11968 if (code == LT_EXPR
11969 && ((code0 == MINUS_EXPR && is_positive > 0)
11970 || (code0 == PLUS_EXPR && is_positive < 0)))
11971 {
11972 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11973 fold_overflow_warning (("assuming signed overflow does "
11974 "not occur when assuming that "
11975 "(X - c) < X is always true"),
11976 WARN_STRICT_OVERFLOW_ALL);
11977 return constant_boolean_node (1, type);
11978 }
11979
11980 /* Convert X + c <= X and X - c >= X to false for integers. */
11981 if (code == LE_EXPR
11982 && ((code0 == PLUS_EXPR && is_positive > 0)
11983 || (code0 == MINUS_EXPR && is_positive < 0)))
11984 {
11985 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11986 fold_overflow_warning (("assuming signed overflow does "
11987 "not occur when assuming that "
11988 "(X + c) <= X is always false"),
11989 WARN_STRICT_OVERFLOW_ALL);
11990 return constant_boolean_node (0, type);
11991 }
11992
11993 if (code == GE_EXPR
11994 && ((code0 == MINUS_EXPR && is_positive > 0)
11995 || (code0 == PLUS_EXPR && is_positive < 0)))
11996 {
11997 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11998 fold_overflow_warning (("assuming signed overflow does "
11999 "not occur when assuming that "
12000 "(X - c) >= X is always false"),
12001 WARN_STRICT_OVERFLOW_ALL);
12002 return constant_boolean_node (0, type);
12003 }
12004 }
12005 }
12006
12007 /* Comparisons with the highest or lowest possible integer of
12008 the specified precision will have known values. */
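/* E.g. for unsigned char X, X > 255 folds to false, X <= 255 to
   true, X >= 255 to X == 255, and X < 255 to X != 255.  */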
12009 {
12010 tree arg1_type = TREE_TYPE (arg1);
12011 unsigned int prec = TYPE_PRECISION (arg1_type);
12012
12013 if (TREE_CODE (arg1) == INTEGER_CST
12014 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12015 {
12016 wide_int max = wi::max_value (arg1_type);
12017 wide_int signed_max = wi::max_value (prec, SIGNED);
12018 wide_int min = wi::min_value (arg1_type);
12019
12020 if (wi::eq_p (arg1, max))
12021 switch (code)
12022 {
12023 case GT_EXPR:
12024 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12025
12026 case GE_EXPR:
12027 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12028
12029 case LE_EXPR:
12030 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12031
12032 case LT_EXPR:
12033 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12034
12035 /* The GE_EXPR and LT_EXPR cases above are not normally
12036 reached because of previous transformations. */
12037
12038 default:
12039 break;
12040 }
12041 else if (wi::eq_p (arg1, max - 1))
12042 switch (code)
12043 {
12044 case GT_EXPR:
12045 arg1 = const_binop (PLUS_EXPR, arg1,
12046 build_int_cst (TREE_TYPE (arg1), 1));
12047 return fold_build2_loc (loc, EQ_EXPR, type,
12048 fold_convert_loc (loc,
12049 TREE_TYPE (arg1), arg0),
12050 arg1);
12051 case LE_EXPR:
12052 arg1 = const_binop (PLUS_EXPR, arg1,
12053 build_int_cst (TREE_TYPE (arg1), 1));
12054 return fold_build2_loc (loc, NE_EXPR, type,
12055 fold_convert_loc (loc, TREE_TYPE (arg1),
12056 arg0),
12057 arg1);
12058 default:
12059 break;
12060 }
12061 else if (wi::eq_p (arg1, min))
12062 switch (code)
12063 {
12064 case LT_EXPR:
12065 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12066
12067 case LE_EXPR:
12068 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12069
12070 case GE_EXPR:
12071 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12072
12073 case GT_EXPR:
12074 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12075
12076 default:
12077 break;
12078 }
12079 else if (wi::eq_p (arg1, min + 1))
12080 switch (code)
12081 {
12082 case GE_EXPR:
12083 arg1 = const_binop (MINUS_EXPR, arg1,
12084 build_int_cst (TREE_TYPE (arg1), 1));
12085 return fold_build2_loc (loc, NE_EXPR, type,
12086 fold_convert_loc (loc,
12087 TREE_TYPE (arg1), arg0),
12088 arg1);
12089 case LT_EXPR:
12090 arg1 = const_binop (MINUS_EXPR, arg1,
12091 build_int_cst (TREE_TYPE (arg1), 1));
12092 return fold_build2_loc (loc, EQ_EXPR, type,
12093 fold_convert_loc (loc, TREE_TYPE (arg1),
12094 arg0),
12095 arg1);
12096 default:
12097 break;
12098 }
12099
12100 else if (wi::eq_p (arg1, signed_max)
12101 && TYPE_UNSIGNED (arg1_type)
12102 /* We will flip the signedness of the comparison operator
12103 associated with the mode of arg1, so the sign bit is
12104 specified by this mode. Check that arg1 is the signed
12105 max associated with this sign bit. */
12106 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12107 /* signed_type does not work on pointer types. */
12108 && INTEGRAL_TYPE_P (arg1_type))
12109 {
12110 /* The following case also applies to X < signed_max+1
12111 and X >= signed_max+1 because of previous transformations.  */
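/* E.g. for 32-bit unsigned X, X <= 0x7fffffff becomes
   (int) X >= 0 and X > 0x7fffffff becomes (int) X < 0.  */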
12112 if (code == LE_EXPR || code == GT_EXPR)
12113 {
12114 tree st = signed_type_for (arg1_type);
12115 return fold_build2_loc (loc,
12116 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12117 type, fold_convert_loc (loc, st, arg0),
12118 build_int_cst (st, 0));
12119 }
12120 }
12121 }
12122 }
12123
12124 /* If we are comparing an ABS_EXPR with a constant, we can
12125 convert all the cases into explicit comparisons, but they may
12126 well not be faster than doing the ABS and one comparison.
12127 But ABS (X) <= C is a range comparison, which becomes a subtraction
12128 and a comparison, and is probably faster. */
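/* E.g. ABS (X) <= 7 becomes X >= -7 && X <= 7.  */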
12129 if (code == LE_EXPR
12130 && TREE_CODE (arg1) == INTEGER_CST
12131 && TREE_CODE (arg0) == ABS_EXPR
12132 && ! TREE_SIDE_EFFECTS (arg0)
12133 && (0 != (tem = negate_expr (arg1)))
12134 && TREE_CODE (tem) == INTEGER_CST
12135 && !TREE_OVERFLOW (tem))
12136 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12137 build2 (GE_EXPR, type,
12138 TREE_OPERAND (arg0, 0), tem),
12139 build2 (LE_EXPR, type,
12140 TREE_OPERAND (arg0, 0), arg1));
12141
12142 /* Convert ABS_EXPR<x> >= 0 to true. */
12143 strict_overflow_p = false;
12144 if (code == GE_EXPR
12145 && (integer_zerop (arg1)
12146 || (! HONOR_NANS (arg0)
12147 && real_zerop (arg1)))
12148 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12149 {
12150 if (strict_overflow_p)
12151 fold_overflow_warning (("assuming signed overflow does not occur "
12152 "when simplifying comparison of "
12153 "absolute value and zero"),
12154 WARN_STRICT_OVERFLOW_CONDITIONAL);
12155 return omit_one_operand_loc (loc, type,
12156 constant_boolean_node (true, type),
12157 arg0);
12158 }
12159
12160 /* Convert ABS_EXPR<x> < 0 to false. */
12161 strict_overflow_p = false;
12162 if (code == LT_EXPR
12163 && (integer_zerop (arg1) || real_zerop (arg1))
12164 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12165 {
12166 if (strict_overflow_p)
12167 fold_overflow_warning (("assuming signed overflow does not occur "
12168 "when simplifying comparison of "
12169 "absolute value and zero"),
12170 WARN_STRICT_OVERFLOW_CONDITIONAL);
12171 return omit_one_operand_loc (loc, type,
12172 constant_boolean_node (false, type),
12173 arg0);
12174 }
12175
12176 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12177 and similarly for >= into !=. */
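/* For unsigned X, X < (1 << Y) holds iff all bits of X from bit Y
   upwards are zero, i.e. iff X >> Y == 0.  */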
12178 if ((code == LT_EXPR || code == GE_EXPR)
12179 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12180 && TREE_CODE (arg1) == LSHIFT_EXPR
12181 && integer_onep (TREE_OPERAND (arg1, 0)))
12182 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12183 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12184 TREE_OPERAND (arg1, 1)),
12185 build_zero_cst (TREE_TYPE (arg0)));
12186
12187 /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
12188 otherwise Y might be >= # of bits in X's type and thus e.g.
12189 (unsigned char) (1 << Y) for Y == 15 might be 0.
12190 If the cast is widening, then 1 << Y should have unsigned type,
12191 otherwise if Y is number of bits in the signed shift type minus 1,
12192 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
12193 Y == 31 might be 0xffffffff80000000.  */
12194 if ((code == LT_EXPR || code == GE_EXPR)
12195 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12196 && CONVERT_EXPR_P (arg1)
12197 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12198 && (element_precision (TREE_TYPE (arg1))
12199 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12200 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12201 || (element_precision (TREE_TYPE (arg1))
12202 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12203 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12204 {
12205 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12206 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12207 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12208 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12209 build_zero_cst (TREE_TYPE (arg0)));
12210 }
12211
12212 return NULL_TREE;
12213
12214 case UNORDERED_EXPR:
12215 case ORDERED_EXPR:
12216 case UNLT_EXPR:
12217 case UNLE_EXPR:
12218 case UNGT_EXPR:
12219 case UNGE_EXPR:
12220 case UNEQ_EXPR:
12221 case LTGT_EXPR:
12222 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12223 {
12224 t1 = fold_relational_const (code, type, arg0, arg1);
12225 if (t1 != NULL_TREE)
12226 return t1;
12227 }
12228
12229 /* If the first operand is NaN, the result is constant. */
12230 if (TREE_CODE (arg0) == REAL_CST
12231 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12232 && (code != LTGT_EXPR || ! flag_trapping_math))
12233 {
12234 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12235 ? integer_zero_node
12236 : integer_one_node;
12237 return omit_one_operand_loc (loc, type, t1, arg1);
12238 }
12239
12240 /* If the second operand is NaN, the result is constant. */
12241 if (TREE_CODE (arg1) == REAL_CST
12242 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12243 && (code != LTGT_EXPR || ! flag_trapping_math))
12244 {
12245 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12246 ? integer_zero_node
12247 : integer_one_node;
12248 return omit_one_operand_loc (loc, type, t1, arg0);
12249 }
12250
12251 /* Simplify unordered comparison of something with itself. */
12252 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12253 && operand_equal_p (arg0, arg1, 0))
12254 return constant_boolean_node (1, type);
12255
12256 if (code == LTGT_EXPR
12257 && !flag_trapping_math
12258 && operand_equal_p (arg0, arg1, 0))
12259 return constant_boolean_node (0, type);
12260
12261 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12262 {
12263 tree targ0 = strip_float_extensions (arg0);
12264 tree targ1 = strip_float_extensions (arg1);
12265 tree newtype = TREE_TYPE (targ0);
12266
12267 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12268 newtype = TREE_TYPE (targ1);
12269
12270 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12271 return fold_build2_loc (loc, code, type,
12272 fold_convert_loc (loc, newtype, targ0),
12273 fold_convert_loc (loc, newtype, targ1));
12274 }
12275
12276 return NULL_TREE;
12277
12278 case COMPOUND_EXPR:
12279 /* When pedantic, a compound expression can be neither an lvalue
12280 nor an integer constant expression. */
12281 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12282 return NULL_TREE;
12283 /* Don't let (0, 0) be a null pointer constant.  */
12284 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12285 : fold_convert_loc (loc, type, arg1);
12286 return pedantic_non_lvalue_loc (loc, tem);
12287
12288 case ASSERT_EXPR:
12289 /* An ASSERT_EXPR should never be passed to fold_binary. */
12290 gcc_unreachable ();
12291
12292 default:
12293 return NULL_TREE;
12294 } /* switch (code) */
12295 }
12296
12297 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12298 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12299 of GOTO_EXPR. */
12300
12301 static tree
12302 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
12303 {
12304 switch (TREE_CODE (*tp))
12305 {
12306 case LABEL_EXPR:
12307 return *tp;
12308
12309 case GOTO_EXPR:
12310 *walk_subtrees = 0;
12311
12312 /* ... fall through ... */
12313
12314 default:
12315 return NULL_TREE;
12316 }
12317 }
12318
12319 /* Return whether the sub-tree ST contains a label which is accessible from
12320 outside the sub-tree. */
12321
12322 static bool
12323 contains_label_p (tree st)
12324 {
12325 return
12326 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
12327 }
12328
12329 /* Fold a ternary expression of code CODE and type TYPE with operands
12330 OP0, OP1, and OP2. Return the folded expression if folding is
12331 successful. Otherwise, return NULL_TREE. */
12332
12333 tree
12334 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12335 tree op0, tree op1, tree op2)
12336 {
12337 tree tem;
12338 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12339 enum tree_code_class kind = TREE_CODE_CLASS (code);
12340
12341 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12342 && TREE_CODE_LENGTH (code) == 3);
12343
12344 /* If this is a commutative operation, and OP0 is a constant, move it
12345 to OP1 to reduce the number of tests below. */
12346 if (commutative_ternary_tree_code (code)
12347 && tree_swap_operands_p (op0, op1, true))
12348 return fold_build3_loc (loc, code, type, op1, op0, op2);
12349
12350 tem = generic_simplify (loc, code, type, op0, op1, op2);
12351 if (tem)
12352 return tem;
12353
12354 /* Strip any conversions that don't change the mode. This is safe
12355 for every expression, except for a comparison expression because
12356 its signedness is derived from its operands. So, in the latter
12357 case, only strip conversions that don't change the signedness.
12358
12359 Note that this is done as an internal manipulation within the
12360 constant folder, in order to find the simplest representation of
12361 the arguments so that their form can be studied.  In any case,
12362 the appropriate type conversions should be put back in the tree
12363 that will get out of the constant folder. */
12364 if (op0)
12365 {
12366 arg0 = op0;
12367 STRIP_NOPS (arg0);
12368 }
12369
12370 if (op1)
12371 {
12372 arg1 = op1;
12373 STRIP_NOPS (arg1);
12374 }
12375
12376 if (op2)
12377 {
12378 arg2 = op2;
12379 STRIP_NOPS (arg2);
12380 }
12381
12382 switch (code)
12383 {
12384 case COMPONENT_REF:
12385 if (TREE_CODE (arg0) == CONSTRUCTOR
12386 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12387 {
12388 unsigned HOST_WIDE_INT idx;
12389 tree field, value;
12390 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12391 if (field == arg1)
12392 return value;
12393 }
12394 return NULL_TREE;
12395
12396 case COND_EXPR:
12397 case VEC_COND_EXPR:
12398 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12399 so all simple results must be passed through pedantic_non_lvalue. */
12400 if (TREE_CODE (arg0) == INTEGER_CST)
12401 {
12402 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12403 tem = integer_zerop (arg0) ? op2 : op1;
12404 /* Only optimize constant conditions when the selected branch
12405 has the same type as the COND_EXPR. This avoids optimizing
12406 away "c ? x : throw", where the throw has a void type.
12407 Avoid throwing away the operand that contains a label.  */
12408 if ((!TREE_SIDE_EFFECTS (unused_op)
12409 || !contains_label_p (unused_op))
12410 && (! VOID_TYPE_P (TREE_TYPE (tem))
12411 || VOID_TYPE_P (type)))
12412 return pedantic_non_lvalue_loc (loc, tem);
12413 return NULL_TREE;
12414 }
12415 else if (TREE_CODE (arg0) == VECTOR_CST)
12416 {
12417 if ((TREE_CODE (arg1) == VECTOR_CST
12418 || TREE_CODE (arg1) == CONSTRUCTOR)
12419 && (TREE_CODE (arg2) == VECTOR_CST
12420 || TREE_CODE (arg2) == CONSTRUCTOR))
12421 {
12422 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
12423 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
12424 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
12425 for (i = 0; i < nelts; i++)
12426 {
12427 tree val = VECTOR_CST_ELT (arg0, i);
12428 if (integer_all_onesp (val))
12429 sel[i] = i;
12430 else if (integer_zerop (val))
12431 sel[i] = nelts + i;
12432 else /* Currently unreachable. */
12433 return NULL_TREE;
12434 }
12435 tree t = fold_vec_perm (type, arg1, arg2, sel);
12436 if (t != NULL_TREE)
12437 return t;
12438 }
12439 }
12440
12441 /* If we have A op B ? A : C, we may be able to convert this to a
12442 simpler expression, depending on the operation and the values
12443 of B and C. Signed zeros prevent all of these transformations,
12444 for reasons given above each one.
12445
12446 Also try swapping the arguments and inverting the conditional. */
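/* E.g. X > Y ? X : Y can become MAX_EXPR <X, Y> here, provided
   NaNs and signed zeros do not get in the way.  */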
12447 if (COMPARISON_CLASS_P (arg0)
12448 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12449 arg1, TREE_OPERAND (arg0, 1))
12450 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
12451 {
12452 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12453 if (tem)
12454 return tem;
12455 }
12456
12457 if (COMPARISON_CLASS_P (arg0)
12458 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12459 op2,
12460 TREE_OPERAND (arg0, 1))
12461 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12462 {
12463 location_t loc0 = expr_location_or (arg0, loc);
12464 tem = fold_invert_truthvalue (loc0, arg0);
12465 if (tem && COMPARISON_CLASS_P (tem))
12466 {
12467 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12468 if (tem)
12469 return tem;
12470 }
12471 }
12472
12473 /* If the second operand is simpler than the third, swap them
12474 since that produces better jump optimization results. */
12475 if (truth_value_p (TREE_CODE (arg0))
12476 && tree_swap_operands_p (op1, op2, false))
12477 {
12478 location_t loc0 = expr_location_or (arg0, loc);
12479 /* See if this can be inverted. If it can't, possibly because
12480 it was a floating-point inequality comparison, don't do
12481 anything. */
12482 tem = fold_invert_truthvalue (loc0, arg0);
12483 if (tem)
12484 return fold_build3_loc (loc, code, type, tem, op2, op1);
12485 }
12486
12487 /* Convert A ? 1 : 0 to simply A. */
12488 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12489 : (integer_onep (op1)
12490 && !VECTOR_TYPE_P (type)))
12491 && integer_zerop (op2)
12492 /* If we try to convert OP0 to our type, the
12493 call to fold will try to move the conversion inside
12494 a COND, which will recurse. In that case, the COND_EXPR
12495 is probably the best choice, so leave it alone. */
12496 && type == TREE_TYPE (arg0))
12497 return pedantic_non_lvalue_loc (loc, arg0);
12498
12499 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12500 over COND_EXPR in cases such as floating point comparisons. */
12501 if (integer_zerop (op1)
12502 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
12503 : (integer_onep (op2)
12504 && !VECTOR_TYPE_P (type)))
12505 && truth_value_p (TREE_CODE (arg0)))
12506 return pedantic_non_lvalue_loc (loc,
12507 fold_convert_loc (loc, type,
12508 invert_truthvalue_loc (loc,
12509 arg0)));
12510
12511 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
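/* E.g. for signed char A, A < 0 ? -128 : 0 becomes A & -128,
   -128 being the sign bit mask of A's type.  */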
12512 if (TREE_CODE (arg0) == LT_EXPR
12513 && integer_zerop (TREE_OPERAND (arg0, 1))
12514 && integer_zerop (op2)
12515 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12516 {
12517 /* sign_bit_p looks through both zero and sign extensions,
12518 but for this optimization only sign extensions are
12519 usable. */
12520 tree tem2 = TREE_OPERAND (arg0, 0);
12521 while (tem != tem2)
12522 {
12523 if (TREE_CODE (tem2) != NOP_EXPR
12524 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12525 {
12526 tem = NULL_TREE;
12527 break;
12528 }
12529 tem2 = TREE_OPERAND (tem2, 0);
12530 }
12531 /* sign_bit_p only checks ARG1 bits within A's precision.
12532 If <sign bit of A> has wider type than A, bits outside
12533 of A's precision in <sign bit of A> need to be checked.
12534 If they are all 0, this optimization needs to be done
12535 in unsigned A's type; if they are all 1, in signed A's
12536 type; otherwise this can't be done.  */
12537 if (tem
12538 && TYPE_PRECISION (TREE_TYPE (tem))
12539 < TYPE_PRECISION (TREE_TYPE (arg1))
12540 && TYPE_PRECISION (TREE_TYPE (tem))
12541 < TYPE_PRECISION (type))
12542 {
12543 int inner_width, outer_width;
12544 tree tem_type;
12545
12546 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12547 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12548 if (outer_width > TYPE_PRECISION (type))
12549 outer_width = TYPE_PRECISION (type);
12550
12551 wide_int mask = wi::shifted_mask
12552 (inner_width, outer_width - inner_width, false,
12553 TYPE_PRECISION (TREE_TYPE (arg1)));
12554
12555 wide_int common = mask & arg1;
12556 if (common == mask)
12557 {
12558 tem_type = signed_type_for (TREE_TYPE (tem));
12559 tem = fold_convert_loc (loc, tem_type, tem);
12560 }
12561 else if (common == 0)
12562 {
12563 tem_type = unsigned_type_for (TREE_TYPE (tem));
12564 tem = fold_convert_loc (loc, tem_type, tem);
12565 }
12566 else
12567 tem = NULL;
12568 }
12569
12570 if (tem)
12571 return
12572 fold_convert_loc (loc, type,
12573 fold_build2_loc (loc, BIT_AND_EXPR,
12574 TREE_TYPE (tem), tem,
12575 fold_convert_loc (loc,
12576 TREE_TYPE (tem),
12577 arg1)));
12578 }
12579
12580 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12581 already handled above. */
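/* E.g. (A >> 4) & 1 ? 16 : 0 becomes A & 16.  */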
12582 if (TREE_CODE (arg0) == BIT_AND_EXPR
12583 && integer_onep (TREE_OPERAND (arg0, 1))
12584 && integer_zerop (op2)
12585 && integer_pow2p (arg1))
12586 {
12587 tree tem = TREE_OPERAND (arg0, 0);
12588 STRIP_NOPS (tem);
12589 if (TREE_CODE (tem) == RSHIFT_EXPR
12590 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12591 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12592 tree_to_uhwi (TREE_OPERAND (tem, 1)))
12593 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12594 TREE_OPERAND (tem, 0), arg1);
12595 }
12596
12597 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12598 is probably obsolete because the first operand should be a
12599 truth value (that's why we have the two cases above), but let's
12600 leave it in until we can confirm this for all front-ends. */
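/* E.g. (A & 4) != 0 ? 4 : 0 becomes A & 4.  */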
12601 if (integer_zerop (op2)
12602 && TREE_CODE (arg0) == NE_EXPR
12603 && integer_zerop (TREE_OPERAND (arg0, 1))
12604 && integer_pow2p (arg1)
12605 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12606 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12607 arg1, OEP_ONLY_CONST))
12608 return pedantic_non_lvalue_loc (loc,
12609 fold_convert_loc (loc, type,
12610 TREE_OPERAND (arg0, 0)));
12611
12612 /* Disable the transformations below for vectors, since
12613 fold_binary_op_with_conditional_arg may undo them immediately,
12614 yielding an infinite loop. */
12615 if (code == VEC_COND_EXPR)
12616 return NULL_TREE;
12617
12618 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12619 if (integer_zerop (op2)
12620 && truth_value_p (TREE_CODE (arg0))
12621 && truth_value_p (TREE_CODE (arg1))
12622 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12623 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12624 : TRUTH_ANDIF_EXPR,
12625 type, fold_convert_loc (loc, type, arg0), arg1);
12626
12627 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12628 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12629 && truth_value_p (TREE_CODE (arg0))
12630 && truth_value_p (TREE_CODE (arg1))
12631 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12632 {
12633 location_t loc0 = expr_location_or (arg0, loc);
12634 /* Only perform transformation if ARG0 is easily inverted. */
12635 tem = fold_invert_truthvalue (loc0, arg0);
12636 if (tem)
12637 return fold_build2_loc (loc, code == VEC_COND_EXPR
12638 ? BIT_IOR_EXPR
12639 : TRUTH_ORIF_EXPR,
12640 type, fold_convert_loc (loc, type, tem),
12641 arg1);
12642 }
12643
12644 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12645 if (integer_zerop (arg1)
12646 && truth_value_p (TREE_CODE (arg0))
12647 && truth_value_p (TREE_CODE (op2))
12648 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12649 {
12650 location_t loc0 = expr_location_or (arg0, loc);
12651 /* Only perform transformation if ARG0 is easily inverted. */
12652 tem = fold_invert_truthvalue (loc0, arg0);
12653 if (tem)
12654 return fold_build2_loc (loc, code == VEC_COND_EXPR
12655 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12656 type, fold_convert_loc (loc, type, tem),
12657 op2);
12658 }
12659
12660 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12661 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12662 && truth_value_p (TREE_CODE (arg0))
12663 && truth_value_p (TREE_CODE (op2))
12664 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12665 return fold_build2_loc (loc, code == VEC_COND_EXPR
12666 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12667 type, fold_convert_loc (loc, type, arg0), op2);
12668
12669 return NULL_TREE;
12670
12671 case CALL_EXPR:
12672 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12673 of fold_ternary on them. */
12674 gcc_unreachable ();
12675
12676 case BIT_FIELD_REF:
12677 if ((TREE_CODE (arg0) == VECTOR_CST
12678 || (TREE_CODE (arg0) == CONSTRUCTOR
12679 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
12680 && (type == TREE_TYPE (TREE_TYPE (arg0))
12681 || (TREE_CODE (type) == VECTOR_TYPE
12682 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
12683 {
12684 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12685 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12686 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12687 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12688
12689 if (n != 0
12690 && (idx % width) == 0
12691 && (n % width) == 0
12692 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12693 {
12694 idx = idx / width;
12695 n = n / width;
12696
12697 if (TREE_CODE (arg0) == VECTOR_CST)
12698 {
12699 if (n == 1)
12700 return VECTOR_CST_ELT (arg0, idx);
12701
12702 tree *vals = XALLOCAVEC (tree, n);
12703 for (unsigned i = 0; i < n; ++i)
12704 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
12705 return build_vector (type, vals);
12706 }
12707
12708 /* Constructor elements can be subvectors. */
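/* E.g. a V8SI constructor may consist of two V4SI elements, in
   which case K is 4 and each constructor element covers four of
   the vector's subparts.  */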
12709 unsigned HOST_WIDE_INT k = 1;
12710 if (CONSTRUCTOR_NELTS (arg0) != 0)
12711 {
12712 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
12713 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
12714 k = TYPE_VECTOR_SUBPARTS (cons_elem);
12715 }
12716
12717 /* We keep an exact subset of the constructor elements. */
12718 if ((idx % k) == 0 && (n % k) == 0)
12719 {
12720 if (CONSTRUCTOR_NELTS (arg0) == 0)
12721 return build_constructor (type, NULL);
12722 idx /= k;
12723 n /= k;
12724 if (n == 1)
12725 {
12726 if (idx < CONSTRUCTOR_NELTS (arg0))
12727 return CONSTRUCTOR_ELT (arg0, idx)->value;
12728 return build_zero_cst (type);
12729 }
12730
12731 vec<constructor_elt, va_gc> *vals;
12732 vec_alloc (vals, n);
12733 for (unsigned i = 0;
12734 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
12735 ++i)
12736 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
12737 CONSTRUCTOR_ELT
12738 (arg0, idx + i)->value);
12739 return build_constructor (type, vals);
12740 }
12741 /* The bitfield references a single constructor element. */
12742 else if (idx + n <= (idx / k + 1) * k)
12743 {
12744 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
12745 return build_zero_cst (type);
12746 else if (n == k)
12747 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
12748 else
12749 return fold_build3_loc (loc, code, type,
12750 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
12751 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
12752 }
12753 }
12754 }
12755
12756 /* A bit-field-ref that referenced the full argument can be stripped. */
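/* E.g. BIT_FIELD_REF <X, 32, 0> of a 32-bit integer X is just
   (type) X.  */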
12757 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12758 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
12759 && integer_zerop (op2))
12760 return fold_convert_loc (loc, type, arg0);
12761
12762 /* On constants we can use native encode/interpret to constant
12763 fold (nearly) all BIT_FIELD_REFs. */
12764 if (CONSTANT_CLASS_P (arg0)
12765 && can_native_interpret_type_p (type)
12766 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
12767 /* This limitation should not be necessary; we just need to
12768 round this up to mode size.  */
12769 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
12770 /* Need bit-shifting of the buffer to relax the following. */
12771 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
12772 {
12773 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12774 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12775 unsigned HOST_WIDE_INT clen;
12776 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
12777 /* ??? We cannot tell native_encode_expr to start at
12778 some random byte only. So limit us to a reasonable amount
12779 of work. */
12780 if (clen <= 4096)
12781 {
12782 unsigned char *b = XALLOCAVEC (unsigned char, clen);
12783 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
12784 if (len > 0
12785 && len * BITS_PER_UNIT >= bitpos + bitsize)
12786 {
12787 tree v = native_interpret_expr (type,
12788 b + bitpos / BITS_PER_UNIT,
12789 bitsize / BITS_PER_UNIT);
12790 if (v)
12791 return v;
12792 }
12793 }
12794 }
12795
12796 return NULL_TREE;
12797
12798 case FMA_EXPR:
12799 /* For integers we can decompose the FMA if possible. */
12800 if (TREE_CODE (arg0) == INTEGER_CST
12801 && TREE_CODE (arg1) == INTEGER_CST)
12802 return fold_build2_loc (loc, PLUS_EXPR, type,
12803 const_binop (MULT_EXPR, arg0, arg1), arg2);
12804 if (integer_zerop (arg2))
12805 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12806
12807 return fold_fma (loc, type, arg0, arg1, arg2);
12808
12809 case VEC_PERM_EXPR:
12810 if (TREE_CODE (arg2) == VECTOR_CST)
12811 {
12812 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
12813 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
12814 unsigned char *sel2 = sel + nelts;
12815 bool need_mask_canon = false;
12816 bool need_mask_canon2 = false;
12817 bool all_in_vec0 = true;
12818 bool all_in_vec1 = true;
12819 bool maybe_identity = true;
12820 bool single_arg = (op0 == op1);
12821 bool changed = false;
12822
12823 mask2 = 2 * nelts - 1;
12824 mask = single_arg ? (nelts - 1) : mask2;
12825 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
12826 for (i = 0; i < nelts; i++)
12827 {
12828 tree val = VECTOR_CST_ELT (arg2, i);
12829 if (TREE_CODE (val) != INTEGER_CST)
12830 return NULL_TREE;
12831
12832 /* Make sure that the perm value is in an acceptable
12833 range. */
12834 wide_int t = val;
12835 need_mask_canon |= wi::gtu_p (t, mask);
12836 need_mask_canon2 |= wi::gtu_p (t, mask2);
12837 sel[i] = t.to_uhwi () & mask;
12838 sel2[i] = t.to_uhwi () & mask2;
12839
12840 if (sel[i] < nelts)
12841 all_in_vec1 = false;
12842 else
12843 all_in_vec0 = false;
12844
12845 if ((sel[i] & (nelts-1)) != i)
12846 maybe_identity = false;
12847 }
12848
12849 if (maybe_identity)
12850 {
12851 if (all_in_vec0)
12852 return op0;
12853 if (all_in_vec1)
12854 return op1;
12855 }
12856
12857 if (all_in_vec0)
12858 op1 = op0;
12859 else if (all_in_vec1)
12860 {
12861 op0 = op1;
12862 for (i = 0; i < nelts; i++)
12863 sel[i] -= nelts;
12864 need_mask_canon = true;
12865 }
12866
12867 if ((TREE_CODE (op0) == VECTOR_CST
12868 || TREE_CODE (op0) == CONSTRUCTOR)
12869 && (TREE_CODE (op1) == VECTOR_CST
12870 || TREE_CODE (op1) == CONSTRUCTOR))
12871 {
12872 tree t = fold_vec_perm (type, op0, op1, sel);
12873 if (t != NULL_TREE)
12874 return t;
12875 }
12876
12877 if (op0 == op1 && !single_arg)
12878 changed = true;
12879
12880 /* Some targets are deficient and fail to expand a single
12881 argument permutation while still allowing an equivalent
12882 2-argument version. */
12883 if (need_mask_canon && arg2 == op2
12884 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12885 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12886 {
12887 need_mask_canon = need_mask_canon2;
12888 sel = sel2;
12889 }
12890
12891 if (need_mask_canon && arg2 == op2)
12892 {
12893 tree *tsel = XALLOCAVEC (tree, nelts);
12894 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12895 for (i = 0; i < nelts; i++)
12896 tsel[i] = build_int_cst (eltype, sel[i]);
12897 op2 = build_vector (TREE_TYPE (arg2), tsel);
12898 changed = true;
12899 }
12900
12901 if (changed)
12902 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12903 }
12904 return NULL_TREE;
12905
12906 default:
12907 return NULL_TREE;
12908 } /* switch (code) */
12909 }
12910
12911 /* Perform constant folding and related simplification of EXPR.
12912 The related simplifications include x*1 => x, x*0 => 0, etc.,
12913 and application of the associative law.
12914 NOP_EXPR conversions may be removed freely (as long as we
12915 are careful not to change the type of the overall expression).
12916 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12917 but we can constant-fold them if they have constant operands. */
12918
12919 #ifdef ENABLE_FOLD_CHECKING
12920 # define fold(x) fold_1 (x)
12921 static tree fold_1 (tree);
12922 static
12923 #endif
12924 tree
12925 fold (tree expr)
12926 {
12927 const tree t = expr;
12928 enum tree_code code = TREE_CODE (t);
12929 enum tree_code_class kind = TREE_CODE_CLASS (code);
12930 tree tem;
12931 location_t loc = EXPR_LOCATION (expr);
12932
12933 /* Return right away if a constant. */
12934 if (kind == tcc_constant)
12935 return t;
12936
12937 /* CALL_EXPR-like objects with variable numbers of operands are
12938 treated specially. */
12939 if (kind == tcc_vl_exp)
12940 {
12941 if (code == CALL_EXPR)
12942 {
12943 tem = fold_call_expr (loc, expr, false);
12944 return tem ? tem : expr;
12945 }
12946 return expr;
12947 }
12948
12949 if (IS_EXPR_CODE_CLASS (kind))
12950 {
12951 tree type = TREE_TYPE (t);
12952 tree op0, op1, op2;
12953
12954 switch (TREE_CODE_LENGTH (code))
12955 {
12956 case 1:
12957 op0 = TREE_OPERAND (t, 0);
12958 tem = fold_unary_loc (loc, code, type, op0);
12959 return tem ? tem : expr;
12960 case 2:
12961 op0 = TREE_OPERAND (t, 0);
12962 op1 = TREE_OPERAND (t, 1);
12963 tem = fold_binary_loc (loc, code, type, op0, op1);
12964 return tem ? tem : expr;
12965 case 3:
12966 op0 = TREE_OPERAND (t, 0);
12967 op1 = TREE_OPERAND (t, 1);
12968 op2 = TREE_OPERAND (t, 2);
12969 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12970 return tem ? tem : expr;
12971 default:
12972 break;
12973 }
12974 }
12975
12976 switch (code)
12977 {
12978 case ARRAY_REF:
12979 {
12980 tree op0 = TREE_OPERAND (t, 0);
12981 tree op1 = TREE_OPERAND (t, 1);
12982
12983 if (TREE_CODE (op1) == INTEGER_CST
12984 && TREE_CODE (op0) == CONSTRUCTOR
12985 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12986 {
12987 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12988 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12989 unsigned HOST_WIDE_INT begin = 0;
12990
12991 /* Find a matching index by means of a binary search. */
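/* This assumes the constructor elements are stored in ascending
   index order, with a RANGE_EXPR index covering a contiguous span
   of array indexes.  */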
12992 while (begin != end)
12993 {
12994 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12995 tree index = (*elts)[middle].index;
12996
12997 if (TREE_CODE (index) == INTEGER_CST
12998 && tree_int_cst_lt (index, op1))
12999 begin = middle + 1;
13000 else if (TREE_CODE (index) == INTEGER_CST
13001 && tree_int_cst_lt (op1, index))
13002 end = middle;
13003 else if (TREE_CODE (index) == RANGE_EXPR
13004 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13005 begin = middle + 1;
13006 else if (TREE_CODE (index) == RANGE_EXPR
13007 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13008 end = middle;
13009 else
13010 return (*elts)[middle].value;
13011 }
13012 }
13013
13014 return t;
13015 }
13016
13017 /* Return a VECTOR_CST if possible. */
13018 case CONSTRUCTOR:
13019 {
13020 tree type = TREE_TYPE (t);
13021 if (TREE_CODE (type) != VECTOR_TYPE)
13022 return t;
13023
13024 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13025 unsigned HOST_WIDE_INT idx, pos = 0;
13026 tree value;
13027
13028 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13029 {
13030 if (!CONSTANT_CLASS_P (value))
13031 return t;
13032 if (TREE_CODE (value) == VECTOR_CST)
13033 {
13034 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13035 vec[pos++] = VECTOR_CST_ELT (value, i);
13036 }
13037 else
13038 vec[pos++] = value;
13039 }
13040 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13041 vec[pos] = build_zero_cst (TREE_TYPE (type));
13042
13043 return build_vector (type, vec);
13044 }
13045
13046 case CONST_DECL:
13047 return fold (DECL_INITIAL (t));
13048
13049 default:
13050 return t;
13051 } /* switch (code) */
13052 }
13053
13054 #ifdef ENABLE_FOLD_CHECKING
13055 #undef fold
13056
13057 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13058 hash_table<nofree_ptr_hash<const tree_node> > *);
13059 static void fold_check_failed (const_tree, const_tree);
13060 void print_fold_checksum (const_tree);
13061
13062 /* When --enable-checking=fold, compute a digest of expr before
13063 and after the actual fold call to verify that fold did not
13064 accidentally change the original expr.  */
13065
13066 tree
13067 fold (tree expr)
13068 {
13069 tree ret;
13070 struct md5_ctx ctx;
13071 unsigned char checksum_before[16], checksum_after[16];
13072 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13073
13074 md5_init_ctx (&ctx);
13075 fold_checksum_tree (expr, &ctx, &ht);
13076 md5_finish_ctx (&ctx, checksum_before);
13077 ht.empty ();
13078
13079 ret = fold_1 (expr);
13080
13081 md5_init_ctx (&ctx);
13082 fold_checksum_tree (expr, &ctx, &ht);
13083 md5_finish_ctx (&ctx, checksum_after);
13084
13085 if (memcmp (checksum_before, checksum_after, 16))
13086 fold_check_failed (expr, ret);
13087
13088 return ret;
13089 }
13090
13091 void
13092 print_fold_checksum (const_tree expr)
13093 {
13094 struct md5_ctx ctx;
13095 unsigned char checksum[16], cnt;
13096 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13097
13098 md5_init_ctx (&ctx);
13099 fold_checksum_tree (expr, &ctx, &ht);
13100 md5_finish_ctx (&ctx, checksum);
13101 for (cnt = 0; cnt < 16; ++cnt)
13102 fprintf (stderr, "%02x", checksum[cnt]);
13103 putc ('\n', stderr);
13104 }
13105
13106 static void
13107 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13108 {
13109 internal_error ("fold check: original tree changed by fold");
13110 }
13111
13112 static void
13113 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13114 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13115 {
13116 const tree_node **slot;
13117 enum tree_code code;
13118 union tree_node buf;
13119 int i, len;
13120
13121 recursive_label:
13122 if (expr == NULL)
13123 return;
13124 slot = ht->find_slot (expr, INSERT);
13125 if (*slot != NULL)
13126 return;
13127 *slot = expr;
13128 code = TREE_CODE (expr);
13129 if (TREE_CODE_CLASS (code) == tcc_declaration
13130 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13131 {
13132 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13133 memcpy ((char *) &buf, expr, tree_size (expr));
13134 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13135 buf.decl_with_vis.symtab_node = NULL;
13136 expr = (tree) &buf;
13137 }
13138 else if (TREE_CODE_CLASS (code) == tcc_type
13139 && (TYPE_POINTER_TO (expr)
13140 || TYPE_REFERENCE_TO (expr)
13141 || TYPE_CACHED_VALUES_P (expr)
13142 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13143 || TYPE_NEXT_VARIANT (expr)))
13144 {
13145 /* Allow these fields to be modified. */
13146 tree tmp;
13147 memcpy ((char *) &buf, expr, tree_size (expr));
13148 expr = tmp = (tree) &buf;
13149 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13150 TYPE_POINTER_TO (tmp) = NULL;
13151 TYPE_REFERENCE_TO (tmp) = NULL;
13152 TYPE_NEXT_VARIANT (tmp) = NULL;
13153 if (TYPE_CACHED_VALUES_P (tmp))
13154 {
13155 TYPE_CACHED_VALUES_P (tmp) = 0;
13156 TYPE_CACHED_VALUES (tmp) = NULL;
13157 }
13158 }
13159 md5_process_bytes (expr, tree_size (expr), ctx);
13160 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13161 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13162 if (TREE_CODE_CLASS (code) != tcc_type
13163 && TREE_CODE_CLASS (code) != tcc_declaration
13164 && code != TREE_LIST
13165 && code != SSA_NAME
13166 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13167 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13168 switch (TREE_CODE_CLASS (code))
13169 {
13170 case tcc_constant:
13171 switch (code)
13172 {
13173 case STRING_CST:
13174 md5_process_bytes (TREE_STRING_POINTER (expr),
13175 TREE_STRING_LENGTH (expr), ctx);
13176 break;
13177 case COMPLEX_CST:
13178 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13179 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13180 break;
13181 case VECTOR_CST:
13182 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
13183 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
13184 break;
13185 default:
13186 break;
13187 }
13188 break;
13189 case tcc_exceptional:
13190 switch (code)
13191 {
13192 case TREE_LIST:
13193 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13194 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13195 expr = TREE_CHAIN (expr);
13196 goto recursive_label;
13197 break;
13198 case TREE_VEC:
13199 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13200 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13201 break;
13202 default:
13203 break;
13204 }
13205 break;
13206 case tcc_expression:
13207 case tcc_reference:
13208 case tcc_comparison:
13209 case tcc_unary:
13210 case tcc_binary:
13211 case tcc_statement:
13212 case tcc_vl_exp:
13213 len = TREE_OPERAND_LENGTH (expr);
13214 for (i = 0; i < len; ++i)
13215 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13216 break;
13217 case tcc_declaration:
13218 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13219 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13220 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13221 {
13222 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13223 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13224 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13225 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13226 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13227 }
13228
13229 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13230 {
13231 if (TREE_CODE (expr) == FUNCTION_DECL)
13232 {
13233 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13234 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13235 }
13236 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13237 }
13238 break;
13239 case tcc_type:
13240 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13241 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13242 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13243 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13244 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13245 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13246 if (INTEGRAL_TYPE_P (expr)
13247 || SCALAR_FLOAT_TYPE_P (expr))
13248 {
13249 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13250 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13251 }
13252 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13253 if (TREE_CODE (expr) == RECORD_TYPE
13254 || TREE_CODE (expr) == UNION_TYPE
13255 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13256 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13257 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13258 break;
13259 default:
13260 break;
13261 }
13262 }
13263
13264 /* Helper function for outputting the checksum of a tree T. When
13265 debugging with gdb, you can "define mynext" to be "next" followed
13266 by "call debug_fold_checksum (op0)", then just trace down till the
13267 outputs differ. */
13268
13269 DEBUG_FUNCTION void
13270 debug_fold_checksum (const_tree t)
13271 {
13272 int i;
13273 unsigned char checksum[16];
13274 struct md5_ctx ctx;
13275 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13276
13277 md5_init_ctx (&ctx);
13278 fold_checksum_tree (t, &ctx, &ht);
13279 md5_finish_ctx (&ctx, checksum);
13280 ht.empty ();
13281
13282 for (i = 0; i < 16; i++)
13283 fprintf (stderr, "%d ", checksum[i]);
13284
13285 fprintf (stderr, "\n");
13286 }
13287
13288 #endif
13289
13290 /* Fold a unary tree expression with code CODE of type TYPE with an
13291 operand OP0. LOC is the location of the resulting expression.
13292 Return a folded expression if successful. Otherwise, return a tree
13293 expression with code CODE of type TYPE with an operand OP0. */
13294
13295 tree
13296 fold_build1_stat_loc (location_t loc,
13297 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13298 {
13299 tree tem;
13300 #ifdef ENABLE_FOLD_CHECKING
13301 unsigned char checksum_before[16], checksum_after[16];
13302 struct md5_ctx ctx;
13303 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13304
13305 md5_init_ctx (&ctx);
13306 fold_checksum_tree (op0, &ctx, &ht);
13307 md5_finish_ctx (&ctx, checksum_before);
13308 ht.empty ();
13309 #endif
13310
13311 tem = fold_unary_loc (loc, code, type, op0);
13312 if (!tem)
13313 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13314
13315 #ifdef ENABLE_FOLD_CHECKING
13316 md5_init_ctx (&ctx);
13317 fold_checksum_tree (op0, &ctx, &ht);
13318 md5_finish_ctx (&ctx, checksum_after);
13319
13320 if (memcmp (checksum_before, checksum_after, 16))
13321 fold_check_failed (op0, tem);
13322 #endif
13323 return tem;
13324 }
13325
13326 /* Fold a binary tree expression with code CODE of type TYPE with
13327 operands OP0 and OP1. LOC is the location of the resulting
13328 expression. Return a folded expression if successful. Otherwise,
13329 return a tree expression with code CODE of type TYPE with operands
13330 OP0 and OP1. */
13331
13332 tree
13333 fold_build2_stat_loc (location_t loc,
13334 enum tree_code code, tree type, tree op0, tree op1
13335 MEM_STAT_DECL)
13336 {
13337 tree tem;
13338 #ifdef ENABLE_FOLD_CHECKING
13339 unsigned char checksum_before_op0[16],
13340 checksum_before_op1[16],
13341 checksum_after_op0[16],
13342 checksum_after_op1[16];
13343 struct md5_ctx ctx;
13344 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13345
13346 md5_init_ctx (&ctx);
13347 fold_checksum_tree (op0, &ctx, &ht);
13348 md5_finish_ctx (&ctx, checksum_before_op0);
13349 ht.empty ();
13350
13351 md5_init_ctx (&ctx);
13352 fold_checksum_tree (op1, &ctx, &ht);
13353 md5_finish_ctx (&ctx, checksum_before_op1);
13354 ht.empty ();
13355 #endif
13356
13357 tem = fold_binary_loc (loc, code, type, op0, op1);
13358 if (!tem)
13359 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13360
13361 #ifdef ENABLE_FOLD_CHECKING
13362 md5_init_ctx (&ctx);
13363 fold_checksum_tree (op0, &ctx, &ht);
13364 md5_finish_ctx (&ctx, checksum_after_op0);
13365 ht.empty ();
13366
13367 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13368 fold_check_failed (op0, tem);
13369
13370 md5_init_ctx (&ctx);
13371 fold_checksum_tree (op1, &ctx, &ht);
13372 md5_finish_ctx (&ctx, checksum_after_op1);
13373
13374 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13375 fold_check_failed (op1, tem);
13376 #endif
13377 return tem;
13378 }
13379
13380 /* Fold a ternary tree expression with code CODE of type TYPE with
13381 operands OP0, OP1, and OP2. Return a folded expression if
13382 successful. Otherwise, return a tree expression with code CODE of
13383 type TYPE with operands OP0, OP1, and OP2. */
13384
13385 tree
13386 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13387 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13388 {
13389 tree tem;
13390 #ifdef ENABLE_FOLD_CHECKING
13391 unsigned char checksum_before_op0[16],
13392 checksum_before_op1[16],
13393 checksum_before_op2[16],
13394 checksum_after_op0[16],
13395 checksum_after_op1[16],
13396 checksum_after_op2[16];
13397 struct md5_ctx ctx;
13398 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13399
13400 md5_init_ctx (&ctx);
13401 fold_checksum_tree (op0, &ctx, &ht);
13402 md5_finish_ctx (&ctx, checksum_before_op0);
13403 ht.empty ();
13404
13405 md5_init_ctx (&ctx);
13406 fold_checksum_tree (op1, &ctx, &ht);
13407 md5_finish_ctx (&ctx, checksum_before_op1);
13408 ht.empty ();
13409
13410 md5_init_ctx (&ctx);
13411 fold_checksum_tree (op2, &ctx, &ht);
13412 md5_finish_ctx (&ctx, checksum_before_op2);
13413 ht.empty ();
13414 #endif
13415
13416 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13417 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13418 if (!tem)
13419 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13420
13421 #ifdef ENABLE_FOLD_CHECKING
13422 md5_init_ctx (&ctx);
13423 fold_checksum_tree (op0, &ctx, &ht);
13424 md5_finish_ctx (&ctx, checksum_after_op0);
13425 ht.empty ();
13426
13427 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13428 fold_check_failed (op0, tem);
13429
13430 md5_init_ctx (&ctx);
13431 fold_checksum_tree (op1, &ctx, &ht);
13432 md5_finish_ctx (&ctx, checksum_after_op1);
13433 ht.empty ();
13434
13435 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13436 fold_check_failed (op1, tem);
13437
13438 md5_init_ctx (&ctx);
13439 fold_checksum_tree (op2, &ctx, &ht);
13440 md5_finish_ctx (&ctx, checksum_after_op2);
13441
13442 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13443 fold_check_failed (op2, tem);
13444 #endif
13445 return tem;
13446 }
13447
13448 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
13449 arguments in ARGARRAY, and a null static chain.
13450 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13451 of type TYPE from the given operands as constructed by build_call_array. */
13452
13453 tree
13454 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13455 int nargs, tree *argarray)
13456 {
13457 tree tem;
13458 #ifdef ENABLE_FOLD_CHECKING
13459 unsigned char checksum_before_fn[16],
13460 checksum_before_arglist[16],
13461 checksum_after_fn[16],
13462 checksum_after_arglist[16];
13463 struct md5_ctx ctx;
13464 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13465 int i;
13466
13467 md5_init_ctx (&ctx);
13468 fold_checksum_tree (fn, &ctx, &ht);
13469 md5_finish_ctx (&ctx, checksum_before_fn);
13470 ht.empty ();
13471
13472 md5_init_ctx (&ctx);
13473 for (i = 0; i < nargs; i++)
13474 fold_checksum_tree (argarray[i], &ctx, &ht);
13475 md5_finish_ctx (&ctx, checksum_before_arglist);
13476 ht.empty ();
13477 #endif
13478
13479 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13480 if (!tem)
13481 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13482
13483 #ifdef ENABLE_FOLD_CHECKING
13484 md5_init_ctx (&ctx);
13485 fold_checksum_tree (fn, &ctx, &ht);
13486 md5_finish_ctx (&ctx, checksum_after_fn);
13487 ht.empty ();
13488
13489 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13490 fold_check_failed (fn, tem);
13491
13492 md5_init_ctx (&ctx);
13493 for (i = 0; i < nargs; i++)
13494 fold_checksum_tree (argarray[i], &ctx, &ht);
13495 md5_finish_ctx (&ctx, checksum_after_arglist);
13496
13497 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13498 fold_check_failed (NULL_TREE, tem);
13499 #endif
13500 return tem;
13501 }
13502
13503 /* Perform constant folding and related simplification of initializer
13504 expression EXPR. These behave identically to "fold_buildN" but ignore
13505 potential run-time traps and exceptions that fold must preserve. */
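/* E.g. in a static initializer 1.0 / 0.0 may be folded to +Inf
   here even when -ftrapping-math would otherwise make fold keep
   the division around.  */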
13506
13507 #define START_FOLD_INIT \
13508 int saved_signaling_nans = flag_signaling_nans;\
13509 int saved_trapping_math = flag_trapping_math;\
13510 int saved_rounding_math = flag_rounding_math;\
13511 int saved_trapv = flag_trapv;\
13512 int saved_folding_initializer = folding_initializer;\
13513 flag_signaling_nans = 0;\
13514 flag_trapping_math = 0;\
13515 flag_rounding_math = 0;\
13516 flag_trapv = 0;\
13517 folding_initializer = 1;
13518
13519 #define END_FOLD_INIT \
13520 flag_signaling_nans = saved_signaling_nans;\
13521 flag_trapping_math = saved_trapping_math;\
13522 flag_rounding_math = saved_rounding_math;\
13523 flag_trapv = saved_trapv;\
13524 folding_initializer = saved_folding_initializer;
13525
13526 tree
13527 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13528 tree type, tree op)
13529 {
13530 tree result;
13531 START_FOLD_INIT;
13532
13533 result = fold_build1_loc (loc, code, type, op);
13534
13535 END_FOLD_INIT;
13536 return result;
13537 }
13538
13539 tree
13540 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13541 tree type, tree op0, tree op1)
13542 {
13543 tree result;
13544 START_FOLD_INIT;
13545
13546 result = fold_build2_loc (loc, code, type, op0, op1);
13547
13548 END_FOLD_INIT;
13549 return result;
13550 }
13551
13552 tree
13553 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13554 int nargs, tree *argarray)
13555 {
13556 tree result;
13557 START_FOLD_INIT;
13558
13559 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13560
13561 END_FOLD_INIT;
13562 return result;
13563 }
13564
13565 #undef START_FOLD_INIT
13566 #undef END_FOLD_INIT
13567
13568 /* Determine if the first argument is a multiple of the second argument.
13569    Return 0 if it is not, or if we cannot easily determine that it is.
13570
13571 An example of the sort of thing we care about (at this point; this routine
13572 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13573 fold cases do now) is discovering that
13574
13575 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13576
13577 is a multiple of
13578
13579 SAVE_EXPR (J * 8)
13580
13581 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13582
13583 This code also handles discovering that
13584
13585 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13586
13587 is a multiple of 8 so we don't have to worry about dealing with a
13588 possible remainder.
13589
13590 Note that we *look* inside a SAVE_EXPR only to determine how it was
13591 calculated; it is not safe for fold to do much of anything else with the
13592 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13593 at run time. For example, the latter example above *cannot* be implemented
13594 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13595 evaluation time of the original SAVE_EXPR is not necessarily the same at
13596 the time the new expression is evaluated. The only optimization of this
13597 sort that would be valid is changing
13598
13599 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13600
13601 divided by 8 to
13602
13603 SAVE_EXPR (I) * SAVE_EXPR (J)
13604
13605 (where the same SAVE_EXPR (J) is used in the original and the
13606 transformed version). */
13607
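/* For instance, with a signed integer X, multiple_of_p returns 1 for
   TOP == (X & ~7) + 16 and BOTTOM == 8: in the BIT_AND_EXPR case the
   mask ~7 (== -8) is itself a multiple of 8, and the constant 16 is
   handled by the INTEGER_CST case.  */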
13608 int
13609 multiple_of_p (tree type, const_tree top, const_tree bottom)
13610 {
13611 if (operand_equal_p (top, bottom, 0))
13612 return 1;
13613
13614 if (TREE_CODE (type) != INTEGER_TYPE)
13615 return 0;
13616
13617 switch (TREE_CODE (top))
13618 {
13619 case BIT_AND_EXPR:
13620 /* Bitwise and provides a power of two multiple. If the mask is
13621 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13622 if (!integer_pow2p (bottom))
13623 return 0;
13624 /* FALLTHRU */
13625
13626 case MULT_EXPR:
13627 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13628 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13629
13630 case PLUS_EXPR:
13631 case MINUS_EXPR:
13632 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13633 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13634
13635 case LSHIFT_EXPR:
13636 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13637 {
13638 tree op1, t1;
13639
13640 op1 = TREE_OPERAND (top, 1);
13641 /* const_binop may not detect overflow correctly,
13642 so check for it explicitly here. */
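	  /* E.g. for TOP == X << 3 and BOTTOM == 8, 1 << 3 folds to 8
	     below and the recursive call then proves the multiple
	     directly.  */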
13643 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
13644 && 0 != (t1 = fold_convert (type,
13645 const_binop (LSHIFT_EXPR,
13646 size_one_node,
13647 op1)))
13648 && !TREE_OVERFLOW (t1))
13649 return multiple_of_p (type, t1, bottom);
13650 }
13651 return 0;
13652
13653 case NOP_EXPR:
13654       /* Can't handle conversions from non-integral or wider integral types.  */
13655 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13656 || (TYPE_PRECISION (type)
13657 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13658 return 0;
13659
13660 /* .. fall through ... */
13661
13662 case SAVE_EXPR:
13663 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13664
13665 case COND_EXPR:
13666 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13667 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13668
13669 case INTEGER_CST:
13670 if (TREE_CODE (bottom) != INTEGER_CST
13671 || integer_zerop (bottom)
13672 || (TYPE_UNSIGNED (type)
13673 && (tree_int_cst_sgn (top) < 0
13674 || tree_int_cst_sgn (bottom) < 0)))
13675 return 0;
13676 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13677 SIGNED);
13678
13679 default:
13680 return 0;
13681 }
13682 }
13683
13684 /* Return true if CODE or TYPE is known to be non-negative. */
13685
13686 static bool
13687 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13688 {
13689 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13690 && truth_value_p (code))
13691     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13692        have a signed:1 type (where the values are -1 and 0).  */
13693 return true;
13694 return false;
13695 }
13696
13697 /* Return true if (CODE OP0) is known to be non-negative. If the return
13698 value is based on the assumption that signed overflow is undefined,
13699 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13700 *STRICT_OVERFLOW_P. */
13701
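/* E.g. (int) (unsigned short) X is known non-negative on targets where
   short is narrower than int: the CASE_CONVERT handling sees an
   unsigned inner type of smaller precision, so the zero-extended value
   cannot set the sign bit.  */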
13702 bool
13703 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13704 bool *strict_overflow_p)
13705 {
13706 if (TYPE_UNSIGNED (type))
13707 return true;
13708
13709 switch (code)
13710 {
13711 case ABS_EXPR:
13712 /* We can't return 1 if flag_wrapv is set because
13713 ABS_EXPR<INT_MIN> = INT_MIN. */
13714 if (!ANY_INTEGRAL_TYPE_P (type))
13715 return true;
13716 if (TYPE_OVERFLOW_UNDEFINED (type))
13717 {
13718 *strict_overflow_p = true;
13719 return true;
13720 }
13721 break;
13722
13723 case NON_LVALUE_EXPR:
13724 case FLOAT_EXPR:
13725 case FIX_TRUNC_EXPR:
13726 return tree_expr_nonnegative_warnv_p (op0,
13727 strict_overflow_p);
13728
13729 CASE_CONVERT:
13730 {
13731 tree inner_type = TREE_TYPE (op0);
13732 tree outer_type = type;
13733
13734 if (TREE_CODE (outer_type) == REAL_TYPE)
13735 {
13736 if (TREE_CODE (inner_type) == REAL_TYPE)
13737 return tree_expr_nonnegative_warnv_p (op0,
13738 strict_overflow_p);
13739 if (INTEGRAL_TYPE_P (inner_type))
13740 {
13741 if (TYPE_UNSIGNED (inner_type))
13742 return true;
13743 return tree_expr_nonnegative_warnv_p (op0,
13744 strict_overflow_p);
13745 }
13746 }
13747 else if (INTEGRAL_TYPE_P (outer_type))
13748 {
13749 if (TREE_CODE (inner_type) == REAL_TYPE)
13750 return tree_expr_nonnegative_warnv_p (op0,
13751 strict_overflow_p);
13752 if (INTEGRAL_TYPE_P (inner_type))
13753 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13754 && TYPE_UNSIGNED (inner_type);
13755 }
13756 }
13757 break;
13758
13759 default:
13760 return tree_simple_nonnegative_warnv_p (code, type);
13761 }
13762
13763   /* We don't know the sign, so be conservative and return false.  */
13764 return false;
13765 }
13766
13767 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13768 value is based on the assumption that signed overflow is undefined,
13769 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13770 *STRICT_OVERFLOW_P. */
13771
13772 bool
13773 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13774 tree op1, bool *strict_overflow_p)
13775 {
13776 if (TYPE_UNSIGNED (type))
13777 return true;
13778
13779 switch (code)
13780 {
13781 case POINTER_PLUS_EXPR:
13782 case PLUS_EXPR:
13783 if (FLOAT_TYPE_P (type))
13784 return (tree_expr_nonnegative_warnv_p (op0,
13785 strict_overflow_p)
13786 && tree_expr_nonnegative_warnv_p (op1,
13787 strict_overflow_p));
13788
13789 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13790 both unsigned and at least 2 bits shorter than the result. */
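      /* Arithmetically: X < 2**P0 and Y < 2**P1 imply
	 X + Y < 2**(MAX (P0, P1) + 1), so requiring
	 MAX (P0, P1) + 1 < TYPE_PRECISION (type) keeps the result's
	 sign bit clear.  */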
13791 if (TREE_CODE (type) == INTEGER_TYPE
13792 && TREE_CODE (op0) == NOP_EXPR
13793 && TREE_CODE (op1) == NOP_EXPR)
13794 {
13795 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13796 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13797 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13798 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13799 {
13800 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13801 TYPE_PRECISION (inner2)) + 1;
13802 return prec < TYPE_PRECISION (type);
13803 }
13804 }
13805 break;
13806
13807 case MULT_EXPR:
13808 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13809 {
13810 	  /* x * x is always non-negative for floating point x, or for
13811 	     integer x when signed overflow is undefined.  */
13812 if (operand_equal_p (op0, op1, 0)
13813 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
13814 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
13815 {
13816 if (ANY_INTEGRAL_TYPE_P (type)
13817 && TYPE_OVERFLOW_UNDEFINED (type))
13818 *strict_overflow_p = true;
13819 return true;
13820 }
13821 }
13822
13823       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13824 	 both unsigned and their combined precision is less than the result's.  */
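      /* Arithmetically: X < 2**P0 and Y < 2**P1 imply
	 X * Y < 2**(P0 + P1), so P0 + P1 < TYPE_PRECISION (type)
	 guarantees a clear sign bit, matching the check below.  */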
13825 if (TREE_CODE (type) == INTEGER_TYPE
13826 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13827 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13828 {
13829 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13830 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13831 : TREE_TYPE (op0);
13832 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13833 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13834 : TREE_TYPE (op1);
13835
13836 bool unsigned0 = TYPE_UNSIGNED (inner0);
13837 bool unsigned1 = TYPE_UNSIGNED (inner1);
13838
13839 if (TREE_CODE (op0) == INTEGER_CST)
13840 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13841
13842 if (TREE_CODE (op1) == INTEGER_CST)
13843 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13844
13845 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13846 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13847 {
13848 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13849 ? tree_int_cst_min_precision (op0, UNSIGNED)
13850 : TYPE_PRECISION (inner0);
13851
13852 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13853 ? tree_int_cst_min_precision (op1, UNSIGNED)
13854 : TYPE_PRECISION (inner1);
13855
13856 return precision0 + precision1 < TYPE_PRECISION (type);
13857 }
13858 }
13859 return false;
13860
13861 case BIT_AND_EXPR:
13862 case MAX_EXPR:
13863 return (tree_expr_nonnegative_warnv_p (op0,
13864 strict_overflow_p)
13865 || tree_expr_nonnegative_warnv_p (op1,
13866 strict_overflow_p));
13867
13868 case BIT_IOR_EXPR:
13869 case BIT_XOR_EXPR:
13870 case MIN_EXPR:
13871 case RDIV_EXPR:
13872 case TRUNC_DIV_EXPR:
13873 case CEIL_DIV_EXPR:
13874 case FLOOR_DIV_EXPR:
13875 case ROUND_DIV_EXPR:
13876 return (tree_expr_nonnegative_warnv_p (op0,
13877 strict_overflow_p)
13878 && tree_expr_nonnegative_warnv_p (op1,
13879 strict_overflow_p));
13880
13881 case TRUNC_MOD_EXPR:
13882 case CEIL_MOD_EXPR:
13883 case FLOOR_MOD_EXPR:
13884 case ROUND_MOD_EXPR:
13885 return tree_expr_nonnegative_warnv_p (op0,
13886 strict_overflow_p);
13887 default:
13888 return tree_simple_nonnegative_warnv_p (code, type);
13889 }
13890
13891   /* We don't know the sign, so be conservative and return false.  */
13892 return false;
13893 }
13894
13895 /* Return true if T is known to be non-negative. If the return
13896 value is based on the assumption that signed overflow is undefined,
13897 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13898 *STRICT_OVERFLOW_P. */
13899
13900 bool
13901 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13902 {
13903 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13904 return true;
13905
13906 switch (TREE_CODE (t))
13907 {
13908 case INTEGER_CST:
13909 return tree_int_cst_sgn (t) >= 0;
13910
13911 case REAL_CST:
13912 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13913
13914 case FIXED_CST:
13915 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13916
13917 case COND_EXPR:
13918 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13919 strict_overflow_p)
13920 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13921 strict_overflow_p));
13922 default:
13923 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13924 TREE_TYPE (t));
13925 }
13926 /* We don't know sign of `t', so be conservative and return false. */
13927 return false;
13928 }
13929
13930 /* Return true if T is known to be non-negative. If the return
13931 value is based on the assumption that signed overflow is undefined,
13932 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13933 *STRICT_OVERFLOW_P. */
13934
13935 bool
13936 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
13937 tree arg0, tree arg1, bool *strict_overflow_p)
13938 {
13939 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13940 switch (DECL_FUNCTION_CODE (fndecl))
13941 {
13942 CASE_FLT_FN (BUILT_IN_ACOS):
13943 CASE_FLT_FN (BUILT_IN_ACOSH):
13944 CASE_FLT_FN (BUILT_IN_CABS):
13945 CASE_FLT_FN (BUILT_IN_COSH):
13946 CASE_FLT_FN (BUILT_IN_ERFC):
13947 CASE_FLT_FN (BUILT_IN_EXP):
13948 CASE_FLT_FN (BUILT_IN_EXP10):
13949 CASE_FLT_FN (BUILT_IN_EXP2):
13950 CASE_FLT_FN (BUILT_IN_FABS):
13951 CASE_FLT_FN (BUILT_IN_FDIM):
13952 CASE_FLT_FN (BUILT_IN_HYPOT):
13953 CASE_FLT_FN (BUILT_IN_POW10):
13954 CASE_INT_FN (BUILT_IN_FFS):
13955 CASE_INT_FN (BUILT_IN_PARITY):
13956 CASE_INT_FN (BUILT_IN_POPCOUNT):
13957 CASE_INT_FN (BUILT_IN_CLZ):
13958 CASE_INT_FN (BUILT_IN_CLRSB):
13959 case BUILT_IN_BSWAP32:
13960 case BUILT_IN_BSWAP64:
13961 /* Always true. */
13962 return true;
13963
13964 CASE_FLT_FN (BUILT_IN_SQRT):
13965 /* sqrt(-0.0) is -0.0. */
13966 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13967 return true;
13968 return tree_expr_nonnegative_warnv_p (arg0,
13969 strict_overflow_p);
13970
13971 CASE_FLT_FN (BUILT_IN_ASINH):
13972 CASE_FLT_FN (BUILT_IN_ATAN):
13973 CASE_FLT_FN (BUILT_IN_ATANH):
13974 CASE_FLT_FN (BUILT_IN_CBRT):
13975 CASE_FLT_FN (BUILT_IN_CEIL):
13976 CASE_FLT_FN (BUILT_IN_ERF):
13977 CASE_FLT_FN (BUILT_IN_EXPM1):
13978 CASE_FLT_FN (BUILT_IN_FLOOR):
13979 CASE_FLT_FN (BUILT_IN_FMOD):
13980 CASE_FLT_FN (BUILT_IN_FREXP):
13981 CASE_FLT_FN (BUILT_IN_ICEIL):
13982 CASE_FLT_FN (BUILT_IN_IFLOOR):
13983 CASE_FLT_FN (BUILT_IN_IRINT):
13984 CASE_FLT_FN (BUILT_IN_IROUND):
13985 CASE_FLT_FN (BUILT_IN_LCEIL):
13986 CASE_FLT_FN (BUILT_IN_LDEXP):
13987 CASE_FLT_FN (BUILT_IN_LFLOOR):
13988 CASE_FLT_FN (BUILT_IN_LLCEIL):
13989 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13990 CASE_FLT_FN (BUILT_IN_LLRINT):
13991 CASE_FLT_FN (BUILT_IN_LLROUND):
13992 CASE_FLT_FN (BUILT_IN_LRINT):
13993 CASE_FLT_FN (BUILT_IN_LROUND):
13994 CASE_FLT_FN (BUILT_IN_MODF):
13995 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13996 CASE_FLT_FN (BUILT_IN_RINT):
13997 CASE_FLT_FN (BUILT_IN_ROUND):
13998 CASE_FLT_FN (BUILT_IN_SCALB):
13999 CASE_FLT_FN (BUILT_IN_SCALBLN):
14000 CASE_FLT_FN (BUILT_IN_SCALBN):
14001 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14002 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14003 CASE_FLT_FN (BUILT_IN_SINH):
14004 CASE_FLT_FN (BUILT_IN_TANH):
14005 CASE_FLT_FN (BUILT_IN_TRUNC):
14006 /* True if the 1st argument is nonnegative. */
14007 return tree_expr_nonnegative_warnv_p (arg0,
14008 strict_overflow_p);
14009
14010 CASE_FLT_FN (BUILT_IN_FMAX):
14011 /* True if the 1st OR 2nd arguments are nonnegative. */
14012 return (tree_expr_nonnegative_warnv_p (arg0,
14013 strict_overflow_p)
14014 || (tree_expr_nonnegative_warnv_p (arg1,
14015 strict_overflow_p)));
14016
14017 CASE_FLT_FN (BUILT_IN_FMIN):
14018 /* True if the 1st AND 2nd arguments are nonnegative. */
14019 return (tree_expr_nonnegative_warnv_p (arg0,
14020 strict_overflow_p)
14021 && (tree_expr_nonnegative_warnv_p (arg1,
14022 strict_overflow_p)));
14023
14024 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14025 /* True if the 2nd argument is nonnegative. */
14026 return tree_expr_nonnegative_warnv_p (arg1,
14027 strict_overflow_p);
14028
14029 CASE_FLT_FN (BUILT_IN_POWI):
14030 /* True if the 1st argument is nonnegative or the second
14031 argument is an even integer. */
14032 if (TREE_CODE (arg1) == INTEGER_CST
14033 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14034 return true;
14035 return tree_expr_nonnegative_warnv_p (arg0,
14036 strict_overflow_p);
14037
14038 CASE_FLT_FN (BUILT_IN_POW):
14039 /* True if the 1st argument is nonnegative or the second
14040 argument is an even integer valued real. */
14041 if (TREE_CODE (arg1) == REAL_CST)
14042 {
14043 REAL_VALUE_TYPE c;
14044 HOST_WIDE_INT n;
14045
14046 c = TREE_REAL_CST (arg1);
14047 n = real_to_integer (&c);
14048 if ((n & 1) == 0)
14049 {
14050 REAL_VALUE_TYPE cint;
14051 real_from_integer (&cint, VOIDmode, n, SIGNED);
14052 if (real_identical (&c, &cint))
14053 return true;
14054 }
14055 }
14056 return tree_expr_nonnegative_warnv_p (arg0,
14057 strict_overflow_p);
14058
14059 default:
14060 break;
14061 }
14062 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14063 type);
14064 }
14065
14066 /* Return true if T is known to be non-negative. If the return
14067 value is based on the assumption that signed overflow is undefined,
14068 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14069 *STRICT_OVERFLOW_P. */
14070
14071 static bool
14072 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14073 {
14074 enum tree_code code = TREE_CODE (t);
14075 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14076 return true;
14077
14078 switch (code)
14079 {
14080 case TARGET_EXPR:
14081 {
14082 tree temp = TARGET_EXPR_SLOT (t);
14083 t = TARGET_EXPR_INITIAL (t);
14084
14085 /* If the initializer is non-void, then it's a normal expression
14086 that will be assigned to the slot. */
14087 if (!VOID_TYPE_P (t))
14088 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14089
14090 /* Otherwise, the initializer sets the slot in some way. One common
14091 way is an assignment statement at the end of the initializer. */
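	/* E.g. for TARGET_EXPR <D.1, { tmp = foo (); D.1 = ABS_EXPR <tmp>; }>
	   (an illustrative dump), the loop below walks to the trailing
	   MODIFY_EXPR and tests its right-hand side, ABS_EXPR <tmp>.  */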
14092 while (1)
14093 {
14094 if (TREE_CODE (t) == BIND_EXPR)
14095 t = expr_last (BIND_EXPR_BODY (t));
14096 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14097 || TREE_CODE (t) == TRY_CATCH_EXPR)
14098 t = expr_last (TREE_OPERAND (t, 0));
14099 else if (TREE_CODE (t) == STATEMENT_LIST)
14100 t = expr_last (t);
14101 else
14102 break;
14103 }
14104 if (TREE_CODE (t) == MODIFY_EXPR
14105 && TREE_OPERAND (t, 0) == temp)
14106 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14107 strict_overflow_p);
14108
14109 return false;
14110 }
14111
14112 case CALL_EXPR:
14113 {
14114 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14115 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14116
14117 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14118 get_callee_fndecl (t),
14119 arg0,
14120 arg1,
14121 strict_overflow_p);
14122 }
14123 case COMPOUND_EXPR:
14124 case MODIFY_EXPR:
14125 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14126 strict_overflow_p);
14127 case BIND_EXPR:
14128 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14129 strict_overflow_p);
14130 case SAVE_EXPR:
14131 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14132 strict_overflow_p);
14133
14134 default:
14135 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14136 TREE_TYPE (t));
14137 }
14138
14139 /* We don't know sign of `t', so be conservative and return false. */
14140 return false;
14141 }
14142
14143 /* Return true if T is known to be non-negative. If the return
14144 value is based on the assumption that signed overflow is undefined,
14145 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14146 *STRICT_OVERFLOW_P. */
14147
14148 bool
14149 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14150 {
14151 enum tree_code code;
14152 if (t == error_mark_node)
14153 return false;
14154
14155 code = TREE_CODE (t);
14156 switch (TREE_CODE_CLASS (code))
14157 {
14158 case tcc_binary:
14159 case tcc_comparison:
14160 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14161 TREE_TYPE (t),
14162 TREE_OPERAND (t, 0),
14163 TREE_OPERAND (t, 1),
14164 strict_overflow_p);
14165
14166 case tcc_unary:
14167 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14168 TREE_TYPE (t),
14169 TREE_OPERAND (t, 0),
14170 strict_overflow_p);
14171
14172 case tcc_constant:
14173 case tcc_declaration:
14174 case tcc_reference:
14175 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14176
14177 default:
14178 break;
14179 }
14180
14181 switch (code)
14182 {
14183 case TRUTH_AND_EXPR:
14184 case TRUTH_OR_EXPR:
14185 case TRUTH_XOR_EXPR:
14186 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14187 TREE_TYPE (t),
14188 TREE_OPERAND (t, 0),
14189 TREE_OPERAND (t, 1),
14190 strict_overflow_p);
14191 case TRUTH_NOT_EXPR:
14192 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14193 TREE_TYPE (t),
14194 TREE_OPERAND (t, 0),
14195 strict_overflow_p);
14196
14197 case COND_EXPR:
14198 case CONSTRUCTOR:
14199 case OBJ_TYPE_REF:
14200 case ASSERT_EXPR:
14201 case ADDR_EXPR:
14202 case WITH_SIZE_EXPR:
14203 case SSA_NAME:
14204 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14205
14206 default:
14207 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14208 }
14209 }
14210
14211 /* Return true if `t' is known to be non-negative. Handle warnings
14212 about undefined signed overflow. */
14213
14214 bool
14215 tree_expr_nonnegative_p (tree t)
14216 {
14217 bool ret, strict_overflow_p;
14218
14219 strict_overflow_p = false;
14220 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14221 if (strict_overflow_p)
14222 fold_overflow_warning (("assuming signed overflow does not occur when "
14223 "determining that expression is always "
14224 "non-negative"),
14225 WARN_STRICT_OVERFLOW_MISC);
14226 return ret;
14227 }
14228
14229
14230 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14231 For floating point we further ensure that T is not denormal.
14232    Similar logic is present in nonzero_address in rtlanal.c.
14233
14234 If the return value is based on the assumption that signed overflow
14235 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14236 change *STRICT_OVERFLOW_P. */
14237
14238 bool
14239 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14240 bool *strict_overflow_p)
14241 {
14242 switch (code)
14243 {
14244 case ABS_EXPR:
14245 return tree_expr_nonzero_warnv_p (op0,
14246 strict_overflow_p);
14247
14248 case NOP_EXPR:
14249 {
14250 tree inner_type = TREE_TYPE (op0);
14251 tree outer_type = type;
14252
14253 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14254 && tree_expr_nonzero_warnv_p (op0,
14255 strict_overflow_p));
14256 }
14257 break;
14258
14259 case NON_LVALUE_EXPR:
14260 return tree_expr_nonzero_warnv_p (op0,
14261 strict_overflow_p);
14262
14263 default:
14264 break;
14265 }
14266
14267 return false;
14268 }
14269
14270 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14271 For floating point we further ensure that T is not denormal.
14272    Similar logic is present in nonzero_address in rtlanal.c.
14273
14274 If the return value is based on the assumption that signed overflow
14275 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14276 change *STRICT_OVERFLOW_P. */
14277
14278 bool
14279 tree_binary_nonzero_warnv_p (enum tree_code code,
14280 tree type,
14281 tree op0,
14282 tree op1, bool *strict_overflow_p)
14283 {
14284 bool sub_strict_overflow_p;
14285 switch (code)
14286 {
14287 case POINTER_PLUS_EXPR:
14288 case PLUS_EXPR:
14289 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14290 {
14291 	  /* In the presence of negative values it is hard
14292 	     to say anything definite.  */
14293 sub_strict_overflow_p = false;
14294 if (!tree_expr_nonnegative_warnv_p (op0,
14295 &sub_strict_overflow_p)
14296 || !tree_expr_nonnegative_warnv_p (op1,
14297 &sub_strict_overflow_p))
14298 return false;
14299 	  /* One of the operands must be positive and the other non-negative.  */
14300 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14301 overflows, on a twos-complement machine the sum of two
14302 nonnegative numbers can never be zero. */
14303 return (tree_expr_nonzero_warnv_p (op0,
14304 strict_overflow_p)
14305 || tree_expr_nonzero_warnv_p (op1,
14306 strict_overflow_p));
14307 }
14308 break;
14309
14310 case MULT_EXPR:
14311 if (TYPE_OVERFLOW_UNDEFINED (type))
14312 {
14313 if (tree_expr_nonzero_warnv_p (op0,
14314 strict_overflow_p)
14315 && tree_expr_nonzero_warnv_p (op1,
14316 strict_overflow_p))
14317 {
14318 *strict_overflow_p = true;
14319 return true;
14320 }
14321 }
14322 break;
14323
14324 case MIN_EXPR:
14325 sub_strict_overflow_p = false;
14326 if (tree_expr_nonzero_warnv_p (op0,
14327 &sub_strict_overflow_p)
14328 && tree_expr_nonzero_warnv_p (op1,
14329 &sub_strict_overflow_p))
14330 {
14331 	  if (sub_strict_overflow_p)
14332 	    *strict_overflow_p = true;
	  /* The minimum of two nonzero operands is one of them, hence
	     itself nonzero.  */
	  return true;
14333 	}
14334 break;
14335
14336 case MAX_EXPR:
14337 sub_strict_overflow_p = false;
14338 if (tree_expr_nonzero_warnv_p (op0,
14339 &sub_strict_overflow_p))
14340 {
14341 if (sub_strict_overflow_p)
14342 *strict_overflow_p = true;
14343
14344 /* When both operands are nonzero, then MAX must be too. */
14345 if (tree_expr_nonzero_warnv_p (op1,
14346 strict_overflow_p))
14347 return true;
14348
14349 /* MAX where operand 0 is positive is positive. */
14350 return tree_expr_nonnegative_warnv_p (op0,
14351 strict_overflow_p);
14352 }
14353 /* MAX where operand 1 is positive is positive. */
14354 else if (tree_expr_nonzero_warnv_p (op1,
14355 &sub_strict_overflow_p)
14356 && tree_expr_nonnegative_warnv_p (op1,
14357 &sub_strict_overflow_p))
14358 {
14359 if (sub_strict_overflow_p)
14360 *strict_overflow_p = true;
14361 return true;
14362 }
14363 break;
14364
14365 case BIT_IOR_EXPR:
14366 return (tree_expr_nonzero_warnv_p (op1,
14367 strict_overflow_p)
14368 || tree_expr_nonzero_warnv_p (op0,
14369 strict_overflow_p));
14370
14371 default:
14372 break;
14373 }
14374
14375 return false;
14376 }
14377
14378 /* Return true when T is an address and is known to be nonzero.
14379 For floating point we further ensure that T is not denormal.
14380    Similar logic is present in nonzero_address in rtlanal.c.
14381
14382 If the return value is based on the assumption that signed overflow
14383 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14384 change *STRICT_OVERFLOW_P. */
14385
14386 bool
14387 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14388 {
14389 bool sub_strict_overflow_p;
14390 switch (TREE_CODE (t))
14391 {
14392 case INTEGER_CST:
14393 return !integer_zerop (t);
14394
14395 case ADDR_EXPR:
14396 {
14397 tree base = TREE_OPERAND (t, 0);
14398
14399 if (!DECL_P (base))
14400 base = get_base_address (base);
14401
14402 if (!base)
14403 return false;
14404
14405 /* For objects in symbol table check if we know they are non-zero.
14406 Don't do anything for variables and functions before symtab is built;
14407 it is quite possible that they will be declared weak later. */
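	/* E.g. given "extern int a __attribute__ ((weak));", &a may
	   legitimately compare equal to NULL when the symbol remains
	   undefined, so the answer must come from the symbol table.  */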
14408 if (DECL_P (base) && decl_in_symtab_p (base))
14409 {
14410 struct symtab_node *symbol;
14411
14412 symbol = symtab_node::get_create (base);
14413 if (symbol)
14414 return symbol->nonzero_address ();
14415 else
14416 return false;
14417 }
14418
14419 /* Function local objects are never NULL. */
14420 if (DECL_P (base)
14421 && (DECL_CONTEXT (base)
14422 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14423 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
14424 return true;
14425
14426 /* Constants are never weak. */
14427 if (CONSTANT_CLASS_P (base))
14428 return true;
14429
14430 return false;
14431 }
14432
14433 case COND_EXPR:
14434 sub_strict_overflow_p = false;
14435 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14436 &sub_strict_overflow_p)
14437 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14438 &sub_strict_overflow_p))
14439 {
14440 if (sub_strict_overflow_p)
14441 *strict_overflow_p = true;
14442 return true;
14443 }
14444 break;
14445
14446 default:
14447 break;
14448 }
14449 return false;
14450 }
14451
14452 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14453 attempt to fold the expression to a constant without modifying TYPE,
14454 OP0 or OP1.
14455
14456    If the expression can be simplified to a constant, return the
14457    constant; otherwise return NULL_TREE.  */
14459
14460 tree
14461 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14462 {
14463 tree tem = fold_binary (code, type, op0, op1);
14464 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14465 }
14466
14467 /* Given the components of a unary expression CODE, TYPE and OP0,
14468 attempt to fold the expression to a constant without modifying
14469 TYPE or OP0.
14470
14471    If the expression can be simplified to a constant, return the
14472    constant; otherwise return NULL_TREE.  */
14474
14475 tree
14476 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14477 {
14478 tree tem = fold_unary (code, type, op0);
14479 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14480 }
14481
14482 /* If EXP represents referencing an element in a constant string
14483 (either via pointer arithmetic or array indexing), return the
14484 tree representing the value accessed, otherwise return NULL. */
14485
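/* E.g. both "abc"[1] and *("abc" + 1) fold to the character constant
   'b' in the element type of the string.  */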
14486 tree
14487 fold_read_from_constant_string (tree exp)
14488 {
14489 if ((TREE_CODE (exp) == INDIRECT_REF
14490 || TREE_CODE (exp) == ARRAY_REF)
14491 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14492 {
14493 tree exp1 = TREE_OPERAND (exp, 0);
14494 tree index;
14495 tree string;
14496 location_t loc = EXPR_LOCATION (exp);
14497
14498 if (TREE_CODE (exp) == INDIRECT_REF)
14499 string = string_constant (exp1, &index);
14500 else
14501 {
14502 tree low_bound = array_ref_low_bound (exp);
14503 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14504
14505 /* Optimize the special-case of a zero lower bound.
14506
14507 We convert the low_bound to sizetype to avoid some problems
14508 with constant folding. (E.g. suppose the lower bound is 1,
14509 	     and its mode is QI.  Without the conversion, (ARRAY
14510 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14511 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14512 if (! integer_zerop (low_bound))
14513 index = size_diffop_loc (loc, index,
14514 fold_convert_loc (loc, sizetype, low_bound));
14515
14516 string = exp1;
14517 }
14518
14519 if (string
14520 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14521 && TREE_CODE (string) == STRING_CST
14522 && TREE_CODE (index) == INTEGER_CST
14523 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14524 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14525 == MODE_INT)
14526 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14527 return build_int_cst_type (TREE_TYPE (exp),
14528 (TREE_STRING_POINTER (string)
14529 [TREE_INT_CST_LOW (index)]));
14530 }
14531 return NULL;
14532 }
14533
14534 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14535 an integer constant, real, or fixed-point constant.
14536
14537 TYPE is the type of the result. */
14538
14539 static tree
14540 fold_negate_const (tree arg0, tree type)
14541 {
14542 tree t = NULL_TREE;
14543
14544 switch (TREE_CODE (arg0))
14545 {
14546 case INTEGER_CST:
14547 {
14548 bool overflow;
14549 wide_int val = wi::neg (arg0, &overflow);
14550 t = force_fit_type (type, val, 1,
14551 (overflow | TREE_OVERFLOW (arg0))
14552 && !TYPE_UNSIGNED (type));
14553 break;
14554 }
14555
14556 case REAL_CST:
14557 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14558 break;
14559
14560 case FIXED_CST:
14561 {
14562 FIXED_VALUE_TYPE f;
14563 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14564 &(TREE_FIXED_CST (arg0)), NULL,
14565 TYPE_SATURATING (type));
14566 t = build_fixed (type, f);
14567 /* Propagate overflow flags. */
14568 if (overflow_p | TREE_OVERFLOW (arg0))
14569 TREE_OVERFLOW (t) = 1;
14570 break;
14571 }
14572
14573 default:
14574 gcc_unreachable ();
14575 }
14576
14577 return t;
14578 }
14579
14580 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14581 an integer constant or real constant.
14582
14583 TYPE is the type of the result. */
14584
14585 tree
14586 fold_abs_const (tree arg0, tree type)
14587 {
14588 tree t = NULL_TREE;
14589
14590 switch (TREE_CODE (arg0))
14591 {
14592 case INTEGER_CST:
14593 {
14594 /* If the value is unsigned or non-negative, then the absolute value
14595 is the same as the ordinary value. */
14596 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
14597 t = arg0;
14598
14599 /* If the value is negative, then the absolute value is
14600 its negation. */
14601 else
14602 {
14603 bool overflow;
14604 wide_int val = wi::neg (arg0, &overflow);
14605 t = force_fit_type (type, val, -1,
14606 overflow | TREE_OVERFLOW (arg0));
14607 }
14608 }
14609 break;
14610
14611 case REAL_CST:
14612 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14613 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14614 else
14615 t = arg0;
14616 break;
14617
14618 default:
14619 gcc_unreachable ();
14620 }
14621
14622 return t;
14623 }
14624
14625 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14626 constant. TYPE is the type of the result. */
14627
14628 static tree
14629 fold_not_const (const_tree arg0, tree type)
14630 {
14631 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14632
14633 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
14634 }
14635
14636 /* Given CODE, a relational operator, the target type, TYPE and two
14637 constant operands OP0 and OP1, return the result of the
14638 relational operation. If the result is not a compile time
14639 constant, then return NULL_TREE. */
14640
14641 static tree
14642 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14643 {
14644 int result, invert;
14645
14646 /* From here on, the only cases we handle are when the result is
14647 known to be a constant. */
14648
14649 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14650 {
14651 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14652 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14653
14654 /* Handle the cases where either operand is a NaN. */
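	  /* E.g. NaN == NaN folds to false and NaN != NaN to true,
	     while the signaling comparisons (LT and friends) would
	     raise FE_INVALID and so, under flag_trapping_math, are
	     deliberately left unfolded below.  */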
14655 if (real_isnan (c0) || real_isnan (c1))
14656 {
14657 switch (code)
14658 {
14659 case EQ_EXPR:
14660 case ORDERED_EXPR:
14661 result = 0;
14662 break;
14663
14664 case NE_EXPR:
14665 case UNORDERED_EXPR:
14666 case UNLT_EXPR:
14667 case UNLE_EXPR:
14668 case UNGT_EXPR:
14669 case UNGE_EXPR:
14670 case UNEQ_EXPR:
14671 result = 1;
14672 break;
14673
14674 case LT_EXPR:
14675 case LE_EXPR:
14676 case GT_EXPR:
14677 case GE_EXPR:
14678 case LTGT_EXPR:
14679 if (flag_trapping_math)
14680 return NULL_TREE;
14681 result = 0;
14682 break;
14683
14684 default:
14685 gcc_unreachable ();
14686 }
14687
14688 return constant_boolean_node (result, type);
14689 }
14690
14691 return constant_boolean_node (real_compare (code, c0, c1), type);
14692 }
14693
14694 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14695 {
14696 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14697 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14698 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14699 }
14700
14701 /* Handle equality/inequality of complex constants. */
14702 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14703 {
14704 tree rcond = fold_relational_const (code, type,
14705 TREE_REALPART (op0),
14706 TREE_REALPART (op1));
14707 tree icond = fold_relational_const (code, type,
14708 TREE_IMAGPART (op0),
14709 TREE_IMAGPART (op1));
14710 if (code == EQ_EXPR)
14711 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14712 else if (code == NE_EXPR)
14713 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14714 else
14715 return NULL_TREE;
14716 }
14717
14718 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14719 {
14720 unsigned count = VECTOR_CST_NELTS (op0);
14721 tree *elts = XALLOCAVEC (tree, count);
14722 gcc_assert (VECTOR_CST_NELTS (op1) == count
14723 && TYPE_VECTOR_SUBPARTS (type) == count);
14724
14725 for (unsigned i = 0; i < count; i++)
14726 {
14727 tree elem_type = TREE_TYPE (type);
14728 tree elem0 = VECTOR_CST_ELT (op0, i);
14729 tree elem1 = VECTOR_CST_ELT (op1, i);
14730
14731 tree tem = fold_relational_const (code, elem_type,
14732 elem0, elem1);
14733
14734 if (tem == NULL_TREE)
14735 return NULL_TREE;
14736
14737 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14738 }
14739
14740 return build_vector (type, elts);
14741 }
14742
14743 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14744
14745 To compute GT, swap the arguments and do LT.
14746 To compute GE, do LT and invert the result.
14747 To compute LE, swap the arguments, do LT and invert the result.
14748 To compute NE, do EQ and invert the result.
14749
14750 Therefore, the code below must handle only EQ and LT. */
14751
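  /* E.g. for GE_EXPR on (3, 5): invert becomes 1 and the code LT_EXPR;
     3 < 5 is true, so the inverted result is the correct "false".  */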
14752 if (code == LE_EXPR || code == GT_EXPR)
14753 {
14754 std::swap (op0, op1);
14755 code = swap_tree_comparison (code);
14756 }
14757
14758 /* Note that it is safe to invert for real values here because we
14759      have already handled the one case where it matters.  */
14760
14761 invert = 0;
14762 if (code == NE_EXPR || code == GE_EXPR)
14763 {
14764 invert = 1;
14765 code = invert_tree_comparison (code, false);
14766 }
14767
14768 /* Compute a result for LT or EQ if args permit;
14769      otherwise return NULL_TREE.  */
14770 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14771 {
14772 if (code == EQ_EXPR)
14773 result = tree_int_cst_equal (op0, op1);
14774 else
14775 result = tree_int_cst_lt (op0, op1);
14776 }
14777 else
14778 return NULL_TREE;
14779
14780 if (invert)
14781 result ^= 1;
14782 return constant_boolean_node (result, type);
14783 }
14784
14785 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14786 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14787 itself. */
14788
14789 tree
14790 fold_build_cleanup_point_expr (tree type, tree expr)
14791 {
14792 /* If the expression does not have side effects then we don't have to wrap
14793 it with a cleanup point expression. */
14794 if (!TREE_SIDE_EFFECTS (expr))
14795 return expr;
14796
14797   /* If the expression is a return, check whether the expression inside
14798      the return, or the right-hand side of the modify expression inside
14799      the return, has no side effects.  If either has none, we don't need
14800      to wrap the expression in a cleanup point expression.  Note we don't
14801      check the modify's left-hand side; it should always be a return decl.  */
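  /* E.g. "return <retval> = foo ()" gets the CLEANUP_POINT_EXPR
     because the call may create temporaries with cleanups, whereas
     "return <retval> = x" is returned unwrapped.  */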
14802 if (TREE_CODE (expr) == RETURN_EXPR)
14803 {
14804 tree op = TREE_OPERAND (expr, 0);
14805 if (!op || !TREE_SIDE_EFFECTS (op))
14806 return expr;
14807 op = TREE_OPERAND (op, 1);
14808 if (!TREE_SIDE_EFFECTS (op))
14809 return expr;
14810 }
14811
14812 return build1 (CLEANUP_POINT_EXPR, type, expr);
14813 }
14814
14815 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14816 of an indirection through OP0, or NULL_TREE if no simplification is
14817 possible. */
14818
14819 tree
14820 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14821 {
14822 tree sub = op0;
14823 tree subtype;
14824
14825 STRIP_NOPS (sub);
14826 subtype = TREE_TYPE (sub);
14827 if (!POINTER_TYPE_P (subtype))
14828 return NULL_TREE;
14829
14830 if (TREE_CODE (sub) == ADDR_EXPR)
14831 {
14832 tree op = TREE_OPERAND (sub, 0);
14833 tree optype = TREE_TYPE (op);
14834 /* *&CONST_DECL -> to the value of the const decl. */
14835 if (TREE_CODE (op) == CONST_DECL)
14836 return DECL_INITIAL (op);
14837 /* *&p => p; make sure to handle *&"str"[cst] here. */
14838 if (type == optype)
14839 {
14840 tree fop = fold_read_from_constant_string (op);
14841 if (fop)
14842 return fop;
14843 else
14844 return op;
14845 }
14846 /* *(foo *)&fooarray => fooarray[0] */
14847 else if (TREE_CODE (optype) == ARRAY_TYPE
14848 && type == TREE_TYPE (optype)
14849 && (!in_gimple_form
14850 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14851 {
14852 tree type_domain = TYPE_DOMAIN (optype);
14853 tree min_val = size_zero_node;
14854 if (type_domain && TYPE_MIN_VALUE (type_domain))
14855 min_val = TYPE_MIN_VALUE (type_domain);
14856 if (in_gimple_form
14857 && TREE_CODE (min_val) != INTEGER_CST)
14858 return NULL_TREE;
14859 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14860 NULL_TREE, NULL_TREE);
14861 }
14862 /* *(foo *)&complexfoo => __real__ complexfoo */
14863 else if (TREE_CODE (optype) == COMPLEX_TYPE
14864 && type == TREE_TYPE (optype))
14865 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14866 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14867 else if (TREE_CODE (optype) == VECTOR_TYPE
14868 && type == TREE_TYPE (optype))
14869 {
14870 tree part_width = TYPE_SIZE (type);
14871 tree index = bitsize_int (0);
14872 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14873 }
14874 }
14875
14876 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14877 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14878 {
14879 tree op00 = TREE_OPERAND (sub, 0);
14880 tree op01 = TREE_OPERAND (sub, 1);
14881
14882 STRIP_NOPS (op00);
14883 if (TREE_CODE (op00) == ADDR_EXPR)
14884 {
14885 tree op00type;
14886 op00 = TREE_OPERAND (op00, 0);
14887 op00type = TREE_TYPE (op00);
14888
14889 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
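	  /* E.g. for a four-element vector of 32-bit ints and op01 == 4,
	     part_widthi is 4 bytes and indexi is 32 bits, yielding
	     BIT_FIELD_REF <vectorfoo, 32, 32>, i.e. element 1.  */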
14890 if (TREE_CODE (op00type) == VECTOR_TYPE
14891 && type == TREE_TYPE (op00type))
14892 {
14893 HOST_WIDE_INT offset = tree_to_shwi (op01);
14894 tree part_width = TYPE_SIZE (type);
14895 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14896 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14897 tree index = bitsize_int (indexi);
14898
14899 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14900 return fold_build3_loc (loc,
14901 BIT_FIELD_REF, type, op00,
14902 part_width, index);
14903
14904 }
14905 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14906 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14907 && type == TREE_TYPE (op00type))
14908 {
14909 tree size = TYPE_SIZE_UNIT (type);
14910 if (tree_int_cst_equal (size, op01))
14911 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14912 }
14913 /* ((foo *)&fooarray)[1] => fooarray[1] */
14914 else if (TREE_CODE (op00type) == ARRAY_TYPE
14915 && type == TREE_TYPE (op00type))
14916 {
14917 tree type_domain = TYPE_DOMAIN (op00type);
14918 tree min_val = size_zero_node;
14919 if (type_domain && TYPE_MIN_VALUE (type_domain))
14920 min_val = TYPE_MIN_VALUE (type_domain);
14921 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14922 TYPE_SIZE_UNIT (type));
14923 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14924 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14925 NULL_TREE, NULL_TREE);
14926 }
14927 }
14928 }
14929
14930 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14931 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14932 && type == TREE_TYPE (TREE_TYPE (subtype))
14933 && (!in_gimple_form
14934 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14935 {
14936 tree type_domain;
14937 tree min_val = size_zero_node;
14938 sub = build_fold_indirect_ref_loc (loc, sub);
14939 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14940 if (type_domain && TYPE_MIN_VALUE (type_domain))
14941 min_val = TYPE_MIN_VALUE (type_domain);
14942 if (in_gimple_form
14943 && TREE_CODE (min_val) != INTEGER_CST)
14944 return NULL_TREE;
14945 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14946 NULL_TREE);
14947 }
14948
14949 return NULL_TREE;
14950 }
14951
14952 /* Builds an expression for an indirection through T, simplifying some
14953 cases. */
14954
14955 tree
14956 build_fold_indirect_ref_loc (location_t loc, tree t)
14957 {
14958 tree type = TREE_TYPE (TREE_TYPE (t));
14959 tree sub = fold_indirect_ref_1 (loc, type, t);
14960
14961 if (sub)
14962 return sub;
14963
14964 return build1_loc (loc, INDIRECT_REF, type, t);
14965 }
14966
14967 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14968
14969 tree
14970 fold_indirect_ref_loc (location_t loc, tree t)
14971 {
14972 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14973
14974 if (sub)
14975 return sub;
14976 else
14977 return t;
14978 }
14979
14980 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14981 whose result is ignored. The type of the returned tree need not be
14982 the same as the original expression. */
14983
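/* E.g. when the value of "a + b++" is ignored, the addition is dropped
   and only the side-effecting operand "b++" is kept.  */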
14984 tree
14985 fold_ignored_result (tree t)
14986 {
14987 if (!TREE_SIDE_EFFECTS (t))
14988 return integer_zero_node;
14989
14990 for (;;)
14991 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14992 {
14993 case tcc_unary:
14994 t = TREE_OPERAND (t, 0);
14995 break;
14996
14997 case tcc_binary:
14998 case tcc_comparison:
14999 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15000 t = TREE_OPERAND (t, 0);
15001 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15002 t = TREE_OPERAND (t, 1);
15003 else
15004 return t;
15005 break;
15006
15007 case tcc_expression:
15008 switch (TREE_CODE (t))
15009 {
15010 case COMPOUND_EXPR:
15011 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15012 return t;
15013 t = TREE_OPERAND (t, 0);
15014 break;
15015
15016 case COND_EXPR:
15017 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15018 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15019 return t;
15020 t = TREE_OPERAND (t, 0);
15021 break;
15022
15023 default:
15024 return t;
15025 }
15026 break;
15027
15028 default:
15029 return t;
15030 }
15031 }
15032
15033 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15034
15035 tree
15036 round_up_loc (location_t loc, tree value, unsigned int divisor)
15037 {
15038 tree div = NULL_TREE;
15039
15040 if (divisor == 1)
15041 return value;
15042
15043 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15044      have to do anything.  Only do this when we are not given a
15045      constant, because in that case this check is more expensive than
15046      simply doing the rounding itself.  */
15047 if (TREE_CODE (value) != INTEGER_CST)
15048 {
15049 div = build_int_cst (TREE_TYPE (value), divisor);
15050
15051 if (multiple_of_p (TREE_TYPE (value), value, div))
15052 return value;
15053 }
15054
15055 /* If divisor is a power of two, simplify this to bit manipulation. */
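      /* E.g. rounding 37 up to a multiple of 16:
	 (37 + 15) & -16 == 52 & ~15 == 48.  */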
15056 if (divisor == (divisor & -divisor))
15057 {
15058 if (TREE_CODE (value) == INTEGER_CST)
15059 {
15060 wide_int val = value;
15061 bool overflow_p;
15062
15063 if ((val & (divisor - 1)) == 0)
15064 return value;
15065
15066 overflow_p = TREE_OVERFLOW (value);
15067 val += divisor - 1;
15068 val &= - (int) divisor;
15069 if (val == 0)
15070 overflow_p = true;
15071
15072 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15073 }
15074 else
15075 {
15076 tree t;
15077
15078 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15079 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15080 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15081 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15082 }
15083 }
15084 else
15085 {
15086 if (!div)
15087 div = build_int_cst (TREE_TYPE (value), divisor);
15088 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15089 value = size_binop_loc (loc, MULT_EXPR, value, div);
15090 }
15091
15092 return value;
15093 }
15094
15095 /* Likewise, but round down. */
15096
15097 tree
15098 round_down_loc (location_t loc, tree value, int divisor)
15099 {
15100 tree div = NULL_TREE;
15101
15102 gcc_assert (divisor > 0);
15103 if (divisor == 1)
15104 return value;
15105
15106 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15107      have to do anything.  Only do this when we are not given a
15108      constant, because in that case this check is more expensive than
15109      simply doing the rounding itself.  */
15110 if (TREE_CODE (value) != INTEGER_CST)
15111 {
15112 div = build_int_cst (TREE_TYPE (value), divisor);
15113
15114 if (multiple_of_p (TREE_TYPE (value), value, div))
15115 return value;
15116 }
15117
15118 /* If divisor is a power of two, simplify this to bit manipulation. */
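      /* E.g. rounding 37 down to a multiple of 16:
	 37 & -16 == 37 & ~15 == 32.  */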
15119 if (divisor == (divisor & -divisor))
15120 {
15121 tree t;
15122
15123 t = build_int_cst (TREE_TYPE (value), -divisor);
15124 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15125 }
15126 else
15127 {
15128 if (!div)
15129 div = build_int_cst (TREE_TYPE (value), divisor);
15130 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15131 value = size_binop_loc (loc, MULT_EXPR, value, div);
15132 }
15133
15134 return value;
15135 }
15136
15137 /* Returns a pointer to the base of the object addressed by EXP and
15138    extracts the information about the offset of the access, storing
15139    it in *PBITPOS and *POFFSET.  */
15140
15141 static tree
15142 split_address_to_core_and_offset (tree exp,
15143 HOST_WIDE_INT *pbitpos, tree *poffset)
15144 {
15145 tree core;
15146 machine_mode mode;
15147 int unsignedp, volatilep;
15148 HOST_WIDE_INT bitsize;
15149 location_t loc = EXPR_LOCATION (exp);
15150
15151 if (TREE_CODE (exp) == ADDR_EXPR)
15152 {
15153 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15154 poffset, &mode, &unsignedp, &volatilep,
15155 false);
15156 core = build_fold_addr_expr_loc (loc, core);
15157 }
15158 else
15159 {
15160 core = exp;
15161 *pbitpos = 0;
15162 *poffset = NULL_TREE;
15163 }
15164
15165 return core;
15166 }
15167
15168 /* Returns true if addresses of E1 and E2 differ by a constant, false
15169 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15170
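/* E.g. for "int a[10]" with 32-bit int, E1 == &a[3] and E2 == &a[1]
   share the core &a with bit positions 96 and 32, so *DIFF becomes
   (96 - 32) / 8 == 8.  */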
15171 bool
15172 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15173 {
15174 tree core1, core2;
15175 HOST_WIDE_INT bitpos1, bitpos2;
15176 tree toffset1, toffset2, tdiff, type;
15177
15178 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15179 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15180
15181 if (bitpos1 % BITS_PER_UNIT != 0
15182 || bitpos2 % BITS_PER_UNIT != 0
15183 || !operand_equal_p (core1, core2, 0))
15184 return false;
15185
15186 if (toffset1 && toffset2)
15187 {
15188 type = TREE_TYPE (toffset1);
15189 if (type != TREE_TYPE (toffset2))
15190 toffset2 = fold_convert (type, toffset2);
15191
15192 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15193 if (!cst_and_fits_in_hwi (tdiff))
15194 return false;
15195
15196 *diff = int_cst_value (tdiff);
15197 }
15198 else if (toffset1 || toffset2)
15199 {
15200 /* If only one of the offsets is non-constant, the difference cannot
15201 be a constant. */
15202 return false;
15203 }
15204 else
15205 *diff = 0;
15206
15207 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15208 return true;
15209 }
15210
15211 /* Simplify the floating point expression EXP when the sign of the
15212 result is not significant. Return NULL_TREE if no simplification
15213 is possible. */
15214
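/* E.g. ABS_EXPR <x> * -y strips to x * y (when sign-dependent rounding
   is not honored), which suffices for callers that only care about the
   magnitude of the result.  */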
15215 tree
15216 fold_strip_sign_ops (tree exp)
15217 {
15218 tree arg0, arg1;
15219 location_t loc = EXPR_LOCATION (exp);
15220
15221 switch (TREE_CODE (exp))
15222 {
15223 case ABS_EXPR:
15224 case NEGATE_EXPR:
15225 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15226 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15227
15228 case MULT_EXPR:
15229 case RDIV_EXPR:
15230 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
15231 return NULL_TREE;
15232 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15233 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15234 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15235 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15236 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15237 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15238 break;
15239
15240 case COMPOUND_EXPR:
15241 arg0 = TREE_OPERAND (exp, 0);
15242 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15243 if (arg1)
15244 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15245 break;
15246
15247 case COND_EXPR:
15248 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15249 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15250 if (arg0 || arg1)
15251 return fold_build3_loc (loc,
15252 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15253 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15254 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15255 break;
15256
15257 case CALL_EXPR:
15258 {
15259 const enum built_in_function fcode = builtin_mathfn_code (exp);
15260 switch (fcode)
15261 {
15262 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15263 /* Strip copysign function call, return the 1st argument. */
15264 arg0 = CALL_EXPR_ARG (exp, 0);
15265 arg1 = CALL_EXPR_ARG (exp, 1);
15266 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15267
15268 default:
15269 /* Strip sign ops from the argument of "odd" math functions. */
15270 if (negate_mathfn_p (fcode))
15271 {
15272 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15273 if (arg0)
15274 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15275 }
15276 break;
15277 }
15278 }
15279 break;
15280
15281 default:
15282 break;
15283 }
15284 return NULL_TREE;
15285 }
15286
15287 /* Return OFF converted to a pointer offset type suitable as offset for
15288 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15289 tree
15290 convert_to_ptrofftype_loc (location_t loc, tree off)
15291 {
15292 return fold_convert_loc (loc, sizetype, off);
15293 }
15294
15295 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15296 tree
15297 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15298 {
15299 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15300 ptr, convert_to_ptrofftype_loc (loc, off));
15301 }
15302
15303 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15304 tree
15305 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15306 {
15307 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15308 ptr, size_int (off));
15309 }