* jvspec.c (jvgenmain_spec): Don't handle -fnew-verifier.
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33
34
35 /* If SYM is a constant variable with known value, return the value.
36 NULL_TREE is returned otherwise. */
37
38 tree
39 get_symbol_constant_value (tree sym)
40 {
41 if (TREE_STATIC (sym)
42 && (TREE_READONLY (sym)
43 || TREE_CODE (sym) == CONST_DECL))
44 {
45 tree val = DECL_INITIAL (sym);
46 if (val)
47 {
48 STRIP_NOPS (val);
49 if (is_gimple_min_invariant (val))
50 {
51 if (TREE_CODE (val) == ADDR_EXPR)
52 {
53 tree base = get_base_address (TREE_OPERAND (val, 0));
54 if (base && TREE_CODE (base) == VAR_DECL)
55 {
56 TREE_ADDRESSABLE (base) = 1;
57 if (gimple_referenced_vars (cfun))
58 add_referenced_var (base);
59 }
60 }
61 return val;
62 }
63 }
64 /* Variables declared 'const' without an initializer
65 have zero as the initializer if they may not be
66 overridden at link or run time. */
67 if (!val
68 && !DECL_EXTERNAL (sym)
69 && targetm.binds_local_p (sym)
70 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
71 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
72 return fold_convert (TREE_TYPE (sym), integer_zero_node);
73 }
74
75 return NULL_TREE;
76 }
77
78
79 /* Return true if we may propagate the address expression ADDR into the
80 dereference DEREF and cancel them. */
81
82 bool
83 may_propagate_address_into_dereference (tree addr, tree deref)
84 {
85 gcc_assert (TREE_CODE (deref) == MEM_REF
86 && TREE_CODE (addr) == ADDR_EXPR);
87
88 /* Don't propagate if ADDR's operand has incomplete type. */
89 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
90 return false;
91
92 /* If the address is invariant then we do not need to preserve restrict
93 qualifications. But we do need to preserve volatile qualifiers until
94 we can annotate the folded dereference itself properly. */
95 if (is_gimple_min_invariant (addr)
96 && (!TREE_THIS_VOLATILE (deref)
97 || TYPE_VOLATILE (TREE_TYPE (addr))))
98 return useless_type_conversion_p (TREE_TYPE (deref),
99 TREE_TYPE (TREE_OPERAND (addr, 0)));
100
101 /* Else both the address substitution and the folding must result in
102 a valid useless type conversion sequence. */
103 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
104 TREE_TYPE (addr))
105 && useless_type_conversion_p (TREE_TYPE (deref),
106 TREE_TYPE (TREE_OPERAND (addr, 0))));
107 }
108
109
110 /* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
111    BASE is an array type.  OFFSET is a byte displacement.
112 
113    LOC is the location of the original expression.  */
114 
115 static tree
116 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
117 {
118   tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
119   tree array_type, elt_type, elt_size;
120   tree domain_type;
121 
122   /* If BASE is an ARRAY_REF, we can pick up another offset (this time
123      measured in units of the size of elements type) from that ARRAY_REF).
124      We can't do anything if either is variable.
125 
126      The case we handle here is *(&A[N]+O).  */
127   if (TREE_CODE (base) == ARRAY_REF)
128     {
129       tree low_bound = array_ref_low_bound (base);
130 
131       elt_offset = TREE_OPERAND (base, 1);
132       if (TREE_CODE (low_bound) != INTEGER_CST
133 	  || TREE_CODE (elt_offset) != INTEGER_CST)
134 	return NULL_TREE;
135 
      /* Un-bias the pre-existing index by the array's low bound so the
	 later arithmetic works on zero-based element counts.  */
136       elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
137       base = TREE_OPERAND (base, 0);
138     }
139 
140   /* Ignore stupid user tricks of indexing non-array variables.  */
141   array_type = TREE_TYPE (base);
142   if (TREE_CODE (array_type) != ARRAY_TYPE)
143     return NULL_TREE;
144   elt_type = TREE_TYPE (array_type);
145 
146   /* Use signed size type for intermediate computation on the index.  */
147   idx_type = ssizetype;
148 
149   /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
150      element type (so we can use the alignment if it's not constant).
151      Otherwise, compute the offset as an index by using a division.  If the
152      division isn't exact, then don't do anything.  */
      /* No TYPE_SIZE_UNIT means the element type is incomplete; bail out
	 (NULL is the same value as NULL_TREE here).  */
153   elt_size = TYPE_SIZE_UNIT (elt_type);
154   if (!elt_size)
155     return NULL;
156   if (integer_zerop (offset))
157     {
158       if (TREE_CODE (elt_size) != INTEGER_CST)
159 	elt_size = size_int (TYPE_ALIGN (elt_type));
160 
161       idx = build_int_cst (idx_type, 0);
162     }
163   else
164     {
165       unsigned HOST_WIDE_INT lquo, lrem;
166       HOST_WIDE_INT hquo, hrem;
167       double_int soffset;
168 
169       /* The final array offset should be signed, so we need
170 	 to sign-extend the (possibly pointer) offset here
171 	 and use signed division.  */
172       soffset = double_int_sext (tree_to_double_int (offset),
173 				 TYPE_PRECISION (TREE_TYPE (offset)));
      /* A non-zero remainder means OFFSET does not fall on an element
	 boundary — give up rather than build a bogus index.  */
174       if (TREE_CODE (elt_size) != INTEGER_CST
175 	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
176 				   soffset.low, soffset.high,
177 				   TREE_INT_CST_LOW (elt_size),
178 				   TREE_INT_CST_HIGH (elt_size),
179 				   &lquo, &hquo, &lrem, &hrem)
180 	  || lrem || hrem)
181 	return NULL_TREE;
182 
183       idx = build_int_cst_wide (idx_type, lquo, hquo);
184     }
185 
186   /* Assume the low bound is zero.  If there is a domain type, get the
187      low bound, if any, convert the index into that type, and add the
188      low bound.  */
189   min_idx = build_int_cst (idx_type, 0);
190   domain_type = TYPE_DOMAIN (array_type);
191   if (domain_type)
192     {
193       idx_type = domain_type;
194       if (TYPE_MIN_VALUE (idx_type))
195 	min_idx = TYPE_MIN_VALUE (idx_type);
196       else
197 	min_idx = fold_convert (idx_type, min_idx);
198 
199       if (TREE_CODE (min_idx) != INTEGER_CST)
200 	return NULL_TREE;
201 
202       elt_offset = fold_convert (idx_type, elt_offset);
203     }
204 
205   if (!integer_zerop (min_idx))
206     idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
207   if (!integer_zerop (elt_offset))
208     idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
209 
210   /* Make sure to possibly truncate late after offsetting.  */
211   idx = fold_convert (idx_type, idx);
212 
213   /* We don't want to construct access past array bounds. For example
214        char *(c[4]);
215        c[3][2];
216      should not be simplified into (*c)[14] or tree-vrp will
217      give false warnings.
218      This is only an issue for multi-dimensional arrays.  */
219   if (TREE_CODE (elt_type) == ARRAY_TYPE
220       && domain_type)
221     {
222       if (TYPE_MAX_VALUE (domain_type)
223 	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
224 	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
225 	return NULL_TREE;
226       else if (TYPE_MIN_VALUE (domain_type)
227 	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
228 	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
229 	return NULL_TREE;
230       else if (compare_tree_int (idx, 0) < 0)
231 	return NULL_TREE;
232     }
233 
  /* Success: build BASE[IDX] with the element type and original location.  */
234   {
235     tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
236     SET_EXPR_LOCATION (t, loc);
237     return t;
238   }
239 }
240
241
242 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
243 LOC is the location of original expression.
244
245 Before attempting the conversion strip off existing ADDR_EXPRs. */
246
247 tree
248 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
249 tree orig_type)
250 {
251 tree ret;
252
253 STRIP_NOPS (base);
254 if (TREE_CODE (base) != ADDR_EXPR)
255 return NULL_TREE;
256
257 base = TREE_OPERAND (base, 0);
258 if (types_compatible_p (orig_type, TREE_TYPE (base))
259 && integer_zerop (offset))
260 return base;
261
262 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
263 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
264 return ret;
265 return NULL_TREE;
266 }
267
268 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
269 LOC is the location of the original expression. */
270
271 tree
272 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
273 tree orig_type)
274 {
275 tree base, ret;
276
277 STRIP_NOPS (addr);
278 if (TREE_CODE (addr) != ADDR_EXPR)
279 return NULL_TREE;
280 base = TREE_OPERAND (addr, 0);
281 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
282 if (ret)
283 {
284 ret = build_fold_addr_expr (ret);
285 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
286 return NULL_TREE;
287 SET_EXPR_LOCATION (ret, loc);
288 }
289
290 return ret;
291 }
292
293
294 /* A quaint feature extant in our address arithmetic is that there
295    can be hidden type changes here.  The type of the result need
296    not be the same as the type of the input pointer.
297 
298    What we're after here is an expression of the form
299 	(T *)(&array + const)
300    where array is OP0, const is OP1, RES_TYPE is T and
301    the cast doesn't actually exist, but is implicit in the
302    type of the POINTER_PLUS_EXPR.  We'd like to turn this into
303 	&array[x]
304    which may be able to propagate further.  */
305 
306 tree
307 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
308 {
309   tree ptd_type;
310   tree t;
311 
312   /* The first operand should be an ADDR_EXPR.  */
313   if (TREE_CODE (op0) != ADDR_EXPR)
314     return NULL_TREE;
315   op0 = TREE_OPERAND (op0, 0);
316 
317   /* It had better be a constant.  */
318   if (TREE_CODE (op1) != INTEGER_CST)
319     {
320       /* Or op0 should now be A[0] and the non-constant offset defined
321 	 via a multiplication by the array element size.  */
322       if (TREE_CODE (op0) == ARRAY_REF
323 	  /* As we will end up creating a variable index array access
324 	     in the outermost array dimension make sure there isn't
325 	     a more inner array that the index could overflow to.  */
326 	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
327 	  && integer_zerop (TREE_OPERAND (op0, 1))
328 	  && TREE_CODE (op1) == SSA_NAME)
329 	{
330 	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
331 	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
332 	  if (!host_integerp (elsz, 1)
333 	      || !is_gimple_assign (offset_def))
334 	    return NULL_TREE;
335 
336 	  /* Do not build array references of something that we can't
337 	     see the true number of array dimensions for.  */
338 	  if (!DECL_P (TREE_OPERAND (op0, 0))
339 	      && !handled_component_p (TREE_OPERAND (op0, 0)))
340 	    return NULL_TREE;
341 
	  /* Offset is i * elsz: use i directly as the new index.  */
342 	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
343 	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
344 	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
345 	    return build_fold_addr_expr
346 			  (build4 (ARRAY_REF, TREE_TYPE (op0),
347 				   TREE_OPERAND (op0, 0),
348 				   gimple_assign_rhs1 (offset_def),
349 				   TREE_OPERAND (op0, 2),
350 				   TREE_OPERAND (op0, 3)));
	  /* One-byte elements: the byte offset itself is the index.  */
351 	  else if (integer_onep (elsz)
352 		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
353 	    return build_fold_addr_expr
354 			  (build4 (ARRAY_REF, TREE_TYPE (op0),
355 				   TREE_OPERAND (op0, 0),
356 				   op1,
357 				   TREE_OPERAND (op0, 2),
358 				   TREE_OPERAND (op0, 3)));
359 	}
360       else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
361 	       /* Ditto: avoid creating a variable index that could
		  overflow into a more inner array dimension.  */
362 	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
363 	       && TREE_CODE (op1) == SSA_NAME)
364 	{
365 	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
366 	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
367 	  if (!host_integerp (elsz, 1)
368 	      || !is_gimple_assign (offset_def))
369 	    return NULL_TREE;
370 
371 	  /* Do not build array references of something that we can't
372 	     see the true number of array dimensions for.  */
373 	  if (!DECL_P (op0)
374 	      && !handled_component_p (op0))
375 	    return NULL_TREE;
376 
377 	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
378 	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
379 	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
380 	    return build_fold_addr_expr
381 			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
382 				   op0, gimple_assign_rhs1 (offset_def),
383 				   integer_zero_node, NULL_TREE));
384 	  else if (integer_onep (elsz)
385 		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
386 	    return build_fold_addr_expr
387 			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
388 				   op0, op1,
389 				   integer_zero_node, NULL_TREE));
390 	}
391 
392       return NULL_TREE;
393     }
394 
395   /* If the first operand is an ARRAY_REF, expand it so that we can fold
396      the offset into it.  */
397   while (TREE_CODE (op0) == ARRAY_REF)
398     {
399       tree array_obj = TREE_OPERAND (op0, 0);
400       tree array_idx = TREE_OPERAND (op0, 1);
401       tree elt_type = TREE_TYPE (op0);
402       tree elt_size = TYPE_SIZE_UNIT (elt_type);
403       tree min_idx;
404 
405       if (TREE_CODE (array_idx) != INTEGER_CST)
406 	break;
407       if (TREE_CODE (elt_size) != INTEGER_CST)
408 	break;
409 
410       /* Un-bias the index by the min index of the array type.  */
411       min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
412       if (min_idx)
413 	{
414 	  min_idx = TYPE_MIN_VALUE (min_idx);
415 	  if (min_idx)
416 	    {
417 	      if (TREE_CODE (min_idx) != INTEGER_CST)
418 		break;
419 
420 	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
421 	      if (!integer_zerop (min_idx))
422 		array_idx = int_const_binop (MINUS_EXPR, array_idx,
423 					     min_idx, 0);
424 	    }
425 	}
426 
427       /* Convert the index to a byte offset.  */
428       array_idx = fold_convert (sizetype, array_idx);
429       array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
430 
431       /* Update the operands for the next round, or for folding.  */
432       op1 = int_const_binop (PLUS_EXPR,
433 			     array_idx, op1, 0);
434       op0 = array_obj;
435     }
436 
437   ptd_type = TREE_TYPE (res_type);
438   /* If we want a pointer to void, reconstruct the reference from the
439      array element type.  A pointer to that can be trivially converted
440      to void *.  This happens as we fold (void *)(ptr p+ off).  */
441   if (VOID_TYPE_P (ptd_type)
442       && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
443     ptd_type = TREE_TYPE (TREE_TYPE (op0));
444 
445   /* At which point we can try some of the same things as for indirects.  */
446   t = maybe_fold_offset_to_array_ref (loc, op0, op1);
447   if (t)
448     {
449       t = build_fold_addr_expr (t);
450       if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
451 	return NULL_TREE;
452       SET_EXPR_LOCATION (t, loc);
453     }
454 
455   return t;
456 }
457
458 /* Subroutine of fold_stmt.  We perform several simplifications of the
459    memory reference tree EXPR and make sure to re-gimplify them properly
460    after propagation of constant addresses.  IS_LHS is true if the
461    reference is supposed to be an lvalue.
   Returns the simplified reference or NULL_TREE if nothing changed.  */
462 
463 static tree
464 maybe_fold_reference (tree expr, bool is_lhs)
465 {
466   tree *t = &expr;
467 
  /* Reads of individual characters from string constants fold directly.  */
468   if (TREE_CODE (expr) == ARRAY_REF
469       && !is_lhs)
470     {
471       tree tem = fold_read_from_constant_string (expr);
472       if (tem)
473 	return tem;
474     }
475 
476   /* ??? We might want to open-code the relevant remaining cases
477      to avoid using the generic fold.  */
478   if (handled_component_p (*t)
479       && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
480     {
481       tree tem = fold (*t);
482       if (tem != *t)
483 	return tem;
484     }
485 
  /* Walk down the chain of handled components to the innermost base
     object; *t is updated in place below, rewriting EXPR.  */
486   while (handled_component_p (*t))
487     t = &TREE_OPERAND (*t, 0);
488 
489   /* Fold back MEM_REFs to reference trees.  */
490   if (TREE_CODE (*t) == MEM_REF
491       && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
492       && integer_zerop (TREE_OPERAND (*t, 1))
493       && (TREE_THIS_VOLATILE (*t)
494 	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
495       && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
496       && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
497 	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
498       /* We have to look out here to not drop a required conversion
499 	 from the rhs to the lhs if is_lhs, but we don't have the
500 	 rhs here to verify that.  Thus require strict type
501 	 compatibility.  */
502       && types_compatible_p (TREE_TYPE (*t),
503 			     TREE_TYPE (TREE_OPERAND
504 					  (TREE_OPERAND (*t, 0), 0))))
505     {
506       tree tem;
      /* Replace the MEM_REF base in place and retry folding the whole
	 (now simpler) expression; either result is an improvement.  */
507       *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
508       tem = maybe_fold_reference (expr, is_lhs);
509       if (tem)
510 	return tem;
511       return expr;
512     }
513   /* Canonicalize MEM_REFs invariant address operand.  */
514   else if (TREE_CODE (*t) == MEM_REF
515 	   && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
516 	   && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
517 	   && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
518     {
519       tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
520 			      TREE_OPERAND (*t, 0),
521 			      TREE_OPERAND (*t, 1));
522       if (tem)
523 	{
524 	  *t = tem;
525 	  tem = maybe_fold_reference (expr, is_lhs);
526 	  if (tem)
527 	    return tem;
528 	  return expr;
529 	}
530     }
  /* For rvalues, substitute the known constant value of a decl.  */
531   else if (!is_lhs
532 	   && DECL_P (*t))
533     {
534       tree tem = get_symbol_constant_value (*t);
535       if (tem
536 	  && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
537 	{
538 	  *t = unshare_expr (tem);
539 	  tem = maybe_fold_reference (expr, is_lhs);
540 	  if (tem)
541 	    return tem;
542 	  return expr;
543 	}
544     }
545 
546   return NULL_TREE;
547 }
548
549
550 /* Attempt to fold an assignment statement pointed-to by SI.  Returns a
551    replacement rhs for the statement or NULL_TREE if no simplification
552    could be made.  It is assumed that the operands have been previously
553    folded.  */
554 
555 static tree
556 fold_gimple_assign (gimple_stmt_iterator *si)
557 {
558   gimple stmt = gsi_stmt (*si);
559   enum tree_code subcode = gimple_assign_rhs_code (stmt);
560   location_t loc = gimple_location (stmt);
561 
562   tree result = NULL_TREE;
563 
  /* Dispatch on the shape of the RHS: single operand, unary, binary
     or ternary operation.  */
564   switch (get_gimple_rhs_class (subcode))
565     {
566     case GIMPLE_SINGLE_RHS:
567       {
568 	tree rhs = gimple_assign_rhs1 (stmt);
569 
570 	/* Try to fold a conditional expression.  */
571 	if (TREE_CODE (rhs) == COND_EXPR)
572 	  {
573 	    tree op0 = COND_EXPR_COND (rhs);
574 	    tree tem;
575 	    bool set = false;
576 	    location_t cond_loc = EXPR_LOCATION (rhs);
577 
578 	    if (COMPARISON_CLASS_P (op0))
579 	      {
		/* Defer overflow warnings so we only emit them when the
		   folded condition is actually used.  */
580 		fold_defer_overflow_warnings ();
581 		tem = fold_binary_loc (cond_loc,
582 				       TREE_CODE (op0), TREE_TYPE (op0),
583 				       TREE_OPERAND (op0, 0),
584 				       TREE_OPERAND (op0, 1));
585 		/* This is actually a conditional expression, not a GIMPLE
586 		   conditional statement, however, the valid_gimple_rhs_p
587 		   test still applies.  */
588 		set = (tem && is_gimple_condexpr (tem)
589 		       && valid_gimple_rhs_p (tem));
590 		fold_undefer_overflow_warnings (set, stmt, 0);
591 	      }
592 	    else if (is_gimple_min_invariant (op0))
593 	      {
594 		tem = op0;
595 		set = true;
596 	      }
597 	    else
598 	      return NULL_TREE;
599 
600 	    if (set)
601 	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
602 			      COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
603 	  }
604 
605 	else if (TREE_CODE (rhs) == TARGET_MEM_REF)
606 	  return maybe_fold_tmr (rhs);
607 
608 	else if (REFERENCE_CLASS_P (rhs))
609 	  return maybe_fold_reference (rhs, false);
610 
611 	else if (TREE_CODE (rhs) == ADDR_EXPR)
612 	  {
613 	    tree ref = TREE_OPERAND (rhs, 0);
614 	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p itself (modulo a conversion).  */
615 	    if (tem
616 		&& TREE_CODE (tem) == MEM_REF
617 		&& integer_zerop (TREE_OPERAND (tem, 1)))
618 	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
619 	    else if (tem)
620 	      result = fold_convert (TREE_TYPE (rhs),
621 				     build_fold_addr_expr_loc (loc, tem));
622 	    else if (TREE_CODE (ref) == MEM_REF
623 		     && integer_zerop (TREE_OPERAND (ref, 1)))
624 	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
625 	  }
626 
627 	else if (TREE_CODE (rhs) == CONSTRUCTOR
628 		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
629 		 && (CONSTRUCTOR_NELTS (rhs)
630 		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
631 	  {
632 	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
633 	    unsigned i;
634 	    tree val;
635 
636 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
637 	      if (TREE_CODE (val) != INTEGER_CST
638 		  && TREE_CODE (val) != REAL_CST
639 		  && TREE_CODE (val) != FIXED_CST)
640 		return NULL_TREE;
641 
642 	    return build_vector_from_ctor (TREE_TYPE (rhs),
643 					   CONSTRUCTOR_ELTS (rhs));
644 	  }
645 
646 	else if (DECL_P (rhs))
647 	  return unshare_expr (get_symbol_constant_value (rhs));
648 
649 	/* If we couldn't fold the RHS, hand over to the generic
650 	   fold routines.  */
651 	if (result == NULL_TREE)
652 	  result = fold (rhs);
653 
654 	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
655 	   that may have been added by fold, and "useless" type
656 	   conversions that might now be apparent due to propagation.  */
657 	STRIP_USELESS_TYPE_CONVERSION (result);
658 
659 	if (result != rhs && valid_gimple_rhs_p (result))
660 	  return result;
661 
662 	return NULL_TREE;
663       }
664       break;
665 
666     case GIMPLE_UNARY_RHS:
667       {
668 	tree rhs = gimple_assign_rhs1 (stmt);
669 
670 	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
671 	if (result)
672 	  {
673 	    /* If the operation was a conversion do _not_ mark a
674 	       resulting constant with TREE_OVERFLOW if the original
675 	       constant was not.  These conversions have implementation
676 	       defined behavior and retaining the TREE_OVERFLOW flag
677 	       here would confuse later passes such as VRP.  */
678 	    if (CONVERT_EXPR_CODE_P (subcode)
679 		&& TREE_CODE (result) == INTEGER_CST
680 		&& TREE_CODE (rhs) == INTEGER_CST)
681 	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
682 
683 	    STRIP_USELESS_TYPE_CONVERSION (result);
684 	    if (valid_gimple_rhs_p (result))
685 	      return result;
686 	  }
	/* Pointer-to-pointer conversions may still fold to an array
	   address even when generic folding failed.  */
687 	else if (CONVERT_EXPR_CODE_P (subcode)
688 		 && POINTER_TYPE_P (gimple_expr_type (stmt))
689 		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
690 	  {
691 	    tree type = gimple_expr_type (stmt);
692 	    tree t = maybe_fold_offset_to_address (loc,
693 						   gimple_assign_rhs1 (stmt),
694 						   integer_zero_node, type);
695 	    if (t)
696 	      return t;
697 	  }
698       }
699       break;
700 
701     case GIMPLE_BINARY_RHS:
702       /* Try to fold pointer addition.  */
703       if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
704 	{
705 	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
706 	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
707 	    {
708 	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
709 	      if (!useless_type_conversion_p
710 		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
711 		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
712 	    }
713 	  result = maybe_fold_stmt_addition (gimple_location (stmt),
714 					     type,
715 					     gimple_assign_rhs1 (stmt),
716 					     gimple_assign_rhs2 (stmt));
717 	}
718 
719       if (!result)
720 	result = fold_binary_loc (loc, subcode,
721 				  TREE_TYPE (gimple_assign_lhs (stmt)),
722 				  gimple_assign_rhs1 (stmt),
723 				  gimple_assign_rhs2 (stmt));
724 
725       if (result)
726 	{
727 	  STRIP_USELESS_TYPE_CONVERSION (result);
728 	  if (valid_gimple_rhs_p (result))
729 	    return result;
730 
731 	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
732 	     we lose canonicalization opportunities.  Do not go again
733 	     through fold here though, or the same non-GIMPLE will be
734 	     produced.  */
735 	  if (commutative_tree_code (subcode)
736 	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
737 				       gimple_assign_rhs2 (stmt), false))
738 	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
739 			   gimple_assign_rhs2 (stmt),
740 			   gimple_assign_rhs1 (stmt));
741 	}
742       break;
743 
744     case GIMPLE_TERNARY_RHS:
745       result = fold_ternary_loc (loc, subcode,
746 				 TREE_TYPE (gimple_assign_lhs (stmt)),
747 				 gimple_assign_rhs1 (stmt),
748 				 gimple_assign_rhs2 (stmt),
749 				 gimple_assign_rhs3 (stmt));
750 
751       if (result)
752 	{
753 	  STRIP_USELESS_TYPE_CONVERSION (result);
754 	  if (valid_gimple_rhs_p (result))
755 	    return result;
756 
757 	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
758 	     we lose canonicalization opportunities.  Do not go again
759 	     through fold here though, or the same non-GIMPLE will be
760 	     produced.  */
761 	  if (commutative_ternary_tree_code (subcode)
762 	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
763 				       gimple_assign_rhs2 (stmt), false))
764 	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
765 			   gimple_assign_rhs2 (stmt),
766 			   gimple_assign_rhs1 (stmt),
767 			   gimple_assign_rhs3 (stmt));
768 	}
769       break;
770 
771     case GIMPLE_INVALID_RHS:
772       gcc_unreachable ();
773     }
774 
775   return NULL_TREE;
776 }
777
778 /* Attempt to fold a conditional statement. Return true if any changes were
779 made. We only attempt to fold the condition expression, and do not perform
780 any transformation that would require alteration of the cfg. It is
781 assumed that the operands have been previously folded. */
782
783 static bool
784 fold_gimple_cond (gimple stmt)
785 {
786 tree result = fold_binary_loc (gimple_location (stmt),
787 gimple_cond_code (stmt),
788 boolean_type_node,
789 gimple_cond_lhs (stmt),
790 gimple_cond_rhs (stmt));
791
792 if (result)
793 {
794 STRIP_USELESS_TYPE_CONVERSION (result);
795 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
796 {
797 gimple_cond_set_condition_from_tree (stmt, result);
798 return true;
799 }
800 }
801
802 return false;
803 }
804
805 /* Convert EXPR into a GIMPLE value suitable for substitution on the
806    RHS of an assignment.  Insert the necessary statements before
807    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
808    is replaced.  If the call is expected to produces a result, then it
809    is replaced by an assignment of the new RHS to the result variable.
810    If the result is to be ignored, then the call is replaced by a
811    GIMPLE_NOP.  A proper VDEF chain is retained by making the first
812    VUSE and the last VDEF of the whole sequence be the same as the replaced
813    statement and using new SSA names for stores in between.  */
814 
815 void
816 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
817 {
818   tree lhs;
819   tree tmp = NULL_TREE; /* Silence warning.  */
820   gimple stmt, new_stmt;
821   gimple_stmt_iterator i;
822   gimple_seq stmts = gimple_seq_alloc();
823   struct gimplify_ctx gctx;
824   gimple last = NULL;
825   gimple laststore = NULL;
  /* The virtual SSA name known to reach the statement currently being
     processed; starts as the replaced call's VUSE.  */
826   tree reaching_vuse;
827 
828   stmt = gsi_stmt (*si_p);
829 
830   gcc_assert (is_gimple_call (stmt));
831 
832   lhs = gimple_call_lhs (stmt);
833   reaching_vuse = gimple_vuse (stmt);
834 
835   push_gimplify_context (&gctx);
836 
  /* Gimplify EXPR into STMTS; when the call has an LHS, capture the
     final value in a temporary to assign to it afterwards.  */
837   if (lhs == NULL_TREE)
838     gimplify_and_add (expr, &stmts);
839   else
840     tmp = get_initialized_tmp_var (expr, &stmts, NULL);
841 
842   pop_gimplify_context (NULL);
843 
844   if (gimple_has_location (stmt))
845     annotate_all_with_location (stmts, gimple_location (stmt));
846 
847   /* The replacement can expose previously unreferenced variables.  */
  /* Insert all but the last statement before *SI_P; the last one becomes
     the replacement (or is inserted in the LHS branch below).  */
848   for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
849     {
850       if (last)
851 	{
852 	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
853 	  gsi_next (si_p);
854 	}
855       new_stmt = gsi_stmt (i);
856       if (gimple_in_ssa_p (cfun))
857 	{
858 	  find_new_referenced_vars (new_stmt);
859 	  mark_symbols_for_renaming (new_stmt);
860 	}
861       /* If the new statement has a VUSE, update it with exact SSA name we
862          know will reach this one.  */
863       if (gimple_vuse (new_stmt))
864 	{
865 	  /* If we've also seen a previous store create a new VDEF for
866 	     the latter one, and make that the new reaching VUSE.  */
867 	  if (laststore)
868 	    {
869 	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
870 	      gimple_set_vdef (laststore, reaching_vuse);
871 	      update_stmt (laststore);
872 	      laststore = NULL;
873 	    }
874 	  gimple_set_vuse (new_stmt, reaching_vuse);
875 	  gimple_set_modified (new_stmt, true);
876 	}
      /* Remember the most recent store; its VDEF is created lazily,
	 either above or when finalizing below.  */
877       if (gimple_assign_single_p (new_stmt)
878 	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
879 	{
880 	  laststore = new_stmt;
881 	}
882       last = new_stmt;
883     }
884 
885   if (lhs == NULL_TREE)
886     {
887       /* If we replace a call without LHS that has a VDEF and our new
888          sequence ends with a store we must make that store have the same
889 	 vdef in order not to break the sequencing.  This can happen
890 	 for instance when folding memcpy calls into assignments.  */
891       if (gimple_vdef (stmt) && laststore)
892 	{
893 	  gimple_set_vdef (laststore, gimple_vdef (stmt));
894 	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
895 	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
896 	  update_stmt (laststore);
897 	}
898       else if (gimple_in_ssa_p (cfun))
899 	{
900 	  unlink_stmt_vdef (stmt);
901 	  release_defs (stmt);
902 	}
903       new_stmt = last;
904     }
905   else
906     {
907       if (last)
908 	{
909 	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
910 	  gsi_next (si_p);
911 	}
      /* A register LHS cannot carry the call's VDEF itself, so a pending
	 store inherits it; otherwise the store gets a fresh VDEF and the
	 final assignment below takes over the call's VDEF.  */
912       if (laststore && is_gimple_reg (lhs))
913 	{
914 	  gimple_set_vdef (laststore, gimple_vdef (stmt));
915 	  update_stmt (laststore);
916 	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
917 	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
918 	  laststore = NULL;
919 	}
920       else if (laststore)
921 	{
922 	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
923 	  gimple_set_vdef (laststore, reaching_vuse);
924 	  update_stmt (laststore);
925 	  laststore = NULL;
926 	}
927       new_stmt = gimple_build_assign (lhs, tmp);
928       if (!is_gimple_reg (tmp))
929 	gimple_set_vuse (new_stmt, reaching_vuse);
930       if (!is_gimple_reg (lhs))
931 	{
932 	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
933 	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
934 	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
935 	}
936     }
937 
938   gimple_set_location (new_stmt, gimple_location (stmt));
939   gsi_replace (si_p, new_stmt, false);
940 }
941
942 /* Return the string length, maximum string length or maximum value of
943    ARG in LENGTH.
944    If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
945    is not NULL and, for TYPE == 0, its value is not equal to the length
946    we determine or if we are unable to determine the length or value,
947    return false.  VISITED is a bitmap of visited variables.
948    TYPE is 0 if string length should be returned, 1 for maximum string
949    length and 2 for maximum value ARG can have.  */
950 
951 static bool
952 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
953 {
954   tree var, val;
955   gimple def_stmt;
956 
  /* Non-SSA arguments are handled directly, without use-def walking.  */
957   if (TREE_CODE (arg) != SSA_NAME)
958     {
      /* Both arms of a COND_EXPR must agree on the length/value.  */
959       if (TREE_CODE (arg) == COND_EXPR)
960         return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
961                && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
962       /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
963       else if (TREE_CODE (arg) == ADDR_EXPR
964 	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
965 	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
966 	{
967 	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
968 	  if (TREE_CODE (aop0) == INDIRECT_REF
969 	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
970 	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
971 				      length, visited, type);
972 	}
973 
974       if (type == 2)
975 	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
976 	  val = arg;
977 	  if (TREE_CODE (val) != INTEGER_CST
978 	      || tree_int_cst_sgn (val) < 0)
979 	    return false;
980 	}
981       else
982 	val = c_strlen (arg, 1);
983       if (!val)
984 	return false;
985 
986       if (*length)
987 	{
	  /* For maximum modes (TYPE > 0) keep the larger of the two;
	     for exact length (TYPE == 0) the values must match.  */
988 	  if (type > 0)
989 	    {
990 	      if (TREE_CODE (*length) != INTEGER_CST
991 		  || TREE_CODE (val) != INTEGER_CST)
992 		return false;
993 
994 	      if (tree_int_cst_lt (*length, val))
995 		*length = val;
996 	      return true;
997 	    }
998 	  else if (simple_cst_equal (val, *length) != 1)
999 	    return false;
1000 	}
1001 
1002       *length = val;
1003       return true;
1004     }
1005 
1006   /* If we were already here, break the infinite cycle.  */
1007   if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1008     return true;
1009 
1010   var = arg;
1011   def_stmt = SSA_NAME_DEF_STMT (var);
1012 
1013   switch (gimple_code (def_stmt))
1014     {
1015       case GIMPLE_ASSIGN:
1016         /* The RHS of the statement defining VAR must either have a
1017            constant length or come from another SSA_NAME with a constant
1018            length.  */
1019         if (gimple_assign_single_p (def_stmt)
1020             || gimple_assign_unary_nop_p (def_stmt))
1021           {
1022             tree rhs = gimple_assign_rhs1 (def_stmt);
1023             return get_maxval_strlen (rhs, length, visited, type);
1024           }
1025         return false;
1026 
1027       case GIMPLE_PHI:
1028 	{
1029 	  /* All the arguments of the PHI node must have the same constant
1030 	     length.  */
1031 	  unsigned i;
1032 
1033 	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1034           {
1035             tree arg = gimple_phi_arg (def_stmt, i)->def;
1036 
1037             /* If this PHI has itself as an argument, we cannot
1038                determine the string length of this argument.  However,
1039                if we can find a constant string length for the other
1040                PHI args then we can still be sure that this is a
1041                constant string length.  So be optimistic and just
1042                continue with the next argument.  */
1043             if (arg == gimple_phi_result (def_stmt))
1044               continue;
1045 
1046             if (!get_maxval_strlen (arg, length, visited, type))
1047               return false;
1048           }
1049         }
1050         return true;
1051 
1052       default:
1053         return false;
1054     }
1055 }
1056
1057
1058 /* Fold builtin call in statement STMT. Returns a simplified tree.
1059 We may return a non-constant expression, including another call
1060 to a different function and with different arguments, e.g.,
1061 substituting memcpy for strcpy when the string length is known.
1062 Note that some builtins expand into inline code that may not
1063 be valid in GIMPLE. Callers must take care. */
1064
1065 tree
1066 gimple_fold_builtin (gimple stmt)
1067 {
1068 tree result, val[3];
1069 tree callee, a;
1070 int arg_idx, type;
1071 bitmap visited;
1072 bool ignore;
1073 int nargs;
1074 location_t loc = gimple_location (stmt);
1075
1076 gcc_assert (is_gimple_call (stmt));
1077
1078 ignore = (gimple_call_lhs (stmt) == NULL);
1079
1080 /* First try the generic builtin folder. If that succeeds, return the
1081 result directly. */
1082 result = fold_call_stmt (stmt, ignore);
1083 if (result)
1084 {
1085 if (ignore)
1086 STRIP_NOPS (result);
1087 return result;
1088 }
1089
1090 /* Ignore MD builtins. */
1091 callee = gimple_call_fndecl (stmt);
1092 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1093 return NULL_TREE;
1094
1095 /* If the builtin could not be folded, and it has no argument list,
1096 we're done. */
1097 nargs = gimple_call_num_args (stmt);
1098 if (nargs == 0)
1099 return NULL_TREE;
1100
1101 /* Limit the work only for builtins we know how to simplify. */
1102 switch (DECL_FUNCTION_CODE (callee))
1103 {
1104 case BUILT_IN_STRLEN:
1105 case BUILT_IN_FPUTS:
1106 case BUILT_IN_FPUTS_UNLOCKED:
1107 arg_idx = 0;
1108 type = 0;
1109 break;
1110 case BUILT_IN_STRCPY:
1111 case BUILT_IN_STRNCPY:
1112 arg_idx = 1;
1113 type = 0;
1114 break;
1115 case BUILT_IN_MEMCPY_CHK:
1116 case BUILT_IN_MEMPCPY_CHK:
1117 case BUILT_IN_MEMMOVE_CHK:
1118 case BUILT_IN_MEMSET_CHK:
1119 case BUILT_IN_STRNCPY_CHK:
1120 arg_idx = 2;
1121 type = 2;
1122 break;
1123 case BUILT_IN_STRCPY_CHK:
1124 case BUILT_IN_STPCPY_CHK:
1125 arg_idx = 1;
1126 type = 1;
1127 break;
1128 case BUILT_IN_SNPRINTF_CHK:
1129 case BUILT_IN_VSNPRINTF_CHK:
1130 arg_idx = 1;
1131 type = 2;
1132 break;
1133 default:
1134 return NULL_TREE;
1135 }
1136
1137 if (arg_idx >= nargs)
1138 return NULL_TREE;
1139
1140 /* Try to use the dataflow information gathered by the CCP process. */
1141 visited = BITMAP_ALLOC (NULL);
1142 bitmap_clear (visited);
1143
1144 memset (val, 0, sizeof (val));
1145 a = gimple_call_arg (stmt, arg_idx);
1146 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
1147 val[arg_idx] = NULL_TREE;
1148
1149 BITMAP_FREE (visited);
1150
1151 result = NULL_TREE;
1152 switch (DECL_FUNCTION_CODE (callee))
1153 {
1154 case BUILT_IN_STRLEN:
1155 if (val[0] && nargs == 1)
1156 {
1157 tree new_val =
1158 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
1159
1160 /* If the result is not a valid gimple value, or not a cast
1161 of a valid gimple value, then we cannot use the result. */
1162 if (is_gimple_val (new_val)
1163 || (is_gimple_cast (new_val)
1164 && is_gimple_val (TREE_OPERAND (new_val, 0))))
1165 return new_val;
1166 }
1167 break;
1168
1169 case BUILT_IN_STRCPY:
1170 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1171 result = fold_builtin_strcpy (loc, callee,
1172 gimple_call_arg (stmt, 0),
1173 gimple_call_arg (stmt, 1),
1174 val[1]);
1175 break;
1176
1177 case BUILT_IN_STRNCPY:
1178 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1179 result = fold_builtin_strncpy (loc, callee,
1180 gimple_call_arg (stmt, 0),
1181 gimple_call_arg (stmt, 1),
1182 gimple_call_arg (stmt, 2),
1183 val[1]);
1184 break;
1185
1186 case BUILT_IN_FPUTS:
1187 if (nargs == 2)
1188 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1189 gimple_call_arg (stmt, 1),
1190 ignore, false, val[0]);
1191 break;
1192
1193 case BUILT_IN_FPUTS_UNLOCKED:
1194 if (nargs == 2)
1195 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1196 gimple_call_arg (stmt, 1),
1197 ignore, true, val[0]);
1198 break;
1199
1200 case BUILT_IN_MEMCPY_CHK:
1201 case BUILT_IN_MEMPCPY_CHK:
1202 case BUILT_IN_MEMMOVE_CHK:
1203 case BUILT_IN_MEMSET_CHK:
1204 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1205 result = fold_builtin_memory_chk (loc, callee,
1206 gimple_call_arg (stmt, 0),
1207 gimple_call_arg (stmt, 1),
1208 gimple_call_arg (stmt, 2),
1209 gimple_call_arg (stmt, 3),
1210 val[2], ignore,
1211 DECL_FUNCTION_CODE (callee));
1212 break;
1213
1214 case BUILT_IN_STRCPY_CHK:
1215 case BUILT_IN_STPCPY_CHK:
1216 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1217 result = fold_builtin_stxcpy_chk (loc, callee,
1218 gimple_call_arg (stmt, 0),
1219 gimple_call_arg (stmt, 1),
1220 gimple_call_arg (stmt, 2),
1221 val[1], ignore,
1222 DECL_FUNCTION_CODE (callee));
1223 break;
1224
1225 case BUILT_IN_STRNCPY_CHK:
1226 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1227 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
1228 gimple_call_arg (stmt, 1),
1229 gimple_call_arg (stmt, 2),
1230 gimple_call_arg (stmt, 3),
1231 val[2]);
1232 break;
1233
1234 case BUILT_IN_SNPRINTF_CHK:
1235 case BUILT_IN_VSNPRINTF_CHK:
1236 if (val[1] && is_gimple_val (val[1]))
1237 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1238 DECL_FUNCTION_CODE (callee));
1239 break;
1240
1241 default:
1242 gcc_unreachable ();
1243 }
1244
1245 if (result && ignore)
1246 result = fold_ignored_result (result);
1247 return result;
1248 }
1249
1250 /* Return the first of the base binfos of BINFO that has virtual functions. */
1251
1252 static tree
1253 get_first_base_binfo_with_virtuals (tree binfo)
1254 {
1255 int i;
1256 tree base_binfo;
1257
1258 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1259 if (BINFO_VIRTUALS (base_binfo))
1260 return base_binfo;
1261
1262 return NULL_TREE;
1263 }
1264
1265
1266 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1267 it is found or NULL_TREE if it is not. */
1268
1269 static tree
1270 get_base_binfo_for_type (tree binfo, tree type)
1271 {
1272 int i;
1273 tree base_binfo;
1274 tree res = NULL_TREE;
1275
1276 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1277 if (TREE_TYPE (base_binfo) == type)
1278 {
1279 gcc_assert (!res);
1280 res = base_binfo;
1281 }
1282
1283 return res;
1284 }
1285
1286 /* Return a binfo describing the part of object referenced by expression REF.
1287 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1288 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1289 a simple declaration, indirect reference or an SSA_NAME. If the function
1290 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1291 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1292 Otherwise the first non-artificial field declaration or the base declaration
1293 will be examined to get the encapsulating type. */
1294
tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  /* Iteratively strip COMPONENT_REFs off REF until the binfo can be
     determined or we must give up.  */
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  tree par_type;
	  tree binfo, base_binfo;
	  tree field = TREE_OPERAND (ref, 1);

	  /* A non-artificial field is an ordinary data member; its binfo
	     is relevant only when the member is itself a record.  */
	  if (!DECL_ARTIFICIAL (field))
	    {
	      tree type = TREE_TYPE (field);
	      if (TREE_CODE (type) == RECORD_TYPE)
		return TYPE_BINFO (type);
	      else
		return NULL_TREE;
	    }

	  par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
	  binfo = TYPE_BINFO (par_type);
	  if (!binfo
	      || BINFO_N_BASE_BINFOS (binfo) == 0)
	    return NULL_TREE;

	  /* An artificial field whose type is not that of the primary
	     base refers to a non-primary base; look the matching base
	     binfo up in the descendant's binfo.  */
	  base_binfo = get_first_base_binfo_with_virtuals (binfo);
	  if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
	    {
	      tree d_binfo;

	      d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
						       known_binfo);
	      /* Get descendant binfo. */
	      if (!d_binfo)
		return NULL_TREE;
	      return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
	    }

	  /* Primary base: peel one reference level and keep walking.  */
	  ref = TREE_OPERAND (ref, 0);
	}
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
	return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
	       && (TREE_CODE (ref) == SSA_NAME
		   || TREE_CODE (ref) == MEM_REF))
	return known_binfo;
      else
	return NULL_TREE;
    }
}
1346
1347 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1348 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1349 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1350
1351 tree
1352 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1353 {
1354 HOST_WIDE_INT i;
1355 tree v, fndecl;
1356 struct cgraph_node *node;
1357
1358 v = BINFO_VIRTUALS (known_binfo);
1359 i = 0;
1360 while (i != token)
1361 {
1362 i += (TARGET_VTABLE_USES_DESCRIPTORS
1363 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1364 v = TREE_CHAIN (v);
1365 }
1366
1367 fndecl = TREE_VALUE (v);
1368 node = cgraph_get_node (fndecl);
1369 /* When cgraph node is missing and function is not public, we cannot
1370 devirtualize. This can happen in WHOPR when the actual method
1371 ends up in other partition, because we found devirtualization
1372 possibility too late. */
1373 if ((!node || (!node->analyzed && !node->in_other_partition))
1374 && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
1375 return NULL;
1376 return build_fold_addr_expr (fndecl);
1377 }
1378
1379
1380 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1381 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1382 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1383 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1384 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1385
1386 tree
1387 gimple_fold_obj_type_ref (tree ref, tree known_type)
1388 {
1389 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1390 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
1391 tree binfo;
1392
1393 if (TREE_CODE (obj) == ADDR_EXPR)
1394 obj = TREE_OPERAND (obj, 0);
1395
1396 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1397 if (binfo)
1398 {
1399 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1400 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1401 }
1402 else
1403 return NULL_TREE;
1404 }
1405
1406 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1407 The statement may be replaced by another statement, e.g., if the call
1408 simplifies to a constant value. Return true if any changes were made.
1409 It is assumed that the operands have been previously folded. */
1410
1411 static bool
1412 fold_gimple_call (gimple_stmt_iterator *gsi)
1413 {
1414 gimple stmt = gsi_stmt (*gsi);
1415
1416 tree callee = gimple_call_fndecl (stmt);
1417
1418 /* Check for builtins that CCP can handle using information not
1419 available in the generic fold routines. */
1420 if (callee && DECL_BUILT_IN (callee))
1421 {
1422 tree result = gimple_fold_builtin (stmt);
1423
1424 if (result)
1425 {
1426 if (!update_call_from_tree (gsi, result))
1427 gimplify_and_update_call_from_tree (gsi, result);
1428 return true;
1429 }
1430 }
1431 else
1432 {
1433 /* ??? Should perhaps do this in fold proper. However, doing it
1434 there requires that we create a new CALL_EXPR, and that requires
1435 copying EH region info to the new node. Easier to just do it
1436 here where we can just smash the call operand. */
1437 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
1438 callee = gimple_call_fn (stmt);
1439 if (TREE_CODE (callee) == OBJ_TYPE_REF
1440 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1441 {
1442 tree t;
1443
1444 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1445 if (t)
1446 {
1447 gimple_call_set_fn (stmt, t);
1448 return true;
1449 }
1450 }
1451 }
1452
1453 return false;
1454 }
1455
1456 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1457 distinguishes both cases. */
1458
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only use the new RHS if it does not
	   require more operands than the original statement had.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
	changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      /* Fold *& in the values of debug bind statements as well.  */
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* fold_gimple_call above may have replaced the statement; re-fetch it
     before touching the LHS.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1574
1575 /* Fold the statement pointed to by GSI. In some cases, this function may
1576 replace the whole statement with a new one. Returns true iff folding
1577 makes any changes.
1578 The statement pointed to by GSI should be in valid gimple form but may
1579 be in unfolded state as resulting from for example constant propagation
1580 which can produce *&x = 0. */
1581
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Delegate to the shared worker with INPLACE false, so the statement
     may be replaced by an entirely new one.  */
  return fold_stmt_1 (gsi, false);
}
1587
1588 /* Perform the minimal folding on statement STMT. Only operations like
1589 *&x created by constant propagation are handled. The statement cannot
1590 be replaced with a new one. Return true if the statement was
1591 changed, false otherwise.
1592 The statement STMT should be in valid gimple form but may
1593 be in unfolded state as resulting from for example constant propagation
1594 which can produce *&x = 0. */
1595
1596 bool
1597 fold_stmt_inplace (gimple stmt)
1598 {
1599 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1600 bool changed = fold_stmt_1 (&gsi, true);
1601 gcc_assert (gsi_stmt (gsi) == stmt);
1602 return changed;
1603 }
1604
1605 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1606 if EXPR is null or we don't know how.
1607 If non-null, the result always has boolean type. */
1608
1609 static tree
1610 canonicalize_bool (tree expr, bool invert)
1611 {
1612 if (!expr)
1613 return NULL_TREE;
1614 else if (invert)
1615 {
1616 if (integer_nonzerop (expr))
1617 return boolean_false_node;
1618 else if (integer_zerop (expr))
1619 return boolean_true_node;
1620 else if (TREE_CODE (expr) == SSA_NAME)
1621 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1622 build_int_cst (TREE_TYPE (expr), 0));
1623 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1624 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1625 boolean_type_node,
1626 TREE_OPERAND (expr, 0),
1627 TREE_OPERAND (expr, 1));
1628 else
1629 return NULL_TREE;
1630 }
1631 else
1632 {
1633 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1634 return expr;
1635 if (integer_nonzerop (expr))
1636 return boolean_true_node;
1637 else if (integer_zerop (expr))
1638 return boolean_false_node;
1639 else if (TREE_CODE (expr) == SSA_NAME)
1640 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1641 build_int_cst (TREE_TYPE (expr), 0));
1642 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1643 return fold_build2 (TREE_CODE (expr),
1644 boolean_type_node,
1645 TREE_OPERAND (expr, 0),
1646 TREE_OPERAND (expr, 1));
1647 else
1648 return NULL_TREE;
1649 }
1650 }
1651
1652 /* Check to see if a boolean expression EXPR is logically equivalent to the
1653 comparison (OP1 CODE OP2). Check for various identities involving
1654 SSA_NAMEs. */
1655
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to (NAME != 0) and (NAME == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise compare against the defining assignment of EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) test the comparison directly;
	     (name == 0) and (name != 1) test its inversion.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1709
1710 /* Check to see if two boolean expressions OP1 and OP2 are logically
1711 equivalent. */
1712
1713 static bool
1714 same_bool_result_p (const_tree op1, const_tree op2)
1715 {
1716 /* Simple cases first. */
1717 if (operand_equal_p (op1, op2, 0))
1718 return true;
1719
1720 /* Check the cases where at least one of the operands is a comparison.
1721 These are a bit smarter than operand_equal_p in that they apply some
1722 identifies on SSA_NAMEs. */
1723 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1724 && same_bool_comparison_p (op1, TREE_CODE (op2),
1725 TREE_OPERAND (op2, 0),
1726 TREE_OPERAND (op2, 1)))
1727 return true;
1728 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1729 && same_bool_comparison_p (op2, TREE_CODE (op1),
1730 TREE_OPERAND (op1, 0),
1731 TREE_OPERAND (op1, 1)))
1732 return true;
1733
1734 /* Default case. */
1735 return false;
1736 }
1737
1738 /* Forward declarations for some mutually recursive functions. */
1739
1740 static tree
1741 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1742 enum tree_code code2, tree op2a, tree op2b);
1743 static tree
1744 and_var_with_comparison (tree var, bool invert,
1745 enum tree_code code2, tree op2a, tree op2b);
1746 static tree
1747 and_var_with_comparison_1 (gimple stmt,
1748 enum tree_code code2, tree op2a, tree op2b);
1749 static tree
1750 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1751 enum tree_code code2, tree op2a, tree op2b);
1752 static tree
1753 or_var_with_comparison (tree var, bool invert,
1754 enum tree_code code2, tree op2a, tree op2b);
1755 static tree
1756 or_var_with_comparison_1 (gimple stmt,
1757 enum tree_code code2, tree op2a, tree op2b);
1758
1759 /* Helper function for and_comparisons_1: try to simplify the AND of the
1760 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1761 If INVERT is true, invert the value of the VAR before doing the AND.
1762 Return NULL_EXPR if we can't simplify this to a single expression. */
1763
1764 static tree
1765 and_var_with_comparison (tree var, bool invert,
1766 enum tree_code code2, tree op2a, tree op2b)
1767 {
1768 tree t;
1769 gimple stmt = SSA_NAME_DEF_STMT (var);
1770
1771 /* We can only deal with variables whose definitions are assignments. */
1772 if (!is_gimple_assign (stmt))
1773 return NULL_TREE;
1774
1775 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1776 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1777 Then we only have to consider the simpler non-inverted cases. */
1778 if (invert)
1779 t = or_var_with_comparison_1 (stmt,
1780 invert_tree_comparison (code2, false),
1781 op2a, op2b);
1782 else
1783 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1784 return canonicalize_bool (t, invert);
1785 }
1786
1787 /* Try to simplify the AND of the ssa variable defined by the assignment
1788 STMT with the comparison specified by (OP2A CODE2 OP2B).
1789 Return NULL_EXPR if we can't simplify this to a single expression. */
1790
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR is set when (op2a code2 op2b) is equivalent to op2a
     being true; FALSE_TEST_VAR when it is equivalent to op2a being
     false.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else
		/* Save partial result for later.  */
		partial = t;
	    }
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification applies.  */
  return NULL_TREE;
}
1950
1951 /* Try to simplify the AND of two comparisons defined by
1952 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1953 If this can be done without constructing an intermediate value,
1954 return the resulting tree; otherwise NULL_TREE is returned.
1955 This function is deliberately asymmetric as it recurses on SSA_DEFs
1956 in the first comparison but not the second. */
1957
1958 static tree
1959 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1960 enum tree_code code2, tree op2a, tree op2b)
1961 {
1962 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1963 if (operand_equal_p (op1a, op2a, 0)
1964 && operand_equal_p (op1b, op2b, 0))
1965 {
1966 tree t = combine_comparisons (UNKNOWN_LOCATION,
1967 TRUTH_ANDIF_EXPR, code1, code2,
1968 boolean_type_node, op1a, op1b);
1969 if (t)
1970 return t;
1971 }
1972
1973 /* Likewise the swapped case of the above. */
1974 if (operand_equal_p (op1a, op2b, 0)
1975 && operand_equal_p (op1b, op2a, 0))
1976 {
1977 tree t = combine_comparisons (UNKNOWN_LOCATION,
1978 TRUTH_ANDIF_EXPR, code1,
1979 swap_tree_comparison (code2),
1980 boolean_type_node, op1a, op1b);
1981 if (t)
1982 return t;
1983 }
1984
1985 /* If both comparisons are of the same value against constants, we might
1986 be able to merge them. */
1987 if (operand_equal_p (op1a, op2a, 0)
1988 && TREE_CODE (op1b) == INTEGER_CST
1989 && TREE_CODE (op2b) == INTEGER_CST)
1990 {
1991 int cmp = tree_int_cst_compare (op1b, op2b);
1992
1993 /* If we have (op1a == op1b), we should either be able to
1994 return that or FALSE, depending on whether the constant op1b
1995 also satisfies the other comparison against op2b. */
1996 if (code1 == EQ_EXPR)
1997 {
1998 bool done = true;
1999 bool val;
2000 switch (code2)
2001 {
2002 case EQ_EXPR: val = (cmp == 0); break;
2003 case NE_EXPR: val = (cmp != 0); break;
2004 case LT_EXPR: val = (cmp < 0); break;
2005 case GT_EXPR: val = (cmp > 0); break;
2006 case LE_EXPR: val = (cmp <= 0); break;
2007 case GE_EXPR: val = (cmp >= 0); break;
2008 default: done = false;
2009 }
2010 if (done)
2011 {
2012 if (val)
2013 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2014 else
2015 return boolean_false_node;
2016 }
2017 }
2018 /* Likewise if the second comparison is an == comparison. */
2019 else if (code2 == EQ_EXPR)
2020 {
2021 bool done = true;
2022 bool val;
2023 switch (code1)
2024 {
2025 case EQ_EXPR: val = (cmp == 0); break;
2026 case NE_EXPR: val = (cmp != 0); break;
2027 case LT_EXPR: val = (cmp > 0); break;
2028 case GT_EXPR: val = (cmp < 0); break;
2029 case LE_EXPR: val = (cmp >= 0); break;
2030 case GE_EXPR: val = (cmp <= 0); break;
2031 default: done = false;
2032 }
2033 if (done)
2034 {
2035 if (val)
2036 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2037 else
2038 return boolean_false_node;
2039 }
2040 }
2041
2042 /* Same business with inequality tests. */
2043 else if (code1 == NE_EXPR)
2044 {
2045 bool val;
2046 switch (code2)
2047 {
2048 case EQ_EXPR: val = (cmp != 0); break;
2049 case NE_EXPR: val = (cmp == 0); break;
2050 case LT_EXPR: val = (cmp >= 0); break;
2051 case GT_EXPR: val = (cmp <= 0); break;
2052 case LE_EXPR: val = (cmp > 0); break;
2053 case GE_EXPR: val = (cmp < 0); break;
2054 default:
2055 val = false;
2056 }
2057 if (val)
2058 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2059 }
2060 else if (code2 == NE_EXPR)
2061 {
2062 bool val;
2063 switch (code1)
2064 {
2065 case EQ_EXPR: val = (cmp == 0); break;
2066 case NE_EXPR: val = (cmp != 0); break;
2067 case LT_EXPR: val = (cmp <= 0); break;
2068 case GT_EXPR: val = (cmp >= 0); break;
2069 case LE_EXPR: val = (cmp < 0); break;
2070 case GE_EXPR: val = (cmp > 0); break;
2071 default:
2072 val = false;
2073 }
2074 if (val)
2075 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2076 }
2077
 2078       /* Choose the more restrictive of two < or <= comparisons.  */
2079 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2080 && (code2 == LT_EXPR || code2 == LE_EXPR))
2081 {
2082 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2083 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2084 else
2085 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2086 }
2087
 2088       /* Likewise choose the more restrictive of two > or >= comparisons.  */
2089 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2090 && (code2 == GT_EXPR || code2 == GE_EXPR))
2091 {
2092 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2093 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2094 else
2095 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2096 }
2097
2098 /* Check for singleton ranges. */
2099 else if (cmp == 0
2100 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2101 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2102 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2103
2104 /* Check for disjoint ranges. */
2105 else if (cmp <= 0
2106 && (code1 == LT_EXPR || code1 == LE_EXPR)
2107 && (code2 == GT_EXPR || code2 == GE_EXPR))
2108 return boolean_false_node;
2109 else if (cmp >= 0
2110 && (code1 == GT_EXPR || code1 == GE_EXPR)
2111 && (code2 == LT_EXPR || code2 == LE_EXPR))
2112 return boolean_false_node;
2113 }
2114
2115 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2116 NAME's definition is a truth value. See if there are any simplifications
2117 that can be done against the NAME's definition. */
2118 if (TREE_CODE (op1a) == SSA_NAME
2119 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2120 && (integer_zerop (op1b) || integer_onep (op1b)))
2121 {
2122 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2123 || (code1 == NE_EXPR && integer_onep (op1b)));
2124 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2125 switch (gimple_code (stmt))
2126 {
2127 case GIMPLE_ASSIGN:
2128 /* Try to simplify by copy-propagating the definition. */
2129 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2130
2131 case GIMPLE_PHI:
2132 /* If every argument to the PHI produces the same result when
2133 ANDed with the second comparison, we win.
2134 Do not do this unless the type is bool since we need a bool
2135 result here anyway. */
2136 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2137 {
2138 tree result = NULL_TREE;
2139 unsigned i;
2140 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2141 {
2142 tree arg = gimple_phi_arg_def (stmt, i);
2143
2144 /* If this PHI has itself as an argument, ignore it.
2145 If all the other args produce the same result,
2146 we're still OK. */
2147 if (arg == gimple_phi_result (stmt))
2148 continue;
2149 else if (TREE_CODE (arg) == INTEGER_CST)
2150 {
2151 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2152 {
2153 if (!result)
2154 result = boolean_false_node;
2155 else if (!integer_zerop (result))
2156 return NULL_TREE;
2157 }
2158 else if (!result)
2159 result = fold_build2 (code2, boolean_type_node,
2160 op2a, op2b);
2161 else if (!same_bool_comparison_p (result,
2162 code2, op2a, op2b))
2163 return NULL_TREE;
2164 }
2165 else if (TREE_CODE (arg) == SSA_NAME)
2166 {
2167 tree temp = and_var_with_comparison (arg, invert,
2168 code2, op2a, op2b);
2169 if (!temp)
2170 return NULL_TREE;
2171 else if (!result)
2172 result = temp;
2173 else if (!same_bool_result_p (result, temp))
2174 return NULL_TREE;
2175 }
2176 else
2177 return NULL_TREE;
2178 }
2179 return result;
2180 }
2181
2182 default:
2183 break;
2184 }
2185 }
2186 return NULL_TREE;
2187 }
2188
2189 /* Try to simplify the AND of two comparisons, specified by
2190 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2191 If this can be simplified to a single expression (without requiring
2192 introducing more SSA variables to hold intermediate values),
2193 return the resulting tree. Otherwise return NULL_TREE.
2194 If the result expression is non-null, it has boolean type. */
2195
2196 tree
2197 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2198 enum tree_code code2, tree op2a, tree op2b)
2199 {
2200 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2201 if (t)
2202 return t;
2203 else
2204 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2205 }
2206
2207 /* Helper function for or_comparisons_1: try to simplify the OR of the
2208 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2209 If INVERT is true, invert the value of VAR before doing the OR.
2210 Return NULL_EXPR if we can't simplify this to a single expression. */
2211
2212 static tree
2213 or_var_with_comparison (tree var, bool invert,
2214 enum tree_code code2, tree op2a, tree op2b)
2215 {
2216 tree t;
2217 gimple stmt = SSA_NAME_DEF_STMT (var);
2218
2219 /* We can only deal with variables whose definitions are assignments. */
2220 if (!is_gimple_assign (stmt))
2221 return NULL_TREE;
2222
2223 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2224 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2225 Then we only have to consider the simpler non-inverted cases. */
2226 if (invert)
2227 t = and_var_with_comparison_1 (stmt,
2228 invert_tree_comparison (code2, false),
2229 op2a, op2b);
2230 else
2231 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2232 return canonicalize_bool (t, invert);
2233 }
2234
2235 /* Try to simplify the OR of the ssa variable defined by the assignment
2236 STMT with the comparison specified by (OP2A CODE2 OP2B).
2237 Return NULL_EXPR if we can't simplify this to a single expression. */
2238
2239 static tree
2240 or_var_with_comparison_1 (gimple stmt,
2241 enum tree_code code2, tree op2a, tree op2b)
2242 {
2243 tree var = gimple_assign_lhs (stmt);
2244 tree true_test_var = NULL_TREE;
2245 tree false_test_var = NULL_TREE;
2246 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2247
2248 /* Check for identities like (var OR (var != 0)) => true . */
2249 if (TREE_CODE (op2a) == SSA_NAME
2250 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2251 {
2252 if ((code2 == NE_EXPR && integer_zerop (op2b))
2253 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2254 {
2255 true_test_var = op2a;
2256 if (var == true_test_var)
2257 return var;
2258 }
2259 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2260 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2261 {
2262 false_test_var = op2a;
2263 if (var == false_test_var)
2264 return boolean_true_node;
2265 }
2266 }
2267
2268 /* If the definition is a comparison, recurse on it. */
2269 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2270 {
2271 tree t = or_comparisons_1 (innercode,
2272 gimple_assign_rhs1 (stmt),
2273 gimple_assign_rhs2 (stmt),
2274 code2,
2275 op2a,
2276 op2b);
2277 if (t)
2278 return t;
2279 }
2280
2281 /* If the definition is an AND or OR expression, we may be able to
2282 simplify by reassociating. */
2283 if (innercode == TRUTH_AND_EXPR
2284 || innercode == TRUTH_OR_EXPR
2285 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2286 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2287 {
2288 tree inner1 = gimple_assign_rhs1 (stmt);
2289 tree inner2 = gimple_assign_rhs2 (stmt);
2290 gimple s;
2291 tree t;
2292 tree partial = NULL_TREE;
2293 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2294
2295 /* Check for boolean identities that don't require recursive examination
2296 of inner1/inner2:
2297 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2298 inner1 OR (inner1 AND inner2) => inner1
2299 !inner1 OR (inner1 OR inner2) => true
2300 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2301 */
2302 if (inner1 == true_test_var)
2303 return (is_or ? var : inner1);
2304 else if (inner2 == true_test_var)
2305 return (is_or ? var : inner2);
2306 else if (inner1 == false_test_var)
2307 return (is_or
2308 ? boolean_true_node
2309 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2310 else if (inner2 == false_test_var)
2311 return (is_or
2312 ? boolean_true_node
2313 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2314
2315 /* Next, redistribute/reassociate the OR across the inner tests.
2316 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2317 if (TREE_CODE (inner1) == SSA_NAME
2318 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2319 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2320 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2321 gimple_assign_rhs1 (s),
2322 gimple_assign_rhs2 (s),
2323 code2, op2a, op2b)))
2324 {
2325 /* Handle the OR case, where we are reassociating:
2326 (inner1 OR inner2) OR (op2a code2 op2b)
2327 => (t OR inner2)
2328 If the partial result t is a constant, we win. Otherwise
2329 continue on to try reassociating with the other inner test. */
2330 if (innercode == TRUTH_OR_EXPR)
2331 {
2332 if (integer_onep (t))
2333 return boolean_true_node;
2334 else if (integer_zerop (t))
2335 return inner2;
2336 }
2337
2338 /* Handle the AND case, where we are redistributing:
2339 (inner1 AND inner2) OR (op2a code2 op2b)
2340 => (t AND (inner2 OR (op2a code op2b))) */
2341 else
2342 {
2343 if (integer_zerop (t))
2344 return boolean_false_node;
2345 else
2346 /* Save partial result for later. */
2347 partial = t;
2348 }
2349 }
2350
2351 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2352 if (TREE_CODE (inner2) == SSA_NAME
2353 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2354 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2355 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2356 gimple_assign_rhs1 (s),
2357 gimple_assign_rhs2 (s),
2358 code2, op2a, op2b)))
2359 {
2360 /* Handle the OR case, where we are reassociating:
2361 (inner1 OR inner2) OR (op2a code2 op2b)
2362 => (inner1 OR t) */
2363 if (innercode == TRUTH_OR_EXPR)
2364 {
2365 if (integer_zerop (t))
2366 return inner1;
2367 else if (integer_onep (t))
2368 return boolean_true_node;
2369 }
2370
2371 /* Handle the AND case, where we are redistributing:
2372 (inner1 AND inner2) OR (op2a code2 op2b)
2373 => (t AND (inner1 OR (op2a code2 op2b)))
2374 => (t AND partial) */
2375 else
2376 {
2377 if (integer_zerop (t))
2378 return boolean_false_node;
2379 else if (partial)
2380 {
2381 /* We already got a simplification for the other
2382 operand to the redistributed AND expression. The
2383 interesting case is when at least one is true.
2384 Or, if both are the same, we can apply the identity
2385 (x AND x) == true. */
2386 if (integer_onep (partial))
2387 return t;
2388 else if (integer_onep (t))
2389 return partial;
2390 else if (same_bool_result_p (t, partial))
2391 return boolean_true_node;
2392 }
2393 }
2394 }
2395 }
2396 return NULL_TREE;
2397 }
2398
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is negative, zero, or positive as op1b is less than,
	 equal to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      /* val says whether op1b itself satisfies the second
		 comparison; if so, the union covers every value.  */
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If op1b satisfies the second comparison, (op1a == op1b)
	     is subsumed by it and the second test alone suffices.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges:  (x < c OR x > c) excludes only c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.
	 NOTE: these rely on the singleton-range check above having
	 already handled the cmp == 0 strict/strict combination, which
	 is NOT always true.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* invert is true when the first comparison tests NAME for
	 falsehood rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This argument makes the OR true; all other
			     arguments must agree on that result.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp = or_var_with_comparison (arg, invert,
							  code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2636
2637 /* Try to simplify the OR of two comparisons, specified by
2638 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2639 If this can be simplified to a single expression (without requiring
2640 introducing more SSA variables to hold intermediate values),
2641 return the resulting tree. Otherwise return NULL_TREE.
2642 If the result expression is non-null, it has boolean type. */
2643
2644 tree
2645 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2646 enum tree_code code2, tree op2a, tree op2b)
2647 {
2648 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2649 if (t)
2650 return t;
2651 else
2652 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2653 }