/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"


/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
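
/* For instance (an illustrative sketch; the declaration is
   hypothetical): given

     static const int answer = 42;

   a use of ANSWER satisfies TREE_STATIC and TREE_READONLY, its
   DECL_INITIAL is the INTEGER_CST 42, and that constant is what gets
   returned.  */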

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
          || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_NOPS (val);
          if (is_gimple_min_invariant (val))
            {
              if (TREE_CODE (val) == ADDR_EXPR)
                {
                  tree base = get_base_address (TREE_OPERAND (val, 0));
                  if (base && TREE_CODE (base) == VAR_DECL)
                    {
                      TREE_ADDRESSABLE (base) = 1;
                      if (gimple_referenced_vars (cfun))
                        add_referenced_var (base);
                    }
                }
              return val;
            }
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}


/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
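
/* As an illustrative sketch (the declarations are hypothetical): with

     int a;
     int *p = &a;

   propagating ADDR == &a into DEREF == *p and cancelling them yields
   plain 'a'.  The checks below guard against cases where the
   replacement would change the type or lose qualifiers of the
   access.  */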

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}


/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */
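
/* A sketch of the intended transformation, on hypothetical trees and
   assuming a 4-byte 'int': for

     int a[10];

   the access *(int *)((char *) &a + 8) becomes a[2], because the byte
   offset 8 divides exactly by the element size 4.  */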

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
                                tree orig_type,
                                bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct accesses past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}


/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */
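
/* An illustrative sketch on hypothetical types, assuming a 4-byte
   'int' and no padding: for

     struct S { int x; int y; } s;

   the access *(int *)((char *) &s + 4) becomes the COMPONENT_REF s.y,
   since offset 4 lands exactly on the field 'y'.  */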

static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
                                    tree base, tree offset, tree orig_type)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
                                                orig_type);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
                                             orig_type);
}

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by a combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
                                tree orig_type)
{
  tree ret;
  tree type;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);

  /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
     field of a union, so it needs to be removed and a new COMPONENT_REF
     constructed.  Such wrong COMPONENT_REFs are often constructed by
     folding the (type *)&object within the expression
     (type *)&object+offset.  */
  if (handled_component_p (base))
    {
      HOST_WIDE_INT sub_offset, size, maxsize;
      tree newbase;
      newbase = get_ref_base_and_extent (base, &sub_offset,
                                         &size, &maxsize);
      gcc_assert (newbase);
      if (size == maxsize
          && size != -1
          && !(sub_offset & (BITS_PER_UNIT - 1)))
        {
          base = newbase;
          if (sub_offset)
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
                                                     sub_offset / BITS_PER_UNIT), 1);
        }
    }
  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;
  type = TREE_TYPE (base);

  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
  if (!ret)
    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);

  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by a combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
                              tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
                                      TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         the reference from one array to another.  This would happen
         for example for

           struct X { char s1[10]; char s2[10]; } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* If there is no offset involved simply return the folded base.  */
      if (integer_zerop (offset))
        return base;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve the volatility of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (loc, base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}


/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */
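
/* For instance (an illustrative sketch on hypothetical trees,
   assuming a 4-byte 'int'): with

     int a[10];

   the POINTER_PLUS_EXPR '(int *) &a p+ 8' can be rewritten as
   '&a[2]'.  */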

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          if (!is_gimple_assign (offset_def))
            return NULL_TREE;

          /* As we will end up creating a variable index array access
             in the outermost array dimension, make sure there isn't
             an inner array that the index could overflow to.  */
          if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
            return NULL_TREE;

          /* Do not build array references of something that we can't
             see the true number of array dimensions for.  */
          if (!DECL_P (TREE_OPERAND (op0, 0))
              && !handled_component_p (TREE_OPERAND (op0, 0)))
            return NULL_TREE;

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
                                     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              gimple_assign_rhs1 (offset_def),
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
          else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              op1,
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
        }
      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
                                            ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
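
/* The canonical example is a statement like '*&x = 0' left behind by
   constant propagation; dereferencing the propagated address folds
   back to plain 'x = 0'.  */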

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
        return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
        return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
                                           integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
        tem = NULL_TREE;
      if (!tem
          && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);

      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (!is_lhs
           && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
        {
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree op0 = COND_EXPR_COND (rhs);
            tree tem;
            bool set = false;
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
              {
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
              }
            else if (is_gimple_min_invariant (op0))
              {
                tem = op0;
                set = true;
              }
            else
              return NULL_TREE;

            if (set)
              result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
                                        COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        else if (TREE_CODE (rhs) == TARGET_MEM_REF)
          return maybe_fold_tmr (rhs);

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
            if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_ternary_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt),
                           gimple_assign_rhs3 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      mark_symbols_for_renaming (new_stmt);
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
      new_stmt = last;
    }
  else
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
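
/* For instance, with TYPE == 0 and ARG the address of the string
   constant "foo", *LENGTH is set to the INTEGER_CST 3 computed by
   c_strlen; with TYPE == 2, a nonnegative INTEGER_CST ARG is itself
   stored in *LENGTH.  */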

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}


/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
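
/* For example, a call such as

     strcpy (dst, "hi");

   may come back as the equivalent

     memcpy (dst, "hi", 3);

   once the length of the source string is known to be constant.  */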

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

/* Search for a base binfo of BINFO that corresponds to TYPE and return it if
   it is found or NULL_TREE if it is not.  */

static tree
get_base_binfo_for_type (tree binfo, tree type)
{
  int i;
  tree base_binfo;
  tree res = NULL_TREE;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (TREE_TYPE (base_binfo) == type)
      {
        gcc_assert (!res);
        res = base_binfo;
      }

  return res;
}

/* Return a binfo describing the part of the object referenced by expression
   REF.  Return NULL_TREE if it cannot be determined.  REF can consist of a
   series of COMPONENT_REFs of a declaration or of an INDIRECT_REF, or it can
   also be just a simple declaration, indirect reference or an SSA_NAME.  If
   the function discovers an INDIRECT_REF or an SSA_NAME, it will assume that
   the encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
   Otherwise the first non-artificial field declaration or the base
   declaration will be examined to get the encapsulating type.  */

tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
        {
          tree par_type;
          tree binfo, base_binfo;
          tree field = TREE_OPERAND (ref, 1);

          if (!DECL_ARTIFICIAL (field))
            {
              tree type = TREE_TYPE (field);
              if (TREE_CODE (type) == RECORD_TYPE)
                return TYPE_BINFO (type);
              else
                return NULL_TREE;
            }

          par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
          binfo = TYPE_BINFO (par_type);
          if (!binfo
              || BINFO_N_BASE_BINFOS (binfo) == 0)
            return NULL_TREE;

          base_binfo = BINFO_BASE_BINFO (binfo, 0);
          if (BINFO_TYPE (base_binfo) != TREE_TYPE (field))
            {
              tree d_binfo;

              d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
                                                       known_binfo);
              /* Get descendant binfo.  */
              if (!d_binfo)
                return NULL_TREE;
              return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
            }

          ref = TREE_OPERAND (ref, 0);
        }
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
        return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
               && (TREE_CODE (ref) == SSA_NAME
                   || TREE_CODE (ref) == INDIRECT_REF))
        return known_binfo;
      else
        return NULL_TREE;
    }
}

/* Fold an OBJ_TYPE_REF expression to the address of a function.  TOKEN is
   the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  HOST_WIDE_INT i;
  tree v, fndecl;

  v = BINFO_VIRTUALS (known_binfo);
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
            ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  return build_fold_addr_expr (fndecl);
}


/* Fold an OBJ_TYPE_REF expression to the address of a function.  If
   KNOWN_TYPE is not NULL_TREE, it is the true type of the outermost
   encapsulating object if that comes from a pointer SSA_NAME.  If the true
   outermost encapsulating type can be determined from a declaration
   OBJ_TYPE_REF_OBJECT(REF), it is used regardless of KNOWN_TYPE (which thus
   can be NULL_TREE).  */

tree
gimple_fold_obj_type_ref (tree ref, tree known_type)
{
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
  tree binfo;

  if (TREE_CODE (obj) == ADDR_EXPR)
    obj = TREE_OPERAND (obj, 0);

  binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
  if (binfo)
    {
      HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
      return gimple_fold_obj_type_ref_known_binfo (token, binfo);
    }
  else
    return NULL_TREE;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          return true;
        }
    }
  else
    {
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
        {
          tree t;

          t = gimple_fold_obj_type_ref (callee, NULL_TREE);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
          {
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
            if (tmp)
              {
                gimple_call_set_arg (stmt, i, tmp);
                changed = true;
              }
          }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
        changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
        {
          tree link = gimple_asm_output_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, true)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
        {
          tree link = gimple_asm_input_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, false)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
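
/* For example, for an SSA_NAME X of integer type, canonicalize_bool
   (X, false) builds the boolean comparison X != 0 while
   canonicalize_bool (X, true) builds X == 0; a comparison such as
   'a < b' is returned with boolean type, or inverted to 'a >= b' when
   INVERT is set.  */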

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
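
/* For instance, if the SSA_NAME T is defined by 'T = a < b', then
   both same_bool_comparison_p (T, LT_EXPR, a, b) and
   same_bool_comparison_p (a < b, LT_EXPR, a, b) hold.  */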

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
1850
1851 /* Check to see if two boolean expressions OP1 and OP2 are logically
1852 equivalent. */
1853
1854 static bool
1855 same_bool_result_p (const_tree op1, const_tree op2)
1856 {
1857 /* Simple cases first. */
1858 if (operand_equal_p (op1, op2, 0))
1859 return true;
1860
1861 /* Check the cases where at least one of the operands is a comparison.
1862 These are a bit smarter than operand_equal_p in that they apply some
1863 identities on SSA_NAMEs. */
1864 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1865 && same_bool_comparison_p (op1, TREE_CODE (op2),
1866 TREE_OPERAND (op2, 0),
1867 TREE_OPERAND (op2, 1)))
1868 return true;
1869 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1870 && same_bool_comparison_p (op2, TREE_CODE (op1),
1871 TREE_OPERAND (op1, 0),
1872 TREE_OPERAND (op1, 1)))
1873 return true;
1874
1875 /* Default case. */
1876 return false;
1877 }
1878
1879 /* Forward declarations for some mutually recursive functions. */
1880
1881 static tree
1882 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1883 enum tree_code code2, tree op2a, tree op2b);
1884 static tree
1885 and_var_with_comparison (tree var, bool invert,
1886 enum tree_code code2, tree op2a, tree op2b);
1887 static tree
1888 and_var_with_comparison_1 (gimple stmt,
1889 enum tree_code code2, tree op2a, tree op2b);
1890 static tree
1891 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1892 enum tree_code code2, tree op2a, tree op2b);
1893 static tree
1894 or_var_with_comparison (tree var, bool invert,
1895 enum tree_code code2, tree op2a, tree op2b);
1896 static tree
1897 or_var_with_comparison_1 (gimple stmt,
1898 enum tree_code code2, tree op2a, tree op2b);
1899
1900 /* Helper function for and_comparisons_1: try to simplify the AND of the
1901 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1902 If INVERT is true, invert the value of the VAR before doing the AND.
1903 Return NULL_TREE if we can't simplify this to a single expression. */
1904
1905 static tree
1906 and_var_with_comparison (tree var, bool invert,
1907 enum tree_code code2, tree op2a, tree op2b)
1908 {
1909 tree t;
1910 gimple stmt = SSA_NAME_DEF_STMT (var);
1911
1912 /* We can only deal with variables whose definitions are assignments. */
1913 if (!is_gimple_assign (stmt))
1914 return NULL_TREE;
1915
1916 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1917 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1918 Then we only have to consider the simpler non-inverted cases. */
1919 if (invert)
1920 t = or_var_with_comparison_1 (stmt,
1921 invert_tree_comparison (code2, false),
1922 op2a, op2b);
1923 else
1924 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1925 return canonicalize_bool (t, invert);
1926 }
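
/* Illustrative sketch, an editorial aside rather than part of this file:
   the DeMorgan rewrite used above, checked over C ints.  !var AND cmp
   is computed as !(var OR !cmp), which is why the inverted case
   dispatches to the OR helper with the comparison code inverted.  */
#if 0
static int
demo_demorgan_and (int var, int cmp)
{
  /* Both sides agree for all 0/1 inputs, so the result is 1.  */
  return (!var && cmp) == !(var || !cmp);
}
#endif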
1927
1928 /* Try to simplify the AND of the ssa variable defined by the assignment
1929 STMT with the comparison specified by (OP2A CODE2 OP2B).
1930 Return NULL_TREE if we can't simplify this to a single expression. */
1931
1932 static tree
1933 and_var_with_comparison_1 (gimple stmt,
1934 enum tree_code code2, tree op2a, tree op2b)
1935 {
1936 tree var = gimple_assign_lhs (stmt);
1937 tree true_test_var = NULL_TREE;
1938 tree false_test_var = NULL_TREE;
1939 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1940
1941 /* Check for identities like (var AND (var == 0)) => false. */
1942 if (TREE_CODE (op2a) == SSA_NAME
1943 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1944 {
1945 if ((code2 == NE_EXPR && integer_zerop (op2b))
1946 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1947 {
1948 true_test_var = op2a;
1949 if (var == true_test_var)
1950 return var;
1951 }
1952 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1953 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1954 {
1955 false_test_var = op2a;
1956 if (var == false_test_var)
1957 return boolean_false_node;
1958 }
1959 }
1960
1961 /* If the definition is a comparison, recurse on it. */
1962 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1963 {
1964 tree t = and_comparisons_1 (innercode,
1965 gimple_assign_rhs1 (stmt),
1966 gimple_assign_rhs2 (stmt),
1967 code2,
1968 op2a,
1969 op2b);
1970 if (t)
1971 return t;
1972 }
1973
1974 /* If the definition is an AND or OR expression, we may be able to
1975 simplify by reassociating. */
1976 if (innercode == TRUTH_AND_EXPR
1977 || innercode == TRUTH_OR_EXPR
1978 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1979 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1980 {
1981 tree inner1 = gimple_assign_rhs1 (stmt);
1982 tree inner2 = gimple_assign_rhs2 (stmt);
1983 gimple s;
1984 tree t;
1985 tree partial = NULL_TREE;
1986 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1987
1988 /* Check for boolean identities that don't require recursive examination
1989 of inner1/inner2:
1990 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1991 inner1 AND (inner1 OR inner2) => inner1
1992 !inner1 AND (inner1 AND inner2) => false
1993 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1994 Likewise for similar cases involving inner2. */
1995 if (inner1 == true_test_var)
1996 return (is_and ? var : inner1);
1997 else if (inner2 == true_test_var)
1998 return (is_and ? var : inner2);
1999 else if (inner1 == false_test_var)
2000 return (is_and
2001 ? boolean_false_node
2002 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
2003 else if (inner2 == false_test_var)
2004 return (is_and
2005 ? boolean_false_node
2006 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
2007
2008 /* Next, redistribute/reassociate the AND across the inner tests.
2009 Compute the first partial result, (inner1 AND (op2a code op2b)) */
2010 if (TREE_CODE (inner1) == SSA_NAME
2011 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2012 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2013 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2014 gimple_assign_rhs1 (s),
2015 gimple_assign_rhs2 (s),
2016 code2, op2a, op2b)))
2017 {
2018 /* Handle the AND case, where we are reassociating:
2019 (inner1 AND inner2) AND (op2a code2 op2b)
2020 => (t AND inner2)
2021 If the partial result t is a constant, we win. Otherwise
2022 continue on to try reassociating with the other inner test. */
2023 if (is_and)
2024 {
2025 if (integer_onep (t))
2026 return inner2;
2027 else if (integer_zerop (t))
2028 return boolean_false_node;
2029 }
2030
2031 /* Handle the OR case, where we are redistributing:
2032 (inner1 OR inner2) AND (op2a code2 op2b)
2033 => (t OR (inner2 AND (op2a code2 op2b))) */
2034 else
2035 {
2036 if (integer_onep (t))
2037 return boolean_true_node;
2038 else
2039 /* Save partial result for later. */
2040 partial = t;
2041 }
2042 }
2043
2044 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
2045 if (TREE_CODE (inner2) == SSA_NAME
2046 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2047 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2048 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2049 gimple_assign_rhs1 (s),
2050 gimple_assign_rhs2 (s),
2051 code2, op2a, op2b)))
2052 {
2053 /* Handle the AND case, where we are reassociating:
2054 (inner1 AND inner2) AND (op2a code2 op2b)
2055 => (inner1 AND t) */
2056 if (is_and)
2057 {
2058 if (integer_onep (t))
2059 return inner1;
2060 else if (integer_zerop (t))
2061 return boolean_false_node;
2062 }
2063
2064 /* Handle the OR case, where we are redistributing:
2065 (inner1 OR inner2) AND (op2a code2 op2b)
2066 => (t OR (inner1 AND (op2a code2 op2b)))
2067 => (t OR partial) */
2068 else
2069 {
2070 if (integer_onep (t))
2071 return boolean_true_node;
2072 else if (partial)
2073 {
2074 /* We already got a simplification for the other
2075 operand to the redistributed OR expression. The
2076 interesting case is when at least one is false.
2077 Or, if both are the same, we can apply the identity
2078 (x OR x) == x. */
2079 if (integer_zerop (partial))
2080 return t;
2081 else if (integer_zerop (t))
2082 return partial;
2083 else if (same_bool_result_p (t, partial))
2084 return t;
2085 }
2086 }
2087 }
2088 }
2089 return NULL_TREE;
2090 }
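
/* Illustrative sketch, an editorial aside rather than part of this file:
   the two laws the reassociation code above relies on, over C ints.
   AND case (reassociation):   (a && b) && c == (a && c) && b
   OR case (redistribution):   (a || b) && c == (a && c) || (b && c)
   The second law is what produces the "partial" result saved above.  */
#if 0
static int
demo_redistribute_and (int a, int b, int c)
{
  /* Both identities hold for all 0/1 inputs, so the result is 1.  */
  return (((a && b) && c) == ((a && c) && b))
	 && (((a || b) && c) == ((a && c) || (b && c)));
}
#endif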
2091
2092 /* Try to simplify the AND of two comparisons defined by
2093 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2094 If this can be done without constructing an intermediate value,
2095 return the resulting tree; otherwise NULL_TREE is returned.
2096 This function is deliberately asymmetric as it recurses on SSA defs
2097 in the first comparison but not the second. */
2098
2099 static tree
2100 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2101 enum tree_code code2, tree op2a, tree op2b)
2102 {
2103 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
2104 if (operand_equal_p (op1a, op2a, 0)
2105 && operand_equal_p (op1b, op2b, 0))
2106 {
2107 tree t = combine_comparisons (UNKNOWN_LOCATION,
2108 TRUTH_ANDIF_EXPR, code1, code2,
2109 boolean_type_node, op1a, op1b);
2110 if (t)
2111 return t;
2112 }
2113
2114 /* Likewise the swapped case of the above. */
2115 if (operand_equal_p (op1a, op2b, 0)
2116 && operand_equal_p (op1b, op2a, 0))
2117 {
2118 tree t = combine_comparisons (UNKNOWN_LOCATION,
2119 TRUTH_ANDIF_EXPR, code1,
2120 swap_tree_comparison (code2),
2121 boolean_type_node, op1a, op1b);
2122 if (t)
2123 return t;
2124 }
2125
2126 /* If both comparisons are of the same value against constants, we might
2127 be able to merge them. */
2128 if (operand_equal_p (op1a, op2a, 0)
2129 && TREE_CODE (op1b) == INTEGER_CST
2130 && TREE_CODE (op2b) == INTEGER_CST)
2131 {
2132 int cmp = tree_int_cst_compare (op1b, op2b);
2133
2134 /* If we have (op1a == op1b), we should either be able to
2135 return that or FALSE, depending on whether the constant op1b
2136 also satisfies the other comparison against op2b. */
2137 if (code1 == EQ_EXPR)
2138 {
2139 bool done = true;
2140 bool val;
2141 switch (code2)
2142 {
2143 case EQ_EXPR: val = (cmp == 0); break;
2144 case NE_EXPR: val = (cmp != 0); break;
2145 case LT_EXPR: val = (cmp < 0); break;
2146 case GT_EXPR: val = (cmp > 0); break;
2147 case LE_EXPR: val = (cmp <= 0); break;
2148 case GE_EXPR: val = (cmp >= 0); break;
2149 default: done = false;
2150 }
2151 if (done)
2152 {
2153 if (val)
2154 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2155 else
2156 return boolean_false_node;
2157 }
2158 }
2159 /* Likewise if the second comparison is an == comparison. */
2160 else if (code2 == EQ_EXPR)
2161 {
2162 bool done = true;
2163 bool val;
2164 switch (code1)
2165 {
2166 case EQ_EXPR: val = (cmp == 0); break;
2167 case NE_EXPR: val = (cmp != 0); break;
2168 case LT_EXPR: val = (cmp > 0); break;
2169 case GT_EXPR: val = (cmp < 0); break;
2170 case LE_EXPR: val = (cmp >= 0); break;
2171 case GE_EXPR: val = (cmp <= 0); break;
2172 default: done = false;
2173 }
2174 if (done)
2175 {
2176 if (val)
2177 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2178 else
2179 return boolean_false_node;
2180 }
2181 }
2182
2183 /* Same business with inequality tests. */
2184 else if (code1 == NE_EXPR)
2185 {
2186 bool val;
2187 switch (code2)
2188 {
2189 case EQ_EXPR: val = (cmp != 0); break;
2190 case NE_EXPR: val = (cmp == 0); break;
2191 case LT_EXPR: val = (cmp >= 0); break;
2192 case GT_EXPR: val = (cmp <= 0); break;
2193 case LE_EXPR: val = (cmp > 0); break;
2194 case GE_EXPR: val = (cmp < 0); break;
2195 default:
2196 val = false;
2197 }
2198 if (val)
2199 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2200 }
2201 else if (code2 == NE_EXPR)
2202 {
2203 bool val;
2204 switch (code1)
2205 {
2206 case EQ_EXPR: val = (cmp == 0); break;
2207 case NE_EXPR: val = (cmp != 0); break;
2208 case LT_EXPR: val = (cmp <= 0); break;
2209 case GT_EXPR: val = (cmp >= 0); break;
2210 case LE_EXPR: val = (cmp < 0); break;
2211 case GE_EXPR: val = (cmp > 0); break;
2212 default:
2213 val = false;
2214 }
2215 if (val)
2216 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2217 }
2218
2219 /* Choose the more restrictive of two < or <= comparisons. */
2220 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2221 && (code2 == LT_EXPR || code2 == LE_EXPR))
2222 {
2223 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2224 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2225 else
2226 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2227 }
2228
2229 /* Likewise choose the more restrictive of two > or >= comparisons. */
2230 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2231 && (code2 == GT_EXPR || code2 == GE_EXPR))
2232 {
2233 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2234 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2235 else
2236 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2237 }
2238
2239 /* Check for singleton ranges. */
2240 else if (cmp == 0
2241 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2242 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2243 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2244
2245 /* Check for disjoint ranges. */
2246 else if (cmp <= 0
2247 && (code1 == LT_EXPR || code1 == LE_EXPR)
2248 && (code2 == GT_EXPR || code2 == GE_EXPR))
2249 return boolean_false_node;
2250 else if (cmp >= 0
2251 && (code1 == GT_EXPR || code1 == GE_EXPR)
2252 && (code2 == LT_EXPR || code2 == LE_EXPR))
2253 return boolean_false_node;
2254 }
2255
2256 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2257 NAME's definition is a truth value. See if there are any simplifications
2258 that can be done against the NAME's definition. */
2259 if (TREE_CODE (op1a) == SSA_NAME
2260 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2261 && (integer_zerop (op1b) || integer_onep (op1b)))
2262 {
2263 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2264 || (code1 == NE_EXPR && integer_onep (op1b)));
2265 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2266 switch (gimple_code (stmt))
2267 {
2268 case GIMPLE_ASSIGN:
2269 /* Try to simplify by copy-propagating the definition. */
2270 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2271
2272 case GIMPLE_PHI:
2273 /* If every argument to the PHI produces the same result when
2274 ANDed with the second comparison, we win.
2275 Do not do this unless the type is bool since we need a bool
2276 result here anyway. */
2277 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2278 {
2279 tree result = NULL_TREE;
2280 unsigned i;
2281 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2282 {
2283 tree arg = gimple_phi_arg_def (stmt, i);
2284
2285 /* If this PHI has itself as an argument, ignore it.
2286 If all the other args produce the same result,
2287 we're still OK. */
2288 if (arg == gimple_phi_result (stmt))
2289 continue;
2290 else if (TREE_CODE (arg) == INTEGER_CST)
2291 {
2292 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2293 {
2294 if (!result)
2295 result = boolean_false_node;
2296 else if (!integer_zerop (result))
2297 return NULL_TREE;
2298 }
2299 else if (!result)
2300 result = fold_build2 (code2, boolean_type_node,
2301 op2a, op2b);
2302 else if (!same_bool_comparison_p (result,
2303 code2, op2a, op2b))
2304 return NULL_TREE;
2305 }
2306 else if (TREE_CODE (arg) == SSA_NAME)
2307 {
2308 tree temp = and_var_with_comparison (arg, invert,
2309 code2, op2a, op2b);
2310 if (!temp)
2311 return NULL_TREE;
2312 else if (!result)
2313 result = temp;
2314 else if (!same_bool_result_p (result, temp))
2315 return NULL_TREE;
2316 }
2317 else
2318 return NULL_TREE;
2319 }
2320 return result;
2321 }
2322
2323 default:
2324 break;
2325 }
2326 }
2327 return NULL_TREE;
2328 }
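
/* Illustrative sketch, an editorial aside rather than part of this file:
   a few concrete folds the constant-merging logic above performs,
   written as testable C predicates:
     (x == 5) && (x <  7)  =>  x == 5   (EQ case, constant satisfies cmp)
     (x == 5) && (x >  7)  =>  false    (EQ case, constant fails cmp)
     (x <  3) && (x <= 5)  =>  x < 3    (more restrictive of two <)
     (x <= 4) && (x >= 4)  =>  x == 4   (singleton range)
     (x <  3) && (x >  5)  =>  false    (disjoint ranges)  */
#if 0
static int
demo_and_folds (int x)
{
  /* Every equivalence holds for all ints x, so the result is 1.  */
  return ((x == 5 && x < 7) == (x == 5))
	 && ((x == 5 && x > 7) == 0)
	 && ((x < 3 && x <= 5) == (x < 3))
	 && ((x <= 4 && x >= 4) == (x == 4))
	 && ((x < 3 && x > 5) == 0);
}
#endif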
2329
2330 /* Try to simplify the AND of two comparisons, specified by
2331 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2332 If this can be simplified to a single expression (without requiring
2333 introducing more SSA variables to hold intermediate values),
2334 return the resulting tree. Otherwise return NULL_TREE.
2335 If the result expression is non-null, it has boolean type. */
2336
2337 tree
2338 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2339 enum tree_code code2, tree op2a, tree op2b)
2340 {
2341 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2342 if (t)
2343 return t;
2344 else
2345 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2346 }
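
/* Usage sketch (hypothetical caller, not from this file): a pass that
   sees "a = (x < 3); b = (x == 5); c = a & b" could ask

     maybe_fold_and_comparisons (LT_EXPR, x, build_int_cst (type, 3),
                                 EQ_EXPR, x, build_int_cst (type, 5))

   and would get boolean_false_node back, letting it replace c with
   false without creating any intermediate SSA names.  The wrapper
   retries with the two comparisons swapped because and_comparisons_1
   only follows SSA definitions on its first argument.  */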
2347
2348 /* Helper function for or_comparisons_1: try to simplify the OR of the
2349 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2350 If INVERT is true, invert the value of VAR before doing the OR.
2351 Return NULL_TREE if we can't simplify this to a single expression. */
2352
2353 static tree
2354 or_var_with_comparison (tree var, bool invert,
2355 enum tree_code code2, tree op2a, tree op2b)
2356 {
2357 tree t;
2358 gimple stmt = SSA_NAME_DEF_STMT (var);
2359
2360 /* We can only deal with variables whose definitions are assignments. */
2361 if (!is_gimple_assign (stmt))
2362 return NULL_TREE;
2363
2364 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2365 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2366 Then we only have to consider the simpler non-inverted cases. */
2367 if (invert)
2368 t = and_var_with_comparison_1 (stmt,
2369 invert_tree_comparison (code2, false),
2370 op2a, op2b);
2371 else
2372 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2373 return canonicalize_bool (t, invert);
2374 }
2375
2376 /* Try to simplify the OR of the ssa variable defined by the assignment
2377 STMT with the comparison specified by (OP2A CODE2 OP2B).
2378 Return NULL_TREE if we can't simplify this to a single expression. */
2379
2380 static tree
2381 or_var_with_comparison_1 (gimple stmt,
2382 enum tree_code code2, tree op2a, tree op2b)
2383 {
2384 tree var = gimple_assign_lhs (stmt);
2385 tree true_test_var = NULL_TREE;
2386 tree false_test_var = NULL_TREE;
2387 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2388
2389 /* Check for identities like (var OR (var != 0)) => true. */
2390 if (TREE_CODE (op2a) == SSA_NAME
2391 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2392 {
2393 if ((code2 == NE_EXPR && integer_zerop (op2b))
2394 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2395 {
2396 true_test_var = op2a;
2397 if (var == true_test_var)
2398 return var;
2399 }
2400 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2401 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2402 {
2403 false_test_var = op2a;
2404 if (var == false_test_var)
2405 return boolean_true_node;
2406 }
2407 }
2408
2409 /* If the definition is a comparison, recurse on it. */
2410 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2411 {
2412 tree t = or_comparisons_1 (innercode,
2413 gimple_assign_rhs1 (stmt),
2414 gimple_assign_rhs2 (stmt),
2415 code2,
2416 op2a,
2417 op2b);
2418 if (t)
2419 return t;
2420 }
2421
2422 /* If the definition is an AND or OR expression, we may be able to
2423 simplify by reassociating. */
2424 if (innercode == TRUTH_AND_EXPR
2425 || innercode == TRUTH_OR_EXPR
2426 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2427 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2428 {
2429 tree inner1 = gimple_assign_rhs1 (stmt);
2430 tree inner2 = gimple_assign_rhs2 (stmt);
2431 gimple s;
2432 tree t;
2433 tree partial = NULL_TREE;
2434 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2435
2436 /* Check for boolean identities that don't require recursive examination
2437 of inner1/inner2:
2438 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2439 inner1 OR (inner1 AND inner2) => inner1
2440 !inner1 OR (inner1 OR inner2) => true
2441 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2442 */
2443 if (inner1 == true_test_var)
2444 return (is_or ? var : inner1);
2445 else if (inner2 == true_test_var)
2446 return (is_or ? var : inner2);
2447 else if (inner1 == false_test_var)
2448 return (is_or
2449 ? boolean_true_node
2450 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2451 else if (inner2 == false_test_var)
2452 return (is_or
2453 ? boolean_true_node
2454 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2455
2456 /* Next, redistribute/reassociate the OR across the inner tests.
2457 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2458 if (TREE_CODE (inner1) == SSA_NAME
2459 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2460 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2461 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2462 gimple_assign_rhs1 (s),
2463 gimple_assign_rhs2 (s),
2464 code2, op2a, op2b)))
2465 {
2466 /* Handle the OR case, where we are reassociating:
2467 (inner1 OR inner2) OR (op2a code2 op2b)
2468 => (t OR inner2)
2469 If the partial result t is a constant, we win. Otherwise
2470 continue on to try reassociating with the other inner test. */
2471 if (is_or)
2472 {
2473 if (integer_onep (t))
2474 return boolean_true_node;
2475 else if (integer_zerop (t))
2476 return inner2;
2477 }
2478
2479 /* Handle the AND case, where we are redistributing:
2480 (inner1 AND inner2) OR (op2a code2 op2b)
2481 => (t AND (inner2 OR (op2a code op2b))) */
2482 else
2483 {
2484 if (integer_zerop (t))
2485 return boolean_false_node;
2486 else
2487 /* Save partial result for later. */
2488 partial = t;
2489 }
2490 }
2491
2492 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2493 if (TREE_CODE (inner2) == SSA_NAME
2494 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2495 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2496 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2497 gimple_assign_rhs1 (s),
2498 gimple_assign_rhs2 (s),
2499 code2, op2a, op2b)))
2500 {
2501 /* Handle the OR case, where we are reassociating:
2502 (inner1 OR inner2) OR (op2a code2 op2b)
2503 => (inner1 OR t) */
2504 if (is_or)
2505 {
2506 if (integer_zerop (t))
2507 return inner1;
2508 else if (integer_onep (t))
2509 return boolean_true_node;
2510 }
2511
2512 /* Handle the AND case, where we are redistributing:
2513 (inner1 AND inner2) OR (op2a code2 op2b)
2514 => (t AND (inner1 OR (op2a code2 op2b)))
2515 => (t AND partial) */
2516 else
2517 {
2518 if (integer_zerop (t))
2519 return boolean_false_node;
2520 else if (partial)
2521 {
2522 /* We already got a simplification for the other
2523 operand to the redistributed AND expression. The
2524 interesting case is when at least one is true.
2525 Or, if both are the same, we can apply the identity
2526 (x AND x) == x. */
2527 if (integer_onep (partial))
2528 return t;
2529 else if (integer_onep (t))
2530 return partial;
2531 else if (same_bool_result_p (t, partial))
2532 return t;
2533 }
2534 }
2535 }
2536 }
2537 return NULL_TREE;
2538 }
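
/* Illustrative sketch, an editorial aside rather than part of this file:
   the dual laws behind the OR reassociation above, over C ints, plus
   the idempotence identity (t AND t) == t used when both partial
   results agree.  */
#if 0
static int
demo_redistribute_or (int a, int b, int c, int t)
{
  /* All three identities hold, so the result is 1.  */
  return (((a || b) || c) == ((a || c) || b))            /* reassociation */
	 && (((a && b) || c) == ((a || c) && (b || c)))  /* redistribution */
	 && ((t && t) == !!t);                           /* idempotence */
}
#endif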
2539
2540 /* Try to simplify the OR of two comparisons defined by
2541 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2542 If this can be done without constructing an intermediate value,
2543 return the resulting tree; otherwise NULL_TREE is returned.
2544 This function is deliberately asymmetric as it recurses on SSA defs
2545 in the first comparison but not the second. */
2546
2547 static tree
2548 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2549 enum tree_code code2, tree op2a, tree op2b)
2550 {
2551 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2552 if (operand_equal_p (op1a, op2a, 0)
2553 && operand_equal_p (op1b, op2b, 0))
2554 {
2555 tree t = combine_comparisons (UNKNOWN_LOCATION,
2556 TRUTH_ORIF_EXPR, code1, code2,
2557 boolean_type_node, op1a, op1b);
2558 if (t)
2559 return t;
2560 }
2561
2562 /* Likewise the swapped case of the above. */
2563 if (operand_equal_p (op1a, op2b, 0)
2564 && operand_equal_p (op1b, op2a, 0))
2565 {
2566 tree t = combine_comparisons (UNKNOWN_LOCATION,
2567 TRUTH_ORIF_EXPR, code1,
2568 swap_tree_comparison (code2),
2569 boolean_type_node, op1a, op1b);
2570 if (t)
2571 return t;
2572 }
2573
2574 /* If both comparisons are of the same value against constants, we might
2575 be able to merge them. */
2576 if (operand_equal_p (op1a, op2a, 0)
2577 && TREE_CODE (op1b) == INTEGER_CST
2578 && TREE_CODE (op2b) == INTEGER_CST)
2579 {
2580 int cmp = tree_int_cst_compare (op1b, op2b);
2581
2582 /* If we have (op1a != op1b), we should either be able to
2583 return that or TRUE, depending on whether the constant op1b
2584 also satisfies the other comparison against op2b. */
2585 if (code1 == NE_EXPR)
2586 {
2587 bool done = true;
2588 bool val;
2589 switch (code2)
2590 {
2591 case EQ_EXPR: val = (cmp == 0); break;
2592 case NE_EXPR: val = (cmp != 0); break;
2593 case LT_EXPR: val = (cmp < 0); break;
2594 case GT_EXPR: val = (cmp > 0); break;
2595 case LE_EXPR: val = (cmp <= 0); break;
2596 case GE_EXPR: val = (cmp >= 0); break;
2597 default: done = false;
2598 }
2599 if (done)
2600 {
2601 if (val)
2602 return boolean_true_node;
2603 else
2604 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2605 }
2606 }
2607 /* Likewise if the second comparison is a != comparison. */
2608 else if (code2 == NE_EXPR)
2609 {
2610 bool done = true;
2611 bool val;
2612 switch (code1)
2613 {
2614 case EQ_EXPR: val = (cmp == 0); break;
2615 case NE_EXPR: val = (cmp != 0); break;
2616 case LT_EXPR: val = (cmp > 0); break;
2617 case GT_EXPR: val = (cmp < 0); break;
2618 case LE_EXPR: val = (cmp >= 0); break;
2619 case GE_EXPR: val = (cmp <= 0); break;
2620 default: done = false;
2621 }
2622 if (done)
2623 {
2624 if (val)
2625 return boolean_true_node;
2626 else
2627 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2628 }
2629 }
2630
2631 /* See if an equality test is redundant with the other comparison. */
2632 else if (code1 == EQ_EXPR)
2633 {
2634 bool val;
2635 switch (code2)
2636 {
2637 case EQ_EXPR: val = (cmp == 0); break;
2638 case NE_EXPR: val = (cmp != 0); break;
2639 case LT_EXPR: val = (cmp < 0); break;
2640 case GT_EXPR: val = (cmp > 0); break;
2641 case LE_EXPR: val = (cmp <= 0); break;
2642 case GE_EXPR: val = (cmp >= 0); break;
2643 default:
2644 val = false;
2645 }
2646 if (val)
2647 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2648 }
2649 else if (code2 == EQ_EXPR)
2650 {
2651 bool val;
2652 switch (code1)
2653 {
2654 case EQ_EXPR: val = (cmp == 0); break;
2655 case NE_EXPR: val = (cmp != 0); break;
2656 case LT_EXPR: val = (cmp > 0); break;
2657 case GT_EXPR: val = (cmp < 0); break;
2658 case LE_EXPR: val = (cmp >= 0); break;
2659 case GE_EXPR: val = (cmp <= 0); break;
2660 default:
2661 val = false;
2662 }
2663 if (val)
2664 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2665 }
2666
2667 /* Choose the less restrictive of two < or <= comparisons. */
2668 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2669 && (code2 == LT_EXPR || code2 == LE_EXPR))
2670 {
2671 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2672 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2673 else
2674 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2675 }
2676
2677 /* Likewise choose the less restrictive of two > or >= comparisons. */
2678 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2679 && (code2 == GT_EXPR || code2 == GE_EXPR))
2680 {
2681 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2682 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2683 else
2684 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2685 }
2686
2687 /* Check for singleton ranges. */
2688 else if (cmp == 0
2689 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2690 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2691 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2692
2693 /* Check for less/greater pairs that don't restrict the range at all. */
2694 else if (cmp >= 0
2695 && (code1 == LT_EXPR || code1 == LE_EXPR)
2696 && (code2 == GT_EXPR || code2 == GE_EXPR))
2697 return boolean_true_node;
2698 else if (cmp <= 0
2699 && (code1 == GT_EXPR || code1 == GE_EXPR)
2700 && (code2 == LT_EXPR || code2 == LE_EXPR))
2701 return boolean_true_node;
2702 }
2703
2704 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2705 NAME's definition is a truth value. See if there are any simplifications
2706 that can be done against the NAME's definition. */
2707 if (TREE_CODE (op1a) == SSA_NAME
2708 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2709 && (integer_zerop (op1b) || integer_onep (op1b)))
2710 {
2711 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2712 || (code1 == NE_EXPR && integer_onep (op1b)));
2713 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2714 switch (gimple_code (stmt))
2715 {
2716 case GIMPLE_ASSIGN:
2717 /* Try to simplify by copy-propagating the definition. */
2718 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2719
2720 case GIMPLE_PHI:
2721 /* If every argument to the PHI produces the same result when
2722 ORed with the second comparison, we win.
2723 Do not do this unless the type is bool since we need a bool
2724 result here anyway. */
2725 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2726 {
2727 tree result = NULL_TREE;
2728 unsigned i;
2729 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2730 {
2731 tree arg = gimple_phi_arg_def (stmt, i);
2732
2733 /* If this PHI has itself as an argument, ignore it.
2734 If all the other args produce the same result,
2735 we're still OK. */
2736 if (arg == gimple_phi_result (stmt))
2737 continue;
2738 else if (TREE_CODE (arg) == INTEGER_CST)
2739 {
2740 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2741 {
2742 if (!result)
2743 result = boolean_true_node;
2744 else if (!integer_onep (result))
2745 return NULL_TREE;
2746 }
2747 else if (!result)
2748 result = fold_build2 (code2, boolean_type_node,
2749 op2a, op2b);
2750 else if (!same_bool_comparison_p (result,
2751 code2, op2a, op2b))
2752 return NULL_TREE;
2753 }
2754 else if (TREE_CODE (arg) == SSA_NAME)
2755 {
2756 tree temp = or_var_with_comparison (arg, invert,
2757 code2, op2a, op2b);
2758 if (!temp)
2759 return NULL_TREE;
2760 else if (!result)
2761 result = temp;
2762 else if (!same_bool_result_p (result, temp))
2763 return NULL_TREE;
2764 }
2765 else
2766 return NULL_TREE;
2767 }
2768 return result;
2769 }
2770
2771 default:
2772 break;
2773 }
2774 }
2775 return NULL_TREE;
2776 }
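
/* Illustrative sketch, an editorial aside rather than part of this file:
   concrete folds from the OR constant-merging logic above, written as
   testable C predicates:
     (x != 5) || (x <  7)  =>  true     (NE case, constant satisfies cmp)
     (x <  3) || (x <= 5)  =>  x <= 5   (less restrictive of two <)
     (x <  4) || (x >  4)  =>  x != 4   (singleton range)
     (x <  5) || (x >  3)  =>  true     (no restriction at all)  */
#if 0
static int
demo_or_folds (int x)
{
  /* Every equivalence holds for all ints x, so the result is 1.  */
  return ((x != 5 || x < 7) == 1)
	 && ((x < 3 || x <= 5) == (x <= 5))
	 && ((x < 4 || x > 4) == (x != 4))
	 && ((x < 5 || x > 3) == 1);
}
#endif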
2777
2778 /* Try to simplify the OR of two comparisons, specified by
2779 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2780 If this can be simplified to a single expression (without requiring
2781 introducing more SSA variables to hold intermediate values),
2782 return the resulting tree. Otherwise return NULL_TREE.
2783 If the result expression is non-null, it has boolean type. */
2784
2785 tree
2786 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2787 enum tree_code code2, tree op2a, tree op2b)
2788 {
2789 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2790 if (t)
2791 return t;
2792 else
2793 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2794 }