re PR objc/45878 (Can't compile even a trivial ObjC program with -fexceptions -O2)
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  /* Decls that are neither static nor external are ordinary locals of
     this unit and always referencable.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not big in frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      /* For variables, require a finalized varpool node so the
	 definition is really available to this unit.  */
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
107
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it
   into acceptable form for is_gimple_min_invariant.
   Returns the (possibly rewritten) value, or NULL_TREE when CVAL takes
   the address of a decl that cannot be referenced from this unit.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  /* Try to turn &base p+ offset into &base[index], which is a valid
     gimple invariant form.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      /* Refuse addresses of decls we may not legally reference here
	 (see can_refer_decl_in_current_unit_p).  */
      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      /* The address may expose a previously unreferenced variable.  */
      if (base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
    }
  return cval;
}
138
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  /* Canonicalize the initializer; it may still fail to be a
	     gimple invariant (for instance when it references a decl
	     we cannot refer to from this unit), in which case give up.  */
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  Only scalar types get
	 this treatment.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
167
168
169 /* Return true if we may propagate the address expression ADDR into the
170 dereference DEREF and cancel them. */
171
172 bool
173 may_propagate_address_into_dereference (tree addr, tree deref)
174 {
175 gcc_assert (TREE_CODE (deref) == MEM_REF
176 && TREE_CODE (addr) == ADDR_EXPR);
177
178 /* Don't propagate if ADDR's operand has incomplete type. */
179 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
180 return false;
181
182 /* If the address is invariant then we do not need to preserve restrict
183 qualifications. But we do need to preserve volatile qualifiers until
184 we can annotate the folded dereference itself properly. */
185 if (is_gimple_min_invariant (addr)
186 && (!TREE_THIS_VOLATILE (deref)
187 || TYPE_VOLATILE (TREE_TYPE (addr))))
188 return useless_type_conversion_p (TREE_TYPE (deref),
189 TREE_TYPE (TREE_OPERAND (addr, 0)));
190
191 /* Else both the address substitution and the folding must result in
192 a valid useless type conversion sequence. */
193 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
194 TREE_TYPE (addr))
195 && useless_type_conversion_p (TREE_TYPE (deref),
196 TREE_TYPE (TREE_OPERAND (addr, 0))));
197 }
198
199
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.

   Returns the new ARRAY_REF tree, or NULL_TREE when the offset does not
   divide evenly by the element size or the resulting index would fall
   outside the array's domain.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      /* Rebase the element offset against the array's low bound.  */
      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      /* The fold only succeeds when ELT_SIZE divides OFFSET exactly
	 (zero remainder in both halves).  */
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
330
331
332 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
333 LOC is the location of original expression.
334
335 Before attempting the conversion strip off existing ADDR_EXPRs. */
336
337 tree
338 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
339 tree orig_type)
340 {
341 tree ret;
342
343 STRIP_NOPS (base);
344 if (TREE_CODE (base) != ADDR_EXPR)
345 return NULL_TREE;
346
347 base = TREE_OPERAND (base, 0);
348 if (types_compatible_p (orig_type, TREE_TYPE (base))
349 && integer_zerop (offset))
350 return base;
351
352 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
353 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
354 return ret;
355 return NULL_TREE;
356 }
357
358 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
359 LOC is the location of the original expression. */
360
361 tree
362 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
363 tree orig_type)
364 {
365 tree base, ret;
366
367 STRIP_NOPS (addr);
368 if (TREE_CODE (addr) != ADDR_EXPR)
369 return NULL_TREE;
370 base = TREE_OPERAND (addr, 0);
371 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
372 if (ret)
373 {
374 ret = build_fold_addr_expr (ret);
375 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
376 return NULL_TREE;
377 SET_EXPR_LOCATION (ret, loc);
378 }
379
380 return ret;
381 }
382
383
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  /* &A[0] p+ (i * elsz) becomes &A[i].  */
	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
		   (build4 (ARRAY_REF, TREE_TYPE (op0),
			    TREE_OPERAND (op0, 0),
			    gimple_assign_rhs1 (offset_def),
			    TREE_OPERAND (op0, 2),
			    TREE_OPERAND (op0, 3)));
	  /* With a one-byte element the offset itself is the index.  */
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
		   (build4 (ARRAY_REF, TREE_TYPE (op0),
			    TREE_OPERAND (op0, 0),
			    op1,
			    TREE_OPERAND (op0, 2),
			    TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  /* &A p+ (i * elsz) becomes &A[i] (A itself is the array).  */
	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
		   (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
			    op0, gimple_assign_rhs1 (offset_def),
			    integer_zero_node, NULL_TREE));
	  /* With a one-byte element the offset itself is the index.  */
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
		   (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
			    op0, op1,
			    integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
547
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.

   Returns the folded replacement for EXPR, or NULL_TREE when no
   simplification applies.  The function recurses on itself after each
   in-place rewrite of the innermost reference so that subsequent
   simplifications are picked up.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  /* T tracks the innermost handled component of EXPR; rewriting *T
     rewrites EXPR in place.  */
  tree *t = &expr;
  tree result;

  /* On the RHS a load from a constant aggregate may fold to its value.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
	return tem;
    }

  /* Walk down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  /* Canonicalize MEM_REFs invariant address operand.  */
  else if (TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
	   && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
	   && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
    {
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }
  /* Loads from constant decls may be replaced by their value.  */
  else if (!is_lhs
	   && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
	  && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
	{
	  *t = unshare_expr (tem);
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
648
649
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the RHS.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    /* Rebuild the COND_EXPR with the simplified condition; the
	       generic fold may then select one of the arms.  */
	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold the referenced object; &MEM[p, 0] collapses to p.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    /* All elements must be literal constants.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    /* Pointer-to-pointer conversion: try turning it into an
	       address of an array element of the target type.  */
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  /* For pointers to arrays fold toward a pointer to the element
	     type, unless the LHS requires the original pointer type.  */
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
        result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
          if (commutative_ternary_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
874
875 /* Attempt to fold a conditional statement. Return true if any changes were
876 made. We only attempt to fold the condition expression, and do not perform
877 any transformation that would require alteration of the cfg. It is
878 assumed that the operands have been previously folded. */
879
880 static bool
881 fold_gimple_cond (gimple stmt)
882 {
883 tree result = fold_binary_loc (gimple_location (stmt),
884 gimple_cond_code (stmt),
885 boolean_type_node,
886 gimple_cond_lhs (stmt),
887 gimple_cond_rhs (stmt));
888
889 if (result)
890 {
891 STRIP_USELESS_TYPE_CONVERSION (result);
892 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
893 {
894 gimple_cond_set_condition_from_tree (stmt, result);
895 return true;
896 }
897 }
898
899 return false;
900 }
901
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  /* Last store seen whose VDEF has not been assigned yet.  */
  gimple laststore = NULL;
  /* SSA name of the virtual operand that reaches the statement being
     processed; threaded through the new sequence.  */
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Insertion is deferred by one statement so that the last one can
	 instead replace the original call below.  */
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with exact SSA name we
         know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      /* Remember stores (assignments to memory) so their VDEF can be
	 filled in once we know whether another store follows.  */
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
         sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      /* When the LHS is an SSA register the replacement assignment does
	 not store, so the pending store inherits the original VDEF.  */
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      /* A memory LHS means the final assignment itself is the store
	 that takes over the original call's VDEF.  */
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
1052
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.
   On success *LENGTH holds the (possibly merged) result.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* A COND_EXPR only yields a result when both arms yield one that
	 is compatible with what we have already accumulated.  */
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For a maximum value (TYPE == 2) ARG itself must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum string length / maximum value we keep the
	         larger of the previously recorded and the new constant.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For the exact string length (TYPE == 0) all sources must
	     agree on a single value.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.
     bitmap_set_bit returns false when the bit was already set.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
        }
        return true;

      default:
        return false;
    }
}
1167
1168
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  /* The call result is unused when there is no LHS.  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects which call argument get_maxval_strlen examines and
     TYPE selects its mode (0 = exact string length, 1 = maximum string
     length, 2 = maximum value).  This switch must stay in sync with the
     folding switch further below.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  /* NOTE(review): BITMAP_ALLOC returns an empty bitmap, so this
     bitmap_clear looks redundant — confirm before removing.  */
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      /* The first switch above admits only the codes handled here.  */
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
1360
1361 /* Return the first of the base binfos of BINFO that has virtual functions. */
1362
1363 static tree
1364 get_first_base_binfo_with_virtuals (tree binfo)
1365 {
1366 int i;
1367 tree base_binfo;
1368
1369 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1370 if (BINFO_VIRTUALS (base_binfo))
1371 return base_binfo;
1372
1373 return NULL_TREE;
1374 }
1375
1376
1377 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if
1378 it is found or NULL_TREE if it is not. */
1379
1380 static tree
1381 get_base_binfo_for_type (tree binfo, tree type)
1382 {
1383 int i;
1384 tree base_binfo;
1385 tree res = NULL_TREE;
1386
1387 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1388 if (TREE_TYPE (base_binfo) == type)
1389 {
1390 gcc_assert (!res);
1391 res = base_binfo;
1392 }
1393
1394 return res;
1395 }
1396
1397 /* Return a binfo describing the part of object referenced by expression REF.
1398 Return NULL_TREE if it cannot be determined. REF can consist of a series of
1399 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
1400 a simple declaration, indirect reference or an SSA_NAME. If the function
1401 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
1402 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
1403 Otherwise the first non-artificial field declaration or the base declaration
1404 will be examined to get the encapsulating type. */
1405
1406 tree
1407 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
1408 {
1409 while (true)
1410 {
1411 if (TREE_CODE (ref) == COMPONENT_REF)
1412 {
1413 tree par_type;
1414 tree binfo, base_binfo;
1415 tree field = TREE_OPERAND (ref, 1);
1416
1417 if (!DECL_ARTIFICIAL (field))
1418 {
1419 tree type = TREE_TYPE (field);
1420 if (TREE_CODE (type) == RECORD_TYPE)
1421 return TYPE_BINFO (type);
1422 else
1423 return NULL_TREE;
1424 }
1425
1426 par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
1427 binfo = TYPE_BINFO (par_type);
1428 if (!binfo
1429 || BINFO_N_BASE_BINFOS (binfo) == 0)
1430 return NULL_TREE;
1431
1432 base_binfo = get_first_base_binfo_with_virtuals (binfo);
1433 if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
1434 {
1435 tree d_binfo;
1436
1437 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
1438 known_binfo);
1439 /* Get descendant binfo. */
1440 if (!d_binfo)
1441 return NULL_TREE;
1442 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
1443 }
1444
1445 ref = TREE_OPERAND (ref, 0);
1446 }
1447 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
1448 return TYPE_BINFO (TREE_TYPE (ref));
1449 else if (known_binfo
1450 && (TREE_CODE (ref) == SSA_NAME
1451 || TREE_CODE (ref) == MEM_REF))
1452 return known_binfo;
1453 else
1454 return NULL_TREE;
1455 }
1456 }
1457
1458 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
1459 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
1460 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
1461
1462 tree
1463 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
1464 {
1465 HOST_WIDE_INT i;
1466 tree v, fndecl, delta;
1467
1468 v = BINFO_VIRTUALS (known_binfo);
1469 i = 0;
1470 while (i != token)
1471 {
1472 i += (TARGET_VTABLE_USES_DESCRIPTORS
1473 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1474 v = TREE_CHAIN (v);
1475 }
1476
1477 fndecl = TREE_VALUE (v);
1478 delta = TREE_PURPOSE (v);
1479 gcc_assert (host_integerp (delta, 0));
1480
1481 if (integer_nonzerop (delta))
1482 {
1483 struct cgraph_node *node = cgraph_get_node (fndecl);
1484 HOST_WIDE_INT off = tree_low_cst (delta, 0);
1485
1486 if (!node)
1487 return NULL;
1488 for (node = node->same_body; node; node = node->next)
1489 if (node->thunk.thunk_p && off == node->thunk.fixed_offset)
1490 break;
1491 if (node)
1492 fndecl = node->decl;
1493 else
1494 return NULL;
1495 }
1496
1497 /* When cgraph node is missing and function is not public, we cannot
1498 devirtualize. This can happen in WHOPR when the actual method
1499 ends up in other partition, because we found devirtualization
1500 possibility too late. */
1501 if (!can_refer_decl_in_current_unit_p (fndecl))
1502 return NULL;
1503 return build_fold_addr_expr (fndecl);
1504 }
1505
1506
1507 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
1508 is not NULL_TREE, it is the true type of the outmost encapsulating object if
1509 that comes from a pointer SSA_NAME. If the true outmost encapsulating type
1510 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
1511 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
1512
1513 tree
1514 gimple_fold_obj_type_ref (tree ref, tree known_type)
1515 {
1516 tree obj = OBJ_TYPE_REF_OBJECT (ref);
1517 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
1518 tree binfo;
1519
1520 if (TREE_CODE (obj) == ADDR_EXPR)
1521 obj = TREE_OPERAND (obj, 0);
1522
1523 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
1524 if (binfo)
1525 {
1526 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
1527 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
1528 if (!BINFO_VIRTUALS (binfo))
1529 return NULL_TREE;
1530 return gimple_fold_obj_type_ref_known_binfo (token, binfo);
1531 }
1532 else
1533 return NULL_TREE;
1534 }
1535
1536 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1537 The statement may be replaced by another statement, e.g., if the call
1538 simplifies to a constant value. Return true if any changes were made.
1539 It is assumed that the operands have been previously folded. */
1540
1541 static bool
1542 fold_gimple_call (gimple_stmt_iterator *gsi, bool inplace)
1543 {
1544 gimple stmt = gsi_stmt (*gsi);
1545
1546 tree callee = gimple_call_fndecl (stmt);
1547
1548 /* Check for builtins that CCP can handle using information not
1549 available in the generic fold routines. */
1550 if (!inplace && callee && DECL_BUILT_IN (callee))
1551 {
1552 tree result = gimple_fold_builtin (stmt);
1553
1554 if (result)
1555 {
1556 if (!update_call_from_tree (gsi, result))
1557 gimplify_and_update_call_from_tree (gsi, result);
1558 return true;
1559 }
1560 }
1561 else
1562 {
1563 /* ??? Should perhaps do this in fold proper. However, doing it
1564 there requires that we create a new CALL_EXPR, and that requires
1565 copying EH region info to the new node. Easier to just do it
1566 here where we can just smash the call operand. */
1567 callee = gimple_call_fn (stmt);
1568 if (TREE_CODE (callee) == OBJ_TYPE_REF
1569 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
1570 {
1571 tree t;
1572
1573 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
1574 if (t)
1575 {
1576 gimple_call_set_fn (stmt, t);
1577 return true;
1578 }
1579 }
1580 }
1581
1582 return false;
1583 }
1584
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE is true the statement must not
   be replaced and its operand count must not grow.  Returns true iff any
   change was made.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only use the new RHS when it does not
	   need more operand slots than the original statement has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= fold_gimple_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  Outputs are folded as lvalues
	 (is_lhs = true), inputs as rvalues.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      /* Fold *& in the value of a debug bind.  */
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The folding above (e.g. fold_gimple_call) may have replaced the
     statement, so re-fetch it before touching the LHS.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1701
1702 /* Fold the statement pointed to by GSI. In some cases, this function may
1703 replace the whole statement with a new one. Returns true iff folding
1704 makes any changes.
1705 The statement pointed to by GSI should be in valid gimple form but may
1706 be in unfolded state as resulting from for example constant propagation
1707 which can produce *&x = 0. */
1708
1709 bool
1710 fold_stmt (gimple_stmt_iterator *gsi)
1711 {
1712 return fold_stmt_1 (gsi, false);
1713 }
1714
1715 /* Perform the minimal folding on statement STMT. Only operations like
1716 *&x created by constant propagation are handled. The statement cannot
1717 be replaced with a new one. Return true if the statement was
1718 changed, false otherwise.
1719 The statement STMT should be in valid gimple form but may
1720 be in unfolded state as resulting from for example constant propagation
1721 which can produce *&x = 0. */
1722
1723 bool
1724 fold_stmt_inplace (gimple stmt)
1725 {
1726 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1727 bool changed = fold_stmt_1 (&gsi, true);
1728 gcc_assert (gsi_stmt (gsi) == stmt);
1729 return changed;
1730 }
1731
1732 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1733 if EXPR is null or we don't know how.
1734 If non-null, the result always has boolean type. */
1735
1736 static tree
1737 canonicalize_bool (tree expr, bool invert)
1738 {
1739 if (!expr)
1740 return NULL_TREE;
1741 else if (invert)
1742 {
1743 if (integer_nonzerop (expr))
1744 return boolean_false_node;
1745 else if (integer_zerop (expr))
1746 return boolean_true_node;
1747 else if (TREE_CODE (expr) == SSA_NAME)
1748 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1749 build_int_cst (TREE_TYPE (expr), 0));
1750 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1751 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1752 boolean_type_node,
1753 TREE_OPERAND (expr, 0),
1754 TREE_OPERAND (expr, 1));
1755 else
1756 return NULL_TREE;
1757 }
1758 else
1759 {
1760 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1761 return expr;
1762 if (integer_nonzerop (expr))
1763 return boolean_true_node;
1764 else if (integer_zerop (expr))
1765 return boolean_false_node;
1766 else if (TREE_CODE (expr) == SSA_NAME)
1767 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1768 build_int_cst (TREE_TYPE (expr), 0));
1769 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1770 return fold_build2 (TREE_CODE (expr),
1771 boolean_type_node,
1772 TREE_OPERAND (expr, 0),
1773 TREE_OPERAND (expr, 1));
1774 else
1775 return NULL_TREE;
1776 }
1777 }
1778
1779 /* Check to see if a boolean expression EXPR is logically equivalent to the
1780 comparison (OP1 CODE OP2). Check for various identities involving
1781 SSA_NAMEs. */
1782
1783 static bool
1784 same_bool_comparison_p (const_tree expr, enum tree_code code,
1785 const_tree op1, const_tree op2)
1786 {
1787 gimple s;
1788
1789 /* The obvious case. */
1790 if (TREE_CODE (expr) == code
1791 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1792 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1793 return true;
1794
1795 /* Check for comparing (name, name != 0) and the case where expr
1796 is an SSA_NAME with a definition matching the comparison. */
1797 if (TREE_CODE (expr) == SSA_NAME
1798 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1799 {
1800 if (operand_equal_p (expr, op1, 0))
1801 return ((code == NE_EXPR && integer_zerop (op2))
1802 || (code == EQ_EXPR && integer_nonzerop (op2)));
1803 s = SSA_NAME_DEF_STMT (expr);
1804 if (is_gimple_assign (s)
1805 && gimple_assign_rhs_code (s) == code
1806 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1807 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1808 return true;
1809 }
1810
1811 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1812 of name is a comparison, recurse. */
1813 if (TREE_CODE (op1) == SSA_NAME
1814 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1815 {
1816 s = SSA_NAME_DEF_STMT (op1);
1817 if (is_gimple_assign (s)
1818 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1819 {
1820 enum tree_code c = gimple_assign_rhs_code (s);
1821 if ((c == NE_EXPR && integer_zerop (op2))
1822 || (c == EQ_EXPR && integer_nonzerop (op2)))
1823 return same_bool_comparison_p (expr, c,
1824 gimple_assign_rhs1 (s),
1825 gimple_assign_rhs2 (s));
1826 if ((c == EQ_EXPR && integer_zerop (op2))
1827 || (c == NE_EXPR && integer_nonzerop (op2)))
1828 return same_bool_comparison_p (expr,
1829 invert_tree_comparison (c, false),
1830 gimple_assign_rhs1 (s),
1831 gimple_assign_rhs2 (s));
1832 }
1833 }
1834 return false;
1835 }
1836
1837 /* Check to see if two boolean expressions OP1 and OP2 are logically
1838 equivalent. */
1839
1840 static bool
1841 same_bool_result_p (const_tree op1, const_tree op2)
1842 {
1843 /* Simple cases first. */
1844 if (operand_equal_p (op1, op2, 0))
1845 return true;
1846
1847 /* Check the cases where at least one of the operands is a comparison.
1848 These are a bit smarter than operand_equal_p in that they apply some
1849 identifies on SSA_NAMEs. */
1850 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1851 && same_bool_comparison_p (op1, TREE_CODE (op2),
1852 TREE_OPERAND (op2, 0),
1853 TREE_OPERAND (op2, 1)))
1854 return true;
1855 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1856 && same_bool_comparison_p (op2, TREE_CODE (op1),
1857 TREE_OPERAND (op1, 0),
1858 TREE_OPERAND (op1, 1)))
1859 return true;
1860
1861 /* Default case. */
1862 return false;
1863 }
1864
1865 /* Forward declarations for some mutually recursive functions. */
1866
1867 static tree
1868 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1869 enum tree_code code2, tree op2a, tree op2b);
1870 static tree
1871 and_var_with_comparison (tree var, bool invert,
1872 enum tree_code code2, tree op2a, tree op2b);
1873 static tree
1874 and_var_with_comparison_1 (gimple stmt,
1875 enum tree_code code2, tree op2a, tree op2b);
1876 static tree
1877 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1878 enum tree_code code2, tree op2a, tree op2b);
1879 static tree
1880 or_var_with_comparison (tree var, bool invert,
1881 enum tree_code code2, tree op2a, tree op2b);
1882 static tree
1883 or_var_with_comparison_1 (gimple stmt,
1884 enum tree_code code2, tree op2a, tree op2b);
1885
1886 /* Helper function for and_comparisons_1: try to simplify the AND of the
1887 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1888 If INVERT is true, invert the value of the VAR before doing the AND.
1889 Return NULL_EXPR if we can't simplify this to a single expression. */
1890
1891 static tree
1892 and_var_with_comparison (tree var, bool invert,
1893 enum tree_code code2, tree op2a, tree op2b)
1894 {
1895 tree t;
1896 gimple stmt = SSA_NAME_DEF_STMT (var);
1897
1898 /* We can only deal with variables whose definitions are assignments. */
1899 if (!is_gimple_assign (stmt))
1900 return NULL_TREE;
1901
1902 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1903 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1904 Then we only have to consider the simpler non-inverted cases. */
1905 if (invert)
1906 t = or_var_with_comparison_1 (stmt,
1907 invert_tree_comparison (code2, false),
1908 op2a, op2b);
1909 else
1910 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1911 return canonicalize_bool (t, invert);
1912 }
1913
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.
     TRUE_TEST_VAR / FALSE_TEST_VAR record whether the second operand
     tests OP2A for truth or falsehood; they are reused below.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else
		/* Save partial result for later.  */
		partial = t;
	    }
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2077
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Both tests compare the same operands; let combine_comparisons
	 intersect the two condition codes.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above, (x CODE1 y) AND (y CODE2 x);
     swapping CODE2 normalizes it to operate on (x, y).  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative if op1b < op2b, zero if equal, positive if
	 op1b > op2b; every case below is phrased in terms of it.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  The
	 comparison directions are mirrored relative to the case above
	 because the roles of op1b and op2b are exchanged.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  Here VAL means "the NE
	 test is implied by the other test", in which case the other
	 test alone is the answer; otherwise no merge is possible.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => (x == c).  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges, e.g. (x < c1 AND x > c2) with
	 c1 <= c2, which can never both hold.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first test asks "NAME is false":
	 (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A "false" argument forces the AND to false;
			 all args must then agree on false.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      /* A "true" argument reduces the AND to the second
			 comparison itself.  */
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      /* Recurse on the argument's definition; every arg
			 must simplify to the same boolean result.  */
		      tree temp = and_var_with_comparison (arg, invert,
							   code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Fall through (non-boolean PHI type): no simplification.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2315
2316 /* Try to simplify the AND of two comparisons, specified by
2317 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2318 If this can be simplified to a single expression (without requiring
2319 introducing more SSA variables to hold intermediate values),
2320 return the resulting tree. Otherwise return NULL_TREE.
2321 If the result expression is non-null, it has boolean type. */
2322
2323 tree
2324 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2325 enum tree_code code2, tree op2a, tree op2b)
2326 {
2327 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2328 if (t)
2329 return t;
2330 else
2331 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2332 }
2333
2334 /* Helper function for or_comparisons_1: try to simplify the OR of the
2335 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2336 If INVERT is true, invert the value of VAR before doing the OR.
2337 Return NULL_EXPR if we can't simplify this to a single expression. */
2338
2339 static tree
2340 or_var_with_comparison (tree var, bool invert,
2341 enum tree_code code2, tree op2a, tree op2b)
2342 {
2343 tree t;
2344 gimple stmt = SSA_NAME_DEF_STMT (var);
2345
2346 /* We can only deal with variables whose definitions are assignments. */
2347 if (!is_gimple_assign (stmt))
2348 return NULL_TREE;
2349
2350 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2351 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2352 Then we only have to consider the simpler non-inverted cases. */
2353 if (invert)
2354 t = and_var_with_comparison_1 (stmt,
2355 invert_tree_comparison (code2, false),
2356 op2a, op2b);
2357 else
2358 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2359 return canonicalize_bool (t, invert);
2360 }
2361
2362 /* Try to simplify the OR of the ssa variable defined by the assignment
2363 STMT with the comparison specified by (OP2A CODE2 OP2B).
2364 Return NULL_EXPR if we can't simplify this to a single expression. */
2365
2366 static tree
2367 or_var_with_comparison_1 (gimple stmt,
2368 enum tree_code code2, tree op2a, tree op2b)
2369 {
2370 tree var = gimple_assign_lhs (stmt);
2371 tree true_test_var = NULL_TREE;
2372 tree false_test_var = NULL_TREE;
2373 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2374
2375 /* Check for identities like (var OR (var != 0)) => true . */
2376 if (TREE_CODE (op2a) == SSA_NAME
2377 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2378 {
2379 if ((code2 == NE_EXPR && integer_zerop (op2b))
2380 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2381 {
2382 true_test_var = op2a;
2383 if (var == true_test_var)
2384 return var;
2385 }
2386 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2387 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2388 {
2389 false_test_var = op2a;
2390 if (var == false_test_var)
2391 return boolean_true_node;
2392 }
2393 }
2394
2395 /* If the definition is a comparison, recurse on it. */
2396 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2397 {
2398 tree t = or_comparisons_1 (innercode,
2399 gimple_assign_rhs1 (stmt),
2400 gimple_assign_rhs2 (stmt),
2401 code2,
2402 op2a,
2403 op2b);
2404 if (t)
2405 return t;
2406 }
2407
2408 /* If the definition is an AND or OR expression, we may be able to
2409 simplify by reassociating. */
2410 if (innercode == TRUTH_AND_EXPR
2411 || innercode == TRUTH_OR_EXPR
2412 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2413 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2414 {
2415 tree inner1 = gimple_assign_rhs1 (stmt);
2416 tree inner2 = gimple_assign_rhs2 (stmt);
2417 gimple s;
2418 tree t;
2419 tree partial = NULL_TREE;
2420 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2421
2422 /* Check for boolean identities that don't require recursive examination
2423 of inner1/inner2:
2424 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2425 inner1 OR (inner1 AND inner2) => inner1
2426 !inner1 OR (inner1 OR inner2) => true
2427 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2428 */
2429 if (inner1 == true_test_var)
2430 return (is_or ? var : inner1);
2431 else if (inner2 == true_test_var)
2432 return (is_or ? var : inner2);
2433 else if (inner1 == false_test_var)
2434 return (is_or
2435 ? boolean_true_node
2436 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2437 else if (inner2 == false_test_var)
2438 return (is_or
2439 ? boolean_true_node
2440 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2441
2442 /* Next, redistribute/reassociate the OR across the inner tests.
2443 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2444 if (TREE_CODE (inner1) == SSA_NAME
2445 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2446 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2447 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2448 gimple_assign_rhs1 (s),
2449 gimple_assign_rhs2 (s),
2450 code2, op2a, op2b)))
2451 {
2452 /* Handle the OR case, where we are reassociating:
2453 (inner1 OR inner2) OR (op2a code2 op2b)
2454 => (t OR inner2)
2455 If the partial result t is a constant, we win. Otherwise
2456 continue on to try reassociating with the other inner test. */
2457 if (innercode == TRUTH_OR_EXPR)
2458 {
2459 if (integer_onep (t))
2460 return boolean_true_node;
2461 else if (integer_zerop (t))
2462 return inner2;
2463 }
2464
2465 /* Handle the AND case, where we are redistributing:
2466 (inner1 AND inner2) OR (op2a code2 op2b)
2467 => (t AND (inner2 OR (op2a code op2b))) */
2468 else
2469 {
2470 if (integer_zerop (t))
2471 return boolean_false_node;
2472 else
2473 /* Save partial result for later. */
2474 partial = t;
2475 }
2476 }
2477
2478 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2479 if (TREE_CODE (inner2) == SSA_NAME
2480 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2481 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2482 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2483 gimple_assign_rhs1 (s),
2484 gimple_assign_rhs2 (s),
2485 code2, op2a, op2b)))
2486 {
2487 /* Handle the OR case, where we are reassociating:
2488 (inner1 OR inner2) OR (op2a code2 op2b)
2489 => (inner1 OR t) */
2490 if (innercode == TRUTH_OR_EXPR)
2491 {
2492 if (integer_zerop (t))
2493 return inner1;
2494 else if (integer_onep (t))
2495 return boolean_true_node;
2496 }
2497
2498 /* Handle the AND case, where we are redistributing:
2499 (inner1 AND inner2) OR (op2a code2 op2b)
2500 => (t AND (inner1 OR (op2a code2 op2b)))
2501 => (t AND partial) */
2502 else
2503 {
2504 if (integer_zerop (t))
2505 return boolean_false_node;
2506 else if (partial)
2507 {
2508 /* We already got a simplification for the other
2509 operand to the redistributed AND expression. The
2510 interesting case is when at least one is true.
2511 Or, if both are the same, we can apply the identity
2512 (x AND x) == true. */
2513 if (integer_onep (partial))
2514 return t;
2515 else if (integer_onep (t))
2516 return partial;
2517 else if (same_bool_result_p (t, partial))
2518 return boolean_true_node;
2519 }
2520 }
2521 }
2522 }
2523 return NULL_TREE;
2524 }
2525
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Both tests compare the same operands; let combine_comparisons
	 take the union of the two condition codes.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above, (x CODE1 y) OR (y CODE2 x);
     swapping CODE2 normalizes it to operate on (x, y).  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative if op1b < op2b, zero if equal, positive if
	 op1b > op2b; every case below is phrased in terms of it.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  The
	 comparison directions are mirrored relative to the case above
	 because the roles of op1b and op2b are exchanged.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.
	 VAL means "the equality is implied by the other test", in which
	 case that other test alone is the answer.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) => (x != c).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all,
	 e.g. (x < c1 OR x > c2) with c1 >= c2: always true.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first test asks "NAME is false":
	 (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A "true" argument forces the OR to true;
			 all args must then agree on true.  */
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      /* A "false" argument reduces the OR to the second
			 comparison itself.  */
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      /* Recurse on the argument's definition; every arg
			 must simplify to the same boolean result.  */
		      tree temp = or_var_with_comparison (arg, invert,
							  code2, op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Fall through (non-boolean PHI type): no simplification.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2763
2764 /* Try to simplify the OR of two comparisons, specified by
2765 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2766 If this can be simplified to a single expression (without requiring
2767 introducing more SSA variables to hold intermediate values),
2768 return the resulting tree. Otherwise return NULL_TREE.
2769 If the result expression is non-null, it has boolean type. */
2770
2771 tree
2772 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2773 enum tree_code code2, tree op2a, tree op2b)
2774 {
2775 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2776 if (t)
2777 return t;
2778 else
2779 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2780 }