/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the frontend setting
         flags incorrectly.  Those variables should never
         be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
                           || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
         the decision to eliminate the offline copy of the function yet.
         The second is important when devirtualization happens during the
         final compilation stage, when making a new reference would no
         longer cause the callee to be output.  */
      if (!node || !node->analyzed || node->global.inlined_to)
        return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
        return false;
    }
  return true;
}
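
/* Illustrative sketch (not from the original sources): case 1 above arises
   from C++ code such as

     // unit1.C
     struct A { virtual void f (); };   // A's vtable is keyed to unit1.C
     // unit2.C still sees A's vtable constructor; its slots point at
     // methods that have both TREE_STATIC and DECL_EXTERNAL set.

   Folding a load from such a vtable must therefore pass the check above
   before any method pointer is substituted.  */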
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
          && (TREE_CODE (base) == VAR_DECL
              || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base))
        return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
        add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          val = canonicalize_constructor_val (val);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
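
/* For instance (sketch):  given  static const int n = 42;  a load from "n"
   folds to 42, while a const-qualified integral or float variable without
   an initializer folds to zero when it cannot be overridden at link or
   run time.  */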
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
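
/* Example (sketch): constant propagation can turn *p into *&x, which is
   represented as MEM[&x + 0]; when the types and volatility agree, the
   code above folds it back to the plain reference "x".  */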
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X
                 X == 1 and X != 0 is X  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
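
/* Example of the boolean canonicalization handled above (a sketch): for a
   _Bool-typed x_1,

     tmp_2 = x_1 == 0;    becomes    tmp_2 = ~x_1;   (one-bit precision)
     tmp_2 = x_1 != 0;    becomes    tmp_2 = x_1;

   while wider boolean-like types use  x_1 ^ 1  for the logical-not.  */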
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
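
/* For instance (sketch): after propagation a condition such as
     if (7 > 3)
   folds to a constant, and the statement becomes the canonical
     if (1 != 0)
   leaving any cfg cleanup to the callers.  */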
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_remove (si_p, true);
          return;
        }
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
        {
          find_new_referenced_vars (new_stmt);
          mark_symbols_for_renaming (new_stmt);
        }
      /* If the new statement has a VUSE, update it with exact SSA name we
         know will reach this one.  */
      if (gimple_vuse (new_stmt))
        {
          /* If we've also seen a previous store create a new VDEF for
             the latter one, and make that the new reaching VUSE.  */
          if (laststore)
            {
              reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
              gimple_set_vdef (laststore, reaching_vuse);
              update_stmt (laststore);
              laststore = NULL;
            }
          gimple_set_vuse (new_stmt, reaching_vuse);
          gimple_set_modified (new_stmt, true);
        }
      if (gimple_assign_single_p (new_stmt)
          && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
        laststore = new_stmt;
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
         sequence ends with a store we must make that store have the same
         vdef in order not to break the sequencing.  This can happen
         for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
        {
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
          update_stmt (laststore);
        }
      else if (gimple_in_ssa_p (cfun))
        {
          unlink_stmt_vdef (stmt);
          release_defs (stmt);
        }

      gsi_replace (si_p, last, false);
    }
  else
    {
      gsi_insert_before (si_p, last, GSI_NEW_STMT);

      if (laststore && is_gimple_reg (lhs))
        {
          gimple_set_vdef (laststore, gimple_vdef (stmt));
          update_stmt (laststore);
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
          laststore = NULL;
        }
      else if (laststore)
        {
          reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
          gimple_set_vdef (laststore, reaching_vuse);
          update_stmt (laststore);
          laststore = NULL;
        }

      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
        gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
        {
          gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
            SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
        }
      else if (reaching_vuse == gimple_vuse (stmt))
        unlink_stmt_vdef (stmt);

      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
    }
}
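
/* Sketch of the VDEF chaining described above (SSA names hypothetical):
   replacing a memcpy call that had VUSE .MEM_3 and VDEF .MEM_4 with two
   stores yields

     # VUSE <.MEM_3>
     first store, given a fresh VDEF .MEM_5
     # VUSE <.MEM_5>  VDEF <.MEM_4>
     last store, reusing the call's original VDEF

   so statements downstream keep seeing .MEM_4.  */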
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              /* If VAL has a higher maximum value, use that.  */
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }

          /* If the lengths differ, we cannot determine a unique one.  */
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
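
/* Example (sketch): for
     p_1 = PHI <"foo", "bazzz">
   a query with TYPE == 1 (maximum string length) yields 5, while a
   TYPE == 0 query fails because the two lengths 3 and 5 differ.  */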
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
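
/* Example (sketch): once the length of "hello" is known to be 5,
     strcpy (buf, "hello");
   may be folded into the equivalent
     memcpy (buf, "hello", 6);
   i.e. the result can be another call rather than a constant.  */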
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
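
/* Sketch of the adjustment emitted above (SSA names hypothetical), e.g. for
   DELTA == 16:

     tmp_1 = this_2(D) p+ 16;
     method (tmp_1, ...);

   the fresh SSA name replaces the original first argument.  */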
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
        break;
      if (offset < 0)
        return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
        return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
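
/* Example (sketch): for
     struct A { virtual void f (); } a;
   the constant &a yields TYPE_BINFO (A), which is usable because A has no
   bases; for  struct B : A {} b;  the walk either ends in an artificial
   base field or finds a binfo with ancestors, and NULL is returned.  */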
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          return true;
        }
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, obj;
      HOST_WIDE_INT token;

      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          return true;
        }

      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
        return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo);
      if (!fndecl)
        return false;
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}
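
/* Sketch: a virtual call whose object is a known declaration, roughly
     OBJ_TYPE_REF (_3; &a -> 0) (&a);
   is rewritten into the direct call  A::f (&a);  once the binfo lookup
   above identifies the method behind token 0.  */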
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;

        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }

        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
          {
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
            if (tmp)
              {
                gimple_call_set_arg (stmt, i, tmp);
                changed = true;
              }
          }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
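
/* For instance (sketch): canonicalize_bool (x_1 > 2, true) yields the
   boolean-typed comparison x_1 <= 2, and canonicalize_bool (n_2, false)
   for a non-boolean SSA name yields n_2 != 0.  */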
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
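
/* Example of the reassociation above (a sketch; SSA names hypothetical):
   given
     var_1 = a_2 & b_3;    with    a_2 = x_4 < 10;
   the query  var_1 AND (x_4 < 5)  first simplifies
   (x_4 < 10) AND (x_4 < 5)  to  x_4 < 5  as a partial result, then tries
   the same with b_3 before combining the two.  */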
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}
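
/* Examples of the constant merges above (sketches):
     x < 3  AND  x < 5    =>  x < 3     (more restrictive bound)
     x <= 5 AND  x >= 5   =>  x == 5    (singleton range)
     x < 2  AND  x > 7    =>  false     (disjoint ranges)  */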
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
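
/* Sketch of the DeMorgan rewrite above: the query
     !var_1 OR (x_2 < 5)
   is computed as  !(var_1 AND (x_2 >= 5))  and the result is inverted
   back by canonicalize_bool.  */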
1967 /* Try to simplify the OR of the ssa variable defined by the assignment
1968 STMT with the comparison specified by (OP2A CODE2 OP2B).
1969 Return NULL_EXPR if we can't simplify this to a single expression. */
1972 or_var_with_comparison_1 (gimple stmt
,
1973 enum tree_code code2
, tree op2a
, tree op2b
)
1975 tree var
= gimple_assign_lhs (stmt
);
1976 tree true_test_var
= NULL_TREE
;
1977 tree false_test_var
= NULL_TREE
;
1978 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1980 /* Check for identities like (var OR (var != 0)) => true . */
1981 if (TREE_CODE (op2a
) == SSA_NAME
1982 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1984 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1985 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1987 true_test_var
= op2a
;
1988 if (var
== true_test_var
)
1991 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1992 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1994 false_test_var
= op2a
;
1995 if (var
== false_test_var
)
1996 return boolean_true_node
;
2000 /* If the definition is a comparison, recurse on it. */
2001 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2003 tree t
= or_comparisons_1 (innercode
,
2004 gimple_assign_rhs1 (stmt
),
2005 gimple_assign_rhs2 (stmt
),
2013 /* If the definition is an AND or OR expression, we may be able to
2014 simplify by reassociating. */
2015 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2016 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2018 tree inner1
= gimple_assign_rhs1 (stmt
);
2019 tree inner2
= gimple_assign_rhs2 (stmt
);
2022 tree partial
= NULL_TREE
;
2023 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2025 /* Check for boolean identities that don't require recursive examination
2027 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2028 inner1 OR (inner1 AND inner2) => inner1
2029 !inner1 OR (inner1 OR inner2) => true
2030 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2032 if (inner1
== true_test_var
)
2033 return (is_or
? var
: inner1
);
2034 else if (inner2
== true_test_var
)
2035 return (is_or
? var
: inner2
);
2036 else if (inner1
== false_test_var
)
2039 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2040 else if (inner2
== false_test_var
)
2043 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
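/* Illustrative sketch (not from the sources): given _Bool-typed SSA
   names, the identities above let a sequence such as

     t_3 = a_1 | b_2;
     r_4 = t_3 | (a_1 != 0);

   fold r_4 to t_3: a_1 is the true-test of the second comparison and
   appears as inner1 of an inner OR, so the result is VAR itself.  */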
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
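/* Source-level effect, as a sketch: for an integer SSA name x,

     x < 3 || x < 5    folds to  x < 5    (less restrictive bound)
     x < 3 || x > 3    folds to  x != 3   (singleton range)
     x <= 3 || x >= 3  folds to  true     (no restriction at all)

   Both argument orders are tried because or_comparisons_1 only
   recurses on SSA definitions feeding its first comparison.  */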
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant tree, or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		      == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  /* No folding yet for these functions.  */
	  return NULL_TREE;

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
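/* Usage sketch (hypothetical valueizer): for a statement

     x_1 = y_2 + 3;

   with a VALUEIZE hook that maps y_2 to the INTEGER_CST 4, this
   returns the INTEGER_CST 7.  A hook that has no value for an SSA
   name is expected to return the name itself, in which case folding
   fails and NULL_TREE is returned.  */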
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size);
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
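/* Example (a sketch): for

     static const int a[2] = {1, 2};
     ... = a[1];

   BASE is the VAR_DECL 'a' and its CONSTRUCTOR {1, 2} is returned.
   For 'static const int b;' with no initializer, error_mark_node is
   returned to stand for the known-zero value.  */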
/* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do a simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
				unsigned HOST_WIDE_INT offset,
				unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
					  [offset]));
      /* Folding
	   const char a[20] = "hello";
	   return a[10];

	 might lead to an offset greater than the string length.  In this
	 case we know the value is either initialized to 0 or out of bounds.
	 Return 0 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
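/* For example (illustrative): a one-byte read such as

     static const char msg[6] = "hello";
     ... = msg[1];

   reaches this function with OFFSET == 8 and SIZE == BITS_PER_UNIT
   and folds to the INTEGER_CST 'e'.  */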
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
	 range, or leave the index NULL, meaning the next index after the
	 previous one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);

      /* Do we have a match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
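/* Worked example (a sketch, assuming 32-bit int): for

     static const int v[4] = {10, 20, 30, 40};
     ... = v[2];

   OFFSET is 64 and SIZE is 32, so with elt_size 4 and low_bound 0 we
   get access_index = (64 / BITS_PER_UNIT) / 4 = 2 and inner_offset 0;
   the walk matches index 2 and the recursive call yields 30.  */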
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit
   OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
				  double_int_mul (byte_offset_cst,
						  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = double_int_add (bitoffset,
					tree_to_double_int (field_size));
      else
	bitoffset_end = double_int_zero;

      access_end = double_int_add (uhwi_to_double_int (offset),
				   uhwi_to_double_int (size));

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (double_int_cmp (access_end, bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int_cmp (uhwi_to_double_int (offset),
				 bitoffset_end, 0) < 0))
	{
	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
						    bitoffset);
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      double_int_to_uhwi (inner_offset), size);
	}
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
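/* Example (a sketch, assuming 32-bit int aligned at byte offset 4): for

     struct s { char c; int i; };
     static const struct s x = { 'a', 42 };
     ... = x.i;

   the field walk finds the FIELD_DECL whose [BITOFFSET, BITOFFSET_END)
   range, here [32, 64), covers the access and recurses on its
   CONSTRUCTOR value, folding the load to 42.  */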
/* CTOR is a value initializing memory.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size);
    }

  return NULL_TREE;
}
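/* Illustrative note: with this dispatch, reading all of
   'static const float f = 1.0f;' at a same-sized integer type takes
   the VIEW_CONVERT_EXPR path above (zero offset, matching TYPE_SIZE),
   while STRING_CST and CONSTRUCTOR initializers are delegated to the
   two helpers defined earlier.  */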
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && host_integerp (idx, 0))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       host_integerp (low_bound, 0))
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1)))
	    {
	      offset = TREE_INT_CST_LOW (idx);
	      offset -= TREE_INT_CST_LOW (low_bound);
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
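/* Usage sketch: with no valueizer, only references with constant
   indexes into known initializers fold, e.g.

     static const int arr[4] = {0, 1, 4, 9};
     ... = arr[3];      (folds to 9)

   Propagators that track SSA values supply their own valueize
   callback through the _1 variant above instead.  */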
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
			    offset, size);
  if (!fn)
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When the cgraph node is missing and the function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn))
    return NULL_TREE;

  return fn;
}
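/* Example (a sketch): for a C++ virtual call through an object known
   to be of class D, TOKEN is the method's vtable slot.  With TOKEN == 2
   and 64-bit vtable entries, the lookup above reads DECL_INITIAL of
   D's vtable VAR_DECL at bit offset OFFSET + 2 * 64 and, if that folds
   to '&D::foo', returns the FUNCTION_DECL for D::foo.  */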
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;
	  break;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;
	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;
	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))