/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     The exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
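
/* Illustrative note (a sketch, not from the original sources): when this
   unit devirtualizes a call through an external vtable to a COMDAT method
   whose body was already optimized away here, the checks above make us
   give up rather than emit a reference to a symbol this unit will never
   define.  */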
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it
   into acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X;
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length;
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						 dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem),
						     NULL);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification was
   made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise replace the
   call and return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }

  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the
   BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   false if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call
   was simplified in place, false if a normal call should be emitted
   instead.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the string copy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the string copy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
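/* Illustrative example (not from the original sources): when
   get_maxval_strlen knows strlen (src) == 3,

     n = sprintf (buf, "%s", src);

   is replaced by the two statements

     strcpy (buf, src);
     n = 3;  */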
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the string copy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the string copy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
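/* Illustrative example (not from the original sources): for

     snprintf (buf, 16, "%s", src);

   with a known strlen (src) == 5, truncation is impossible since
   5 < 16, so the call is rewritten to

     strcpy (buf, src);

   and a used return value is replaced by the constant 5.  */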
/* Fold a call to __builtin_strlen at *GSI when the string length is
   known; return true if folded.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
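/* Illustrative example (not from the original sources): if
   get_maxval_strlen determines the argument always points to "abc",

     n = strlen (p);

   folds to the constant assignment n = 3.  */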
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi,
					       DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
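/* Illustrative example (not from the original sources): the dispatch
   above treats

     bzero (p, n);

   exactly like memset (p, 0, n) by forwarding to
   gimple_fold_builtin_memset with integer_zero_node as the value.  */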
/* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
   doesn't fit into TYPE.  The test for overflow is made regardless of
   -fwrapv, and even for unsigned types.  */

static bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
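/* Illustrative example (not from the original sources): for an 8-bit
   unsigned TYPE, 200 + 100 computes to 300 in the double-width
   arithmetic; 300 needs 9 bits as an unsigned value, so
   wi::min_precision returns 9 > 8 and overflow is reported, while
   100 + 100 = 200 fits in 8 bits and is not flagged.  */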
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		   (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						    (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
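/* Illustrative example (not from the original sources): for a C++
   virtual call represented as an OBJ_TYPE_REF, if type analysis proves
   exactly one possible target Derived::f, the call is turned into a
   direct call to Derived::f; if it proves there are no possible
   targets, the call is replaced by __builtin_unreachable ().  */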
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gimple_code (stmt) == GIMPLE_COND)
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (stmt);
	  else
	    gimple_cond_make_true (stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
					    ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				 ops, seq, lhs);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
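/* Illustrative example (not from the original sources): if the match
   machinery reduces the condition of a GIMPLE_COND to the constant 0,
   the INTEGER_CST arm above calls gimple_cond_make_false, turning the
   statement into 'if (0)'; a simplified comparison instead replaces
   the condition directly via gimple_cond_set_condition.  */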
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
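/* Illustrative example (not from the original sources): propagation
   can produce MEM[&s.f, 8]; the address is rebased to the containing
   declaration, yielding MEM[&s, offset-of-f + 8].  Conversely,
   MEM[&x, 0] whose volatility, TBAA type, and alignment match the
   declaration x is canonicalized back to the plain reference x.  */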
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }

	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
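/* Illustrative example (not from the original sources): constant
   propagation can leave the unfolded store

     *&x = 0;

   fold_stmt_1 folds the *& on the lhs back into the direct store
   x = 0 via maybe_fold_reference.  */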
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   made any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Like the above, but look up SSA definitions through the VALUEIZE
   callback while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
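/* Illustrative example (not from the original sources): with
   INVERT = true, an SSA boolean b canonicalizes to the comparison
   b == 0, and the comparison a < b becomes a >= b via
   invert_tree_comparison; both results have boolean type.  */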
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
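/* Illustrative example (not from the original sources): for the
   conjunction (x == 5) AND (x < 10), tree_int_cst_compare (5, 10)
   is negative, so the EQ_EXPR arm of and_comparisons_1 sees that the
   constant also satisfies the second test and returns just x == 5;
   for (x == 5) AND (x > 10) it returns boolean_false_node.  */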
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
4044 /* Try to simplify the OR of the ssa variable defined by the assignment
4045 STMT with the comparison specified by (OP2A CODE2 OP2B).
4046 Return NULL_EXPR if we can't simplify this to a single expression. */
4049 or_var_with_comparison_1 (gimple stmt
,
4050 enum tree_code code2
, tree op2a
, tree op2b
)
4052 tree var
= gimple_assign_lhs (stmt
);
4053 tree true_test_var
= NULL_TREE
;
4054 tree false_test_var
= NULL_TREE
;
4055 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4057 /* Check for identities like (var OR (var != 0)) => true . */
4058 if (TREE_CODE (op2a
) == SSA_NAME
4059 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4061 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4062 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4064 true_test_var
= op2a
;
4065 if (var
== true_test_var
)
4068 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4069 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4071 false_test_var
= op2a
;
4072 if (var
== false_test_var
)
4073 return boolean_true_node
;
4077 /* If the definition is a comparison, recurse on it. */
4078 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4080 tree t
= or_comparisons_1 (innercode
,
4081 gimple_assign_rhs1 (stmt
),
4082 gimple_assign_rhs2 (stmt
),
4090 /* If the definition is an AND or OR expression, we may be able to
4091 simplify by reassociating. */
4092 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4093 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4095 tree inner1
= gimple_assign_rhs1 (stmt
);
4096 tree inner2
= gimple_assign_rhs2 (stmt
);
4099 tree partial
= NULL_TREE
;
4100 bool is_or
= (innercode
== BIT_IOR_EXPR
);
4102 /* Check for boolean identities that don't require recursive examination
4104 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4105 inner1 OR (inner1 AND inner2) => inner1
4106 !inner1 OR (inner1 OR inner2) => true
4107 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4109 if (inner1
== true_test_var
)
4110 return (is_or
? var
: inner1
);
4111 else if (inner2
== true_test_var
)
4112 return (is_or
? var
: inner2
);
4113 else if (inner1
== false_test_var
)
4116 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4117 else if (inner2
== false_test_var
)
4120 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4122 /* Next, redistribute/reassociate the OR across the inner tests.
4123 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4124 if (TREE_CODE (inner1
) == SSA_NAME
4125 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4126 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4127 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4128 gimple_assign_rhs1 (s
),
4129 gimple_assign_rhs2 (s
),
4130 code2
, op2a
, op2b
)))
4132 /* Handle the OR case, where we are reassociating:
4133 (inner1 OR inner2) OR (op2a code2 op2b)
4135 If the partial result t is a constant, we win. Otherwise
4136 continue on to try reassociating with the other inner test. */
4139 if (integer_onep (t
))
4140 return boolean_true_node
;
4141 else if (integer_zerop (t
))
4145 /* Handle the AND case, where we are redistributing:
4146 (inner1 AND inner2) OR (op2a code2 op2b)
4147 => (t AND (inner2 OR (op2a code op2b))) */
4148 else if (integer_zerop (t
))
4149 return boolean_false_node
;
4151 /* Save partial result for later. */
      /* Compute the second partial result, (inner2 OR (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
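/* For illustration (hypothetical SSA names, a sketch of one case the
   reassociation above handles): given GIMPLE like

     inner1_1 = a_2 > 10;
     inner2_3 = a_2 < 0;
     var_4 = inner1_1 | inner2_3;

   folding (var_4 != 0) OR (a_2 <= 10) first computes the partial result
   (inner1_1 OR (a_2 <= 10)) via maybe_fold_or_comparisons, which is
   always true, so the whole expression simplifies to boolean_true_node.  */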
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     then we win.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
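/* For illustration, three folds the constant-merging block above performs
   (SSA names hypothetical):

     x_1 < 3 || x_1 > 3    =>  x_1 != 3   (singleton range, cmp == 0)
     x_1 < 5 || x_1 <= 7   =>  x_1 <= 7   (less restrictive of < and <=)
     x_1 < 5 || x_1 >= 2   =>  true       (range not restricted at all)  */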
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
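/* Note that or_comparisons_1 only follows SSA defs of its first
   comparison, so the wrapper above tries both argument orders; e.g.
   (a_1 > 0) OR (b_2 != 0), where only b_2 has a foldable definition,
   is still caught by the second call with the operands swapped.  */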
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
		return NULL_TREE;
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
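/* For illustration (hypothetical SSA names): with op0 and op1 valueized
   to 2 and 3, an internal call

     x_1 = UBSAN_CHECK_MUL (a_2, b_3);

   folds via fold_binary_loc to the INTEGER_CST 6, because the result
   does not overflow; UBSAN_CHECK_MUL with one operand known to be zero
   folds to zero even when the other operand is unknown.  */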
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
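/* A minimal sketch of a VALUEIZE callback (hypothetical; real callers
   such as the CCP and VN propagators consult their lattices instead):

     static tree
     identity_valueize (tree name)
     {
       return name;   /* Every SSA name valueizes to itself.  */
     }

   Passing identity_valueize folds only statements whose operands are
   literal constants already.  */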
/* The following set of functions folds references using their constant
   initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
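/* For illustration: given 'static const int a[2][3]' with a known
   initializer and the base a[1], the ARRAY_REF case recurses on 'a'
   and adds the constant bit offset of a[1] to *BIT_OFFSET; for
   'static const int b;' with no explicit initializer, error_mark_node
   signals a known-zero constructor.  */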
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And the offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
	 range, or leave the index NULL meaning that it is the next index
	 after the previous one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
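/* For illustration: with 'static const int arr[3] = { 1, 2, 3 };' and a
   32-bit int, a read of width 32 at bit OFFSET 64 yields
   access_index = (64 / 8) / 4 = 2 and inner_offset = 0, so the walk
   recurses into the constructor element for arr[2] and folds to the
   constant 3.  */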
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit
   OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
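/* For illustration (assuming 32-bit int and no padding): for
   'static const struct { int i; float f; } s = { 42, 1.0f };' a 32-bit
   read at bit offset 32 overlaps only the bit range [32, 64) of field f,
   so the reference folds through fold_ctor_reference to 1.0f.  */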
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
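/* For illustration: reading the four bytes of 'static const float f
   = 1.0f;' at type 'int' is byte-aligned and byte-sized, so it goes
   through the native_encode_expr/native_interpret_expr path above and
   folds to the IEEE bit pattern 0x3f800000 as an INTEGER_CST.  */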
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup a virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check that we have a virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
	 we cannot devirtualize.  This can happen in WHOPR when the actual
	 method ends up in another partition, because we found the
	 devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
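/* For illustration (assuming 64-bit vtable slots, so SIZE = 64 bits and
   ELT_SIZE = 8 bytes): TOKEN 2 with byte OFFSET 16 gives
   access_index = (16 * 8 + 2 * 64) / 8 / 8 = 4, i.e. the fifth element
   of the vtable's constructor array is the looked-up method.  */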
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
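/* For illustration (hypothetical SSA names): 'x_1 = y_2 * y_2' is
   non-negative because the two operands are identical, and
   'x_1 = __builtin_pow (y_2, 2.0)' is non-negative because the exponent
   is an even integer-valued real.  */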
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
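/* For illustration: '*(p_1 + 4)' where p_1 is not an ADDR_EXPR folds to
   'MEM_REF <p_1, 4>', and '*(float *)&c' for a complex float 'c' folds
   to '__real__ c' per the cases above.  */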
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
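/* For illustration (hypothetical SSA names), a signed addition

     a_1 = b_2 + c_3;

   is rewritten to the sequence

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;
     a_1 = (int) _6;

   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow.  */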
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign_with_ops (code, res,
					     build1 (code, type, op0),
					     NULL_TREE);
      else
	stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign_with_ops (code, res,
					     build3 (BIT_FIELD_REF, type,
						     op0, op1, op2),
					     NULL_TREE);
      else
	stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
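/* A minimal usage sketch (hypothetical variables; callers normally hand
   the sequence to gsi_insert_seq_before or similar):

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, sizetype, a, b, NULL);
     tree idx = gimple_convert (&stmts, loc, ssizetype, sum);

   Both helpers return an existing tree when gimple_simplify already
   folded the expression, and only emit a new statement otherwise.  */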