/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "gimple-fold.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
44 #include "tree-object-size.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
64 #include "diagnostic-core.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
70 /* Return true when DECL can be referenced from current unit.
71 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
72 We can get declarations that are not possible to reference for various
75 1) When analyzing C++ virtual tables.
76 C++ virtual tables do have known constructors even
77 when they are keyed to other compilation unit.
78 Those tables can contain pointers to methods and vars
79 in other units. Those methods have both STATIC and EXTERNAL
81 2) In WHOPR mode devirtualization might lead to reference
82 to method that was partitioned elsehwere.
83 In this case we have static VAR_DECL or FUNCTION_DECL
84 that has no corresponding callgraph/varpool node
86 3) COMDAT functions referred by external vtables that
87 we devirtualize only during final compilation stage.
88 At this time we already decided that we will not output
89 the function body and thus we can't reference the symbol
93 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
96 struct cgraph_node
*node
;
99 if (DECL_ABSTRACT_P (decl
))
102 /* We are concerned only about static/external vars and functions. */
103 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
104 || !VAR_OR_FUNCTION_DECL_P (decl
))
107 /* Static objects can be referred only if they was not optimized out yet. */
108 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
110 /* Before we start optimizing unreachable code we can be sure all
111 static objects are defined. */
112 if (symtab
->function_flags_ready
)
114 snode
= symtab_node::get (decl
);
115 if (!snode
|| !snode
->definition
)
117 node
= dyn_cast
<cgraph_node
*> (snode
);
118 return !node
|| !node
->global
.inlined_to
;
121 /* We will later output the initializer, so we can refer to it.
122 So we are concerned only when DECL comes from initializer of
123 external var or var that has been optimized out. */
125 || !VAR_P (from_decl
)
126 || (!DECL_EXTERNAL (from_decl
)
127 && (vnode
= varpool_node::get (from_decl
)) != NULL
128 && vnode
->definition
)
130 && (vnode
= varpool_node::get (from_decl
)) != NULL
131 && vnode
->in_other_partition
))
133 /* We are folding reference from external vtable. The vtable may reffer
134 to a symbol keyed to other compilation unit. The other compilation
135 unit may be in separate DSO and the symbol may be hidden. */
136 if (DECL_VISIBILITY_SPECIFIED (decl
)
137 && DECL_EXTERNAL (decl
)
138 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
139 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
141 /* When function is public, we always can introduce new reference.
142 Exception are the COMDAT functions where introducing a direct
143 reference imply need to include function body in the curren tunit. */
144 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
146 /* We have COMDAT. We are going to check if we still have definition
147 or if the definition is going to be output in other partition.
148 Bypass this when gimplifying; all needed functions will be produced.
150 As observed in PR20991 for already optimized out comdat virtual functions
151 it may be tempting to not necessarily give up because the copy will be
152 output elsewhere when corresponding vtable is output.
153 This is however not possible - ABI specify that COMDATs are output in
154 units where they are used and when the other unit was compiled with LTO
155 it is possible that vtable was kept public while the function itself
157 if (!symtab
->function_flags_ready
)
160 snode
= symtab_node::get (decl
);
162 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
163 && (!snode
->in_other_partition
164 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
166 node
= dyn_cast
<cgraph_node
*> (snode
);
167 return !node
|| !node
->global
.inlined_to
;
170 /* Create a temporary for TYPE for a statement STMT. If the current function
171 is in SSA form, a SSA name is created. Otherwise a temporary register
175 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
177 if (gimple_in_ssa_p (cfun
))
178 return make_ssa_name (type
, stmt
);
180 return create_tmp_reg (type
);
183 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
184 acceptable form for is_gimple_min_invariant.
185 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
188 canonicalize_constructor_val (tree cval
, tree from_decl
)
190 tree orig_cval
= cval
;
192 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
193 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
195 tree ptr
= TREE_OPERAND (cval
, 0);
196 if (is_gimple_min_invariant (ptr
))
197 cval
= build1_loc (EXPR_LOCATION (cval
),
198 ADDR_EXPR
, TREE_TYPE (ptr
),
199 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
201 fold_convert (ptr_type_node
,
202 TREE_OPERAND (cval
, 1))));
204 if (TREE_CODE (cval
) == ADDR_EXPR
)
206 tree base
= NULL_TREE
;
207 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
209 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
211 TREE_OPERAND (cval
, 0) = base
;
214 base
= get_base_address (TREE_OPERAND (cval
, 0));
218 if (VAR_OR_FUNCTION_DECL_P (base
)
219 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
221 if (TREE_TYPE (base
) == error_mark_node
)
224 TREE_ADDRESSABLE (base
) = 1;
225 else if (TREE_CODE (base
) == FUNCTION_DECL
)
227 /* Make sure we create a cgraph node for functions we'll reference.
228 They can be non-existent if the reference comes from an entry
229 of an external vtable for example. */
230 cgraph_node::get_create (base
);
232 /* Fixup types in global initializers. */
233 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
234 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
236 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
237 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
240 if (TREE_OVERFLOW_P (cval
))
241 return drop_tree_overflow (cval
);
245 /* If SYM is a constant variable with known value, return the value.
246 NULL_TREE is returned otherwise. */
249 get_symbol_constant_value (tree sym
)
251 tree val
= ctor_for_folding (sym
);
252 if (val
!= error_mark_node
)
256 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
257 if (val
&& is_gimple_min_invariant (val
))
262 /* Variables declared 'const' without an initializer
263 have zero as the initializer if they may not be
264 overridden at link or run time. */
266 && is_gimple_reg_type (TREE_TYPE (sym
)))
267 return build_zero_cst (TREE_TYPE (sym
));
275 /* Subroutine of fold_stmt. We perform several simplifications of the
276 memory reference tree EXPR and make sure to re-gimplify them properly
277 after propagation of constant addresses. IS_LHS is true if the
278 reference is supposed to be an lvalue. */
281 maybe_fold_reference (tree expr
, bool is_lhs
)
285 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
286 || TREE_CODE (expr
) == REALPART_EXPR
287 || TREE_CODE (expr
) == IMAGPART_EXPR
)
288 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
289 return fold_unary_loc (EXPR_LOCATION (expr
),
292 TREE_OPERAND (expr
, 0));
293 else if (TREE_CODE (expr
) == BIT_FIELD_REF
294 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
295 return fold_ternary_loc (EXPR_LOCATION (expr
),
298 TREE_OPERAND (expr
, 0),
299 TREE_OPERAND (expr
, 1),
300 TREE_OPERAND (expr
, 2));
303 && (result
= fold_const_aggregate_ref (expr
))
304 && is_gimple_min_invariant (result
))
311 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
312 replacement rhs for the statement or NULL_TREE if no simplification
313 could be made. It is assumed that the operands have been previously
317 fold_gimple_assign (gimple_stmt_iterator
*si
)
319 gimple
*stmt
= gsi_stmt (*si
);
320 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
321 location_t loc
= gimple_location (stmt
);
323 tree result
= NULL_TREE
;
325 switch (get_gimple_rhs_class (subcode
))
327 case GIMPLE_SINGLE_RHS
:
329 tree rhs
= gimple_assign_rhs1 (stmt
);
331 if (TREE_CLOBBER_P (rhs
))
334 if (REFERENCE_CLASS_P (rhs
))
335 return maybe_fold_reference (rhs
, false);
337 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
339 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
340 if (is_gimple_min_invariant (val
))
342 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
345 vec
<cgraph_node
*>targets
346 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
347 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
349 if (dump_enabled_p ())
351 location_t loc
= gimple_location_safe (stmt
);
352 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
353 "resolving virtual function address "
354 "reference to function %s\n",
355 targets
.length () == 1
356 ? targets
[0]->name ()
359 if (targets
.length () == 1)
361 val
= fold_convert (TREE_TYPE (val
),
362 build_fold_addr_expr_loc
363 (loc
, targets
[0]->decl
));
364 STRIP_USELESS_TYPE_CONVERSION (val
);
367 /* We can not use __builtin_unreachable here because it
368 can not have address taken. */
369 val
= build_int_cst (TREE_TYPE (val
), 0);
375 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
377 tree ref
= TREE_OPERAND (rhs
, 0);
378 tree tem
= maybe_fold_reference (ref
, true);
380 && TREE_CODE (tem
) == MEM_REF
381 && integer_zerop (TREE_OPERAND (tem
, 1)))
382 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
384 result
= fold_convert (TREE_TYPE (rhs
),
385 build_fold_addr_expr_loc (loc
, tem
));
386 else if (TREE_CODE (ref
) == MEM_REF
387 && integer_zerop (TREE_OPERAND (ref
, 1)))
388 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
392 /* Strip away useless type conversions. Both the
393 NON_LVALUE_EXPR that may have been added by fold, and
394 "useless" type conversions that might now be apparent
395 due to propagation. */
396 STRIP_USELESS_TYPE_CONVERSION (result
);
398 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
403 else if (TREE_CODE (rhs
) == CONSTRUCTOR
404 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
406 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
410 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
411 if (! CONSTANT_CLASS_P (val
))
414 return build_vector_from_ctor (TREE_TYPE (rhs
),
415 CONSTRUCTOR_ELTS (rhs
));
418 else if (DECL_P (rhs
))
419 return get_symbol_constant_value (rhs
);
423 case GIMPLE_UNARY_RHS
:
426 case GIMPLE_BINARY_RHS
:
429 case GIMPLE_TERNARY_RHS
:
430 result
= fold_ternary_loc (loc
, subcode
,
431 TREE_TYPE (gimple_assign_lhs (stmt
)),
432 gimple_assign_rhs1 (stmt
),
433 gimple_assign_rhs2 (stmt
),
434 gimple_assign_rhs3 (stmt
));
438 STRIP_USELESS_TYPE_CONVERSION (result
);
439 if (valid_gimple_rhs_p (result
))
444 case GIMPLE_INVALID_RHS
:
452 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
453 adjusting the replacement stmts location and virtual operands.
454 If the statement has a lhs the last stmt in the sequence is expected
455 to assign to that lhs. */
458 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
460 gimple
*stmt
= gsi_stmt (*si_p
);
462 if (gimple_has_location (stmt
))
463 annotate_all_with_location (stmts
, gimple_location (stmt
));
465 /* First iterate over the replacement statements backward, assigning
466 virtual operands to their defining statements. */
467 gimple
*laststore
= NULL
;
468 for (gimple_stmt_iterator i
= gsi_last (stmts
);
469 !gsi_end_p (i
); gsi_prev (&i
))
471 gimple
*new_stmt
= gsi_stmt (i
);
472 if ((gimple_assign_single_p (new_stmt
)
473 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
474 || (is_gimple_call (new_stmt
)
475 && (gimple_call_flags (new_stmt
)
476 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
480 vdef
= gimple_vdef (stmt
);
482 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
483 gimple_set_vdef (new_stmt
, vdef
);
484 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
485 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
486 laststore
= new_stmt
;
490 /* Second iterate over the statements forward, assigning virtual
491 operands to their uses. */
492 tree reaching_vuse
= gimple_vuse (stmt
);
493 for (gimple_stmt_iterator i
= gsi_start (stmts
);
494 !gsi_end_p (i
); gsi_next (&i
))
496 gimple
*new_stmt
= gsi_stmt (i
);
497 /* If the new statement possibly has a VUSE, update it with exact SSA
498 name we know will reach this one. */
499 if (gimple_has_mem_ops (new_stmt
))
500 gimple_set_vuse (new_stmt
, reaching_vuse
);
501 gimple_set_modified (new_stmt
, true);
502 if (gimple_vdef (new_stmt
))
503 reaching_vuse
= gimple_vdef (new_stmt
);
506 /* If the new sequence does not do a store release the virtual
507 definition of the original statement. */
509 && reaching_vuse
== gimple_vuse (stmt
))
511 tree vdef
= gimple_vdef (stmt
);
513 && TREE_CODE (vdef
) == SSA_NAME
)
515 unlink_stmt_vdef (stmt
);
516 release_ssa_name (vdef
);
520 /* Finally replace the original statement with the sequence. */
521 gsi_replace_with_seq (si_p
, stmts
, false);
524 /* Convert EXPR into a GIMPLE value suitable for substitution on the
525 RHS of an assignment. Insert the necessary statements before
526 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
527 is replaced. If the call is expected to produces a result, then it
528 is replaced by an assignment of the new RHS to the result variable.
529 If the result is to be ignored, then the call is replaced by a
530 GIMPLE_NOP. A proper VDEF chain is retained by making the first
531 VUSE and the last VDEF of the whole sequence be the same as the replaced
532 statement and using new SSA names for stores in between. */
535 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
538 gimple
*stmt
, *new_stmt
;
539 gimple_stmt_iterator i
;
540 gimple_seq stmts
= NULL
;
542 stmt
= gsi_stmt (*si_p
);
544 gcc_assert (is_gimple_call (stmt
));
546 push_gimplify_context (gimple_in_ssa_p (cfun
));
548 lhs
= gimple_call_lhs (stmt
);
549 if (lhs
== NULL_TREE
)
551 gimplify_and_add (expr
, &stmts
);
552 /* We can end up with folding a memcpy of an empty class assignment
553 which gets optimized away by C++ gimplification. */
554 if (gimple_seq_empty_p (stmts
))
556 pop_gimplify_context (NULL
);
557 if (gimple_in_ssa_p (cfun
))
559 unlink_stmt_vdef (stmt
);
562 gsi_replace (si_p
, gimple_build_nop (), false);
568 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
569 new_stmt
= gimple_build_assign (lhs
, tmp
);
570 i
= gsi_last (stmts
);
571 gsi_insert_after_without_update (&i
, new_stmt
,
572 GSI_CONTINUE_LINKING
);
575 pop_gimplify_context (NULL
);
577 gsi_replace_with_seq_vops (si_p
, stmts
);
581 /* Replace the call at *GSI with the gimple value VAL. */
584 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
586 gimple
*stmt
= gsi_stmt (*gsi
);
587 tree lhs
= gimple_call_lhs (stmt
);
591 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
592 val
= fold_convert (TREE_TYPE (lhs
), val
);
593 repl
= gimple_build_assign (lhs
, val
);
596 repl
= gimple_build_nop ();
597 tree vdef
= gimple_vdef (stmt
);
598 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
600 unlink_stmt_vdef (stmt
);
601 release_ssa_name (vdef
);
603 gsi_replace (gsi
, repl
, false);
606 /* Replace the call at *GSI with the new call REPL and fold that
610 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
612 gimple
*stmt
= gsi_stmt (*gsi
);
613 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
614 gimple_set_location (repl
, gimple_location (stmt
));
615 if (gimple_vdef (stmt
)
616 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
618 gimple_set_vdef (repl
, gimple_vdef (stmt
));
619 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
621 if (gimple_vuse (stmt
))
622 gimple_set_vuse (repl
, gimple_vuse (stmt
));
623 gsi_replace (gsi
, repl
, false);
627 /* Return true if VAR is a VAR_DECL or a component thereof. */
630 var_decl_component_p (tree var
)
633 while (handled_component_p (inner
))
634 inner
= TREE_OPERAND (inner
, 0);
635 return SSA_VAR_P (inner
);
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
643 size_must_be_zero_p (tree size
)
645 if (integer_zerop (size
))
648 if (TREE_CODE (size
) != SSA_NAME
)
652 enum value_range_type rtype
= get_range_info (size
, &min
, &max
);
653 if (rtype
!= VR_ANTI_RANGE
)
656 tree type
= TREE_TYPE (size
);
657 int prec
= TYPE_PRECISION (type
);
659 wide_int wone
= wi::one (prec
);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
665 return wi::eq_p (min
, wone
) && wi::geu_p (max
, ssize_max
);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
677 tree dest
, tree src
, int endp
)
679 gimple
*stmt
= gsi_stmt (*gsi
);
680 tree lhs
= gimple_call_lhs (stmt
);
681 tree len
= gimple_call_arg (stmt
, 2);
682 tree destvar
, srcvar
;
683 location_t loc
= gimple_location (stmt
);
685 bool nowarn
= gimple_no_warning_p (stmt
);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len
))
692 if (gimple_call_lhs (stmt
))
693 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
695 repl
= gimple_build_nop ();
696 tree vdef
= gimple_vdef (stmt
);
697 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
699 unlink_stmt_vdef (stmt
);
700 release_ssa_name (vdef
);
702 gsi_replace (gsi
, repl
, false);
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src
, dest
, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
713 unlink_stmt_vdef (stmt
);
714 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
715 release_ssa_name (gimple_vdef (stmt
));
718 gsi_replace (gsi
, gimple_build_nop (), false);
725 tree srctype
, desttype
;
726 unsigned int src_align
, dest_align
;
729 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
730 pointers as wide integer) and also may result in huge function
731 size because of inlined bounds copy. Thus don't inline for
732 functions we want to instrument. */
733 if (flag_check_pointer_bounds
734 && chkp_instrumentable_p (cfun
->decl
)
735 /* Even if data may contain pointers we can inline if copy
736 less than a pointer size. */
737 && (!tree_fits_uhwi_p (len
)
738 || compare_tree_int (len
, POINTER_SIZE_UNITS
) >= 0))
741 /* Build accesses at offset zero with a ref-all character type. */
742 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
745 /* If we can perform the copy efficiently with first doing all loads
746 and then all stores inline it that way. Currently efficiently
747 means that we can load all the memory into a single integer
748 register which is what MOVE_MAX gives us. */
749 src_align
= get_pointer_alignment (src
);
750 dest_align
= get_pointer_alignment (dest
);
751 if (tree_fits_uhwi_p (len
)
752 && compare_tree_int (len
, MOVE_MAX
) <= 0
753 /* ??? Don't transform copies from strings with known length this
754 confuses the tree-ssa-strlen.c. This doesn't handle
755 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
757 && !c_strlen (src
, 2))
759 unsigned ilen
= tree_to_uhwi (len
);
760 if (pow2p_hwi (ilen
))
762 /* Detect invalid bounds and overlapping copies and issue
763 either -Warray-bounds or -Wrestrict. */
765 && check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
766 dest
, src
, len
, len
))
767 gimple_set_no_warning (stmt
, true);
769 scalar_int_mode mode
;
770 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
772 && is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
)
773 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
774 /* If the destination pointer is not aligned we must be able
775 to emit an unaligned store. */
776 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
777 || !targetm
.slow_unaligned_access (mode
, dest_align
)
778 || (optab_handler (movmisalign_optab
, mode
)
779 != CODE_FOR_nothing
)))
782 tree desttype
= type
;
783 if (src_align
< GET_MODE_ALIGNMENT (mode
))
784 srctype
= build_aligned_type (type
, src_align
);
785 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
786 tree tem
= fold_const_aggregate_ref (srcmem
);
789 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
790 && targetm
.slow_unaligned_access (mode
, src_align
)
791 && (optab_handler (movmisalign_optab
, mode
)
792 == CODE_FOR_nothing
))
797 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
799 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
801 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
803 gimple_assign_set_lhs (new_stmt
, srcmem
);
804 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
805 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
807 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
808 desttype
= build_aligned_type (type
, dest_align
);
810 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
813 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
814 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
815 if (gimple_vdef (new_stmt
)
816 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
817 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
820 gsi_replace (gsi
, new_stmt
, false);
823 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
832 /* Both DEST and SRC must be pointer types.
833 ??? This is what old code did. Is the testing for pointer types
836 If either SRC is readonly or length is 1, we can use memcpy. */
837 if (!dest_align
|| !src_align
)
839 if (readonly_data_expr (src
)
840 || (tree_fits_uhwi_p (len
)
841 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
842 >= tree_to_uhwi (len
))))
844 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
847 gimple_call_set_fndecl (stmt
, fn
);
848 gimple_call_set_arg (stmt
, 0, dest
);
849 gimple_call_set_arg (stmt
, 1, src
);
854 /* If *src and *dest can't overlap, optimize into memcpy as well. */
855 if (TREE_CODE (src
) == ADDR_EXPR
856 && TREE_CODE (dest
) == ADDR_EXPR
)
858 tree src_base
, dest_base
, fn
;
859 poly_int64 src_offset
= 0, dest_offset
= 0;
862 srcvar
= TREE_OPERAND (src
, 0);
863 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
864 if (src_base
== NULL
)
866 destvar
= TREE_OPERAND (dest
, 0);
867 dest_base
= get_addr_base_and_unit_offset (destvar
,
869 if (dest_base
== NULL
)
871 if (!poly_int_tree_p (len
, &maxsize
))
873 if (SSA_VAR_P (src_base
)
874 && SSA_VAR_P (dest_base
))
876 if (operand_equal_p (src_base
, dest_base
, 0)
877 && ranges_maybe_overlap_p (src_offset
, maxsize
,
878 dest_offset
, maxsize
))
881 else if (TREE_CODE (src_base
) == MEM_REF
882 && TREE_CODE (dest_base
) == MEM_REF
)
884 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
885 TREE_OPERAND (dest_base
, 0), 0))
887 poly_offset_int full_src_offset
888 = mem_ref_offset (src_base
) + src_offset
;
889 poly_offset_int full_dest_offset
890 = mem_ref_offset (dest_base
) + dest_offset
;
891 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
892 full_dest_offset
, maxsize
))
898 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
901 gimple_call_set_fndecl (stmt
, fn
);
902 gimple_call_set_arg (stmt
, 0, dest
);
903 gimple_call_set_arg (stmt
, 1, src
);
908 /* If the destination and source do not alias optimize into
910 if ((is_gimple_min_invariant (dest
)
911 || TREE_CODE (dest
) == SSA_NAME
)
912 && (is_gimple_min_invariant (src
)
913 || TREE_CODE (src
) == SSA_NAME
))
916 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
917 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
918 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
921 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
924 gimple_call_set_fndecl (stmt
, fn
);
925 gimple_call_set_arg (stmt
, 0, dest
);
926 gimple_call_set_arg (stmt
, 1, src
);
935 if (!tree_fits_shwi_p (len
))
937 if (!POINTER_TYPE_P (TREE_TYPE (src
))
938 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
940 /* In the following try to find a type that is most natural to be
941 used for the memcpy source and destination and that allows
942 the most optimization when memcpy is turned into a plain assignment
943 using that type. In theory we could always use a char[len] type
944 but that only gains us that the destination and source possibly
945 no longer will have their address taken. */
946 srctype
= TREE_TYPE (TREE_TYPE (src
));
947 if (TREE_CODE (srctype
) == ARRAY_TYPE
948 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
949 srctype
= TREE_TYPE (srctype
);
950 desttype
= TREE_TYPE (TREE_TYPE (dest
));
951 if (TREE_CODE (desttype
) == ARRAY_TYPE
952 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
953 desttype
= TREE_TYPE (desttype
);
954 if (TREE_ADDRESSABLE (srctype
)
955 || TREE_ADDRESSABLE (desttype
))
958 /* Make sure we are not copying using a floating-point mode or
959 a type whose size possibly does not match its precision. */
960 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
961 || TREE_CODE (desttype
) == BOOLEAN_TYPE
962 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
963 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
964 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
965 || TREE_CODE (srctype
) == BOOLEAN_TYPE
966 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
967 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
975 src_align
= get_pointer_alignment (src
);
976 dest_align
= get_pointer_alignment (dest
);
977 if (dest_align
< TYPE_ALIGN (desttype
)
978 || src_align
< TYPE_ALIGN (srctype
))
982 if (TREE_CODE (dest
) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (dest
, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
985 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
988 if (TREE_CODE (src
) == ADDR_EXPR
989 && var_decl_component_p (TREE_OPERAND (src
, 0))
990 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
993 || src_align
>= TYPE_ALIGN (desttype
))
994 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
996 else if (!STRICT_ALIGNMENT
)
998 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1000 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1004 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1007 if (srcvar
== NULL_TREE
)
1009 if (src_align
>= TYPE_ALIGN (desttype
))
1010 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1013 if (STRICT_ALIGNMENT
)
1015 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1017 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1020 else if (destvar
== NULL_TREE
)
1022 if (dest_align
>= TYPE_ALIGN (srctype
))
1023 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1026 if (STRICT_ALIGNMENT
)
1028 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1030 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1034 /* Detect invalid bounds and overlapping copies and issue either
1035 -Warray-bounds or -Wrestrict. */
1037 check_bounds_or_overlap (as_a
<gcall
*>(stmt
), dest
, src
, len
, len
);
1040 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1042 tree tem
= fold_const_aggregate_ref (srcvar
);
1045 if (! is_gimple_min_invariant (srcvar
))
1047 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1048 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1050 gimple_assign_set_lhs (new_stmt
, srcvar
);
1051 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1052 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1054 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1055 goto set_vop_and_replace
;
1058 /* We get an aggregate copy. Use an unsigned char[] type to
1059 perform the copying to preserve padding and to avoid any issues
1060 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1061 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1062 tree_to_uhwi (len
));
1064 if (src_align
> TYPE_ALIGN (srctype
))
1065 srctype
= build_aligned_type (srctype
, src_align
);
1066 if (dest_align
> TYPE_ALIGN (desttype
))
1067 desttype
= build_aligned_type (desttype
, dest_align
);
1069 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
, dest
, off0
),
1070 fold_build2 (MEM_REF
, srctype
, src
, off0
));
1071 set_vop_and_replace
:
1072 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1073 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1074 if (gimple_vdef (new_stmt
)
1075 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1076 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1079 gsi_replace (gsi
, new_stmt
, false);
1082 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1086 gimple_seq stmts
= NULL
;
1087 if (endp
== 0 || endp
== 3)
1090 len
= gimple_build (&stmts
, loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1092 if (endp
== 2 || endp
== 1)
1094 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1095 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1096 TREE_TYPE (dest
), dest
, len
);
1099 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1100 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1101 gsi_replace (gsi
, repl
, false);
1105 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1106 to built-in memcmp (a, b, len). */
1109 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1111 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1116 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1118 gimple
*stmt
= gsi_stmt (*gsi
);
1119 tree a
= gimple_call_arg (stmt
, 0);
1120 tree b
= gimple_call_arg (stmt
, 1);
1121 tree len
= gimple_call_arg (stmt
, 2);
1123 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1124 replace_call_with_call_and_fold (gsi
, repl
);
1129 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1130 to built-in memmove (dest, src, len). */
1133 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1135 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1140 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1141 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1142 len) into memmove (dest, src, len). */
1144 gimple
*stmt
= gsi_stmt (*gsi
);
1145 tree src
= gimple_call_arg (stmt
, 0);
1146 tree dest
= gimple_call_arg (stmt
, 1);
1147 tree len
= gimple_call_arg (stmt
, 2);
1149 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1150 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1151 replace_call_with_call_and_fold (gsi
, repl
);
1156 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1157 to built-in memset (dest, 0, len). */
1160 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1162 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1167 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1169 gimple
*stmt
= gsi_stmt (*gsi
);
1170 tree dest
= gimple_call_arg (stmt
, 0);
1171 tree len
= gimple_call_arg (stmt
, 1);
1173 gimple_seq seq
= NULL
;
1174 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1175 gimple_seq_add_stmt_without_update (&seq
, repl
);
1176 gsi_replace_with_seq_vops (gsi
, seq
);
1182 /* Fold function call to builtin memset or bzero at *GSI setting the
1183 memory of size LEN to VAL. Return whether a simplification was made. */
1186 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1188 gimple
*stmt
= gsi_stmt (*gsi
);
1190 unsigned HOST_WIDE_INT length
, cval
;
1192 /* If the LEN parameter is zero, return DEST. */
1193 if (integer_zerop (len
))
1195 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1199 if (! tree_fits_uhwi_p (len
))
1202 if (TREE_CODE (c
) != INTEGER_CST
)
1205 tree dest
= gimple_call_arg (stmt
, 0);
1207 if (TREE_CODE (var
) != ADDR_EXPR
)
1210 var
= TREE_OPERAND (var
, 0);
1211 if (TREE_THIS_VOLATILE (var
))
1214 etype
= TREE_TYPE (var
);
1215 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1216 etype
= TREE_TYPE (etype
);
1218 if (!INTEGRAL_TYPE_P (etype
)
1219 && !POINTER_TYPE_P (etype
))
1222 if (! var_decl_component_p (var
))
1225 length
= tree_to_uhwi (len
);
1226 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1227 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1230 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1233 if (integer_zerop (c
))
1237 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1240 cval
= TREE_INT_CST_LOW (c
);
1244 cval
|= (cval
<< 31) << 1;
1247 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1248 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1249 gimple_set_vuse (store
, gimple_vuse (stmt
));
1250 tree vdef
= gimple_vdef (stmt
);
1251 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1253 gimple_set_vdef (store
, gimple_vdef (stmt
));
1254 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1256 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1257 if (gimple_call_lhs (stmt
))
1259 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1260 gsi_replace (gsi
, asgn
, false);
1264 gimple_stmt_iterator gsi2
= *gsi
;
1266 gsi_remove (&gsi2
, true);
1273 /* Obtain the minimum and maximum string length or minimum and maximum
1274 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1275 If ARG is an SSA name variable, follow its use-def chains. When
1276 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1277 if we are unable to determine the length or value, return false.
1278 VISITED is a bitmap of visited variables.
1279 TYPE is 0 if string length should be obtained, 1 for maximum string
1280 length and 2 for maximum value ARG can have.
1281 When FUZZY is non-zero and the length of a string cannot be determined,
1282 the function instead considers as the maximum possible length the
1283 size of a character array it may refer to. If FUZZY is 2, it will handle
1284 PHIs and COND_EXPRs optimistically, if we can determine string length
1285 minimum and maximum, it will use the minimum from the ones where it
1287 Set *FLEXP to true if the range of the string lengths has been
1288 obtained from the upper bound of an array at the end of a struct.
1289 Such an array may hold a string that's longer than its upper bound
1290 due to it being used as a poor-man's flexible array member. */
1293 get_range_strlen (tree arg
, tree length
[2], bitmap
*visited
, int type
,
1294 int fuzzy
, bool *flexp
)
1296 tree var
, val
= NULL_TREE
;
1299 /* The minimum and maximum length. */
1300 tree
*const minlen
= length
;
1301 tree
*const maxlen
= length
+ 1;
1303 if (TREE_CODE (arg
) != SSA_NAME
)
1305 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1306 if (TREE_CODE (arg
) == ADDR_EXPR
1307 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1309 tree op
= TREE_OPERAND (arg
, 0);
1310 if (integer_zerop (TREE_OPERAND (op
, 1)))
1312 tree aop0
= TREE_OPERAND (op
, 0);
1313 if (TREE_CODE (aop0
) == INDIRECT_REF
1314 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1315 return get_range_strlen (TREE_OPERAND (aop0
, 0),
1316 length
, visited
, type
, fuzzy
, flexp
);
1318 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
&& fuzzy
)
1320 /* Fail if an array is the last member of a struct object
1321 since it could be treated as a (fake) flexible array
1323 tree idx
= TREE_OPERAND (op
, 1);
1325 arg
= TREE_OPERAND (op
, 0);
1326 tree optype
= TREE_TYPE (arg
);
1327 if (tree dom
= TYPE_DOMAIN (optype
))
1328 if (tree bound
= TYPE_MAX_VALUE (dom
))
1329 if (TREE_CODE (bound
) == INTEGER_CST
1330 && TREE_CODE (idx
) == INTEGER_CST
1331 && tree_int_cst_lt (bound
, idx
))
1339 if (TREE_CODE (val
) != INTEGER_CST
1340 || tree_int_cst_sgn (val
) < 0)
1344 val
= c_strlen (arg
, 1);
1348 if (TREE_CODE (arg
) == ADDR_EXPR
)
1349 return get_range_strlen (TREE_OPERAND (arg
, 0), length
,
1350 visited
, type
, fuzzy
, flexp
);
1352 if (TREE_CODE (arg
) == ARRAY_REF
)
1354 tree type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1356 /* Determine the "innermost" array type. */
1357 while (TREE_CODE (type
) == ARRAY_TYPE
1358 && TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
1359 type
= TREE_TYPE (type
);
1361 /* Avoid arrays of pointers. */
1362 tree eltype
= TREE_TYPE (type
);
1363 if (TREE_CODE (type
) != ARRAY_TYPE
1364 || !INTEGRAL_TYPE_P (eltype
))
1367 val
= TYPE_SIZE_UNIT (type
);
1368 if (!val
|| integer_zerop (val
))
1371 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1373 /* Set the minimum size to zero since the string in
1374 the array could have zero length. */
1375 *minlen
= ssize_int (0);
1377 if (TREE_CODE (TREE_OPERAND (arg
, 0)) == COMPONENT_REF
1378 && type
== TREE_TYPE (TREE_OPERAND (arg
, 0))
1379 && array_at_struct_end_p (TREE_OPERAND (arg
, 0)))
1382 else if (TREE_CODE (arg
) == COMPONENT_REF
1383 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1386 /* Use the type of the member array to determine the upper
1387 bound on the length of the array. This may be overly
1388 optimistic if the array itself isn't NUL-terminated and
1389 the caller relies on the subsequent member to contain
1390 the NUL but that would only be considered valid if
1391 the array were the last member of a struct.
1392 Set *FLEXP to true if the array whose bound is being
1393 used is at the end of a struct. */
1394 if (array_at_struct_end_p (arg
))
1397 arg
= TREE_OPERAND (arg
, 1);
1399 tree type
= TREE_TYPE (arg
);
1401 while (TREE_CODE (type
) == ARRAY_TYPE
1402 && TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
1403 type
= TREE_TYPE (type
);
1405 /* Fail when the array bound is unknown or zero. */
1406 val
= TYPE_SIZE_UNIT (type
);
1407 if (!val
|| integer_zerop (val
))
1409 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1411 /* Set the minimum size to zero since the string in
1412 the array could have zero length. */
1413 *minlen
= ssize_int (0);
1418 tree type
= TREE_TYPE (arg
);
1419 if (POINTER_TYPE_P (type
))
1420 type
= TREE_TYPE (type
);
1422 if (TREE_CODE (type
) == ARRAY_TYPE
)
1424 val
= TYPE_SIZE_UNIT (type
);
1426 || TREE_CODE (val
) != INTEGER_CST
1427 || integer_zerop (val
))
1429 val
= wide_int_to_tree (TREE_TYPE (val
),
1430 wi::sub (wi::to_wide (val
), 1));
1431 /* Set the minimum size to zero since the string in
1432 the array could have zero length. */
1433 *minlen
= ssize_int (0);
1443 && TREE_CODE (*minlen
) == INTEGER_CST
1444 && TREE_CODE (val
) == INTEGER_CST
1445 && tree_int_cst_lt (val
, *minlen
)))
1452 if (TREE_CODE (*maxlen
) != INTEGER_CST
1453 || TREE_CODE (val
) != INTEGER_CST
)
1456 if (tree_int_cst_lt (*maxlen
, val
))
1460 else if (simple_cst_equal (val
, *maxlen
) != 1)
1468 /* If ARG is registered for SSA update we cannot look at its defining
1470 if (name_registered_for_update_p (arg
))
1473 /* If we were already here, break the infinite cycle. */
1475 *visited
= BITMAP_ALLOC (NULL
);
1476 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1480 def_stmt
= SSA_NAME_DEF_STMT (var
);
1482 switch (gimple_code (def_stmt
))
1485 /* The RHS of the statement defining VAR must either have a
1486 constant length or come from another SSA_NAME with a constant
1488 if (gimple_assign_single_p (def_stmt
)
1489 || gimple_assign_unary_nop_p (def_stmt
))
1491 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1492 return get_range_strlen (rhs
, length
, visited
, type
, fuzzy
, flexp
);
1494 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1496 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1497 gimple_assign_rhs3 (def_stmt
) };
1499 for (unsigned int i
= 0; i
< 2; i
++)
1500 if (!get_range_strlen (ops
[i
], length
, visited
, type
, fuzzy
,
1504 *maxlen
= build_all_ones_cst (size_type_node
);
1513 /* All the arguments of the PHI node must have the same constant
1515 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1517 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1519 /* If this PHI has itself as an argument, we cannot
1520 determine the string length of this argument. However,
1521 if we can find a constant string length for the other
1522 PHI args then we can still be sure that this is a
1523 constant string length. So be optimistic and just
1524 continue with the next argument. */
1525 if (arg
== gimple_phi_result (def_stmt
))
1528 if (!get_range_strlen (arg
, length
, visited
, type
, fuzzy
, flexp
))
1531 *maxlen
= build_all_ones_cst (size_type_node
);
1543 /* Determine the minimum and maximum value or string length that ARG
1544 refers to and store each in the first two elements of MINMAXLEN.
1545 For expressions that point to strings of unknown lengths that are
1546 character arrays, use the upper bound of the array as the maximum
1547 length. For example, given an expression like 'x ? array : "xyz"'
1548 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1549 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1551 Return true if the range of the string lengths has been obtained
1552 from the upper bound of an array at the end of a struct. Such
1553 an array may hold a string that's longer than its upper bound
1554 due to it being used as a poor-man's flexible array member.
1556 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1557 and false if PHIs and COND_EXPRs are to be handled optimistically,
1558 if we can determine string length minimum and maximum; it will use
1559 the minimum from the ones where it can be determined.
1560 STRICT false should be only used for warning code. */
1563 get_range_strlen (tree arg
, tree minmaxlen
[2], bool strict
)
1565 bitmap visited
= NULL
;
1567 minmaxlen
[0] = NULL_TREE
;
1568 minmaxlen
[1] = NULL_TREE
;
1570 bool flexarray
= false;
1571 if (!get_range_strlen (arg
, minmaxlen
, &visited
, 1, strict
? 1 : 2,
1574 minmaxlen
[0] = NULL_TREE
;
1575 minmaxlen
[1] = NULL_TREE
;
1579 BITMAP_FREE (visited
);
1585 get_maxval_strlen (tree arg
, int type
)
1587 bitmap visited
= NULL
;
1588 tree len
[2] = { NULL_TREE
, NULL_TREE
};
1591 if (!get_range_strlen (arg
, len
, &visited
, type
, 0, &dummy
))
1594 BITMAP_FREE (visited
);
1600 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1601 If LEN is not NULL, it represents the length of the string to be
1602 copied. Return NULL_TREE if no simplification can be made. */
1605 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1606 tree dest
, tree src
)
1608 gimple
*stmt
= gsi_stmt (*gsi
);
1609 location_t loc
= gimple_location (stmt
);
1612 /* If SRC and DEST are the same (and not volatile), return DEST. */
1613 if (operand_equal_p (src
, dest
, 0))
1615 /* Issue -Wrestrict unless the pointers are null (those do
1616 not point to objects and so do not indicate an overlap;
1617 such calls could be the result of sanitization and jump
1619 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
1621 tree func
= gimple_call_fndecl (stmt
);
1623 warning_at (loc
, OPT_Wrestrict
,
1624 "%qD source argument is the same as destination",
1628 replace_call_with_value (gsi
, dest
);
1632 if (optimize_function_for_size_p (cfun
))
1635 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1639 tree len
= get_maxval_strlen (src
, 0);
1643 len
= fold_convert_loc (loc
, size_type_node
, len
);
1644 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1645 len
= force_gimple_operand_gsi (gsi
, len
, true,
1646 NULL_TREE
, true, GSI_SAME_STMT
);
1647 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1648 replace_call_with_call_and_fold (gsi
, repl
);
1652 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1653 If SLEN is not NULL, it represents the length of the source string.
1654 Return NULL_TREE if no simplification can be made. */
1657 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1658 tree dest
, tree src
, tree len
)
1660 gimple
*stmt
= gsi_stmt (*gsi
);
1661 location_t loc
= gimple_location (stmt
);
1662 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
1664 /* If the LEN parameter is zero, return DEST. */
1665 if (integer_zerop (len
))
1667 /* Avoid warning if the destination refers to a an array/pointer
1668 decorate with attribute nonstring. */
1671 tree fndecl
= gimple_call_fndecl (stmt
);
1672 gcall
*call
= as_a
<gcall
*> (stmt
);
1674 /* Warn about the lack of nul termination: the result is not
1675 a (nul-terminated) string. */
1676 tree slen
= get_maxval_strlen (src
, 0);
1677 if (slen
&& !integer_zerop (slen
))
1678 warning_at (loc
, OPT_Wstringop_truncation
,
1679 "%G%qD destination unchanged after copying no bytes "
1680 "from a string of length %E",
1681 call
, fndecl
, slen
);
1683 warning_at (loc
, OPT_Wstringop_truncation
,
1684 "%G%qD destination unchanged after copying no bytes",
1688 replace_call_with_value (gsi
, dest
);
1692 /* We can't compare slen with len as constants below if len is not a
1694 if (TREE_CODE (len
) != INTEGER_CST
)
1697 /* Now, we must be passed a constant src ptr parameter. */
1698 tree slen
= get_maxval_strlen (src
, 0);
1699 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1702 /* The size of the source string including the terminating nul. */
1703 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1705 /* We do not support simplification of this case, though we do
1706 support it when expanding trees into RTL. */
1707 /* FIXME: generate a call to __builtin_memset. */
1708 if (tree_int_cst_lt (ssize
, len
))
1711 /* Diagnose truncation that leaves the copy unterminated. */
1712 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
1714 /* OK transform into builtin memcpy. */
1715 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1719 len
= fold_convert_loc (loc
, size_type_node
, len
);
1720 len
= force_gimple_operand_gsi (gsi
, len
, true,
1721 NULL_TREE
, true, GSI_SAME_STMT
);
1722 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1723 replace_call_with_call_and_fold (gsi
, repl
);
1728 /* Fold function call to builtin strchr or strrchr.
1729 If both arguments are constant, evaluate and fold the result,
1730 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1731 In general strlen is significantly faster than strchr
1732 due to being a simpler operation. */
1734 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
1736 gimple
*stmt
= gsi_stmt (*gsi
);
1737 tree str
= gimple_call_arg (stmt
, 0);
1738 tree c
= gimple_call_arg (stmt
, 1);
1739 location_t loc
= gimple_location (stmt
);
1743 if (!gimple_call_lhs (stmt
))
1746 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
1748 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
1752 replace_call_with_value (gsi
, integer_zero_node
);
1756 tree len
= build_int_cst (size_type_node
, p1
- p
);
1757 gimple_seq stmts
= NULL
;
1758 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1759 POINTER_PLUS_EXPR
, str
, len
);
1760 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1761 gsi_replace_with_seq_vops (gsi
, stmts
);
1765 if (!integer_zerop (c
))
1768 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1769 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
1771 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
1775 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
1776 replace_call_with_call_and_fold (gsi
, repl
);
1784 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1789 /* Create newstr = strlen (str). */
1790 gimple_seq stmts
= NULL
;
1791 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
1792 gimple_set_location (new_stmt
, loc
);
1793 len
= create_tmp_reg_or_ssa_name (size_type_node
);
1794 gimple_call_set_lhs (new_stmt
, len
);
1795 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1797 /* Create (str p+ strlen (str)). */
1798 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1799 POINTER_PLUS_EXPR
, str
, len
);
1800 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1801 gsi_replace_with_seq_vops (gsi
, stmts
);
1802 /* gsi now points at the assignment to the lhs, get a
1803 stmt iterator to the strlen.
1804 ??? We can't use gsi_for_stmt as that doesn't work when the
1805 CFG isn't built yet. */
1806 gimple_stmt_iterator gsi2
= *gsi
;
1812 /* Fold function call to builtin strstr.
1813 If both arguments are constant, evaluate and fold the result,
1814 additionally fold strstr (x, "") into x and strstr (x, "c")
1815 into strchr (x, 'c'). */
1817 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
1819 gimple
*stmt
= gsi_stmt (*gsi
);
1820 tree haystack
= gimple_call_arg (stmt
, 0);
1821 tree needle
= gimple_call_arg (stmt
, 1);
1824 if (!gimple_call_lhs (stmt
))
1827 q
= c_getstr (needle
);
1831 if ((p
= c_getstr (haystack
)))
1833 const char *r
= strstr (p
, q
);
1837 replace_call_with_value (gsi
, integer_zero_node
);
1841 tree len
= build_int_cst (size_type_node
, r
- p
);
1842 gimple_seq stmts
= NULL
;
1844 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
1846 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1847 gsi_replace_with_seq_vops (gsi
, stmts
);
1851 /* For strstr (x, "") return x. */
1854 replace_call_with_value (gsi
, haystack
);
1858 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1861 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
1864 tree c
= build_int_cst (integer_type_node
, q
[0]);
1865 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
1866 replace_call_with_call_and_fold (gsi
, repl
);
1874 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1877 Return NULL_TREE if no simplification was possible, otherwise return the
1878 simplified form of the call as a tree.
1880 The simplified form may be a constant or other expression which
1881 computes the same value, but in a more efficient manner (including
1882 calls to other builtin functions).
1884 The call may contain arguments which need to be evaluated, but
1885 which are not useful to determine the result of the call. In
1886 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1887 COMPOUND_EXPR will be an argument which must be evaluated.
1888 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1889 COMPOUND_EXPR in the chain will contain the tree for the simplified
1890 form of the builtin function call. */
1893 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1895 gimple
*stmt
= gsi_stmt (*gsi
);
1896 location_t loc
= gimple_location (stmt
);
1898 const char *p
= c_getstr (src
);
1900 /* If the string length is zero, return the dst parameter. */
1901 if (p
&& *p
== '\0')
1903 replace_call_with_value (gsi
, dst
);
1907 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1910 /* See if we can store by pieces into (dst + strlen(dst)). */
1912 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1913 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1915 if (!strlen_fn
|| !memcpy_fn
)
1918 /* If the length of the source string isn't computable don't
1919 split strcat into strlen and memcpy. */
1920 tree len
= get_maxval_strlen (src
, 0);
1924 /* Create strlen (dst). */
1925 gimple_seq stmts
= NULL
, stmts2
;
1926 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
1927 gimple_set_location (repl
, loc
);
1928 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
1929 gimple_call_set_lhs (repl
, newdst
);
1930 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1932 /* Create (dst p+ strlen (dst)). */
1933 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1934 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1935 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1937 len
= fold_convert_loc (loc
, size_type_node
, len
);
1938 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1939 build_int_cst (size_type_node
, 1));
1940 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1941 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1943 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1944 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1945 if (gimple_call_lhs (stmt
))
1947 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1948 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1949 gsi_replace_with_seq_vops (gsi
, stmts
);
1950 /* gsi now points at the assignment to the lhs, get a
1951 stmt iterator to the memcpy call.
1952 ??? We can't use gsi_for_stmt as that doesn't work when the
1953 CFG isn't built yet. */
1954 gimple_stmt_iterator gsi2
= *gsi
;
1960 gsi_replace_with_seq_vops (gsi
, stmts
);
1966 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1967 are the arguments to the call. */
1970 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1972 gimple
*stmt
= gsi_stmt (*gsi
);
1973 tree dest
= gimple_call_arg (stmt
, 0);
1974 tree src
= gimple_call_arg (stmt
, 1);
1975 tree size
= gimple_call_arg (stmt
, 2);
1981 /* If the SRC parameter is "", return DEST. */
1982 if (p
&& *p
== '\0')
1984 replace_call_with_value (gsi
, dest
);
1988 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1991 /* If __builtin_strcat_chk is used, assume strcat is available. */
1992 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1996 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
1997 replace_call_with_call_and_fold (gsi
, repl
);
2001 /* Simplify a call to the strncat builtin. */
2004 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2006 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2007 tree dst
= gimple_call_arg (stmt
, 0);
2008 tree src
= gimple_call_arg (stmt
, 1);
2009 tree len
= gimple_call_arg (stmt
, 2);
2011 const char *p
= c_getstr (src
);
2013 /* If the requested length is zero, or the src parameter string
2014 length is zero, return the dst parameter. */
2015 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2017 replace_call_with_value (gsi
, dst
);
2021 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2024 unsigned srclen
= strlen (p
);
2026 int cmpsrc
= compare_tree_int (len
, srclen
);
2028 /* Return early if the requested len is less than the string length.
2029 Warnings will be issued elsewhere later. */
2033 unsigned HOST_WIDE_INT dstsize
;
2035 bool nowarn
= gimple_no_warning_p (stmt
);
2037 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2039 int cmpdst
= compare_tree_int (len
, dstsize
);
2043 tree fndecl
= gimple_call_fndecl (stmt
);
2045 /* Strncat copies (at most) LEN bytes and always appends
2046 the terminating NUL so the specified bound should never
2047 be equal to (or greater than) the size of the destination.
2048 If it is, the copy could overflow. */
2049 location_t loc
= gimple_location (stmt
);
2050 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2052 ? G_("%G%qD specified bound %E equals "
2054 : G_("%G%qD specified bound %E exceeds "
2055 "destination size %wu"),
2056 stmt
, fndecl
, len
, dstsize
);
2058 gimple_set_no_warning (stmt
, true);
2062 if (!nowarn
&& cmpsrc
== 0)
2064 tree fndecl
= gimple_call_fndecl (stmt
);
2066 /* To avoid certain truncation the specified bound should also
2067 not be equal to (or less than) the length of the source. */
2068 location_t loc
= gimple_location (stmt
);
2069 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2070 "%G%qD specified bound %E equals source length",
2072 gimple_set_no_warning (stmt
, true);
2075 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2077 /* If the replacement _DECL isn't initialized, don't do the
2082 /* Otherwise, emit a call to strcat. */
2083 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2084 replace_call_with_call_and_fold (gsi
, repl
);
2088 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2092 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2094 gimple
*stmt
= gsi_stmt (*gsi
);
2095 tree dest
= gimple_call_arg (stmt
, 0);
2096 tree src
= gimple_call_arg (stmt
, 1);
2097 tree len
= gimple_call_arg (stmt
, 2);
2098 tree size
= gimple_call_arg (stmt
, 3);
2103 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2104 if ((p
&& *p
== '\0')
2105 || integer_zerop (len
))
2107 replace_call_with_value (gsi
, dest
);
2111 if (! tree_fits_uhwi_p (size
))
2114 if (! integer_all_onesp (size
))
2116 tree src_len
= c_strlen (src
, 1);
2118 && tree_fits_uhwi_p (src_len
)
2119 && tree_fits_uhwi_p (len
)
2120 && ! tree_int_cst_lt (len
, src_len
))
2122 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2123 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2127 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2128 replace_call_with_call_and_fold (gsi
, repl
);
2134 /* If __builtin_strncat_chk is used, assume strncat is available. */
2135 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2139 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2140 replace_call_with_call_and_fold (gsi
, repl
);
2144 /* Build and append gimple statements to STMTS that would load a first
2145 character of a memory location identified by STR. LOC is location
2146 of the statement. */
2149 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2153 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2154 tree cst_uchar_ptr_node
2155 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2156 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2158 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2159 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2160 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2162 gimple_assign_set_lhs (stmt
, var
);
2163 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2168 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2169 FCODE is the name of the builtin. */
2172 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2174 gimple
*stmt
= gsi_stmt (*gsi
);
2175 tree callee
= gimple_call_fndecl (stmt
);
2176 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2178 tree type
= integer_type_node
;
2179 tree str1
= gimple_call_arg (stmt
, 0);
2180 tree str2
= gimple_call_arg (stmt
, 1);
2181 tree lhs
= gimple_call_lhs (stmt
);
2182 HOST_WIDE_INT length
= -1;
2184 /* Handle strncmp and strncasecmp functions. */
2185 if (gimple_call_num_args (stmt
) == 3)
2187 tree len
= gimple_call_arg (stmt
, 2);
2188 if (tree_fits_uhwi_p (len
))
2189 length
= tree_to_uhwi (len
);
2192 /* If the LEN parameter is zero, return zero. */
2195 replace_call_with_value (gsi
, integer_zero_node
);
2199 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2200 if (operand_equal_p (str1
, str2
, 0))
2202 replace_call_with_value (gsi
, integer_zero_node
);
2206 const char *p1
= c_getstr (str1
);
2207 const char *p2
= c_getstr (str2
);
2209 /* For known strings, return an immediate value. */
2213 bool known_result
= false;
2217 case BUILT_IN_STRCMP
:
2219 r
= strcmp (p1
, p2
);
2220 known_result
= true;
2223 case BUILT_IN_STRNCMP
:
2227 r
= strncmp (p1
, p2
, length
);
2228 known_result
= true;
2231 /* Only handleable situation is where the string are equal (result 0),
2232 which is already handled by operand_equal_p case. */
2233 case BUILT_IN_STRCASECMP
:
2235 case BUILT_IN_STRNCASECMP
:
2239 r
= strncmp (p1
, p2
, length
);
2241 known_result
= true;
2250 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2255 bool nonzero_length
= length
>= 1
2256 || fcode
== BUILT_IN_STRCMP
2257 || fcode
== BUILT_IN_STRCASECMP
;
2259 location_t loc
= gimple_location (stmt
);
2261 /* If the second arg is "", return *(const unsigned char*)arg1. */
2262 if (p2
&& *p2
== '\0' && nonzero_length
)
2264 gimple_seq stmts
= NULL
;
2265 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2268 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2269 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2272 gsi_replace_with_seq_vops (gsi
, stmts
);
2276 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2277 if (p1
&& *p1
== '\0' && nonzero_length
)
2279 gimple_seq stmts
= NULL
;
2280 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2284 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2285 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2286 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2288 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2289 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2292 gsi_replace_with_seq_vops (gsi
, stmts
);
2296 /* If len parameter is one, return an expression corresponding to
2297 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2298 if (fcode
== BUILT_IN_STRNCMP
&& length
== 1)
2300 gimple_seq stmts
= NULL
;
2301 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2302 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2306 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2307 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2308 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2310 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2311 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2312 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2314 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2315 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2318 gsi_replace_with_seq_vops (gsi
, stmts
);
2322 /* If length is larger than the length of one constant string,
2323 replace strncmp with corresponding strcmp */
2324 if (fcode
== BUILT_IN_STRNCMP
2326 && ((p2
&& (size_t) length
> strlen (p2
))
2327 || (p1
&& (size_t) length
> strlen (p1
))))
2329 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2332 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2333 replace_call_with_call_and_fold (gsi
, repl
);
2340 /* Fold a call to the memchr pointed by GSI iterator. */
2343 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2345 gimple
*stmt
= gsi_stmt (*gsi
);
2346 tree lhs
= gimple_call_lhs (stmt
);
2347 tree arg1
= gimple_call_arg (stmt
, 0);
2348 tree arg2
= gimple_call_arg (stmt
, 1);
2349 tree len
= gimple_call_arg (stmt
, 2);
2351 /* If the LEN parameter is zero, return zero. */
2352 if (integer_zerop (len
))
2354 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2359 if (TREE_CODE (arg2
) != INTEGER_CST
2360 || !tree_fits_uhwi_p (len
)
2361 || !target_char_cst_p (arg2
, &c
))
2364 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2365 unsigned HOST_WIDE_INT string_length
;
2366 const char *p1
= c_getstr (arg1
, &string_length
);
2370 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2373 if (length
<= string_length
)
2375 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2381 unsigned HOST_WIDE_INT offset
= r
- p1
;
2382 gimple_seq stmts
= NULL
;
2383 if (lhs
!= NULL_TREE
)
2385 tree offset_cst
= build_int_cst (TREE_TYPE (len
), offset
);
2386 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2388 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2391 gimple_seq_add_stmt_without_update (&stmts
,
2392 gimple_build_nop ());
2394 gsi_replace_with_seq_vops (gsi
, stmts
);
/* NOTE(review): extraction appears to have dropped lines (braces, returns,
   the switch/case bodies' terminators) -- verify against upstream.  */
2402 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2403 to the call. IGNORE is true if the value returned
2404 by the builtin will be ignored. UNLOCKED is true is true if this
2405 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2406 the known length of the string. Return NULL_TREE if no simplification
2410 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2411 tree arg0
, tree arg1
,
2414 gimple
*stmt
= gsi_stmt (*gsi
);
2416 /* If we're using an unlocked function, assume the other unlocked
2417 functions exist explicitly. */
2418 tree
const fn_fputc
= (unlocked
2419 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2420 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2421 tree
const fn_fwrite
= (unlocked
2422 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2423 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2425 /* If the return value is used, don't do the transformation. */
2426 if (gimple_call_lhs (stmt
))
2429 /* Get the length of the string passed to fputs. If the length
2430 can't be determined, punt. */
2431 tree len
= get_maxval_strlen (arg0
, 0);
2433 || TREE_CODE (len
) != INTEGER_CST
)
/* Dispatch on strlen (ARG0) compared against 1.  */
2436 switch (compare_tree_int (len
, 1))
2438 case -1: /* length is 0, delete the call entirely . */
2439 replace_call_with_value (gsi
, integer_zero_node
);
2442 case 0: /* length is 1, call fputc. */
2444 const char *p
= c_getstr (arg0
);
2450 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2452 (integer_type_node
, p
[0]), arg1
);
2453 replace_call_with_call_and_fold (gsi
, repl
);
2458 case 1: /* length is greater than 1, call fwrite. */
2460 /* If optimizing for size keep fputs. */
2461 if (optimize_function_for_size_p (cfun
))
2463 /* New argument list transforming fputs(string, stream) to
2464 fwrite(string, 1, len, stream). */
2468 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2469 size_one_node
, len
, arg1
);
2470 replace_call_with_call_and_fold (gsi
, repl
);
/* NOTE(review): fragmentary extraction -- braces, breaks and returns are
   missing around the conditionals and the switch; confirm upstream.  */
2479 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2480 DEST, SRC, LEN, and SIZE are the arguments to the call.
2481 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2482 code of the builtin. If MAXLEN is not NULL, it is maximum length
2483 passed as third argument. */
2486 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2487 tree dest
, tree src
, tree len
, tree size
,
2488 enum built_in_function fcode
)
2490 gimple
*stmt
= gsi_stmt (*gsi
);
2491 location_t loc
= gimple_location (stmt
);
2492 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2495 /* If SRC and DEST are the same (and not volatile), return DEST
2496 (resp. DEST+LEN for __mempcpy_chk). */
2497 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
2499 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
2501 replace_call_with_value (gsi
, dest
);
/* __mempcpy_chk returns DEST + LEN; build that value explicitly.  */
2506 gimple_seq stmts
= NULL
;
2507 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
2508 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
2509 TREE_TYPE (dest
), dest
, len
);
2510 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2511 replace_call_with_value (gsi
, temp
);
2516 if (! tree_fits_uhwi_p (size
))
2519 tree maxlen
= get_maxval_strlen (len
, 2);
2520 if (! integer_all_onesp (size
))
2522 if (! tree_fits_uhwi_p (len
))
2524 /* If LEN is not constant, try MAXLEN too.
2525 For MAXLEN only allow optimizing into non-_ocs function
2526 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2527 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2529 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
2531 /* (void) __mempcpy_chk () can be optimized into
2532 (void) __memcpy_chk (). */
2533 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
)
;
2537 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2538 replace_call_with_call_and_fold (gsi
, repl
);
2547 if (tree_int_cst_lt (size
, maxlen
))
2552 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2553 mem{cpy,pcpy,move,set} is available. */
2556 case BUILT_IN_MEMCPY_CHK
:
2557 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
2559 case BUILT_IN_MEMPCPY_CHK
:
2560 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
2562 case BUILT_IN_MEMMOVE_CHK
:
2563 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
2565 case BUILT_IN_MEMSET_CHK
:
2566 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
/* Replace the checking call by the plain 3-argument variant.  */
2575 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2576 replace_call_with_call_and_fold (gsi
, repl
);
/* NOTE(review): lines lost in extraction (braces/returns, the DEST
   parameter in the signature); confirm against upstream before relying
   on the control flow.  */
2580 /* Fold a call to the __st[rp]cpy_chk builtin.
2581 DEST, SRC, and SIZE are the arguments to the call.
2582 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2583 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2584 strings passed as second argument. */
2587 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
2589 tree src
, tree size
,
2590 enum built_in_function fcode
)
2592 gimple
*stmt
= gsi_stmt (*gsi
);
2593 location_t loc
= gimple_location (stmt
);
2594 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2597 /* If SRC and DEST are the same (and not volatile), return DEST. */
2598 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
2600 /* Issue -Wrestrict unless the pointers are null (those do
2601 not point to objects and so do not indicate an overlap;
2602 such calls could be the result of sanitization and jump
2604 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
2606 tree func
= gimple_call_fndecl (stmt
);
2608 warning_at (loc
, OPT_Wrestrict
,
2609 "%qD source argument is the same as destination",
2613 replace_call_with_value (gsi
, dest
);
2617 if (! tree_fits_uhwi_p (size
))
2620 tree maxlen
= get_maxval_strlen (src
, 1);
2621 if (! integer_all_onesp (size
))
2623 len
= c_strlen (src
, 1);
2624 if (! len
|| ! tree_fits_uhwi_p (len
))
2626 /* If LEN is not constant, try MAXLEN too.
2627 For MAXLEN only allow optimizing into non-_ocs function
2628 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2629 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2631 if (fcode
== BUILT_IN_STPCPY_CHK
)
2636 /* If return value of __stpcpy_chk is ignored,
2637 optimize into __strcpy_chk. */
2638 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
2642 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2643 replace_call_with_call_and_fold (gsi
, repl
);
2647 if (! len
|| TREE_SIDE_EFFECTS (len
))
2650 /* If c_strlen returned something, but not a constant,
2651 transform __strcpy_chk into __memcpy_chk. */
2652 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
2656 gimple_seq stmts
= NULL
;
2657 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2658 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
2659 build_int_cst (size_type_node
, 1));
2660 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2661 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2662 replace_call_with_call_and_fold (gsi
, repl
);
2669 if (! tree_int_cst_lt (maxlen
, size
))
2673 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2674 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
2675 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
2679 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2680 replace_call_with_call_and_fold (gsi
, repl
);
/* NOTE(review): braces and return statements are missing from this
   extraction; confirm control flow against upstream.  */
2684 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2685 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2686 length passed as third argument. IGNORE is true if return value can be
2687 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2690 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
2691 tree dest
, tree src
,
2692 tree len
, tree size
,
2693 enum built_in_function fcode
)
2695 gimple
*stmt
= gsi_stmt (*gsi
);
2696 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2699 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
2701 /* If return value of __stpncpy_chk is ignored,
2702 optimize into __strncpy_chk. */
2703 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
2706 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2707 replace_call_with_call_and_fold (gsi
, repl
);
2712 if (! tree_fits_uhwi_p (size
))
2715 tree maxlen
= get_maxval_strlen (len
, 2);
2716 if (! integer_all_onesp (size
))
2718 if (! tree_fits_uhwi_p (len
))
2720 /* If LEN is not constant, try MAXLEN too.
2721 For MAXLEN only allow optimizing into non-_ocs function
2722 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2723 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2729 if (tree_int_cst_lt (size
, maxlen
))
2733 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2734 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2735 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2739 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2740 replace_call_with_call_and_fold (gsi
, repl
);
/* NOTE(review): several lines (braces, returns, a null-check on LEN at
   original line 2768) appear lost in extraction; confirm upstream.  */
2744 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2745 Return NULL_TREE if no simplification can be made. */
2748 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
2750 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2751 location_t loc
= gimple_location (stmt
);
2752 tree dest
= gimple_call_arg (stmt
, 0);
2753 tree src
= gimple_call_arg (stmt
, 1);
2754 tree fn
, len
, lenp1
;
2756 /* If the result is unused, replace stpcpy with strcpy. */
2757 if (gimple_call_lhs (stmt
) == NULL_TREE
)
2759 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2762 gimple_call_set_fndecl (stmt
, fn
);
2767 len
= c_strlen (src
, 1);
2769 || TREE_CODE (len
) != INTEGER_CST
)
2772 if (optimize_function_for_size_p (cfun
)
2773 /* If length is zero it's small enough. */
2774 && !integer_zerop (len
))
2777 /* If the source has a known length replace stpcpy with memcpy. */
2778 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
/* Build LEN + 1 (to copy the NUL) and emit the memcpy call with the
   virtual operands carried over from the original statement.  */
2782 gimple_seq stmts
= NULL
;
2783 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2784 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
2785 tem
, build_int_cst (size_type_node
, 1));
2786 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2787 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
2788 gimple_set_vuse (repl
, gimple_vuse (stmt
));
2789 gimple_set_vdef (repl
, gimple_vdef (stmt
));
2790 if (gimple_vdef (repl
)
2791 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
2792 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
2793 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
2794 /* Replace the result with dest + len. */
2796 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
2797 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2798 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
2799 POINTER_PLUS_EXPR
, dest
, tem
);
2800 gsi_replace (gsi
, ret
, false);
2801 /* Finally fold the memcpy call. */
2802 gimple_stmt_iterator gsi2
= *gsi
;
/* NOTE(review): extraction dropped braces/returns; confirm upstream.  */
2808 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2809 NULL_TREE if a normal call should be emitted rather than expanding
2810 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2811 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2812 passed as second argument. */
2815 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2816 enum built_in_function fcode
)
2818 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2819 tree dest
, size
, len
, fn
, fmt
, flag
;
2820 const char *fmt_str
;
2822 /* Verify the required arguments in the original call. */
2823 if (gimple_call_num_args (stmt
) < 5)
2826 dest
= gimple_call_arg (stmt
, 0);
2827 len
= gimple_call_arg (stmt
, 1);
2828 flag
= gimple_call_arg (stmt
, 2);
2829 size
= gimple_call_arg (stmt
, 3);
2830 fmt
= gimple_call_arg (stmt
, 4);
2832 if (! tree_fits_uhwi_p (size
))
2835 if (! integer_all_onesp (size
))
2837 tree maxlen
= get_maxval_strlen (len
, 2);
2838 if (! tree_fits_uhwi_p (len
))
2840 /* If LEN is not constant, try MAXLEN too.
2841 For MAXLEN only allow optimizing into non-_ocs function
2842 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2843 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2849 if (tree_int_cst_lt (size
, maxlen
))
2853 if (!init_target_chars ())
2856 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2857 or if format doesn't contain % chars or is "%s". */
2858 if (! integer_zerop (flag
))
2860 fmt_str
= c_getstr (fmt
);
2861 if (fmt_str
== NULL
)
2863 if (strchr (fmt_str
, target_percent
) != NULL
2864 && strcmp (fmt_str
, target_percent_s
))
2868 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2870 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2871 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2875 /* Replace the called function and the first 5 argument by 3 retaining
2876 trailing varargs. */
2877 gimple_call_set_fndecl (stmt
, fn
);
2878 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2879 gimple_call_set_arg (stmt
, 0, dest
);
2880 gimple_call_set_arg (stmt
, 1, len
);
2881 gimple_call_set_arg (stmt
, 2, fmt
);
/* Shift the remaining varargs down by two positions and shrink the
   statement's operand count accordingly.  */
2882 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2883 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2884 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
/* NOTE(review): extraction dropped braces/returns and an argument-count
   check near original line 2904; confirm upstream.  */
2889 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2890 Return NULL_TREE if a normal call should be emitted rather than
2891 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2892 or BUILT_IN_VSPRINTF_CHK. */
2895 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2896 enum built_in_function fcode
)
2898 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2899 tree dest
, size
, len
, fn
, fmt
, flag
;
2900 const char *fmt_str
;
2901 unsigned nargs
= gimple_call_num_args (stmt
);
2903 /* Verify the required arguments in the original call. */
2906 dest
= gimple_call_arg (stmt
, 0);
2907 flag
= gimple_call_arg (stmt
, 1);
2908 size
= gimple_call_arg (stmt
, 2);
2909 fmt
= gimple_call_arg (stmt
, 3);
2911 if (! tree_fits_uhwi_p (size
))
2916 if (!init_target_chars ())
2919 /* Check whether the format is a literal string constant. */
2920 fmt_str
= c_getstr (fmt
);
2921 if (fmt_str
!= NULL
)
2923 /* If the format doesn't contain % args or %%, we know the size. */
2924 if (strchr (fmt_str
, target_percent
) == 0)
2926 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2927 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2929 /* If the format is "%s" and first ... argument is a string literal,
2930 we know the size too. */
2931 else if (fcode
== BUILT_IN_SPRINTF_CHK
2932 && strcmp (fmt_str
, target_percent_s
) == 0)
2938 arg
= gimple_call_arg (stmt
, 4);
2939 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2941 len
= c_strlen (arg
, 1);
2942 if (! len
|| ! tree_fits_uhwi_p (len
))
2949 if (! integer_all_onesp (size
))
2951 if (! len
|| ! tree_int_cst_lt (len
, size
))
2955 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2956 or if format doesn't contain % chars or is "%s". */
2957 if (! integer_zerop (flag
))
2959 if (fmt_str
== NULL
)
2961 if (strchr (fmt_str
, target_percent
) != NULL
2962 && strcmp (fmt_str
, target_percent_s
))
2966 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2967 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2968 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2972 /* Replace the called function and the first 4 argument by 2 retaining
2973 trailing varargs. */
2974 gimple_call_set_fndecl (stmt
, fn
);
2975 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2976 gimple_call_set_arg (stmt
, 0, dest
);
2977 gimple_call_set_arg (stmt
, 1, fmt
);
/* Shift the remaining varargs down and shrink the operand count.  */
2978 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2979 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2980 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
/* NOTE(review): braces/returns and a few guard conditions are missing
   from this extraction; confirm upstream.  */
2985 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2986 ORIG may be null if this is a 2-argument call. We don't attempt to
2987 simplify calls with more than 3 arguments.
2989 Return true if simplification was possible, otherwise false. */
2992 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2994 gimple
*stmt
= gsi_stmt (*gsi
);
2995 tree dest
= gimple_call_arg (stmt
, 0);
2996 tree fmt
= gimple_call_arg (stmt
, 1);
2997 tree orig
= NULL_TREE
;
2998 const char *fmt_str
= NULL
;
3000 /* Verify the required arguments in the original call. We deal with two
3001 types of sprintf() calls: 'sprintf (str, fmt)' and
3002 'sprintf (dest, "%s", orig)'. */
3003 if (gimple_call_num_args (stmt
) > 3)
3006 if (gimple_call_num_args (stmt
) == 3)
3007 orig
= gimple_call_arg (stmt
, 2);
3009 /* Check whether the format is a literal string constant. */
3010 fmt_str
= c_getstr (fmt
);
3011 if (fmt_str
== NULL
)
3014 if (!init_target_chars ())
3017 /* If the format doesn't contain % args or %%, use strcpy. */
3018 if (strchr (fmt_str
, target_percent
) == NULL
)
3020 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3025 /* Don't optimize sprintf (buf, "abc", ptr++). */
3029 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3030 'format' is known to contain no % formats. */
3031 gimple_seq stmts
= NULL
;
3032 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3033 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3034 if (gimple_call_lhs (stmt
))
3036 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
3037 build_int_cst (integer_type_node
,
3039 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3040 gsi_replace_with_seq_vops (gsi
, stmts
);
3041 /* gsi now points at the assignment to the lhs, get a
3042 stmt iterator to the memcpy call.
3043 ??? We can't use gsi_for_stmt as that doesn't work when the
3044 CFG isn't built yet. */
3045 gimple_stmt_iterator gsi2
= *gsi
;
3051 gsi_replace_with_seq_vops (gsi
, stmts
);
3057 /* If the format is "%s", use strcpy if the result isn't used. */
3058 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3061 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3066 /* Don't crash on sprintf (str1, "%s"). */
3070 tree orig_len
= NULL_TREE
;
3071 if (gimple_call_lhs (stmt
))
3073 orig_len
= get_maxval_strlen (orig
, 0);
3078 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3079 gimple_seq stmts
= NULL
;
3080 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3081 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3082 if (gimple_call_lhs (stmt
))
3084 if (!useless_type_conversion_p (integer_type_node
,
3085 TREE_TYPE (orig_len
)))
3086 orig_len
= fold_convert (integer_type_node
, orig_len
);
3087 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
3088 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3089 gsi_replace_with_seq_vops (gsi
, stmts
);
3090 /* gsi now points at the assignment to the lhs, get a
3091 stmt iterator to the memcpy call.
3092 ??? We can't use gsi_for_stmt as that doesn't work when the
3093 CFG isn't built yet. */
3094 gimple_stmt_iterator gsi2
= *gsi
;
3100 gsi_replace_with_seq_vops (gsi
, stmts
);
/* NOTE(review): fragmentary extraction (missing braces/returns and some
   guard conditions); confirm upstream.  */
3108 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3109 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3110 attempt to simplify calls with more than 4 arguments.
3112 Return true if simplification was possible, otherwise false. */
3115 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3117 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3118 tree dest
= gimple_call_arg (stmt
, 0);
3119 tree destsize
= gimple_call_arg (stmt
, 1);
3120 tree fmt
= gimple_call_arg (stmt
, 2);
3121 tree orig
= NULL_TREE
;
3122 const char *fmt_str
= NULL
;
3124 if (gimple_call_num_args (stmt
) > 4)
3127 if (gimple_call_num_args (stmt
) == 4)
3128 orig
= gimple_call_arg (stmt
, 3);
3130 if (!tree_fits_uhwi_p (destsize
))
3132 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3134 /* Check whether the format is a literal string constant. */
3135 fmt_str
= c_getstr (fmt
);
3136 if (fmt_str
== NULL
)
3139 if (!init_target_chars ())
3142 /* If the format doesn't contain % args or %%, use strcpy. */
3143 if (strchr (fmt_str
, target_percent
) == NULL
)
3145 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3149 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3153 /* We could expand this as
3154 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3156 memcpy (str, fmt_with_nul_at_cstm1, cst);
3157 but in the former case that might increase code size
3158 and in the latter case grow .rodata section too much.
3160 size_t len
= strlen (fmt_str
);
3164 gimple_seq stmts
= NULL
;
3165 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3166 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3167 if (gimple_call_lhs (stmt
))
3169 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
3170 build_int_cst (integer_type_node
, len
));
3171 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3172 gsi_replace_with_seq_vops (gsi
, stmts
);
3173 /* gsi now points at the assignment to the lhs, get a
3174 stmt iterator to the memcpy call.
3175 ??? We can't use gsi_for_stmt as that doesn't work when the
3176 CFG isn't built yet. */
3177 gimple_stmt_iterator gsi2
= *gsi
;
3183 gsi_replace_with_seq_vops (gsi
, stmts
);
3189 /* If the format is "%s", use strcpy if the result isn't used. */
3190 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3192 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3196 /* Don't crash on snprintf (str1, cst, "%s"). */
3200 tree orig_len
= get_maxval_strlen (orig
, 0);
3201 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3204 /* We could expand this as
3205 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3207 memcpy (str1, str2_with_nul_at_cstm1, cst);
3208 but in the former case that might increase code size
3209 and in the latter case grow .rodata section too much.
3211 if (compare_tree_int (orig_len
, destlen
) >= 0)
3214 /* Convert snprintf (str1, cst, "%s", str2) into
3215 strcpy (str1, str2) if strlen (str2) < cst. */
3216 gimple_seq stmts
= NULL
;
3217 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3218 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3219 if (gimple_call_lhs (stmt
))
3221 if (!useless_type_conversion_p (integer_type_node
,
3222 TREE_TYPE (orig_len
)))
3223 orig_len
= fold_convert (integer_type_node
, orig_len
);
3224 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
3225 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3226 gsi_replace_with_seq_vops (gsi
, stmts
);
3227 /* gsi now points at the assignment to the lhs, get a
3228 stmt iterator to the memcpy call.
3229 ??? We can't use gsi_for_stmt as that doesn't work when the
3230 CFG isn't built yet. */
3231 gimple_stmt_iterator gsi2
= *gsi
;
3237 gsi_replace_with_seq_vops (gsi
, stmts
);
/* NOTE(review): missing braces/returns in this extraction; confirm
   upstream before trusting the branch structure.  */
3245 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3246 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3247 more than 3 arguments, and ARG may be null in the 2-argument case.
3249 Return NULL_TREE if no simplification was possible, otherwise return the
3250 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3251 code of the function to be simplified. */
3254 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3255 tree fp
, tree fmt
, tree arg
,
3256 enum built_in_function fcode
)
3258 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3259 tree fn_fputc
, fn_fputs
;
3260 const char *fmt_str
= NULL
;
3262 /* If the return value is used, don't do the transformation. */
3263 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3266 /* Check whether the format is a literal string constant. */
3267 fmt_str
= c_getstr (fmt
);
3268 if (fmt_str
== NULL
)
3271 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3273 /* If we're using an unlocked function, assume the other
3274 unlocked functions exist explicitly. */
3275 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3276 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3280 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3281 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3284 if (!init_target_chars ())
3287 /* If the format doesn't contain % args or %%, use strcpy. */
3288 if (strchr (fmt_str
, target_percent
) == NULL
)
3290 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3294 /* If the format specifier was "", fprintf does nothing. */
3295 if (fmt_str
[0] == '\0')
3297 replace_call_with_value (gsi
, NULL_TREE
);
3301 /* When "string" doesn't contain %, replace all cases of
3302 fprintf (fp, string) with fputs (string, fp). The fputs
3303 builtin will take care of special cases like length == 1. */
3306 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3307 replace_call_with_call_and_fold (gsi
, repl
);
3312 /* The other optimizations can be done only on the non-va_list variants. */
3313 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3316 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3317 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3319 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3323 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3324 replace_call_with_call_and_fold (gsi
, repl
);
3329 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3330 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3333 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3337 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3338 replace_call_with_call_and_fold (gsi
, repl
);
/* NOTE(review): several lines missing (braces, returns, some length
   checks); confirm against upstream.  */
3346 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3347 FMT and ARG are the arguments to the call; we don't fold cases with
3348 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3350 Return NULL_TREE if no simplification was possible, otherwise return the
3351 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3352 code of the function to be simplified. */
3355 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3356 tree arg
, enum built_in_function fcode
)
3358 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3359 tree fn_putchar
, fn_puts
, newarg
;
3360 const char *fmt_str
= NULL
;
3362 /* If the return value is used, don't do the transformation. */
3363 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3366 /* Check whether the format is a literal string constant. */
3367 fmt_str
= c_getstr (fmt
);
3368 if (fmt_str
== NULL
)
3371 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3373 /* If we're using an unlocked function, assume the other
3374 unlocked functions exist explicitly. */
3375 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3376 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3380 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3381 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3384 if (!init_target_chars ())
3387 if (strcmp (fmt_str
, target_percent_s
) == 0
3388 || strchr (fmt_str
, target_percent
) == NULL
)
3392 if (strcmp (fmt_str
, target_percent_s
) == 0)
3394 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3397 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3400 str
= c_getstr (arg
);
3406 /* The format specifier doesn't contain any '%' characters. */
3407 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3413 /* If the string was "", printf does nothing. */
3416 replace_call_with_value (gsi
, NULL_TREE
);
3420 /* If the string has length of 1, call putchar. */
3423 /* Given printf("c"), (where c is any one character,)
3424 convert "c"[0] to an int and pass that to the replacement
3426 newarg
= build_int_cst (integer_type_node
, str
[0]);
3429 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3430 replace_call_with_call_and_fold (gsi
, repl
);
3436 /* If the string was "string\n", call puts("string"). */
3437 size_t len
= strlen (str
);
3438 if ((unsigned char)str
[len
- 1] == target_newline
3439 && (size_t) (int) len
== len
3443 tree offset_node
, string_cst
;
3445 /* Create a NUL-terminated string that's one char shorter
3446 than the original, stripping off the trailing '\n'. */
3447 newarg
= build_string_literal (len
, str
);
3448 string_cst
= string_constant (newarg
, &offset_node
);
3449 gcc_checking_assert (string_cst
3450 && (TREE_STRING_LENGTH (string_cst
)
3452 && integer_zerop (offset_node
)
3454 TREE_STRING_POINTER (string_cst
)[len
- 1]
3456 /* build_string_literal creates a new STRING_CST,
3457 modify it in place to avoid double copying. */
3458 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
3459 newstr
[len
- 1] = '\0';
3462 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3463 replace_call_with_call_and_fold (gsi
, repl
);
3468 /* We'd like to arrange to call fputs(string,stdout) here,
3469 but we need stdout and don't have a way to get it yet. */
3474 /* The other optimizations can be done only on the non-va_list variants. */
3475 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3478 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3479 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
3481 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3485 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
3486 replace_call_with_call_and_fold (gsi
, repl
);
3491 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3492 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3494 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
3499 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
3500 replace_call_with_call_and_fold (gsi
, repl
);
/* NOTE(review): declarations of MINLEN/MAXLEN/LENRANGE and several
   braces/returns are missing from this extraction; confirm upstream.
   Also note the `!get_range_strlen (...) && ...` condition reads oddly
   here -- verify the operator against the original source.  */
3510 /* Fold a call to __builtin_strlen with known length LEN. */
3513 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
3515 gimple
*stmt
= gsi_stmt (*gsi
);
3521 if (!get_range_strlen (gimple_call_arg (stmt
, 0), lenrange
, true)
3522 && lenrange
[0] && TREE_CODE (lenrange
[0]) == INTEGER_CST
3523 && lenrange
[1] && TREE_CODE (lenrange
[1]) == INTEGER_CST
)
3525 /* The range of lengths refers to either a single constant
3526 string or to the longest and shortest constant string
3527 referenced by the argument of the strlen() call, or to
3528 the strings that can possibly be stored in the arrays
3529 the argument refers to. */
3530 minlen
= wi::to_wide (lenrange
[0]);
3531 maxlen
= wi::to_wide (lenrange
[1]);
/* No usable range: fall back to [0, max_object_size () - 2].  */
3535 unsigned prec
= TYPE_PRECISION (sizetype
);
3537 minlen
= wi::shwi (0, prec
);
3538 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
3541 if (minlen
== maxlen
)
3543 lenrange
[0] = force_gimple_operand_gsi (gsi
, lenrange
[0], true, NULL
,
3544 true, GSI_SAME_STMT
);
3545 replace_call_with_value (gsi
, lenrange
[0]);
/* Otherwise just record the computed range on the call's result.  */
3549 tree lhs
= gimple_call_lhs (stmt
);
3550 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3551 set_range_info (lhs
, VR_RANGE
, minlen
, maxlen
);
/* NOTE(review): the #endif matching the #ifdef ACCEL_COMPILER and some
   braces/returns are missing from this extraction; confirm upstream.  */
3556 /* Fold a call to __builtin_acc_on_device. */
3559 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
3561 /* Defer folding until we know which compiler we're in. */
3562 if (symtab
->state
!= EXPANSION
)
3565 unsigned val_host
= GOMP_DEVICE_HOST
;
3566 unsigned val_dev
= GOMP_DEVICE_NONE
;
3568 #ifdef ACCEL_COMPILER
3569 val_host
= GOMP_DEVICE_NOT_HOST
;
3570 val_dev
= ACCEL_COMPILER_acc_device
;
3573 location_t loc
= gimple_location (gsi_stmt (*gsi
));
/* Emit: result = (arg0 == val_host) | (arg0 == val_dev) and replace the
   call with RESULT.  */
3575 tree host_eq
= make_ssa_name (boolean_type_node
);
3576 gimple
*host_ass
= gimple_build_assign
3577 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
3578 gimple_set_location (host_ass
, loc
);
3579 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
3581 tree dev_eq
= make_ssa_name (boolean_type_node
);
3582 gimple
*dev_ass
= gimple_build_assign
3583 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
3584 gimple_set_location (dev_ass
, loc
);
3585 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
3587 tree result
= make_ssa_name (boolean_type_node
);
3588 gimple
*result_ass
= gimple_build_assign
3589 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
3590 gimple_set_location (result_ass
, loc
);
3591 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
3593 replace_call_with_value (gsi
, result
);
/* NOTE(review): braces and the `if (fn_malloc)` guard / return statements
   appear to be missing from this extraction; confirm upstream.  */
3598 /* Fold realloc (0, n) -> malloc (n). */
3601 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
3603 gimple
*stmt
= gsi_stmt (*gsi
);
3604 tree arg
= gimple_call_arg (stmt
, 0);
3605 tree size
= gimple_call_arg (stmt
, 1);
/* realloc with a null pointer behaves like malloc (C99 7.20.3.4).  */
3607 if (operand_equal_p (arg
, null_pointer_node
, 0))
3609 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
3612 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
3613 replace_call_with_call_and_fold (gsi
, repl
);
3620 /* Fold the non-target builtin at *GSI and return whether any simplification
3624 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
3626 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
3627 tree callee
= gimple_call_fndecl (stmt
);
3629 /* Give up for always_inline inline builtins until they are
3631 if (avoid_folding_inline_builtin (callee
))
3634 unsigned n
= gimple_call_num_args (stmt
);
3635 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
3639 return gimple_fold_builtin_bcmp (gsi
);
3640 case BUILT_IN_BCOPY
:
3641 return gimple_fold_builtin_bcopy (gsi
);
3642 case BUILT_IN_BZERO
:
3643 return gimple_fold_builtin_bzero (gsi
);
3645 case BUILT_IN_MEMSET
:
3646 return gimple_fold_builtin_memset (gsi
,
3647 gimple_call_arg (stmt
, 1),
3648 gimple_call_arg (stmt
, 2));
3649 case BUILT_IN_MEMCPY
:
3650 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3651 gimple_call_arg (stmt
, 1), 0);
3652 case BUILT_IN_MEMPCPY
:
3653 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3654 gimple_call_arg (stmt
, 1), 1);
3655 case BUILT_IN_MEMMOVE
:
3656 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3657 gimple_call_arg (stmt
, 1), 3);
3658 case BUILT_IN_SPRINTF_CHK
:
3659 case BUILT_IN_VSPRINTF_CHK
:
3660 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
3661 case BUILT_IN_STRCAT_CHK
:
3662 return gimple_fold_builtin_strcat_chk (gsi
);
3663 case BUILT_IN_STRNCAT_CHK
:
3664 return gimple_fold_builtin_strncat_chk (gsi
);
3665 case BUILT_IN_STRLEN
:
3666 return gimple_fold_builtin_strlen (gsi
);
3667 case BUILT_IN_STRCPY
:
3668 return gimple_fold_builtin_strcpy (gsi
,
3669 gimple_call_arg (stmt
, 0),
3670 gimple_call_arg (stmt
, 1));
3671 case BUILT_IN_STRNCPY
:
3672 return gimple_fold_builtin_strncpy (gsi
,
3673 gimple_call_arg (stmt
, 0),
3674 gimple_call_arg (stmt
, 1),
3675 gimple_call_arg (stmt
, 2));
3676 case BUILT_IN_STRCAT
:
3677 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
3678 gimple_call_arg (stmt
, 1));
3679 case BUILT_IN_STRNCAT
:
3680 return gimple_fold_builtin_strncat (gsi
);
3681 case BUILT_IN_INDEX
:
3682 case BUILT_IN_STRCHR
:
3683 return gimple_fold_builtin_strchr (gsi
, false);
3684 case BUILT_IN_RINDEX
:
3685 case BUILT_IN_STRRCHR
:
3686 return gimple_fold_builtin_strchr (gsi
, true);
3687 case BUILT_IN_STRSTR
:
3688 return gimple_fold_builtin_strstr (gsi
);
3689 case BUILT_IN_STRCMP
:
3690 case BUILT_IN_STRCASECMP
:
3691 case BUILT_IN_STRNCMP
:
3692 case BUILT_IN_STRNCASECMP
:
3693 return gimple_fold_builtin_string_compare (gsi
);
3694 case BUILT_IN_MEMCHR
:
3695 return gimple_fold_builtin_memchr (gsi
);
3696 case BUILT_IN_FPUTS
:
3697 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
3698 gimple_call_arg (stmt
, 1), false);
3699 case BUILT_IN_FPUTS_UNLOCKED
:
3700 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
3701 gimple_call_arg (stmt
, 1), true);
3702 case BUILT_IN_MEMCPY_CHK
:
3703 case BUILT_IN_MEMPCPY_CHK
:
3704 case BUILT_IN_MEMMOVE_CHK
:
3705 case BUILT_IN_MEMSET_CHK
:
3706 return gimple_fold_builtin_memory_chk (gsi
,
3707 gimple_call_arg (stmt
, 0),
3708 gimple_call_arg (stmt
, 1),
3709 gimple_call_arg (stmt
, 2),
3710 gimple_call_arg (stmt
, 3),
3712 case BUILT_IN_STPCPY
:
3713 return gimple_fold_builtin_stpcpy (gsi
);
3714 case BUILT_IN_STRCPY_CHK
:
3715 case BUILT_IN_STPCPY_CHK
:
3716 return gimple_fold_builtin_stxcpy_chk (gsi
,
3717 gimple_call_arg (stmt
, 0),
3718 gimple_call_arg (stmt
, 1),
3719 gimple_call_arg (stmt
, 2),
3721 case BUILT_IN_STRNCPY_CHK
:
3722 case BUILT_IN_STPNCPY_CHK
:
3723 return gimple_fold_builtin_stxncpy_chk (gsi
,
3724 gimple_call_arg (stmt
, 0),
3725 gimple_call_arg (stmt
, 1),
3726 gimple_call_arg (stmt
, 2),
3727 gimple_call_arg (stmt
, 3),
3729 case BUILT_IN_SNPRINTF_CHK
:
3730 case BUILT_IN_VSNPRINTF_CHK
:
3731 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
3733 case BUILT_IN_FPRINTF
:
3734 case BUILT_IN_FPRINTF_UNLOCKED
:
3735 case BUILT_IN_VFPRINTF
:
3736 if (n
== 2 || n
== 3)
3737 return gimple_fold_builtin_fprintf (gsi
,
3738 gimple_call_arg (stmt
, 0),
3739 gimple_call_arg (stmt
, 1),
3741 ? gimple_call_arg (stmt
, 2)
3745 case BUILT_IN_FPRINTF_CHK
:
3746 case BUILT_IN_VFPRINTF_CHK
:
3747 if (n
== 3 || n
== 4)
3748 return gimple_fold_builtin_fprintf (gsi
,
3749 gimple_call_arg (stmt
, 0),
3750 gimple_call_arg (stmt
, 2),
3752 ? gimple_call_arg (stmt
, 3)
3756 case BUILT_IN_PRINTF
:
3757 case BUILT_IN_PRINTF_UNLOCKED
:
3758 case BUILT_IN_VPRINTF
:
3759 if (n
== 1 || n
== 2)
3760 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
3762 ? gimple_call_arg (stmt
, 1)
3763 : NULL_TREE
, fcode
);
3765 case BUILT_IN_PRINTF_CHK
:
3766 case BUILT_IN_VPRINTF_CHK
:
3767 if (n
== 2 || n
== 3)
3768 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
3770 ? gimple_call_arg (stmt
, 2)
3771 : NULL_TREE
, fcode
);
3773 case BUILT_IN_ACC_ON_DEVICE
:
3774 return gimple_fold_builtin_acc_on_device (gsi
,
3775 gimple_call_arg (stmt
, 0));
3776 case BUILT_IN_REALLOC
:
3777 return gimple_fold_builtin_realloc (gsi
);
3782 /* Try the generic builtin folder. */
3783 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
3784 tree result
= fold_call_stmt (stmt
, ignore
);
3788 STRIP_NOPS (result
);
3790 result
= fold_convert (gimple_call_return_type (stmt
), result
);
3791 if (!update_call_from_tree (gsi
, result
))
3792 gimplify_and_update_call_from_tree (gsi
, result
);
3799 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3800 function calls to constants, where possible. */
3803 fold_internal_goacc_dim (const gimple
*call
)
3805 int axis
= oacc_get_ifn_dim_arg (call
);
3806 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
3807 tree result
= NULL_TREE
;
3808 tree type
= TREE_TYPE (gimple_call_lhs (call
));
3810 switch (gimple_call_internal_fn (call
))
3812 case IFN_GOACC_DIM_POS
:
3813 /* If the size is 1, we know the answer. */
3815 result
= build_int_cst (type
, 0);
3817 case IFN_GOACC_DIM_SIZE
:
3818 /* If the size is not dynamic, we know the answer. */
3820 result
= build_int_cst (type
, size
);
3829 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3830 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3831 &var where var is only addressable because of such calls. */
3834 optimize_atomic_compare_exchange_p (gimple
*stmt
)
3836 if (gimple_call_num_args (stmt
) != 6
3837 || !flag_inline_atomics
3839 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
3840 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
3841 || !gimple_vdef (stmt
)
3842 || !gimple_vuse (stmt
))
3845 tree fndecl
= gimple_call_fndecl (stmt
);
3846 switch (DECL_FUNCTION_CODE (fndecl
))
3848 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
3849 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
3850 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
3851 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
3852 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
3858 tree expected
= gimple_call_arg (stmt
, 1);
3859 if (TREE_CODE (expected
) != ADDR_EXPR
3860 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
3863 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
3864 if (!is_gimple_reg_type (etype
)
3865 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
3866 || TREE_THIS_VOLATILE (etype
)
3867 || VECTOR_TYPE_P (etype
)
3868 || TREE_CODE (etype
) == COMPLEX_TYPE
3869 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3870 might not preserve all the bits. See PR71716. */
3871 || SCALAR_FLOAT_TYPE_P (etype
)
3872 || maybe_ne (TYPE_PRECISION (etype
),
3873 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
3876 tree weak
= gimple_call_arg (stmt
, 3);
3877 if (!integer_zerop (weak
) && !integer_onep (weak
))
3880 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3881 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
3882 machine_mode mode
= TYPE_MODE (itype
);
3884 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
3886 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
3889 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
3896 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3898 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3899 i = IMAGPART_EXPR <t>;
3901 e = REALPART_EXPR <t>; */
3904 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
3906 gimple
*stmt
= gsi_stmt (*gsi
);
3907 tree fndecl
= gimple_call_fndecl (stmt
);
3908 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3909 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
3910 tree ctype
= build_complex_type (itype
);
3911 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
3912 bool throws
= false;
3914 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
3916 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
3917 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
3918 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
3920 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
3921 build1 (VIEW_CONVERT_EXPR
, itype
,
3922 gimple_assign_lhs (g
)));
3923 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
3925 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
3926 + int_size_in_bytes (itype
);
3927 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
3928 gimple_call_arg (stmt
, 0),
3929 gimple_assign_lhs (g
),
3930 gimple_call_arg (stmt
, 2),
3931 build_int_cst (integer_type_node
, flag
),
3932 gimple_call_arg (stmt
, 4),
3933 gimple_call_arg (stmt
, 5));
3934 tree lhs
= make_ssa_name (ctype
);
3935 gimple_call_set_lhs (g
, lhs
);
3936 gimple_set_vdef (g
, gimple_vdef (stmt
));
3937 gimple_set_vuse (g
, gimple_vuse (stmt
));
3938 SSA_NAME_DEF_STMT (gimple_vdef (g
)) = g
;
3939 tree oldlhs
= gimple_call_lhs (stmt
);
3940 if (stmt_can_throw_internal (stmt
))
3943 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
3945 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
3946 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
3947 gimple_call_set_lhs (stmt
, NULL_TREE
);
3948 gsi_replace (gsi
, g
, true);
3951 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
3952 build1 (IMAGPART_EXPR
, itype
, lhs
));
3955 gsi_insert_on_edge_immediate (e
, g
);
3956 *gsi
= gsi_for_stmt (g
);
3959 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3960 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
3961 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3963 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
3964 build1 (REALPART_EXPR
, itype
, lhs
));
3965 if (throws
&& oldlhs
== NULL_TREE
)
3967 gsi_insert_on_edge_immediate (e
, g
);
3968 *gsi
= gsi_for_stmt (g
);
3971 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3972 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
3974 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
3976 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
3977 gimple_assign_lhs (g
)));
3978 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3980 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
3981 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3985 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3986 doesn't fit into TYPE. The test for overflow should be regardless of
3987 -fwrapv, and even for unsigned types. */
3990 arith_overflowed_p (enum tree_code code
, const_tree type
,
3991 const_tree arg0
, const_tree arg1
)
3993 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION
* 2) widest2_int
;
3994 typedef generic_wide_int
<wi::extended_tree
<WIDE_INT_MAX_PRECISION
* 2> >
3996 widest2_int warg0
= widest2_int_cst (arg0
);
3997 widest2_int warg1
= widest2_int_cst (arg1
);
4001 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
4002 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
4003 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
4004 default: gcc_unreachable ();
4006 signop sign
= TYPE_SIGN (type
);
4007 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
4009 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
4012 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4013 The statement may be replaced by another statement, e.g., if the call
4014 simplifies to a constant value. Return true if any changes were made.
4015 It is assumed that the operands have been previously folded. */
4018 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
4020 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
4022 bool changed
= false;
4025 /* Fold *& in call arguments. */
4026 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4027 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
4029 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
4032 gimple_call_set_arg (stmt
, i
, tmp
);
4037 /* Check for virtual calls that became direct calls. */
4038 callee
= gimple_call_fn (stmt
);
4039 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
4041 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
4043 if (dump_file
&& virtual_method_call_p (callee
)
4044 && !possible_polymorphic_call_target_p
4045 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
4046 (OBJ_TYPE_REF_EXPR (callee
)))))
4049 "Type inheritance inconsistent devirtualization of ");
4050 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
4051 fprintf (dump_file
, " to ");
4052 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
4053 fprintf (dump_file
, "\n");
4056 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
4059 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
4062 vec
<cgraph_node
*>targets
4063 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
4064 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4066 tree lhs
= gimple_call_lhs (stmt
);
4067 if (dump_enabled_p ())
4069 location_t loc
= gimple_location_safe (stmt
);
4070 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
4071 "folding virtual function call to %s\n",
4072 targets
.length () == 1
4073 ? targets
[0]->name ()
4074 : "__builtin_unreachable");
4076 if (targets
.length () == 1)
4078 tree fndecl
= targets
[0]->decl
;
4079 gimple_call_set_fndecl (stmt
, fndecl
);
4081 /* If changing the call to __cxa_pure_virtual
4082 or similar noreturn function, adjust gimple_call_fntype
4084 if (gimple_call_noreturn_p (stmt
)
4085 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
4086 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
4087 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
4089 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
4090 /* If the call becomes noreturn, remove the lhs. */
4092 && gimple_call_noreturn_p (stmt
)
4093 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
4094 || should_remove_lhs_p (lhs
)))
4096 if (TREE_CODE (lhs
) == SSA_NAME
)
4098 tree var
= create_tmp_var (TREE_TYPE (lhs
));
4099 tree def
= get_or_create_ssa_default_def (cfun
, var
);
4100 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
4101 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
4103 gimple_call_set_lhs (stmt
, NULL_TREE
);
4105 maybe_remove_unused_call_args (cfun
, stmt
);
4109 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4110 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
4111 gimple_set_location (new_stmt
, gimple_location (stmt
));
4112 /* If the call had a SSA name as lhs morph that into
4113 an uninitialized value. */
4114 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
4116 tree var
= create_tmp_var (TREE_TYPE (lhs
));
4117 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
4118 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
4119 set_ssa_default_def (cfun
, var
, lhs
);
4121 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
4122 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
4123 gsi_replace (gsi
, new_stmt
, false);
4130 /* Check for indirect calls that became direct calls, and then
4131 no longer require a static chain. */
4132 if (gimple_call_chain (stmt
))
4134 tree fn
= gimple_call_fndecl (stmt
);
4135 if (fn
&& !DECL_STATIC_CHAIN (fn
))
4137 gimple_call_set_chain (stmt
, NULL
);
4142 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
4145 gimple_call_set_chain (stmt
, tmp
);
4154 /* Check for builtins that CCP can handle using information not
4155 available in the generic fold routines. */
4156 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4158 if (gimple_fold_builtin (gsi
))
4161 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
4163 changed
|= targetm
.gimple_fold_builtin (gsi
);
4165 else if (gimple_call_internal_p (stmt
))
4167 enum tree_code subcode
= ERROR_MARK
;
4168 tree result
= NULL_TREE
;
4169 bool cplx_result
= false;
4170 tree overflow
= NULL_TREE
;
4171 switch (gimple_call_internal_fn (stmt
))
4173 case IFN_BUILTIN_EXPECT
:
4174 result
= fold_builtin_expect (gimple_location (stmt
),
4175 gimple_call_arg (stmt
, 0),
4176 gimple_call_arg (stmt
, 1),
4177 gimple_call_arg (stmt
, 2));
4179 case IFN_UBSAN_OBJECT_SIZE
:
4181 tree offset
= gimple_call_arg (stmt
, 1);
4182 tree objsize
= gimple_call_arg (stmt
, 2);
4183 if (integer_all_onesp (objsize
)
4184 || (TREE_CODE (offset
) == INTEGER_CST
4185 && TREE_CODE (objsize
) == INTEGER_CST
4186 && tree_int_cst_le (offset
, objsize
)))
4188 replace_call_with_value (gsi
, NULL_TREE
);
4194 if (integer_zerop (gimple_call_arg (stmt
, 1)))
4196 replace_call_with_value (gsi
, NULL_TREE
);
4200 case IFN_UBSAN_BOUNDS
:
4202 tree index
= gimple_call_arg (stmt
, 1);
4203 tree bound
= gimple_call_arg (stmt
, 2);
4204 if (TREE_CODE (index
) == INTEGER_CST
4205 && TREE_CODE (bound
) == INTEGER_CST
)
4207 index
= fold_convert (TREE_TYPE (bound
), index
);
4208 if (TREE_CODE (index
) == INTEGER_CST
4209 && tree_int_cst_le (index
, bound
))
4211 replace_call_with_value (gsi
, NULL_TREE
);
4217 case IFN_GOACC_DIM_SIZE
:
4218 case IFN_GOACC_DIM_POS
:
4219 result
= fold_internal_goacc_dim (stmt
);
4221 case IFN_UBSAN_CHECK_ADD
:
4222 subcode
= PLUS_EXPR
;
4224 case IFN_UBSAN_CHECK_SUB
:
4225 subcode
= MINUS_EXPR
;
4227 case IFN_UBSAN_CHECK_MUL
:
4228 subcode
= MULT_EXPR
;
4230 case IFN_ADD_OVERFLOW
:
4231 subcode
= PLUS_EXPR
;
4234 case IFN_SUB_OVERFLOW
:
4235 subcode
= MINUS_EXPR
;
4238 case IFN_MUL_OVERFLOW
:
4239 subcode
= MULT_EXPR
;
4245 if (subcode
!= ERROR_MARK
)
4247 tree arg0
= gimple_call_arg (stmt
, 0);
4248 tree arg1
= gimple_call_arg (stmt
, 1);
4249 tree type
= TREE_TYPE (arg0
);
4252 tree lhs
= gimple_call_lhs (stmt
);
4253 if (lhs
== NULL_TREE
)
4256 type
= TREE_TYPE (TREE_TYPE (lhs
));
4258 if (type
== NULL_TREE
)
4260 /* x = y + 0; x = y - 0; x = y * 0; */
4261 else if (integer_zerop (arg1
))
4262 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
4263 /* x = 0 + y; x = 0 * y; */
4264 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
4265 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
4267 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
4268 result
= integer_zero_node
;
4269 /* x = y * 1; x = 1 * y; */
4270 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
4272 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
4274 else if (TREE_CODE (arg0
) == INTEGER_CST
4275 && TREE_CODE (arg1
) == INTEGER_CST
)
4278 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
4279 fold_convert (type
, arg1
));
4281 result
= int_const_binop (subcode
, arg0
, arg1
);
4282 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
4285 overflow
= build_one_cst (type
);
4292 if (result
== integer_zero_node
)
4293 result
= build_zero_cst (type
);
4294 else if (cplx_result
&& TREE_TYPE (result
) != type
)
4296 if (TREE_CODE (result
) == INTEGER_CST
)
4298 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
4300 overflow
= build_one_cst (type
);
4302 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
4303 && TYPE_UNSIGNED (type
))
4304 || (TYPE_PRECISION (type
)
4305 < (TYPE_PRECISION (TREE_TYPE (result
))
4306 + (TYPE_UNSIGNED (TREE_TYPE (result
))
4307 && !TYPE_UNSIGNED (type
)))))
4310 result
= fold_convert (type
, result
);
4317 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
4318 result
= drop_tree_overflow (result
);
4321 if (overflow
== NULL_TREE
)
4322 overflow
= build_zero_cst (TREE_TYPE (result
));
4323 tree ctype
= build_complex_type (TREE_TYPE (result
));
4324 if (TREE_CODE (result
) == INTEGER_CST
4325 && TREE_CODE (overflow
) == INTEGER_CST
)
4326 result
= build_complex (ctype
, result
, overflow
);
4328 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
4329 ctype
, result
, overflow
);
4331 if (!update_call_from_tree (gsi
, result
))
4332 gimplify_and_update_call_from_tree (gsi
, result
);
4341 /* Return true whether NAME has a use on STMT. */
4344 has_use_on_stmt (tree name
, gimple
*stmt
)
4346 imm_use_iterator iter
;
4347 use_operand_p use_p
;
4348 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
4349 if (USE_STMT (use_p
) == stmt
)
4354 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4357 Replaces *GSI with the simplification result in RCODE and OPS
4358 and the associated statements in *SEQ. Does the replacement
4359 according to INPLACE and returns true if the operation succeeded. */
4362 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
4363 code_helper rcode
, tree
*ops
,
4364 gimple_seq
*seq
, bool inplace
)
4366 gimple
*stmt
= gsi_stmt (*gsi
);
4368 /* Play safe and do not allow abnormals to be mentioned in
4369 newly created statements. See also maybe_push_res_to_seq.
4370 As an exception allow such uses if there was a use of the
4371 same SSA name on the old stmt. */
4372 if ((TREE_CODE (ops
[0]) == SSA_NAME
4373 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0])
4374 && !has_use_on_stmt (ops
[0], stmt
))
4376 && TREE_CODE (ops
[1]) == SSA_NAME
4377 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1])
4378 && !has_use_on_stmt (ops
[1], stmt
))
4380 && TREE_CODE (ops
[2]) == SSA_NAME
4381 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])
4382 && !has_use_on_stmt (ops
[2], stmt
))
4383 || (COMPARISON_CLASS_P (ops
[0])
4384 && ((TREE_CODE (TREE_OPERAND (ops
[0], 0)) == SSA_NAME
4385 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], 0))
4386 && !has_use_on_stmt (TREE_OPERAND (ops
[0], 0), stmt
))
4387 || (TREE_CODE (TREE_OPERAND (ops
[0], 1)) == SSA_NAME
4388 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], 1))
4389 && !has_use_on_stmt (TREE_OPERAND (ops
[0], 1), stmt
)))))
4392 /* Don't insert new statements when INPLACE is true, even if we could
4393 reuse STMT for the final statement. */
4394 if (inplace
&& !gimple_seq_empty_p (*seq
))
4397 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
4399 gcc_assert (rcode
.is_tree_code ());
4400 if (TREE_CODE_CLASS ((enum tree_code
)rcode
) == tcc_comparison
4401 /* GIMPLE_CONDs condition may not throw. */
4402 && (!flag_exceptions
4403 || !cfun
->can_throw_non_call_exceptions
4404 || !operation_could_trap_p (rcode
,
4405 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
4407 gimple_cond_set_condition (cond_stmt
, rcode
, ops
[0], ops
[1]);
4408 else if (rcode
== SSA_NAME
)
4409 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
4410 build_zero_cst (TREE_TYPE (ops
[0])));
4411 else if (rcode
== INTEGER_CST
)
4413 if (integer_zerop (ops
[0]))
4414 gimple_cond_make_false (cond_stmt
);
4416 gimple_cond_make_true (cond_stmt
);
4420 tree res
= maybe_push_res_to_seq (rcode
, boolean_type_node
,
4424 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
4425 build_zero_cst (TREE_TYPE (res
)));
4429 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4431 fprintf (dump_file
, "gimple_simplified to ");
4432 if (!gimple_seq_empty_p (*seq
))
4433 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4434 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
4437 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
4440 else if (is_gimple_assign (stmt
)
4441 && rcode
.is_tree_code ())
4444 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (rcode
))
4446 maybe_build_generic_op (rcode
,
4447 TREE_TYPE (gimple_assign_lhs (stmt
)), ops
);
4448 gimple_assign_set_rhs_with_ops (gsi
, rcode
, ops
[0], ops
[1], ops
[2]);
4449 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4451 fprintf (dump_file
, "gimple_simplified to ");
4452 if (!gimple_seq_empty_p (*seq
))
4453 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4454 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
4457 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
4461 else if (rcode
.is_fn_code ()
4462 && gimple_call_combined_fn (stmt
) == rcode
)
4465 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4467 gcc_assert (ops
[i
] != NULL_TREE
);
4468 gimple_call_set_arg (stmt
, i
, ops
[i
]);
4471 gcc_assert (ops
[i
] == NULL_TREE
);
4472 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4474 fprintf (dump_file
, "gimple_simplified to ");
4475 if (!gimple_seq_empty_p (*seq
))
4476 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4477 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
4479 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
4484 if (gimple_has_lhs (stmt
))
4486 tree lhs
= gimple_get_lhs (stmt
);
4487 if (!maybe_push_res_to_seq (rcode
, TREE_TYPE (lhs
),
4490 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4492 fprintf (dump_file
, "gimple_simplified to ");
4493 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4495 gsi_replace_with_seq_vops (gsi
, *seq
);
4505 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4508 maybe_canonicalize_mem_ref_addr (tree
*t
)
4512 if (TREE_CODE (*t
) == ADDR_EXPR
)
4513 t
= &TREE_OPERAND (*t
, 0);
4515 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4516 generic vector extension. The actual vector referenced is
4517 view-converted to an array type for this purpose. If the index
4518 is constant the canonical representation in the middle-end is a
4519 BIT_FIELD_REF so re-write the former to the latter here. */
4520 if (TREE_CODE (*t
) == ARRAY_REF
4521 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
4522 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
4523 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
4525 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
4526 if (VECTOR_TYPE_P (vtype
))
4528 tree low
= array_ref_low_bound (*t
);
4529 if (TREE_CODE (low
) == INTEGER_CST
)
4531 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
4533 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
4534 wi::to_widest (low
));
4535 idx
= wi::mul (idx
, wi::to_widest
4536 (TYPE_SIZE (TREE_TYPE (*t
))));
4538 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
4539 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
4541 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
4543 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
4544 TYPE_SIZE (TREE_TYPE (*t
)),
4545 wide_int_to_tree (bitsizetype
, idx
));
4553 while (handled_component_p (*t
))
4554 t
= &TREE_OPERAND (*t
, 0);
4556 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4557 of invariant addresses into a SSA name MEM_REF address. */
4558 if (TREE_CODE (*t
) == MEM_REF
4559 || TREE_CODE (*t
) == TARGET_MEM_REF
)
4561 tree addr
= TREE_OPERAND (*t
, 0);
4562 if (TREE_CODE (addr
) == ADDR_EXPR
4563 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
4564 || handled_component_p (TREE_OPERAND (addr
, 0))))
4568 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
4573 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
4574 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
4575 TREE_OPERAND (*t
, 1),
4576 size_int (coffset
));
4579 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
4580 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
4583 /* Canonicalize back MEM_REFs to plain reference trees if the object
4584 accessed is a decl that has the same access semantics as the MEM_REF. */
4585 if (TREE_CODE (*t
) == MEM_REF
4586 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
4587 && integer_zerop (TREE_OPERAND (*t
, 1))
4588 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
4590 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4591 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
4592 if (/* Same volatile qualification. */
4593 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
4594 /* Same TBAA behavior with -fstrict-aliasing. */
4595 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
4596 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
4597 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
4598 /* Same alignment. */
4599 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
4600 /* We have to look out here to not drop a required conversion
4601 from the rhs to the lhs if *t appears on the lhs or vice-versa
4602 if it appears on the rhs. Thus require strict type
4604 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
4606 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4611 /* Canonicalize TARGET_MEM_REF in particular with respect to
4612 the indexes becoming constant. */
4613 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
4615 tree tem
= maybe_fold_tmr (*t
);
4626 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4627 distinguishes both cases. */
4630 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
4632 bool changed
= false;
4633 gimple
*stmt
= gsi_stmt (*gsi
);
4634 bool nowarning
= gimple_no_warning_p (stmt
);
4636 fold_defer_overflow_warnings ();
4638 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4640 ??? This shouldn't be done in generic folding but in the
4641 propagation helpers which also know whether an address was
4643 Also canonicalize operand order. */
4644 switch (gimple_code (stmt
))
4647 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
4649 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
4650 if ((REFERENCE_CLASS_P (*rhs
)
4651 || TREE_CODE (*rhs
) == ADDR_EXPR
)
4652 && maybe_canonicalize_mem_ref_addr (rhs
))
4654 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
4655 if (REFERENCE_CLASS_P (*lhs
)
4656 && maybe_canonicalize_mem_ref_addr (lhs
))
4661 /* Canonicalize operand order. */
4662 enum tree_code code
= gimple_assign_rhs_code (stmt
);
4663 if (TREE_CODE_CLASS (code
) == tcc_comparison
4664 || commutative_tree_code (code
)
4665 || commutative_ternary_tree_code (code
))
4667 tree rhs1
= gimple_assign_rhs1 (stmt
);
4668 tree rhs2
= gimple_assign_rhs2 (stmt
);
4669 if (tree_swap_operands_p (rhs1
, rhs2
))
4671 gimple_assign_set_rhs1 (stmt
, rhs2
);
4672 gimple_assign_set_rhs2 (stmt
, rhs1
);
4673 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4674 gimple_assign_set_rhs_code (stmt
,
4675 swap_tree_comparison (code
));
4683 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4685 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
4686 if (REFERENCE_CLASS_P (*arg
)
4687 && maybe_canonicalize_mem_ref_addr (arg
))
4690 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
4692 && REFERENCE_CLASS_P (*lhs
)
4693 && maybe_canonicalize_mem_ref_addr (lhs
))
4699 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4700 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4702 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4703 tree op
= TREE_VALUE (link
);
4704 if (REFERENCE_CLASS_P (op
)
4705 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
4708 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4710 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4711 tree op
= TREE_VALUE (link
);
4712 if ((REFERENCE_CLASS_P (op
)
4713 || TREE_CODE (op
) == ADDR_EXPR
)
4714 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
4720 if (gimple_debug_bind_p (stmt
))
4722 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
4724 && (REFERENCE_CLASS_P (*val
)
4725 || TREE_CODE (*val
) == ADDR_EXPR
)
4726 && maybe_canonicalize_mem_ref_addr (val
))
4732 /* Canonicalize operand order. */
4733 tree lhs
= gimple_cond_lhs (stmt
);
4734 tree rhs
= gimple_cond_rhs (stmt
);
4735 if (tree_swap_operands_p (lhs
, rhs
))
4737 gcond
*gc
= as_a
<gcond
*> (stmt
);
4738 gimple_cond_set_lhs (gc
, rhs
);
4739 gimple_cond_set_rhs (gc
, lhs
);
4740 gimple_cond_set_code (gc
,
4741 swap_tree_comparison (gimple_cond_code (gc
)));
4748 /* Dispatch to pattern-based folding. */
4750 || is_gimple_assign (stmt
)
4751 || gimple_code (stmt
) == GIMPLE_COND
)
4753 gimple_seq seq
= NULL
;
4756 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
4757 valueize
, valueize
))
4759 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
4762 gimple_seq_discard (seq
);
4766 stmt
= gsi_stmt (*gsi
);
4768 /* Fold the main computation performed by the statement. */
4769 switch (gimple_code (stmt
))
4773 /* Try to canonicalize for boolean-typed X the comparisons
4774 X == 0, X == 1, X != 0, and X != 1. */
4775 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
4776 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
4778 tree lhs
= gimple_assign_lhs (stmt
);
4779 tree op1
= gimple_assign_rhs1 (stmt
);
4780 tree op2
= gimple_assign_rhs2 (stmt
);
4781 tree type
= TREE_TYPE (op1
);
4783 /* Check whether the comparison operands are of the same boolean
4784 type as the result type is.
4785 Check that second operand is an integer-constant with value
4787 if (TREE_CODE (op2
) == INTEGER_CST
4788 && (integer_zerop (op2
) || integer_onep (op2
))
4789 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
4791 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
4792 bool is_logical_not
= false;
4794 /* X == 0 and X != 1 is a logical-not.of X
4795 X == 1 and X != 0 is X */
4796 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
4797 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
4798 is_logical_not
= true;
4800 if (is_logical_not
== false)
4801 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
4802 /* Only for one-bit precision typed X the transformation
4803 !X -> ~X is valied. */
4804 else if (TYPE_PRECISION (type
) == 1)
4805 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
4806 /* Otherwise we use !X -> X ^ 1. */
4808 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
4809 build_int_cst (type
, 1));
4815 unsigned old_num_ops
= gimple_num_ops (stmt
);
4816 tree lhs
= gimple_assign_lhs (stmt
);
4817 tree new_rhs
= fold_gimple_assign (gsi
);
4819 && !useless_type_conversion_p (TREE_TYPE (lhs
),
4820 TREE_TYPE (new_rhs
)))
4821 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
4824 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
4826 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
4833 changed
|= gimple_fold_call (gsi
, inplace
);
4837 /* Fold *& in asm operands. */
4839 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4841 const char **oconstraints
;
4842 const char *constraint
;
4843 bool allows_mem
, allows_reg
;
4845 noutputs
= gimple_asm_noutputs (asm_stmt
);
4846 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4848 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4850 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4851 tree op
= TREE_VALUE (link
);
4853 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4854 if (REFERENCE_CLASS_P (op
)
4855 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
4857 TREE_VALUE (link
) = op
;
4861 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4863 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4864 tree op
= TREE_VALUE (link
);
4866 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4867 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
4868 oconstraints
, &allows_mem
, &allows_reg
);
4869 if (REFERENCE_CLASS_P (op
)
4870 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
4873 TREE_VALUE (link
) = op
;
4881 if (gimple_debug_bind_p (stmt
))
4883 tree val
= gimple_debug_bind_get_value (stmt
);
4885 && REFERENCE_CLASS_P (val
))
4887 tree tem
= maybe_fold_reference (val
, false);
4890 gimple_debug_bind_set_value (stmt
, tem
);
4895 && TREE_CODE (val
) == ADDR_EXPR
)
4897 tree ref
= TREE_OPERAND (val
, 0);
4898 tree tem
= maybe_fold_reference (ref
, false);
4901 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
4902 gimple_debug_bind_set_value (stmt
, tem
);
4911 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
4912 tree ret
= gimple_return_retval(ret_stmt
);
4914 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
4916 tree val
= valueize (ret
);
4917 if (val
&& val
!= ret
4918 && may_propagate_copy (ret
, val
))
4920 gimple_return_set_retval (ret_stmt
, val
);
4930 stmt
= gsi_stmt (*gsi
);
4932 /* Fold *& on the lhs. */
4933 if (gimple_has_lhs (stmt
))
4935 tree lhs
= gimple_get_lhs (stmt
);
4936 if (lhs
&& REFERENCE_CLASS_P (lhs
))
4938 tree new_lhs
= maybe_fold_reference (lhs
, true);
4941 gimple_set_lhs (stmt
, new_lhs
);
4947 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
4951 /* Valueziation callback that ends up not following SSA edges. */
4954 no_follow_ssa_edges (tree
)
4959 /* Valueization callback that ends up following single-use SSA edges only. */
4962 follow_single_use_edges (tree val
)
4964 if (TREE_CODE (val
) == SSA_NAME
4965 && !has_single_use (val
))
4970 /* Valueization callback that follows all SSA edges. */
4973 follow_all_ssa_edges (tree val
)
4978 /* Fold the statement pointed to by GSI. In some cases, this function may
4979 replace the whole statement with a new one. Returns true iff folding
4981 The statement pointed to by GSI should be in valid gimple form but may
4982 be in unfolded state as resulting from for example constant propagation
4983 which can produce *&x = 0. */
4986 fold_stmt (gimple_stmt_iterator
*gsi
)
4988 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
4992 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
4994 return fold_stmt_1 (gsi
, false, valueize
);
4997 /* Perform the minimal folding on statement *GSI. Only operations like
4998 *&x created by constant propagation are handled. The statement cannot
4999 be replaced with a new one. Return true if the statement was
5000 changed, false otherwise.
5001 The statement *GSI should be in valid gimple form but may
5002 be in unfolded state as resulting from for example constant propagation
5003 which can produce *&x = 0. */
5006 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
5008 gimple
*stmt
= gsi_stmt (*gsi
);
5009 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
5010 gcc_assert (gsi_stmt (*gsi
) == stmt
);
5014 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5015 if EXPR is null or we don't know how.
5016 If non-null, the result always has boolean type. */
5019 canonicalize_bool (tree expr
, bool invert
)
5025 if (integer_nonzerop (expr
))
5026 return boolean_false_node
;
5027 else if (integer_zerop (expr
))
5028 return boolean_true_node
;
5029 else if (TREE_CODE (expr
) == SSA_NAME
)
5030 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
5031 build_int_cst (TREE_TYPE (expr
), 0));
5032 else if (COMPARISON_CLASS_P (expr
))
5033 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
5035 TREE_OPERAND (expr
, 0),
5036 TREE_OPERAND (expr
, 1));
5042 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
5044 if (integer_nonzerop (expr
))
5045 return boolean_true_node
;
5046 else if (integer_zerop (expr
))
5047 return boolean_false_node
;
5048 else if (TREE_CODE (expr
) == SSA_NAME
)
5049 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
5050 build_int_cst (TREE_TYPE (expr
), 0));
5051 else if (COMPARISON_CLASS_P (expr
))
5052 return fold_build2 (TREE_CODE (expr
),
5054 TREE_OPERAND (expr
, 0),
5055 TREE_OPERAND (expr
, 1));
5061 /* Check to see if a boolean expression EXPR is logically equivalent to the
5062 comparison (OP1 CODE OP2). Check for various identities involving
5066 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
5067 const_tree op1
, const_tree op2
)
5071 /* The obvious case. */
5072 if (TREE_CODE (expr
) == code
5073 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
5074 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
5077 /* Check for comparing (name, name != 0) and the case where expr
5078 is an SSA_NAME with a definition matching the comparison. */
5079 if (TREE_CODE (expr
) == SSA_NAME
5080 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
5082 if (operand_equal_p (expr
, op1
, 0))
5083 return ((code
== NE_EXPR
&& integer_zerop (op2
))
5084 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
5085 s
= SSA_NAME_DEF_STMT (expr
);
5086 if (is_gimple_assign (s
)
5087 && gimple_assign_rhs_code (s
) == code
5088 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
5089 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
5093 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5094 of name is a comparison, recurse. */
5095 if (TREE_CODE (op1
) == SSA_NAME
5096 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
5098 s
= SSA_NAME_DEF_STMT (op1
);
5099 if (is_gimple_assign (s
)
5100 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
5102 enum tree_code c
= gimple_assign_rhs_code (s
);
5103 if ((c
== NE_EXPR
&& integer_zerop (op2
))
5104 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
5105 return same_bool_comparison_p (expr
, c
,
5106 gimple_assign_rhs1 (s
),
5107 gimple_assign_rhs2 (s
));
5108 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
5109 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
5110 return same_bool_comparison_p (expr
,
5111 invert_tree_comparison (c
, false),
5112 gimple_assign_rhs1 (s
),
5113 gimple_assign_rhs2 (s
));
5119 /* Check to see if two boolean expressions OP1 and OP2 are logically
5123 same_bool_result_p (const_tree op1
, const_tree op2
)
5125 /* Simple cases first. */
5126 if (operand_equal_p (op1
, op2
, 0))
5129 /* Check the cases where at least one of the operands is a comparison.
5130 These are a bit smarter than operand_equal_p in that they apply some
5131 identifies on SSA_NAMEs. */
5132 if (COMPARISON_CLASS_P (op2
)
5133 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
5134 TREE_OPERAND (op2
, 0),
5135 TREE_OPERAND (op2
, 1)))
5137 if (COMPARISON_CLASS_P (op1
)
5138 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
5139 TREE_OPERAND (op1
, 0),
5140 TREE_OPERAND (op1
, 1)))
5147 /* Forward declarations for some mutually recursive functions. */
5150 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5151 enum tree_code code2
, tree op2a
, tree op2b
);
5153 and_var_with_comparison (tree var
, bool invert
,
5154 enum tree_code code2
, tree op2a
, tree op2b
);
5156 and_var_with_comparison_1 (gimple
*stmt
,
5157 enum tree_code code2
, tree op2a
, tree op2b
);
5159 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5160 enum tree_code code2
, tree op2a
, tree op2b
);
5162 or_var_with_comparison (tree var
, bool invert
,
5163 enum tree_code code2
, tree op2a
, tree op2b
);
5165 or_var_with_comparison_1 (gimple
*stmt
,
5166 enum tree_code code2
, tree op2a
, tree op2b
);
5168 /* Helper function for and_comparisons_1: try to simplify the AND of the
5169 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5170 If INVERT is true, invert the value of the VAR before doing the AND.
5171 Return NULL_EXPR if we can't simplify this to a single expression. */
5174 and_var_with_comparison (tree var
, bool invert
,
5175 enum tree_code code2
, tree op2a
, tree op2b
)
5178 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
5180 /* We can only deal with variables whose definitions are assignments. */
5181 if (!is_gimple_assign (stmt
))
5184 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5185 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5186 Then we only have to consider the simpler non-inverted cases. */
5188 t
= or_var_with_comparison_1 (stmt
,
5189 invert_tree_comparison (code2
, false),
5192 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
5193 return canonicalize_bool (t
, invert
);
5196 /* Try to simplify the AND of the ssa variable defined by the assignment
5197 STMT with the comparison specified by (OP2A CODE2 OP2B).
5198 Return NULL_EXPR if we can't simplify this to a single expression. */
5201 and_var_with_comparison_1 (gimple
*stmt
,
5202 enum tree_code code2
, tree op2a
, tree op2b
)
5204 tree var
= gimple_assign_lhs (stmt
);
5205 tree true_test_var
= NULL_TREE
;
5206 tree false_test_var
= NULL_TREE
;
5207 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5209 /* Check for identities like (var AND (var == 0)) => false. */
5210 if (TREE_CODE (op2a
) == SSA_NAME
5211 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
5213 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5214 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5216 true_test_var
= op2a
;
5217 if (var
== true_test_var
)
5220 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5221 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5223 false_test_var
= op2a
;
5224 if (var
== false_test_var
)
5225 return boolean_false_node
;
5229 /* If the definition is a comparison, recurse on it. */
5230 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5232 tree t
= and_comparisons_1 (innercode
,
5233 gimple_assign_rhs1 (stmt
),
5234 gimple_assign_rhs2 (stmt
),
5242 /* If the definition is an AND or OR expression, we may be able to
5243 simplify by reassociating. */
5244 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5245 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5247 tree inner1
= gimple_assign_rhs1 (stmt
);
5248 tree inner2
= gimple_assign_rhs2 (stmt
);
5251 tree partial
= NULL_TREE
;
5252 bool is_and
= (innercode
== BIT_AND_EXPR
);
5254 /* Check for boolean identities that don't require recursive examination
5256 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5257 inner1 AND (inner1 OR inner2) => inner1
5258 !inner1 AND (inner1 AND inner2) => false
5259 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5260 Likewise for similar cases involving inner2. */
5261 if (inner1
== true_test_var
)
5262 return (is_and
? var
: inner1
);
5263 else if (inner2
== true_test_var
)
5264 return (is_and
? var
: inner2
);
5265 else if (inner1
== false_test_var
)
5267 ? boolean_false_node
5268 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
5269 else if (inner2
== false_test_var
)
5271 ? boolean_false_node
5272 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
5274 /* Next, redistribute/reassociate the AND across the inner tests.
5275 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5276 if (TREE_CODE (inner1
) == SSA_NAME
5277 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5278 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5279 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
5280 gimple_assign_rhs1 (s
),
5281 gimple_assign_rhs2 (s
),
5282 code2
, op2a
, op2b
)))
5284 /* Handle the AND case, where we are reassociating:
5285 (inner1 AND inner2) AND (op2a code2 op2b)
5287 If the partial result t is a constant, we win. Otherwise
5288 continue on to try reassociating with the other inner test. */
5291 if (integer_onep (t
))
5293 else if (integer_zerop (t
))
5294 return boolean_false_node
;
5297 /* Handle the OR case, where we are redistributing:
5298 (inner1 OR inner2) AND (op2a code2 op2b)
5299 => (t OR (inner2 AND (op2a code2 op2b))) */
5300 else if (integer_onep (t
))
5301 return boolean_true_node
;
5303 /* Save partial result for later. */
5307 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5308 if (TREE_CODE (inner2
) == SSA_NAME
5309 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5310 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5311 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
5312 gimple_assign_rhs1 (s
),
5313 gimple_assign_rhs2 (s
),
5314 code2
, op2a
, op2b
)))
5316 /* Handle the AND case, where we are reassociating:
5317 (inner1 AND inner2) AND (op2a code2 op2b)
5318 => (inner1 AND t) */
5321 if (integer_onep (t
))
5323 else if (integer_zerop (t
))
5324 return boolean_false_node
;
5325 /* If both are the same, we can apply the identity
5327 else if (partial
&& same_bool_result_p (t
, partial
))
5331 /* Handle the OR case. where we are redistributing:
5332 (inner1 OR inner2) AND (op2a code2 op2b)
5333 => (t OR (inner1 AND (op2a code2 op2b)))
5334 => (t OR partial) */
5337 if (integer_onep (t
))
5338 return boolean_true_node
;
5341 /* We already got a simplification for the other
5342 operand to the redistributed OR expression. The
5343 interesting case is when at least one is false.
5344 Or, if both are the same, we can apply the identity
5346 if (integer_zerop (partial
))
5348 else if (integer_zerop (t
))
5350 else if (same_bool_result_p (t
, partial
))
5359 /* Try to simplify the AND of two comparisons defined by
5360 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5361 If this can be done without constructing an intermediate value,
5362 return the resulting tree; otherwise NULL_TREE is returned.
5363 This function is deliberately asymmetric as it recurses on SSA_DEFs
5364 in the first comparison but not the second. */
5367 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5368 enum tree_code code2
, tree op2a
, tree op2b
)
5370 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
5372 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5373 if (operand_equal_p (op1a
, op2a
, 0)
5374 && operand_equal_p (op1b
, op2b
, 0))
5376 /* Result will be either NULL_TREE, or a combined comparison. */
5377 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5378 TRUTH_ANDIF_EXPR
, code1
, code2
,
5379 truth_type
, op1a
, op1b
);
5384 /* Likewise the swapped case of the above. */
5385 if (operand_equal_p (op1a
, op2b
, 0)
5386 && operand_equal_p (op1b
, op2a
, 0))
5388 /* Result will be either NULL_TREE, or a combined comparison. */
5389 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5390 TRUTH_ANDIF_EXPR
, code1
,
5391 swap_tree_comparison (code2
),
5392 truth_type
, op1a
, op1b
);
5397 /* If both comparisons are of the same value against constants, we might
5398 be able to merge them. */
5399 if (operand_equal_p (op1a
, op2a
, 0)
5400 && TREE_CODE (op1b
) == INTEGER_CST
5401 && TREE_CODE (op2b
) == INTEGER_CST
)
5403 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5405 /* If we have (op1a == op1b), we should either be able to
5406 return that or FALSE, depending on whether the constant op1b
5407 also satisfies the other comparison against op2b. */
5408 if (code1
== EQ_EXPR
)
5414 case EQ_EXPR
: val
= (cmp
== 0); break;
5415 case NE_EXPR
: val
= (cmp
!= 0); break;
5416 case LT_EXPR
: val
= (cmp
< 0); break;
5417 case GT_EXPR
: val
= (cmp
> 0); break;
5418 case LE_EXPR
: val
= (cmp
<= 0); break;
5419 case GE_EXPR
: val
= (cmp
>= 0); break;
5420 default: done
= false;
5425 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5427 return boolean_false_node
;
5430 /* Likewise if the second comparison is an == comparison. */
5431 else if (code2
== EQ_EXPR
)
5437 case EQ_EXPR
: val
= (cmp
== 0); break;
5438 case NE_EXPR
: val
= (cmp
!= 0); break;
5439 case LT_EXPR
: val
= (cmp
> 0); break;
5440 case GT_EXPR
: val
= (cmp
< 0); break;
5441 case LE_EXPR
: val
= (cmp
>= 0); break;
5442 case GE_EXPR
: val
= (cmp
<= 0); break;
5443 default: done
= false;
5448 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5450 return boolean_false_node
;
5454 /* Same business with inequality tests. */
5455 else if (code1
== NE_EXPR
)
5460 case EQ_EXPR
: val
= (cmp
!= 0); break;
5461 case NE_EXPR
: val
= (cmp
== 0); break;
5462 case LT_EXPR
: val
= (cmp
>= 0); break;
5463 case GT_EXPR
: val
= (cmp
<= 0); break;
5464 case LE_EXPR
: val
= (cmp
> 0); break;
5465 case GE_EXPR
: val
= (cmp
< 0); break;
5470 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5472 else if (code2
== NE_EXPR
)
5477 case EQ_EXPR
: val
= (cmp
== 0); break;
5478 case NE_EXPR
: val
= (cmp
!= 0); break;
5479 case LT_EXPR
: val
= (cmp
<= 0); break;
5480 case GT_EXPR
: val
= (cmp
>= 0); break;
5481 case LE_EXPR
: val
= (cmp
< 0); break;
5482 case GE_EXPR
: val
= (cmp
> 0); break;
5487 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5490 /* Chose the more restrictive of two < or <= comparisons. */
5491 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
5492 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5494 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
5495 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5497 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5500 /* Likewise chose the more restrictive of two > or >= comparisons. */
5501 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
5502 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5504 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
5505 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5507 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5510 /* Check for singleton ranges. */
5512 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
5513 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
5514 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
5516 /* Check for disjoint ranges. */
5518 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
5519 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5520 return boolean_false_node
;
5522 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
5523 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5524 return boolean_false_node
;
5527 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5528 NAME's definition is a truth value. See if there are any simplifications
5529 that can be done against the NAME's definition. */
5530 if (TREE_CODE (op1a
) == SSA_NAME
5531 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5532 && (integer_zerop (op1b
) || integer_onep (op1b
)))
5534 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5535 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
5536 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
5537 switch (gimple_code (stmt
))
5540 /* Try to simplify by copy-propagating the definition. */
5541 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
5544 /* If every argument to the PHI produces the same result when
5545 ANDed with the second comparison, we win.
5546 Do not do this unless the type is bool since we need a bool
5547 result here anyway. */
5548 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
5550 tree result
= NULL_TREE
;
5552 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
5554 tree arg
= gimple_phi_arg_def (stmt
, i
);
5556 /* If this PHI has itself as an argument, ignore it.
5557 If all the other args produce the same result,
5559 if (arg
== gimple_phi_result (stmt
))
5561 else if (TREE_CODE (arg
) == INTEGER_CST
)
5563 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
5566 result
= boolean_false_node
;
5567 else if (!integer_zerop (result
))
5571 result
= fold_build2 (code2
, boolean_type_node
,
5573 else if (!same_bool_comparison_p (result
,
5577 else if (TREE_CODE (arg
) == SSA_NAME
5578 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
5581 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
5582 /* In simple cases we can look through PHI nodes,
5583 but we have to be careful with loops.
5585 if (! dom_info_available_p (CDI_DOMINATORS
)
5586 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
5587 || dominated_by_p (CDI_DOMINATORS
,
5588 gimple_bb (def_stmt
),
5591 temp
= and_var_with_comparison (arg
, invert
, code2
,
5597 else if (!same_bool_result_p (result
, temp
))
5613 /* Try to simplify the AND of two comparisons, specified by
5614 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5615 If this can be simplified to a single expression (without requiring
5616 introducing more SSA variables to hold intermediate values),
5617 return the resulting tree. Otherwise return NULL_TREE.
5618 If the result expression is non-null, it has boolean type. */
5621 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
5622 enum tree_code code2
, tree op2a
, tree op2b
)
5624 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
5628 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
5631 /* Helper function for or_comparisons_1: try to simplify the OR of the
5632 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5633 If INVERT is true, invert the value of VAR before doing the OR.
5634 Return NULL_EXPR if we can't simplify this to a single expression. */
5637 or_var_with_comparison (tree var
, bool invert
,
5638 enum tree_code code2
, tree op2a
, tree op2b
)
5641 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
5643 /* We can only deal with variables whose definitions are assignments. */
5644 if (!is_gimple_assign (stmt
))
5647 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5648 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5649 Then we only have to consider the simpler non-inverted cases. */
5651 t
= and_var_with_comparison_1 (stmt
,
5652 invert_tree_comparison (code2
, false),
5655 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
5656 return canonicalize_bool (t
, invert
);
5659 /* Try to simplify the OR of the ssa variable defined by the assignment
5660 STMT with the comparison specified by (OP2A CODE2 OP2B).
5661 Return NULL_EXPR if we can't simplify this to a single expression. */
5664 or_var_with_comparison_1 (gimple
*stmt
,
5665 enum tree_code code2
, tree op2a
, tree op2b
)
5667 tree var
= gimple_assign_lhs (stmt
);
5668 tree true_test_var
= NULL_TREE
;
5669 tree false_test_var
= NULL_TREE
;
5670 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5672 /* Check for identities like (var OR (var != 0)) => true . */
5673 if (TREE_CODE (op2a
) == SSA_NAME
5674 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
5676 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5677 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5679 true_test_var
= op2a
;
5680 if (var
== true_test_var
)
5683 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5684 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5686 false_test_var
= op2a
;
5687 if (var
== false_test_var
)
5688 return boolean_true_node
;
5692 /* If the definition is a comparison, recurse on it. */
5693 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5695 tree t
= or_comparisons_1 (innercode
,
5696 gimple_assign_rhs1 (stmt
),
5697 gimple_assign_rhs2 (stmt
),
5705 /* If the definition is an AND or OR expression, we may be able to
5706 simplify by reassociating. */
5707 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5708 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5710 tree inner1
= gimple_assign_rhs1 (stmt
);
5711 tree inner2
= gimple_assign_rhs2 (stmt
);
5714 tree partial
= NULL_TREE
;
5715 bool is_or
= (innercode
== BIT_IOR_EXPR
);
5717 /* Check for boolean identities that don't require recursive examination
5719 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5720 inner1 OR (inner1 AND inner2) => inner1
5721 !inner1 OR (inner1 OR inner2) => true
5722 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5724 if (inner1
== true_test_var
)
5725 return (is_or
? var
: inner1
);
5726 else if (inner2
== true_test_var
)
5727 return (is_or
? var
: inner2
);
5728 else if (inner1
== false_test_var
)
5731 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
5732 else if (inner2
== false_test_var
)
5735 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
5737 /* Next, redistribute/reassociate the OR across the inner tests.
5738 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5739 if (TREE_CODE (inner1
) == SSA_NAME
5740 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5741 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5742 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
5743 gimple_assign_rhs1 (s
),
5744 gimple_assign_rhs2 (s
),
5745 code2
, op2a
, op2b
)))
5747 /* Handle the OR case, where we are reassociating:
5748 (inner1 OR inner2) OR (op2a code2 op2b)
5750 If the partial result t is a constant, we win. Otherwise
5751 continue on to try reassociating with the other inner test. */
5754 if (integer_onep (t
))
5755 return boolean_true_node
;
5756 else if (integer_zerop (t
))
5760 /* Handle the AND case, where we are redistributing:
5761 (inner1 AND inner2) OR (op2a code2 op2b)
5762 => (t AND (inner2 OR (op2a code op2b))) */
5763 else if (integer_zerop (t
))
5764 return boolean_false_node
;
5766 /* Save partial result for later. */
5770 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5771 if (TREE_CODE (inner2
) == SSA_NAME
5772 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5773 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5774 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
5775 gimple_assign_rhs1 (s
),
5776 gimple_assign_rhs2 (s
),
5777 code2
, op2a
, op2b
)))
5779 /* Handle the OR case, where we are reassociating:
5780 (inner1 OR inner2) OR (op2a code2 op2b)
5782 => (t OR partial) */
5785 if (integer_zerop (t
))
5787 else if (integer_onep (t
))
5788 return boolean_true_node
;
5789 /* If both are the same, we can apply the identity
5791 else if (partial
&& same_bool_result_p (t
, partial
))
5795 /* Handle the AND case, where we are redistributing:
5796 (inner1 AND inner2) OR (op2a code2 op2b)
5797 => (t AND (inner1 OR (op2a code2 op2b)))
5798 => (t AND partial) */
5801 if (integer_zerop (t
))
5802 return boolean_false_node
;
5805 /* We already got a simplification for the other
5806 operand to the redistributed AND expression. The
5807 interesting case is when at least one is true.
5808 Or, if both are the same, we can apply the identity
5810 if (integer_onep (partial
))
5812 else if (integer_onep (t
))
5814 else if (same_bool_result_p (t
, partial
))
5823 /* Try to simplify the OR of two comparisons defined by
5824 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5825 If this can be done without constructing an intermediate value,
5826 return the resulting tree; otherwise NULL_TREE is returned.
5827 This function is deliberately asymmetric as it recurses on SSA_DEFs
5828 in the first comparison but not the second. */
5831 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5832 enum tree_code code2
, tree op2a
, tree op2b
)
5834 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
5836 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5837 if (operand_equal_p (op1a
, op2a
, 0)
5838 && operand_equal_p (op1b
, op2b
, 0))
5840 /* Result will be either NULL_TREE, or a combined comparison. */
5841 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5842 TRUTH_ORIF_EXPR
, code1
, code2
,
5843 truth_type
, op1a
, op1b
);
5848 /* Likewise the swapped case of the above. */
5849 if (operand_equal_p (op1a
, op2b
, 0)
5850 && operand_equal_p (op1b
, op2a
, 0))
5852 /* Result will be either NULL_TREE, or a combined comparison. */
5853 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5854 TRUTH_ORIF_EXPR
, code1
,
5855 swap_tree_comparison (code2
),
5856 truth_type
, op1a
, op1b
);
5861 /* If both comparisons are of the same value against constants, we might
5862 be able to merge them. */
5863 if (operand_equal_p (op1a
, op2a
, 0)
5864 && TREE_CODE (op1b
) == INTEGER_CST
5865 && TREE_CODE (op2b
) == INTEGER_CST
)
5867 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5869 /* If we have (op1a != op1b), we should either be able to
5870 return that or TRUE, depending on whether the constant op1b
5871 also satisfies the other comparison against op2b. */
5872 if (code1
== NE_EXPR
)
5878 case EQ_EXPR
: val
= (cmp
== 0); break;
5879 case NE_EXPR
: val
= (cmp
!= 0); break;
5880 case LT_EXPR
: val
= (cmp
< 0); break;
5881 case GT_EXPR
: val
= (cmp
> 0); break;
5882 case LE_EXPR
: val
= (cmp
<= 0); break;
5883 case GE_EXPR
: val
= (cmp
>= 0); break;
5884 default: done
= false;
5889 return boolean_true_node
;
5891 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5894 /* Likewise if the second comparison is a != comparison. */
5895 else if (code2
== NE_EXPR
)
5901 case EQ_EXPR
: val
= (cmp
== 0); break;
5902 case NE_EXPR
: val
= (cmp
!= 0); break;
5903 case LT_EXPR
: val
= (cmp
> 0); break;
5904 case GT_EXPR
: val
= (cmp
< 0); break;
5905 case LE_EXPR
: val
= (cmp
>= 0); break;
5906 case GE_EXPR
: val
= (cmp
<= 0); break;
5907 default: done
= false;
5912 return boolean_true_node
;
5914 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5918 /* See if an equality test is redundant with the other comparison. */
5919 else if (code1
== EQ_EXPR
)
5924 case EQ_EXPR
: val
= (cmp
== 0); break;
5925 case NE_EXPR
: val
= (cmp
!= 0); break;
5926 case LT_EXPR
: val
= (cmp
< 0); break;
5927 case GT_EXPR
: val
= (cmp
> 0); break;
5928 case LE_EXPR
: val
= (cmp
<= 0); break;
5929 case GE_EXPR
: val
= (cmp
>= 0); break;
5934 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5936 else if (code2
== EQ_EXPR
)
5941 case EQ_EXPR
: val
= (cmp
== 0); break;
5942 case NE_EXPR
: val
= (cmp
!= 0); break;
5943 case LT_EXPR
: val
= (cmp
> 0); break;
5944 case GT_EXPR
: val
= (cmp
< 0); break;
5945 case LE_EXPR
: val
= (cmp
>= 0); break;
5946 case GE_EXPR
: val
= (cmp
<= 0); break;
5951 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5954 /* Chose the less restrictive of two < or <= comparisons. */
5955 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
5956 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5958 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
5959 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5961 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5964 /* Likewise chose the less restrictive of two > or >= comparisons. */
5965 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
5966 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5968 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
5969 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5971 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5974 /* Check for singleton ranges. */
5976 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
5977 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
5978 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
5980 /* Check for less/greater pairs that don't restrict the range at all. */
5982 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
5983 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5984 return boolean_true_node
;
5986 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
5987 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5988 return boolean_true_node
;
5991 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5992 NAME's definition is a truth value. See if there are any simplifications
5993 that can be done against the NAME's definition. */
5994 if (TREE_CODE (op1a
) == SSA_NAME
5995 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5996 && (integer_zerop (op1b
) || integer_onep (op1b
)))
5998 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5999 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6000 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6001 switch (gimple_code (stmt
))
6004 /* Try to simplify by copy-propagating the definition. */
6005 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
6008 /* If every argument to the PHI produces the same result when
6009 ORed with the second comparison, we win.
6010 Do not do this unless the type is bool since we need a bool
6011 result here anyway. */
6012 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6014 tree result
= NULL_TREE
;
6016 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6018 tree arg
= gimple_phi_arg_def (stmt
, i
);
6020 /* If this PHI has itself as an argument, ignore it.
6021 If all the other args produce the same result,
6023 if (arg
== gimple_phi_result (stmt
))
6025 else if (TREE_CODE (arg
) == INTEGER_CST
)
6027 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
6030 result
= boolean_true_node
;
6031 else if (!integer_onep (result
))
6035 result
= fold_build2 (code2
, boolean_type_node
,
6037 else if (!same_bool_comparison_p (result
,
6041 else if (TREE_CODE (arg
) == SSA_NAME
6042 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6045 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6046 /* In simple cases we can look through PHI nodes,
6047 but we have to be careful with loops.
6049 if (! dom_info_available_p (CDI_DOMINATORS
)
6050 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6051 || dominated_by_p (CDI_DOMINATORS
,
6052 gimple_bb (def_stmt
),
6055 temp
= or_var_with_comparison (arg
, invert
, code2
,
6061 else if (!same_bool_result_p (result
, temp
))
6077 /* Try to simplify the OR of two comparisons, specified by
6078 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6079 If this can be simplified to a single expression (without requiring
6080 introducing more SSA variables to hold intermediate values),
6081 return the resulting tree. Otherwise return NULL_TREE.
6082 If the result expression is non-null, it has boolean type. */
6085 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
6086 enum tree_code code2
, tree op2a
, tree op2b
)
6088 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
6092 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
6096 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6098 Either NULL_TREE, a simplified but non-constant or a constant
6101 ??? This should go into a gimple-fold-inline.h file to be eventually
6102 privatized with the single valueize function used in the various TUs
6103 to avoid the indirect function call overhead. */
6106 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
6107 tree (*gvalueize
) (tree
))
6111 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6112 edges if there are intermediate VARYING defs. For this reason
6113 do not follow SSA edges here even though SCCVN can technically
6114 just deal fine with that. */
6115 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
))
6117 tree res
= NULL_TREE
;
6118 if (gimple_simplified_result_is_gimple_val (rcode
, ops
))
6120 else if (mprts_hook
)
6121 res
= mprts_hook (rcode
, gimple_expr_type (stmt
), ops
);
6124 if (dump_file
&& dump_flags
& TDF_DETAILS
)
6126 fprintf (dump_file
, "Match-and-simplified ");
6127 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
6128 fprintf (dump_file
, " to ");
6129 print_generic_expr (dump_file
, res
);
6130 fprintf (dump_file
, "\n");
6136 location_t loc
= gimple_location (stmt
);
6137 switch (gimple_code (stmt
))
6141 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
6143 switch (get_gimple_rhs_class (subcode
))
6145 case GIMPLE_SINGLE_RHS
:
6147 tree rhs
= gimple_assign_rhs1 (stmt
);
6148 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
6150 if (TREE_CODE (rhs
) == SSA_NAME
)
6152 /* If the RHS is an SSA_NAME, return its known constant value,
6154 return (*valueize
) (rhs
);
6156 /* Handle propagating invariant addresses into address
6158 else if (TREE_CODE (rhs
) == ADDR_EXPR
6159 && !is_gimple_min_invariant (rhs
))
6161 poly_int64 offset
= 0;
6163 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
6167 && (CONSTANT_CLASS_P (base
)
6168 || decl_address_invariant_p (base
)))
6169 return build_invariant_address (TREE_TYPE (rhs
),
6172 else if (TREE_CODE (rhs
) == CONSTRUCTOR
6173 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
6174 && known_eq (CONSTRUCTOR_NELTS (rhs
),
6175 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
6180 nelts
= CONSTRUCTOR_NELTS (rhs
);
6181 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
6182 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
6184 val
= (*valueize
) (val
);
6185 if (TREE_CODE (val
) == INTEGER_CST
6186 || TREE_CODE (val
) == REAL_CST
6187 || TREE_CODE (val
) == FIXED_CST
)
6188 vec
.quick_push (val
);
6193 return vec
.build ();
6195 if (subcode
== OBJ_TYPE_REF
)
6197 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
6198 /* If callee is constant, we can fold away the wrapper. */
6199 if (is_gimple_min_invariant (val
))
6203 if (kind
== tcc_reference
)
6205 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
6206 || TREE_CODE (rhs
) == REALPART_EXPR
6207 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
6208 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6210 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6211 return fold_unary_loc (EXPR_LOCATION (rhs
),
6213 TREE_TYPE (rhs
), val
);
6215 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
6216 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6218 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6219 return fold_ternary_loc (EXPR_LOCATION (rhs
),
6221 TREE_TYPE (rhs
), val
,
6222 TREE_OPERAND (rhs
, 1),
6223 TREE_OPERAND (rhs
, 2));
6225 else if (TREE_CODE (rhs
) == MEM_REF
6226 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6228 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6229 if (TREE_CODE (val
) == ADDR_EXPR
6230 && is_gimple_min_invariant (val
))
6232 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
6234 TREE_OPERAND (rhs
, 1));
6239 return fold_const_aggregate_ref_1 (rhs
, valueize
);
6241 else if (kind
== tcc_declaration
)
6242 return get_symbol_constant_value (rhs
);
6246 case GIMPLE_UNARY_RHS
:
6249 case GIMPLE_BINARY_RHS
:
6250 /* Translate &x + CST into an invariant form suitable for
6251 further propagation. */
6252 if (subcode
== POINTER_PLUS_EXPR
)
6254 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6255 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6256 if (TREE_CODE (op0
) == ADDR_EXPR
6257 && TREE_CODE (op1
) == INTEGER_CST
)
6259 tree off
= fold_convert (ptr_type_node
, op1
);
6260 return build_fold_addr_expr_loc
6262 fold_build2 (MEM_REF
,
6263 TREE_TYPE (TREE_TYPE (op0
)),
6264 unshare_expr (op0
), off
));
6267 /* Canonicalize bool != 0 and bool == 0 appearing after
6268 valueization. While gimple_simplify handles this
6269 it can get confused by the ~X == 1 -> X == 0 transform
6270 which we cant reduce to a SSA name or a constant
6271 (and we have no way to tell gimple_simplify to not
6272 consider those transforms in the first place). */
6273 else if (subcode
== EQ_EXPR
6274 || subcode
== NE_EXPR
)
6276 tree lhs
= gimple_assign_lhs (stmt
);
6277 tree op0
= gimple_assign_rhs1 (stmt
);
6278 if (useless_type_conversion_p (TREE_TYPE (lhs
),
6281 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6282 op0
= (*valueize
) (op0
);
6283 if (TREE_CODE (op0
) == INTEGER_CST
)
6284 std::swap (op0
, op1
);
6285 if (TREE_CODE (op1
) == INTEGER_CST
6286 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
6287 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
6293 case GIMPLE_TERNARY_RHS
:
6295 /* Handle ternary operators that can appear in GIMPLE form. */
6296 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6297 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6298 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
6299 return fold_ternary_loc (loc
, subcode
,
6300 gimple_expr_type (stmt
), op0
, op1
, op2
);
6311 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
6313 if (gimple_call_internal_p (stmt
))
6315 enum tree_code subcode
= ERROR_MARK
;
6316 switch (gimple_call_internal_fn (stmt
))
6318 case IFN_UBSAN_CHECK_ADD
:
6319 subcode
= PLUS_EXPR
;
6321 case IFN_UBSAN_CHECK_SUB
:
6322 subcode
= MINUS_EXPR
;
6324 case IFN_UBSAN_CHECK_MUL
:
6325 subcode
= MULT_EXPR
;
6327 case IFN_BUILTIN_EXPECT
:
6329 tree arg0
= gimple_call_arg (stmt
, 0);
6330 tree op0
= (*valueize
) (arg0
);
6331 if (TREE_CODE (op0
) == INTEGER_CST
)
6338 tree arg0
= gimple_call_arg (stmt
, 0);
6339 tree arg1
= gimple_call_arg (stmt
, 1);
6340 tree op0
= (*valueize
) (arg0
);
6341 tree op1
= (*valueize
) (arg1
);
6343 if (TREE_CODE (op0
) != INTEGER_CST
6344 || TREE_CODE (op1
) != INTEGER_CST
)
6349 /* x * 0 = 0 * x = 0 without overflow. */
6350 if (integer_zerop (op0
) || integer_zerop (op1
))
6351 return build_zero_cst (TREE_TYPE (arg0
));
6354 /* y - y = 0 without overflow. */
6355 if (operand_equal_p (op0
, op1
, 0))
6356 return build_zero_cst (TREE_TYPE (arg0
));
6363 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
6365 && TREE_CODE (res
) == INTEGER_CST
6366 && !TREE_OVERFLOW (res
))
6371 fn
= (*valueize
) (gimple_call_fn (stmt
));
6372 if (TREE_CODE (fn
) == ADDR_EXPR
6373 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
6374 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
6375 && gimple_builtin_call_types_compatible_p (stmt
,
6376 TREE_OPERAND (fn
, 0)))
6378 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
6381 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6382 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
6383 retval
= fold_builtin_call_array (loc
,
6384 gimple_call_return_type (call_stmt
),
6385 fn
, gimple_call_num_args (stmt
), args
);
6388 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6389 STRIP_NOPS (retval
);
6390 retval
= fold_convert (gimple_call_return_type (call_stmt
),
6403 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6404 Returns NULL_TREE if folding to a constant is not possible, otherwise
6405 returns a constant according to is_gimple_min_invariant. */
6408 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
6410 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
6411 if (res
&& is_gimple_min_invariant (res
))
6417 /* The following set of functions are supposed to fold references using
6418 their constant initializers. */
6420 /* See if we can find constructor defining value of BASE.
6421 When we know the consructor with constant offset (such as
6422 base is array[40] and we do know constructor of array), then
6423 BIT_OFFSET is adjusted accordingly.
6425 As a special case, return error_mark_node when constructor
6426 is not explicitly available, but it is known to be zero
6427 such as 'static const int a;'. */
6429 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
6430 tree (*valueize
)(tree
))
6432 poly_int64 bit_offset2
, size
, max_size
;
6435 if (TREE_CODE (base
) == MEM_REF
)
6437 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
6438 if (!boff
.to_shwi (bit_offset
))
6442 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
6443 base
= valueize (TREE_OPERAND (base
, 0));
6444 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
6446 base
= TREE_OPERAND (base
, 0);
6449 && TREE_CODE (base
) == SSA_NAME
)
6450 base
= valueize (base
);
6452 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6453 DECL_INITIAL. If BASE is a nested reference into another
6454 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6455 the inner reference. */
6456 switch (TREE_CODE (base
))
6461 tree init
= ctor_for_folding (base
);
6463 /* Our semantic is exact opposite of ctor_for_folding;
6464 NULL means unknown, while error_mark_node is 0. */
6465 if (init
== error_mark_node
)
6468 return error_mark_node
;
6472 case VIEW_CONVERT_EXPR
:
6473 return get_base_constructor (TREE_OPERAND (base
, 0),
6474 bit_offset
, valueize
);
6478 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
6480 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
6482 *bit_offset
+= bit_offset2
;
6483 return get_base_constructor (base
, bit_offset
, valueize
);
6489 if (CONSTANT_CLASS_P (base
))
6496 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6497 SIZE to the memory at bit OFFSET. */
6500 fold_array_ctor_reference (tree type
, tree ctor
,
6501 unsigned HOST_WIDE_INT offset
,
6502 unsigned HOST_WIDE_INT size
,
6505 offset_int low_bound
;
6506 offset_int elt_size
;
6507 offset_int access_index
;
6508 tree domain_type
= NULL_TREE
;
6509 HOST_WIDE_INT inner_offset
;
6511 /* Compute low bound and elt size. */
6512 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
6513 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
6514 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
6516 /* Static constructors for variably sized objects makes no sense. */
6517 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
6519 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
6523 /* Static constructors for variably sized objects makes no sense. */
6524 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
6526 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
6528 /* We can handle only constantly sized accesses that are known to not
6529 be larger than size of array element. */
6530 if (!TYPE_SIZE_UNIT (type
)
6531 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
6532 || elt_size
< wi::to_offset (TYPE_SIZE_UNIT (type
))
6536 /* Compute the array index we look for. */
6537 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
6539 access_index
+= low_bound
;
6541 /* And offset within the access. */
6542 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
6544 /* See if the array field is large enough to span whole access. We do not
6545 care to fold accesses spanning multiple array indexes. */
6546 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
6548 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
6549 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
);
6551 /* When memory is not explicitely mentioned in constructor,
6552 it is 0 (or out of range). */
6553 return build_zero_cst (type
);
6556 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6557 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6560 fold_nonarray_ctor_reference (tree type
, tree ctor
,
6561 unsigned HOST_WIDE_INT offset
,
6562 unsigned HOST_WIDE_INT size
,
6565 unsigned HOST_WIDE_INT cnt
;
6568 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
6571 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
6572 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
6573 tree field_size
= DECL_SIZE (cfield
);
6574 offset_int bitoffset
;
6575 offset_int bitoffset_end
, access_end
;
6577 /* Variable sized objects in static constructors makes no sense,
6578 but field_size can be NULL for flexible array members. */
6579 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
6580 && TREE_CODE (byte_offset
) == INTEGER_CST
6581 && (field_size
!= NULL_TREE
6582 ? TREE_CODE (field_size
) == INTEGER_CST
6583 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
6585 /* Compute bit offset of the field. */
6586 bitoffset
= (wi::to_offset (field_offset
)
6587 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
6588 /* Compute bit offset where the field ends. */
6589 if (field_size
!= NULL_TREE
)
6590 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
6594 access_end
= offset_int (offset
) + size
;
6596 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6597 [BITOFFSET, BITOFFSET_END)? */
6598 if (wi::cmps (access_end
, bitoffset
) > 0
6599 && (field_size
== NULL_TREE
6600 || wi::lts_p (offset
, bitoffset_end
)))
6602 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
6603 /* We do have overlap. Now see if field is large enough to
6604 cover the access. Give up for accesses spanning multiple
6606 if (wi::cmps (access_end
, bitoffset_end
) > 0)
6608 if (offset
< bitoffset
)
6610 return fold_ctor_reference (type
, cval
,
6611 inner_offset
.to_uhwi (), size
,
6615 /* When memory is not explicitely mentioned in constructor, it is 0. */
6616 return build_zero_cst (type
);
6619 /* CTOR is value initializing memory, fold reference of type TYPE and
6620 size POLY_SIZE to the memory at bit POLY_OFFSET. */
6623 fold_ctor_reference (tree type
, tree ctor
, poly_uint64 poly_offset
,
6624 poly_uint64 poly_size
, tree from_decl
)
6628 /* We found the field with exact match. */
6629 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
6630 && known_eq (poly_offset
, 0U))
6631 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
6633 /* The remaining optimizations need a constant size and offset. */
6634 unsigned HOST_WIDE_INT size
, offset
;
6635 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
6638 /* We are at the end of walk, see if we can view convert the
6640 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
6641 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6642 && !compare_tree_int (TYPE_SIZE (type
), size
)
6643 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
6645 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
6648 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
6650 STRIP_USELESS_TYPE_CONVERSION (ret
);
6654 /* For constants and byte-aligned/sized reads try to go through
6655 native_encode/interpret. */
6656 if (CONSTANT_CLASS_P (ctor
)
6657 && BITS_PER_UNIT
== 8
6658 && offset
% BITS_PER_UNIT
== 0
6659 && size
% BITS_PER_UNIT
== 0
6660 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
6662 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
6663 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
6664 offset
/ BITS_PER_UNIT
);
6666 return native_interpret_expr (type
, buf
, len
);
6668 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
6671 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
6672 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
6673 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
6676 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
6683 /* Return the tree representing the element referenced by T if T is an
6684 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6685 names using VALUEIZE. Return NULL_TREE otherwise. */
6688 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
6690 tree ctor
, idx
, base
;
6691 poly_int64 offset
, size
, max_size
;
6695 if (TREE_THIS_VOLATILE (t
))
6699 return get_symbol_constant_value (t
);
6701 tem
= fold_read_from_constant_string (t
);
6705 switch (TREE_CODE (t
))
6708 case ARRAY_RANGE_REF
:
6709 /* Constant indexes are handled well by get_base_constructor.
6710 Only special case variable offsets.
6711 FIXME: This code can't handle nested references with variable indexes
6712 (they will be handled only by iteration of ccp). Perhaps we can bring
6713 get_ref_base_and_extent here and make it use a valueize callback. */
6714 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
6716 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
6717 && poly_int_tree_p (idx
))
6719 tree low_bound
, unit_size
;
6721 /* If the resulting bit-offset is constant, track it. */
6722 if ((low_bound
= array_ref_low_bound (t
),
6723 poly_int_tree_p (low_bound
))
6724 && (unit_size
= array_ref_element_size (t
),
6725 tree_fits_uhwi_p (unit_size
)))
6727 poly_offset_int woffset
6728 = wi::sext (wi::to_poly_offset (idx
)
6729 - wi::to_poly_offset (low_bound
),
6730 TYPE_PRECISION (TREE_TYPE (idx
)));
6732 if (woffset
.to_shwi (&offset
))
6734 /* TODO: This code seems wrong, multiply then check
6735 to see if it fits. */
6736 offset
*= tree_to_uhwi (unit_size
);
6737 offset
*= BITS_PER_UNIT
;
6739 base
= TREE_OPERAND (t
, 0);
6740 ctor
= get_base_constructor (base
, &offset
, valueize
);
6741 /* Empty constructor. Always fold to 0. */
6742 if (ctor
== error_mark_node
)
6743 return build_zero_cst (TREE_TYPE (t
));
6744 /* Out of bound array access. Value is undefined,
6746 if (maybe_lt (offset
, 0))
6748 /* We can not determine ctor. */
6751 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
6752 tree_to_uhwi (unit_size
)
6762 case TARGET_MEM_REF
:
6764 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
6765 ctor
= get_base_constructor (base
, &offset
, valueize
);
6767 /* Empty constructor. Always fold to 0. */
6768 if (ctor
== error_mark_node
)
6769 return build_zero_cst (TREE_TYPE (t
));
6770 /* We do not know precise address. */
6771 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
6773 /* We can not determine ctor. */
6777 /* Out of bound array access. Value is undefined, but don't fold. */
6778 if (maybe_lt (offset
, 0))
6781 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
6787 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
6788 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
6789 return fold_build1_loc (EXPR_LOCATION (t
),
6790 TREE_CODE (t
), TREE_TYPE (t
), c
);
6802 fold_const_aggregate_ref (tree t
)
6804 return fold_const_aggregate_ref_1 (t
, NULL
);
6807 /* Lookup virtual method with index TOKEN in a virtual table V
6809 Set CAN_REFER if non-NULL to false if method
6810 is not referable or if the virtual table is ill-formed (such as rewriten
6811 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6814 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
6816 unsigned HOST_WIDE_INT offset
,
6819 tree vtable
= v
, init
, fn
;
6820 unsigned HOST_WIDE_INT size
;
6821 unsigned HOST_WIDE_INT elt_size
, access_index
;
6827 /* First of all double check we have virtual table. */
6828 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
6830 /* Pass down that we lost track of the target. */
6836 init
= ctor_for_folding (v
);
6838 /* The virtual tables should always be born with constructors
6839 and we always should assume that they are avaialble for
6840 folding. At the moment we do not stream them in all cases,
6841 but it should never happen that ctor seem unreachable. */
6843 if (init
== error_mark_node
)
6845 /* Pass down that we lost track of the target. */
6850 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
6851 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
6852 offset
*= BITS_PER_UNIT
;
6853 offset
+= token
* size
;
6855 /* Lookup the value in the constructor that is assumed to be array.
6856 This is equivalent to
6857 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6858 offset, size, NULL);
6859 but in a constant time. We expect that frontend produced a simple
6860 array without indexed initializers. */
6862 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
6863 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
6864 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
6865 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
6867 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
6868 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
6870 /* This code makes an assumption that there are no
6871 indexed fileds produced by C++ FE, so we can directly index the array. */
6872 if (access_index
< CONSTRUCTOR_NELTS (init
))
6874 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
6875 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
6881 /* For type inconsistent program we may end up looking up virtual method
6882 in virtual table that does not contain TOKEN entries. We may overrun
6883 the virtual table and pick up a constant or RTTI info pointer.
6884 In any case the call is undefined. */
6886 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
6887 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
6888 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
6891 fn
= TREE_OPERAND (fn
, 0);
6893 /* When cgraph node is missing and function is not public, we cannot
6894 devirtualize. This can happen in WHOPR when the actual method
6895 ends up in other partition, because we found devirtualization
6896 possibility too late. */
6897 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
6908 /* Make sure we create a cgraph node for functions we'll reference.
6909 They can be non-existent if the reference comes from an entry
6910 of an external vtable for example. */
6911 cgraph_node::get_create (fn
);
6916 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6917 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6918 KNOWN_BINFO carries the binfo describing the true type of
6919 OBJ_TYPE_REF_OBJECT(REF).
6920 Set CAN_REFER if non-NULL to false if method
6921 is not referable or if the virtual table is ill-formed (such as rewriten
6922 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6925 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
6928 unsigned HOST_WIDE_INT offset
;
6931 v
= BINFO_VTABLE (known_binfo
);
6932 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6936 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
6942 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
6945 /* Given a pointer value T, return a simplified version of an
6946 indirection through T, or NULL_TREE if no simplification is
6947 possible. Note that the resulting type may be different from
6948 the type pointed to in the sense that it is still compatible
6949 from the langhooks point of view. */
6952 gimple_fold_indirect_ref (tree t
)
6954 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
6959 subtype
= TREE_TYPE (sub
);
6960 if (!POINTER_TYPE_P (subtype
)
6961 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
6964 if (TREE_CODE (sub
) == ADDR_EXPR
)
6966 tree op
= TREE_OPERAND (sub
, 0);
6967 tree optype
= TREE_TYPE (op
);
6969 if (useless_type_conversion_p (type
, optype
))
6972 /* *(foo *)&fooarray => fooarray[0] */
6973 if (TREE_CODE (optype
) == ARRAY_TYPE
6974 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
6975 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
6977 tree type_domain
= TYPE_DOMAIN (optype
);
6978 tree min_val
= size_zero_node
;
6979 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
6980 min_val
= TYPE_MIN_VALUE (type_domain
);
6981 if (TREE_CODE (min_val
) == INTEGER_CST
)
6982 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
6984 /* *(foo *)&complexfoo => __real__ complexfoo */
6985 else if (TREE_CODE (optype
) == COMPLEX_TYPE
6986 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
6987 return fold_build1 (REALPART_EXPR
, type
, op
);
6988 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6989 else if (TREE_CODE (optype
) == VECTOR_TYPE
6990 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
6992 tree part_width
= TYPE_SIZE (type
);
6993 tree index
= bitsize_int (0);
6994 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
6998 /* *(p + CST) -> ... */
6999 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
7000 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
7002 tree addr
= TREE_OPERAND (sub
, 0);
7003 tree off
= TREE_OPERAND (sub
, 1);
7007 addrtype
= TREE_TYPE (addr
);
7009 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7010 if (TREE_CODE (addr
) == ADDR_EXPR
7011 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
7012 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
7013 && tree_fits_uhwi_p (off
))
7015 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
7016 tree part_width
= TYPE_SIZE (type
);
7017 unsigned HOST_WIDE_INT part_widthi
7018 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
7019 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
7020 tree index
= bitsize_int (indexi
);
7021 if (known_lt (offset
/ part_widthi
,
7022 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
7023 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
7027 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7028 if (TREE_CODE (addr
) == ADDR_EXPR
7029 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
7030 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
7032 tree size
= TYPE_SIZE_UNIT (type
);
7033 if (tree_int_cst_equal (size
, off
))
7034 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
7037 /* *(p + CST) -> MEM_REF <p, CST>. */
7038 if (TREE_CODE (addr
) != ADDR_EXPR
7039 || DECL_P (TREE_OPERAND (addr
, 0)))
7040 return fold_build2 (MEM_REF
, type
,
7042 wide_int_to_tree (ptype
, wi::to_wide (off
)));
7045 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7046 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
7047 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
7048 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
7051 tree min_val
= size_zero_node
;
7053 sub
= gimple_fold_indirect_ref (sub
);
7055 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
7056 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
7057 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
7058 min_val
= TYPE_MIN_VALUE (type_domain
);
7059 if (TREE_CODE (min_val
) == INTEGER_CST
)
7060 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
7066 /* Return true if CODE is an operation that when operating on signed
7067 integer types involves undefined behavior on overflow and the
7068 operation can be expressed with unsigned arithmetic. */
7071 arith_code_with_undefined_signed_overflow (tree_code code
)
7079 case POINTER_PLUS_EXPR
:
7086 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7087 operation that can be transformed to unsigned arithmetic by converting
7088 its operand, carrying out the operation in the corresponding unsigned
7089 type and converting the result back to the original type.
7091 Returns a sequence of statements that replace STMT and also contain
7092 a modified form of STMT itself. */
7095 rewrite_to_defined_overflow (gimple
*stmt
)
7097 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7099 fprintf (dump_file
, "rewriting stmt with undefined signed "
7101 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
7104 tree lhs
= gimple_assign_lhs (stmt
);
7105 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
7106 gimple_seq stmts
= NULL
;
7107 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
7109 tree op
= gimple_op (stmt
, i
);
7110 op
= gimple_convert (&stmts
, type
, op
);
7111 gimple_set_op (stmt
, i
, op
);
7113 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
7114 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
7115 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
7116 gimple_seq_add_stmt (&stmts
, stmt
);
7117 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
7118 gimple_seq_add_stmt (&stmts
, cvt
);
7124 /* The valueization hook we use for the gimple_build API simplification.
7125 This makes us match fold_buildN behavior by only combining with
7126 statements in the sequence(s) we are currently building. */
7129 gimple_build_valueize (tree op
)
7131 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
7136 /* Build the expression CODE OP0 of type TYPE with location LOC,
7137 simplifying it first if possible. Returns the built
7138 expression value and appends statements possibly defining it
7142 gimple_build (gimple_seq
*seq
, location_t loc
,
7143 enum tree_code code
, tree type
, tree op0
)
7145 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
7148 res
= create_tmp_reg_or_ssa_name (type
);
7150 if (code
== REALPART_EXPR
7151 || code
== IMAGPART_EXPR
7152 || code
== VIEW_CONVERT_EXPR
)
7153 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
7155 stmt
= gimple_build_assign (res
, code
, op0
);
7156 gimple_set_location (stmt
, loc
);
7157 gimple_seq_add_stmt_without_update (seq
, stmt
);
7162 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7163 simplifying it first if possible. Returns the built
7164 expression value and appends statements possibly defining it
7168 gimple_build (gimple_seq
*seq
, location_t loc
,
7169 enum tree_code code
, tree type
, tree op0
, tree op1
)
7171 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
7174 res
= create_tmp_reg_or_ssa_name (type
);
7175 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
7176 gimple_set_location (stmt
, loc
);
7177 gimple_seq_add_stmt_without_update (seq
, stmt
);
7182 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7183 simplifying it first if possible. Returns the built
7184 expression value and appends statements possibly defining it
7188 gimple_build (gimple_seq
*seq
, location_t loc
,
7189 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
7191 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
7192 seq
, gimple_build_valueize
);
7195 res
= create_tmp_reg_or_ssa_name (type
);
7197 if (code
== BIT_FIELD_REF
)
7198 stmt
= gimple_build_assign (res
, code
,
7199 build3 (code
, type
, op0
, op1
, op2
));
7201 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
7202 gimple_set_location (stmt
, loc
);
7203 gimple_seq_add_stmt_without_update (seq
, stmt
);
7208 /* Build the call FN (ARG0) with a result of type TYPE
7209 (or no result if TYPE is void) with location LOC,
7210 simplifying it first if possible. Returns the built
7211 expression value (or NULL_TREE if TYPE is void) and appends
7212 statements possibly defining it to SEQ. */
7215 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7216 tree type
, tree arg0
)
7218 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
7222 if (internal_fn_p (fn
))
7223 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
7226 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7227 stmt
= gimple_build_call (decl
, 1, arg0
);
7229 if (!VOID_TYPE_P (type
))
7231 res
= create_tmp_reg_or_ssa_name (type
);
7232 gimple_call_set_lhs (stmt
, res
);
7234 gimple_set_location (stmt
, loc
);
7235 gimple_seq_add_stmt_without_update (seq
, stmt
);
7240 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7241 (or no result if TYPE is void) with location LOC,
7242 simplifying it first if possible. Returns the built
7243 expression value (or NULL_TREE if TYPE is void) and appends
7244 statements possibly defining it to SEQ. */
7247 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7248 tree type
, tree arg0
, tree arg1
)
7250 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
7254 if (internal_fn_p (fn
))
7255 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
7258 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7259 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
7261 if (!VOID_TYPE_P (type
))
7263 res
= create_tmp_reg_or_ssa_name (type
);
7264 gimple_call_set_lhs (stmt
, res
);
7266 gimple_set_location (stmt
, loc
);
7267 gimple_seq_add_stmt_without_update (seq
, stmt
);
7272 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7273 (or no result if TYPE is void) with location LOC,
7274 simplifying it first if possible. Returns the built
7275 expression value (or NULL_TREE if TYPE is void) and appends
7276 statements possibly defining it to SEQ. */
7279 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7280 tree type
, tree arg0
, tree arg1
, tree arg2
)
7282 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
7283 seq
, gimple_build_valueize
);
7287 if (internal_fn_p (fn
))
7288 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
7289 3, arg0
, arg1
, arg2
);
7292 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7293 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
7295 if (!VOID_TYPE_P (type
))
7297 res
= create_tmp_reg_or_ssa_name (type
);
7298 gimple_call_set_lhs (stmt
, res
);
7300 gimple_set_location (stmt
, loc
);
7301 gimple_seq_add_stmt_without_update (seq
, stmt
);
7306 /* Build the conversion (TYPE) OP with a result of type TYPE
7307 with location LOC if such conversion is neccesary in GIMPLE,
7308 simplifying it first.
7309 Returns the built expression value and appends
7310 statements possibly defining it to SEQ. */
7313 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
7315 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
7317 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
7320 /* Build the conversion (ptrofftype) OP with a result of a type
7321 compatible with ptrofftype with location LOC if such conversion
7322 is neccesary in GIMPLE, simplifying it first.
7323 Returns the built expression value and appends
7324 statements possibly defining it to SEQ. */
7327 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
7329 if (ptrofftype_p (TREE_TYPE (op
)))
7331 return gimple_convert (seq
, loc
, sizetype
, op
);
7334 /* Build a vector of type TYPE in which each element has the value OP.
7335 Return a gimple value for the result, appending any new statements
7339 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
7342 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
7343 && !CONSTANT_CLASS_P (op
))
7344 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
7346 tree res
, vec
= build_vector_from_val (type
, op
);
7347 if (is_gimple_val (vec
))
7349 if (gimple_in_ssa_p (cfun
))
7350 res
= make_ssa_name (type
);
7352 res
= create_tmp_reg (type
);
7353 gimple
*stmt
= gimple_build_assign (res
, vec
);
7354 gimple_set_location (stmt
, loc
);
7355 gimple_seq_add_stmt_without_update (seq
, stmt
);
7359 /* Build a vector from BUILDER, handling the case in which some elements
7360 are non-constant. Return a gimple value for the result, appending any
7361 new instructions to SEQ.
7363 BUILDER must not have a stepped encoding on entry. This is because
7364 the function is not geared up to handle the arithmetic that would
7365 be needed in the variable case, and any code building a vector that
7366 is known to be constant should use BUILDER->build () directly. */
7369 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
7370 tree_vector_builder
*builder
)
7372 gcc_assert (builder
->nelts_per_pattern () <= 2);
7373 unsigned int encoded_nelts
= builder
->encoded_nelts ();
7374 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
7375 if (!TREE_CONSTANT ((*builder
)[i
]))
7377 tree type
= builder
->type ();
7378 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
7379 vec
<constructor_elt
, va_gc
> *v
;
7380 vec_alloc (v
, nelts
);
7381 for (i
= 0; i
< nelts
; ++i
)
7382 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
7385 if (gimple_in_ssa_p (cfun
))
7386 res
= make_ssa_name (type
);
7388 res
= create_tmp_reg (type
);
7389 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
7390 gimple_set_location (stmt
, loc
);
7391 gimple_seq_add_stmt_without_update (seq
, stmt
);
7394 return builder
->build ();
7397 /* Return true if the result of assignment STMT is known to be non-negative.
7398 If the return value is based on the assumption that signed overflow is
7399 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7400 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7403 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7406 enum tree_code code
= gimple_assign_rhs_code (stmt
);
7407 switch (get_gimple_rhs_class (code
))
7409 case GIMPLE_UNARY_RHS
:
7410 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
7411 gimple_expr_type (stmt
),
7412 gimple_assign_rhs1 (stmt
),
7413 strict_overflow_p
, depth
);
7414 case GIMPLE_BINARY_RHS
:
7415 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
7416 gimple_expr_type (stmt
),
7417 gimple_assign_rhs1 (stmt
),
7418 gimple_assign_rhs2 (stmt
),
7419 strict_overflow_p
, depth
);
7420 case GIMPLE_TERNARY_RHS
:
7422 case GIMPLE_SINGLE_RHS
:
7423 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
7424 strict_overflow_p
, depth
);
7425 case GIMPLE_INVALID_RHS
:
7431 /* Return true if return value of call STMT is known to be non-negative.
7432 If the return value is based on the assumption that signed overflow is
7433 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7434 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7437 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7440 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
7441 gimple_call_arg (stmt
, 0) : NULL_TREE
;
7442 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
7443 gimple_call_arg (stmt
, 1) : NULL_TREE
;
7445 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
7446 gimple_call_combined_fn (stmt
),
7449 strict_overflow_p
, depth
);
7452 /* Return true if return value of call STMT is known to be non-negative.
7453 If the return value is based on the assumption that signed overflow is
7454 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7455 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7458 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7461 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
7463 tree arg
= gimple_phi_arg_def (stmt
, i
);
7464 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
7470 /* Return true if STMT is known to compute a non-negative value.
7471 If the return value is based on the assumption that signed overflow is
7472 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7473 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7476 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7479 switch (gimple_code (stmt
))
7482 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7485 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7488 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7495 /* Return true if the floating-point value computed by assignment STMT
7496 is known to have an integer value. We also allow +Inf, -Inf and NaN
7497 to be considered integer values. Return false for signaling NaN.
7499 DEPTH is the current nesting depth of the query. */
7502 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
7504 enum tree_code code
= gimple_assign_rhs_code (stmt
);
7505 switch (get_gimple_rhs_class (code
))
7507 case GIMPLE_UNARY_RHS
:
7508 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
7509 gimple_assign_rhs1 (stmt
), depth
);
7510 case GIMPLE_BINARY_RHS
:
7511 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
7512 gimple_assign_rhs1 (stmt
),
7513 gimple_assign_rhs2 (stmt
), depth
);
7514 case GIMPLE_TERNARY_RHS
:
7516 case GIMPLE_SINGLE_RHS
:
7517 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
7518 case GIMPLE_INVALID_RHS
:
7524 /* Return true if the floating-point value computed by call STMT is known
7525 to have an integer value. We also allow +Inf, -Inf and NaN to be
7526 considered integer values. Return false for signaling NaN.
7528 DEPTH is the current nesting depth of the query. */
7531 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
7533 tree arg0
= (gimple_call_num_args (stmt
) > 0
7534 ? gimple_call_arg (stmt
, 0)
7536 tree arg1
= (gimple_call_num_args (stmt
) > 1
7537 ? gimple_call_arg (stmt
, 1)
7539 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
7543 /* Return true if the floating-point result of phi STMT is known to have
7544 an integer value. We also allow +Inf, -Inf and NaN to be considered
7545 integer values. Return false for signaling NaN.
7547 DEPTH is the current nesting depth of the query. */
7550 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
7552 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
7554 tree arg
= gimple_phi_arg_def (stmt
, i
);
7555 if (!integer_valued_real_single_p (arg
, depth
+ 1))
7561 /* Return true if the floating-point value computed by STMT is known
7562 to have an integer value. We also allow +Inf, -Inf and NaN to be
7563 considered integer values. Return false for signaling NaN.
7565 DEPTH is the current nesting depth of the query. */
7568 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
7570 switch (gimple_code (stmt
))
7573 return gimple_assign_integer_valued_real_p (stmt
, depth
);
7575 return gimple_call_integer_valued_real_p (stmt
, depth
);
7577 return gimple_phi_integer_valued_real_p (stmt
, depth
);