1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
60 /* Return true when DECL can be referenced from current unit.
61 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
62 We can get declarations that are not possible to reference for various
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
67 when they are keyed to other compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
71 2) In WHOPR mode devirtualization might lead to reference
72 to method that was partitioned elsehwere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
76 3) COMDAT functions referred by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
83 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
86 struct cgraph_node
*node
;
89 if (DECL_ABSTRACT_P (decl
))
92 /* We are concerned only about static/external vars and functions. */
93 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
94 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
97 /* Static objects can be referred only if they was not optimized out yet. */
98 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
100 /* Before we start optimizing unreachable code we can be sure all
101 static objects are defined. */
102 if (symtab
->function_flags_ready
)
104 snode
= symtab_node::get (decl
);
105 if (!snode
|| !snode
->definition
)
107 node
= dyn_cast
<cgraph_node
*> (snode
);
108 return !node
|| !node
->global
.inlined_to
;
111 /* We will later output the initializer, so we can refer to it.
112 So we are concerned only when DECL comes from initializer of
113 external var or var that has been optimized out. */
115 || TREE_CODE (from_decl
) != VAR_DECL
116 || (!DECL_EXTERNAL (from_decl
)
117 && (vnode
= varpool_node::get (from_decl
)) != NULL
118 && vnode
->definition
)
120 && (vnode
= varpool_node::get (from_decl
)) != NULL
121 && vnode
->in_other_partition
))
123 /* We are folding reference from external vtable. The vtable may reffer
124 to a symbol keyed to other compilation unit. The other compilation
125 unit may be in separate DSO and the symbol may be hidden. */
126 if (DECL_VISIBILITY_SPECIFIED (decl
)
127 && DECL_EXTERNAL (decl
)
128 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
129 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
131 /* When function is public, we always can introduce new reference.
132 Exception are the COMDAT functions where introducing a direct
133 reference imply need to include function body in the curren tunit. */
134 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
136 /* We have COMDAT. We are going to check if we still have definition
137 or if the definition is going to be output in other partition.
138 Bypass this when gimplifying; all needed functions will be produced.
140 As observed in PR20991 for already optimized out comdat virtual functions
141 it may be tempting to not necessarily give up because the copy will be
142 output elsewhere when corresponding vtable is output.
143 This is however not possible - ABI specify that COMDATs are output in
144 units where they are used and when the other unit was compiled with LTO
145 it is possible that vtable was kept public while the function itself
147 if (!symtab
->function_flags_ready
)
150 snode
= symtab_node::get (decl
);
152 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
153 && (!snode
->in_other_partition
154 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
156 node
= dyn_cast
<cgraph_node
*> (snode
);
157 return !node
|| !node
->global
.inlined_to
;
160 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
161 acceptable form for is_gimple_min_invariant.
162 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
165 canonicalize_constructor_val (tree cval
, tree from_decl
)
167 tree orig_cval
= cval
;
169 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
170 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
172 tree ptr
= TREE_OPERAND (cval
, 0);
173 if (is_gimple_min_invariant (ptr
))
174 cval
= build1_loc (EXPR_LOCATION (cval
),
175 ADDR_EXPR
, TREE_TYPE (ptr
),
176 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
178 fold_convert (ptr_type_node
,
179 TREE_OPERAND (cval
, 1))));
181 if (TREE_CODE (cval
) == ADDR_EXPR
)
183 tree base
= NULL_TREE
;
184 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
186 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
188 TREE_OPERAND (cval
, 0) = base
;
191 base
= get_base_address (TREE_OPERAND (cval
, 0));
195 if ((TREE_CODE (base
) == VAR_DECL
196 || TREE_CODE (base
) == FUNCTION_DECL
)
197 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
199 if (TREE_TYPE (base
) == error_mark_node
)
201 if (TREE_CODE (base
) == VAR_DECL
)
202 TREE_ADDRESSABLE (base
) = 1;
203 else if (TREE_CODE (base
) == FUNCTION_DECL
)
205 /* Make sure we create a cgraph node for functions we'll reference.
206 They can be non-existent if the reference comes from an entry
207 of an external vtable for example. */
208 cgraph_node::get_create (base
);
210 /* Fixup types in global initializers. */
211 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
212 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
214 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
215 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
218 if (TREE_OVERFLOW_P (cval
))
219 return drop_tree_overflow (cval
);
223 /* If SYM is a constant variable with known value, return the value.
224 NULL_TREE is returned otherwise. */
227 get_symbol_constant_value (tree sym
)
229 tree val
= ctor_for_folding (sym
);
230 if (val
!= error_mark_node
)
234 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
235 if (val
&& is_gimple_min_invariant (val
))
240 /* Variables declared 'const' without an initializer
241 have zero as the initializer if they may not be
242 overridden at link or run time. */
244 && is_gimple_reg_type (TREE_TYPE (sym
)))
245 return build_zero_cst (TREE_TYPE (sym
));
253 /* Subroutine of fold_stmt. We perform several simplifications of the
254 memory reference tree EXPR and make sure to re-gimplify them properly
255 after propagation of constant addresses. IS_LHS is true if the
256 reference is supposed to be an lvalue. */
259 maybe_fold_reference (tree expr
, bool is_lhs
)
263 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
264 || TREE_CODE (expr
) == REALPART_EXPR
265 || TREE_CODE (expr
) == IMAGPART_EXPR
)
266 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
267 return fold_unary_loc (EXPR_LOCATION (expr
),
270 TREE_OPERAND (expr
, 0));
271 else if (TREE_CODE (expr
) == BIT_FIELD_REF
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
273 return fold_ternary_loc (EXPR_LOCATION (expr
),
276 TREE_OPERAND (expr
, 0),
277 TREE_OPERAND (expr
, 1),
278 TREE_OPERAND (expr
, 2));
281 && (result
= fold_const_aggregate_ref (expr
))
282 && is_gimple_min_invariant (result
))
289 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
290 replacement rhs for the statement or NULL_TREE if no simplification
291 could be made. It is assumed that the operands have been previously
295 fold_gimple_assign (gimple_stmt_iterator
*si
)
297 gimple
*stmt
= gsi_stmt (*si
);
298 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
299 location_t loc
= gimple_location (stmt
);
301 tree result
= NULL_TREE
;
303 switch (get_gimple_rhs_class (subcode
))
305 case GIMPLE_SINGLE_RHS
:
307 tree rhs
= gimple_assign_rhs1 (stmt
);
309 if (TREE_CLOBBER_P (rhs
))
312 if (REFERENCE_CLASS_P (rhs
))
313 return maybe_fold_reference (rhs
, false);
315 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
317 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
318 if (is_gimple_min_invariant (val
))
320 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
323 vec
<cgraph_node
*>targets
324 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
325 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
327 if (dump_enabled_p ())
329 location_t loc
= gimple_location_safe (stmt
);
330 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
331 "resolving virtual function address "
332 "reference to function %s\n",
333 targets
.length () == 1
334 ? targets
[0]->name ()
337 if (targets
.length () == 1)
339 val
= fold_convert (TREE_TYPE (val
),
340 build_fold_addr_expr_loc
341 (loc
, targets
[0]->decl
));
342 STRIP_USELESS_TYPE_CONVERSION (val
);
345 /* We can not use __builtin_unreachable here because it
346 can not have address taken. */
347 val
= build_int_cst (TREE_TYPE (val
), 0);
353 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
355 tree ref
= TREE_OPERAND (rhs
, 0);
356 tree tem
= maybe_fold_reference (ref
, true);
358 && TREE_CODE (tem
) == MEM_REF
359 && integer_zerop (TREE_OPERAND (tem
, 1)))
360 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
362 result
= fold_convert (TREE_TYPE (rhs
),
363 build_fold_addr_expr_loc (loc
, tem
));
364 else if (TREE_CODE (ref
) == MEM_REF
365 && integer_zerop (TREE_OPERAND (ref
, 1)))
366 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
370 /* Strip away useless type conversions. Both the
371 NON_LVALUE_EXPR that may have been added by fold, and
372 "useless" type conversions that might now be apparent
373 due to propagation. */
374 STRIP_USELESS_TYPE_CONVERSION (result
);
376 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
381 else if (TREE_CODE (rhs
) == CONSTRUCTOR
382 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
384 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
388 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
389 if (! CONSTANT_CLASS_P (val
))
392 return build_vector_from_ctor (TREE_TYPE (rhs
),
393 CONSTRUCTOR_ELTS (rhs
));
396 else if (DECL_P (rhs
))
397 return get_symbol_constant_value (rhs
);
401 case GIMPLE_UNARY_RHS
:
404 case GIMPLE_BINARY_RHS
:
407 case GIMPLE_TERNARY_RHS
:
408 result
= fold_ternary_loc (loc
, subcode
,
409 TREE_TYPE (gimple_assign_lhs (stmt
)),
410 gimple_assign_rhs1 (stmt
),
411 gimple_assign_rhs2 (stmt
),
412 gimple_assign_rhs3 (stmt
));
416 STRIP_USELESS_TYPE_CONVERSION (result
);
417 if (valid_gimple_rhs_p (result
))
422 case GIMPLE_INVALID_RHS
:
430 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
431 adjusting the replacement stmts location and virtual operands.
432 If the statement has a lhs the last stmt in the sequence is expected
433 to assign to that lhs. */
436 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
438 gimple
*stmt
= gsi_stmt (*si_p
);
440 if (gimple_has_location (stmt
))
441 annotate_all_with_location (stmts
, gimple_location (stmt
));
443 /* First iterate over the replacement statements backward, assigning
444 virtual operands to their defining statements. */
445 gimple
*laststore
= NULL
;
446 for (gimple_stmt_iterator i
= gsi_last (stmts
);
447 !gsi_end_p (i
); gsi_prev (&i
))
449 gimple
*new_stmt
= gsi_stmt (i
);
450 if ((gimple_assign_single_p (new_stmt
)
451 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
452 || (is_gimple_call (new_stmt
)
453 && (gimple_call_flags (new_stmt
)
454 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
458 vdef
= gimple_vdef (stmt
);
460 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
461 gimple_set_vdef (new_stmt
, vdef
);
462 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
463 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
464 laststore
= new_stmt
;
468 /* Second iterate over the statements forward, assigning virtual
469 operands to their uses. */
470 tree reaching_vuse
= gimple_vuse (stmt
);
471 for (gimple_stmt_iterator i
= gsi_start (stmts
);
472 !gsi_end_p (i
); gsi_next (&i
))
474 gimple
*new_stmt
= gsi_stmt (i
);
475 /* If the new statement possibly has a VUSE, update it with exact SSA
476 name we know will reach this one. */
477 if (gimple_has_mem_ops (new_stmt
))
478 gimple_set_vuse (new_stmt
, reaching_vuse
);
479 gimple_set_modified (new_stmt
, true);
480 if (gimple_vdef (new_stmt
))
481 reaching_vuse
= gimple_vdef (new_stmt
);
484 /* If the new sequence does not do a store release the virtual
485 definition of the original statement. */
487 && reaching_vuse
== gimple_vuse (stmt
))
489 tree vdef
= gimple_vdef (stmt
);
491 && TREE_CODE (vdef
) == SSA_NAME
)
493 unlink_stmt_vdef (stmt
);
494 release_ssa_name (vdef
);
498 /* Finally replace the original statement with the sequence. */
499 gsi_replace_with_seq (si_p
, stmts
, false);
502 /* Convert EXPR into a GIMPLE value suitable for substitution on the
503 RHS of an assignment. Insert the necessary statements before
504 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
505 is replaced. If the call is expected to produces a result, then it
506 is replaced by an assignment of the new RHS to the result variable.
507 If the result is to be ignored, then the call is replaced by a
508 GIMPLE_NOP. A proper VDEF chain is retained by making the first
509 VUSE and the last VDEF of the whole sequence be the same as the replaced
510 statement and using new SSA names for stores in between. */
513 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
516 gimple
*stmt
, *new_stmt
;
517 gimple_stmt_iterator i
;
518 gimple_seq stmts
= NULL
;
520 stmt
= gsi_stmt (*si_p
);
522 gcc_assert (is_gimple_call (stmt
));
524 push_gimplify_context (gimple_in_ssa_p (cfun
));
526 lhs
= gimple_call_lhs (stmt
);
527 if (lhs
== NULL_TREE
)
529 gimplify_and_add (expr
, &stmts
);
530 /* We can end up with folding a memcpy of an empty class assignment
531 which gets optimized away by C++ gimplification. */
532 if (gimple_seq_empty_p (stmts
))
534 pop_gimplify_context (NULL
);
535 if (gimple_in_ssa_p (cfun
))
537 unlink_stmt_vdef (stmt
);
540 gsi_replace (si_p
, gimple_build_nop (), false);
546 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
547 new_stmt
= gimple_build_assign (lhs
, tmp
);
548 i
= gsi_last (stmts
);
549 gsi_insert_after_without_update (&i
, new_stmt
,
550 GSI_CONTINUE_LINKING
);
553 pop_gimplify_context (NULL
);
555 gsi_replace_with_seq_vops (si_p
, stmts
);
559 /* Replace the call at *GSI with the gimple value VAL. */
562 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
564 gimple
*stmt
= gsi_stmt (*gsi
);
565 tree lhs
= gimple_call_lhs (stmt
);
569 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
570 val
= fold_convert (TREE_TYPE (lhs
), val
);
571 repl
= gimple_build_assign (lhs
, val
);
574 repl
= gimple_build_nop ();
575 tree vdef
= gimple_vdef (stmt
);
576 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
578 unlink_stmt_vdef (stmt
);
579 release_ssa_name (vdef
);
581 gsi_replace (gsi
, repl
, false);
584 /* Replace the call at *GSI with the new call REPL and fold that
588 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
590 gimple
*stmt
= gsi_stmt (*gsi
);
591 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
592 gimple_set_location (repl
, gimple_location (stmt
));
593 if (gimple_vdef (stmt
)
594 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
596 gimple_set_vdef (repl
, gimple_vdef (stmt
));
597 gimple_set_vuse (repl
, gimple_vuse (stmt
));
598 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
600 gsi_replace (gsi
, repl
, false);
604 /* Return true if VAR is a VAR_DECL or a component thereof. */
607 var_decl_component_p (tree var
)
610 while (handled_component_p (inner
))
611 inner
= TREE_OPERAND (inner
, 0);
612 return SSA_VAR_P (inner
);
615 /* Fold function call to builtin mem{{,p}cpy,move}. Return
616 false if no simplification can be made.
617 If ENDP is 0, return DEST (like memcpy).
618 If ENDP is 1, return DEST+LEN (like mempcpy).
619 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
620 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
624 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
625 tree dest
, tree src
, int endp
)
627 gimple
*stmt
= gsi_stmt (*gsi
);
628 tree lhs
= gimple_call_lhs (stmt
);
629 tree len
= gimple_call_arg (stmt
, 2);
630 tree destvar
, srcvar
;
631 location_t loc
= gimple_location (stmt
);
633 /* If the LEN parameter is zero, return DEST. */
634 if (integer_zerop (len
))
637 if (gimple_call_lhs (stmt
))
638 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
640 repl
= gimple_build_nop ();
641 tree vdef
= gimple_vdef (stmt
);
642 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
644 unlink_stmt_vdef (stmt
);
645 release_ssa_name (vdef
);
647 gsi_replace (gsi
, repl
, false);
651 /* If SRC and DEST are the same (and not volatile), return
652 DEST{,+LEN,+LEN-1}. */
653 if (operand_equal_p (src
, dest
, 0))
655 unlink_stmt_vdef (stmt
);
656 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
657 release_ssa_name (gimple_vdef (stmt
));
660 gsi_replace (gsi
, gimple_build_nop (), false);
667 tree srctype
, desttype
;
668 unsigned int src_align
, dest_align
;
671 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
672 pointers as wide integer) and also may result in huge function
673 size because of inlined bounds copy. Thus don't inline for
674 functions we want to instrument. */
675 if (flag_check_pointer_bounds
676 && chkp_instrumentable_p (cfun
->decl
)
677 /* Even if data may contain pointers we can inline if copy
678 less than a pointer size. */
679 && (!tree_fits_uhwi_p (len
)
680 || compare_tree_int (len
, POINTER_SIZE_UNITS
) >= 0))
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align
= get_pointer_alignment (src
);
692 dest_align
= get_pointer_alignment (dest
);
693 if (tree_fits_uhwi_p (len
)
694 && compare_tree_int (len
, MOVE_MAX
) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
699 && !c_strlen (src
, 2))
701 unsigned ilen
= tree_to_uhwi (len
);
702 if (pow2p_hwi (ilen
))
704 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
706 && TYPE_MODE (type
) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)
713 || (optab_handler (movmisalign_optab
, TYPE_MODE (type
))
714 != CODE_FOR_nothing
)))
717 tree desttype
= type
;
718 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
719 srctype
= build_aligned_type (type
, src_align
);
720 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
721 tree tem
= fold_const_aggregate_ref (srcmem
);
724 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
725 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
727 && (optab_handler (movmisalign_optab
,
729 == CODE_FOR_nothing
))
734 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
736 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
737 if (gimple_in_ssa_p (cfun
))
738 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
741 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
));
742 gimple_assign_set_lhs (new_stmt
, srcmem
);
743 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
744 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
746 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
747 desttype
= build_aligned_type (type
, dest_align
);
749 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
752 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
753 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
754 if (gimple_vdef (new_stmt
)
755 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
756 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
759 gsi_replace (gsi
, new_stmt
, false);
762 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
771 /* Both DEST and SRC must be pointer types.
772 ??? This is what old code did. Is the testing for pointer types
775 If either SRC is readonly or length is 1, we can use memcpy. */
776 if (!dest_align
|| !src_align
)
778 if (readonly_data_expr (src
)
779 || (tree_fits_uhwi_p (len
)
780 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
781 >= tree_to_uhwi (len
))))
783 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
786 gimple_call_set_fndecl (stmt
, fn
);
787 gimple_call_set_arg (stmt
, 0, dest
);
788 gimple_call_set_arg (stmt
, 1, src
);
793 /* If *src and *dest can't overlap, optimize into memcpy as well. */
794 if (TREE_CODE (src
) == ADDR_EXPR
795 && TREE_CODE (dest
) == ADDR_EXPR
)
797 tree src_base
, dest_base
, fn
;
798 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
799 HOST_WIDE_INT maxsize
;
801 srcvar
= TREE_OPERAND (src
, 0);
802 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
803 if (src_base
== NULL
)
805 destvar
= TREE_OPERAND (dest
, 0);
806 dest_base
= get_addr_base_and_unit_offset (destvar
,
808 if (dest_base
== NULL
)
810 if (tree_fits_uhwi_p (len
))
811 maxsize
= tree_to_uhwi (len
);
814 if (SSA_VAR_P (src_base
)
815 && SSA_VAR_P (dest_base
))
817 if (operand_equal_p (src_base
, dest_base
, 0)
818 && ranges_overlap_p (src_offset
, maxsize
,
819 dest_offset
, maxsize
))
822 else if (TREE_CODE (src_base
) == MEM_REF
823 && TREE_CODE (dest_base
) == MEM_REF
)
825 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
826 TREE_OPERAND (dest_base
, 0), 0))
828 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
829 if (!wi::fits_shwi_p (off
))
831 src_offset
= off
.to_shwi ();
833 off
= mem_ref_offset (dest_base
) + dest_offset
;
834 if (!wi::fits_shwi_p (off
))
836 dest_offset
= off
.to_shwi ();
837 if (ranges_overlap_p (src_offset
, maxsize
,
838 dest_offset
, maxsize
))
844 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
847 gimple_call_set_fndecl (stmt
, fn
);
848 gimple_call_set_arg (stmt
, 0, dest
);
849 gimple_call_set_arg (stmt
, 1, src
);
854 /* If the destination and source do not alias optimize into
856 if ((is_gimple_min_invariant (dest
)
857 || TREE_CODE (dest
) == SSA_NAME
)
858 && (is_gimple_min_invariant (src
)
859 || TREE_CODE (src
) == SSA_NAME
))
862 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
863 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
864 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
867 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
870 gimple_call_set_fndecl (stmt
, fn
);
871 gimple_call_set_arg (stmt
, 0, dest
);
872 gimple_call_set_arg (stmt
, 1, src
);
881 if (!tree_fits_shwi_p (len
))
884 This logic lose for arguments like (type *)malloc (sizeof (type)),
885 since we strip the casts of up to VOID return value from malloc.
886 Perhaps we ought to inherit type from non-VOID argument here? */
889 if (!POINTER_TYPE_P (TREE_TYPE (src
))
890 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
892 /* In the following try to find a type that is most natural to be
893 used for the memcpy source and destination and that allows
894 the most optimization when memcpy is turned into a plain assignment
895 using that type. In theory we could always use a char[len] type
896 but that only gains us that the destination and source possibly
897 no longer will have their address taken. */
898 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
899 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
901 tree tem
= TREE_OPERAND (src
, 0);
903 if (tem
!= TREE_OPERAND (src
, 0))
904 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
906 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
908 tree tem
= TREE_OPERAND (dest
, 0);
910 if (tem
!= TREE_OPERAND (dest
, 0))
911 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
913 srctype
= TREE_TYPE (TREE_TYPE (src
));
914 if (TREE_CODE (srctype
) == ARRAY_TYPE
915 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
917 srctype
= TREE_TYPE (srctype
);
919 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
921 desttype
= TREE_TYPE (TREE_TYPE (dest
));
922 if (TREE_CODE (desttype
) == ARRAY_TYPE
923 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
925 desttype
= TREE_TYPE (desttype
);
927 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
929 if (TREE_ADDRESSABLE (srctype
)
930 || TREE_ADDRESSABLE (desttype
))
933 /* Make sure we are not copying using a floating-point mode or
934 a type whose size possibly does not match its precision. */
935 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
936 || TREE_CODE (desttype
) == BOOLEAN_TYPE
937 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
938 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
939 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
940 || TREE_CODE (srctype
) == BOOLEAN_TYPE
941 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
942 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
950 src_align
= get_pointer_alignment (src
);
951 dest_align
= get_pointer_alignment (dest
);
952 if (dest_align
< TYPE_ALIGN (desttype
)
953 || src_align
< TYPE_ALIGN (srctype
))
957 STRIP_NOPS (destvar
);
958 if (TREE_CODE (destvar
) == ADDR_EXPR
959 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
960 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
961 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
967 if (TREE_CODE (srcvar
) == ADDR_EXPR
968 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
969 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
972 || src_align
>= TYPE_ALIGN (desttype
))
973 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
975 else if (!STRICT_ALIGNMENT
)
977 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
979 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
987 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
990 if (srcvar
== NULL_TREE
)
993 if (src_align
>= TYPE_ALIGN (desttype
))
994 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
997 if (STRICT_ALIGNMENT
)
999 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1001 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1004 else if (destvar
== NULL_TREE
)
1007 if (dest_align
>= TYPE_ALIGN (srctype
))
1008 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1011 if (STRICT_ALIGNMENT
)
1013 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1015 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1020 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1022 tree tem
= fold_const_aggregate_ref (srcvar
);
1025 if (! is_gimple_min_invariant (srcvar
))
1027 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1028 if (gimple_in_ssa_p (cfun
))
1029 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1031 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
));
1032 gimple_assign_set_lhs (new_stmt
, srcvar
);
1033 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1034 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1037 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1038 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1039 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1040 if (gimple_vdef (new_stmt
)
1041 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1042 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1045 gsi_replace (gsi
, new_stmt
, false);
1048 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1052 gimple_seq stmts
= NULL
;
1053 if (endp
== 0 || endp
== 3)
1056 len
= gimple_build (&stmts
, loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1058 if (endp
== 2 || endp
== 1)
1060 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1061 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1062 TREE_TYPE (dest
), dest
, len
);
1065 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1066 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1067 gsi_replace (gsi
, repl
, false);
1071 /* Fold function call to builtin memset or bzero at *GSI setting the
1072 memory of size LEN to VAL. Return whether a simplification was made. */
1075 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1077 gimple
*stmt
= gsi_stmt (*gsi
);
1079 unsigned HOST_WIDE_INT length
, cval
;
1081 /* If the LEN parameter is zero, return DEST. */
1082 if (integer_zerop (len
))
1084 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1088 if (! tree_fits_uhwi_p (len
))
1091 if (TREE_CODE (c
) != INTEGER_CST
)
1094 tree dest
= gimple_call_arg (stmt
, 0);
1096 if (TREE_CODE (var
) != ADDR_EXPR
)
1099 var
= TREE_OPERAND (var
, 0);
1100 if (TREE_THIS_VOLATILE (var
))
1103 etype
= TREE_TYPE (var
);
1104 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1105 etype
= TREE_TYPE (etype
);
1107 if (!INTEGRAL_TYPE_P (etype
)
1108 && !POINTER_TYPE_P (etype
))
1111 if (! var_decl_component_p (var
))
1114 length
= tree_to_uhwi (len
);
1115 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1116 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1119 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1122 if (integer_zerop (c
))
1126 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1129 cval
= TREE_INT_CST_LOW (c
);
1133 cval
|= (cval
<< 31) << 1;
1136 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1137 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1138 gimple_set_vuse (store
, gimple_vuse (stmt
));
1139 tree vdef
= gimple_vdef (stmt
);
1140 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1142 gimple_set_vdef (store
, gimple_vdef (stmt
));
1143 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1145 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1146 if (gimple_call_lhs (stmt
))
1148 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1149 gsi_replace (gsi
, asgn
, false);
1153 gimple_stmt_iterator gsi2
= *gsi
;
1155 gsi_remove (&gsi2
, true);
1162 /* Obtain the minimum and maximum string length or minimum and maximum
1163 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1164 If ARG is an SSA name variable, follow its use-def chains. When
1165 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1166 if we are unable to determine the length or value, return False.
1167 VISITED is a bitmap of visited variables.
1168 TYPE is 0 if string length should be obtained, 1 for maximum string
1169 length and 2 for maximum value ARG can have.
1170 When FUZZY is set and the length of a string cannot be determined,
1171 the function instead considers as the maximum possible length the
1172 size of a character array it may refer to. */
1175 get_range_strlen (tree arg
, tree length
[2], bitmap
*visited
, int type
,
1181 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1182 but MINLEN may be cleared during the execution of the function. */
1183 tree
*minlen
= length
;
1184 tree
*const maxlen
= length
+ 1;
1186 if (TREE_CODE (arg
) != SSA_NAME
)
1188 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1189 if (TREE_CODE (arg
) == ADDR_EXPR
1190 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1191 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1193 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1194 if (TREE_CODE (aop0
) == INDIRECT_REF
1195 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1196 return get_range_strlen (TREE_OPERAND (aop0
, 0),
1197 length
, visited
, type
, fuzzy
);
1203 if (TREE_CODE (val
) != INTEGER_CST
1204 || tree_int_cst_sgn (val
) < 0)
1208 val
= c_strlen (arg
, 1);
1212 if (TREE_CODE (arg
) == ADDR_EXPR
)
1213 return get_range_strlen (TREE_OPERAND (arg
, 0), length
,
1214 visited
, type
, fuzzy
);
1216 if (TREE_CODE (arg
) == COMPONENT_REF
1217 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1))) == ARRAY_TYPE
)
1219 /* Use the type of the member array to determine the upper
1220 bound on the length of the array. This may be overly
1221 optimistic if the array itself isn't NUL-terminated and
1222 the caller relies on the subsequent member to contain
1224 arg
= TREE_OPERAND (arg
, 1);
1225 val
= TYPE_SIZE_UNIT (TREE_TYPE (arg
));
1226 if (!val
|| integer_zerop (val
))
1228 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1230 /* Avoid using the array size as the minimum. */
1241 && TREE_CODE (*minlen
) == INTEGER_CST
1242 && TREE_CODE (val
) == INTEGER_CST
1243 && tree_int_cst_lt (val
, *minlen
))))
1250 if (TREE_CODE (*maxlen
) != INTEGER_CST
1251 || TREE_CODE (val
) != INTEGER_CST
)
1254 if (tree_int_cst_lt (*maxlen
, val
))
1258 else if (simple_cst_equal (val
, *maxlen
) != 1)
1266 /* If ARG is registered for SSA update we cannot look at its defining
1268 if (name_registered_for_update_p (arg
))
1271 /* If we were already here, break the infinite cycle. */
1273 *visited
= BITMAP_ALLOC (NULL
);
1274 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1278 def_stmt
= SSA_NAME_DEF_STMT (var
);
1280 switch (gimple_code (def_stmt
))
1283 /* The RHS of the statement defining VAR must either have a
1284 constant length or come from another SSA_NAME with a constant
1286 if (gimple_assign_single_p (def_stmt
)
1287 || gimple_assign_unary_nop_p (def_stmt
))
1289 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1290 return get_range_strlen (rhs
, length
, visited
, type
, fuzzy
);
1292 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1294 tree op2
= gimple_assign_rhs2 (def_stmt
);
1295 tree op3
= gimple_assign_rhs3 (def_stmt
);
1296 return get_range_strlen (op2
, length
, visited
, type
, fuzzy
)
1297 && get_range_strlen (op3
, length
, visited
, type
, fuzzy
);
1303 /* All the arguments of the PHI node must have the same constant
1307 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1309 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1311 /* If this PHI has itself as an argument, we cannot
1312 determine the string length of this argument. However,
1313 if we can find a constant string length for the other
1314 PHI args then we can still be sure that this is a
1315 constant string length. So be optimistic and just
1316 continue with the next argument. */
1317 if (arg
== gimple_phi_result (def_stmt
))
1320 if (!get_range_strlen (arg
, length
, visited
, type
, fuzzy
))
1323 *maxlen
= build_all_ones_cst (size_type_node
);
1336 /* Determine the minimum and maximum value or string length that ARG
1337 refers to and store each in the first two elements of MINMAXLEN.
1338 For expressions that point to strings of unknown lengths that are
1339 character arrays, use the upper bound of the array as the maximum
1340 length. For example, given an expression like 'x ? array : "xyz"'
1341 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1342 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1346 void get_range_strlen (tree arg
, tree minmaxlen
[2])
1348 bitmap visited
= NULL
;
1350 minmaxlen
[0] = NULL_TREE
;
1351 minmaxlen
[1] = NULL_TREE
;
1353 get_range_strlen (arg
, minmaxlen
, &visited
, 1, true);
1356 BITMAP_FREE (visited
);
1360 get_maxval_strlen (tree arg
, int type
)
1362 bitmap visited
= NULL
;
1363 tree len
[2] = { NULL_TREE
, NULL_TREE
};
1364 if (!get_range_strlen (arg
, len
, &visited
, type
, false))
1367 BITMAP_FREE (visited
);
1373 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1374 If LEN is not NULL, it represents the length of the string to be
1375 copied. Return NULL_TREE if no simplification can be made. */
1378 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1379 tree dest
, tree src
)
1381 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1384 /* If SRC and DEST are the same (and not volatile), return DEST. */
1385 if (operand_equal_p (src
, dest
, 0))
1387 replace_call_with_value (gsi
, dest
);
1391 if (optimize_function_for_size_p (cfun
))
1394 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1398 tree len
= get_maxval_strlen (src
, 0);
1402 len
= fold_convert_loc (loc
, size_type_node
, len
);
1403 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1404 len
= force_gimple_operand_gsi (gsi
, len
, true,
1405 NULL_TREE
, true, GSI_SAME_STMT
);
1406 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1407 replace_call_with_call_and_fold (gsi
, repl
);
1411 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1412 If SLEN is not NULL, it represents the length of the source string.
1413 Return NULL_TREE if no simplification can be made. */
1416 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1417 tree dest
, tree src
, tree len
)
1419 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1422 /* If the LEN parameter is zero, return DEST. */
1423 if (integer_zerop (len
))
1425 replace_call_with_value (gsi
, dest
);
1429 /* We can't compare slen with len as constants below if len is not a
1431 if (TREE_CODE (len
) != INTEGER_CST
)
1434 /* Now, we must be passed a constant src ptr parameter. */
1435 tree slen
= get_maxval_strlen (src
, 0);
1436 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1439 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1441 /* We do not support simplification of this case, though we do
1442 support it when expanding trees into RTL. */
1443 /* FIXME: generate a call to __builtin_memset. */
1444 if (tree_int_cst_lt (slen
, len
))
1447 /* OK transform into builtin memcpy. */
1448 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1452 len
= fold_convert_loc (loc
, size_type_node
, len
);
1453 len
= force_gimple_operand_gsi (gsi
, len
, true,
1454 NULL_TREE
, true, GSI_SAME_STMT
);
1455 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1456 replace_call_with_call_and_fold (gsi
, repl
);
1460 /* Simplify strchr (str, 0) into str + strlen (str).
1461 In general strlen is significantly faster than strchr
1462 due to being a simpler operation. */
1464 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
)
1466 gimple
*stmt
= gsi_stmt (*gsi
);
1467 tree str
= gimple_call_arg (stmt
, 0);
1468 tree c
= gimple_call_arg (stmt
, 1);
1469 location_t loc
= gimple_location (stmt
);
1471 if (optimize_function_for_size_p (cfun
))
1474 if (!integer_zerop (c
) || !gimple_call_lhs (stmt
))
1478 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1483 /* Create newstr = strlen (str). */
1484 gimple_seq stmts
= NULL
;
1485 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
1486 gimple_set_location (new_stmt
, loc
);
1487 if (gimple_in_ssa_p (cfun
))
1488 len
= make_ssa_name (size_type_node
);
1490 len
= create_tmp_reg (size_type_node
);
1491 gimple_call_set_lhs (new_stmt
, len
);
1492 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1494 /* Create (str p+ strlen (str)). */
1495 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1496 POINTER_PLUS_EXPR
, str
, len
);
1497 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1498 gsi_replace_with_seq_vops (gsi
, stmts
);
1499 /* gsi now points at the assignment to the lhs, get a
1500 stmt iterator to the strlen.
1501 ??? We can't use gsi_for_stmt as that doesn't work when the
1502 CFG isn't built yet. */
1503 gimple_stmt_iterator gsi2
= *gsi
;
1509 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1512 Return NULL_TREE if no simplification was possible, otherwise return the
1513 simplified form of the call as a tree.
1515 The simplified form may be a constant or other expression which
1516 computes the same value, but in a more efficient manner (including
1517 calls to other builtin functions).
1519 The call may contain arguments which need to be evaluated, but
1520 which are not useful to determine the result of the call. In
1521 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1522 COMPOUND_EXPR will be an argument which must be evaluated.
1523 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1524 COMPOUND_EXPR in the chain will contain the tree for the simplified
1525 form of the builtin function call. */
1528 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1530 gimple
*stmt
= gsi_stmt (*gsi
);
1531 location_t loc
= gimple_location (stmt
);
1533 const char *p
= c_getstr (src
);
1535 /* If the string length is zero, return the dst parameter. */
1536 if (p
&& *p
== '\0')
1538 replace_call_with_value (gsi
, dst
);
1542 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1545 /* See if we can store by pieces into (dst + strlen(dst)). */
1547 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1548 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1550 if (!strlen_fn
|| !memcpy_fn
)
1553 /* If the length of the source string isn't computable don't
1554 split strcat into strlen and memcpy. */
1555 tree len
= get_maxval_strlen (src
, 0);
1559 /* Create strlen (dst). */
1560 gimple_seq stmts
= NULL
, stmts2
;
1561 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
1562 gimple_set_location (repl
, loc
);
1563 if (gimple_in_ssa_p (cfun
))
1564 newdst
= make_ssa_name (size_type_node
);
1566 newdst
= create_tmp_reg (size_type_node
);
1567 gimple_call_set_lhs (repl
, newdst
);
1568 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1570 /* Create (dst p+ strlen (dst)). */
1571 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1572 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1573 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1575 len
= fold_convert_loc (loc
, size_type_node
, len
);
1576 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1577 build_int_cst (size_type_node
, 1));
1578 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1579 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1581 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1582 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1583 if (gimple_call_lhs (stmt
))
1585 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1586 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1587 gsi_replace_with_seq_vops (gsi
, stmts
);
1588 /* gsi now points at the assignment to the lhs, get a
1589 stmt iterator to the memcpy call.
1590 ??? We can't use gsi_for_stmt as that doesn't work when the
1591 CFG isn't built yet. */
1592 gimple_stmt_iterator gsi2
= *gsi
;
1598 gsi_replace_with_seq_vops (gsi
, stmts
);
1604 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1605 are the arguments to the call. */
1608 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1610 gimple
*stmt
= gsi_stmt (*gsi
);
1611 tree dest
= gimple_call_arg (stmt
, 0);
1612 tree src
= gimple_call_arg (stmt
, 1);
1613 tree size
= gimple_call_arg (stmt
, 2);
1619 /* If the SRC parameter is "", return DEST. */
1620 if (p
&& *p
== '\0')
1622 replace_call_with_value (gsi
, dest
);
1626 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1629 /* If __builtin_strcat_chk is used, assume strcat is available. */
1630 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1634 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
1635 replace_call_with_call_and_fold (gsi
, repl
);
1639 /* Simplify a call to the strncat builtin. */
1642 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
1644 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1645 tree dst
= gimple_call_arg (stmt
, 0);
1646 tree src
= gimple_call_arg (stmt
, 1);
1647 tree len
= gimple_call_arg (stmt
, 2);
1649 const char *p
= c_getstr (src
);
1651 /* If the requested length is zero, or the src parameter string
1652 length is zero, return the dst parameter. */
1653 if (integer_zerop (len
) || (p
&& *p
== '\0'))
1655 replace_call_with_value (gsi
, dst
);
1659 /* If the requested len is greater than or equal to the string
1660 length, call strcat. */
1661 if (TREE_CODE (len
) == INTEGER_CST
&& p
1662 && compare_tree_int (len
, strlen (p
)) >= 0)
1664 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
1666 /* If the replacement _DECL isn't initialized, don't do the
1671 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
1672 replace_call_with_call_and_fold (gsi
, repl
);
1679 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1683 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
1685 gimple
*stmt
= gsi_stmt (*gsi
);
1686 tree dest
= gimple_call_arg (stmt
, 0);
1687 tree src
= gimple_call_arg (stmt
, 1);
1688 tree len
= gimple_call_arg (stmt
, 2);
1689 tree size
= gimple_call_arg (stmt
, 3);
1694 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1695 if ((p
&& *p
== '\0')
1696 || integer_zerop (len
))
1698 replace_call_with_value (gsi
, dest
);
1702 if (! tree_fits_uhwi_p (size
))
1705 if (! integer_all_onesp (size
))
1707 tree src_len
= c_strlen (src
, 1);
1709 && tree_fits_uhwi_p (src_len
)
1710 && tree_fits_uhwi_p (len
)
1711 && ! tree_int_cst_lt (len
, src_len
))
1713 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1714 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
1718 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1719 replace_call_with_call_and_fold (gsi
, repl
);
1725 /* If __builtin_strncat_chk is used, assume strncat is available. */
1726 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
1730 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1731 replace_call_with_call_and_fold (gsi
, repl
);
1735 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1736 to the call. IGNORE is true if the value returned
1737 by the builtin will be ignored. UNLOCKED is true is true if this
1738 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1739 the known length of the string. Return NULL_TREE if no simplification
1743 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1744 tree arg0
, tree arg1
,
1747 gimple
*stmt
= gsi_stmt (*gsi
);
1749 /* If we're using an unlocked function, assume the other unlocked
1750 functions exist explicitly. */
1751 tree
const fn_fputc
= (unlocked
1752 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1753 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1754 tree
const fn_fwrite
= (unlocked
1755 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1756 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1758 /* If the return value is used, don't do the transformation. */
1759 if (gimple_call_lhs (stmt
))
1762 /* Get the length of the string passed to fputs. If the length
1763 can't be determined, punt. */
1764 tree len
= get_maxval_strlen (arg0
, 0);
1766 || TREE_CODE (len
) != INTEGER_CST
)
1769 switch (compare_tree_int (len
, 1))
1771 case -1: /* length is 0, delete the call entirely . */
1772 replace_call_with_value (gsi
, integer_zero_node
);
1775 case 0: /* length is 1, call fputc. */
1777 const char *p
= c_getstr (arg0
);
1783 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
1785 (integer_type_node
, p
[0]), arg1
);
1786 replace_call_with_call_and_fold (gsi
, repl
);
1791 case 1: /* length is greater than 1, call fwrite. */
1793 /* If optimizing for size keep fputs. */
1794 if (optimize_function_for_size_p (cfun
))
1796 /* New argument list transforming fputs(string, stream) to
1797 fwrite(string, 1, len, stream). */
1801 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1802 size_one_node
, len
, arg1
);
1803 replace_call_with_call_and_fold (gsi
, repl
);
1812 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1813 DEST, SRC, LEN, and SIZE are the arguments to the call.
1814 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1815 code of the builtin. If MAXLEN is not NULL, it is maximum length
1816 passed as third argument. */
1819 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1820 tree dest
, tree src
, tree len
, tree size
,
1821 enum built_in_function fcode
)
1823 gimple
*stmt
= gsi_stmt (*gsi
);
1824 location_t loc
= gimple_location (stmt
);
1825 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1828 /* If SRC and DEST are the same (and not volatile), return DEST
1829 (resp. DEST+LEN for __mempcpy_chk). */
1830 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1832 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1834 replace_call_with_value (gsi
, dest
);
1839 gimple_seq stmts
= NULL
;
1840 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1841 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1842 TREE_TYPE (dest
), dest
, len
);
1843 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1844 replace_call_with_value (gsi
, temp
);
1849 if (! tree_fits_uhwi_p (size
))
1852 tree maxlen
= get_maxval_strlen (len
, 2);
1853 if (! integer_all_onesp (size
))
1855 if (! tree_fits_uhwi_p (len
))
1857 /* If LEN is not constant, try MAXLEN too.
1858 For MAXLEN only allow optimizing into non-_ocs function
1859 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1860 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1862 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1864 /* (void) __mempcpy_chk () can be optimized into
1865 (void) __memcpy_chk (). */
1866 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1870 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1871 replace_call_with_call_and_fold (gsi
, repl
);
1880 if (tree_int_cst_lt (size
, maxlen
))
1885 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1886 mem{cpy,pcpy,move,set} is available. */
1889 case BUILT_IN_MEMCPY_CHK
:
1890 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1892 case BUILT_IN_MEMPCPY_CHK
:
1893 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1895 case BUILT_IN_MEMMOVE_CHK
:
1896 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1898 case BUILT_IN_MEMSET_CHK
:
1899 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1908 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1909 replace_call_with_call_and_fold (gsi
, repl
);
1913 /* Fold a call to the __st[rp]cpy_chk builtin.
1914 DEST, SRC, and SIZE are the arguments to the call.
1915 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1916 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1917 strings passed as second argument. */
1920 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1922 tree src
, tree size
,
1923 enum built_in_function fcode
)
1925 gimple
*stmt
= gsi_stmt (*gsi
);
1926 location_t loc
= gimple_location (stmt
);
1927 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1930 /* If SRC and DEST are the same (and not volatile), return DEST. */
1931 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1933 replace_call_with_value (gsi
, dest
);
1937 if (! tree_fits_uhwi_p (size
))
1940 tree maxlen
= get_maxval_strlen (src
, 1);
1941 if (! integer_all_onesp (size
))
1943 len
= c_strlen (src
, 1);
1944 if (! len
|| ! tree_fits_uhwi_p (len
))
1946 /* If LEN is not constant, try MAXLEN too.
1947 For MAXLEN only allow optimizing into non-_ocs function
1948 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1949 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1951 if (fcode
== BUILT_IN_STPCPY_CHK
)
1956 /* If return value of __stpcpy_chk is ignored,
1957 optimize into __strcpy_chk. */
1958 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1962 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1963 replace_call_with_call_and_fold (gsi
, repl
);
1967 if (! len
|| TREE_SIDE_EFFECTS (len
))
1970 /* If c_strlen returned something, but not a constant,
1971 transform __strcpy_chk into __memcpy_chk. */
1972 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1976 gimple_seq stmts
= NULL
;
1977 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
1978 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
1979 build_int_cst (size_type_node
, 1));
1980 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1981 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1982 replace_call_with_call_and_fold (gsi
, repl
);
1989 if (! tree_int_cst_lt (maxlen
, size
))
1993 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1994 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1995 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1999 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2000 replace_call_with_call_and_fold (gsi
, repl
);
2004 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2005 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2006 length passed as third argument. IGNORE is true if return value can be
2007 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2010 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
2011 tree dest
, tree src
,
2012 tree len
, tree size
,
2013 enum built_in_function fcode
)
2015 gimple
*stmt
= gsi_stmt (*gsi
);
2016 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2019 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
2021 /* If return value of __stpncpy_chk is ignored,
2022 optimize into __strncpy_chk. */
2023 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
2026 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2027 replace_call_with_call_and_fold (gsi
, repl
);
2032 if (! tree_fits_uhwi_p (size
))
2035 tree maxlen
= get_maxval_strlen (len
, 2);
2036 if (! integer_all_onesp (size
))
2038 if (! tree_fits_uhwi_p (len
))
2040 /* If LEN is not constant, try MAXLEN too.
2041 For MAXLEN only allow optimizing into non-_ocs function
2042 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2043 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2049 if (tree_int_cst_lt (size
, maxlen
))
2053 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2054 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2055 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2059 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2060 replace_call_with_call_and_fold (gsi
, repl
);
2064 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2065 Return NULL_TREE if no simplification can be made. */
2068 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
2070 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2071 location_t loc
= gimple_location (stmt
);
2072 tree dest
= gimple_call_arg (stmt
, 0);
2073 tree src
= gimple_call_arg (stmt
, 1);
2074 tree fn
, len
, lenp1
;
2076 /* If the result is unused, replace stpcpy with strcpy. */
2077 if (gimple_call_lhs (stmt
) == NULL_TREE
)
2079 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2082 gimple_call_set_fndecl (stmt
, fn
);
2087 len
= c_strlen (src
, 1);
2089 || TREE_CODE (len
) != INTEGER_CST
)
2092 if (optimize_function_for_size_p (cfun
)
2093 /* If length is zero it's small enough. */
2094 && !integer_zerop (len
))
2097 /* If the source has a known length replace stpcpy with memcpy. */
2098 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2102 gimple_seq stmts
= NULL
;
2103 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2104 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
2105 tem
, build_int_cst (size_type_node
, 1));
2106 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2107 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
2108 gimple_set_vuse (repl
, gimple_vuse (stmt
));
2109 gimple_set_vdef (repl
, gimple_vdef (stmt
));
2110 if (gimple_vdef (repl
)
2111 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
2112 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
2113 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
2114 /* Replace the result with dest + len. */
2116 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
2117 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2118 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
2119 POINTER_PLUS_EXPR
, dest
, tem
);
2120 gsi_replace (gsi
, ret
, false);
2121 /* Finally fold the memcpy call. */
2122 gimple_stmt_iterator gsi2
= *gsi
;
2128 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2129 NULL_TREE if a normal call should be emitted rather than expanding
2130 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2131 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2132 passed as second argument. */
2135 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2136 enum built_in_function fcode
)
2138 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2139 tree dest
, size
, len
, fn
, fmt
, flag
;
2140 const char *fmt_str
;
2142 /* Verify the required arguments in the original call. */
2143 if (gimple_call_num_args (stmt
) < 5)
2146 dest
= gimple_call_arg (stmt
, 0);
2147 len
= gimple_call_arg (stmt
, 1);
2148 flag
= gimple_call_arg (stmt
, 2);
2149 size
= gimple_call_arg (stmt
, 3);
2150 fmt
= gimple_call_arg (stmt
, 4);
2152 if (! tree_fits_uhwi_p (size
))
2155 if (! integer_all_onesp (size
))
2157 tree maxlen
= get_maxval_strlen (len
, 2);
2158 if (! tree_fits_uhwi_p (len
))
2160 /* If LEN is not constant, try MAXLEN too.
2161 For MAXLEN only allow optimizing into non-_ocs function
2162 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2163 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2169 if (tree_int_cst_lt (size
, maxlen
))
2173 if (!init_target_chars ())
2176 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2177 or if format doesn't contain % chars or is "%s". */
2178 if (! integer_zerop (flag
))
2180 fmt_str
= c_getstr (fmt
);
2181 if (fmt_str
== NULL
)
2183 if (strchr (fmt_str
, target_percent
) != NULL
2184 && strcmp (fmt_str
, target_percent_s
))
2188 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2190 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2191 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2195 /* Replace the called function and the first 5 argument by 3 retaining
2196 trailing varargs. */
2197 gimple_call_set_fndecl (stmt
, fn
);
2198 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2199 gimple_call_set_arg (stmt
, 0, dest
);
2200 gimple_call_set_arg (stmt
, 1, len
);
2201 gimple_call_set_arg (stmt
, 2, fmt
);
2202 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2203 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2204 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2209 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2210 Return NULL_TREE if a normal call should be emitted rather than
2211 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2212 or BUILT_IN_VSPRINTF_CHK. */
2215 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2216 enum built_in_function fcode
)
2218 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2219 tree dest
, size
, len
, fn
, fmt
, flag
;
2220 const char *fmt_str
;
2221 unsigned nargs
= gimple_call_num_args (stmt
);
2223 /* Verify the required arguments in the original call. */
2226 dest
= gimple_call_arg (stmt
, 0);
2227 flag
= gimple_call_arg (stmt
, 1);
2228 size
= gimple_call_arg (stmt
, 2);
2229 fmt
= gimple_call_arg (stmt
, 3);
2231 if (! tree_fits_uhwi_p (size
))
2236 if (!init_target_chars ())
2239 /* Check whether the format is a literal string constant. */
2240 fmt_str
= c_getstr (fmt
);
2241 if (fmt_str
!= NULL
)
2243 /* If the format doesn't contain % args or %%, we know the size. */
2244 if (strchr (fmt_str
, target_percent
) == 0)
2246 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2247 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2249 /* If the format is "%s" and first ... argument is a string literal,
2250 we know the size too. */
2251 else if (fcode
== BUILT_IN_SPRINTF_CHK
2252 && strcmp (fmt_str
, target_percent_s
) == 0)
2258 arg
= gimple_call_arg (stmt
, 4);
2259 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2261 len
= c_strlen (arg
, 1);
2262 if (! len
|| ! tree_fits_uhwi_p (len
))
2269 if (! integer_all_onesp (size
))
2271 if (! len
|| ! tree_int_cst_lt (len
, size
))
2275 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2276 or if format doesn't contain % chars or is "%s". */
2277 if (! integer_zerop (flag
))
2279 if (fmt_str
== NULL
)
2281 if (strchr (fmt_str
, target_percent
) != NULL
2282 && strcmp (fmt_str
, target_percent_s
))
2286 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2287 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2288 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2292 /* Replace the called function and the first 4 argument by 2 retaining
2293 trailing varargs. */
2294 gimple_call_set_fndecl (stmt
, fn
);
2295 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2296 gimple_call_set_arg (stmt
, 0, dest
);
2297 gimple_call_set_arg (stmt
, 1, fmt
);
2298 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2299 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2300 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2305 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2306 ORIG may be null if this is a 2-argument call. We don't attempt to
2307 simplify calls with more than 3 arguments.
2309 Return NULL_TREE if no simplification was possible, otherwise return the
2310 simplified form of the call as a tree. If IGNORED is true, it means that
2311 the caller does not use the returned value of the function. */
2314 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2316 gimple
*stmt
= gsi_stmt (*gsi
);
2317 tree dest
= gimple_call_arg (stmt
, 0);
2318 tree fmt
= gimple_call_arg (stmt
, 1);
2319 tree orig
= NULL_TREE
;
2320 const char *fmt_str
= NULL
;
2322 /* Verify the required arguments in the original call. We deal with two
2323 types of sprintf() calls: 'sprintf (str, fmt)' and
2324 'sprintf (dest, "%s", orig)'. */
2325 if (gimple_call_num_args (stmt
) > 3)
2328 if (gimple_call_num_args (stmt
) == 3)
2329 orig
= gimple_call_arg (stmt
, 2);
2331 /* Check whether the format is a literal string constant. */
2332 fmt_str
= c_getstr (fmt
);
2333 if (fmt_str
== NULL
)
2336 if (!init_target_chars ())
2339 /* If the format doesn't contain % args or %%, use strcpy. */
2340 if (strchr (fmt_str
, target_percent
) == NULL
)
2342 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2347 /* Don't optimize sprintf (buf, "abc", ptr++). */
2351 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2352 'format' is known to contain no % formats. */
2353 gimple_seq stmts
= NULL
;
2354 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2355 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2356 if (gimple_call_lhs (stmt
))
2358 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2359 build_int_cst (integer_type_node
,
2361 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2362 gsi_replace_with_seq_vops (gsi
, stmts
);
2363 /* gsi now points at the assignment to the lhs, get a
2364 stmt iterator to the memcpy call.
2365 ??? We can't use gsi_for_stmt as that doesn't work when the
2366 CFG isn't built yet. */
2367 gimple_stmt_iterator gsi2
= *gsi
;
2373 gsi_replace_with_seq_vops (gsi
, stmts
);
2379 /* If the format is "%s", use strcpy if the result isn't used. */
2380 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2383 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2388 /* Don't crash on sprintf (str1, "%s"). */
2392 tree orig_len
= NULL_TREE
;
2393 if (gimple_call_lhs (stmt
))
2395 orig_len
= get_maxval_strlen (orig
, 0);
2400 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2401 gimple_seq stmts
= NULL
;
2402 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
2403 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2404 if (gimple_call_lhs (stmt
))
2406 if (!useless_type_conversion_p (integer_type_node
,
2407 TREE_TYPE (orig_len
)))
2408 orig_len
= fold_convert (integer_type_node
, orig_len
);
2409 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2410 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2411 gsi_replace_with_seq_vops (gsi
, stmts
);
2412 /* gsi now points at the assignment to the lhs, get a
2413 stmt iterator to the memcpy call.
2414 ??? We can't use gsi_for_stmt as that doesn't work when the
2415 CFG isn't built yet. */
2416 gimple_stmt_iterator gsi2
= *gsi
;
2422 gsi_replace_with_seq_vops (gsi
, stmts
);
2430 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2431 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2432 attempt to simplify calls with more than 4 arguments.
2434 Return NULL_TREE if no simplification was possible, otherwise return the
2435 simplified form of the call as a tree. If IGNORED is true, it means that
2436 the caller does not use the returned value of the function. */
2439 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
2441 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2442 tree dest
= gimple_call_arg (stmt
, 0);
2443 tree destsize
= gimple_call_arg (stmt
, 1);
2444 tree fmt
= gimple_call_arg (stmt
, 2);
2445 tree orig
= NULL_TREE
;
2446 const char *fmt_str
= NULL
;
2448 if (gimple_call_num_args (stmt
) > 4)
2451 if (gimple_call_num_args (stmt
) == 4)
2452 orig
= gimple_call_arg (stmt
, 3);
2454 if (!tree_fits_uhwi_p (destsize
))
2456 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
2458 /* Check whether the format is a literal string constant. */
2459 fmt_str
= c_getstr (fmt
);
2460 if (fmt_str
== NULL
)
2463 if (!init_target_chars ())
2466 /* If the format doesn't contain % args or %%, use strcpy. */
2467 if (strchr (fmt_str
, target_percent
) == NULL
)
2469 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2473 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2477 /* We could expand this as
2478 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2480 memcpy (str, fmt_with_nul_at_cstm1, cst);
2481 but in the former case that might increase code size
2482 and in the latter case grow .rodata section too much.
2484 size_t len
= strlen (fmt_str
);
2488 gimple_seq stmts
= NULL
;
2489 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2490 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2491 if (gimple_call_lhs (stmt
))
2493 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2494 build_int_cst (integer_type_node
, len
));
2495 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2496 gsi_replace_with_seq_vops (gsi
, stmts
);
2497 /* gsi now points at the assignment to the lhs, get a
2498 stmt iterator to the memcpy call.
2499 ??? We can't use gsi_for_stmt as that doesn't work when the
2500 CFG isn't built yet. */
2501 gimple_stmt_iterator gsi2
= *gsi
;
2507 gsi_replace_with_seq_vops (gsi
, stmts
);
2513 /* If the format is "%s", use strcpy if the result isn't used. */
2514 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2516 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2520 /* Don't crash on snprintf (str1, cst, "%s"). */
2524 tree orig_len
= get_maxval_strlen (orig
, 0);
2525 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
2528 /* We could expand this as
2529 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2531 memcpy (str1, str2_with_nul_at_cstm1, cst);
2532 but in the former case that might increase code size
2533 and in the latter case grow .rodata section too much.
2535 if (compare_tree_int (orig_len
, destlen
) >= 0)
2538 /* Convert snprintf (str1, cst, "%s", str2) into
2539 strcpy (str1, str2) if strlen (str2) < cst. */
2540 gimple_seq stmts
= NULL
;
2541 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
2542 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2543 if (gimple_call_lhs (stmt
))
2545 if (!useless_type_conversion_p (integer_type_node
,
2546 TREE_TYPE (orig_len
)))
2547 orig_len
= fold_convert (integer_type_node
, orig_len
);
2548 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2549 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2550 gsi_replace_with_seq_vops (gsi
, stmts
);
2551 /* gsi now points at the assignment to the lhs, get a
2552 stmt iterator to the memcpy call.
2553 ??? We can't use gsi_for_stmt as that doesn't work when the
2554 CFG isn't built yet. */
2555 gimple_stmt_iterator gsi2
= *gsi
;
2561 gsi_replace_with_seq_vops (gsi
, stmts
);
2569 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2570 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2571 more than 3 arguments, and ARG may be null in the 2-argument case.
2573 Return NULL_TREE if no simplification was possible, otherwise return the
2574 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2575 code of the function to be simplified. */
2578 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
2579 tree fp
, tree fmt
, tree arg
,
2580 enum built_in_function fcode
)
2582 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2583 tree fn_fputc
, fn_fputs
;
2584 const char *fmt_str
= NULL
;
2586 /* If the return value is used, don't do the transformation. */
2587 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2590 /* Check whether the format is a literal string constant. */
2591 fmt_str
= c_getstr (fmt
);
2592 if (fmt_str
== NULL
)
2595 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
2597 /* If we're using an unlocked function, assume the other
2598 unlocked functions exist explicitly. */
2599 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
2600 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
2604 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
2605 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
2608 if (!init_target_chars ())
2611 /* If the format doesn't contain % args or %%, use strcpy. */
2612 if (strchr (fmt_str
, target_percent
) == NULL
)
2614 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
2618 /* If the format specifier was "", fprintf does nothing. */
2619 if (fmt_str
[0] == '\0')
2621 replace_call_with_value (gsi
, NULL_TREE
);
2625 /* When "string" doesn't contain %, replace all cases of
2626 fprintf (fp, string) with fputs (string, fp). The fputs
2627 builtin will take care of special cases like length == 1. */
2630 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
2631 replace_call_with_call_and_fold (gsi
, repl
);
2636 /* The other optimizations can be done only on the non-va_list variants. */
2637 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
2640 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2641 else if (strcmp (fmt_str
, target_percent_s
) == 0)
2643 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2647 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
2648 replace_call_with_call_and_fold (gsi
, repl
);
2653 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2654 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2657 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
2661 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
2662 replace_call_with_call_and_fold (gsi
, repl
);
2670 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2671 FMT and ARG are the arguments to the call; we don't fold cases with
2672 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2674 Return NULL_TREE if no simplification was possible, otherwise return the
2675 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2676 code of the function to be simplified. */
2679 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
2680 tree arg
, enum built_in_function fcode
)
2682 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2683 tree fn_putchar
, fn_puts
, newarg
;
2684 const char *fmt_str
= NULL
;
2686 /* If the return value is used, don't do the transformation. */
2687 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2690 /* Check whether the format is a literal string constant. */
2691 fmt_str
= c_getstr (fmt
);
2692 if (fmt_str
== NULL
)
2695 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
2697 /* If we're using an unlocked function, assume the other
2698 unlocked functions exist explicitly. */
2699 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
2700 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
2704 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
2705 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
2708 if (!init_target_chars ())
2711 if (strcmp (fmt_str
, target_percent_s
) == 0
2712 || strchr (fmt_str
, target_percent
) == NULL
)
2716 if (strcmp (fmt_str
, target_percent_s
) == 0)
2718 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2721 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2724 str
= c_getstr (arg
);
2730 /* The format specifier doesn't contain any '%' characters. */
2731 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
2737 /* If the string was "", printf does nothing. */
2740 replace_call_with_value (gsi
, NULL_TREE
);
2744 /* If the string has length of 1, call putchar. */
2747 /* Given printf("c"), (where c is any one character,)
2748 convert "c"[0] to an int and pass that to the replacement
2750 newarg
= build_int_cst (integer_type_node
, str
[0]);
2753 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
2754 replace_call_with_call_and_fold (gsi
, repl
);
2760 /* If the string was "string\n", call puts("string"). */
2761 size_t len
= strlen (str
);
2762 if ((unsigned char)str
[len
- 1] == target_newline
2763 && (size_t) (int) len
== len
2767 tree offset_node
, string_cst
;
2769 /* Create a NUL-terminated string that's one char shorter
2770 than the original, stripping off the trailing '\n'. */
2771 newarg
= build_string_literal (len
, str
);
2772 string_cst
= string_constant (newarg
, &offset_node
);
2773 gcc_checking_assert (string_cst
2774 && (TREE_STRING_LENGTH (string_cst
)
2776 && integer_zerop (offset_node
)
2778 TREE_STRING_POINTER (string_cst
)[len
- 1]
2780 /* build_string_literal creates a new STRING_CST,
2781 modify it in place to avoid double copying. */
2782 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
2783 newstr
[len
- 1] = '\0';
2786 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
2787 replace_call_with_call_and_fold (gsi
, repl
);
2792 /* We'd like to arrange to call fputs(string,stdout) here,
2793 but we need stdout and don't have a way to get it yet. */
2798 /* The other optimizations can be done only on the non-va_list variants. */
2799 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2802 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2803 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
2805 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2809 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
2810 replace_call_with_call_and_fold (gsi
, repl
);
2815 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2816 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2818 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
2823 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
2824 replace_call_with_call_and_fold (gsi
, repl
);
2834 /* Fold a call to __builtin_strlen with known length LEN. */
2837 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
2839 gimple
*stmt
= gsi_stmt (*gsi
);
2840 tree len
= get_maxval_strlen (gimple_call_arg (stmt
, 0), 0);
2843 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL
, true, GSI_SAME_STMT
);
2844 replace_call_with_value (gsi
, len
);
2848 /* Fold a call to __builtin_acc_on_device. */
2851 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
2853 /* Defer folding until we know which compiler we're in. */
2854 if (symtab
->state
!= EXPANSION
)
2857 unsigned val_host
= GOMP_DEVICE_HOST
;
2858 unsigned val_dev
= GOMP_DEVICE_NONE
;
2860 #ifdef ACCEL_COMPILER
2861 val_host
= GOMP_DEVICE_NOT_HOST
;
2862 val_dev
= ACCEL_COMPILER_acc_device
;
2865 location_t loc
= gimple_location (gsi_stmt (*gsi
));
2867 tree host_eq
= make_ssa_name (boolean_type_node
);
2868 gimple
*host_ass
= gimple_build_assign
2869 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
2870 gimple_set_location (host_ass
, loc
);
2871 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
2873 tree dev_eq
= make_ssa_name (boolean_type_node
);
2874 gimple
*dev_ass
= gimple_build_assign
2875 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
2876 gimple_set_location (dev_ass
, loc
);
2877 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
2879 tree result
= make_ssa_name (boolean_type_node
);
2880 gimple
*result_ass
= gimple_build_assign
2881 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
2882 gimple_set_location (result_ass
, loc
);
2883 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
2885 replace_call_with_value (gsi
, result
);
2890 /* Fold the non-target builtin at *GSI and return whether any simplification
2894 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
2896 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
2897 tree callee
= gimple_call_fndecl (stmt
);
2899 /* Give up for always_inline inline builtins until they are
2901 if (avoid_folding_inline_builtin (callee
))
2904 unsigned n
= gimple_call_num_args (stmt
);
2905 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2908 case BUILT_IN_BZERO
:
2909 return gimple_fold_builtin_memset (gsi
, integer_zero_node
,
2910 gimple_call_arg (stmt
, 1));
2911 case BUILT_IN_MEMSET
:
2912 return gimple_fold_builtin_memset (gsi
,
2913 gimple_call_arg (stmt
, 1),
2914 gimple_call_arg (stmt
, 2));
2915 case BUILT_IN_BCOPY
:
2916 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 1),
2917 gimple_call_arg (stmt
, 0), 3);
2918 case BUILT_IN_MEMCPY
:
2919 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2920 gimple_call_arg (stmt
, 1), 0);
2921 case BUILT_IN_MEMPCPY
:
2922 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2923 gimple_call_arg (stmt
, 1), 1);
2924 case BUILT_IN_MEMMOVE
:
2925 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2926 gimple_call_arg (stmt
, 1), 3);
2927 case BUILT_IN_SPRINTF_CHK
:
2928 case BUILT_IN_VSPRINTF_CHK
:
2929 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
2930 case BUILT_IN_STRCAT_CHK
:
2931 return gimple_fold_builtin_strcat_chk (gsi
);
2932 case BUILT_IN_STRNCAT_CHK
:
2933 return gimple_fold_builtin_strncat_chk (gsi
);
2934 case BUILT_IN_STRLEN
:
2935 return gimple_fold_builtin_strlen (gsi
);
2936 case BUILT_IN_STRCPY
:
2937 return gimple_fold_builtin_strcpy (gsi
,
2938 gimple_call_arg (stmt
, 0),
2939 gimple_call_arg (stmt
, 1));
2940 case BUILT_IN_STRNCPY
:
2941 return gimple_fold_builtin_strncpy (gsi
,
2942 gimple_call_arg (stmt
, 0),
2943 gimple_call_arg (stmt
, 1),
2944 gimple_call_arg (stmt
, 2));
2945 case BUILT_IN_STRCAT
:
2946 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
2947 gimple_call_arg (stmt
, 1));
2948 case BUILT_IN_STRNCAT
:
2949 return gimple_fold_builtin_strncat (gsi
);
2950 case BUILT_IN_STRCHR
:
2951 return gimple_fold_builtin_strchr (gsi
);
2952 case BUILT_IN_FPUTS
:
2953 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2954 gimple_call_arg (stmt
, 1), false);
2955 case BUILT_IN_FPUTS_UNLOCKED
:
2956 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2957 gimple_call_arg (stmt
, 1), true);
2958 case BUILT_IN_MEMCPY_CHK
:
2959 case BUILT_IN_MEMPCPY_CHK
:
2960 case BUILT_IN_MEMMOVE_CHK
:
2961 case BUILT_IN_MEMSET_CHK
:
2962 return gimple_fold_builtin_memory_chk (gsi
,
2963 gimple_call_arg (stmt
, 0),
2964 gimple_call_arg (stmt
, 1),
2965 gimple_call_arg (stmt
, 2),
2966 gimple_call_arg (stmt
, 3),
2968 case BUILT_IN_STPCPY
:
2969 return gimple_fold_builtin_stpcpy (gsi
);
2970 case BUILT_IN_STRCPY_CHK
:
2971 case BUILT_IN_STPCPY_CHK
:
2972 return gimple_fold_builtin_stxcpy_chk (gsi
,
2973 gimple_call_arg (stmt
, 0),
2974 gimple_call_arg (stmt
, 1),
2975 gimple_call_arg (stmt
, 2),
2977 case BUILT_IN_STRNCPY_CHK
:
2978 case BUILT_IN_STPNCPY_CHK
:
2979 return gimple_fold_builtin_stxncpy_chk (gsi
,
2980 gimple_call_arg (stmt
, 0),
2981 gimple_call_arg (stmt
, 1),
2982 gimple_call_arg (stmt
, 2),
2983 gimple_call_arg (stmt
, 3),
2985 case BUILT_IN_SNPRINTF_CHK
:
2986 case BUILT_IN_VSNPRINTF_CHK
:
2987 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
2988 case BUILT_IN_SNPRINTF
:
2989 return gimple_fold_builtin_snprintf (gsi
);
2990 case BUILT_IN_SPRINTF
:
2991 return gimple_fold_builtin_sprintf (gsi
);
2992 case BUILT_IN_FPRINTF
:
2993 case BUILT_IN_FPRINTF_UNLOCKED
:
2994 case BUILT_IN_VFPRINTF
:
2995 if (n
== 2 || n
== 3)
2996 return gimple_fold_builtin_fprintf (gsi
,
2997 gimple_call_arg (stmt
, 0),
2998 gimple_call_arg (stmt
, 1),
3000 ? gimple_call_arg (stmt
, 2)
3004 case BUILT_IN_FPRINTF_CHK
:
3005 case BUILT_IN_VFPRINTF_CHK
:
3006 if (n
== 3 || n
== 4)
3007 return gimple_fold_builtin_fprintf (gsi
,
3008 gimple_call_arg (stmt
, 0),
3009 gimple_call_arg (stmt
, 2),
3011 ? gimple_call_arg (stmt
, 3)
3015 case BUILT_IN_PRINTF
:
3016 case BUILT_IN_PRINTF_UNLOCKED
:
3017 case BUILT_IN_VPRINTF
:
3018 if (n
== 1 || n
== 2)
3019 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
3021 ? gimple_call_arg (stmt
, 1)
3022 : NULL_TREE
, fcode
);
3024 case BUILT_IN_PRINTF_CHK
:
3025 case BUILT_IN_VPRINTF_CHK
:
3026 if (n
== 2 || n
== 3)
3027 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
3029 ? gimple_call_arg (stmt
, 2)
3030 : NULL_TREE
, fcode
);
3032 case BUILT_IN_ACC_ON_DEVICE
:
3033 return gimple_fold_builtin_acc_on_device (gsi
,
3034 gimple_call_arg (stmt
, 0));
3038 /* Try the generic builtin folder. */
3039 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
3040 tree result
= fold_call_stmt (stmt
, ignore
);
3044 STRIP_NOPS (result
);
3046 result
= fold_convert (gimple_call_return_type (stmt
), result
);
3047 if (!update_call_from_tree (gsi
, result
))
3048 gimplify_and_update_call_from_tree (gsi
, result
);
3055 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3056 function calls to constants, where possible. */
3059 fold_internal_goacc_dim (const gimple
*call
)
3061 int axis
= get_oacc_ifn_dim_arg (call
);
3062 int size
= get_oacc_fn_dim_size (current_function_decl
, axis
);
3063 bool is_pos
= gimple_call_internal_fn (call
) == IFN_GOACC_DIM_POS
;
3064 tree result
= NULL_TREE
;
3066 /* If the size is 1, or we only want the size and it is not dynamic,
3067 we know the answer. */
3068 if (size
== 1 || (!is_pos
&& size
))
3070 tree type
= TREE_TYPE (gimple_call_lhs (call
));
3071 result
= build_int_cst (type
, size
- is_pos
);
3077 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3078 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3079 &var where var is only addressable because of such calls. */
3082 optimize_atomic_compare_exchange_p (gimple
*stmt
)
3084 if (gimple_call_num_args (stmt
) != 6
3085 || !flag_inline_atomics
3087 || (flag_sanitize
& (SANITIZE_THREAD
| SANITIZE_ADDRESS
)) != 0
3088 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
3089 || !gimple_vdef (stmt
)
3090 || !gimple_vuse (stmt
))
3093 tree fndecl
= gimple_call_fndecl (stmt
);
3094 switch (DECL_FUNCTION_CODE (fndecl
))
3096 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
3097 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
3098 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
3099 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
3100 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
3106 tree expected
= gimple_call_arg (stmt
, 1);
3107 if (TREE_CODE (expected
) != ADDR_EXPR
3108 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
3111 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
3112 if (!is_gimple_reg_type (etype
)
3113 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
3114 || TREE_THIS_VOLATILE (etype
)
3115 || VECTOR_TYPE_P (etype
)
3116 || TREE_CODE (etype
) == COMPLEX_TYPE
3117 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3118 might not preserve all the bits. See PR71716. */
3119 || SCALAR_FLOAT_TYPE_P (etype
)
3120 || TYPE_PRECISION (etype
) != GET_MODE_BITSIZE (TYPE_MODE (etype
)))
3123 tree weak
= gimple_call_arg (stmt
, 3);
3124 if (!integer_zerop (weak
) && !integer_onep (weak
))
3127 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3128 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
3129 machine_mode mode
= TYPE_MODE (itype
);
3131 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
3133 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
3136 if (int_size_in_bytes (etype
) != GET_MODE_SIZE (mode
))
3143 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3145 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3146 i = IMAGPART_EXPR <t>;
3148 e = REALPART_EXPR <t>; */
3151 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
3153 gimple
*stmt
= gsi_stmt (*gsi
);
3154 tree fndecl
= gimple_call_fndecl (stmt
);
3155 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3156 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
3157 tree ctype
= build_complex_type (itype
);
3158 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
3159 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
3161 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
3162 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
3163 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
3165 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
3166 build1 (VIEW_CONVERT_EXPR
, itype
,
3167 gimple_assign_lhs (g
)));
3168 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
3170 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
3171 + int_size_in_bytes (itype
);
3172 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
3173 gimple_call_arg (stmt
, 0),
3174 gimple_assign_lhs (g
),
3175 gimple_call_arg (stmt
, 2),
3176 build_int_cst (integer_type_node
, flag
),
3177 gimple_call_arg (stmt
, 4),
3178 gimple_call_arg (stmt
, 5));
3179 tree lhs
= make_ssa_name (ctype
);
3180 gimple_call_set_lhs (g
, lhs
);
3181 gimple_set_vdef (g
, gimple_vdef (stmt
));
3182 gimple_set_vuse (g
, gimple_vuse (stmt
));
3183 SSA_NAME_DEF_STMT (gimple_vdef (g
)) = g
;
3184 if (gimple_call_lhs (stmt
))
3186 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
3187 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
3188 build1 (IMAGPART_EXPR
, itype
, lhs
));
3189 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
3190 g
= gimple_build_assign (gimple_call_lhs (stmt
), NOP_EXPR
,
3191 gimple_assign_lhs (g
));
3193 gsi_replace (gsi
, g
, true);
3194 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
3195 build1 (REALPART_EXPR
, itype
, lhs
));
3196 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3197 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
3199 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
3201 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
3202 gimple_assign_lhs (g
)));
3203 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3205 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
3206 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
3210 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3211 doesn't fit into TYPE. The test for overflow should be regardless of
3212 -fwrapv, and even for unsigned types. */
3215 arith_overflowed_p (enum tree_code code
, const_tree type
,
3216 const_tree arg0
, const_tree arg1
)
3218 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION
* 2) widest2_int
;
3219 typedef generic_wide_int
<wi::extended_tree
<WIDE_INT_MAX_PRECISION
* 2> >
3221 widest2_int warg0
= widest2_int_cst (arg0
);
3222 widest2_int warg1
= widest2_int_cst (arg1
);
3226 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
3227 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
3228 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
3229 default: gcc_unreachable ();
3231 signop sign
= TYPE_SIGN (type
);
3232 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
3234 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
3237 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3238 The statement may be replaced by another statement, e.g., if the call
3239 simplifies to a constant value. Return true if any changes were made.
3240 It is assumed that the operands have been previously folded. */
3243 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
3245 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3247 bool changed
= false;
3250 /* Fold *& in call arguments. */
3251 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3252 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
3254 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
3257 gimple_call_set_arg (stmt
, i
, tmp
);
3262 /* Check for virtual calls that became direct calls. */
3263 callee
= gimple_call_fn (stmt
);
3264 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
3266 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
3268 if (dump_file
&& virtual_method_call_p (callee
)
3269 && !possible_polymorphic_call_target_p
3270 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
3271 (OBJ_TYPE_REF_EXPR (callee
)))))
3274 "Type inheritance inconsistent devirtualization of ");
3275 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
3276 fprintf (dump_file
, " to ");
3277 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
3278 fprintf (dump_file
, "\n");
3281 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
3284 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
3287 vec
<cgraph_node
*>targets
3288 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
3289 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
3291 tree lhs
= gimple_call_lhs (stmt
);
3292 if (dump_enabled_p ())
3294 location_t loc
= gimple_location_safe (stmt
);
3295 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
3296 "folding virtual function call to %s\n",
3297 targets
.length () == 1
3298 ? targets
[0]->name ()
3299 : "__builtin_unreachable");
3301 if (targets
.length () == 1)
3303 tree fndecl
= targets
[0]->decl
;
3304 gimple_call_set_fndecl (stmt
, fndecl
);
3306 /* If changing the call to __cxa_pure_virtual
3307 or similar noreturn function, adjust gimple_call_fntype
3309 if (gimple_call_noreturn_p (stmt
)
3310 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
3311 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
3312 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
3314 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
3315 /* If the call becomes noreturn, remove the lhs. */
3317 && gimple_call_noreturn_p (stmt
)
3318 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
3319 || should_remove_lhs_p (lhs
)))
3321 if (TREE_CODE (lhs
) == SSA_NAME
)
3323 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3324 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3325 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
3326 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
3328 gimple_call_set_lhs (stmt
, NULL_TREE
);
3330 maybe_remove_unused_call_args (cfun
, stmt
);
3334 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3335 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
3336 gimple_set_location (new_stmt
, gimple_location (stmt
));
3337 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3339 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3340 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3342 /* To satisfy condition for
3343 cgraph_update_edges_for_call_stmt_node,
3344 we need to preserve GIMPLE_CALL statement
3345 at position of GSI iterator. */
3346 update_call_from_tree (gsi
, def
);
3347 gsi_insert_before (gsi
, new_stmt
, GSI_NEW_STMT
);
3351 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
3352 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
3353 gsi_replace (gsi
, new_stmt
, false);
3361 /* Check for indirect calls that became direct calls, and then
3362 no longer require a static chain. */
3363 if (gimple_call_chain (stmt
))
3365 tree fn
= gimple_call_fndecl (stmt
);
3366 if (fn
&& !DECL_STATIC_CHAIN (fn
))
3368 gimple_call_set_chain (stmt
, NULL
);
3373 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
3376 gimple_call_set_chain (stmt
, tmp
);
3385 /* Check for builtins that CCP can handle using information not
3386 available in the generic fold routines. */
3387 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
3389 if (gimple_fold_builtin (gsi
))
3392 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
3394 changed
|= targetm
.gimple_fold_builtin (gsi
);
3396 else if (gimple_call_internal_p (stmt
))
3398 enum tree_code subcode
= ERROR_MARK
;
3399 tree result
= NULL_TREE
;
3400 bool cplx_result
= false;
3401 tree overflow
= NULL_TREE
;
3402 switch (gimple_call_internal_fn (stmt
))
3404 case IFN_BUILTIN_EXPECT
:
3405 result
= fold_builtin_expect (gimple_location (stmt
),
3406 gimple_call_arg (stmt
, 0),
3407 gimple_call_arg (stmt
, 1),
3408 gimple_call_arg (stmt
, 2));
3410 case IFN_UBSAN_OBJECT_SIZE
:
3411 if (integer_all_onesp (gimple_call_arg (stmt
, 2))
3412 || (TREE_CODE (gimple_call_arg (stmt
, 1)) == INTEGER_CST
3413 && TREE_CODE (gimple_call_arg (stmt
, 2)) == INTEGER_CST
3414 && tree_int_cst_le (gimple_call_arg (stmt
, 1),
3415 gimple_call_arg (stmt
, 2))))
3417 gsi_replace (gsi
, gimple_build_nop (), false);
3418 unlink_stmt_vdef (stmt
);
3419 release_defs (stmt
);
3423 case IFN_GOACC_DIM_SIZE
:
3424 case IFN_GOACC_DIM_POS
:
3425 result
= fold_internal_goacc_dim (stmt
);
3427 case IFN_UBSAN_CHECK_ADD
:
3428 subcode
= PLUS_EXPR
;
3430 case IFN_UBSAN_CHECK_SUB
:
3431 subcode
= MINUS_EXPR
;
3433 case IFN_UBSAN_CHECK_MUL
:
3434 subcode
= MULT_EXPR
;
3436 case IFN_ADD_OVERFLOW
:
3437 subcode
= PLUS_EXPR
;
3440 case IFN_SUB_OVERFLOW
:
3441 subcode
= MINUS_EXPR
;
3444 case IFN_MUL_OVERFLOW
:
3445 subcode
= MULT_EXPR
;
3451 if (subcode
!= ERROR_MARK
)
3453 tree arg0
= gimple_call_arg (stmt
, 0);
3454 tree arg1
= gimple_call_arg (stmt
, 1);
3455 tree type
= TREE_TYPE (arg0
);
3458 tree lhs
= gimple_call_lhs (stmt
);
3459 if (lhs
== NULL_TREE
)
3462 type
= TREE_TYPE (TREE_TYPE (lhs
));
3464 if (type
== NULL_TREE
)
3466 /* x = y + 0; x = y - 0; x = y * 0; */
3467 else if (integer_zerop (arg1
))
3468 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
3469 /* x = 0 + y; x = 0 * y; */
3470 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
3471 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
3473 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
3474 result
= integer_zero_node
;
3475 /* x = y * 1; x = 1 * y; */
3476 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
3478 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
3480 else if (TREE_CODE (arg0
) == INTEGER_CST
3481 && TREE_CODE (arg1
) == INTEGER_CST
)
3484 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
3485 fold_convert (type
, arg1
));
3487 result
= int_const_binop (subcode
, arg0
, arg1
);
3488 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
3491 overflow
= build_one_cst (type
);
3498 if (result
== integer_zero_node
)
3499 result
= build_zero_cst (type
);
3500 else if (cplx_result
&& TREE_TYPE (result
) != type
)
3502 if (TREE_CODE (result
) == INTEGER_CST
)
3504 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
3506 overflow
= build_one_cst (type
);
3508 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
3509 && TYPE_UNSIGNED (type
))
3510 || (TYPE_PRECISION (type
)
3511 < (TYPE_PRECISION (TREE_TYPE (result
))
3512 + (TYPE_UNSIGNED (TREE_TYPE (result
))
3513 && !TYPE_UNSIGNED (type
)))))
3516 result
= fold_convert (type
, result
);
3523 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
3524 result
= drop_tree_overflow (result
);
3527 if (overflow
== NULL_TREE
)
3528 overflow
= build_zero_cst (TREE_TYPE (result
));
3529 tree ctype
= build_complex_type (TREE_TYPE (result
));
3530 if (TREE_CODE (result
) == INTEGER_CST
3531 && TREE_CODE (overflow
) == INTEGER_CST
)
3532 result
= build_complex (ctype
, result
, overflow
);
3534 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
3535 ctype
, result
, overflow
);
3537 if (!update_call_from_tree (gsi
, result
))
3538 gimplify_and_update_call_from_tree (gsi
, result
);
3547 /* Return true whether NAME has a use on STMT. */
3550 has_use_on_stmt (tree name
, gimple
*stmt
)
3552 imm_use_iterator iter
;
3553 use_operand_p use_p
;
3554 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
3555 if (USE_STMT (use_p
) == stmt
)
3560 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3563 Replaces *GSI with the simplification result in RCODE and OPS
3564 and the associated statements in *SEQ. Does the replacement
3565 according to INPLACE and returns true if the operation succeeded. */
3568 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
3569 code_helper rcode
, tree
*ops
,
3570 gimple_seq
*seq
, bool inplace
)
3572 gimple
*stmt
= gsi_stmt (*gsi
);
3574 /* Play safe and do not allow abnormals to be mentioned in
3575 newly created statements. See also maybe_push_res_to_seq.
3576 As an exception allow such uses if there was a use of the
3577 same SSA name on the old stmt. */
3578 if ((TREE_CODE (ops
[0]) == SSA_NAME
3579 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0])
3580 && !has_use_on_stmt (ops
[0], stmt
))
3582 && TREE_CODE (ops
[1]) == SSA_NAME
3583 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1])
3584 && !has_use_on_stmt (ops
[1], stmt
))
3586 && TREE_CODE (ops
[2]) == SSA_NAME
3587 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])
3588 && !has_use_on_stmt (ops
[2], stmt
))
3589 || (COMPARISON_CLASS_P (ops
[0])
3590 && ((TREE_CODE (TREE_OPERAND (ops
[0], 0)) == SSA_NAME
3591 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], 0))
3592 && !has_use_on_stmt (TREE_OPERAND (ops
[0], 0), stmt
))
3593 || (TREE_CODE (TREE_OPERAND (ops
[0], 1)) == SSA_NAME
3594 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], 1))
3595 && !has_use_on_stmt (TREE_OPERAND (ops
[0], 1), stmt
)))))
3598 /* Don't insert new statements when INPLACE is true, even if we could
3599 reuse STMT for the final statement. */
3600 if (inplace
&& !gimple_seq_empty_p (*seq
))
3603 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
3605 gcc_assert (rcode
.is_tree_code ());
3606 if (TREE_CODE_CLASS ((enum tree_code
)rcode
) == tcc_comparison
3607 /* GIMPLE_CONDs condition may not throw. */
3608 && (!flag_exceptions
3609 || !cfun
->can_throw_non_call_exceptions
3610 || !operation_could_trap_p (rcode
,
3611 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
3613 gimple_cond_set_condition (cond_stmt
, rcode
, ops
[0], ops
[1]);
3614 else if (rcode
== SSA_NAME
)
3615 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
3616 build_zero_cst (TREE_TYPE (ops
[0])));
3617 else if (rcode
== INTEGER_CST
)
3619 if (integer_zerop (ops
[0]))
3620 gimple_cond_make_false (cond_stmt
);
3622 gimple_cond_make_true (cond_stmt
);
3626 tree res
= maybe_push_res_to_seq (rcode
, boolean_type_node
,
3630 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
3631 build_zero_cst (TREE_TYPE (res
)));
3635 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3637 fprintf (dump_file
, "gimple_simplified to ");
3638 if (!gimple_seq_empty_p (*seq
))
3639 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3640 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3643 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3646 else if (is_gimple_assign (stmt
)
3647 && rcode
.is_tree_code ())
3650 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (rcode
))
3652 maybe_build_generic_op (rcode
,
3653 TREE_TYPE (gimple_assign_lhs (stmt
)), ops
);
3654 gimple_assign_set_rhs_with_ops (gsi
, rcode
, ops
[0], ops
[1], ops
[2]);
3655 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3657 fprintf (dump_file
, "gimple_simplified to ");
3658 if (!gimple_seq_empty_p (*seq
))
3659 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3660 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3663 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3667 else if (rcode
.is_fn_code ()
3668 && gimple_call_combined_fn (stmt
) == rcode
)
3671 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3673 gcc_assert (ops
[i
] != NULL_TREE
);
3674 gimple_call_set_arg (stmt
, i
, ops
[i
]);
3677 gcc_assert (ops
[i
] == NULL_TREE
);
3678 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3680 fprintf (dump_file
, "gimple_simplified to ");
3681 if (!gimple_seq_empty_p (*seq
))
3682 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3683 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
3685 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3690 if (gimple_has_lhs (stmt
))
3692 tree lhs
= gimple_get_lhs (stmt
);
3693 if (!maybe_push_res_to_seq (rcode
, TREE_TYPE (lhs
),
3696 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3698 fprintf (dump_file
, "gimple_simplified to ");
3699 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3701 gsi_replace_with_seq_vops (gsi
, *seq
);
3711 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3714 maybe_canonicalize_mem_ref_addr (tree
*t
)
3718 if (TREE_CODE (*t
) == ADDR_EXPR
)
3719 t
= &TREE_OPERAND (*t
, 0);
3721 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3722 generic vector extension. The actual vector referenced is
3723 view-converted to an array type for this purpose. If the index
3724 is constant the canonical representation in the middle-end is a
3725 BIT_FIELD_REF so re-write the former to the latter here. */
3726 if (TREE_CODE (*t
) == ARRAY_REF
3727 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
3728 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
3729 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
3731 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
3732 if (VECTOR_TYPE_P (vtype
))
3734 tree low
= array_ref_low_bound (*t
);
3735 if (TREE_CODE (low
) == INTEGER_CST
)
3737 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
3739 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
3740 wi::to_widest (low
));
3741 idx
= wi::mul (idx
, wi::to_widest
3742 (TYPE_SIZE (TREE_TYPE (*t
))));
3744 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
3745 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
3747 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
3749 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
3750 TYPE_SIZE (TREE_TYPE (*t
)),
3751 wide_int_to_tree (sizetype
, idx
));
3759 while (handled_component_p (*t
))
3760 t
= &TREE_OPERAND (*t
, 0);
3762 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3763 of invariant addresses into a SSA name MEM_REF address. */
3764 if (TREE_CODE (*t
) == MEM_REF
3765 || TREE_CODE (*t
) == TARGET_MEM_REF
)
3767 tree addr
= TREE_OPERAND (*t
, 0);
3768 if (TREE_CODE (addr
) == ADDR_EXPR
3769 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
3770 || handled_component_p (TREE_OPERAND (addr
, 0))))
3773 HOST_WIDE_INT coffset
;
3774 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
3779 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
3780 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
3781 TREE_OPERAND (*t
, 1),
3782 size_int (coffset
));
3785 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
3786 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
3789 /* Canonicalize back MEM_REFs to plain reference trees if the object
3790 accessed is a decl that has the same access semantics as the MEM_REF. */
3791 if (TREE_CODE (*t
) == MEM_REF
3792 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
3793 && integer_zerop (TREE_OPERAND (*t
, 1))
3794 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
3796 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3797 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
3798 if (/* Same volatile qualification. */
3799 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
3800 /* Same TBAA behavior with -fstrict-aliasing. */
3801 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
3802 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
3803 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
3804 /* Same alignment. */
3805 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
3806 /* We have to look out here to not drop a required conversion
3807 from the rhs to the lhs if *t appears on the lhs or vice-versa
3808 if it appears on the rhs. Thus require strict type
3810 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
3812 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3817 /* Canonicalize TARGET_MEM_REF in particular with respect to
3818 the indexes becoming constant. */
3819 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
3821 tree tem
= maybe_fold_tmr (*t
);
3832 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3833 distinguishes both cases. */
3836 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
3838 bool changed
= false;
3839 gimple
*stmt
= gsi_stmt (*gsi
);
3840 bool nowarning
= gimple_no_warning_p (stmt
);
3842 fold_defer_overflow_warnings ();
3844 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3846 ??? This shouldn't be done in generic folding but in the
3847 propagation helpers which also know whether an address was
3849 Also canonicalize operand order. */
3850 switch (gimple_code (stmt
))
3853 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
3855 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
3856 if ((REFERENCE_CLASS_P (*rhs
)
3857 || TREE_CODE (*rhs
) == ADDR_EXPR
)
3858 && maybe_canonicalize_mem_ref_addr (rhs
))
3860 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
3861 if (REFERENCE_CLASS_P (*lhs
)
3862 && maybe_canonicalize_mem_ref_addr (lhs
))
3867 /* Canonicalize operand order. */
3868 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3869 if (TREE_CODE_CLASS (code
) == tcc_comparison
3870 || commutative_tree_code (code
)
3871 || commutative_ternary_tree_code (code
))
3873 tree rhs1
= gimple_assign_rhs1 (stmt
);
3874 tree rhs2
= gimple_assign_rhs2 (stmt
);
3875 if (tree_swap_operands_p (rhs1
, rhs2
, false))
3877 gimple_assign_set_rhs1 (stmt
, rhs2
);
3878 gimple_assign_set_rhs2 (stmt
, rhs1
);
3879 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3880 gimple_assign_set_rhs_code (stmt
,
3881 swap_tree_comparison (code
));
3889 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3891 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
3892 if (REFERENCE_CLASS_P (*arg
)
3893 && maybe_canonicalize_mem_ref_addr (arg
))
3896 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
3898 && REFERENCE_CLASS_P (*lhs
)
3899 && maybe_canonicalize_mem_ref_addr (lhs
))
3905 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3906 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3908 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3909 tree op
= TREE_VALUE (link
);
3910 if (REFERENCE_CLASS_P (op
)
3911 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3914 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3916 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3917 tree op
= TREE_VALUE (link
);
3918 if ((REFERENCE_CLASS_P (op
)
3919 || TREE_CODE (op
) == ADDR_EXPR
)
3920 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3926 if (gimple_debug_bind_p (stmt
))
3928 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
3930 && (REFERENCE_CLASS_P (*val
)
3931 || TREE_CODE (*val
) == ADDR_EXPR
)
3932 && maybe_canonicalize_mem_ref_addr (val
))
3938 /* Canonicalize operand order. */
3939 tree lhs
= gimple_cond_lhs (stmt
);
3940 tree rhs
= gimple_cond_rhs (stmt
);
3941 if (tree_swap_operands_p (lhs
, rhs
, false))
3943 gcond
*gc
= as_a
<gcond
*> (stmt
);
3944 gimple_cond_set_lhs (gc
, rhs
);
3945 gimple_cond_set_rhs (gc
, lhs
);
3946 gimple_cond_set_code (gc
,
3947 swap_tree_comparison (gimple_cond_code (gc
)));
3954 /* Dispatch to pattern-based folding. */
3956 || is_gimple_assign (stmt
)
3957 || gimple_code (stmt
) == GIMPLE_COND
)
3959 gimple_seq seq
= NULL
;
3962 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
3963 valueize
, valueize
))
3965 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
3968 gimple_seq_discard (seq
);
3972 stmt
= gsi_stmt (*gsi
);
3974 /* Fold the main computation performed by the statement. */
3975 switch (gimple_code (stmt
))
3979 /* Try to canonicalize for boolean-typed X the comparisons
3980 X == 0, X == 1, X != 0, and X != 1. */
3981 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
3982 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
3984 tree lhs
= gimple_assign_lhs (stmt
);
3985 tree op1
= gimple_assign_rhs1 (stmt
);
3986 tree op2
= gimple_assign_rhs2 (stmt
);
3987 tree type
= TREE_TYPE (op1
);
3989 /* Check whether the comparison operands are of the same boolean
3990 type as the result type is.
3991 Check that second operand is an integer-constant with value
3993 if (TREE_CODE (op2
) == INTEGER_CST
3994 && (integer_zerop (op2
) || integer_onep (op2
))
3995 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
3997 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
3998 bool is_logical_not
= false;
4000 /* X == 0 and X != 1 is a logical-not.of X
4001 X == 1 and X != 0 is X */
4002 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
4003 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
4004 is_logical_not
= true;
4006 if (is_logical_not
== false)
4007 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
4008 /* Only for one-bit precision typed X the transformation
4009 !X -> ~X is valied. */
4010 else if (TYPE_PRECISION (type
) == 1)
4011 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
4012 /* Otherwise we use !X -> X ^ 1. */
4014 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
4015 build_int_cst (type
, 1));
4021 unsigned old_num_ops
= gimple_num_ops (stmt
);
4022 tree lhs
= gimple_assign_lhs (stmt
);
4023 tree new_rhs
= fold_gimple_assign (gsi
);
4025 && !useless_type_conversion_p (TREE_TYPE (lhs
),
4026 TREE_TYPE (new_rhs
)))
4027 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
4030 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
4032 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
4039 changed
|= gimple_fold_call (gsi
, inplace
);
4043 /* Fold *& in asm operands. */
4045 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4047 const char **oconstraints
;
4048 const char *constraint
;
4049 bool allows_mem
, allows_reg
;
4051 noutputs
= gimple_asm_noutputs (asm_stmt
);
4052 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4054 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4056 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4057 tree op
= TREE_VALUE (link
);
4059 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4060 if (REFERENCE_CLASS_P (op
)
4061 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
4063 TREE_VALUE (link
) = op
;
4067 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4069 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4070 tree op
= TREE_VALUE (link
);
4072 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4073 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
4074 oconstraints
, &allows_mem
, &allows_reg
);
4075 if (REFERENCE_CLASS_P (op
)
4076 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
4079 TREE_VALUE (link
) = op
;
4087 if (gimple_debug_bind_p (stmt
))
4089 tree val
= gimple_debug_bind_get_value (stmt
);
4091 && REFERENCE_CLASS_P (val
))
4093 tree tem
= maybe_fold_reference (val
, false);
4096 gimple_debug_bind_set_value (stmt
, tem
);
4101 && TREE_CODE (val
) == ADDR_EXPR
)
4103 tree ref
= TREE_OPERAND (val
, 0);
4104 tree tem
= maybe_fold_reference (ref
, false);
4107 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
4108 gimple_debug_bind_set_value (stmt
, tem
);
4118 stmt
= gsi_stmt (*gsi
);
4120 /* Fold *& on the lhs. */
4121 if (gimple_has_lhs (stmt
))
4123 tree lhs
= gimple_get_lhs (stmt
);
4124 if (lhs
&& REFERENCE_CLASS_P (lhs
))
4126 tree new_lhs
= maybe_fold_reference (lhs
, true);
4129 gimple_set_lhs (stmt
, new_lhs
);
4135 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
4139 /* Valueziation callback that ends up not following SSA edges. */
4142 no_follow_ssa_edges (tree
)
4147 /* Valueization callback that ends up following single-use SSA edges only. */
4150 follow_single_use_edges (tree val
)
4152 if (TREE_CODE (val
) == SSA_NAME
4153 && !has_single_use (val
))
4158 /* Fold the statement pointed to by GSI. In some cases, this function may
4159 replace the whole statement with a new one. Returns true iff folding
4161 The statement pointed to by GSI should be in valid gimple form but may
4162 be in unfolded state as resulting from for example constant propagation
4163 which can produce *&x = 0. */
4166 fold_stmt (gimple_stmt_iterator
*gsi
)
4168 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
4172 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
4174 return fold_stmt_1 (gsi
, false, valueize
);
4177 /* Perform the minimal folding on statement *GSI. Only operations like
4178 *&x created by constant propagation are handled. The statement cannot
4179 be replaced with a new one. Return true if the statement was
4180 changed, false otherwise.
4181 The statement *GSI should be in valid gimple form but may
4182 be in unfolded state as resulting from for example constant propagation
4183 which can produce *&x = 0. */
4186 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
4188 gimple
*stmt
= gsi_stmt (*gsi
);
4189 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
4190 gcc_assert (gsi_stmt (*gsi
) == stmt
);
4194 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4195 if EXPR is null or we don't know how.
4196 If non-null, the result always has boolean type. */
4199 canonicalize_bool (tree expr
, bool invert
)
4205 if (integer_nonzerop (expr
))
4206 return boolean_false_node
;
4207 else if (integer_zerop (expr
))
4208 return boolean_true_node
;
4209 else if (TREE_CODE (expr
) == SSA_NAME
)
4210 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
4211 build_int_cst (TREE_TYPE (expr
), 0));
4212 else if (COMPARISON_CLASS_P (expr
))
4213 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
4215 TREE_OPERAND (expr
, 0),
4216 TREE_OPERAND (expr
, 1));
4222 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
4224 if (integer_nonzerop (expr
))
4225 return boolean_true_node
;
4226 else if (integer_zerop (expr
))
4227 return boolean_false_node
;
4228 else if (TREE_CODE (expr
) == SSA_NAME
)
4229 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
4230 build_int_cst (TREE_TYPE (expr
), 0));
4231 else if (COMPARISON_CLASS_P (expr
))
4232 return fold_build2 (TREE_CODE (expr
),
4234 TREE_OPERAND (expr
, 0),
4235 TREE_OPERAND (expr
, 1));
4241 /* Check to see if a boolean expression EXPR is logically equivalent to the
4242 comparison (OP1 CODE OP2). Check for various identities involving
4246 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
4247 const_tree op1
, const_tree op2
)
4251 /* The obvious case. */
4252 if (TREE_CODE (expr
) == code
4253 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
4254 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
4257 /* Check for comparing (name, name != 0) and the case where expr
4258 is an SSA_NAME with a definition matching the comparison. */
4259 if (TREE_CODE (expr
) == SSA_NAME
4260 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
4262 if (operand_equal_p (expr
, op1
, 0))
4263 return ((code
== NE_EXPR
&& integer_zerop (op2
))
4264 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
4265 s
= SSA_NAME_DEF_STMT (expr
);
4266 if (is_gimple_assign (s
)
4267 && gimple_assign_rhs_code (s
) == code
4268 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
4269 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
4273 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4274 of name is a comparison, recurse. */
4275 if (TREE_CODE (op1
) == SSA_NAME
4276 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
4278 s
= SSA_NAME_DEF_STMT (op1
);
4279 if (is_gimple_assign (s
)
4280 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
4282 enum tree_code c
= gimple_assign_rhs_code (s
);
4283 if ((c
== NE_EXPR
&& integer_zerop (op2
))
4284 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
4285 return same_bool_comparison_p (expr
, c
,
4286 gimple_assign_rhs1 (s
),
4287 gimple_assign_rhs2 (s
));
4288 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
4289 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
4290 return same_bool_comparison_p (expr
,
4291 invert_tree_comparison (c
, false),
4292 gimple_assign_rhs1 (s
),
4293 gimple_assign_rhs2 (s
));
4299 /* Check to see if two boolean expressions OP1 and OP2 are logically
4303 same_bool_result_p (const_tree op1
, const_tree op2
)
4305 /* Simple cases first. */
4306 if (operand_equal_p (op1
, op2
, 0))
4309 /* Check the cases where at least one of the operands is a comparison.
4310 These are a bit smarter than operand_equal_p in that they apply some
4311 identifies on SSA_NAMEs. */
4312 if (COMPARISON_CLASS_P (op2
)
4313 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
4314 TREE_OPERAND (op2
, 0),
4315 TREE_OPERAND (op2
, 1)))
4317 if (COMPARISON_CLASS_P (op1
)
4318 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
4319 TREE_OPERAND (op1
, 0),
4320 TREE_OPERAND (op1
, 1)))
4327 /* Forward declarations for some mutually recursive functions. */
4330 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4331 enum tree_code code2
, tree op2a
, tree op2b
);
4333 and_var_with_comparison (tree var
, bool invert
,
4334 enum tree_code code2
, tree op2a
, tree op2b
);
4336 and_var_with_comparison_1 (gimple
*stmt
,
4337 enum tree_code code2
, tree op2a
, tree op2b
);
4339 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4340 enum tree_code code2
, tree op2a
, tree op2b
);
4342 or_var_with_comparison (tree var
, bool invert
,
4343 enum tree_code code2
, tree op2a
, tree op2b
);
4345 or_var_with_comparison_1 (gimple
*stmt
,
4346 enum tree_code code2
, tree op2a
, tree op2b
);
4348 /* Helper function for and_comparisons_1: try to simplify the AND of the
4349 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4350 If INVERT is true, invert the value of the VAR before doing the AND.
4351 Return NULL_EXPR if we can't simplify this to a single expression. */
4354 and_var_with_comparison (tree var
, bool invert
,
4355 enum tree_code code2
, tree op2a
, tree op2b
)
4358 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
4360 /* We can only deal with variables whose definitions are assignments. */
4361 if (!is_gimple_assign (stmt
))
4364 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4365 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4366 Then we only have to consider the simpler non-inverted cases. */
4368 t
= or_var_with_comparison_1 (stmt
,
4369 invert_tree_comparison (code2
, false),
4372 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4373 return canonicalize_bool (t
, invert
);
4376 /* Try to simplify the AND of the ssa variable defined by the assignment
4377 STMT with the comparison specified by (OP2A CODE2 OP2B).
4378 Return NULL_EXPR if we can't simplify this to a single expression. */
4381 and_var_with_comparison_1 (gimple
*stmt
,
4382 enum tree_code code2
, tree op2a
, tree op2b
)
4384 tree var
= gimple_assign_lhs (stmt
);
4385 tree true_test_var
= NULL_TREE
;
4386 tree false_test_var
= NULL_TREE
;
4387 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4389 /* Check for identities like (var AND (var == 0)) => false. */
4390 if (TREE_CODE (op2a
) == SSA_NAME
4391 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4393 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4394 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4396 true_test_var
= op2a
;
4397 if (var
== true_test_var
)
4400 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4401 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4403 false_test_var
= op2a
;
4404 if (var
== false_test_var
)
4405 return boolean_false_node
;
4409 /* If the definition is a comparison, recurse on it. */
4410 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4412 tree t
= and_comparisons_1 (innercode
,
4413 gimple_assign_rhs1 (stmt
),
4414 gimple_assign_rhs2 (stmt
),
4422 /* If the definition is an AND or OR expression, we may be able to
4423 simplify by reassociating. */
4424 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4425 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4427 tree inner1
= gimple_assign_rhs1 (stmt
);
4428 tree inner2
= gimple_assign_rhs2 (stmt
);
4431 tree partial
= NULL_TREE
;
4432 bool is_and
= (innercode
== BIT_AND_EXPR
);
4434 /* Check for boolean identities that don't require recursive examination
4436 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4437 inner1 AND (inner1 OR inner2) => inner1
4438 !inner1 AND (inner1 AND inner2) => false
4439 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4440 Likewise for similar cases involving inner2. */
4441 if (inner1
== true_test_var
)
4442 return (is_and
? var
: inner1
);
4443 else if (inner2
== true_test_var
)
4444 return (is_and
? var
: inner2
);
4445 else if (inner1
== false_test_var
)
4447 ? boolean_false_node
4448 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4449 else if (inner2
== false_test_var
)
4451 ? boolean_false_node
4452 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4454 /* Next, redistribute/reassociate the AND across the inner tests.
4455 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4456 if (TREE_CODE (inner1
) == SSA_NAME
4457 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4458 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4459 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4460 gimple_assign_rhs1 (s
),
4461 gimple_assign_rhs2 (s
),
4462 code2
, op2a
, op2b
)))
4464 /* Handle the AND case, where we are reassociating:
4465 (inner1 AND inner2) AND (op2a code2 op2b)
4467 If the partial result t is a constant, we win. Otherwise
4468 continue on to try reassociating with the other inner test. */
4471 if (integer_onep (t
))
4473 else if (integer_zerop (t
))
4474 return boolean_false_node
;
4477 /* Handle the OR case, where we are redistributing:
4478 (inner1 OR inner2) AND (op2a code2 op2b)
4479 => (t OR (inner2 AND (op2a code2 op2b))) */
4480 else if (integer_onep (t
))
4481 return boolean_true_node
;
4483 /* Save partial result for later. */
4487 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4488 if (TREE_CODE (inner2
) == SSA_NAME
4489 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4490 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4491 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4492 gimple_assign_rhs1 (s
),
4493 gimple_assign_rhs2 (s
),
4494 code2
, op2a
, op2b
)))
4496 /* Handle the AND case, where we are reassociating:
4497 (inner1 AND inner2) AND (op2a code2 op2b)
4498 => (inner1 AND t) */
4501 if (integer_onep (t
))
4503 else if (integer_zerop (t
))
4504 return boolean_false_node
;
4505 /* If both are the same, we can apply the identity
4507 else if (partial
&& same_bool_result_p (t
, partial
))
4511 /* Handle the OR case. where we are redistributing:
4512 (inner1 OR inner2) AND (op2a code2 op2b)
4513 => (t OR (inner1 AND (op2a code2 op2b)))
4514 => (t OR partial) */
4517 if (integer_onep (t
))
4518 return boolean_true_node
;
4521 /* We already got a simplification for the other
4522 operand to the redistributed OR expression. The
4523 interesting case is when at least one is false.
4524 Or, if both are the same, we can apply the identity
4526 if (integer_zerop (partial
))
4528 else if (integer_zerop (t
))
4530 else if (same_bool_result_p (t
, partial
))
4539 /* Try to simplify the AND of two comparisons defined by
4540 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4541 If this can be done without constructing an intermediate value,
4542 return the resulting tree; otherwise NULL_TREE is returned.
4543 This function is deliberately asymmetric as it recurses on SSA_DEFs
4544 in the first comparison but not the second. */
4547 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4548 enum tree_code code2
, tree op2a
, tree op2b
)
4550 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4552 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4553 if (operand_equal_p (op1a
, op2a
, 0)
4554 && operand_equal_p (op1b
, op2b
, 0))
4556 /* Result will be either NULL_TREE, or a combined comparison. */
4557 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4558 TRUTH_ANDIF_EXPR
, code1
, code2
,
4559 truth_type
, op1a
, op1b
);
4564 /* Likewise the swapped case of the above. */
4565 if (operand_equal_p (op1a
, op2b
, 0)
4566 && operand_equal_p (op1b
, op2a
, 0))
4568 /* Result will be either NULL_TREE, or a combined comparison. */
4569 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4570 TRUTH_ANDIF_EXPR
, code1
,
4571 swap_tree_comparison (code2
),
4572 truth_type
, op1a
, op1b
);
4577 /* If both comparisons are of the same value against constants, we might
4578 be able to merge them. */
4579 if (operand_equal_p (op1a
, op2a
, 0)
4580 && TREE_CODE (op1b
) == INTEGER_CST
4581 && TREE_CODE (op2b
) == INTEGER_CST
)
4583 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4585 /* If we have (op1a == op1b), we should either be able to
4586 return that or FALSE, depending on whether the constant op1b
4587 also satisfies the other comparison against op2b. */
4588 if (code1
== EQ_EXPR
)
4594 case EQ_EXPR
: val
= (cmp
== 0); break;
4595 case NE_EXPR
: val
= (cmp
!= 0); break;
4596 case LT_EXPR
: val
= (cmp
< 0); break;
4597 case GT_EXPR
: val
= (cmp
> 0); break;
4598 case LE_EXPR
: val
= (cmp
<= 0); break;
4599 case GE_EXPR
: val
= (cmp
>= 0); break;
4600 default: done
= false;
4605 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4607 return boolean_false_node
;
4610 /* Likewise if the second comparison is an == comparison. */
4611 else if (code2
== EQ_EXPR
)
4617 case EQ_EXPR
: val
= (cmp
== 0); break;
4618 case NE_EXPR
: val
= (cmp
!= 0); break;
4619 case LT_EXPR
: val
= (cmp
> 0); break;
4620 case GT_EXPR
: val
= (cmp
< 0); break;
4621 case LE_EXPR
: val
= (cmp
>= 0); break;
4622 case GE_EXPR
: val
= (cmp
<= 0); break;
4623 default: done
= false;
4628 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4630 return boolean_false_node
;
4634 /* Same business with inequality tests. */
4635 else if (code1
== NE_EXPR
)
4640 case EQ_EXPR
: val
= (cmp
!= 0); break;
4641 case NE_EXPR
: val
= (cmp
== 0); break;
4642 case LT_EXPR
: val
= (cmp
>= 0); break;
4643 case GT_EXPR
: val
= (cmp
<= 0); break;
4644 case LE_EXPR
: val
= (cmp
> 0); break;
4645 case GE_EXPR
: val
= (cmp
< 0); break;
4650 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4652 else if (code2
== NE_EXPR
)
4657 case EQ_EXPR
: val
= (cmp
== 0); break;
4658 case NE_EXPR
: val
= (cmp
!= 0); break;
4659 case LT_EXPR
: val
= (cmp
<= 0); break;
4660 case GT_EXPR
: val
= (cmp
>= 0); break;
4661 case LE_EXPR
: val
= (cmp
< 0); break;
4662 case GE_EXPR
: val
= (cmp
> 0); break;
4667 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4670 /* Chose the more restrictive of two < or <= comparisons. */
4671 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4672 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4674 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4675 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4677 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4680 /* Likewise chose the more restrictive of two > or >= comparisons. */
4681 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4682 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4684 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4685 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4687 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4690 /* Check for singleton ranges. */
4692 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
4693 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
4694 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
4696 /* Check for disjoint ranges. */
4698 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4699 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4700 return boolean_false_node
;
4702 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4703 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4704 return boolean_false_node
;
4707 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4708 NAME's definition is a truth value. See if there are any simplifications
4709 that can be done against the NAME's definition. */
4710 if (TREE_CODE (op1a
) == SSA_NAME
4711 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4712 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4714 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4715 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4716 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
4717 switch (gimple_code (stmt
))
4720 /* Try to simplify by copy-propagating the definition. */
4721 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4724 /* If every argument to the PHI produces the same result when
4725 ANDed with the second comparison, we win.
4726 Do not do this unless the type is bool since we need a bool
4727 result here anyway. */
4728 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4730 tree result
= NULL_TREE
;
4732 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4734 tree arg
= gimple_phi_arg_def (stmt
, i
);
4736 /* If this PHI has itself as an argument, ignore it.
4737 If all the other args produce the same result,
4739 if (arg
== gimple_phi_result (stmt
))
4741 else if (TREE_CODE (arg
) == INTEGER_CST
)
4743 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
4746 result
= boolean_false_node
;
4747 else if (!integer_zerop (result
))
4751 result
= fold_build2 (code2
, boolean_type_node
,
4753 else if (!same_bool_comparison_p (result
,
4757 else if (TREE_CODE (arg
) == SSA_NAME
4758 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4761 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
4762 /* In simple cases we can look through PHI nodes,
4763 but we have to be careful with loops.
4765 if (! dom_info_available_p (CDI_DOMINATORS
)
4766 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4767 || dominated_by_p (CDI_DOMINATORS
,
4768 gimple_bb (def_stmt
),
4771 temp
= and_var_with_comparison (arg
, invert
, code2
,
4777 else if (!same_bool_result_p (result
, temp
))
4793 /* Try to simplify the AND of two comparisons, specified by
4794 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4795 If this can be simplified to a single expression (without requiring
4796 introducing more SSA variables to hold intermediate values),
4797 return the resulting tree. Otherwise return NULL_TREE.
4798 If the result expression is non-null, it has boolean type. */
4801 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4802 enum tree_code code2
, tree op2a
, tree op2b
)
4804 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4808 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4811 /* Helper function for or_comparisons_1: try to simplify the OR of the
4812 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4813 If INVERT is true, invert the value of VAR before doing the OR.
4814 Return NULL_EXPR if we can't simplify this to a single expression. */
4817 or_var_with_comparison (tree var
, bool invert
,
4818 enum tree_code code2
, tree op2a
, tree op2b
)
4821 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
4823 /* We can only deal with variables whose definitions are assignments. */
4824 if (!is_gimple_assign (stmt
))
4827 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4828 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4829 Then we only have to consider the simpler non-inverted cases. */
4831 t
= and_var_with_comparison_1 (stmt
,
4832 invert_tree_comparison (code2
, false),
4835 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4836 return canonicalize_bool (t
, invert
);
4839 /* Try to simplify the OR of the ssa variable defined by the assignment
4840 STMT with the comparison specified by (OP2A CODE2 OP2B).
4841 Return NULL_EXPR if we can't simplify this to a single expression. */
4844 or_var_with_comparison_1 (gimple
*stmt
,
4845 enum tree_code code2
, tree op2a
, tree op2b
)
4847 tree var
= gimple_assign_lhs (stmt
);
4848 tree true_test_var
= NULL_TREE
;
4849 tree false_test_var
= NULL_TREE
;
4850 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4852 /* Check for identities like (var OR (var != 0)) => true . */
4853 if (TREE_CODE (op2a
) == SSA_NAME
4854 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4856 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4857 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4859 true_test_var
= op2a
;
4860 if (var
== true_test_var
)
4863 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4864 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4866 false_test_var
= op2a
;
4867 if (var
== false_test_var
)
4868 return boolean_true_node
;
4872 /* If the definition is a comparison, recurse on it. */
4873 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4875 tree t
= or_comparisons_1 (innercode
,
4876 gimple_assign_rhs1 (stmt
),
4877 gimple_assign_rhs2 (stmt
),
4885 /* If the definition is an AND or OR expression, we may be able to
4886 simplify by reassociating. */
4887 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4888 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4890 tree inner1
= gimple_assign_rhs1 (stmt
);
4891 tree inner2
= gimple_assign_rhs2 (stmt
);
4894 tree partial
= NULL_TREE
;
4895 bool is_or
= (innercode
== BIT_IOR_EXPR
);
4897 /* Check for boolean identities that don't require recursive examination
4899 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4900 inner1 OR (inner1 AND inner2) => inner1
4901 !inner1 OR (inner1 OR inner2) => true
4902 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4904 if (inner1
== true_test_var
)
4905 return (is_or
? var
: inner1
);
4906 else if (inner2
== true_test_var
)
4907 return (is_or
? var
: inner2
);
4908 else if (inner1
== false_test_var
)
4911 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4912 else if (inner2
== false_test_var
)
4915 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4917 /* Next, redistribute/reassociate the OR across the inner tests.
4918 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4919 if (TREE_CODE (inner1
) == SSA_NAME
4920 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4921 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4922 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4923 gimple_assign_rhs1 (s
),
4924 gimple_assign_rhs2 (s
),
4925 code2
, op2a
, op2b
)))
4927 /* Handle the OR case, where we are reassociating:
4928 (inner1 OR inner2) OR (op2a code2 op2b)
4930 If the partial result t is a constant, we win. Otherwise
4931 continue on to try reassociating with the other inner test. */
4934 if (integer_onep (t
))
4935 return boolean_true_node
;
4936 else if (integer_zerop (t
))
4940 /* Handle the AND case, where we are redistributing:
4941 (inner1 AND inner2) OR (op2a code2 op2b)
4942 => (t AND (inner2 OR (op2a code op2b))) */
4943 else if (integer_zerop (t
))
4944 return boolean_false_node
;
4946 /* Save partial result for later. */
4950 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4951 if (TREE_CODE (inner2
) == SSA_NAME
4952 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4953 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4954 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4955 gimple_assign_rhs1 (s
),
4956 gimple_assign_rhs2 (s
),
4957 code2
, op2a
, op2b
)))
4959 /* Handle the OR case, where we are reassociating:
4960 (inner1 OR inner2) OR (op2a code2 op2b)
4962 => (t OR partial) */
4965 if (integer_zerop (t
))
4967 else if (integer_onep (t
))
4968 return boolean_true_node
;
4969 /* If both are the same, we can apply the identity
4971 else if (partial
&& same_bool_result_p (t
, partial
))
4975 /* Handle the AND case, where we are redistributing:
4976 (inner1 AND inner2) OR (op2a code2 op2b)
4977 => (t AND (inner1 OR (op2a code2 op2b)))
4978 => (t AND partial) */
4981 if (integer_zerop (t
))
4982 return boolean_false_node
;
4985 /* We already got a simplification for the other
4986 operand to the redistributed AND expression. The
4987 interesting case is when at least one is true.
4988 Or, if both are the same, we can apply the identity
4990 if (integer_onep (partial
))
4992 else if (integer_onep (t
))
4994 else if (same_bool_result_p (t
, partial
))
5003 /* Try to simplify the OR of two comparisons defined by
5004 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5005 If this can be done without constructing an intermediate value,
5006 return the resulting tree; otherwise NULL_TREE is returned.
5007 This function is deliberately asymmetric as it recurses on SSA_DEFs
5008 in the first comparison but not the second. */
5011 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5012 enum tree_code code2
, tree op2a
, tree op2b
)
5014 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
5016 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5017 if (operand_equal_p (op1a
, op2a
, 0)
5018 && operand_equal_p (op1b
, op2b
, 0))
5020 /* Result will be either NULL_TREE, or a combined comparison. */
5021 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5022 TRUTH_ORIF_EXPR
, code1
, code2
,
5023 truth_type
, op1a
, op1b
);
5028 /* Likewise the swapped case of the above. */
5029 if (operand_equal_p (op1a
, op2b
, 0)
5030 && operand_equal_p (op1b
, op2a
, 0))
5032 /* Result will be either NULL_TREE, or a combined comparison. */
5033 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5034 TRUTH_ORIF_EXPR
, code1
,
5035 swap_tree_comparison (code2
),
5036 truth_type
, op1a
, op1b
);
5041 /* If both comparisons are of the same value against constants, we might
5042 be able to merge them. */
5043 if (operand_equal_p (op1a
, op2a
, 0)
5044 && TREE_CODE (op1b
) == INTEGER_CST
5045 && TREE_CODE (op2b
) == INTEGER_CST
)
5047 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5049 /* If we have (op1a != op1b), we should either be able to
5050 return that or TRUE, depending on whether the constant op1b
5051 also satisfies the other comparison against op2b. */
5052 if (code1
== NE_EXPR
)
5058 case EQ_EXPR
: val
= (cmp
== 0); break;
5059 case NE_EXPR
: val
= (cmp
!= 0); break;
5060 case LT_EXPR
: val
= (cmp
< 0); break;
5061 case GT_EXPR
: val
= (cmp
> 0); break;
5062 case LE_EXPR
: val
= (cmp
<= 0); break;
5063 case GE_EXPR
: val
= (cmp
>= 0); break;
5064 default: done
= false;
5069 return boolean_true_node
;
5071 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5074 /* Likewise if the second comparison is a != comparison. */
5075 else if (code2
== NE_EXPR
)
5081 case EQ_EXPR
: val
= (cmp
== 0); break;
5082 case NE_EXPR
: val
= (cmp
!= 0); break;
5083 case LT_EXPR
: val
= (cmp
> 0); break;
5084 case GT_EXPR
: val
= (cmp
< 0); break;
5085 case LE_EXPR
: val
= (cmp
>= 0); break;
5086 case GE_EXPR
: val
= (cmp
<= 0); break;
5087 default: done
= false;
5092 return boolean_true_node
;
5094 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5098 /* See if an equality test is redundant with the other comparison. */
5099 else if (code1
== EQ_EXPR
)
5104 case EQ_EXPR
: val
= (cmp
== 0); break;
5105 case NE_EXPR
: val
= (cmp
!= 0); break;
5106 case LT_EXPR
: val
= (cmp
< 0); break;
5107 case GT_EXPR
: val
= (cmp
> 0); break;
5108 case LE_EXPR
: val
= (cmp
<= 0); break;
5109 case GE_EXPR
: val
= (cmp
>= 0); break;
5114 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5116 else if (code2
== EQ_EXPR
)
5121 case EQ_EXPR
: val
= (cmp
== 0); break;
5122 case NE_EXPR
: val
= (cmp
!= 0); break;
5123 case LT_EXPR
: val
= (cmp
> 0); break;
5124 case GT_EXPR
: val
= (cmp
< 0); break;
5125 case LE_EXPR
: val
= (cmp
>= 0); break;
5126 case GE_EXPR
: val
= (cmp
<= 0); break;
5131 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5134 /* Chose the less restrictive of two < or <= comparisons. */
5135 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
5136 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5138 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
5139 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5141 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5144 /* Likewise chose the less restrictive of two > or >= comparisons. */
5145 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
5146 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5148 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
5149 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5151 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5154 /* Check for singleton ranges. */
5156 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
5157 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
5158 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
5160 /* Check for less/greater pairs that don't restrict the range at all. */
5162 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
5163 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5164 return boolean_true_node
;
5166 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
5167 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5168 return boolean_true_node
;
5171 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5172 NAME's definition is a truth value. See if there are any simplifications
5173 that can be done against the NAME's definition. */
5174 if (TREE_CODE (op1a
) == SSA_NAME
5175 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5176 && (integer_zerop (op1b
) || integer_onep (op1b
)))
5178 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5179 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
5180 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
5181 switch (gimple_code (stmt
))
5184 /* Try to simplify by copy-propagating the definition. */
5185 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
5188 /* If every argument to the PHI produces the same result when
5189 ORed with the second comparison, we win.
5190 Do not do this unless the type is bool since we need a bool
5191 result here anyway. */
5192 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
5194 tree result
= NULL_TREE
;
5196 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
5198 tree arg
= gimple_phi_arg_def (stmt
, i
);
5200 /* If this PHI has itself as an argument, ignore it.
5201 If all the other args produce the same result,
5203 if (arg
== gimple_phi_result (stmt
))
5205 else if (TREE_CODE (arg
) == INTEGER_CST
)
5207 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
5210 result
= boolean_true_node
;
5211 else if (!integer_onep (result
))
5215 result
= fold_build2 (code2
, boolean_type_node
,
5217 else if (!same_bool_comparison_p (result
,
5221 else if (TREE_CODE (arg
) == SSA_NAME
5222 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
5225 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
5226 /* In simple cases we can look through PHI nodes,
5227 but we have to be careful with loops.
5229 if (! dom_info_available_p (CDI_DOMINATORS
)
5230 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
5231 || dominated_by_p (CDI_DOMINATORS
,
5232 gimple_bb (def_stmt
),
5235 temp
= or_var_with_comparison (arg
, invert
, code2
,
5241 else if (!same_bool_result_p (result
, temp
))
5257 /* Try to simplify the OR of two comparisons, specified by
5258 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5259 If this can be simplified to a single expression (without requiring
5260 introducing more SSA variables to hold intermediate values),
5261 return the resulting tree. Otherwise return NULL_TREE.
5262 If the result expression is non-null, it has boolean type. */
5265 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
5266 enum tree_code code2
, tree op2a
, tree op2b
)
5268 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
5272 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
5276 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5278 Either NULL_TREE, a simplified but non-constant or a constant
5281 ??? This should go into a gimple-fold-inline.h file to be eventually
5282 privatized with the single valueize function used in the various TUs
5283 to avoid the indirect function call overhead. */
5286 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
5287 tree (*gvalueize
) (tree
))
5291 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5292 edges if there are intermediate VARYING defs. For this reason
5293 do not follow SSA edges here even though SCCVN can technically
5294 just deal fine with that. */
5295 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
))
5297 tree res
= NULL_TREE
;
5298 if (gimple_simplified_result_is_gimple_val (rcode
, ops
))
5300 else if (mprts_hook
)
5301 res
= mprts_hook (rcode
, gimple_expr_type (stmt
), ops
);
5304 if (dump_file
&& dump_flags
& TDF_DETAILS
)
5306 fprintf (dump_file
, "Match-and-simplified ");
5307 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
5308 fprintf (dump_file
, " to ");
5309 print_generic_expr (dump_file
, res
, 0);
5310 fprintf (dump_file
, "\n");
5316 location_t loc
= gimple_location (stmt
);
5317 switch (gimple_code (stmt
))
5321 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
5323 switch (get_gimple_rhs_class (subcode
))
5325 case GIMPLE_SINGLE_RHS
:
5327 tree rhs
= gimple_assign_rhs1 (stmt
);
5328 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
5330 if (TREE_CODE (rhs
) == SSA_NAME
)
5332 /* If the RHS is an SSA_NAME, return its known constant value,
5334 return (*valueize
) (rhs
);
5336 /* Handle propagating invariant addresses into address
5338 else if (TREE_CODE (rhs
) == ADDR_EXPR
5339 && !is_gimple_min_invariant (rhs
))
5341 HOST_WIDE_INT offset
= 0;
5343 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
5347 && (CONSTANT_CLASS_P (base
)
5348 || decl_address_invariant_p (base
)))
5349 return build_invariant_address (TREE_TYPE (rhs
),
5352 else if (TREE_CODE (rhs
) == CONSTRUCTOR
5353 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
5354 && (CONSTRUCTOR_NELTS (rhs
)
5355 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
5360 vec
= XALLOCAVEC (tree
,
5361 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
5362 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
5364 val
= (*valueize
) (val
);
5365 if (TREE_CODE (val
) == INTEGER_CST
5366 || TREE_CODE (val
) == REAL_CST
5367 || TREE_CODE (val
) == FIXED_CST
)
5373 return build_vector (TREE_TYPE (rhs
), vec
);
5375 if (subcode
== OBJ_TYPE_REF
)
5377 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
5378 /* If callee is constant, we can fold away the wrapper. */
5379 if (is_gimple_min_invariant (val
))
5383 if (kind
== tcc_reference
)
5385 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
5386 || TREE_CODE (rhs
) == REALPART_EXPR
5387 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
5388 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5390 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5391 return fold_unary_loc (EXPR_LOCATION (rhs
),
5393 TREE_TYPE (rhs
), val
);
5395 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
5396 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5398 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5399 return fold_ternary_loc (EXPR_LOCATION (rhs
),
5401 TREE_TYPE (rhs
), val
,
5402 TREE_OPERAND (rhs
, 1),
5403 TREE_OPERAND (rhs
, 2));
5405 else if (TREE_CODE (rhs
) == MEM_REF
5406 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5408 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5409 if (TREE_CODE (val
) == ADDR_EXPR
5410 && is_gimple_min_invariant (val
))
5412 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
5414 TREE_OPERAND (rhs
, 1));
5419 return fold_const_aggregate_ref_1 (rhs
, valueize
);
5421 else if (kind
== tcc_declaration
)
5422 return get_symbol_constant_value (rhs
);
5426 case GIMPLE_UNARY_RHS
:
5429 case GIMPLE_BINARY_RHS
:
5430 /* Translate &x + CST into an invariant form suitable for
5431 further propagation. */
5432 if (subcode
== POINTER_PLUS_EXPR
)
5434 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5435 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5436 if (TREE_CODE (op0
) == ADDR_EXPR
5437 && TREE_CODE (op1
) == INTEGER_CST
)
5439 tree off
= fold_convert (ptr_type_node
, op1
);
5440 return build_fold_addr_expr_loc
5442 fold_build2 (MEM_REF
,
5443 TREE_TYPE (TREE_TYPE (op0
)),
5444 unshare_expr (op0
), off
));
5447 /* Canonicalize bool != 0 and bool == 0 appearing after
5448 valueization. While gimple_simplify handles this
5449 it can get confused by the ~X == 1 -> X == 0 transform
5450 which we cant reduce to a SSA name or a constant
5451 (and we have no way to tell gimple_simplify to not
5452 consider those transforms in the first place). */
5453 else if (subcode
== EQ_EXPR
5454 || subcode
== NE_EXPR
)
5456 tree lhs
= gimple_assign_lhs (stmt
);
5457 tree op0
= gimple_assign_rhs1 (stmt
);
5458 if (useless_type_conversion_p (TREE_TYPE (lhs
),
5461 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5462 op0
= (*valueize
) (op0
);
5463 if (TREE_CODE (op0
) == INTEGER_CST
)
5464 std::swap (op0
, op1
);
5465 if (TREE_CODE (op1
) == INTEGER_CST
5466 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
5467 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
5473 case GIMPLE_TERNARY_RHS
:
5475 /* Handle ternary operators that can appear in GIMPLE form. */
5476 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5477 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5478 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
5479 return fold_ternary_loc (loc
, subcode
,
5480 gimple_expr_type (stmt
), op0
, op1
, op2
);
5491 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
5493 if (gimple_call_internal_p (stmt
))
5495 enum tree_code subcode
= ERROR_MARK
;
5496 switch (gimple_call_internal_fn (stmt
))
5498 case IFN_UBSAN_CHECK_ADD
:
5499 subcode
= PLUS_EXPR
;
5501 case IFN_UBSAN_CHECK_SUB
:
5502 subcode
= MINUS_EXPR
;
5504 case IFN_UBSAN_CHECK_MUL
:
5505 subcode
= MULT_EXPR
;
5507 case IFN_BUILTIN_EXPECT
:
5509 tree arg0
= gimple_call_arg (stmt
, 0);
5510 tree op0
= (*valueize
) (arg0
);
5511 if (TREE_CODE (op0
) == INTEGER_CST
)
5518 tree arg0
= gimple_call_arg (stmt
, 0);
5519 tree arg1
= gimple_call_arg (stmt
, 1);
5520 tree op0
= (*valueize
) (arg0
);
5521 tree op1
= (*valueize
) (arg1
);
5523 if (TREE_CODE (op0
) != INTEGER_CST
5524 || TREE_CODE (op1
) != INTEGER_CST
)
5529 /* x * 0 = 0 * x = 0 without overflow. */
5530 if (integer_zerop (op0
) || integer_zerop (op1
))
5531 return build_zero_cst (TREE_TYPE (arg0
));
5534 /* y - y = 0 without overflow. */
5535 if (operand_equal_p (op0
, op1
, 0))
5536 return build_zero_cst (TREE_TYPE (arg0
));
5543 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
5545 && TREE_CODE (res
) == INTEGER_CST
5546 && !TREE_OVERFLOW (res
))
5551 fn
= (*valueize
) (gimple_call_fn (stmt
));
5552 if (TREE_CODE (fn
) == ADDR_EXPR
5553 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
5554 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
5555 && gimple_builtin_call_types_compatible_p (stmt
,
5556 TREE_OPERAND (fn
, 0)))
5558 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
5561 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5562 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
5563 retval
= fold_builtin_call_array (loc
,
5564 gimple_call_return_type (call_stmt
),
5565 fn
, gimple_call_num_args (stmt
), args
);
5568 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5569 STRIP_NOPS (retval
);
5570 retval
= fold_convert (gimple_call_return_type (call_stmt
),
5583 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5584 Returns NULL_TREE if folding to a constant is not possible, otherwise
5585 returns a constant according to is_gimple_min_invariant. */
5588 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
5590 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
5591 if (res
&& is_gimple_min_invariant (res
))
5597 /* The following set of functions are supposed to fold references using
5598 their constant initializers. */
5600 /* See if we can find constructor defining value of BASE.
5601 When we know the consructor with constant offset (such as
5602 base is array[40] and we do know constructor of array), then
5603 BIT_OFFSET is adjusted accordingly.
5605 As a special case, return error_mark_node when constructor
5606 is not explicitly available, but it is known to be zero
5607 such as 'static const int a;'. */
5609 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
5610 tree (*valueize
)(tree
))
5612 HOST_WIDE_INT bit_offset2
, size
, max_size
;
5615 if (TREE_CODE (base
) == MEM_REF
)
5617 if (!integer_zerop (TREE_OPERAND (base
, 1)))
5619 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
5621 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
5626 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
5627 base
= valueize (TREE_OPERAND (base
, 0));
5628 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
5630 base
= TREE_OPERAND (base
, 0);
5633 && TREE_CODE (base
) == SSA_NAME
)
5634 base
= valueize (base
);
5636 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5637 DECL_INITIAL. If BASE is a nested reference into another
5638 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5639 the inner reference. */
5640 switch (TREE_CODE (base
))
5645 tree init
= ctor_for_folding (base
);
5647 /* Our semantic is exact opposite of ctor_for_folding;
5648 NULL means unknown, while error_mark_node is 0. */
5649 if (init
== error_mark_node
)
5652 return error_mark_node
;
5656 case VIEW_CONVERT_EXPR
:
5657 return get_base_constructor (TREE_OPERAND (base
, 0),
5658 bit_offset
, valueize
);
5662 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
5664 if (max_size
== -1 || size
!= max_size
)
5666 *bit_offset
+= bit_offset2
;
5667 return get_base_constructor (base
, bit_offset
, valueize
);
5673 if (CONSTANT_CLASS_P (base
))
5680 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5681 SIZE to the memory at bit OFFSET. */
5684 fold_array_ctor_reference (tree type
, tree ctor
,
5685 unsigned HOST_WIDE_INT offset
,
5686 unsigned HOST_WIDE_INT size
,
5689 offset_int low_bound
;
5690 offset_int elt_size
;
5691 offset_int access_index
;
5692 tree domain_type
= NULL_TREE
;
5693 HOST_WIDE_INT inner_offset
;
5695 /* Compute low bound and elt size. */
5696 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
5697 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
5698 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
5700 /* Static constructors for variably sized objects makes no sense. */
5701 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
5703 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
5707 /* Static constructors for variably sized objects makes no sense. */
5708 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
5710 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
5712 /* We can handle only constantly sized accesses that are known to not
5713 be larger than size of array element. */
5714 if (!TYPE_SIZE_UNIT (type
)
5715 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
5716 || elt_size
< wi::to_offset (TYPE_SIZE_UNIT (type
))
5720 /* Compute the array index we look for. */
5721 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
5723 access_index
+= low_bound
;
5725 /* And offset within the access. */
5726 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
5728 /* See if the array field is large enough to span whole access. We do not
5729 care to fold accesses spanning multiple array indexes. */
5730 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
5732 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
5733 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
);
5735 /* When memory is not explicitely mentioned in constructor,
5736 it is 0 (or out of range). */
5737 return build_zero_cst (type
);
5740 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5741 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5744 fold_nonarray_ctor_reference (tree type
, tree ctor
,
5745 unsigned HOST_WIDE_INT offset
,
5746 unsigned HOST_WIDE_INT size
,
5749 unsigned HOST_WIDE_INT cnt
;
5752 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
5755 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
5756 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
5757 tree field_size
= DECL_SIZE (cfield
);
5758 offset_int bitoffset
;
5759 offset_int bitoffset_end
, access_end
;
5761 /* Variable sized objects in static constructors makes no sense,
5762 but field_size can be NULL for flexible array members. */
5763 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
5764 && TREE_CODE (byte_offset
) == INTEGER_CST
5765 && (field_size
!= NULL_TREE
5766 ? TREE_CODE (field_size
) == INTEGER_CST
5767 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
5769 /* Compute bit offset of the field. */
5770 bitoffset
= (wi::to_offset (field_offset
)
5771 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
5772 /* Compute bit offset where the field ends. */
5773 if (field_size
!= NULL_TREE
)
5774 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
5778 access_end
= offset_int (offset
) + size
;
5780 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5781 [BITOFFSET, BITOFFSET_END)? */
5782 if (wi::cmps (access_end
, bitoffset
) > 0
5783 && (field_size
== NULL_TREE
5784 || wi::lts_p (offset
, bitoffset_end
)))
5786 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
5787 /* We do have overlap. Now see if field is large enough to
5788 cover the access. Give up for accesses spanning multiple
5790 if (wi::cmps (access_end
, bitoffset_end
) > 0)
5792 if (offset
< bitoffset
)
5794 return fold_ctor_reference (type
, cval
,
5795 inner_offset
.to_uhwi (), size
,
5799 /* When memory is not explicitely mentioned in constructor, it is 0. */
5800 return build_zero_cst (type
);
5803 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5804 to the memory at bit OFFSET. */
5807 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
5808 unsigned HOST_WIDE_INT size
, tree from_decl
)
/* NOTE(review): this chunk is a lossy extraction — some statements between
   the visible fragments are missing from this view.  The visible strategy
   is: exact-type match, scalar VIEW_CONVERT, native encode/interpret for
   byte-aligned constant reads, then CONSTRUCTOR walks.  */
5812 /* We found the field with exact match. */
5813 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
5815 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5817 /* We are at the end of walk, see if we can view convert the
result.  */
5819 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
5820 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5821 && !compare_tree_int (TYPE_SIZE (type
), size
)
5822 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
5824 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5825 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
5827 STRIP_USELESS_TYPE_CONVERSION (ret
);
5830 /* For constants and byte-aligned/sized reads try to go through
5831 native_encode/interpret. */
5832 if (CONSTANT_CLASS_P (ctor
)
5833 && BITS_PER_UNIT
== 8
5834 && offset
% BITS_PER_UNIT
== 0
5835 && size
% BITS_PER_UNIT
== 0
5836 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
5838 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
5839 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
5840 offset
/ BITS_PER_UNIT
);
5842 return native_interpret_expr (type
, buf
, len
);
5844 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
/* Arrays and vectors use positional element lookup; other aggregates
   need the field-by-field walk.  */
5847 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
5848 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
5849 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
5852 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
5859 /* Return the tree representing the element referenced by T if T is an
5860 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
5861 names using VALUEIZE. Return NULL_TREE otherwise. */
5864 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
5866 tree ctor
, idx
, base
;
5867 HOST_WIDE_INT offset
, size
, max_size
;
/* Volatile references must not be folded away.  */
5871 if (TREE_THIS_VOLATILE (t
))
5875 return get_symbol_constant_value (t
);
5877 tem
= fold_read_from_constant_string (t
);
5881 switch (TREE_CODE (t
))
5884 case ARRAY_RANGE_REF
:
5885 /* Constant indexes are handled well by get_base_constructor.
5886 Only special case variable offsets.
5887 FIXME: This code can't handle nested references with variable indexes
5888 (they will be handled only by iteration of ccp). Perhaps we can bring
5889 get_ref_base_and_extent here and make it use a valueize callback. */
5890 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
5892 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
5893 && TREE_CODE (idx
) == INTEGER_CST
)
5895 tree low_bound
, unit_size
;
5897 /* If the resulting bit-offset is constant, track it. */
5898 if ((low_bound
= array_ref_low_bound (t
),
5899 TREE_CODE (low_bound
) == INTEGER_CST
)
5900 && (unit_size
= array_ref_element_size (t
),
5901 tree_fits_uhwi_p (unit_size
)))
5904 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
5905 TYPE_PRECISION (TREE_TYPE (idx
)));
5907 if (wi::fits_shwi_p (woffset
))
5909 offset
= woffset
.to_shwi ();
5910 /* TODO: This code seems wrong, multiply then check
5911 to see if it fits. */
5912 offset
*= tree_to_uhwi (unit_size
);
5913 offset
*= BITS_PER_UNIT
;
5915 base
= TREE_OPERAND (t
, 0);
5916 ctor
= get_base_constructor (base
, &offset
, valueize
);
5917 /* Empty constructor. Always fold to 0. */
5918 if (ctor
== error_mark_node
)
5919 return build_zero_cst (TREE_TYPE (t
));
5920 /* Out of bound array access. Value is undefined,
but don't fold.  */
5924 /* We can not determine ctor. */
5927 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
5928 tree_to_uhwi (unit_size
)
5938 case TARGET_MEM_REF
:
5940 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
5941 ctor
= get_base_constructor (base
, &offset
, valueize
);
5943 /* Empty constructor. Always fold to 0. */
5944 if (ctor
== error_mark_node
)
5945 return build_zero_cst (TREE_TYPE (t
));
5946 /* We do not know precise address. */
5947 if (max_size
== -1 || max_size
!= size
)
5949 /* We can not determine ctor. */
5953 /* Out of bound array access. Value is undefined, but don't fold. */
5957 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
/* Recurse on the operand for REALPART/IMAGPART-style cases and rebuild
   the same code on a COMPLEX_CST result.  */
5963 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
5964 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
5965 return fold_build1_loc (EXPR_LOCATION (t
),
5966 TREE_CODE (t
), TREE_TYPE (t
), c
);
/* Fold a constant aggregate reference T without SSA valueization;
   convenience wrapper around fold_const_aggregate_ref_1.  */
5978 fold_const_aggregate_ref (tree t
)
5980 return fold_const_aggregate_ref_1 (t
, NULL
);
5983 /* Lookup virtual method with index TOKEN in a virtual table V
5985 Set CAN_REFER if non-NULL to false if method
5986 is not referable or if the virtual table is ill-formed (such as rewritten
5987 by non-C++ produced symbol). Otherwise just return NULL in that case. */
5990 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
5992 unsigned HOST_WIDE_INT offset
,
5995 tree vtable
= v
, init
, fn
;
5996 unsigned HOST_WIDE_INT size
;
5997 unsigned HOST_WIDE_INT elt_size
, access_index
;
6003 /* First of all double check we have virtual table. */
6004 if (TREE_CODE (v
) != VAR_DECL
6005 || !DECL_VIRTUAL_P (v
))
6007 /* Pass down that we lost track of the target. */
6013 init
= ctor_for_folding (v
);
6015 /* The virtual tables should always be born with constructors
6016 and we always should assume that they are available for
6017 folding. At the moment we do not stream them in all cases,
6018 but it should never happen that ctor seem unreachable. */
6020 if (init
== error_mark_node
)
6022 gcc_assert (in_lto_p
);
6023 /* Pass down that we lost track of the target. */
6028 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
/* Convert OFFSET (bytes) plus TOKEN slots into a bit offset into the
   vtable array.  */
6029 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
6030 offset
*= BITS_PER_UNIT
;
6031 offset
+= token
* size
;
6033 /* Lookup the value in the constructor that is assumed to be array.
6034 This is equivalent to
6035 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6036 offset, size, NULL);
6037 but in a constant time. We expect that frontend produced a simple
6038 array without indexed initializers. */
6040 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
6041 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
6042 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
6043 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
6045 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
6046 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
6048 /* This code makes an assumption that there are no
6049 indexed fields produced by C++ FE, so we can directly index the array. */
6050 if (access_index
< CONSTRUCTOR_NELTS (init
))
6052 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
6053 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
6059 /* For type inconsistent program we may end up looking up virtual method
6060 in virtual table that does not contain TOKEN entries. We may overrun
6061 the virtual table and pick up a constant or RTTI info pointer.
6062 In any case the call is undefined. */
6064 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
6065 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
6066 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
6069 fn
= TREE_OPERAND (fn
, 0);
6071 /* When cgraph node is missing and function is not public, we cannot
6072 devirtualize. This can happen in WHOPR when the actual method
6073 ends up in other partition, because we found devirtualization
6074 possibility too late. */
6075 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
6086 /* Make sure we create a cgraph node for functions we'll reference.
6087 They can be non-existent if the reference comes from an entry
6088 of an external vtable for example. */
6089 cgraph_node::get_create (fn
);
6094 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6095 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6096 KNOWN_BINFO carries the binfo describing the true type of
6097 OBJ_TYPE_REF_OBJECT(REF).
6098 Set CAN_REFER if non-NULL to false if method
6099 is not referable or if the virtual table is ill-formed (such as rewritten
6100 by non-C++ produced symbol). Otherwise just return NULL in that case. */
6103 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
6106 unsigned HOST_WIDE_INT offset
;
6109 v
= BINFO_VTABLE (known_binfo
);
6110 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* Decompose the vtable pointer expression into the underlying vtable
   VAR_DECL and a byte offset before doing the table lookup.  */
6114 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
6120 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
6123 /* Given a pointer value OP0, return a simplified version of an
6124 indirection through OP0, or NULL_TREE if no simplification is
6125 possible. Note that the resulting type may be different from
6126 the type pointed to in the sense that it is still compatible
6127 from the langhooks point of view. */
6130 gimple_fold_indirect_ref (tree t
)
/* Each pattern below matches a specific shape of the dereferenced
   operand: &object, pointer + constant, or pointer-to-array.  */
6132 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
6137 subtype
= TREE_TYPE (sub
);
6138 if (!POINTER_TYPE_P (subtype
))
6141 if (TREE_CODE (sub
) == ADDR_EXPR
)
6143 tree op
= TREE_OPERAND (sub
, 0);
6144 tree optype
= TREE_TYPE (op
);
6146 if (useless_type_conversion_p (type
, optype
))
6149 /* *(foo *)&fooarray => fooarray[0] */
6150 if (TREE_CODE (optype
) == ARRAY_TYPE
6151 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
6152 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
6154 tree type_domain
= TYPE_DOMAIN (optype
);
6155 tree min_val
= size_zero_node
;
6156 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
6157 min_val
= TYPE_MIN_VALUE (type_domain
);
6158 if (TREE_CODE (min_val
) == INTEGER_CST
)
6159 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
6161 /* *(foo *)&complexfoo => __real__ complexfoo */
6162 else if (TREE_CODE (optype
) == COMPLEX_TYPE
6163 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
6164 return fold_build1 (REALPART_EXPR
, type
, op
);
6165 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6166 else if (TREE_CODE (optype
) == VECTOR_TYPE
6167 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
6169 tree part_width
= TYPE_SIZE (type
);
6170 tree index
= bitsize_int (0);
6171 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
6175 /* *(p + CST) -> ... */
6176 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
6177 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
6179 tree addr
= TREE_OPERAND (sub
, 0);
6180 tree off
= TREE_OPERAND (sub
, 1);
6184 addrtype
= TREE_TYPE (addr
);
6186 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6187 if (TREE_CODE (addr
) == ADDR_EXPR
6188 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
6189 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
6190 && tree_fits_uhwi_p (off
))
6192 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
6193 tree part_width
= TYPE_SIZE (type
);
6194 unsigned HOST_WIDE_INT part_widthi
6195 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
6196 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
6197 tree index
= bitsize_int (indexi
);
/* Only fold when the constant offset stays inside the vector.  */
6198 if (offset
/ part_widthi
6199 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
6200 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
6204 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6205 if (TREE_CODE (addr
) == ADDR_EXPR
6206 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
6207 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
6209 tree size
= TYPE_SIZE_UNIT (type
);
6210 if (tree_int_cst_equal (size
, off
))
6211 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
6214 /* *(p + CST) -> MEM_REF <p, CST>. */
6215 if (TREE_CODE (addr
) != ADDR_EXPR
6216 || DECL_P (TREE_OPERAND (addr
, 0)))
6217 return fold_build2 (MEM_REF
, type
,
6219 wide_int_to_tree (ptype
, off
));
6222 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6223 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
6224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
6225 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
6228 tree min_val
= size_zero_node
;
/* Try to simplify the inner dereference recursively; otherwise build
   an explicit INDIRECT_REF of the original operand.  */
6230 sub
= gimple_fold_indirect_ref (sub
);
6232 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
6233 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
6234 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
6235 min_val
= TYPE_MIN_VALUE (type_domain
);
6236 if (TREE_CODE (min_val
) == INTEGER_CST
)
6237 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
6243 /* Return true if CODE is an operation that when operating on signed
6244 integer types involves undefined behavior on overflow and the
6245 operation can be expressed with unsigned arithmetic. */
6248 arith_code_with_undefined_signed_overflow (tree_code code
)
/* NOTE(review): only the POINTER_PLUS_EXPR case is visible in this view;
   the remaining cases of the switch are not shown here.  */
6256 case POINTER_PLUS_EXPR
:
6263 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6264 operation that can be transformed to unsigned arithmetic by converting
6265 its operand, carrying out the operation in the corresponding unsigned
6266 type and converting the result back to the original type.
6268 Returns a sequence of statements that replace STMT and also contain
6269 a modified form of STMT itself. */
6272 rewrite_to_defined_overflow (gimple
*stmt
)
6274 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6276 fprintf (dump_file
, "rewriting stmt with undefined signed "
6278 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
6281 tree lhs
= gimple_assign_lhs (stmt
);
6282 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
6283 gimple_seq stmts
= NULL
;
/* Convert every RHS operand (ops 1..n-1) to the unsigned type,
   accumulating any conversion statements in STMTS.  */
6284 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
6286 tree op
= gimple_op (stmt
, i
);
6287 op
= gimple_convert (&stmts
, type
, op
);
6288 gimple_set_op (stmt
, i
, op
);
6290 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
/* Pointer arithmetic becomes plain unsigned addition.  */
6291 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
6292 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
)
;
6293 gimple_seq_add_stmt (&stmts
, stmt
);
/* Convert the unsigned result back to the original LHS type.  */
6294 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
6295 gimple_seq_add_stmt (&stmts
, cvt
);
6301 /* The valueization hook we use for the gimple_build API simplification.
6302 This makes us match fold_buildN behavior by only combining with
6303 statements in the sequence(s) we are currently building. */
6306 gimple_build_valueize (tree op
)
/* SSA names whose defining statement has no basic block yet belong to a
   sequence still being built and so may be combined with.  */
6308 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
6313 /* Build the expression CODE OP0 of type TYPE with location LOC,
6314 simplifying it first if possible. Returns the built
6315 expression value and appends statements possibly defining it
to SEQ.  */
6319 gimple_build (gimple_seq
*seq
, location_t loc
,
6320 enum tree_code code
, tree type
, tree op0
)
6322 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
6325 if (gimple_in_ssa_p (cfun
))
6326 res
= make_ssa_name (type
);
6328 res
= create_tmp_reg (type
);
/* These three codes wrap their operand in a tree rather than being
   plain unary gimple assignments.  */
6330 if (code
== REALPART_EXPR
6331 || code
== IMAGPART_EXPR
6332 || code
== VIEW_CONVERT_EXPR
)
6333 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
6335 stmt
= gimple_build_assign (res
, code
, op0
);
6336 gimple_set_location (stmt
, loc
);
6337 gimple_seq_add_stmt_without_update (seq
, stmt
);
6342 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6343 simplifying it first if possible. Returns the built
6344 expression value and appends statements possibly defining it
to SEQ.  */
6348 gimple_build (gimple_seq
*seq
, location_t loc
,
6349 enum tree_code code
, tree type
, tree op0
, tree op1
)
6351 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
/* No simplification possible: emit an explicit binary assignment.  */
6354 if (gimple_in_ssa_p (cfun
))
6355 res
= make_ssa_name (type
);
6357 res
= create_tmp_reg (type
);
6358 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
6359 gimple_set_location (stmt
, loc
);
6360 gimple_seq_add_stmt_without_update (seq
, stmt
);
6365 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6366 simplifying it first if possible. Returns the built
6367 expression value and appends statements possibly defining it
to SEQ.  */
6371 gimple_build (gimple_seq
*seq
, location_t loc
,
6372 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
6374 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
6375 seq
, gimple_build_valueize
);
6378 if (gimple_in_ssa_p (cfun
))
6379 res
= make_ssa_name (type
);
6381 res
= create_tmp_reg (type
);
/* BIT_FIELD_REF is represented as a wrapped tree operand, not a
   ternary gimple assignment.  */
6383 if (code
== BIT_FIELD_REF
)
6384 stmt
= gimple_build_assign (res
, code
,
6385 build3 (code
, type
, op0
, op1
, op2
));
6387 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
6388 gimple_set_location (stmt
, loc
);
6389 gimple_seq_add_stmt_without_update (seq
, stmt
);
6394 /* Build the call FN (ARG0) with a result of type TYPE
6395 (or no result if TYPE is void) with location LOC,
6396 simplifying it first if possible. Returns the built
6397 expression value (or NULL_TREE if TYPE is void) and appends
6398 statements possibly defining it to SEQ. */
6401 gimple_build (gimple_seq
*seq
, location_t loc
,
6402 enum built_in_function fn
, tree type
, tree arg0
)
6404 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
6407 tree decl
= builtin_decl_implicit (fn
);
6408 gimple
*stmt
= gimple_build_call (decl
, 1, arg0
);
/* A result is created and attached only for non-void TYPE.  */
6409 if (!VOID_TYPE_P (type
))
6411 if (gimple_in_ssa_p (cfun
))
6412 res
= make_ssa_name (type
);
6414 res
= create_tmp_reg (type
);
6415 gimple_call_set_lhs (stmt
, res
);
6417 gimple_set_location (stmt
, loc
);
6418 gimple_seq_add_stmt_without_update (seq
, stmt
);
6423 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6424 (or no result if TYPE is void) with location LOC,
6425 simplifying it first if possible. Returns the built
6426 expression value (or NULL_TREE if TYPE is void) and appends
6427 statements possibly defining it to SEQ. */
6430 gimple_build (gimple_seq
*seq
, location_t loc
,
6431 enum built_in_function fn
, tree type
, tree arg0
, tree arg1
)
6433 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
6436 tree decl
= builtin_decl_implicit (fn
);
6437 gimple
*stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
/* A result is created and attached only for non-void TYPE.  */
6438 if (!VOID_TYPE_P (type
))
6440 if (gimple_in_ssa_p (cfun
))
6441 res
= make_ssa_name (type
);
6443 res
= create_tmp_reg (type
);
6444 gimple_call_set_lhs (stmt
, res
);
6446 gimple_set_location (stmt
, loc
);
6447 gimple_seq_add_stmt_without_update (seq
, stmt
);
6452 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6453 (or no result if TYPE is void) with location LOC,
6454 simplifying it first if possible. Returns the built
6455 expression value (or NULL_TREE if TYPE is void) and appends
6456 statements possibly defining it to SEQ. */
6459 gimple_build (gimple_seq
*seq
, location_t loc
,
6460 enum built_in_function fn
, tree type
,
6461 tree arg0
, tree arg1
, tree arg2
)
6463 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
6464 seq
, gimple_build_valueize
);
6467 tree decl
= builtin_decl_implicit (fn
);
6468 gimple
*stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
/* A result is created and attached only for non-void TYPE.  */
6469 if (!VOID_TYPE_P (type
))
6471 if (gimple_in_ssa_p (cfun
))
6472 res
= make_ssa_name (type
);
6474 res
= create_tmp_reg (type
);
6475 gimple_call_set_lhs (stmt
, res
);
6477 gimple_set_location (stmt
, loc
);
6478 gimple_seq_add_stmt_without_update (seq
, stmt
);
6483 /* Build the conversion (TYPE) OP with a result of type TYPE
6484 with location LOC if such conversion is necessary in GIMPLE,
6485 simplifying it first.
6486 Returns the built expression value and appends
6487 statements possibly defining it to SEQ. */
6490 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
/* Useless conversions are elided; otherwise emit a NOP_EXPR.  */
6492 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
6494 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
6497 /* Build the conversion (ptrofftype) OP with a result of a type
6498 compatible with ptrofftype with location LOC if such conversion
6499 is necessary in GIMPLE, simplifying it first.
6500 Returns the built expression value and appends
6501 statements possibly defining it to SEQ. */
6504 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
/* Already pointer-offset compatible: nothing to do; else convert to
   sizetype.  */
6506 if (ptrofftype_p (TREE_TYPE (op
)))
6508 return gimple_convert (seq
, loc
, sizetype
, op
);
6511 /* Return true if the result of assignment STMT is known to be non-negative.
6512 If the return value is based on the assumption that signed overflow is
6513 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6514 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6517 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Dispatch to the matching fold-const helper based on the RHS class.  */
6520 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6521 switch (get_gimple_rhs_class (code
))
6523 case GIMPLE_UNARY_RHS
:
6524 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
6525 gimple_expr_type (stmt
),
6526 gimple_assign_rhs1 (stmt
),
6527 strict_overflow_p
, depth
);
6528 case GIMPLE_BINARY_RHS
:
6529 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
6530 gimple_expr_type (stmt
),
6531 gimple_assign_rhs1 (stmt
),
6532 gimple_assign_rhs2 (stmt
),
6533 strict_overflow_p
, depth
);
6534 case GIMPLE_TERNARY_RHS
:
6536 case GIMPLE_SINGLE_RHS
:
6537 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
6538 strict_overflow_p
, depth
);
6539 case GIMPLE_INVALID_RHS
:
6545 /* Return true if return value of call STMT is known to be non-negative.
6546 If the return value is based on the assumption that signed overflow is
6547 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6548 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6551 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Only the first two call arguments (if present) are consulted.  */
6554 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
6555 gimple_call_arg (stmt
, 0) : NULL_TREE
;
6556 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
6557 gimple_call_arg (stmt
, 1) : NULL_TREE
;
6559 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
6560 gimple_call_combined_fn (stmt
),
6563 strict_overflow_p
, depth
);
6566 /* Return true if return value of phi STMT is known to be non-negative.
6567 If the return value is based on the assumption that signed overflow is
6568 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6569 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6572 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* A PHI is non-negative only if every incoming argument is.  */
6575 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
6577 tree arg
= gimple_phi_arg_def (stmt
, i
);
6578 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
6584 /* Return true if STMT is known to compute a non-negative value.
6585 If the return value is based on the assumption that signed overflow is
6586 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6587 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6590 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Dispatch on the statement kind (assign / call / phi).  */
6593 switch (gimple_code (stmt
))
6596 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
6599 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
6602 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
6609 /* Return true if the floating-point value computed by assignment STMT
6610 is known to have an integer value. We also allow +Inf, -Inf and NaN
6611 to be considered integer values. Return false for signaling NaN.
6613 DEPTH is the current nesting depth of the query. */
6616 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
/* Dispatch to the matching fold-const helper based on the RHS class.  */
6618 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6619 switch (get_gimple_rhs_class (code
))
6621 case GIMPLE_UNARY_RHS
:
6622 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
6623 gimple_assign_rhs1 (stmt
), depth
);
6624 case GIMPLE_BINARY_RHS
:
6625 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
6626 gimple_assign_rhs1 (stmt
),
6627 gimple_assign_rhs2 (stmt
), depth
);
6628 case GIMPLE_TERNARY_RHS
:
6630 case GIMPLE_SINGLE_RHS
:
6631 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
6632 case GIMPLE_INVALID_RHS
:
6638 /* Return true if the floating-point value computed by call STMT is known
6639 to have an integer value. We also allow +Inf, -Inf and NaN to be
6640 considered integer values. Return false for signaling NaN.
6642 DEPTH is the current nesting depth of the query. */
6645 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
/* Only the first two call arguments (if present) are consulted.  */
6647 tree arg0
= (gimple_call_num_args (stmt
) > 0
6648 ? gimple_call_arg (stmt
, 0)
6650 tree arg1
= (gimple_call_num_args (stmt
) > 1
6651 ? gimple_call_arg (stmt
, 1)
6653 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
6657 /* Return true if the floating-point result of phi STMT is known to have
6658 an integer value. We also allow +Inf, -Inf and NaN to be considered
6659 integer values. Return false for signaling NaN.
6661 DEPTH is the current nesting depth of the query. */
6664 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
/* A PHI result is integer valued only if every incoming argument is.  */
6666 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
6668 tree arg
= gimple_phi_arg_def (stmt
, i
);
6669 if (!integer_valued_real_single_p (arg
, depth
+ 1))
6675 /* Return true if the floating-point value computed by STMT is known
6676 to have an integer value. We also allow +Inf, -Inf and NaN to be
6677 considered integer values. Return false for signaling NaN.
6679 DEPTH is the current nesting depth of the query. */
6682 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
6684 switch (gimple_code (stmt
))
6687 return gimple_assign_integer_valued_real_p (stmt
, depth
);
6689 return gimple_call_integer_valued_real_p (stmt
, depth
);
6691 return gimple_phi_integer_valued_real_p (stmt
, depth
);