2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "diagnostic-core.h"
27 #include "tree-inline.h"
31 #include "insn-config.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-iterator.h"
37 #include "tree-mudflap.h"
39 #include "gimple-ssa.h"
41 #include "tree-phinodes.h"
42 #include "ssa-iterators.h"
43 #include "tree-ssanames.h"
44 #include "tree-into-ssa.h"
48 #include "tree-pretty-print.h"
51 #include "pointer-set.h"
53 #include "value-prof.h"
54 #include "tree-pass.h"
58 #include "rtl.h" /* FIXME: For asm_str_count. */
60 /* I'm not real happy about this, but we need to handle gimple and
63 /* Inlining, Cloning, Versioning, Parallelization
65 Inlining: a function body is duplicated, but the PARM_DECLs are
66 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
67 MODIFY_EXPRs that store to a dedicated returned-value variable.
68 The duplicated eh_region info of the copy will later be appended
69 to the info for the caller; the eh_region info in copied throwing
70 statements and RESX statements are adjusted accordingly.
72 Cloning: (only in C++) We have one body for a con/de/structor, and
73 multiple function decls, each with a unique parameter list.
74 Duplicate the body, using the given splay tree; some parameters
75 will become constants (like 0 or 1).
77 Versioning: a function body is duplicated and the result is a new
78 function rather than into blocks of an existing function as with
79 inlining. Some parameters will become constants.
81 Parallelization: a region of a function is duplicated resulting in
82 a new function. Variables may be replaced with complex expressions
83 to enable shared variable semantics.
85 All of these will simultaneously lookup any callgraph edges. If
86 we're going to inline the duplicated function body, and the given
87 function has some cloned callgraph nodes (one for each place this
88 function will be inlined) those callgraph edges will be duplicated.
89 If we're cloning the body, those callgraph edges will be
90 updated to point into the new body. (Note that the original
91 callgraph node and edge list will not be altered.)
93 See the CALL_EXPR handling case in copy_tree_body_r (). */
97 o In order to make inlining-on-trees work, we pessimized
98 function-local static constants. In particular, they are now
99 always output, even when not addressed. Fix this by treating
100 function-local static constants just like global static
101 constants; the back-end already knows not to output them if they
104 o Provide heuristics to clamp inlining of recursive template
108 /* Weights that estimate_num_insns uses to estimate the size of the
111 eni_weights eni_size_weights
;
113 /* Weights that estimate_num_insns uses to estimate the time necessary
114 to execute the produced code. */
116 eni_weights eni_time_weights
;
120 static tree
declare_return_variable (copy_body_data
*, tree
, tree
, basic_block
);
121 static void remap_block (tree
*, copy_body_data
*);
122 static void copy_bind_expr (tree
*, int *, copy_body_data
*);
123 static void declare_inline_vars (tree
, tree
);
124 static void remap_save_expr (tree
*, void *, int *);
125 static void prepend_lexical_block (tree current_block
, tree new_block
);
126 static tree
copy_decl_to_var (tree
, copy_body_data
*);
127 static tree
copy_result_decl_to_var (tree
, copy_body_data
*);
128 static tree
copy_decl_maybe_to_var (tree
, copy_body_data
*);
129 static gimple
remap_gimple_stmt (gimple
, copy_body_data
*);
130 static bool delete_unreachable_blocks_update_callgraph (copy_body_data
*id
);
132 /* Insert a tree->tree mapping for ID. Despite the name suggests
133 that the trees should be variables, it is used for more than that. */
136 insert_decl_map (copy_body_data
*id
, tree key
, tree value
)
138 *pointer_map_insert (id
->decl_map
, key
) = value
;
140 /* Always insert an identity map as well. If we see this same new
141 node again, we won't want to duplicate it a second time. */
143 *pointer_map_insert (id
->decl_map
, value
) = value
;
146 /* Insert a tree->tree mapping for ID. This is only used for
150 insert_debug_decl_map (copy_body_data
*id
, tree key
, tree value
)
152 if (!gimple_in_ssa_p (id
->src_cfun
))
155 if (!MAY_HAVE_DEBUG_STMTS
)
158 if (!target_for_debug_bind (key
))
161 gcc_assert (TREE_CODE (key
) == PARM_DECL
);
162 gcc_assert (TREE_CODE (value
) == VAR_DECL
);
165 id
->debug_map
= pointer_map_create ();
167 *pointer_map_insert (id
->debug_map
, key
) = value
;
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
176 /* Construct new SSA name for old NAME. ID is the inline context. */
179 remap_ssa_name (tree name
, copy_body_data
*id
)
184 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
186 n
= (tree
*) pointer_map_contains (id
->decl_map
, name
);
188 return unshare_expr (*n
);
190 if (processing_debug_stmt
)
192 if (SSA_NAME_IS_DEFAULT_DEF (name
)
193 && TREE_CODE (SSA_NAME_VAR (name
)) == PARM_DECL
194 && id
->entry_bb
== NULL
195 && single_succ_p (ENTRY_BLOCK_PTR
))
197 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
199 gimple_stmt_iterator gsi
;
200 tree val
= SSA_NAME_VAR (name
);
202 n
= (tree
*) pointer_map_contains (id
->decl_map
, val
);
205 if (TREE_CODE (val
) != PARM_DECL
)
207 processing_debug_stmt
= -1;
210 def_temp
= gimple_build_debug_source_bind (vexpr
, val
, NULL
);
211 DECL_ARTIFICIAL (vexpr
) = 1;
212 TREE_TYPE (vexpr
) = TREE_TYPE (name
);
213 DECL_MODE (vexpr
) = DECL_MODE (SSA_NAME_VAR (name
));
214 gsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR
));
215 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
219 processing_debug_stmt
= -1;
223 /* Remap anonymous SSA names or SSA names of anonymous decls. */
224 var
= SSA_NAME_VAR (name
);
226 || (!SSA_NAME_IS_DEFAULT_DEF (name
)
227 && TREE_CODE (var
) == VAR_DECL
228 && !VAR_DECL_IS_VIRTUAL_OPERAND (var
)
229 && DECL_ARTIFICIAL (var
)
230 && DECL_IGNORED_P (var
)
231 && !DECL_NAME (var
)))
233 struct ptr_info_def
*pi
;
234 new_tree
= make_ssa_name (remap_type (TREE_TYPE (name
), id
), NULL
);
235 if (!var
&& SSA_NAME_IDENTIFIER (name
))
236 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree
, SSA_NAME_IDENTIFIER (name
));
237 insert_decl_map (id
, name
, new_tree
);
238 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
239 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
240 /* At least IPA points-to info can be directly transferred. */
241 if (id
->src_cfun
->gimple_df
242 && id
->src_cfun
->gimple_df
->ipa_pta
243 && (pi
= SSA_NAME_PTR_INFO (name
))
246 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
252 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
254 new_tree
= remap_decl (var
, id
);
256 /* We might've substituted constant or another SSA_NAME for
259 Replace the SSA name representing RESULT_DECL by variable during
260 inlining: this saves us from need to introduce PHI node in a case
261 return value is just partly initialized. */
262 if ((TREE_CODE (new_tree
) == VAR_DECL
|| TREE_CODE (new_tree
) == PARM_DECL
)
263 && (!SSA_NAME_VAR (name
)
264 || TREE_CODE (SSA_NAME_VAR (name
)) != RESULT_DECL
265 || !id
->transform_return_to_modify
))
267 struct ptr_info_def
*pi
;
268 new_tree
= make_ssa_name (new_tree
, NULL
);
269 insert_decl_map (id
, name
, new_tree
);
270 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
271 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
272 /* At least IPA points-to info can be directly transferred. */
273 if (id
->src_cfun
->gimple_df
274 && id
->src_cfun
->gimple_df
->ipa_pta
275 && (pi
= SSA_NAME_PTR_INFO (name
))
278 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
281 if (SSA_NAME_IS_DEFAULT_DEF (name
))
283 /* By inlining function having uninitialized variable, we might
284 extend the lifetime (variable might get reused). This cause
285 ICE in the case we end up extending lifetime of SSA name across
286 abnormal edge, but also increase register pressure.
288 We simply initialize all uninitialized vars by 0 except
289 for case we are inlining to very first BB. We can avoid
290 this for all BBs that are not inside strongly connected
291 regions of the CFG, but this is expensive to test. */
293 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
)
294 && (!SSA_NAME_VAR (name
)
295 || TREE_CODE (SSA_NAME_VAR (name
)) != PARM_DECL
)
296 && (id
->entry_bb
!= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0)->dest
297 || EDGE_COUNT (id
->entry_bb
->preds
) != 1))
299 gimple_stmt_iterator gsi
= gsi_last_bb (id
->entry_bb
);
301 tree zero
= build_zero_cst (TREE_TYPE (new_tree
));
303 init_stmt
= gimple_build_assign (new_tree
, zero
);
304 gsi_insert_after (&gsi
, init_stmt
, GSI_NEW_STMT
);
305 SSA_NAME_IS_DEFAULT_DEF (new_tree
) = 0;
309 SSA_NAME_DEF_STMT (new_tree
) = gimple_build_nop ();
310 set_ssa_default_def (cfun
, SSA_NAME_VAR (new_tree
), new_tree
);
315 insert_decl_map (id
, name
, new_tree
);
319 /* Remap DECL during the copying of the BLOCK tree for the function. */
322 remap_decl (tree decl
, copy_body_data
*id
)
326 /* We only remap local variables in the current function. */
328 /* See if we have remapped this declaration. */
330 n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
332 if (!n
&& processing_debug_stmt
)
334 processing_debug_stmt
= -1;
338 /* If we didn't already have an equivalent for this declaration,
342 /* Make a copy of the variable or label. */
343 tree t
= id
->copy_decl (decl
, id
);
345 /* Remember it, so that if we encounter this local entity again
346 we can reuse this copy. Do this early because remap_type may
347 need this decl for TYPE_STUB_DECL. */
348 insert_decl_map (id
, decl
, t
);
353 /* Remap types, if necessary. */
354 TREE_TYPE (t
) = remap_type (TREE_TYPE (t
), id
);
355 if (TREE_CODE (t
) == TYPE_DECL
)
356 DECL_ORIGINAL_TYPE (t
) = remap_type (DECL_ORIGINAL_TYPE (t
), id
);
358 /* Remap sizes as necessary. */
359 walk_tree (&DECL_SIZE (t
), copy_tree_body_r
, id
, NULL
);
360 walk_tree (&DECL_SIZE_UNIT (t
), copy_tree_body_r
, id
, NULL
);
362 /* If fields, do likewise for offset and qualifier. */
363 if (TREE_CODE (t
) == FIELD_DECL
)
365 walk_tree (&DECL_FIELD_OFFSET (t
), copy_tree_body_r
, id
, NULL
);
366 if (TREE_CODE (DECL_CONTEXT (t
)) == QUAL_UNION_TYPE
)
367 walk_tree (&DECL_QUALIFIER (t
), copy_tree_body_r
, id
, NULL
);
373 if (id
->do_not_unshare
)
376 return unshare_expr (*n
);
380 remap_type_1 (tree type
, copy_body_data
*id
)
384 /* We do need a copy. build and register it now. If this is a pointer or
385 reference type, remap the designated type and make a new pointer or
387 if (TREE_CODE (type
) == POINTER_TYPE
)
389 new_tree
= build_pointer_type_for_mode (remap_type (TREE_TYPE (type
), id
),
391 TYPE_REF_CAN_ALIAS_ALL (type
));
392 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
393 new_tree
= build_type_attribute_qual_variant (new_tree
,
394 TYPE_ATTRIBUTES (type
),
396 insert_decl_map (id
, type
, new_tree
);
399 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
401 new_tree
= build_reference_type_for_mode (remap_type (TREE_TYPE (type
), id
),
403 TYPE_REF_CAN_ALIAS_ALL (type
));
404 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
405 new_tree
= build_type_attribute_qual_variant (new_tree
,
406 TYPE_ATTRIBUTES (type
),
408 insert_decl_map (id
, type
, new_tree
);
412 new_tree
= copy_node (type
);
414 insert_decl_map (id
, type
, new_tree
);
416 /* This is a new type, not a copy of an old type. Need to reassociate
417 variants. We can handle everything except the main variant lazily. */
418 t
= TYPE_MAIN_VARIANT (type
);
421 t
= remap_type (t
, id
);
422 TYPE_MAIN_VARIANT (new_tree
) = t
;
423 TYPE_NEXT_VARIANT (new_tree
) = TYPE_NEXT_VARIANT (t
);
424 TYPE_NEXT_VARIANT (t
) = new_tree
;
428 TYPE_MAIN_VARIANT (new_tree
) = new_tree
;
429 TYPE_NEXT_VARIANT (new_tree
) = NULL
;
432 if (TYPE_STUB_DECL (type
))
433 TYPE_STUB_DECL (new_tree
) = remap_decl (TYPE_STUB_DECL (type
), id
);
435 /* Lazily create pointer and reference types. */
436 TYPE_POINTER_TO (new_tree
) = NULL
;
437 TYPE_REFERENCE_TO (new_tree
) = NULL
;
439 switch (TREE_CODE (new_tree
))
443 case FIXED_POINT_TYPE
:
446 t
= TYPE_MIN_VALUE (new_tree
);
447 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
448 walk_tree (&TYPE_MIN_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
450 t
= TYPE_MAX_VALUE (new_tree
);
451 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
452 walk_tree (&TYPE_MAX_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
456 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
457 walk_tree (&TYPE_ARG_TYPES (new_tree
), copy_tree_body_r
, id
, NULL
);
461 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
462 TYPE_DOMAIN (new_tree
) = remap_type (TYPE_DOMAIN (new_tree
), id
);
467 case QUAL_UNION_TYPE
:
471 for (f
= TYPE_FIELDS (new_tree
); f
; f
= DECL_CHAIN (f
))
473 t
= remap_decl (f
, id
);
474 DECL_CONTEXT (t
) = new_tree
;
478 TYPE_FIELDS (new_tree
) = nreverse (nf
);
484 /* Shouldn't have been thought variable sized. */
488 walk_tree (&TYPE_SIZE (new_tree
), copy_tree_body_r
, id
, NULL
);
489 walk_tree (&TYPE_SIZE_UNIT (new_tree
), copy_tree_body_r
, id
, NULL
);
495 remap_type (tree type
, copy_body_data
*id
)
503 /* See if we have remapped this type. */
504 node
= (tree
*) pointer_map_contains (id
->decl_map
, type
);
508 /* The type only needs remapping if it's variably modified. */
509 if (! variably_modified_type_p (type
, id
->src_fn
))
511 insert_decl_map (id
, type
, type
);
515 id
->remapping_type_depth
++;
516 tmp
= remap_type_1 (type
, id
);
517 id
->remapping_type_depth
--;
522 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
525 can_be_nonlocal (tree decl
, copy_body_data
*id
)
527 /* We can not duplicate function decls. */
528 if (TREE_CODE (decl
) == FUNCTION_DECL
)
531 /* Local static vars must be non-local or we get multiple declaration
533 if (TREE_CODE (decl
) == VAR_DECL
534 && !auto_var_in_fn_p (decl
, id
->src_fn
))
541 remap_decls (tree decls
, vec
<tree
, va_gc
> **nonlocalized_list
,
545 tree new_decls
= NULL_TREE
;
547 /* Remap its variables. */
548 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
552 if (can_be_nonlocal (old_var
, id
))
554 /* We need to add this variable to the local decls as otherwise
555 nothing else will do so. */
556 if (TREE_CODE (old_var
) == VAR_DECL
557 && ! DECL_EXTERNAL (old_var
))
558 add_local_decl (cfun
, old_var
);
559 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
560 && !DECL_IGNORED_P (old_var
)
561 && nonlocalized_list
)
562 vec_safe_push (*nonlocalized_list
, old_var
);
566 /* Remap the variable. */
567 new_var
= remap_decl (old_var
, id
);
569 /* If we didn't remap this variable, we can't mess with its
570 TREE_CHAIN. If we remapped this variable to the return slot, it's
571 already declared somewhere else, so don't declare it here. */
573 if (new_var
== id
->retvar
)
577 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
578 && !DECL_IGNORED_P (old_var
)
579 && nonlocalized_list
)
580 vec_safe_push (*nonlocalized_list
, old_var
);
584 gcc_assert (DECL_P (new_var
));
585 DECL_CHAIN (new_var
) = new_decls
;
588 /* Also copy value-expressions. */
589 if (TREE_CODE (new_var
) == VAR_DECL
590 && DECL_HAS_VALUE_EXPR_P (new_var
))
592 tree tem
= DECL_VALUE_EXPR (new_var
);
593 bool old_regimplify
= id
->regimplify
;
594 id
->remapping_type_depth
++;
595 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
596 id
->remapping_type_depth
--;
597 id
->regimplify
= old_regimplify
;
598 SET_DECL_VALUE_EXPR (new_var
, tem
);
603 return nreverse (new_decls
);
606 /* Copy the BLOCK to contain remapped versions of the variables
607 therein. And hook the new block into the block-tree. */
610 remap_block (tree
*block
, copy_body_data
*id
)
615 /* Make the new block. */
617 new_block
= make_node (BLOCK
);
618 TREE_USED (new_block
) = TREE_USED (old_block
);
619 BLOCK_ABSTRACT_ORIGIN (new_block
) = old_block
;
620 BLOCK_SOURCE_LOCATION (new_block
) = BLOCK_SOURCE_LOCATION (old_block
);
621 BLOCK_NONLOCALIZED_VARS (new_block
)
622 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block
));
625 /* Remap its variables. */
626 BLOCK_VARS (new_block
) = remap_decls (BLOCK_VARS (old_block
),
627 &BLOCK_NONLOCALIZED_VARS (new_block
),
630 if (id
->transform_lang_insert_block
)
631 id
->transform_lang_insert_block (new_block
);
633 /* Remember the remapped block. */
634 insert_decl_map (id
, old_block
, new_block
);
637 /* Copy the whole block tree and root it in id->block. */
639 remap_blocks (tree block
, copy_body_data
*id
)
642 tree new_tree
= block
;
647 remap_block (&new_tree
, id
);
648 gcc_assert (new_tree
!= block
);
649 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
650 prepend_lexical_block (new_tree
, remap_blocks (t
, id
));
651 /* Blocks are in arbitrary order, but make things slightly prettier and do
652 not swap order when producing a copy. */
653 BLOCK_SUBBLOCKS (new_tree
) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree
));
657 /* Remap the block tree rooted at BLOCK to nothing. */
659 remap_blocks_to_null (tree block
, copy_body_data
*id
)
662 insert_decl_map (id
, block
, NULL_TREE
);
663 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
664 remap_blocks_to_null (t
, id
);
668 copy_statement_list (tree
*tp
)
670 tree_stmt_iterator oi
, ni
;
673 new_tree
= alloc_stmt_list ();
674 ni
= tsi_start (new_tree
);
675 oi
= tsi_start (*tp
);
676 TREE_TYPE (new_tree
) = TREE_TYPE (*tp
);
679 for (; !tsi_end_p (oi
); tsi_next (&oi
))
681 tree stmt
= tsi_stmt (oi
);
682 if (TREE_CODE (stmt
) == STATEMENT_LIST
)
683 /* This copy is not redundant; tsi_link_after will smash this
684 STATEMENT_LIST into the end of the one we're building, and we
685 don't want to do that with the original. */
686 copy_statement_list (&stmt
);
687 tsi_link_after (&ni
, stmt
, TSI_CONTINUE_LINKING
);
692 copy_bind_expr (tree
*tp
, int *walk_subtrees
, copy_body_data
*id
)
694 tree block
= BIND_EXPR_BLOCK (*tp
);
695 /* Copy (and replace) the statement. */
696 copy_tree_r (tp
, walk_subtrees
, NULL
);
699 remap_block (&block
, id
);
700 BIND_EXPR_BLOCK (*tp
) = block
;
703 if (BIND_EXPR_VARS (*tp
))
704 /* This will remap a lot of the same decls again, but this should be
706 BIND_EXPR_VARS (*tp
) = remap_decls (BIND_EXPR_VARS (*tp
), NULL
, id
);
710 /* Create a new gimple_seq by remapping all the statements in BODY
711 using the inlining information in ID. */
714 remap_gimple_seq (gimple_seq body
, copy_body_data
*id
)
716 gimple_stmt_iterator si
;
717 gimple_seq new_body
= NULL
;
719 for (si
= gsi_start (body
); !gsi_end_p (si
); gsi_next (&si
))
721 gimple new_stmt
= remap_gimple_stmt (gsi_stmt (si
), id
);
722 gimple_seq_add_stmt (&new_body
, new_stmt
);
729 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
730 block using the mapping information in ID. */
733 copy_gimple_bind (gimple stmt
, copy_body_data
*id
)
736 tree new_block
, new_vars
;
737 gimple_seq body
, new_body
;
739 /* Copy the statement. Note that we purposely don't use copy_stmt
740 here because we need to remap statements as we copy. */
741 body
= gimple_bind_body (stmt
);
742 new_body
= remap_gimple_seq (body
, id
);
744 new_block
= gimple_bind_block (stmt
);
746 remap_block (&new_block
, id
);
748 /* This will remap a lot of the same decls again, but this should be
750 new_vars
= gimple_bind_vars (stmt
);
752 new_vars
= remap_decls (new_vars
, NULL
, id
);
754 new_bind
= gimple_build_bind (new_vars
, new_body
, new_block
);
759 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
764 if (TREE_CODE (decl
) == SSA_NAME
)
766 decl
= SSA_NAME_VAR (decl
);
771 return (TREE_CODE (decl
) == PARM_DECL
);
774 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
775 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
776 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
777 recursing into the children nodes of *TP. */
780 remap_gimple_op_r (tree
*tp
, int *walk_subtrees
, void *data
)
782 struct walk_stmt_info
*wi_p
= (struct walk_stmt_info
*) data
;
783 copy_body_data
*id
= (copy_body_data
*) wi_p
->info
;
784 tree fn
= id
->src_fn
;
786 if (TREE_CODE (*tp
) == SSA_NAME
)
788 *tp
= remap_ssa_name (*tp
, id
);
792 else if (auto_var_in_fn_p (*tp
, fn
))
794 /* Local variables and labels need to be replaced by equivalent
795 variables. We don't want to copy static variables; there's
796 only one of those, no matter how many times we inline the
797 containing function. Similarly for globals from an outer
801 /* Remap the declaration. */
802 new_decl
= remap_decl (*tp
, id
);
803 gcc_assert (new_decl
);
804 /* Replace this variable with the copy. */
805 STRIP_TYPE_NOPS (new_decl
);
806 /* ??? The C++ frontend uses void * pointer zero to initialize
807 any other type. This confuses the middle-end type verification.
808 As cloned bodies do not go through gimplification again the fixup
809 there doesn't trigger. */
810 if (TREE_CODE (new_decl
) == INTEGER_CST
811 && !useless_type_conversion_p (TREE_TYPE (*tp
), TREE_TYPE (new_decl
)))
812 new_decl
= fold_convert (TREE_TYPE (*tp
), new_decl
);
816 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
818 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
820 else if (TREE_CODE (*tp
) == LABEL_DECL
821 && (!DECL_CONTEXT (*tp
)
822 || decl_function_context (*tp
) == id
->src_fn
))
823 /* These may need to be remapped for EH handling. */
824 *tp
= remap_decl (*tp
, id
);
825 else if (TREE_CODE (*tp
) == FIELD_DECL
)
827 /* If the enclosing record type is variably_modified_type_p, the field
828 has already been remapped. Otherwise, it need not be. */
829 tree
*n
= (tree
*) pointer_map_contains (id
->decl_map
, *tp
);
834 else if (TYPE_P (*tp
))
835 /* Types may need remapping as well. */
836 *tp
= remap_type (*tp
, id
);
837 else if (CONSTANT_CLASS_P (*tp
))
839 /* If this is a constant, we have to copy the node iff the type
840 will be remapped. copy_tree_r will not copy a constant. */
841 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
843 if (new_type
== TREE_TYPE (*tp
))
846 else if (TREE_CODE (*tp
) == INTEGER_CST
)
847 *tp
= build_int_cst_wide (new_type
, TREE_INT_CST_LOW (*tp
),
848 TREE_INT_CST_HIGH (*tp
));
851 *tp
= copy_node (*tp
);
852 TREE_TYPE (*tp
) = new_type
;
857 /* Otherwise, just copy the node. Note that copy_tree_r already
858 knows not to copy VAR_DECLs, etc., so this is safe. */
860 if (TREE_CODE (*tp
) == MEM_REF
)
862 /* We need to re-canonicalize MEM_REFs from inline substitutions
863 that can happen when a pointer argument is an ADDR_EXPR.
864 Recurse here manually to allow that. */
865 tree ptr
= TREE_OPERAND (*tp
, 0);
866 tree type
= remap_type (TREE_TYPE (*tp
), id
);
868 walk_tree (&ptr
, remap_gimple_op_r
, data
, NULL
);
869 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
870 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
871 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
872 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
873 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
874 remapped a parameter as the property might be valid only
875 for the parameter itself. */
876 if (TREE_THIS_NOTRAP (old
)
877 && (!is_parm (TREE_OPERAND (old
, 0))
878 || (!id
->transform_parameter
&& is_parm (ptr
))))
879 TREE_THIS_NOTRAP (*tp
) = 1;
884 /* Here is the "usual case". Copy this tree node, and then
885 tweak some special cases. */
886 copy_tree_r (tp
, walk_subtrees
, NULL
);
888 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
889 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
891 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
893 /* The copied TARGET_EXPR has never been expanded, even if the
894 original node was expanded already. */
895 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
896 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
898 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
900 /* Variable substitution need not be simple. In particular,
901 the MEM_REF substitution above. Make sure that
902 TREE_CONSTANT and friends are up-to-date. */
903 int invariant
= is_gimple_min_invariant (*tp
);
904 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
905 recompute_tree_invariant_for_addr_expr (*tp
);
907 /* If this used to be invariant, but is not any longer,
908 then regimplification is probably needed. */
909 if (invariant
&& !is_gimple_min_invariant (*tp
))
910 id
->regimplify
= true;
916 /* Update the TREE_BLOCK for the cloned expr. */
919 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
920 tree old_block
= TREE_BLOCK (*tp
);
924 n
= (tree
*) pointer_map_contains (id
->decl_map
,
929 TREE_SET_BLOCK (*tp
, new_block
);
932 /* Keep iterating. */
937 /* Called from copy_body_id via walk_tree. DATA is really a
938 `copy_body_data *'. */
941 copy_tree_body_r (tree
*tp
, int *walk_subtrees
, void *data
)
943 copy_body_data
*id
= (copy_body_data
*) data
;
944 tree fn
= id
->src_fn
;
947 /* Begin by recognizing trees that we'll completely rewrite for the
948 inlining context. Our output for these trees is completely
949 different from out input (e.g. RETURN_EXPR is deleted, and morphs
950 into an edge). Further down, we'll handle trees that get
951 duplicated and/or tweaked. */
953 /* When requested, RETURN_EXPRs should be transformed to just the
954 contained MODIFY_EXPR. The branch semantics of the return will
955 be handled elsewhere by manipulating the CFG rather than a statement. */
956 if (TREE_CODE (*tp
) == RETURN_EXPR
&& id
->transform_return_to_modify
)
958 tree assignment
= TREE_OPERAND (*tp
, 0);
960 /* If we're returning something, just turn that into an
961 assignment into the equivalent of the original RESULT_DECL.
962 If the "assignment" is just the result decl, the result
963 decl has already been set (e.g. a recent "foo (&result_decl,
964 ...)"); just toss the entire RETURN_EXPR. */
965 if (assignment
&& TREE_CODE (assignment
) == MODIFY_EXPR
)
967 /* Replace the RETURN_EXPR with (a copy of) the
968 MODIFY_EXPR hanging underneath. */
969 *tp
= copy_node (assignment
);
971 else /* Else the RETURN_EXPR returns no value. */
974 return (tree
) (void *)1;
977 else if (TREE_CODE (*tp
) == SSA_NAME
)
979 *tp
= remap_ssa_name (*tp
, id
);
984 /* Local variables and labels need to be replaced by equivalent
985 variables. We don't want to copy static variables; there's only
986 one of those, no matter how many times we inline the containing
987 function. Similarly for globals from an outer function. */
988 else if (auto_var_in_fn_p (*tp
, fn
))
992 /* Remap the declaration. */
993 new_decl
= remap_decl (*tp
, id
);
994 gcc_assert (new_decl
);
995 /* Replace this variable with the copy. */
996 STRIP_TYPE_NOPS (new_decl
);
1000 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1001 copy_statement_list (tp
);
1002 else if (TREE_CODE (*tp
) == SAVE_EXPR
1003 || TREE_CODE (*tp
) == TARGET_EXPR
)
1004 remap_save_expr (tp
, id
->decl_map
, walk_subtrees
);
1005 else if (TREE_CODE (*tp
) == LABEL_DECL
1006 && (! DECL_CONTEXT (*tp
)
1007 || decl_function_context (*tp
) == id
->src_fn
))
1008 /* These may need to be remapped for EH handling. */
1009 *tp
= remap_decl (*tp
, id
);
1010 else if (TREE_CODE (*tp
) == BIND_EXPR
)
1011 copy_bind_expr (tp
, walk_subtrees
, id
);
1012 /* Types may need remapping as well. */
1013 else if (TYPE_P (*tp
))
1014 *tp
= remap_type (*tp
, id
);
1016 /* If this is a constant, we have to copy the node iff the type will be
1017 remapped. copy_tree_r will not copy a constant. */
1018 else if (CONSTANT_CLASS_P (*tp
))
1020 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1022 if (new_type
== TREE_TYPE (*tp
))
1025 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1026 *tp
= build_int_cst_wide (new_type
, TREE_INT_CST_LOW (*tp
),
1027 TREE_INT_CST_HIGH (*tp
));
1030 *tp
= copy_node (*tp
);
1031 TREE_TYPE (*tp
) = new_type
;
1035 /* Otherwise, just copy the node. Note that copy_tree_r already
1036 knows not to copy VAR_DECLs, etc., so this is safe. */
1039 /* Here we handle trees that are not completely rewritten.
1040 First we detect some inlining-induced bogosities for
1042 if (TREE_CODE (*tp
) == MODIFY_EXPR
1043 && TREE_OPERAND (*tp
, 0) == TREE_OPERAND (*tp
, 1)
1044 && (auto_var_in_fn_p (TREE_OPERAND (*tp
, 0), fn
)))
1046 /* Some assignments VAR = VAR; don't generate any rtl code
1047 and thus don't count as variable modification. Avoid
1048 keeping bogosities like 0 = 0. */
1049 tree decl
= TREE_OPERAND (*tp
, 0), value
;
1052 n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
1056 STRIP_TYPE_NOPS (value
);
1057 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1059 *tp
= build_empty_stmt (EXPR_LOCATION (*tp
));
1060 return copy_tree_body_r (tp
, walk_subtrees
, data
);
1064 else if (TREE_CODE (*tp
) == INDIRECT_REF
)
1066 /* Get rid of *& from inline substitutions that can happen when a
1067 pointer argument is an ADDR_EXPR. */
1068 tree decl
= TREE_OPERAND (*tp
, 0);
1069 tree
*n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
1072 /* If we happen to get an ADDR_EXPR in n->value, strip
1073 it manually here as we'll eventually get ADDR_EXPRs
1074 which lie about their types pointed to. In this case
1075 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1076 but we absolutely rely on that. As fold_indirect_ref
1077 does other useful transformations, try that first, though. */
1078 tree type
= TREE_TYPE (*tp
);
1079 tree ptr
= id
->do_not_unshare
? *n
: unshare_expr (*n
);
1081 *tp
= gimple_fold_indirect_ref (ptr
);
1084 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1087 = fold_indirect_ref_1 (EXPR_LOCATION (ptr
), type
, ptr
);
1088 /* ??? We should either assert here or build
1089 a VIEW_CONVERT_EXPR instead of blindly leaking
1090 incompatible types to our IL. */
1092 *tp
= TREE_OPERAND (ptr
, 0);
1096 *tp
= build1 (INDIRECT_REF
, type
, ptr
);
1097 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1098 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1099 TREE_READONLY (*tp
) = TREE_READONLY (old
);
1100 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1101 have remapped a parameter as the property might be
1102 valid only for the parameter itself. */
1103 if (TREE_THIS_NOTRAP (old
)
1104 && (!is_parm (TREE_OPERAND (old
, 0))
1105 || (!id
->transform_parameter
&& is_parm (ptr
))))
1106 TREE_THIS_NOTRAP (*tp
) = 1;
1113 else if (TREE_CODE (*tp
) == MEM_REF
)
1115 /* We need to re-canonicalize MEM_REFs from inline substitutions
1116 that can happen when a pointer argument is an ADDR_EXPR.
1117 Recurse here manually to allow that. */
1118 tree ptr
= TREE_OPERAND (*tp
, 0);
1119 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1121 walk_tree (&ptr
, copy_tree_body_r
, data
, NULL
);
1122 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1123 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1124 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1125 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1126 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1127 remapped a parameter as the property might be valid only
1128 for the parameter itself. */
1129 if (TREE_THIS_NOTRAP (old
)
1130 && (!is_parm (TREE_OPERAND (old
, 0))
1131 || (!id
->transform_parameter
&& is_parm (ptr
))))
1132 TREE_THIS_NOTRAP (*tp
) = 1;
1137 /* Here is the "usual case". Copy this tree node, and then
1138 tweak some special cases. */
1139 copy_tree_r (tp
, walk_subtrees
, NULL
);
1141 /* If EXPR has block defined, map it to newly constructed block.
1142 When inlining we want EXPRs without block appear in the block
1143 of function call if we are not remapping a type. */
1146 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1147 if (TREE_BLOCK (*tp
))
1150 n
= (tree
*) pointer_map_contains (id
->decl_map
,
1155 TREE_SET_BLOCK (*tp
, new_block
);
1158 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1159 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1161 /* The copied TARGET_EXPR has never been expanded, even if the
1162 original node was expanded already. */
1163 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1165 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1166 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1169 /* Variable substitution need not be simple. In particular, the
1170 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1171 and friends are up-to-date. */
1172 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1174 int invariant
= is_gimple_min_invariant (*tp
);
1175 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1177 /* Handle the case where we substituted an INDIRECT_REF
1178 into the operand of the ADDR_EXPR. */
1179 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
)
1180 *tp
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1182 recompute_tree_invariant_for_addr_expr (*tp
);
1184 /* If this used to be invariant, but is not any longer,
1185 then regimplification is probably needed. */
1186 if (invariant
&& !is_gimple_min_invariant (*tp
))
1187 id
->regimplify
= true;
1193 /* Keep iterating. */
1197 /* Helper for remap_gimple_stmt. Given an EH region number for the
1198 source function, map that to the duplicate EH region number in
1199 the destination function. */
1202 remap_eh_region_nr (int old_nr
, copy_body_data
*id
)
1204 eh_region old_r
, new_r
;
1207 old_r
= get_eh_region_from_number_fn (id
->src_cfun
, old_nr
);
1208 slot
= pointer_map_contains (id
->eh_map
, old_r
);
1209 new_r
= (eh_region
) *slot
;
1211 return new_r
->index
;
1214 /* Similar, but operate on INTEGER_CSTs. */
1217 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1221 old_nr
= tree_low_cst (old_t_nr
, 0);
1222 new_nr
= remap_eh_region_nr (old_nr
, id
);
1224 return build_int_cst (integer_type_node
, new_nr
);
1227 /* Helper for copy_bb. Remap statement STMT using the inlining
1228 information in ID. Return the new statement copy. */
1231 remap_gimple_stmt (gimple stmt
, copy_body_data
*id
)
1234 struct walk_stmt_info wi
;
1235 bool skip_first
= false;
1237 /* Begin by recognizing trees that we'll completely rewrite for the
1238 inlining context. Our output for these trees is completely
1239 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1240 into an edge). Further down, we'll handle trees that get
1241 duplicated and/or tweaked. */
1243 /* When requested, GIMPLE_RETURNs should be transformed to just the
1244 contained GIMPLE_ASSIGN. The branch semantics of the return will
1245 be handled elsewhere by manipulating the CFG rather than the
1247 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1249 tree retval
= gimple_return_retval (stmt
);
1251 /* If we're returning something, just turn that into an
1252 assignment into the equivalent of the original RESULT_DECL.
1253 If RETVAL is just the result decl, the result decl has
1254 already been set (e.g. a recent "foo (&result_decl, ...)");
1255 just toss the entire GIMPLE_RETURN. */
1257 && (TREE_CODE (retval
) != RESULT_DECL
1258 && (TREE_CODE (retval
) != SSA_NAME
1259 || ! SSA_NAME_VAR (retval
)
1260 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1262 copy
= gimple_build_assign (id
->retvar
, retval
);
1263 /* id->retvar is already substituted. Skip it on later remapping. */
1267 return gimple_build_nop ();
1269 else if (gimple_has_substatements (stmt
))
1273 /* When cloning bodies from the C++ front end, we will be handed bodies
1274 in High GIMPLE form. Handle here all the High GIMPLE statements that
1275 have embedded statements. */
1276 switch (gimple_code (stmt
))
1279 copy
= copy_gimple_bind (stmt
, id
);
1283 s1
= remap_gimple_seq (gimple_catch_handler (stmt
), id
);
1284 copy
= gimple_build_catch (gimple_catch_types (stmt
), s1
);
1287 case GIMPLE_EH_FILTER
:
1288 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1289 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1293 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1294 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1295 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1298 case GIMPLE_WITH_CLEANUP_EXPR
:
1299 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1300 copy
= gimple_build_wce (s1
);
1303 case GIMPLE_OMP_PARALLEL
:
1304 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1305 copy
= gimple_build_omp_parallel
1307 gimple_omp_parallel_clauses (stmt
),
1308 gimple_omp_parallel_child_fn (stmt
),
1309 gimple_omp_parallel_data_arg (stmt
));
1312 case GIMPLE_OMP_TASK
:
1313 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1314 copy
= gimple_build_omp_task
1316 gimple_omp_task_clauses (stmt
),
1317 gimple_omp_task_child_fn (stmt
),
1318 gimple_omp_task_data_arg (stmt
),
1319 gimple_omp_task_copy_fn (stmt
),
1320 gimple_omp_task_arg_size (stmt
),
1321 gimple_omp_task_arg_align (stmt
));
1324 case GIMPLE_OMP_FOR
:
1325 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1326 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1327 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1328 gimple_omp_for_clauses (stmt
),
1329 gimple_omp_for_collapse (stmt
), s2
);
1332 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1334 gimple_omp_for_set_index (copy
, i
,
1335 gimple_omp_for_index (stmt
, i
));
1336 gimple_omp_for_set_initial (copy
, i
,
1337 gimple_omp_for_initial (stmt
, i
));
1338 gimple_omp_for_set_final (copy
, i
,
1339 gimple_omp_for_final (stmt
, i
));
1340 gimple_omp_for_set_incr (copy
, i
,
1341 gimple_omp_for_incr (stmt
, i
));
1342 gimple_omp_for_set_cond (copy
, i
,
1343 gimple_omp_for_cond (stmt
, i
));
1348 case GIMPLE_OMP_MASTER
:
1349 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1350 copy
= gimple_build_omp_master (s1
);
1353 case GIMPLE_OMP_TASKGROUP
:
1354 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1355 copy
= gimple_build_omp_taskgroup (s1
);
1358 case GIMPLE_OMP_ORDERED
:
1359 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1360 copy
= gimple_build_omp_ordered (s1
);
1363 case GIMPLE_OMP_SECTION
:
1364 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1365 copy
= gimple_build_omp_section (s1
);
1368 case GIMPLE_OMP_SECTIONS
:
1369 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1370 copy
= gimple_build_omp_sections
1371 (s1
, gimple_omp_sections_clauses (stmt
));
1374 case GIMPLE_OMP_SINGLE
:
1375 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1376 copy
= gimple_build_omp_single
1377 (s1
, gimple_omp_single_clauses (stmt
));
1380 case GIMPLE_OMP_TARGET
:
1381 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1382 copy
= gimple_build_omp_target
1383 (s1
, gimple_omp_target_kind (stmt
),
1384 gimple_omp_target_clauses (stmt
));
1387 case GIMPLE_OMP_TEAMS
:
1388 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1389 copy
= gimple_build_omp_teams
1390 (s1
, gimple_omp_teams_clauses (stmt
));
1393 case GIMPLE_OMP_CRITICAL
:
1394 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1396 = gimple_build_omp_critical (s1
, gimple_omp_critical_name (stmt
));
1399 case GIMPLE_TRANSACTION
:
1400 s1
= remap_gimple_seq (gimple_transaction_body (stmt
), id
);
1401 copy
= gimple_build_transaction (s1
, gimple_transaction_label (stmt
));
1402 gimple_transaction_set_subcode (copy
, gimple_transaction_subcode (stmt
));
1411 if (gimple_assign_copy_p (stmt
)
1412 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1413 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1415 /* Here we handle statements that are not completely rewritten.
1416 First we detect some inlining-induced bogosities for
1419 /* Some assignments VAR = VAR; don't generate any rtl code
1420 and thus don't count as variable modification. Avoid
1421 keeping bogosities like 0 = 0. */
1422 tree decl
= gimple_assign_lhs (stmt
), value
;
1425 n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
1429 STRIP_TYPE_NOPS (value
);
1430 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1431 return gimple_build_nop ();
1435 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1436 in a block that we aren't copying during tree_function_versioning,
1437 just drop the clobber stmt. */
1438 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1440 tree lhs
= gimple_assign_lhs (stmt
);
1441 if (TREE_CODE (lhs
) == MEM_REF
1442 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1444 gimple def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1445 if (gimple_bb (def_stmt
)
1446 && !bitmap_bit_p (id
->blocks_to_copy
,
1447 gimple_bb (def_stmt
)->index
))
1448 return gimple_build_nop ();
1452 if (gimple_debug_bind_p (stmt
))
1454 copy
= gimple_build_debug_bind (gimple_debug_bind_get_var (stmt
),
1455 gimple_debug_bind_get_value (stmt
),
1457 id
->debug_stmts
.safe_push (copy
);
1460 if (gimple_debug_source_bind_p (stmt
))
1462 copy
= gimple_build_debug_source_bind
1463 (gimple_debug_source_bind_get_var (stmt
),
1464 gimple_debug_source_bind_get_value (stmt
), stmt
);
1465 id
->debug_stmts
.safe_push (copy
);
1469 /* Create a new deep copy of the statement. */
1470 copy
= gimple_copy (stmt
);
1472 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1473 RESX and EH_DISPATCH. */
1475 switch (gimple_code (copy
))
1479 tree r
, fndecl
= gimple_call_fndecl (copy
);
1480 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
1481 switch (DECL_FUNCTION_CODE (fndecl
))
1483 case BUILT_IN_EH_COPY_VALUES
:
1484 r
= gimple_call_arg (copy
, 1);
1485 r
= remap_eh_region_tree_nr (r
, id
);
1486 gimple_call_set_arg (copy
, 1, r
);
1489 case BUILT_IN_EH_POINTER
:
1490 case BUILT_IN_EH_FILTER
:
1491 r
= gimple_call_arg (copy
, 0);
1492 r
= remap_eh_region_tree_nr (r
, id
);
1493 gimple_call_set_arg (copy
, 0, r
);
1500 /* Reset alias info if we didn't apply measures to
1501 keep it valid over inlining by setting DECL_PT_UID. */
1502 if (!id
->src_cfun
->gimple_df
1503 || !id
->src_cfun
->gimple_df
->ipa_pta
)
1504 gimple_call_reset_alias_info (copy
);
1510 int r
= gimple_resx_region (copy
);
1511 r
= remap_eh_region_nr (r
, id
);
1512 gimple_resx_set_region (copy
, r
);
1516 case GIMPLE_EH_DISPATCH
:
1518 int r
= gimple_eh_dispatch_region (copy
);
1519 r
= remap_eh_region_nr (r
, id
);
1520 gimple_eh_dispatch_set_region (copy
, r
);
1529 /* If STMT has a block defined, map it to the newly constructed
1531 if (gimple_block (copy
))
1534 n
= (tree
*) pointer_map_contains (id
->decl_map
, gimple_block (copy
));
1536 gimple_set_block (copy
, *n
);
1539 if (gimple_debug_bind_p (copy
) || gimple_debug_source_bind_p (copy
))
1542 /* Remap all the operands in COPY. */
1543 memset (&wi
, 0, sizeof (wi
));
1546 walk_tree (gimple_op_ptr (copy
, 1), remap_gimple_op_r
, &wi
, NULL
);
1548 walk_gimple_op (copy
, remap_gimple_op_r
, &wi
);
1550 /* Clear the copied virtual operands. We are not remapping them here
1551 but are going to recreate them from scratch. */
1552 if (gimple_has_mem_ops (copy
))
1554 gimple_set_vdef (copy
, NULL_TREE
);
1555 gimple_set_vuse (copy
, NULL_TREE
);
1562 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1566 copy_bb (copy_body_data
*id
, basic_block bb
, int frequency_scale
,
1567 gcov_type count_scale
)
1569 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1570 basic_block copy_basic_block
;
1575 /* Search for previous copied basic block. */
1578 prev
= prev
->prev_bb
;
1580 /* create_basic_block() will append every new block to
1581 basic_block_info automatically. */
1582 copy_basic_block
= create_basic_block (NULL
, (void *) 0,
1583 (basic_block
) prev
->aux
);
1584 copy_basic_block
->count
= apply_scale (bb
->count
, count_scale
);
1586 /* We are going to rebuild frequencies from scratch. These values
1587 have just small importance to drive canonicalize_loop_headers. */
1588 freq
= apply_scale ((gcov_type
)bb
->frequency
, frequency_scale
);
1590 /* We recompute frequencies after inlining, so this is quite safe. */
1591 if (freq
> BB_FREQ_MAX
)
1593 copy_basic_block
->frequency
= freq
;
1595 copy_gsi
= gsi_start_bb (copy_basic_block
);
1597 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1599 gimple stmt
= gsi_stmt (gsi
);
1600 gimple orig_stmt
= stmt
;
1602 id
->regimplify
= false;
1603 stmt
= remap_gimple_stmt (stmt
, id
);
1604 if (gimple_nop_p (stmt
))
1607 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
, orig_stmt
);
1610 /* With return slot optimization we can end up with
1611 non-gimple (foo *)&this->m, fix that here. */
1612 if (is_gimple_assign (stmt
)
1613 && gimple_assign_rhs_code (stmt
) == NOP_EXPR
1614 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1617 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1618 gimple_assign_rhs1 (stmt
),
1620 GSI_CONTINUE_LINKING
);
1621 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1622 id
->regimplify
= false;
1625 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1628 gimple_regimplify_operands (stmt
, &seq_gsi
);
1630 /* If copy_basic_block has been empty at the start of this iteration,
1631 call gsi_start_bb again to get at the newly added statements. */
1632 if (gsi_end_p (copy_gsi
))
1633 copy_gsi
= gsi_start_bb (copy_basic_block
);
1635 gsi_next (©_gsi
);
1637 /* Process the new statement. The call to gimple_regimplify_operands
1638 possibly turned the statement into multiple statements, we
1639 need to process all of them. */
1644 stmt
= gsi_stmt (copy_gsi
);
1645 if (is_gimple_call (stmt
)
1646 && gimple_call_va_arg_pack_p (stmt
)
1649 /* __builtin_va_arg_pack () should be replaced by
1650 all arguments corresponding to ... in the caller. */
1654 size_t nargs
= gimple_call_num_args (id
->gimple_call
);
1657 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1660 /* Create the new array of arguments. */
1661 n
= nargs
+ gimple_call_num_args (stmt
);
1662 argarray
.create (n
);
1663 argarray
.safe_grow_cleared (n
);
1665 /* Copy all the arguments before '...' */
1666 memcpy (argarray
.address (),
1667 gimple_call_arg_ptr (stmt
, 0),
1668 gimple_call_num_args (stmt
) * sizeof (tree
));
1670 /* Append the arguments passed in '...' */
1671 memcpy (argarray
.address () + gimple_call_num_args (stmt
),
1672 gimple_call_arg_ptr (id
->gimple_call
, 0)
1673 + (gimple_call_num_args (id
->gimple_call
) - nargs
),
1674 nargs
* sizeof (tree
));
1676 new_call
= gimple_build_call_vec (gimple_call_fn (stmt
),
1679 argarray
.release ();
1681 /* Copy all GIMPLE_CALL flags, location and block, except
1682 GF_CALL_VA_ARG_PACK. */
1683 gimple_call_copy_flags (new_call
, stmt
);
1684 gimple_call_set_va_arg_pack (new_call
, false);
1685 gimple_set_location (new_call
, gimple_location (stmt
));
1686 gimple_set_block (new_call
, gimple_block (stmt
));
1687 gimple_call_set_lhs (new_call
, gimple_call_lhs (stmt
));
1689 gsi_replace (©_gsi
, new_call
, false);
1692 else if (is_gimple_call (stmt
)
1694 && (decl
= gimple_call_fndecl (stmt
))
1695 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
1696 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_VA_ARG_PACK_LEN
)
1698 /* __builtin_va_arg_pack_len () should be replaced by
1699 the number of anonymous arguments. */
1700 size_t nargs
= gimple_call_num_args (id
->gimple_call
);
1704 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1707 count
= build_int_cst (integer_type_node
, nargs
);
1708 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
), count
);
1709 gsi_replace (©_gsi
, new_stmt
, false);
1713 /* Statements produced by inlining can be unfolded, especially
1714 when we constant propagated some operands. We can't fold
1715 them right now for two reasons:
1716 1) folding require SSA_NAME_DEF_STMTs to be correct
1717 2) we can't change function calls to builtins.
1718 So we just mark statement for later folding. We mark
1719 all new statements, instead just statements that has changed
1720 by some nontrivial substitution so even statements made
1721 foldable indirectly are updated. If this turns out to be
1722 expensive, copy_body can be told to watch for nontrivial
1724 if (id
->statements_to_fold
)
1725 pointer_set_insert (id
->statements_to_fold
, stmt
);
1727 /* We're duplicating a CALL_EXPR. Find any corresponding
1728 callgraph edges and update or duplicate them. */
1729 if (is_gimple_call (stmt
))
1731 struct cgraph_edge
*edge
;
1734 switch (id
->transform_call_graph_edges
)
1736 case CB_CGE_DUPLICATE
:
1737 edge
= cgraph_edge (id
->src_node
, orig_stmt
);
1740 int edge_freq
= edge
->frequency
;
1742 struct cgraph_edge
*old_edge
= edge
;
1743 edge
= cgraph_clone_edge (edge
, id
->dst_node
, stmt
,
1745 REG_BR_PROB_BASE
, CGRAPH_FREQ_BASE
,
1747 /* We could also just rescale the frequency, but
1748 doing so would introduce roundoff errors and make
1749 verifier unhappy. */
1750 new_freq
= compute_call_stmt_bb_frequency (id
->dst_node
->symbol
.decl
,
1753 /* Speculative calls consist of two edges - direct and indirect.
1754 Duplicate the whole thing and distribute frequencies accordingly. */
1755 if (edge
->speculative
)
1757 struct cgraph_edge
*direct
, *indirect
;
1758 struct ipa_ref
*ref
;
1760 gcc_assert (!edge
->indirect_unknown_callee
);
1761 cgraph_speculative_call_info (old_edge
, direct
, indirect
, ref
);
1762 indirect
= cgraph_clone_edge (indirect
, id
->dst_node
, stmt
,
1764 REG_BR_PROB_BASE
, CGRAPH_FREQ_BASE
,
1766 if (old_edge
->frequency
+ indirect
->frequency
)
1768 edge
->frequency
= MIN (RDIV ((gcov_type
)new_freq
* old_edge
->frequency
,
1769 (old_edge
->frequency
+ indirect
->frequency
)),
1771 indirect
->frequency
= MIN (RDIV ((gcov_type
)new_freq
* indirect
->frequency
,
1772 (old_edge
->frequency
+ indirect
->frequency
)),
1775 ipa_clone_ref (ref
, (symtab_node
)id
->dst_node
, stmt
);
1779 edge
->frequency
= new_freq
;
1781 && profile_status_for_function (cfun
) != PROFILE_ABSENT
1782 && (edge_freq
> edge
->frequency
+ 10
1783 || edge_freq
< edge
->frequency
- 10))
1785 fprintf (dump_file
, "Edge frequency estimated by "
1786 "cgraph %i diverge from inliner's estimate %i\n",
1790 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1793 copy_basic_block
->frequency
);
1799 case CB_CGE_MOVE_CLONES
:
1800 cgraph_set_call_stmt_including_clones (id
->dst_node
,
1802 edge
= cgraph_edge (id
->dst_node
, stmt
);
1806 edge
= cgraph_edge (id
->dst_node
, orig_stmt
);
1808 cgraph_set_call_stmt (edge
, stmt
);
1815 /* Constant propagation on argument done during inlining
1816 may create new direct call. Produce an edge for it. */
1818 || (edge
->indirect_inlining_edge
1819 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
1820 && id
->dst_node
->symbol
.definition
1821 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
1823 struct cgraph_node
*dest
= cgraph_get_node (fn
);
1825 /* We have missing edge in the callgraph. This can happen
1826 when previous inlining turned an indirect call into a
1827 direct call by constant propagating arguments or we are
1828 producing dead clone (for further cloning). In all
1829 other cases we hit a bug (incorrect node sharing is the
1830 most common reason for missing edges). */
1831 gcc_assert (!dest
->symbol
.definition
1832 || dest
->symbol
.address_taken
1833 || !id
->src_node
->symbol
.definition
1834 || !id
->dst_node
->symbol
.definition
);
1835 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
1836 cgraph_create_edge_including_clones
1837 (id
->dst_node
, dest
, orig_stmt
, stmt
, bb
->count
,
1838 compute_call_stmt_bb_frequency (id
->dst_node
->symbol
.decl
,
1840 CIF_ORIGINALLY_INDIRECT_CALL
);
1842 cgraph_create_edge (id
->dst_node
, dest
, stmt
,
1844 compute_call_stmt_bb_frequency
1845 (id
->dst_node
->symbol
.decl
,
1846 copy_basic_block
))->inline_failed
1847 = CIF_ORIGINALLY_INDIRECT_CALL
;
1850 fprintf (dump_file
, "Created new direct edge to %s\n",
1851 cgraph_node_name (dest
));
1855 flags
= gimple_call_flags (stmt
);
1856 if (flags
& ECF_MAY_BE_ALLOCA
)
1857 cfun
->calls_alloca
= true;
1858 if (flags
& ECF_RETURNS_TWICE
)
1859 cfun
->calls_setjmp
= true;
1862 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
1863 id
->eh_map
, id
->eh_lp_nr
);
1865 if (gimple_in_ssa_p (cfun
) && !is_gimple_debug (stmt
))
1870 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, i
, SSA_OP_DEF
)
1871 if (TREE_CODE (def
) == SSA_NAME
)
1872 SSA_NAME_DEF_STMT (def
) = stmt
;
1875 gsi_next (©_gsi
);
1877 while (!gsi_end_p (copy_gsi
));
1879 copy_gsi
= gsi_last_bb (copy_basic_block
);
1882 return copy_basic_block
;
1885 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1886 form is quite easy, since dominator relationship for old basic blocks does
1889 There is however exception where inlining might change dominator relation
1890 across EH edges from basic block within inlined functions destinating
1891 to landing pads in function we inline into.
1893 The function fills in PHI_RESULTs of such PHI nodes if they refer
1894 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1895 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1896 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1897 set, and this means that there will be no overlapping live ranges
1898 for the underlying symbol.
1900 This might change in future if we allow redirecting of EH edges and
1901 we might want to change way build CFG pre-inlining to include
1902 all the possible edges then. */
1904 update_ssa_across_abnormal_edges (basic_block bb
, basic_block ret_bb
,
1905 bool can_throw
, bool nonlocal_goto
)
1910 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1912 || ((basic_block
)e
->dest
->aux
)->index
== ENTRY_BLOCK
)
1915 gimple_stmt_iterator si
;
1918 gcc_assert (e
->flags
& EDGE_EH
);
1921 gcc_assert (!(e
->flags
& EDGE_EH
));
1923 for (si
= gsi_start_phis (e
->dest
); !gsi_end_p (si
); gsi_next (&si
))
1927 phi
= gsi_stmt (si
);
1929 /* For abnormal goto/call edges the receiver can be the
1930 ENTRY_BLOCK. Do not assert this cannot happen. */
1932 gcc_assert ((e
->flags
& EDGE_EH
)
1933 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)));
1935 re
= find_edge (ret_bb
, e
->dest
);
1936 gcc_checking_assert (re
);
1937 gcc_assert ((re
->flags
& (EDGE_EH
| EDGE_ABNORMAL
))
1938 == (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
)));
1940 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
1941 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, re
)));
1947 /* Copy edges from BB into its copy constructed earlier, scale profile
1948 accordingly. Edges will be taken care of later. Assume aux
1949 pointers to point to the copies of each BB. Return true if any
1950 debug stmts are left after a statement that must end the basic block. */
1953 copy_edges_for_bb (basic_block bb
, gcov_type count_scale
, basic_block ret_bb
,
1954 bool can_make_abnormal_goto
)
1956 basic_block new_bb
= (basic_block
) bb
->aux
;
1959 gimple_stmt_iterator si
;
1961 bool need_debug_cleanup
= false;
1963 /* Use the indices from the original blocks to create edges for the
1965 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
1966 if (!(old_edge
->flags
& EDGE_EH
))
1970 flags
= old_edge
->flags
;
1972 /* Return edges do get a FALLTHRU flag when the get inlined. */
1973 if (old_edge
->dest
->index
== EXIT_BLOCK
&& !old_edge
->flags
1974 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR
)
1975 flags
|= EDGE_FALLTHRU
;
1976 new_edge
= make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
1977 new_edge
->count
= apply_scale (old_edge
->count
, count_scale
);
1978 new_edge
->probability
= old_edge
->probability
;
1981 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
1984 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
1987 bool can_throw
, nonlocal_goto
;
1989 copy_stmt
= gsi_stmt (si
);
1990 if (!is_gimple_debug (copy_stmt
))
1991 update_stmt (copy_stmt
);
1993 /* Do this before the possible split_block. */
1996 /* If this tree could throw an exception, there are two
1997 cases where we need to add abnormal edge(s): the
1998 tree wasn't in a region and there is a "current
1999 region" in the caller; or the original tree had
2000 EH edges. In both cases split the block after the tree,
2001 and add abnormal edge(s) as needed; we need both
2002 those from the callee and the caller.
2003 We check whether the copy can throw, because the const
2004 propagation can change an INDIRECT_REF which throws
2005 into a COMPONENT_REF which doesn't. If the copy
2006 can throw, the original could also throw. */
2007 can_throw
= stmt_can_throw_internal (copy_stmt
);
2008 nonlocal_goto
= stmt_can_make_abnormal_goto (copy_stmt
);
2010 if (can_throw
|| nonlocal_goto
)
2012 if (!gsi_end_p (si
))
2014 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2017 need_debug_cleanup
= true;
2019 if (!gsi_end_p (si
))
2020 /* Note that bb's predecessor edges aren't necessarily
2021 right at this point; split_block doesn't care. */
2023 edge e
= split_block (new_bb
, copy_stmt
);
2026 new_bb
->aux
= e
->src
->aux
;
2027 si
= gsi_start_bb (new_bb
);
2031 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2032 make_eh_dispatch_edges (copy_stmt
);
2034 make_eh_edges (copy_stmt
);
2036 /* If the call we inline cannot make abnormal goto do not add
2037 additional abnormal edges but only retain those already present
2038 in the original function body. */
2039 nonlocal_goto
&= can_make_abnormal_goto
;
2041 make_abnormal_goto_edges (gimple_bb (copy_stmt
), true);
2043 if ((can_throw
|| nonlocal_goto
)
2044 && gimple_in_ssa_p (cfun
))
2045 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2046 can_throw
, nonlocal_goto
);
2048 return need_debug_cleanup
;
2051 /* Copy the PHIs. All blocks and edges are copied, some blocks
2052 was possibly split and new outgoing EH edges inserted.
2053 BB points to the block of original function and AUX pointers links
2054 the original and newly copied blocks. */
2057 copy_phis_for_bb (basic_block bb
, copy_body_data
*id
)
2059 basic_block
const new_bb
= (basic_block
) bb
->aux
;
2062 gimple_stmt_iterator si
;
2064 bool inserted
= false;
2066 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
2071 phi
= gsi_stmt (si
);
2072 res
= PHI_RESULT (phi
);
2074 if (!virtual_operand_p (res
))
2076 walk_tree (&new_res
, copy_tree_body_r
, id
, NULL
);
2077 new_phi
= create_phi_node (new_res
, new_bb
);
2078 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2080 edge old_edge
= find_edge ((basic_block
) new_edge
->src
->aux
, bb
);
2086 /* When doing partial cloning, we allow PHIs on the entry block
2087 as long as all the arguments are the same. Find any input
2088 edge to see argument to copy. */
2090 FOR_EACH_EDGE (old_edge
, ei2
, bb
->preds
)
2091 if (!old_edge
->src
->aux
)
2094 arg
= PHI_ARG_DEF_FROM_EDGE (phi
, old_edge
);
2096 walk_tree (&new_arg
, copy_tree_body_r
, id
, NULL
);
2097 gcc_assert (new_arg
);
2098 /* With return slot optimization we can end up with
2099 non-gimple (foo *)&this->m, fix that here. */
2100 if (TREE_CODE (new_arg
) != SSA_NAME
2101 && TREE_CODE (new_arg
) != FUNCTION_DECL
2102 && !is_gimple_val (new_arg
))
2104 gimple_seq stmts
= NULL
;
2105 new_arg
= force_gimple_operand (new_arg
, &stmts
, true, NULL
);
2106 gsi_insert_seq_on_edge (new_edge
, stmts
);
2109 locus
= gimple_phi_arg_location_from_edge (phi
, old_edge
);
2110 if (LOCATION_BLOCK (locus
))
2113 n
= (tree
*) pointer_map_contains (id
->decl_map
,
2114 LOCATION_BLOCK (locus
));
2117 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, *n
);
2119 locus
= LOCATION_LOCUS (locus
);
2122 locus
= LOCATION_LOCUS (locus
);
2124 add_phi_arg (new_phi
, new_arg
, new_edge
, locus
);
2129 /* Commit the delayed edge insertions. */
2131 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2132 gsi_commit_one_edge_insert (new_edge
, NULL
);
2136 /* Wrapper for remap_decl so it can be used as a callback. */
2139 remap_decl_1 (tree decl
, void *data
)
2141 return remap_decl (decl
, (copy_body_data
*) data
);
2144 /* Build struct function and associated datastructures for the new clone
2145 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2146 the cfun to the function of new_fndecl (and current_function_decl too). */
2149 initialize_cfun (tree new_fndecl
, tree callee_fndecl
, gcov_type count
)
2151 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2152 gcov_type count_scale
;
2154 if (!DECL_ARGUMENTS (new_fndecl
))
2155 DECL_ARGUMENTS (new_fndecl
) = DECL_ARGUMENTS (callee_fndecl
);
2156 if (!DECL_RESULT (new_fndecl
))
2157 DECL_RESULT (new_fndecl
) = DECL_RESULT (callee_fndecl
);
2159 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
)
2161 = GCOV_COMPUTE_SCALE (count
,
2162 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
);
2164 count_scale
= REG_BR_PROB_BASE
;
2166 /* Register specific tree functions. */
2167 gimple_register_cfg_hooks ();
2169 /* Get clean struct function. */
2170 push_struct_function (new_fndecl
);
2172 /* We will rebuild these, so just sanity check that they are empty. */
2173 gcc_assert (VALUE_HISTOGRAMS (cfun
) == NULL
);
2174 gcc_assert (cfun
->local_decls
== NULL
);
2175 gcc_assert (cfun
->cfg
== NULL
);
2176 gcc_assert (cfun
->decl
== new_fndecl
);
2178 /* Copy items we preserve during cloning. */
2179 cfun
->static_chain_decl
= src_cfun
->static_chain_decl
;
2180 cfun
->nonlocal_goto_save_area
= src_cfun
->nonlocal_goto_save_area
;
2181 cfun
->function_end_locus
= src_cfun
->function_end_locus
;
2182 cfun
->curr_properties
= src_cfun
->curr_properties
;
2183 cfun
->last_verified
= src_cfun
->last_verified
;
2184 cfun
->va_list_gpr_size
= src_cfun
->va_list_gpr_size
;
2185 cfun
->va_list_fpr_size
= src_cfun
->va_list_fpr_size
;
2186 cfun
->has_nonlocal_label
= src_cfun
->has_nonlocal_label
;
2187 cfun
->stdarg
= src_cfun
->stdarg
;
2188 cfun
->after_inlining
= src_cfun
->after_inlining
;
2189 cfun
->can_throw_non_call_exceptions
2190 = src_cfun
->can_throw_non_call_exceptions
;
2191 cfun
->can_delete_dead_exceptions
= src_cfun
->can_delete_dead_exceptions
;
2192 cfun
->returns_struct
= src_cfun
->returns_struct
;
2193 cfun
->returns_pcc_struct
= src_cfun
->returns_pcc_struct
;
2195 init_empty_tree_cfg ();
2197 profile_status_for_function (cfun
) = profile_status_for_function (src_cfun
);
2198 ENTRY_BLOCK_PTR
->count
=
2199 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
* count_scale
/
2201 ENTRY_BLOCK_PTR
->frequency
2202 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->frequency
;
2203 EXIT_BLOCK_PTR
->count
=
2204 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
* count_scale
/
2206 EXIT_BLOCK_PTR
->frequency
=
2207 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->frequency
;
2209 init_eh_for_function ();
2211 if (src_cfun
->gimple_df
)
2213 init_tree_ssa (cfun
);
2214 cfun
->gimple_df
->in_ssa_p
= true;
2215 init_ssa_operands (cfun
);
2219 /* Helper function for copy_cfg_body. Move debug stmts from the end
2220 of NEW_BB to the beginning of successor basic blocks when needed. If the
2221 successor has multiple predecessors, reset them, otherwise keep
2225 maybe_move_debug_stmts_to_successors (copy_body_data
*id
, basic_block new_bb
)
2229 gimple_stmt_iterator si
= gsi_last_nondebug_bb (new_bb
);
2232 || gsi_one_before_end_p (si
)
2233 || !(stmt_can_throw_internal (gsi_stmt (si
))
2234 || stmt_can_make_abnormal_goto (gsi_stmt (si
))))
2237 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
2239 gimple_stmt_iterator ssi
= gsi_last_bb (new_bb
);
2240 gimple_stmt_iterator dsi
= gsi_after_labels (e
->dest
);
2241 while (is_gimple_debug (gsi_stmt (ssi
)))
2243 gimple stmt
= gsi_stmt (ssi
), new_stmt
;
2247 /* For the last edge move the debug stmts instead of copying
2249 if (ei_one_before_end_p (ei
))
2253 if (!single_pred_p (e
->dest
) && gimple_debug_bind_p (stmt
))
2254 gimple_debug_bind_reset_value (stmt
);
2255 gsi_remove (&si
, false);
2256 gsi_insert_before (&dsi
, stmt
, GSI_SAME_STMT
);
2260 if (gimple_debug_bind_p (stmt
))
2262 var
= gimple_debug_bind_get_var (stmt
);
2263 if (single_pred_p (e
->dest
))
2265 value
= gimple_debug_bind_get_value (stmt
);
2266 value
= unshare_expr (value
);
2270 new_stmt
= gimple_build_debug_bind (var
, value
, stmt
);
2272 else if (gimple_debug_source_bind_p (stmt
))
2274 var
= gimple_debug_source_bind_get_var (stmt
);
2275 value
= gimple_debug_source_bind_get_value (stmt
);
2276 new_stmt
= gimple_build_debug_source_bind (var
, value
, stmt
);
2280 gsi_insert_before (&dsi
, new_stmt
, GSI_SAME_STMT
);
2281 id
->debug_stmts
.safe_push (new_stmt
);
2287 /* Make a copy of the sub-loops of SRC_PARENT and place them
2288 as siblings of DEST_PARENT. */
2291 copy_loops (copy_body_data
*id
,
2292 struct loop
*dest_parent
, struct loop
*src_parent
)
2294 struct loop
*src_loop
= src_parent
->inner
;
2297 if (!id
->blocks_to_copy
2298 || bitmap_bit_p (id
->blocks_to_copy
, src_loop
->header
->index
))
2300 struct loop
*dest_loop
= alloc_loop ();
2302 /* Assign the new loop its header and latch and associate
2303 those with the new loop. */
2304 if (src_loop
->header
!= NULL
)
2306 dest_loop
->header
= (basic_block
)src_loop
->header
->aux
;
2307 dest_loop
->header
->loop_father
= dest_loop
;
2309 if (src_loop
->latch
!= NULL
)
2311 dest_loop
->latch
= (basic_block
)src_loop
->latch
->aux
;
2312 dest_loop
->latch
->loop_father
= dest_loop
;
2315 /* Copy loop meta-data. */
2316 copy_loop_info (src_loop
, dest_loop
);
2318 /* Finally place it into the loop array and the loop tree. */
2319 place_new_loop (cfun
, dest_loop
);
2320 flow_loop_tree_node_add (dest_parent
, dest_loop
);
2322 if (src_loop
->simduid
)
2324 dest_loop
->simduid
= remap_decl (src_loop
->simduid
, id
);
2325 cfun
->has_simduid_loops
= true;
2327 if (src_loop
->force_vect
)
2329 dest_loop
->force_vect
= true;
2330 cfun
->has_force_vect_loops
= true;
2334 copy_loops (id
, dest_loop
, src_loop
);
2336 src_loop
= src_loop
->next
;
2340 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2343 redirect_all_calls (copy_body_data
* id
, basic_block bb
)
2345 gimple_stmt_iterator si
;
2346 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
2348 if (is_gimple_call (gsi_stmt (si
)))
2350 struct cgraph_edge
*edge
= cgraph_edge (id
->dst_node
, gsi_stmt (si
));
2352 cgraph_redirect_edge_call_stmt_to_callee (edge
);
2357 /* Make a copy of the body of FN so that it can be inserted inline in
2358 another function. Walks FN via CFG, returns new fndecl. */
2361 copy_cfg_body (copy_body_data
* id
, gcov_type count
, int frequency_scale
,
2362 basic_block entry_block_map
, basic_block exit_block_map
,
2363 basic_block new_entry
)
2365 tree callee_fndecl
= id
->src_fn
;
2366 /* Original cfun for the callee, doesn't change. */
2367 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2368 struct function
*cfun_to_copy
;
2370 tree new_fndecl
= NULL
;
2371 bool need_debug_cleanup
= false;
2372 gcov_type count_scale
;
2374 int incoming_frequency
= 0;
2375 gcov_type incoming_count
= 0;
2377 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
)
2379 = GCOV_COMPUTE_SCALE (count
,
2380 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
);
2382 count_scale
= REG_BR_PROB_BASE
;
2384 /* Register specific tree functions. */
2385 gimple_register_cfg_hooks ();
2387 /* If we are inlining just region of the function, make sure to connect new entry
2388 to ENTRY_BLOCK_PTR. Since new entry can be part of loop, we must compute
2389 frequency and probability of ENTRY_BLOCK_PTR based on the frequencies and
2390 probabilities of edges incoming from nonduplicated region. */
2396 FOR_EACH_EDGE (e
, ei
, new_entry
->preds
)
2399 incoming_frequency
+= EDGE_FREQUENCY (e
);
2400 incoming_count
+= e
->count
;
2402 incoming_count
= apply_scale (incoming_count
, count_scale
);
2404 = apply_scale ((gcov_type
)incoming_frequency
, frequency_scale
);
2405 ENTRY_BLOCK_PTR
->count
= incoming_count
;
2406 ENTRY_BLOCK_PTR
->frequency
= incoming_frequency
;
2409 /* Must have a CFG here at this point. */
2410 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2411 (DECL_STRUCT_FUNCTION (callee_fndecl
)));
2413 cfun_to_copy
= id
->src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2415 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy
)->aux
= entry_block_map
;
2416 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy
)->aux
= exit_block_map
;
2417 entry_block_map
->aux
= ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy
);
2418 exit_block_map
->aux
= EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy
);
2420 /* Duplicate any exception-handling regions. */
2422 id
->eh_map
= duplicate_eh_regions (cfun_to_copy
, NULL
, id
->eh_lp_nr
,
2425 /* Use aux pointers to map the original blocks to copy. */
2426 FOR_EACH_BB_FN (bb
, cfun_to_copy
)
2427 if (!id
->blocks_to_copy
|| bitmap_bit_p (id
->blocks_to_copy
, bb
->index
))
2429 basic_block new_bb
= copy_bb (id
, bb
, frequency_scale
, count_scale
);
2432 new_bb
->loop_father
= entry_block_map
->loop_father
;
2435 last
= last_basic_block
;
2437 /* Now that we've duplicated the blocks, duplicate their edges. */
2438 bool can_make_abormal_goto
2439 = id
->gimple_call
&& stmt_can_make_abnormal_goto (id
->gimple_call
);
2440 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2441 if (!id
->blocks_to_copy
2442 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2443 need_debug_cleanup
|= copy_edges_for_bb (bb
, count_scale
, exit_block_map
,
2444 can_make_abormal_goto
);
2448 edge e
= make_edge (entry_block_map
, (basic_block
)new_entry
->aux
, EDGE_FALLTHRU
);
2449 e
->probability
= REG_BR_PROB_BASE
;
2450 e
->count
= incoming_count
;
2453 /* Duplicate the loop tree, if available and wanted. */
2454 if (loops_for_fn (src_cfun
) != NULL
2455 && current_loops
!= NULL
)
2457 copy_loops (id
, entry_block_map
->loop_father
,
2458 get_loop (src_cfun
, 0));
2459 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2460 loops_state_set (LOOPS_NEED_FIXUP
);
2463 /* If the loop tree in the source function needed fixup, mark the
2464 destination loop tree for fixup, too. */
2465 if (loops_for_fn (src_cfun
)->state
& LOOPS_NEED_FIXUP
)
2466 loops_state_set (LOOPS_NEED_FIXUP
);
2468 if (gimple_in_ssa_p (cfun
))
2469 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2470 if (!id
->blocks_to_copy
2471 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2472 copy_phis_for_bb (bb
, id
);
2474 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2477 if (need_debug_cleanup
2478 && bb
->index
!= ENTRY_BLOCK
2479 && bb
->index
!= EXIT_BLOCK
)
2480 maybe_move_debug_stmts_to_successors (id
, (basic_block
) bb
->aux
);
2481 /* Update call edge destinations. This can not be done before loop
2482 info is updated, because we may split basic blocks. */
2483 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
)
2484 redirect_all_calls (id
, (basic_block
)bb
->aux
);
2485 ((basic_block
)bb
->aux
)->aux
= NULL
;
2489 /* Zero out AUX fields of newly created block during EH edge
2491 for (; last
< last_basic_block
; last
++)
2493 if (need_debug_cleanup
)
2494 maybe_move_debug_stmts_to_successors (id
, BASIC_BLOCK (last
));
2495 BASIC_BLOCK (last
)->aux
= NULL
;
2496 /* Update call edge destinations. This can not be done before loop
2497 info is updated, because we may split basic blocks. */
2498 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
)
2499 redirect_all_calls (id
, BASIC_BLOCK (last
));
2501 entry_block_map
->aux
= NULL
;
2502 exit_block_map
->aux
= NULL
;
2506 pointer_map_destroy (id
->eh_map
);
2513 /* Copy the debug STMT using ID. We deal with these statements in a
2514 special way: if any variable in their VALUE expression wasn't
2515 remapped yet, we won't remap it, because that would get decl uids
2516 out of sync, causing codegen differences between -g and -g0. If
2517 this arises, we drop the VALUE expression altogether. */
2520 copy_debug_stmt (gimple stmt
, copy_body_data
*id
)
2523 struct walk_stmt_info wi
;
2525 if (gimple_block (stmt
))
2527 n
= (tree
*) pointer_map_contains (id
->decl_map
, gimple_block (stmt
));
2528 gimple_set_block (stmt
, n
? *n
: id
->block
);
2531 /* Remap all the operands in COPY. */
2532 memset (&wi
, 0, sizeof (wi
));
2535 processing_debug_stmt
= 1;
2537 if (gimple_debug_source_bind_p (stmt
))
2538 t
= gimple_debug_source_bind_get_var (stmt
);
2540 t
= gimple_debug_bind_get_var (stmt
);
2542 if (TREE_CODE (t
) == PARM_DECL
&& id
->debug_map
2543 && (n
= (tree
*) pointer_map_contains (id
->debug_map
, t
)))
2545 gcc_assert (TREE_CODE (*n
) == VAR_DECL
);
2548 else if (TREE_CODE (t
) == VAR_DECL
2549 && !is_global_var (t
)
2550 && !pointer_map_contains (id
->decl_map
, t
))
2551 /* T is a non-localized variable. */;
2553 walk_tree (&t
, remap_gimple_op_r
, &wi
, NULL
);
2555 if (gimple_debug_bind_p (stmt
))
2557 gimple_debug_bind_set_var (stmt
, t
);
2559 if (gimple_debug_bind_has_value_p (stmt
))
2560 walk_tree (gimple_debug_bind_get_value_ptr (stmt
),
2561 remap_gimple_op_r
, &wi
, NULL
);
2563 /* Punt if any decl couldn't be remapped. */
2564 if (processing_debug_stmt
< 0)
2565 gimple_debug_bind_reset_value (stmt
);
2567 else if (gimple_debug_source_bind_p (stmt
))
2569 gimple_debug_source_bind_set_var (stmt
, t
);
2570 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt
),
2571 remap_gimple_op_r
, &wi
, NULL
);
2572 /* When inlining and source bind refers to one of the optimized
2573 away parameters, change the source bind into normal debug bind
2574 referring to the corresponding DEBUG_EXPR_DECL that should have
2575 been bound before the call stmt. */
2576 t
= gimple_debug_source_bind_get_value (stmt
);
2578 && TREE_CODE (t
) == PARM_DECL
2581 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (id
->src_fn
);
2583 if (debug_args
!= NULL
)
2585 for (i
= 0; i
< vec_safe_length (*debug_args
); i
+= 2)
2586 if ((**debug_args
)[i
] == DECL_ORIGIN (t
)
2587 && TREE_CODE ((**debug_args
)[i
+ 1]) == DEBUG_EXPR_DECL
)
2589 t
= (**debug_args
)[i
+ 1];
2590 stmt
->gsbase
.subcode
= GIMPLE_DEBUG_BIND
;
2591 gimple_debug_bind_set_value (stmt
, t
);
2598 processing_debug_stmt
= 0;
2603 /* Process deferred debug stmts. In order to give values better odds
2604 of being successfully remapped, we delay the processing of debug
2605 stmts until all other stmts that might require remapping are
2609 copy_debug_stmts (copy_body_data
*id
)
2614 if (!id
->debug_stmts
.exists ())
2617 FOR_EACH_VEC_ELT (id
->debug_stmts
, i
, stmt
)
2618 copy_debug_stmt (stmt
, id
);
2620 id
->debug_stmts
.release ();
2623 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2624 another function. */
2627 copy_tree_body (copy_body_data
*id
)
2629 tree fndecl
= id
->src_fn
;
2630 tree body
= DECL_SAVED_TREE (fndecl
);
2632 walk_tree (&body
, copy_tree_body_r
, id
, NULL
);
2637 /* Make a copy of the body of FN so that it can be inserted inline in
2638 another function. */
2641 copy_body (copy_body_data
*id
, gcov_type count
, int frequency_scale
,
2642 basic_block entry_block_map
, basic_block exit_block_map
,
2643 basic_block new_entry
)
2645 tree fndecl
= id
->src_fn
;
2648 /* If this body has a CFG, walk CFG and copy. */
2649 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl
)));
2650 body
= copy_cfg_body (id
, count
, frequency_scale
, entry_block_map
, exit_block_map
,
2652 copy_debug_stmts (id
);
2657 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2658 defined in function FN, or of a data member thereof. */
2661 self_inlining_addr_expr (tree value
, tree fn
)
2665 if (TREE_CODE (value
) != ADDR_EXPR
)
2668 var
= get_base_address (TREE_OPERAND (value
, 0));
2670 return var
&& auto_var_in_fn_p (var
, fn
);
2673 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2674 lexical block and line number information from base_stmt, if given,
2675 or from the last stmt of the block otherwise. */
2678 insert_init_debug_bind (copy_body_data
*id
,
2679 basic_block bb
, tree var
, tree value
,
2683 gimple_stmt_iterator gsi
;
2686 if (!gimple_in_ssa_p (id
->src_cfun
))
2689 if (!MAY_HAVE_DEBUG_STMTS
)
2692 tracked_var
= target_for_debug_bind (var
);
2698 gsi
= gsi_last_bb (bb
);
2699 if (!base_stmt
&& !gsi_end_p (gsi
))
2700 base_stmt
= gsi_stmt (gsi
);
2703 note
= gimple_build_debug_bind (tracked_var
, value
, base_stmt
);
2707 if (!gsi_end_p (gsi
))
2708 gsi_insert_after (&gsi
, note
, GSI_SAME_STMT
);
2710 gsi_insert_before (&gsi
, note
, GSI_SAME_STMT
);
2717 insert_init_stmt (copy_body_data
*id
, basic_block bb
, gimple init_stmt
)
2719 /* If VAR represents a zero-sized variable, it's possible that the
2720 assignment statement may result in no gimple statements. */
2723 gimple_stmt_iterator si
= gsi_last_bb (bb
);
2725 /* We can end up with init statements that store to a non-register
2726 from a rhs with a conversion. Handle that here by forcing the
2727 rhs into a temporary. gimple_regimplify_operands is not
2728 prepared to do this for us. */
2729 if (!is_gimple_debug (init_stmt
)
2730 && !is_gimple_reg (gimple_assign_lhs (init_stmt
))
2731 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt
)))
2732 && gimple_assign_rhs_class (init_stmt
) == GIMPLE_UNARY_RHS
)
2734 tree rhs
= build1 (gimple_assign_rhs_code (init_stmt
),
2735 gimple_expr_type (init_stmt
),
2736 gimple_assign_rhs1 (init_stmt
));
2737 rhs
= force_gimple_operand_gsi (&si
, rhs
, true, NULL_TREE
, false,
2739 gimple_assign_set_rhs_code (init_stmt
, TREE_CODE (rhs
));
2740 gimple_assign_set_rhs1 (init_stmt
, rhs
);
2742 gsi_insert_after (&si
, init_stmt
, GSI_NEW_STMT
);
2743 gimple_regimplify_operands (init_stmt
, &si
);
2745 if (!is_gimple_debug (init_stmt
) && MAY_HAVE_DEBUG_STMTS
)
2747 tree def
= gimple_assign_lhs (init_stmt
);
2748 insert_init_debug_bind (id
, bb
, def
, def
, init_stmt
);
2753 /* Initialize parameter P with VALUE. If needed, produce init statement
2754 at the end of BB. When BB is NULL, we return init statement to be
2757 setup_one_parameter (copy_body_data
*id
, tree p
, tree value
, tree fn
,
2758 basic_block bb
, tree
*vars
)
2760 gimple init_stmt
= NULL
;
2763 tree def
= (gimple_in_ssa_p (cfun
)
2764 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
2767 && value
!= error_mark_node
2768 && !useless_type_conversion_p (TREE_TYPE (p
), TREE_TYPE (value
)))
2770 /* If we can match up types by promotion/demotion do so. */
2771 if (fold_convertible_p (TREE_TYPE (p
), value
))
2772 rhs
= fold_convert (TREE_TYPE (p
), value
);
2775 /* ??? For valid programs we should not end up here.
2776 Still if we end up with truly mismatched types here, fall back
2777 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2778 GIMPLE to the following passes. */
2779 if (!is_gimple_reg_type (TREE_TYPE (value
))
2780 || TYPE_SIZE (TREE_TYPE (p
)) == TYPE_SIZE (TREE_TYPE (value
)))
2781 rhs
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (p
), value
);
2783 rhs
= build_zero_cst (TREE_TYPE (p
));
2787 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2788 here since the type of this decl must be visible to the calling
2790 var
= copy_decl_to_var (p
, id
);
2792 /* Declare this new variable. */
2793 DECL_CHAIN (var
) = *vars
;
2796 /* Make gimplifier happy about this variable. */
2797 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
2799 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2800 we would not need to create a new variable here at all, if it
2801 weren't for debug info. Still, we can just use the argument
2803 if (TREE_READONLY (p
)
2804 && !TREE_ADDRESSABLE (p
)
2805 && value
&& !TREE_SIDE_EFFECTS (value
)
2808 /* We may produce non-gimple trees by adding NOPs or introduce
2809 invalid sharing when operand is not really constant.
2810 It is not big deal to prohibit constant propagation here as
2811 we will constant propagate in DOM1 pass anyway. */
2812 if (is_gimple_min_invariant (value
)
2813 && useless_type_conversion_p (TREE_TYPE (p
),
2815 /* We have to be very careful about ADDR_EXPR. Make sure
2816 the base variable isn't a local variable of the inlined
2817 function, e.g., when doing recursive inlining, direct or
2818 mutually-recursive or whatever, which is why we don't
2819 just test whether fn == current_function_decl. */
2820 && ! self_inlining_addr_expr (value
, fn
))
2822 insert_decl_map (id
, p
, value
);
2823 insert_debug_decl_map (id
, p
, var
);
2824 return insert_init_debug_bind (id
, bb
, var
, value
, NULL
);
2828 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2829 that way, when the PARM_DECL is encountered, it will be
2830 automatically replaced by the VAR_DECL. */
2831 insert_decl_map (id
, p
, var
);
2833 /* Even if P was TREE_READONLY, the new VAR should not be.
2834 In the original code, we would have constructed a
2835 temporary, and then the function body would have never
2836 changed the value of P. However, now, we will be
2837 constructing VAR directly. The constructor body may
2838 change its value multiple times as it is being
2839 constructed. Therefore, it must not be TREE_READONLY;
2840 the back-end assumes that TREE_READONLY variable is
2841 assigned to only once. */
2842 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p
)))
2843 TREE_READONLY (var
) = 0;
2845 /* If there is no setup required and we are in SSA, take the easy route
2846 replacing all SSA names representing the function parameter by the
2847 SSA name passed to function.
2849 We need to construct map for the variable anyway as it might be used
2850 in different SSA names when parameter is set in function.
2852 Do replacement at -O0 for const arguments replaced by constant.
2853 This is important for builtin_constant_p and other construct requiring
2854 constant argument to be visible in inlined function body. */
2855 if (gimple_in_ssa_p (cfun
) && rhs
&& def
&& is_gimple_reg (p
)
2857 || (TREE_READONLY (p
)
2858 && is_gimple_min_invariant (rhs
)))
2859 && (TREE_CODE (rhs
) == SSA_NAME
2860 || is_gimple_min_invariant (rhs
))
2861 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2863 insert_decl_map (id
, def
, rhs
);
2864 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
2867 /* If the value of argument is never used, don't care about initializing
2869 if (optimize
&& gimple_in_ssa_p (cfun
) && !def
&& is_gimple_reg (p
))
2871 gcc_assert (!value
|| !TREE_SIDE_EFFECTS (value
));
2872 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
2875 /* Initialize this VAR_DECL from the equivalent argument. Convert
2876 the argument to the proper type in case it was promoted. */
2879 if (rhs
== error_mark_node
)
2881 insert_decl_map (id
, p
, var
);
2882 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
2885 STRIP_USELESS_TYPE_CONVERSION (rhs
);
2887 /* If we are in SSA form properly remap the default definition
2888 or assign to a dummy SSA name if the parameter is unused and
2889 we are not optimizing. */
2890 if (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
))
2894 def
= remap_ssa_name (def
, id
);
2895 init_stmt
= gimple_build_assign (def
, rhs
);
2896 SSA_NAME_IS_DEFAULT_DEF (def
) = 0;
2897 set_ssa_default_def (cfun
, var
, NULL
);
2901 def
= make_ssa_name (var
, NULL
);
2902 init_stmt
= gimple_build_assign (def
, rhs
);
2906 init_stmt
= gimple_build_assign (var
, rhs
);
2908 if (bb
&& init_stmt
)
2909 insert_init_stmt (id
, bb
, init_stmt
);
2914 /* Generate code to initialize the parameters of the function at the
2915 top of the stack in ID from the GIMPLE_CALL STMT. */
2918 initialize_inlined_parameters (copy_body_data
*id
, gimple stmt
,
2919 tree fn
, basic_block bb
)
2924 tree vars
= NULL_TREE
;
2925 tree static_chain
= gimple_call_chain (stmt
);
2927 /* Figure out what the parameters are. */
2928 parms
= DECL_ARGUMENTS (fn
);
2930 /* Loop through the parameter declarations, replacing each with an
2931 equivalent VAR_DECL, appropriately initialized. */
2932 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
2935 val
= i
< gimple_call_num_args (stmt
) ? gimple_call_arg (stmt
, i
) : NULL
;
2936 setup_one_parameter (id
, p
, val
, fn
, bb
, &vars
);
2938 /* After remapping parameters remap their types. This has to be done
2939 in a second loop over all parameters to appropriately remap
2940 variable sized arrays when the size is specified in a
2941 parameter following the array. */
2942 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
2944 tree
*varp
= (tree
*) pointer_map_contains (id
->decl_map
, p
);
2946 && TREE_CODE (*varp
) == VAR_DECL
)
2948 tree def
= (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
)
2949 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
2951 TREE_TYPE (var
) = remap_type (TREE_TYPE (var
), id
);
2952 /* Also remap the default definition if it was remapped
2953 to the default definition of the parameter replacement
2954 by the parameter setup. */
2957 tree
*defp
= (tree
*) pointer_map_contains (id
->decl_map
, def
);
2959 && TREE_CODE (*defp
) == SSA_NAME
2960 && SSA_NAME_VAR (*defp
) == var
)
2961 TREE_TYPE (*defp
) = TREE_TYPE (var
);
2966 /* Initialize the static chain. */
2967 p
= DECL_STRUCT_FUNCTION (fn
)->static_chain_decl
;
2968 gcc_assert (fn
!= current_function_decl
);
2971 /* No static chain? Seems like a bug in tree-nested.c. */
2972 gcc_assert (static_chain
);
2974 setup_one_parameter (id
, p
, static_chain
, fn
, bb
, &vars
);
2977 declare_inline_vars (id
->block
, vars
);
2981 /* Declare a return variable to replace the RESULT_DECL for the
2982 function we are calling. An appropriate DECL_STMT is returned.
2983 The USE_STMT is filled to contain a use of the declaration to
2984 indicate the return value of the function.
2986 RETURN_SLOT, if non-null is place where to store the result. It
2987 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2988 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2990 The return value is a (possibly null) value that holds the result
2991 as seen by the caller. */
2994 declare_return_variable (copy_body_data
*id
, tree return_slot
, tree modify_dest
,
2995 basic_block entry_bb
)
2997 tree callee
= id
->src_fn
;
2998 tree result
= DECL_RESULT (callee
);
2999 tree callee_type
= TREE_TYPE (result
);
3003 /* Handle type-mismatches in the function declaration return type
3004 vs. the call expression. */
3006 caller_type
= TREE_TYPE (modify_dest
);
3008 caller_type
= TREE_TYPE (TREE_TYPE (callee
));
3010 /* We don't need to do anything for functions that don't return anything. */
3011 if (VOID_TYPE_P (callee_type
))
3014 /* If there was a return slot, then the return value is the
3015 dereferenced address of that object. */
3018 /* The front end shouldn't have used both return_slot and
3019 a modify expression. */
3020 gcc_assert (!modify_dest
);
3021 if (DECL_BY_REFERENCE (result
))
3023 tree return_slot_addr
= build_fold_addr_expr (return_slot
);
3024 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr
);
3026 /* We are going to construct *&return_slot and we can't do that
3027 for variables believed to be not addressable.
3029 FIXME: This check possibly can match, because values returned
3030 via return slot optimization are not believed to have address
3031 taken by alias analysis. */
3032 gcc_assert (TREE_CODE (return_slot
) != SSA_NAME
);
3033 var
= return_slot_addr
;
3038 gcc_assert (TREE_CODE (var
) != SSA_NAME
);
3039 TREE_ADDRESSABLE (var
) |= TREE_ADDRESSABLE (result
);
3041 if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3042 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3043 && !DECL_GIMPLE_REG_P (result
)
3045 DECL_GIMPLE_REG_P (var
) = 0;
3050 /* All types requiring non-trivial constructors should have been handled. */
3051 gcc_assert (!TREE_ADDRESSABLE (callee_type
));
3053 /* Attempt to avoid creating a new temporary variable. */
3055 && TREE_CODE (modify_dest
) != SSA_NAME
)
3057 bool use_it
= false;
3059 /* We can't use MODIFY_DEST if there's type promotion involved. */
3060 if (!useless_type_conversion_p (callee_type
, caller_type
))
3063 /* ??? If we're assigning to a variable sized type, then we must
3064 reuse the destination variable, because we've no good way to
3065 create variable sized temporaries at this point. */
3066 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type
)) != INTEGER_CST
)
3069 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3070 reuse it as the result of the call directly. Don't do this if
3071 it would promote MODIFY_DEST to addressable. */
3072 else if (TREE_ADDRESSABLE (result
))
3076 tree base_m
= get_base_address (modify_dest
);
3078 /* If the base isn't a decl, then it's a pointer, and we don't
3079 know where that's going to go. */
3080 if (!DECL_P (base_m
))
3082 else if (is_global_var (base_m
))
3084 else if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3085 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3086 && !DECL_GIMPLE_REG_P (result
)
3087 && DECL_GIMPLE_REG_P (base_m
))
3089 else if (!TREE_ADDRESSABLE (base_m
))
3101 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type
)) == INTEGER_CST
);
3103 var
= copy_result_decl_to_var (result
, id
);
3104 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3106 /* Do not have the rest of GCC warn about this variable as it should
3107 not be visible to the user. */
3108 TREE_NO_WARNING (var
) = 1;
3110 declare_inline_vars (id
->block
, var
);
3112 /* Build the use expr. If the return type of the function was
3113 promoted, convert it back to the expected type. */
3115 if (!useless_type_conversion_p (caller_type
, TREE_TYPE (var
)))
3117 /* If we can match up types by promotion/demotion do so. */
3118 if (fold_convertible_p (caller_type
, var
))
3119 use
= fold_convert (caller_type
, var
);
3122 /* ??? For valid programs we should not end up here.
3123 Still if we end up with truly mismatched types here, fall back
3124 to using a MEM_REF to not leak invalid GIMPLE to the following
3126 /* Prevent var from being written into SSA form. */
3127 if (TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
3128 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
)
3129 DECL_GIMPLE_REG_P (var
) = false;
3130 else if (is_gimple_reg_type (TREE_TYPE (var
)))
3131 TREE_ADDRESSABLE (var
) = true;
3132 use
= fold_build2 (MEM_REF
, caller_type
,
3133 build_fold_addr_expr (var
),
3134 build_int_cst (ptr_type_node
, 0));
3138 STRIP_USELESS_TYPE_CONVERSION (use
);
3140 if (DECL_BY_REFERENCE (result
))
3142 TREE_ADDRESSABLE (var
) = 1;
3143 var
= build_fold_addr_expr (var
);
3147 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3148 way, when the RESULT_DECL is encountered, it will be
3149 automatically replaced by the VAR_DECL.
3151 When returning by reference, ensure that RESULT_DECL remaps to
3153 if (DECL_BY_REFERENCE (result
)
3154 && !is_gimple_val (var
))
3156 tree temp
= create_tmp_var (TREE_TYPE (result
), "retvalptr");
3157 insert_decl_map (id
, result
, temp
);
3158 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3159 it's default_def SSA_NAME. */
3160 if (gimple_in_ssa_p (id
->src_cfun
)
3161 && is_gimple_reg (result
))
3163 temp
= make_ssa_name (temp
, NULL
);
3164 insert_decl_map (id
, ssa_default_def (id
->src_cfun
, result
), temp
);
3166 insert_init_stmt (id
, entry_bb
, gimple_build_assign (temp
, var
));
3169 insert_decl_map (id
, result
, var
);
3171 /* Remember this so we can ignore it in remap_decls. */
3177 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3178 to a local label. */
3181 has_label_address_in_static_1 (tree
*nodep
, int *walk_subtrees
, void *fnp
)
3184 tree fn
= (tree
) fnp
;
3186 if (TREE_CODE (node
) == LABEL_DECL
&& DECL_CONTEXT (node
) == fn
)
3195 /* Determine if the function can be copied. If so return NULL. If
3196 not return a string describng the reason for failure. */
3199 copy_forbidden (struct function
*fun
, tree fndecl
)
3201 const char *reason
= fun
->cannot_be_copied_reason
;
3205 /* Only examine the function once. */
3206 if (fun
->cannot_be_copied_set
)
3209 /* We cannot copy a function that receives a non-local goto
3210 because we cannot remap the destination label used in the
3211 function that is performing the non-local goto. */
3212 /* ??? Actually, this should be possible, if we work at it.
3213 No doubt there's just a handful of places that simply
3214 assume it doesn't happen and don't substitute properly. */
3215 if (fun
->has_nonlocal_label
)
3217 reason
= G_("function %q+F can never be copied "
3218 "because it receives a non-local goto");
3222 FOR_EACH_LOCAL_DECL (fun
, ix
, decl
)
3223 if (TREE_CODE (decl
) == VAR_DECL
3224 && TREE_STATIC (decl
)
3225 && !DECL_EXTERNAL (decl
)
3226 && DECL_INITIAL (decl
)
3227 && walk_tree_without_duplicates (&DECL_INITIAL (decl
),
3228 has_label_address_in_static_1
,
3231 reason
= G_("function %q+F can never be copied because it saves "
3232 "address of local label in a static variable");
3237 fun
->cannot_be_copied_reason
= reason
;
3238 fun
->cannot_be_copied_set
= true;
/* Diagnostic format string describing why the function most recently
   examined for inlinability cannot be inlined; set by
   inline_forbidden_p_stmt, read by inline_forbidden_p's callers.  */
static const char *inline_forbidden_reason;
3245 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3246 iff a function can not be inlined. Also sets the reason why. */
3249 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3250 struct walk_stmt_info
*wip
)
3252 tree fn
= (tree
) wip
->info
;
3254 gimple stmt
= gsi_stmt (*gsi
);
3256 switch (gimple_code (stmt
))
3259 /* Refuse to inline alloca call unless user explicitly forced so as
3260 this may change program's memory overhead drastically when the
3261 function using alloca is called in loop. In GCC present in
3262 SPEC2000 inlining into schedule_block cause it to require 2GB of
3263 RAM instead of 256MB. Don't do so for alloca calls emitted for
3264 VLA objects as those can't cause unbounded growth (they're always
3265 wrapped inside stack_save/stack_restore regions. */
3266 if (gimple_alloca_call_p (stmt
)
3267 && !gimple_call_alloca_for_var_p (stmt
)
3268 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3270 inline_forbidden_reason
3271 = G_("function %q+F can never be inlined because it uses "
3272 "alloca (override using the always_inline attribute)");
3273 *handled_ops_p
= true;
3277 t
= gimple_call_fndecl (stmt
);
3281 /* We cannot inline functions that call setjmp. */
3282 if (setjmp_call_p (t
))
3284 inline_forbidden_reason
3285 = G_("function %q+F can never be inlined because it uses setjmp");
3286 *handled_ops_p
= true;
3290 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3291 switch (DECL_FUNCTION_CODE (t
))
3293 /* We cannot inline functions that take a variable number of
3295 case BUILT_IN_VA_START
:
3296 case BUILT_IN_NEXT_ARG
:
3297 case BUILT_IN_VA_END
:
3298 inline_forbidden_reason
3299 = G_("function %q+F can never be inlined because it "
3300 "uses variable argument lists");
3301 *handled_ops_p
= true;
3304 case BUILT_IN_LONGJMP
:
3305 /* We can't inline functions that call __builtin_longjmp at
3306 all. The non-local goto machinery really requires the
3307 destination be in a different function. If we allow the
3308 function calling __builtin_longjmp to be inlined into the
3309 function calling __builtin_setjmp, Things will Go Awry. */
3310 inline_forbidden_reason
3311 = G_("function %q+F can never be inlined because "
3312 "it uses setjmp-longjmp exception handling");
3313 *handled_ops_p
= true;
3316 case BUILT_IN_NONLOCAL_GOTO
:
3318 inline_forbidden_reason
3319 = G_("function %q+F can never be inlined because "
3320 "it uses non-local goto");
3321 *handled_ops_p
= true;
3324 case BUILT_IN_RETURN
:
3325 case BUILT_IN_APPLY_ARGS
:
3326 /* If a __builtin_apply_args caller would be inlined,
3327 it would be saving arguments of the function it has
3328 been inlined into. Similarly __builtin_return would
3329 return from the function the inline has been inlined into. */
3330 inline_forbidden_reason
3331 = G_("function %q+F can never be inlined because "
3332 "it uses __builtin_return or __builtin_apply_args");
3333 *handled_ops_p
= true;
3342 t
= gimple_goto_dest (stmt
);
3344 /* We will not inline a function which uses computed goto. The
3345 addresses of its local labels, which may be tucked into
3346 global storage, are of course not constant across
3347 instantiations, which causes unexpected behavior. */
3348 if (TREE_CODE (t
) != LABEL_DECL
)
3350 inline_forbidden_reason
3351 = G_("function %q+F can never be inlined "
3352 "because it contains a computed goto");
3353 *handled_ops_p
= true;
3362 *handled_ops_p
= false;
3366 /* Return true if FNDECL is a function that cannot be inlined into
3370 inline_forbidden_p (tree fndecl
)
3372 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3373 struct walk_stmt_info wi
;
3374 struct pointer_set_t
*visited_nodes
;
3376 bool forbidden_p
= false;
3378 /* First check for shared reasons not to copy the code. */
3379 inline_forbidden_reason
= copy_forbidden (fun
, fndecl
);
3380 if (inline_forbidden_reason
!= NULL
)
3383 /* Next, walk the statements of the function looking for
3384 constraucts we can't handle, or are non-optimal for inlining. */
3385 visited_nodes
= pointer_set_create ();
3386 memset (&wi
, 0, sizeof (wi
));
3387 wi
.info
= (void *) fndecl
;
3388 wi
.pset
= visited_nodes
;
3390 FOR_EACH_BB_FN (bb
, fun
)
3393 gimple_seq seq
= bb_seq (bb
);
3394 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3395 forbidden_p
= (ret
!= NULL
);
3400 pointer_set_destroy (visited_nodes
);
3404 /* Return false if the function FNDECL cannot be inlined on account of its
3405 attributes, true otherwise. */
3407 function_attribute_inlinable_p (const_tree fndecl
)
3409 if (targetm
.attribute_table
)
3413 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3415 const_tree name
= TREE_PURPOSE (a
);
3418 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3419 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3420 return targetm
.function_attribute_inlinable_p (fndecl
);
3427 /* Returns nonzero if FN is a function that does not have any
3428 fundamental inline blocking properties. */
3431 tree_inlinable_function_p (tree fn
)
3433 bool inlinable
= true;
3437 /* If we've already decided this function shouldn't be inlined,
3438 there's no need to check again. */
3439 if (DECL_UNINLINABLE (fn
))
3442 /* We only warn for functions declared `inline' by the user. */
3443 do_warning
= (warn_inline
3444 && DECL_DECLARED_INLINE_P (fn
)
3445 && !DECL_NO_INLINE_WARNING_P (fn
)
3446 && !DECL_IN_SYSTEM_HEADER (fn
));
3448 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3451 && always_inline
== NULL
)
3454 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3455 "is suppressed using -fno-inline", fn
);
3459 else if (!function_attribute_inlinable_p (fn
))
3462 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3463 "uses attributes conflicting with inlining", fn
);
3467 else if (inline_forbidden_p (fn
))
3469 /* See if we should warn about uninlinable functions. Previously,
3470 some of these warnings would be issued while trying to expand
3471 the function inline, but that would cause multiple warnings
3472 about functions that would for example call alloca. But since
3473 this a property of the function, just one warning is enough.
3474 As a bonus we can now give more details about the reason why a
3475 function is not inlinable. */
3477 error (inline_forbidden_reason
, fn
);
3478 else if (do_warning
)
3479 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3484 /* Squirrel away the result so that we don't have to check again. */
3485 DECL_UNINLINABLE (fn
) = !inlinable
;
3490 /* Estimate the cost of a memory move. Use machine dependent
3491 word size and take possible memcpy call into account. */
3494 estimate_move_cost (tree type
)
3498 gcc_assert (!VOID_TYPE_P (type
));
3500 if (TREE_CODE (type
) == VECTOR_TYPE
)
3502 enum machine_mode inner
= TYPE_MODE (TREE_TYPE (type
));
3503 enum machine_mode simd
3504 = targetm
.vectorize
.preferred_simd_mode (inner
);
3505 int simd_mode_size
= GET_MODE_SIZE (simd
);
3506 return ((GET_MODE_SIZE (TYPE_MODE (type
)) + simd_mode_size
- 1)
3510 size
= int_size_in_bytes (type
);
3512 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (!optimize_size
))
3513 /* Cost of a memcpy call, 3 arguments and the call. */
3516 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
3519 /* Returns cost of operation CODE, according to WEIGHTS */
3522 estimate_operator_cost (enum tree_code code
, eni_weights
*weights
,
3523 tree op1 ATTRIBUTE_UNUSED
, tree op2
)
3527 /* These are "free" conversions, or their presumed cost
3528 is folded into other operations. */
3533 case VIEW_CONVERT_EXPR
:
3536 /* Assign cost of 1 to usual operations.
3537 ??? We may consider mapping RTL costs to this. */
3543 case POINTER_PLUS_EXPR
:
3546 case MULT_HIGHPART_EXPR
:
3549 case ADDR_SPACE_CONVERT_EXPR
:
3550 case FIXED_CONVERT_EXPR
:
3551 case FIX_TRUNC_EXPR
:
3563 case VEC_LSHIFT_EXPR
:
3564 case VEC_RSHIFT_EXPR
:
3571 case TRUTH_ANDIF_EXPR
:
3572 case TRUTH_ORIF_EXPR
:
3573 case TRUTH_AND_EXPR
:
3575 case TRUTH_XOR_EXPR
:
3576 case TRUTH_NOT_EXPR
:
3585 case UNORDERED_EXPR
:
3596 case PREDECREMENT_EXPR
:
3597 case PREINCREMENT_EXPR
:
3598 case POSTDECREMENT_EXPR
:
3599 case POSTINCREMENT_EXPR
:
3601 case REALIGN_LOAD_EXPR
:
3603 case REDUC_MAX_EXPR
:
3604 case REDUC_MIN_EXPR
:
3605 case REDUC_PLUS_EXPR
:
3606 case WIDEN_SUM_EXPR
:
3607 case WIDEN_MULT_EXPR
:
3609 case WIDEN_MULT_PLUS_EXPR
:
3610 case WIDEN_MULT_MINUS_EXPR
:
3611 case WIDEN_LSHIFT_EXPR
:
3613 case VEC_WIDEN_MULT_HI_EXPR
:
3614 case VEC_WIDEN_MULT_LO_EXPR
:
3615 case VEC_WIDEN_MULT_EVEN_EXPR
:
3616 case VEC_WIDEN_MULT_ODD_EXPR
:
3617 case VEC_UNPACK_HI_EXPR
:
3618 case VEC_UNPACK_LO_EXPR
:
3619 case VEC_UNPACK_FLOAT_HI_EXPR
:
3620 case VEC_UNPACK_FLOAT_LO_EXPR
:
3621 case VEC_PACK_TRUNC_EXPR
:
3622 case VEC_PACK_SAT_EXPR
:
3623 case VEC_PACK_FIX_TRUNC_EXPR
:
3624 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3625 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3629 /* Few special cases of expensive operations. This is useful
3630 to avoid inlining on functions having too many of these. */
3631 case TRUNC_DIV_EXPR
:
3633 case FLOOR_DIV_EXPR
:
3634 case ROUND_DIV_EXPR
:
3635 case EXACT_DIV_EXPR
:
3636 case TRUNC_MOD_EXPR
:
3638 case FLOOR_MOD_EXPR
:
3639 case ROUND_MOD_EXPR
:
3641 if (TREE_CODE (op2
) != INTEGER_CST
)
3642 return weights
->div_mod_cost
;
3646 /* We expect a copy assignment with no operator. */
3647 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
);
3653 /* Estimate number of instructions that will be created by expanding
3654 the statements in the statement sequence STMTS.
3655 WEIGHTS contains weights attributed to various constructs. */
3658 int estimate_num_insns_seq (gimple_seq stmts
, eni_weights
*weights
)
3661 gimple_stmt_iterator gsi
;
3664 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3665 cost
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
3671 /* Estimate number of instructions that will be created by expanding STMT.
3672 WEIGHTS contains weights attributed to various constructs. */
3675 estimate_num_insns (gimple stmt
, eni_weights
*weights
)
3678 enum gimple_code code
= gimple_code (stmt
);
3685 /* Try to estimate the cost of assignments. We have three cases to
3687 1) Simple assignments to registers;
3688 2) Stores to things that must live in memory. This includes
3689 "normal" stores to scalars, but also assignments of large
3690 structures, or constructors of big arrays;
3692 Let us look at the first two cases, assuming we have "a = b + C":
3693 <GIMPLE_ASSIGN <var_decl "a">
3694 <plus_expr <var_decl "b"> <constant C>>
3695 If "a" is a GIMPLE register, the assignment to it is free on almost
3696 any target, because "a" usually ends up in a real register. Hence
3697 the only cost of this expression comes from the PLUS_EXPR, and we
3698 can ignore the GIMPLE_ASSIGN.
3699 If "a" is not a GIMPLE register, the assignment to "a" will most
3700 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3701 of moving something into "a", which we compute using the function
3702 estimate_move_cost. */
3703 if (gimple_clobber_p (stmt
))
3704 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3706 lhs
= gimple_assign_lhs (stmt
);
3707 rhs
= gimple_assign_rhs1 (stmt
);
3711 /* Account for the cost of moving to / from memory. */
3712 if (gimple_store_p (stmt
))
3713 cost
+= estimate_move_cost (TREE_TYPE (lhs
));
3714 if (gimple_assign_load_p (stmt
))
3715 cost
+= estimate_move_cost (TREE_TYPE (rhs
));
3717 cost
+= estimate_operator_cost (gimple_assign_rhs_code (stmt
), weights
,
3718 gimple_assign_rhs1 (stmt
),
3719 get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
3720 == GIMPLE_BINARY_RHS
3721 ? gimple_assign_rhs2 (stmt
) : NULL
);
3725 cost
= 1 + estimate_operator_cost (gimple_cond_code (stmt
), weights
,
3726 gimple_op (stmt
, 0),
3727 gimple_op (stmt
, 1));
3731 /* Take into account cost of the switch + guess 2 conditional jumps for
3734 TODO: once the switch expansion logic is sufficiently separated, we can
3735 do better job on estimating cost of the switch. */
3736 if (weights
->time_based
)
3737 cost
= floor_log2 (gimple_switch_num_labels (stmt
)) * 2;
3739 cost
= gimple_switch_num_labels (stmt
) * 2;
3744 tree decl
= gimple_call_fndecl (stmt
);
3745 struct cgraph_node
*node
= NULL
;
3747 /* Do not special case builtins where we see the body.
3748 This just confuse inliner. */
3749 if (!decl
|| !(node
= cgraph_get_node (decl
)) || node
->symbol
.definition
)
3751 /* For buitins that are likely expanded to nothing or
3752 inlined do not account operand costs. */
3753 else if (is_simple_builtin (decl
))
3755 else if (is_inexpensive_builtin (decl
))
3756 return weights
->target_builtin_call_cost
;
3757 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
3759 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3760 specialize the cheap expansion we do here.
3761 ??? This asks for a more general solution. */
3762 switch (DECL_FUNCTION_CODE (decl
))
3767 if (TREE_CODE (gimple_call_arg (stmt
, 1)) == REAL_CST
3768 && REAL_VALUES_EQUAL
3769 (TREE_REAL_CST (gimple_call_arg (stmt
, 1)), dconst2
))
3770 return estimate_operator_cost (MULT_EXPR
, weights
,
3771 gimple_call_arg (stmt
, 0),
3772 gimple_call_arg (stmt
, 0));
3780 cost
= node
? weights
->call_cost
: weights
->indirect_call_cost
;
3781 if (gimple_call_lhs (stmt
))
3782 cost
+= estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt
)));
3783 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
3785 tree arg
= gimple_call_arg (stmt
, i
);
3786 cost
+= estimate_move_cost (TREE_TYPE (arg
));
3792 return weights
->return_cost
;
3798 case GIMPLE_PREDICT
:
3804 int count
= asm_str_count (gimple_asm_string (stmt
));
3805 /* 1000 means infinity. This avoids overflows later
3806 with very long asm statements. */
3813 /* This is either going to be an external function call with one
3814 argument, or two register copy statements plus a goto. */
3817 case GIMPLE_EH_DISPATCH
:
3818 /* ??? This is going to turn into a switch statement. Ideally
3819 we'd have a look at the eh region and estimate the number of
3824 return estimate_num_insns_seq (gimple_bind_body (stmt
), weights
);
3826 case GIMPLE_EH_FILTER
:
3827 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt
), weights
);
3830 return estimate_num_insns_seq (gimple_catch_handler (stmt
), weights
);
3833 return (estimate_num_insns_seq (gimple_try_eval (stmt
), weights
)
3834 + estimate_num_insns_seq (gimple_try_cleanup (stmt
), weights
));
3836 /* OpenMP directives are generally very expensive. */
3838 case GIMPLE_OMP_RETURN
:
3839 case GIMPLE_OMP_SECTIONS_SWITCH
:
3840 case GIMPLE_OMP_ATOMIC_STORE
:
3841 case GIMPLE_OMP_CONTINUE
:
3842 /* ...except these, which are cheap. */
3845 case GIMPLE_OMP_ATOMIC_LOAD
:
3846 return weights
->omp_cost
;
3848 case GIMPLE_OMP_FOR
:
3849 return (weights
->omp_cost
3850 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
)
3851 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt
), weights
));
3853 case GIMPLE_OMP_PARALLEL
:
3854 case GIMPLE_OMP_TASK
:
3855 case GIMPLE_OMP_CRITICAL
:
3856 case GIMPLE_OMP_MASTER
:
3857 case GIMPLE_OMP_TASKGROUP
:
3858 case GIMPLE_OMP_ORDERED
:
3859 case GIMPLE_OMP_SECTION
:
3860 case GIMPLE_OMP_SECTIONS
:
3861 case GIMPLE_OMP_SINGLE
:
3862 case GIMPLE_OMP_TARGET
:
3863 case GIMPLE_OMP_TEAMS
:
3864 return (weights
->omp_cost
3865 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
));
3867 case GIMPLE_TRANSACTION
:
3868 return (weights
->tm_cost
3869 + estimate_num_insns_seq (gimple_transaction_body (stmt
),
3879 /* Estimate number of instructions that will be created by expanding
3880 function FNDECL. WEIGHTS contains weights attributed to various
3884 estimate_num_insns_fn (tree fndecl
, eni_weights
*weights
)
3886 struct function
*my_function
= DECL_STRUCT_FUNCTION (fndecl
);
3887 gimple_stmt_iterator bsi
;
3891 gcc_assert (my_function
&& my_function
->cfg
);
3892 FOR_EACH_BB_FN (bb
, my_function
)
3894 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
3895 n
+= estimate_num_insns (gsi_stmt (bsi
), weights
);
3902 /* Initializes weights used by estimate_num_insns. */
3905 init_inline_once (void)
3907 eni_size_weights
.call_cost
= 1;
3908 eni_size_weights
.indirect_call_cost
= 3;
3909 eni_size_weights
.target_builtin_call_cost
= 1;
3910 eni_size_weights
.div_mod_cost
= 1;
3911 eni_size_weights
.omp_cost
= 40;
3912 eni_size_weights
.tm_cost
= 10;
3913 eni_size_weights
.time_based
= false;
3914 eni_size_weights
.return_cost
= 1;
3916 /* Estimating time for call is difficult, since we have no idea what the
3917 called function does. In the current uses of eni_time_weights,
3918 underestimating the cost does less harm than overestimating it, so
3919 we choose a rather small value here. */
3920 eni_time_weights
.call_cost
= 10;
3921 eni_time_weights
.indirect_call_cost
= 15;
3922 eni_time_weights
.target_builtin_call_cost
= 1;
3923 eni_time_weights
.div_mod_cost
= 10;
3924 eni_time_weights
.omp_cost
= 40;
3925 eni_time_weights
.tm_cost
= 40;
3926 eni_time_weights
.time_based
= true;
3927 eni_time_weights
.return_cost
= 2;
3930 /* Estimate the number of instructions in a gimple_seq. */
3933 count_insns_seq (gimple_seq seq
, eni_weights
*weights
)
3935 gimple_stmt_iterator gsi
;
3937 for (gsi
= gsi_start (seq
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3938 n
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
3944 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3947 prepend_lexical_block (tree current_block
, tree new_block
)
3949 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (current_block
);
3950 BLOCK_SUBBLOCKS (current_block
) = new_block
;
3951 BLOCK_SUPERCONTEXT (new_block
) = current_block
;
3954 /* Add local variables from CALLEE to CALLER. */
3957 add_local_variables (struct function
*callee
, struct function
*caller
,
3963 FOR_EACH_LOCAL_DECL (callee
, ix
, var
)
3964 if (!can_be_nonlocal (var
, id
))
3966 tree new_var
= remap_decl (var
, id
);
3968 /* Remap debug-expressions. */
3969 if (TREE_CODE (new_var
) == VAR_DECL
3970 && DECL_HAS_DEBUG_EXPR_P (var
)
3973 tree tem
= DECL_DEBUG_EXPR (var
);
3974 bool old_regimplify
= id
->regimplify
;
3975 id
->remapping_type_depth
++;
3976 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
3977 id
->remapping_type_depth
--;
3978 id
->regimplify
= old_regimplify
;
3979 SET_DECL_DEBUG_EXPR (new_var
, tem
);
3980 DECL_HAS_DEBUG_EXPR_P (new_var
) = 1;
3982 add_local_decl (caller
, new_var
);
3986 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3989 expand_call_inline (basic_block bb
, gimple stmt
, copy_body_data
*id
)
3993 struct pointer_map_t
*st
, *dst
;
3996 location_t saved_location
;
3997 struct cgraph_edge
*cg_edge
;
3998 cgraph_inline_failed_t reason
;
3999 basic_block return_block
;
4001 gimple_stmt_iterator gsi
, stmt_gsi
;
4002 bool successfully_inlined
= FALSE
;
4003 bool purge_dead_abnormal_edges
;
4005 /* Set input_location here so we get the right instantiation context
4006 if we call instantiate_decl from inlinable_function_p. */
4007 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4008 saved_location
= input_location
;
4009 input_location
= gimple_location (stmt
);
4011 /* From here on, we're only interested in CALL_EXPRs. */
4012 if (gimple_code (stmt
) != GIMPLE_CALL
)
4015 cg_edge
= cgraph_edge (id
->dst_node
, stmt
);
4016 gcc_checking_assert (cg_edge
);
4017 /* First, see if we can figure out what function is being called.
4018 If we cannot, then there is no hope of inlining the function. */
4019 if (cg_edge
->indirect_unknown_callee
)
4021 fn
= cg_edge
->callee
->symbol
.decl
;
4022 gcc_checking_assert (fn
);
4024 /* If FN is a declaration of a function in a nested scope that was
4025 globally declared inline, we don't set its DECL_INITIAL.
4026 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4027 C++ front-end uses it for cdtors to refer to their internal
4028 declarations, that are not real functions. Fortunately those
4029 don't have trees to be saved, so we can tell by checking their
4031 if (!DECL_INITIAL (fn
)
4032 && DECL_ABSTRACT_ORIGIN (fn
)
4033 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn
)))
4034 fn
= DECL_ABSTRACT_ORIGIN (fn
);
4036 /* Don't try to inline functions that are not well-suited to inlining. */
4037 if (cg_edge
->inline_failed
)
4039 reason
= cg_edge
->inline_failed
;
4040 /* If this call was originally indirect, we do not want to emit any
4041 inlining related warnings or sorry messages because there are no
4042 guarantees regarding those. */
4043 if (cg_edge
->indirect_inlining_edge
)
4046 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
))
4047 /* For extern inline functions that get redefined we always
4048 silently ignored always_inline flag. Better behaviour would
4049 be to be able to keep both bodies and use extern inline body
4050 for inlining, but we can't do that because frontends overwrite
4052 && !cg_edge
->callee
->local
.redefined_extern_inline
4053 /* During early inline pass, report only when optimization is
4055 && (cgraph_global_info_ready
4057 /* PR 20090218-1_0.c. Body can be provided by another module. */
4058 && (reason
!= CIF_BODY_NOT_AVAILABLE
|| !flag_generate_lto
))
4060 error ("inlining failed in call to always_inline %q+F: %s", fn
,
4061 cgraph_inline_failed_string (reason
));
4062 error ("called from here");
4064 else if (warn_inline
4065 && DECL_DECLARED_INLINE_P (fn
)
4066 && !DECL_NO_INLINE_WARNING_P (fn
)
4067 && !DECL_IN_SYSTEM_HEADER (fn
)
4068 && reason
!= CIF_UNSPECIFIED
4069 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn
))
4070 /* Do not warn about not inlined recursive calls. */
4071 && !cgraph_edge_recursive_p (cg_edge
)
4072 /* Avoid warnings during early inline pass. */
4073 && cgraph_global_info_ready
)
4075 warning (OPT_Winline
, "inlining failed in call to %q+F: %s",
4076 fn
, _(cgraph_inline_failed_string (reason
)));
4077 warning (OPT_Winline
, "called from here");
4081 fn
= cg_edge
->callee
->symbol
.decl
;
4082 cgraph_get_body (cg_edge
->callee
);
4084 #ifdef ENABLE_CHECKING
4085 if (cg_edge
->callee
->symbol
.decl
!= id
->dst_node
->symbol
.decl
)
4086 verify_cgraph_node (cg_edge
->callee
);
4089 /* We will be inlining this callee. */
4090 id
->eh_lp_nr
= lookup_stmt_eh_lp (stmt
);
4092 /* Update the callers EH personality. */
4093 if (DECL_FUNCTION_PERSONALITY (cg_edge
->callee
->symbol
.decl
))
4094 DECL_FUNCTION_PERSONALITY (cg_edge
->caller
->symbol
.decl
)
4095 = DECL_FUNCTION_PERSONALITY (cg_edge
->callee
->symbol
.decl
);
4097 /* Split the block holding the GIMPLE_CALL. */
4098 e
= split_block (bb
, stmt
);
4100 return_block
= e
->dest
;
4103 /* split_block splits after the statement; work around this by
4104 moving the call into the second block manually. Not pretty,
4105 but seems easier than doing the CFG manipulation by hand
4106 when the GIMPLE_CALL is in the last statement of BB. */
4107 stmt_gsi
= gsi_last_bb (bb
);
4108 gsi_remove (&stmt_gsi
, false);
4110 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4111 been the source of abnormal edges. In this case, schedule
4112 the removal of dead abnormal edges. */
4113 gsi
= gsi_start_bb (return_block
);
4114 if (gsi_end_p (gsi
))
4116 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
4117 purge_dead_abnormal_edges
= true;
4121 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
4122 purge_dead_abnormal_edges
= false;
4125 stmt_gsi
= gsi_start_bb (return_block
);
4127 /* Build a block containing code to initialize the arguments, the
4128 actual inline expansion of the body, and a label for the return
4129 statements within the function to jump to. The type of the
4130 statement expression is the return type of the function call.
4131 ??? If the call does not have an associated block then we will
4132 remap all callee blocks to NULL, effectively dropping most of
4133 its debug information. This should only happen for calls to
4134 artificial decls inserted by the compiler itself. We need to
4135 either link the inlined blocks into the caller block tree or
4136 not refer to them in any way to not break GC for locations. */
4137 if (gimple_block (stmt
))
4139 id
->block
= make_node (BLOCK
);
4140 BLOCK_ABSTRACT_ORIGIN (id
->block
) = fn
;
4141 BLOCK_SOURCE_LOCATION (id
->block
) = LOCATION_LOCUS (input_location
);
4142 prepend_lexical_block (gimple_block (stmt
), id
->block
);
4145 /* Local declarations will be replaced by their equivalents in this
4148 id
->decl_map
= pointer_map_create ();
4149 dst
= id
->debug_map
;
4150 id
->debug_map
= NULL
;
4152 /* Record the function we are about to inline. */
4154 id
->src_node
= cg_edge
->callee
;
4155 id
->src_cfun
= DECL_STRUCT_FUNCTION (fn
);
4156 id
->gimple_call
= stmt
;
4158 gcc_assert (!id
->src_cfun
->after_inlining
);
4161 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn
)))
4163 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4164 gsi_insert_after (&si
, gimple_build_predict (PRED_COLD_FUNCTION
,
4168 initialize_inlined_parameters (id
, stmt
, fn
, bb
);
4170 if (DECL_INITIAL (fn
))
4172 if (gimple_block (stmt
))
4176 prepend_lexical_block (id
->block
,
4177 remap_blocks (DECL_INITIAL (fn
), id
));
4178 gcc_checking_assert (BLOCK_SUBBLOCKS (id
->block
)
4179 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id
->block
))
4181 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4182 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4183 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4184 under it. The parameters can be then evaluated in the debugger,
4185 but don't show in backtraces. */
4186 for (var
= &BLOCK_VARS (BLOCK_SUBBLOCKS (id
->block
)); *var
; )
4187 if (TREE_CODE (DECL_ORIGIN (*var
)) == PARM_DECL
)
4190 *var
= TREE_CHAIN (v
);
4191 TREE_CHAIN (v
) = BLOCK_VARS (id
->block
);
4192 BLOCK_VARS (id
->block
) = v
;
4195 var
= &TREE_CHAIN (*var
);
4198 remap_blocks_to_null (DECL_INITIAL (fn
), id
);
4201 /* Return statements in the function body will be replaced by jumps
4202 to the RET_LABEL. */
4203 gcc_assert (DECL_INITIAL (fn
));
4204 gcc_assert (TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
);
4206 /* Find the LHS to which the result of this call is assigned. */
4208 if (gimple_call_lhs (stmt
))
4210 modify_dest
= gimple_call_lhs (stmt
);
4212 /* The function which we are inlining might not return a value,
4213 in which case we should issue a warning that the function
4214 does not return a value. In that case the optimizers will
4215 see that the variable to which the value is assigned was not
4216 initialized. We do not want to issue a warning about that
4217 uninitialized variable. */
4218 if (DECL_P (modify_dest
))
4219 TREE_NO_WARNING (modify_dest
) = 1;
4221 if (gimple_call_return_slot_opt_p (stmt
))
4223 return_slot
= modify_dest
;
4230 /* If we are inlining a call to the C++ operator new, we don't want
4231 to use type based alias analysis on the return value. Otherwise
4232 we may get confused if the compiler sees that the inlined new
4233 function returns a pointer which was just deleted. See bug
4235 if (DECL_IS_OPERATOR_NEW (fn
))
4241 /* Declare the return variable for the function. */
4242 use_retvar
= declare_return_variable (id
, return_slot
, modify_dest
, bb
);
4244 /* Add local vars in this inlined callee to caller. */
4245 add_local_variables (id
->src_cfun
, cfun
, id
);
4247 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4249 fprintf (dump_file
, "Inlining ");
4250 print_generic_expr (dump_file
, id
->src_fn
, 0);
4251 fprintf (dump_file
, " to ");
4252 print_generic_expr (dump_file
, id
->dst_fn
, 0);
4253 fprintf (dump_file
, " with frequency %i\n", cg_edge
->frequency
);
4256 /* This is it. Duplicate the callee body. Assume callee is
4257 pre-gimplified. Note that we must not alter the caller
4258 function in any way before this point, as this CALL_EXPR may be
4259 a self-referential call; if we're calling ourselves, we need to
4260 duplicate our body before altering anything. */
4261 copy_body (id
, bb
->count
,
4262 GCOV_COMPUTE_SCALE (cg_edge
->frequency
, CGRAPH_FREQ_BASE
),
4263 bb
, return_block
, NULL
);
4265 /* Reset the escaped solution. */
4266 if (cfun
->gimple_df
)
4267 pt_solution_reset (&cfun
->gimple_df
->escaped
);
4272 pointer_map_destroy (id
->debug_map
);
4273 id
->debug_map
= dst
;
4275 pointer_map_destroy (id
->decl_map
);
4278 /* Unlink the calls virtual operands before replacing it. */
4279 unlink_stmt_vdef (stmt
);
4281 /* If the inlined function returns a result that we care about,
4282 substitute the GIMPLE_CALL with an assignment of the return
4283 variable to the LHS of the call. That is, if STMT was
4284 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4285 if (use_retvar
&& gimple_call_lhs (stmt
))
4287 gimple old_stmt
= stmt
;
4288 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), use_retvar
);
4289 gsi_replace (&stmt_gsi
, stmt
, false);
4290 maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
);
4294 /* Handle the case of inlining a function with no return
4295 statement, which causes the return value to become undefined. */
4296 if (gimple_call_lhs (stmt
)
4297 && TREE_CODE (gimple_call_lhs (stmt
)) == SSA_NAME
)
4299 tree name
= gimple_call_lhs (stmt
);
4300 tree var
= SSA_NAME_VAR (name
);
4301 tree def
= ssa_default_def (cfun
, var
);
4305 /* If the variable is used undefined, make this name
4306 undefined via a move. */
4307 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), def
);
4308 gsi_replace (&stmt_gsi
, stmt
, true);
4312 /* Otherwise make this variable undefined. */
4313 gsi_remove (&stmt_gsi
, true);
4314 set_ssa_default_def (cfun
, var
, name
);
4315 SSA_NAME_DEF_STMT (name
) = gimple_build_nop ();
4319 gsi_remove (&stmt_gsi
, true);
4322 if (purge_dead_abnormal_edges
)
4324 gimple_purge_dead_eh_edges (return_block
);
4325 gimple_purge_dead_abnormal_call_edges (return_block
);
4328 /* If the value of the new expression is ignored, that's OK. We
4329 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4330 the equivalent inlined version either. */
4331 if (is_gimple_assign (stmt
))
4333 gcc_assert (gimple_assign_single_p (stmt
)
4334 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)));
4335 TREE_USED (gimple_assign_rhs1 (stmt
)) = 1;
4338 /* Output the inlining info for this abstract function, since it has been
4339 inlined. If we don't do this now, we can lose the information about the
4340 variables in the function when the blocks get blown away as soon as we
4341 remove the cgraph node. */
4342 if (gimple_block (stmt
))
4343 (*debug_hooks
->outlining_inline_function
) (cg_edge
->callee
->symbol
.decl
);
4345 /* Update callgraph if needed. */
4346 cgraph_remove_node (cg_edge
->callee
);
4348 id
->block
= NULL_TREE
;
4349 successfully_inlined
= TRUE
;
4352 input_location
= saved_location
;
4353 return successfully_inlined
;
4356 /* Expand call statements reachable from STMT_P.
4357 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4358 in a MODIFY_EXPR. */
4361 gimple_expand_calls_inline (basic_block bb
, copy_body_data
*id
)
4363 gimple_stmt_iterator gsi
;
4365 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4367 gimple stmt
= gsi_stmt (gsi
);
4369 if (is_gimple_call (stmt
)
4370 && expand_call_inline (bb
, stmt
, id
))
4378 /* Walk all basic blocks created after FIRST and try to fold every statement
4379 in the STATEMENTS pointer set. */
4382 fold_marked_statements (int first
, struct pointer_set_t
*statements
)
4384 for (; first
< n_basic_blocks
; first
++)
4385 if (BASIC_BLOCK (first
))
4387 gimple_stmt_iterator gsi
;
4389 for (gsi
= gsi_start_bb (BASIC_BLOCK (first
));
4392 if (pointer_set_contains (statements
, gsi_stmt (gsi
)))
4394 gimple old_stmt
= gsi_stmt (gsi
);
4395 tree old_decl
= is_gimple_call (old_stmt
) ? gimple_call_fndecl (old_stmt
) : 0;
4397 if (old_decl
&& DECL_BUILT_IN (old_decl
))
4399 /* Folding builtins can create multiple instructions,
4400 we need to look at all of them. */
4401 gimple_stmt_iterator i2
= gsi
;
4403 if (fold_stmt (&gsi
))
4406 /* If a builtin at the end of a bb folded into nothing,
4407 the following loop won't work. */
4408 if (gsi_end_p (gsi
))
4410 cgraph_update_edges_for_call_stmt (old_stmt
,
4415 i2
= gsi_start_bb (BASIC_BLOCK (first
));
4420 new_stmt
= gsi_stmt (i2
);
4421 update_stmt (new_stmt
);
4422 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4425 if (new_stmt
== gsi_stmt (gsi
))
4427 /* It is okay to check only for the very last
4428 of these statements. If it is a throwing
4429 statement nothing will change. If it isn't
4430 this can remove EH edges. If that weren't
4431 correct then because some intermediate stmts
4432 throw, but not the last one. That would mean
4433 we'd have to split the block, which we can't
4434 here and we'd loose anyway. And as builtins
4435 probably never throw, this all
4437 if (maybe_clean_or_replace_eh_stmt (old_stmt
,
4439 gimple_purge_dead_eh_edges (BASIC_BLOCK (first
));
4446 else if (fold_stmt (&gsi
))
4448 /* Re-read the statement from GSI as fold_stmt() may
4450 gimple new_stmt
= gsi_stmt (gsi
);
4451 update_stmt (new_stmt
);
4453 if (is_gimple_call (old_stmt
)
4454 || is_gimple_call (new_stmt
))
4455 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4458 if (maybe_clean_or_replace_eh_stmt (old_stmt
, new_stmt
))
4459 gimple_purge_dead_eh_edges (BASIC_BLOCK (first
));
4465 /* Return true if BB has at least one abnormal outgoing edge. */
4468 has_abnormal_outgoing_edge_p (basic_block bb
)
4473 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4474 if (e
->flags
& EDGE_ABNORMAL
)
4480 /* Expand calls to inline functions in the body of FN. */
4483 optimize_inline_calls (tree fn
)
4487 int last
= n_basic_blocks
;
4488 struct gimplify_ctx gctx
;
4489 bool inlined_p
= false;
4492 memset (&id
, 0, sizeof (id
));
4494 id
.src_node
= id
.dst_node
= cgraph_get_node (fn
);
4495 gcc_assert (id
.dst_node
->symbol
.definition
);
4497 /* Or any functions that aren't finished yet. */
4498 if (current_function_decl
)
4499 id
.dst_fn
= current_function_decl
;
4501 id
.copy_decl
= copy_decl_maybe_to_var
;
4502 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
4503 id
.transform_new_cfg
= false;
4504 id
.transform_return_to_modify
= true;
4505 id
.transform_parameter
= true;
4506 id
.transform_lang_insert_block
= NULL
;
4507 id
.statements_to_fold
= pointer_set_create ();
4509 push_gimplify_context (&gctx
);
4511 /* We make no attempts to keep dominance info up-to-date. */
4512 free_dominance_info (CDI_DOMINATORS
);
4513 free_dominance_info (CDI_POST_DOMINATORS
);
4515 /* Register specific gimple functions. */
4516 gimple_register_cfg_hooks ();
4518 /* Reach the trees by walking over the CFG, and note the
4519 enclosing basic-blocks in the call edges. */
4520 /* We walk the blocks going forward, because inlined function bodies
4521 will split id->current_basic_block, and the new blocks will
4522 follow it; we'll trudge through them, processing their CALL_EXPRs
4525 inlined_p
|= gimple_expand_calls_inline (bb
, &id
);
4527 pop_gimplify_context (NULL
);
4529 #ifdef ENABLE_CHECKING
4531 struct cgraph_edge
*e
;
4533 verify_cgraph_node (id
.dst_node
);
4535 /* Double check that we inlined everything we are supposed to inline. */
4536 for (e
= id
.dst_node
->callees
; e
; e
= e
->next_callee
)
4537 gcc_assert (e
->inline_failed
);
4541 /* Fold queued statements. */
4542 fold_marked_statements (last
, id
.statements_to_fold
);
4543 pointer_set_destroy (id
.statements_to_fold
);
4545 gcc_assert (!id
.debug_stmts
.exists ());
4547 /* If we didn't inline into the function there is nothing to do. */
4551 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4554 delete_unreachable_blocks_update_callgraph (&id
);
4555 #ifdef ENABLE_CHECKING
4556 verify_cgraph_node (id
.dst_node
);
4559 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4560 not possible yet - the IPA passes might make various functions to not
4561 throw and they don't care to proactively update local EH info. This is
4562 done later in fixup_cfg pass that also execute the verification. */
4563 return (TODO_update_ssa
4565 | (gimple_in_ssa_p (cfun
) ? TODO_remove_unused_locals
: 0)
4566 | (gimple_in_ssa_p (cfun
) ? TODO_update_address_taken
: 0)
4567 | (profile_status
!= PROFILE_ABSENT
? TODO_rebuild_frequencies
: 0));
4570 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4573 copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
4575 enum tree_code code
= TREE_CODE (*tp
);
4576 enum tree_code_class cl
= TREE_CODE_CLASS (code
);
4578 /* We make copies of most nodes. */
4579 if (IS_EXPR_CODE_CLASS (cl
)
4580 || code
== TREE_LIST
4582 || code
== TYPE_DECL
4583 || code
== OMP_CLAUSE
)
4585 /* Because the chain gets clobbered when we make a copy, we save it
4587 tree chain
= NULL_TREE
, new_tree
;
4589 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
4590 chain
= TREE_CHAIN (*tp
);
4592 /* Copy the node. */
4593 new_tree
= copy_node (*tp
);
4595 /* Propagate mudflap marked-ness. */
4596 if (flag_mudflap
&& mf_marked_p (*tp
))
4601 /* Now, restore the chain, if appropriate. That will cause
4602 walk_tree to walk into the chain as well. */
4603 if (code
== PARM_DECL
4604 || code
== TREE_LIST
4605 || code
== OMP_CLAUSE
)
4606 TREE_CHAIN (*tp
) = chain
;
4608 /* For now, we don't update BLOCKs when we make copies. So, we
4609 have to nullify all BIND_EXPRs. */
4610 if (TREE_CODE (*tp
) == BIND_EXPR
)
4611 BIND_EXPR_BLOCK (*tp
) = NULL_TREE
;
4613 else if (code
== CONSTRUCTOR
)
4615 /* CONSTRUCTOR nodes need special handling because
4616 we need to duplicate the vector of elements. */
4619 new_tree
= copy_node (*tp
);
4621 /* Propagate mudflap marked-ness. */
4622 if (flag_mudflap
&& mf_marked_p (*tp
))
4625 CONSTRUCTOR_ELTS (new_tree
) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp
));
4628 else if (code
== STATEMENT_LIST
)
4629 /* We used to just abort on STATEMENT_LIST, but we can run into them
4630 with statement-expressions (c++/40975). */
4631 copy_statement_list (tp
);
4632 else if (TREE_CODE_CLASS (code
) == tcc_type
)
4634 else if (TREE_CODE_CLASS (code
) == tcc_declaration
)
4636 else if (TREE_CODE_CLASS (code
) == tcc_constant
)
4641 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4642 information indicating to what new SAVE_EXPR this one should be mapped,
4643 use that one. Otherwise, create a new node and enter it in ST. FN is
4644 the function into which the copy will be placed. */
4647 remap_save_expr (tree
*tp
, void *st_
, int *walk_subtrees
)
4649 struct pointer_map_t
*st
= (struct pointer_map_t
*) st_
;
4653 /* See if we already encountered this SAVE_EXPR. */
4654 n
= (tree
*) pointer_map_contains (st
, *tp
);
4656 /* If we didn't already remap this SAVE_EXPR, do so now. */
4659 t
= copy_node (*tp
);
4661 /* Remember this SAVE_EXPR. */
4662 *pointer_map_insert (st
, *tp
) = t
;
4663 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4664 *pointer_map_insert (st
, t
) = t
;
4668 /* We've already walked into this SAVE_EXPR; don't do it again. */
4673 /* Replace this SAVE_EXPR with the copy. */
4677 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4678 label, copies the declaration and enters it in the splay_tree in DATA (which
4679 is really a 'copy_body_data *'. */
4682 mark_local_labels_stmt (gimple_stmt_iterator
*gsip
,
4683 bool *handled_ops_p ATTRIBUTE_UNUSED
,
4684 struct walk_stmt_info
*wi
)
4686 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
4687 gimple stmt
= gsi_stmt (*gsip
);
4689 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4691 tree decl
= gimple_label_label (stmt
);
4693 /* Copy the decl and remember the copy. */
4694 insert_decl_map (id
, decl
, id
->copy_decl (decl
, id
));
4701 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4702 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4703 remaps all local declarations to appropriate replacements in gimple
4707 replace_locals_op (tree
*tp
, int *walk_subtrees
, void *data
)
4709 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4710 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
4711 struct pointer_map_t
*st
= id
->decl_map
;
4715 /* Only a local declaration (variable or label). */
4716 if ((TREE_CODE (expr
) == VAR_DECL
4717 && !TREE_STATIC (expr
))
4718 || TREE_CODE (expr
) == LABEL_DECL
)
4720 /* Lookup the declaration. */
4721 n
= (tree
*) pointer_map_contains (st
, expr
);
4723 /* If it's there, remap it. */
4728 else if (TREE_CODE (expr
) == STATEMENT_LIST
4729 || TREE_CODE (expr
) == BIND_EXPR
4730 || TREE_CODE (expr
) == SAVE_EXPR
)
4732 else if (TREE_CODE (expr
) == TARGET_EXPR
)
4734 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4735 It's OK for this to happen if it was part of a subtree that
4736 isn't immediately expanded, such as operand 2 of another
4738 if (!TREE_OPERAND (expr
, 1))
4740 TREE_OPERAND (expr
, 1) = TREE_OPERAND (expr
, 3);
4741 TREE_OPERAND (expr
, 3) = NULL_TREE
;
4745 /* Keep iterating. */
4750 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4751 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4752 remaps all local declarations to appropriate replacements in gimple
4756 replace_locals_stmt (gimple_stmt_iterator
*gsip
,
4757 bool *handled_ops_p ATTRIBUTE_UNUSED
,
4758 struct walk_stmt_info
*wi
)
4760 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
4761 gimple stmt
= gsi_stmt (*gsip
);
4763 if (gimple_code (stmt
) == GIMPLE_BIND
)
4765 tree block
= gimple_bind_block (stmt
);
4769 remap_block (&block
, id
);
4770 gimple_bind_set_block (stmt
, block
);
4773 /* This will remap a lot of the same decls again, but this should be
4775 if (gimple_bind_vars (stmt
))
4776 gimple_bind_set_vars (stmt
, remap_decls (gimple_bind_vars (stmt
),
4780 /* Keep iterating. */
4785 /* Copies everything in SEQ and replaces variables and labels local to
4786 current_function_decl. */
4789 copy_gimple_seq_and_replace_locals (gimple_seq seq
)
4792 struct walk_stmt_info wi
;
4793 struct pointer_set_t
*visited
;
4796 /* There's nothing to do for NULL_TREE. */
4801 memset (&id
, 0, sizeof (id
));
4802 id
.src_fn
= current_function_decl
;
4803 id
.dst_fn
= current_function_decl
;
4804 id
.decl_map
= pointer_map_create ();
4805 id
.debug_map
= NULL
;
4807 id
.copy_decl
= copy_decl_no_change
;
4808 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
4809 id
.transform_new_cfg
= false;
4810 id
.transform_return_to_modify
= false;
4811 id
.transform_parameter
= false;
4812 id
.transform_lang_insert_block
= NULL
;
4814 /* Walk the tree once to find local labels. */
4815 memset (&wi
, 0, sizeof (wi
));
4816 visited
= pointer_set_create ();
4819 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, &wi
);
4820 pointer_set_destroy (visited
);
4822 copy
= gimple_seq_copy (seq
);
4824 /* Walk the copy, remapping decls. */
4825 memset (&wi
, 0, sizeof (wi
));
4827 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, &wi
);
4830 pointer_map_destroy (id
.decl_map
);
4832 pointer_map_destroy (id
.debug_map
);
4838 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4841 debug_find_tree_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
, void *data
)
4850 debug_find_tree (tree top
, tree search
)
4852 return walk_tree_without_duplicates (&top
, debug_find_tree_1
, search
) != 0;
4856 /* Declare the variables created by the inliner. Add all the variables in
4857 VARS to BIND_EXPR. */
4860 declare_inline_vars (tree block
, tree vars
)
4863 for (t
= vars
; t
; t
= DECL_CHAIN (t
))
4865 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
4866 gcc_assert (!TREE_STATIC (t
) && !TREE_ASM_WRITTEN (t
));
4867 add_local_decl (cfun
, t
);
4871 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), vars
);
4874 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
4875 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
4876 VAR_DECL translation. */
4879 copy_decl_for_dup_finish (copy_body_data
*id
, tree decl
, tree copy
)
4881 /* Don't generate debug information for the copy if we wouldn't have
4882 generated it for the copy either. */
4883 DECL_ARTIFICIAL (copy
) = DECL_ARTIFICIAL (decl
);
4884 DECL_IGNORED_P (copy
) = DECL_IGNORED_P (decl
);
4886 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4887 declaration inspired this copy. */
4888 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
4890 /* The new variable/label has no RTL, yet. */
4891 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy
), TS_DECL_WRTL
)
4892 && !TREE_STATIC (copy
) && !DECL_EXTERNAL (copy
))
4893 SET_DECL_RTL (copy
, 0);
4895 /* These args would always appear unused, if not for this. */
4896 TREE_USED (copy
) = 1;
4898 /* Set the context for the new declaration. */
4899 if (!DECL_CONTEXT (decl
))
4900 /* Globals stay global. */
4902 else if (DECL_CONTEXT (decl
) != id
->src_fn
)
4903 /* Things that weren't in the scope of the function we're inlining
4904 from aren't in the scope we're inlining to, either. */
4906 else if (TREE_STATIC (decl
))
4907 /* Function-scoped static variables should stay in the original
4911 /* Ordinary automatic local variables are now in the scope of the
4913 DECL_CONTEXT (copy
) = id
->dst_fn
;
4919 copy_decl_to_var (tree decl
, copy_body_data
*id
)
4923 gcc_assert (TREE_CODE (decl
) == PARM_DECL
4924 || TREE_CODE (decl
) == RESULT_DECL
);
4926 type
= TREE_TYPE (decl
);
4928 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
4929 VAR_DECL
, DECL_NAME (decl
), type
);
4930 if (DECL_PT_UID_SET_P (decl
))
4931 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
4932 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
4933 TREE_READONLY (copy
) = TREE_READONLY (decl
);
4934 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
4935 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
4937 return copy_decl_for_dup_finish (id
, decl
, copy
);
4940 /* Like copy_decl_to_var, but create a return slot object instead of a
4941 pointer variable for return by invisible reference. */
4944 copy_result_decl_to_var (tree decl
, copy_body_data
*id
)
4948 gcc_assert (TREE_CODE (decl
) == PARM_DECL
4949 || TREE_CODE (decl
) == RESULT_DECL
);
4951 type
= TREE_TYPE (decl
);
4952 if (DECL_BY_REFERENCE (decl
))
4953 type
= TREE_TYPE (type
);
4955 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
4956 VAR_DECL
, DECL_NAME (decl
), type
);
4957 if (DECL_PT_UID_SET_P (decl
))
4958 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
4959 TREE_READONLY (copy
) = TREE_READONLY (decl
);
4960 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
4961 if (!DECL_BY_REFERENCE (decl
))
4963 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
4964 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
4967 return copy_decl_for_dup_finish (id
, decl
, copy
);
4971 copy_decl_no_change (tree decl
, copy_body_data
*id
)
4975 copy
= copy_node (decl
);
4977 /* The COPY is not abstract; it will be generated in DST_FN. */
4978 DECL_ABSTRACT (copy
) = 0;
4979 lang_hooks
.dup_lang_specific_decl (copy
);
4981 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4982 been taken; it's for internal bookkeeping in expand_goto_internal. */
4983 if (TREE_CODE (copy
) == LABEL_DECL
)
4985 TREE_ADDRESSABLE (copy
) = 0;
4986 LABEL_DECL_UID (copy
) = -1;
4989 return copy_decl_for_dup_finish (id
, decl
, copy
);
4993 copy_decl_maybe_to_var (tree decl
, copy_body_data
*id
)
4995 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
4996 return copy_decl_to_var (decl
, id
);
4998 return copy_decl_no_change (decl
, id
);
5001 /* Return a copy of the function's argument tree. */
5003 copy_arguments_for_versioning (tree orig_parm
, copy_body_data
* id
,
5004 bitmap args_to_skip
, tree
*vars
)
5007 tree new_parm
= NULL
;
5012 for (arg
= orig_parm
; arg
; arg
= DECL_CHAIN (arg
), i
++)
5013 if (!args_to_skip
|| !bitmap_bit_p (args_to_skip
, i
))
5015 tree new_tree
= remap_decl (arg
, id
);
5016 if (TREE_CODE (new_tree
) != PARM_DECL
)
5017 new_tree
= id
->copy_decl (arg
, id
);
5018 lang_hooks
.dup_lang_specific_decl (new_tree
);
5020 parg
= &DECL_CHAIN (new_tree
);
5022 else if (!pointer_map_contains (id
->decl_map
, arg
))
5024 /* Make an equivalent VAR_DECL. If the argument was used
5025 as temporary variable later in function, the uses will be
5026 replaced by local variable. */
5027 tree var
= copy_decl_to_var (arg
, id
);
5028 insert_decl_map (id
, arg
, var
);
5029 /* Declare this new variable. */
5030 DECL_CHAIN (var
) = *vars
;
5036 /* Return a copy of the function's static chain. */
5038 copy_static_chain (tree static_chain
, copy_body_data
* id
)
5040 tree
*chain_copy
, *pvar
;
5042 chain_copy
= &static_chain
;
5043 for (pvar
= chain_copy
; *pvar
; pvar
= &DECL_CHAIN (*pvar
))
5045 tree new_tree
= remap_decl (*pvar
, id
);
5046 lang_hooks
.dup_lang_specific_decl (new_tree
);
5047 DECL_CHAIN (new_tree
) = DECL_CHAIN (*pvar
);
5050 return static_chain
;
5053 /* Return true if the function is allowed to be versioned.
5054 This is a guard for the versioning functionality. */
5057 tree_versionable_function_p (tree fndecl
)
5059 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl
))
5060 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl
), fndecl
) == NULL
);
5063 /* Delete all unreachable basic blocks and update callgraph.
5064 Doing so is somewhat nontrivial because we need to update all clones and
5065 remove inline function that become unreachable. */
5068 delete_unreachable_blocks_update_callgraph (copy_body_data
*id
)
5070 bool changed
= false;
5071 basic_block b
, next_bb
;
5073 find_unreachable_blocks ();
5075 /* Delete all unreachable basic blocks. */
5077 for (b
= ENTRY_BLOCK_PTR
->next_bb
; b
!= EXIT_BLOCK_PTR
; b
= next_bb
)
5079 next_bb
= b
->next_bb
;
5081 if (!(b
->flags
& BB_REACHABLE
))
5083 gimple_stmt_iterator bsi
;
5085 for (bsi
= gsi_start_bb (b
); !gsi_end_p (bsi
); gsi_next (&bsi
))
5087 struct cgraph_edge
*e
;
5088 struct cgraph_node
*node
;
5090 ipa_remove_stmt_references ((symtab_node
)id
->dst_node
, gsi_stmt (bsi
));
5092 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5093 &&(e
= cgraph_edge (id
->dst_node
, gsi_stmt (bsi
))) != NULL
)
5095 if (!e
->inline_failed
)
5096 cgraph_remove_node_and_inline_clones (e
->callee
, id
->dst_node
);
5098 cgraph_remove_edge (e
);
5100 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
5101 && id
->dst_node
->clones
)
5102 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5104 ipa_remove_stmt_references ((symtab_node
)node
, gsi_stmt (bsi
));
5105 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5106 && (e
= cgraph_edge (node
, gsi_stmt (bsi
))) != NULL
)
5108 if (!e
->inline_failed
)
5109 cgraph_remove_node_and_inline_clones (e
->callee
, id
->dst_node
);
5111 cgraph_remove_edge (e
);
5115 node
= node
->clones
;
5116 else if (node
->next_sibling_clone
)
5117 node
= node
->next_sibling_clone
;
5120 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5121 node
= node
->clone_of
;
5122 if (node
!= id
->dst_node
)
5123 node
= node
->next_sibling_clone
;
5127 delete_basic_block (b
);
5135 /* Update clone info after duplication. */
5138 update_clone_info (copy_body_data
* id
)
5140 struct cgraph_node
*node
;
5141 if (!id
->dst_node
->clones
)
5143 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5145 /* First update replace maps to match the new body. */
5146 if (node
->clone
.tree_map
)
5149 for (i
= 0; i
< vec_safe_length (node
->clone
.tree_map
); i
++)
5151 struct ipa_replace_map
*replace_info
;
5152 replace_info
= (*node
->clone
.tree_map
)[i
];
5153 walk_tree (&replace_info
->old_tree
, copy_tree_body_r
, id
, NULL
);
5154 walk_tree (&replace_info
->new_tree
, copy_tree_body_r
, id
, NULL
);
5158 node
= node
->clones
;
5159 else if (node
->next_sibling_clone
)
5160 node
= node
->next_sibling_clone
;
5163 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5164 node
= node
->clone_of
;
5165 if (node
!= id
->dst_node
)
5166 node
= node
->next_sibling_clone
;
5171 /* Create a copy of a function's tree.
5172 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5173 of the original function and the new copied function
5174 respectively. In case we want to replace a DECL
5175 tree with another tree while duplicating the function's
5176 body, TREE_MAP represents the mapping between these
5177 trees. If UPDATE_CLONES is set, the call_stmt fields
5178 of edges of clones of the function will be updated.
5180 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5182 If SKIP_RETURN is true, the new version will return void.
5183 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5184 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5187 tree_function_versioning (tree old_decl
, tree new_decl
,
5188 vec
<ipa_replace_map_p
, va_gc
> *tree_map
,
5189 bool update_clones
, bitmap args_to_skip
,
5190 bool skip_return
, bitmap blocks_to_copy
,
5191 basic_block new_entry
)
5193 struct cgraph_node
*old_version_node
;
5194 struct cgraph_node
*new_version_node
;
5198 struct ipa_replace_map
*replace_info
;
5199 basic_block old_entry_block
, bb
;
5200 vec
<gimple
> init_stmts
;
5201 init_stmts
.create (10);
5202 tree vars
= NULL_TREE
;
5204 gcc_assert (TREE_CODE (old_decl
) == FUNCTION_DECL
5205 && TREE_CODE (new_decl
) == FUNCTION_DECL
);
5206 DECL_POSSIBLY_INLINED (old_decl
) = 1;
5208 old_version_node
= cgraph_get_node (old_decl
);
5209 gcc_checking_assert (old_version_node
);
5210 new_version_node
= cgraph_get_node (new_decl
);
5211 gcc_checking_assert (new_version_node
);
5213 /* Copy over debug args. */
5214 if (DECL_HAS_DEBUG_ARGS_P (old_decl
))
5216 vec
<tree
, va_gc
> **new_debug_args
, **old_debug_args
;
5217 gcc_checking_assert (decl_debug_args_lookup (new_decl
) == NULL
);
5218 DECL_HAS_DEBUG_ARGS_P (new_decl
) = 0;
5219 old_debug_args
= decl_debug_args_lookup (old_decl
);
5222 new_debug_args
= decl_debug_args_insert (new_decl
);
5223 *new_debug_args
= vec_safe_copy (*old_debug_args
);
5227 /* Output the inlining info for this abstract function, since it has been
5228 inlined. If we don't do this now, we can lose the information about the
5229 variables in the function when the blocks get blown away as soon as we
5230 remove the cgraph node. */
5231 (*debug_hooks
->outlining_inline_function
) (old_decl
);
5233 DECL_ARTIFICIAL (new_decl
) = 1;
5234 DECL_ABSTRACT_ORIGIN (new_decl
) = DECL_ORIGIN (old_decl
);
5235 if (DECL_ORIGIN (old_decl
) == old_decl
)
5236 old_version_node
->used_as_abstract_origin
= true;
5237 DECL_FUNCTION_PERSONALITY (new_decl
) = DECL_FUNCTION_PERSONALITY (old_decl
);
5239 /* Prepare the data structures for the tree copy. */
5240 memset (&id
, 0, sizeof (id
));
5242 /* Generate a new name for the new version. */
5243 id
.statements_to_fold
= pointer_set_create ();
5245 id
.decl_map
= pointer_map_create ();
5246 id
.debug_map
= NULL
;
5247 id
.src_fn
= old_decl
;
5248 id
.dst_fn
= new_decl
;
5249 id
.src_node
= old_version_node
;
5250 id
.dst_node
= new_version_node
;
5251 id
.src_cfun
= DECL_STRUCT_FUNCTION (old_decl
);
5252 id
.blocks_to_copy
= blocks_to_copy
;
5253 if (id
.src_node
->ipa_transforms_to_apply
.exists ())
5255 vec
<ipa_opt_pass
> old_transforms_to_apply
5256 = id
.dst_node
->ipa_transforms_to_apply
;
5259 id
.dst_node
->ipa_transforms_to_apply
5260 = id
.src_node
->ipa_transforms_to_apply
.copy ();
5261 for (i
= 0; i
< old_transforms_to_apply
.length (); i
++)
5262 id
.dst_node
->ipa_transforms_to_apply
.safe_push (old_transforms_to_apply
[i
]);
5263 old_transforms_to_apply
.release ();
5266 id
.copy_decl
= copy_decl_no_change
;
5267 id
.transform_call_graph_edges
5268 = update_clones
? CB_CGE_MOVE_CLONES
: CB_CGE_MOVE
;
5269 id
.transform_new_cfg
= true;
5270 id
.transform_return_to_modify
= false;
5271 id
.transform_parameter
= false;
5272 id
.transform_lang_insert_block
= NULL
;
5274 old_entry_block
= ENTRY_BLOCK_PTR_FOR_FUNCTION
5275 (DECL_STRUCT_FUNCTION (old_decl
));
5276 DECL_RESULT (new_decl
) = DECL_RESULT (old_decl
);
5277 DECL_ARGUMENTS (new_decl
) = DECL_ARGUMENTS (old_decl
);
5278 initialize_cfun (new_decl
, old_decl
,
5279 old_entry_block
->count
);
5280 DECL_STRUCT_FUNCTION (new_decl
)->gimple_df
->ipa_pta
5281 = id
.src_cfun
->gimple_df
->ipa_pta
;
5283 /* Copy the function's static chain. */
5284 p
= DECL_STRUCT_FUNCTION (old_decl
)->static_chain_decl
;
5286 DECL_STRUCT_FUNCTION (new_decl
)->static_chain_decl
=
5287 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl
)->static_chain_decl
,
5290 /* If there's a tree_map, prepare for substitution. */
5292 for (i
= 0; i
< tree_map
->length (); i
++)
5295 replace_info
= (*tree_map
)[i
];
5296 if (replace_info
->replace_p
)
5298 if (!replace_info
->old_tree
)
5300 int i
= replace_info
->parm_num
;
5304 for (parm
= DECL_ARGUMENTS (old_decl
); i
; parm
= DECL_CHAIN (parm
))
5306 replace_info
->old_tree
= parm
;
5307 req_type
= TREE_TYPE (parm
);
5308 if (!useless_type_conversion_p (req_type
, TREE_TYPE (replace_info
->new_tree
)))
5310 if (fold_convertible_p (req_type
, replace_info
->new_tree
))
5311 replace_info
->new_tree
= fold_build1 (NOP_EXPR
, req_type
, replace_info
->new_tree
);
5312 else if (TYPE_SIZE (req_type
) == TYPE_SIZE (TREE_TYPE (replace_info
->new_tree
)))
5313 replace_info
->new_tree
= fold_build1 (VIEW_CONVERT_EXPR
, req_type
, replace_info
->new_tree
);
5318 fprintf (dump_file
, " const ");
5319 print_generic_expr (dump_file
, replace_info
->new_tree
, 0);
5320 fprintf (dump_file
, " can't be converted to param ");
5321 print_generic_expr (dump_file
, parm
, 0);
5322 fprintf (dump_file
, "\n");
5324 replace_info
->old_tree
= NULL
;
5329 gcc_assert (TREE_CODE (replace_info
->old_tree
) == PARM_DECL
);
5330 if (replace_info
->old_tree
)
5332 init
= setup_one_parameter (&id
, replace_info
->old_tree
,
5333 replace_info
->new_tree
, id
.src_fn
,
5337 init_stmts
.safe_push (init
);
5341 /* Copy the function's arguments. */
5342 if (DECL_ARGUMENTS (old_decl
) != NULL_TREE
)
5343 DECL_ARGUMENTS (new_decl
) =
5344 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl
), &id
,
5345 args_to_skip
, &vars
);
5347 DECL_INITIAL (new_decl
) = remap_blocks (DECL_INITIAL (id
.src_fn
), &id
);
5348 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl
)) = new_decl
;
5350 declare_inline_vars (DECL_INITIAL (new_decl
), vars
);
5352 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl
)->local_decls
))
5353 /* Add local vars. */
5354 add_local_variables (DECL_STRUCT_FUNCTION (old_decl
), cfun
, &id
);
5356 if (DECL_RESULT (old_decl
) == NULL_TREE
)
5358 else if (skip_return
&& !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl
))))
5360 DECL_RESULT (new_decl
)
5361 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl
)),
5362 RESULT_DECL
, NULL_TREE
, void_type_node
);
5363 DECL_CONTEXT (DECL_RESULT (new_decl
)) = new_decl
;
5364 cfun
->returns_struct
= 0;
5365 cfun
->returns_pcc_struct
= 0;
5370 DECL_RESULT (new_decl
) = remap_decl (DECL_RESULT (old_decl
), &id
);
5371 lang_hooks
.dup_lang_specific_decl (DECL_RESULT (new_decl
));
5372 if (gimple_in_ssa_p (id
.src_cfun
)
5373 && DECL_BY_REFERENCE (DECL_RESULT (old_decl
))
5374 && (old_name
= ssa_default_def (id
.src_cfun
, DECL_RESULT (old_decl
))))
5376 tree new_name
= make_ssa_name (DECL_RESULT (new_decl
), NULL
);
5377 insert_decl_map (&id
, old_name
, new_name
);
5378 SSA_NAME_DEF_STMT (new_name
) = gimple_build_nop ();
5379 set_ssa_default_def (cfun
, DECL_RESULT (new_decl
), new_name
);
5383 /* Set up the destination functions loop tree. */
5384 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl
)) != NULL
)
5386 cfun
->curr_properties
&= ~PROP_loops
;
5387 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
5388 cfun
->curr_properties
|= PROP_loops
;
5391 /* Copy the Function's body. */
5392 copy_body (&id
, old_entry_block
->count
, REG_BR_PROB_BASE
,
5393 ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, new_entry
);
5395 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5396 number_blocks (new_decl
);
5398 /* We want to create the BB unconditionally, so that the addition of
5399 debug stmts doesn't affect BB count, which may in the end cause
5400 codegen differences. */
5401 bb
= split_edge (single_succ_edge (ENTRY_BLOCK_PTR
));
5402 while (init_stmts
.length ())
5403 insert_init_stmt (&id
, bb
, init_stmts
.pop ());
5404 update_clone_info (&id
);
5406 /* Remap the nonlocal_goto_save_area, if any. */
5407 if (cfun
->nonlocal_goto_save_area
)
5409 struct walk_stmt_info wi
;
5411 memset (&wi
, 0, sizeof (wi
));
5413 walk_tree (&cfun
->nonlocal_goto_save_area
, remap_gimple_op_r
, &wi
, NULL
);
5417 pointer_map_destroy (id
.decl_map
);
5419 pointer_map_destroy (id
.debug_map
);
5420 free_dominance_info (CDI_DOMINATORS
);
5421 free_dominance_info (CDI_POST_DOMINATORS
);
5423 fold_marked_statements (0, id
.statements_to_fold
);
5424 pointer_set_destroy (id
.statements_to_fold
);
5425 fold_cond_expr_cond ();
5426 delete_unreachable_blocks_update_callgraph (&id
);
5427 if (id
.dst_node
->symbol
.definition
)
5428 cgraph_rebuild_references ();
5429 update_ssa (TODO_update_ssa
);
5431 /* After partial cloning we need to rescale frequencies, so they are
5432 within proper range in the cloned function. */
5435 struct cgraph_edge
*e
;
5436 rebuild_frequencies ();
5438 new_version_node
->count
= ENTRY_BLOCK_PTR
->count
;
5439 for (e
= new_version_node
->callees
; e
; e
= e
->next_callee
)
5441 basic_block bb
= gimple_bb (e
->call_stmt
);
5442 e
->frequency
= compute_call_stmt_bb_frequency (current_function_decl
,
5444 e
->count
= bb
->count
;
5446 for (e
= new_version_node
->indirect_calls
; e
; e
= e
->next_callee
)
5448 basic_block bb
= gimple_bb (e
->call_stmt
);
5449 e
->frequency
= compute_call_stmt_bb_frequency (current_function_decl
,
5451 e
->count
= bb
->count
;
5455 free_dominance_info (CDI_DOMINATORS
);
5456 free_dominance_info (CDI_POST_DOMINATORS
);
5458 gcc_assert (!id
.debug_stmts
.exists ());
5459 init_stmts
.release ();
5464 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
5465 the callee and return the inlined body on success. */
5468 maybe_inline_call_in_expr (tree exp
)
5470 tree fn
= get_callee_fndecl (exp
);
5472 /* We can only try to inline "const" functions. */
5473 if (fn
&& TREE_READONLY (fn
) && DECL_SAVED_TREE (fn
))
5475 struct pointer_map_t
*decl_map
= pointer_map_create ();
5476 call_expr_arg_iterator iter
;
5480 /* Remap the parameters. */
5481 for (param
= DECL_ARGUMENTS (fn
), arg
= first_call_expr_arg (exp
, &iter
);
5483 param
= DECL_CHAIN (param
), arg
= next_call_expr_arg (&iter
))
5484 *pointer_map_insert (decl_map
, param
) = arg
;
5486 memset (&id
, 0, sizeof (id
));
5488 id
.dst_fn
= current_function_decl
;
5489 id
.src_cfun
= DECL_STRUCT_FUNCTION (fn
);
5490 id
.decl_map
= decl_map
;
5492 id
.copy_decl
= copy_decl_no_change
;
5493 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
5494 id
.transform_new_cfg
= false;
5495 id
.transform_return_to_modify
= true;
5496 id
.transform_parameter
= true;
5497 id
.transform_lang_insert_block
= NULL
;
5499 /* Make sure not to unshare trees behind the front-end's back
5500 since front-end specific mechanisms may rely on sharing. */
5501 id
.regimplify
= false;
5502 id
.do_not_unshare
= true;
5504 /* We're not inside any EH region. */
5507 t
= copy_tree_body (&id
);
5508 pointer_map_destroy (decl_map
);
5510 /* We can only return something suitable for use in a GENERIC
5512 if (TREE_CODE (t
) == MODIFY_EXPR
)
5513 return TREE_OPERAND (t
, 1);
5519 /* Duplicate a type, fields and all. */
5522 build_duplicate_type (tree type
)
5524 struct copy_body_data id
;
5526 memset (&id
, 0, sizeof (id
));
5527 id
.src_fn
= current_function_decl
;
5528 id
.dst_fn
= current_function_decl
;
5530 id
.decl_map
= pointer_map_create ();
5531 id
.debug_map
= NULL
;
5532 id
.copy_decl
= copy_decl_no_change
;
5534 type
= remap_type_1 (type
, &id
);
5536 pointer_map_destroy (id
.decl_map
);
5538 pointer_map_destroy (id
.debug_map
);
5540 TYPE_CANONICAL (type
) = type
;