2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
42 #include "tree-iterator.h"
44 #include "gimple-fold.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
51 #include "tree-into-ssa.h"
57 #include "value-prof.h"
60 #include "stringpool.h"
64 /* I'm not real happy about this, but we need to handle gimple and
65 non-gimple trees. */
67 /* Inlining, Cloning, Versioning, Parallelization
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements are adjusted accordingly.
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
81 Versioning: a function body is duplicated and the result is a new
82 function rather than into blocks of an existing function as with
83 inlining. Some parameters will become constants.
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
89 All of these will simultaneously lookup any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined) those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
99 /* To do:
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
108 o Provide heuristics to clamp inlining of recursive template
109 calls? */
112 /* Weights that estimate_num_insns uses to estimate the size of the
113 produced code. */
115 eni_weights eni_size_weights
;
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
120 eni_weights eni_time_weights
;
124 static tree
declare_return_variable (copy_body_data
*, tree
, tree
,
126 static void remap_block (tree
*, copy_body_data
*);
127 static void copy_bind_expr (tree
*, int *, copy_body_data
*);
128 static void declare_inline_vars (tree
, tree
);
129 static void remap_save_expr (tree
*, hash_map
<tree
, tree
> *, int *);
130 static void prepend_lexical_block (tree current_block
, tree new_block
);
131 static tree
copy_decl_to_var (tree
, copy_body_data
*);
132 static tree
copy_result_decl_to_var (tree
, copy_body_data
*);
133 static tree
copy_decl_maybe_to_var (tree
, copy_body_data
*);
134 static gimple_seq
remap_gimple_stmt (gimple
*, copy_body_data
*);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data
*id
);
136 static void insert_init_stmt (copy_body_data
*, basic_block
, gimple
*);
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, it is used for more than that. */
142 insert_decl_map (copy_body_data
*id
, tree key
, tree value
)
144 id
->decl_map
->put (key
, value
);
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
149 id
->decl_map
->put (value
, value
);
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
156 insert_debug_decl_map (copy_body_data
*id
, tree key
, tree value
)
158 if (!gimple_in_ssa_p (id
->src_cfun
))
161 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
164 if (!target_for_debug_bind (key
))
167 gcc_assert (TREE_CODE (key
) == PARM_DECL
);
168 gcc_assert (VAR_P (value
));
171 id
->debug_map
= new hash_map
<tree
, tree
>;
173 id
->debug_map
->put (key
, value
);
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 version. */
180 static int processing_debug_stmt
= 0;
182 /* Construct new SSA name for old NAME. ID is the inline context. */
185 remap_ssa_name (tree name
, copy_body_data
*id
)
190 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
192 n
= id
->decl_map
->get (name
);
194 return unshare_expr (*n
);
196 if (processing_debug_stmt
)
198 if (SSA_NAME_IS_DEFAULT_DEF (name
)
199 && TREE_CODE (SSA_NAME_VAR (name
)) == PARM_DECL
200 && id
->entry_bb
== NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)))
203 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
205 gimple_stmt_iterator gsi
;
206 tree val
= SSA_NAME_VAR (name
);
208 n
= id
->decl_map
->get (val
);
211 if (TREE_CODE (val
) != PARM_DECL
)
213 processing_debug_stmt
= -1;
216 def_temp
= gimple_build_debug_source_bind (vexpr
, val
, NULL
);
217 DECL_ARTIFICIAL (vexpr
) = 1;
218 TREE_TYPE (vexpr
) = TREE_TYPE (name
);
219 SET_DECL_MODE (vexpr
, DECL_MODE (SSA_NAME_VAR (name
)));
220 gsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
221 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
225 processing_debug_stmt
= -1;
229 /* Remap anonymous SSA names or SSA names of anonymous decls. */
230 var
= SSA_NAME_VAR (name
);
232 || (!SSA_NAME_IS_DEFAULT_DEF (name
)
234 && !VAR_DECL_IS_VIRTUAL_OPERAND (var
)
235 && DECL_ARTIFICIAL (var
)
236 && DECL_IGNORED_P (var
)
237 && !DECL_NAME (var
)))
239 struct ptr_info_def
*pi
;
240 new_tree
= make_ssa_name (remap_type (TREE_TYPE (name
), id
));
241 if (!var
&& SSA_NAME_IDENTIFIER (name
))
242 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree
, SSA_NAME_IDENTIFIER (name
));
243 insert_decl_map (id
, name
, new_tree
);
244 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
245 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
246 /* At least IPA points-to info can be directly transferred. */
247 if (id
->src_cfun
->gimple_df
248 && id
->src_cfun
->gimple_df
->ipa_pta
249 && POINTER_TYPE_P (TREE_TYPE (name
))
250 && (pi
= SSA_NAME_PTR_INFO (name
))
253 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
259 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
261 new_tree
= remap_decl (var
, id
);
263 /* We might've substituted constant or another SSA_NAME for
266 Replace the SSA name representing RESULT_DECL by variable during
267 inlining: this saves us from need to introduce PHI node in a case
268 return value is just partly initialized. */
269 if ((VAR_P (new_tree
) || TREE_CODE (new_tree
) == PARM_DECL
)
270 && (!SSA_NAME_VAR (name
)
271 || TREE_CODE (SSA_NAME_VAR (name
)) != RESULT_DECL
272 || !id
->transform_return_to_modify
))
274 struct ptr_info_def
*pi
;
275 new_tree
= make_ssa_name (new_tree
);
276 insert_decl_map (id
, name
, new_tree
);
277 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
278 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
279 /* At least IPA points-to info can be directly transferred. */
280 if (id
->src_cfun
->gimple_df
281 && id
->src_cfun
->gimple_df
->ipa_pta
282 && POINTER_TYPE_P (TREE_TYPE (name
))
283 && (pi
= SSA_NAME_PTR_INFO (name
))
286 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
289 if (SSA_NAME_IS_DEFAULT_DEF (name
))
291 /* By inlining function having uninitialized variable, we might
292 extend the lifetime (variable might get reused). This cause
293 ICE in the case we end up extending lifetime of SSA name across
294 abnormal edge, but also increase register pressure.
296 We simply initialize all uninitialized vars by 0 except
297 for case we are inlining to very first BB. We can avoid
298 this for all BBs that are not inside strongly connected
299 regions of the CFG, but this is expensive to test. */
301 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
)
302 && (!SSA_NAME_VAR (name
)
303 || TREE_CODE (SSA_NAME_VAR (name
)) != PARM_DECL
)
304 && (id
->entry_bb
!= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
306 || EDGE_COUNT (id
->entry_bb
->preds
) != 1))
308 gimple_stmt_iterator gsi
= gsi_last_bb (id
->entry_bb
);
310 tree zero
= build_zero_cst (TREE_TYPE (new_tree
));
312 init_stmt
= gimple_build_assign (new_tree
, zero
);
313 gsi_insert_after (&gsi
, init_stmt
, GSI_NEW_STMT
);
314 SSA_NAME_IS_DEFAULT_DEF (new_tree
) = 0;
318 SSA_NAME_DEF_STMT (new_tree
) = gimple_build_nop ();
319 set_ssa_default_def (cfun
, SSA_NAME_VAR (new_tree
), new_tree
);
324 insert_decl_map (id
, name
, new_tree
);
328 /* Remap DECL during the copying of the BLOCK tree for the function. */
331 remap_decl (tree decl
, copy_body_data
*id
)
335 /* We only remap local variables in the current function. */
337 /* See if we have remapped this declaration. */
339 n
= id
->decl_map
->get (decl
);
341 if (!n
&& processing_debug_stmt
)
343 processing_debug_stmt
= -1;
347 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
348 necessary DECLs have already been remapped and we do not want to duplicate
349 a decl coming from outside of the sequence we are copying. */
351 && id
->prevent_decl_creation_for_types
352 && id
->remapping_type_depth
> 0
353 && (VAR_P (decl
) || TREE_CODE (decl
) == PARM_DECL
))
356 /* If we didn't already have an equivalent for this declaration, create one
360 /* Make a copy of the variable or label. */
361 tree t
= id
->copy_decl (decl
, id
);
363 /* Remember it, so that if we encounter this local entity again
364 we can reuse this copy. Do this early because remap_type may
365 need this decl for TYPE_STUB_DECL. */
366 insert_decl_map (id
, decl
, t
);
371 /* Remap types, if necessary. */
372 TREE_TYPE (t
) = remap_type (TREE_TYPE (t
), id
);
373 if (TREE_CODE (t
) == TYPE_DECL
)
375 DECL_ORIGINAL_TYPE (t
) = remap_type (DECL_ORIGINAL_TYPE (t
), id
);
377 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
378 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
379 is not set on the TYPE_DECL, for example in LTO mode. */
380 if (DECL_ORIGINAL_TYPE (t
) == TREE_TYPE (t
))
382 tree x
= build_variant_type_copy (TREE_TYPE (t
));
383 TYPE_STUB_DECL (x
) = TYPE_STUB_DECL (TREE_TYPE (t
));
384 TYPE_NAME (x
) = TYPE_NAME (TREE_TYPE (t
));
385 DECL_ORIGINAL_TYPE (t
) = x
;
389 /* Remap sizes as necessary. */
390 walk_tree (&DECL_SIZE (t
), copy_tree_body_r
, id
, NULL
);
391 walk_tree (&DECL_SIZE_UNIT (t
), copy_tree_body_r
, id
, NULL
);
393 /* If fields, do likewise for offset and qualifier. */
394 if (TREE_CODE (t
) == FIELD_DECL
)
396 walk_tree (&DECL_FIELD_OFFSET (t
), copy_tree_body_r
, id
, NULL
);
397 if (TREE_CODE (DECL_CONTEXT (t
)) == QUAL_UNION_TYPE
)
398 walk_tree (&DECL_QUALIFIER (t
), copy_tree_body_r
, id
, NULL
);
404 if (id
->do_not_unshare
)
407 return unshare_expr (*n
);
411 remap_type_1 (tree type
, copy_body_data
*id
)
415 /* We do need a copy. build and register it now. If this is a pointer or
416 reference type, remap the designated type and make a new pointer or
418 if (TREE_CODE (type
) == POINTER_TYPE
)
420 new_tree
= build_pointer_type_for_mode (remap_type (TREE_TYPE (type
), id
),
422 TYPE_REF_CAN_ALIAS_ALL (type
));
423 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
424 new_tree
= build_type_attribute_qual_variant (new_tree
,
425 TYPE_ATTRIBUTES (type
),
427 insert_decl_map (id
, type
, new_tree
);
430 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
432 new_tree
= build_reference_type_for_mode (remap_type (TREE_TYPE (type
), id
),
434 TYPE_REF_CAN_ALIAS_ALL (type
));
435 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
436 new_tree
= build_type_attribute_qual_variant (new_tree
,
437 TYPE_ATTRIBUTES (type
),
439 insert_decl_map (id
, type
, new_tree
);
443 new_tree
= copy_node (type
);
445 insert_decl_map (id
, type
, new_tree
);
447 /* This is a new type, not a copy of an old type. Need to reassociate
448 variants. We can handle everything except the main variant lazily. */
449 t
= TYPE_MAIN_VARIANT (type
);
452 t
= remap_type (t
, id
);
453 TYPE_MAIN_VARIANT (new_tree
) = t
;
454 TYPE_NEXT_VARIANT (new_tree
) = TYPE_NEXT_VARIANT (t
);
455 TYPE_NEXT_VARIANT (t
) = new_tree
;
459 TYPE_MAIN_VARIANT (new_tree
) = new_tree
;
460 TYPE_NEXT_VARIANT (new_tree
) = NULL
;
463 if (TYPE_STUB_DECL (type
))
464 TYPE_STUB_DECL (new_tree
) = remap_decl (TYPE_STUB_DECL (type
), id
);
466 /* Lazily create pointer and reference types. */
467 TYPE_POINTER_TO (new_tree
) = NULL
;
468 TYPE_REFERENCE_TO (new_tree
) = NULL
;
470 /* Copy all types that may contain references to local variables; be sure to
471 preserve sharing in between type and its main variant when possible. */
472 switch (TREE_CODE (new_tree
))
476 case FIXED_POINT_TYPE
:
479 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
481 gcc_checking_assert (TYPE_MIN_VALUE (type
) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type
)));
482 gcc_checking_assert (TYPE_MAX_VALUE (type
) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type
)));
484 TYPE_MIN_VALUE (new_tree
) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree
));
485 TYPE_MAX_VALUE (new_tree
) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree
));
489 t
= TYPE_MIN_VALUE (new_tree
);
490 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
491 walk_tree (&TYPE_MIN_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
493 t
= TYPE_MAX_VALUE (new_tree
);
494 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
495 walk_tree (&TYPE_MAX_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
500 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
501 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
502 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
504 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
505 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
506 && TYPE_ARG_TYPES (type
) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type
)))
507 TYPE_ARG_TYPES (new_tree
) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree
));
509 walk_tree (&TYPE_ARG_TYPES (new_tree
), copy_tree_body_r
, id
, NULL
);
513 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
514 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
515 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
517 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
519 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
521 gcc_checking_assert (TYPE_DOMAIN (type
) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type
)));
522 TYPE_DOMAIN (new_tree
) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree
));
525 TYPE_DOMAIN (new_tree
) = remap_type (TYPE_DOMAIN (new_tree
), id
);
530 case QUAL_UNION_TYPE
:
531 if (TYPE_MAIN_VARIANT (type
) != type
532 && TYPE_FIELDS (type
) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type
)))
533 TYPE_FIELDS (new_tree
) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree
));
538 for (f
= TYPE_FIELDS (new_tree
); f
; f
= DECL_CHAIN (f
))
540 t
= remap_decl (f
, id
);
541 DECL_CONTEXT (t
) = new_tree
;
545 TYPE_FIELDS (new_tree
) = nreverse (nf
);
551 /* Shouldn't have been thought variable sized. */
555 /* All variants of type share the same size, so use the already remapped data. */
556 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
558 tree s
= TYPE_SIZE (type
);
559 tree mvs
= TYPE_SIZE (TYPE_MAIN_VARIANT (type
));
560 tree su
= TYPE_SIZE_UNIT (type
);
561 tree mvsu
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
562 gcc_checking_assert ((TREE_CODE (s
) == PLACEHOLDER_EXPR
563 && (TREE_CODE (mvs
) == PLACEHOLDER_EXPR
))
565 gcc_checking_assert ((TREE_CODE (su
) == PLACEHOLDER_EXPR
566 && (TREE_CODE (mvsu
) == PLACEHOLDER_EXPR
))
568 TYPE_SIZE (new_tree
) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree
));
569 TYPE_SIZE_UNIT (new_tree
) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree
));
573 walk_tree (&TYPE_SIZE (new_tree
), copy_tree_body_r
, id
, NULL
);
574 walk_tree (&TYPE_SIZE_UNIT (new_tree
), copy_tree_body_r
, id
, NULL
);
581 remap_type (tree type
, copy_body_data
*id
)
589 /* See if we have remapped this type. */
590 node
= id
->decl_map
->get (type
);
594 /* The type only needs remapping if it's variably modified. */
595 if (! variably_modified_type_p (type
, id
->src_fn
))
597 insert_decl_map (id
, type
, type
);
601 id
->remapping_type_depth
++;
602 tmp
= remap_type_1 (type
, id
);
603 id
->remapping_type_depth
--;
608 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
611 can_be_nonlocal (tree decl
, copy_body_data
*id
)
613 /* We can not duplicate function decls. */
614 if (TREE_CODE (decl
) == FUNCTION_DECL
)
617 /* Local static vars must be non-local or we get multiple declaration
619 if (VAR_P (decl
) && !auto_var_in_fn_p (decl
, id
->src_fn
))
626 remap_decls (tree decls
, vec
<tree
, va_gc
> **nonlocalized_list
,
630 tree new_decls
= NULL_TREE
;
632 /* Remap its variables. */
633 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
637 if (can_be_nonlocal (old_var
, id
))
639 /* We need to add this variable to the local decls as otherwise
640 nothing else will do so. */
641 if (VAR_P (old_var
) && ! DECL_EXTERNAL (old_var
) && cfun
)
642 add_local_decl (cfun
, old_var
);
643 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
644 && !DECL_IGNORED_P (old_var
)
645 && nonlocalized_list
)
646 vec_safe_push (*nonlocalized_list
, old_var
);
650 /* Remap the variable. */
651 new_var
= remap_decl (old_var
, id
);
653 /* If we didn't remap this variable, we can't mess with its
654 TREE_CHAIN. If we remapped this variable to the return slot, it's
655 already declared somewhere else, so don't declare it here. */
657 if (new_var
== id
->retvar
)
661 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
662 && !DECL_IGNORED_P (old_var
)
663 && nonlocalized_list
)
664 vec_safe_push (*nonlocalized_list
, old_var
);
668 gcc_assert (DECL_P (new_var
));
669 DECL_CHAIN (new_var
) = new_decls
;
672 /* Also copy value-expressions. */
673 if (VAR_P (new_var
) && DECL_HAS_VALUE_EXPR_P (new_var
))
675 tree tem
= DECL_VALUE_EXPR (new_var
);
676 bool old_regimplify
= id
->regimplify
;
677 id
->remapping_type_depth
++;
678 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
679 id
->remapping_type_depth
--;
680 id
->regimplify
= old_regimplify
;
681 SET_DECL_VALUE_EXPR (new_var
, tem
);
686 return nreverse (new_decls
);
689 /* Copy the BLOCK to contain remapped versions of the variables
690 therein. And hook the new block into the block-tree. */
693 remap_block (tree
*block
, copy_body_data
*id
)
698 /* Make the new block. */
700 new_block
= make_node (BLOCK
);
701 TREE_USED (new_block
) = TREE_USED (old_block
);
702 BLOCK_ABSTRACT_ORIGIN (new_block
) = old_block
;
703 BLOCK_SOURCE_LOCATION (new_block
) = BLOCK_SOURCE_LOCATION (old_block
);
704 BLOCK_NONLOCALIZED_VARS (new_block
)
705 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block
));
708 /* Remap its variables. */
709 BLOCK_VARS (new_block
) = remap_decls (BLOCK_VARS (old_block
),
710 &BLOCK_NONLOCALIZED_VARS (new_block
),
713 if (id
->transform_lang_insert_block
)
714 id
->transform_lang_insert_block (new_block
);
716 /* Remember the remapped block. */
717 insert_decl_map (id
, old_block
, new_block
);
720 /* Copy the whole block tree and root it in id->block. */
723 remap_blocks (tree block
, copy_body_data
*id
)
726 tree new_tree
= block
;
731 remap_block (&new_tree
, id
);
732 gcc_assert (new_tree
!= block
);
733 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
734 prepend_lexical_block (new_tree
, remap_blocks (t
, id
));
735 /* Blocks are in arbitrary order, but make things slightly prettier and do
736 not swap order when producing a copy. */
737 BLOCK_SUBBLOCKS (new_tree
) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree
));
741 /* Remap the block tree rooted at BLOCK to nothing. */
744 remap_blocks_to_null (tree block
, copy_body_data
*id
)
747 insert_decl_map (id
, block
, NULL_TREE
);
748 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
749 remap_blocks_to_null (t
, id
);
752 /* Remap the location info pointed to by LOCUS. */
755 remap_location (location_t locus
, copy_body_data
*id
)
757 if (LOCATION_BLOCK (locus
))
759 tree
*n
= id
->decl_map
->get (LOCATION_BLOCK (locus
));
762 return set_block (locus
, *n
);
765 locus
= LOCATION_LOCUS (locus
);
767 if (locus
!= UNKNOWN_LOCATION
&& id
->block
)
768 return set_block (locus
, id
->block
);
774 copy_statement_list (tree
*tp
)
776 tree_stmt_iterator oi
, ni
;
779 new_tree
= alloc_stmt_list ();
780 ni
= tsi_start (new_tree
);
781 oi
= tsi_start (*tp
);
782 TREE_TYPE (new_tree
) = TREE_TYPE (*tp
);
785 for (; !tsi_end_p (oi
); tsi_next (&oi
))
787 tree stmt
= tsi_stmt (oi
);
788 if (TREE_CODE (stmt
) == STATEMENT_LIST
)
789 /* This copy is not redundant; tsi_link_after will smash this
790 STATEMENT_LIST into the end of the one we're building, and we
791 don't want to do that with the original. */
792 copy_statement_list (&stmt
);
793 tsi_link_after (&ni
, stmt
, TSI_CONTINUE_LINKING
);
798 copy_bind_expr (tree
*tp
, int *walk_subtrees
, copy_body_data
*id
)
800 tree block
= BIND_EXPR_BLOCK (*tp
);
801 /* Copy (and replace) the statement. */
802 copy_tree_r (tp
, walk_subtrees
, NULL
);
805 remap_block (&block
, id
);
806 BIND_EXPR_BLOCK (*tp
) = block
;
809 if (BIND_EXPR_VARS (*tp
))
810 /* This will remap a lot of the same decls again, but this should be
812 BIND_EXPR_VARS (*tp
) = remap_decls (BIND_EXPR_VARS (*tp
), NULL
, id
);
816 /* Create a new gimple_seq by remapping all the statements in BODY
817 using the inlining information in ID. */
820 remap_gimple_seq (gimple_seq body
, copy_body_data
*id
)
822 gimple_stmt_iterator si
;
823 gimple_seq new_body
= NULL
;
825 for (si
= gsi_start (body
); !gsi_end_p (si
); gsi_next (&si
))
827 gimple_seq new_stmts
= remap_gimple_stmt (gsi_stmt (si
), id
);
828 gimple_seq_add_seq (&new_body
, new_stmts
);
835 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
836 block using the mapping information in ID. */
839 copy_gimple_bind (gbind
*stmt
, copy_body_data
*id
)
842 tree new_block
, new_vars
;
843 gimple_seq body
, new_body
;
845 /* Copy the statement. Note that we purposely don't use copy_stmt
846 here because we need to remap statements as we copy. */
847 body
= gimple_bind_body (stmt
);
848 new_body
= remap_gimple_seq (body
, id
);
850 new_block
= gimple_bind_block (stmt
);
852 remap_block (&new_block
, id
);
854 /* This will remap a lot of the same decls again, but this should be
856 new_vars
= gimple_bind_vars (stmt
);
858 new_vars
= remap_decls (new_vars
, NULL
, id
);
860 new_bind
= gimple_build_bind (new_vars
, new_body
, new_block
);
865 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
870 if (TREE_CODE (decl
) == SSA_NAME
)
872 decl
= SSA_NAME_VAR (decl
);
877 return (TREE_CODE (decl
) == PARM_DECL
);
880 /* Remap the dependence CLIQUE from the source to the destination function
881 as specified in ID. */
883 static unsigned short
884 remap_dependence_clique (copy_body_data
*id
, unsigned short clique
)
886 if (clique
== 0 || processing_debug_stmt
)
888 if (!id
->dependence_map
)
889 id
->dependence_map
= new hash_map
<dependence_hash
, unsigned short>;
891 unsigned short &newc
= id
->dependence_map
->get_or_insert (clique
, &existed
);
893 newc
= ++cfun
->last_clique
;
897 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
898 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
899 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
900 recursing into the children nodes of *TP. */
903 remap_gimple_op_r (tree
*tp
, int *walk_subtrees
, void *data
)
905 struct walk_stmt_info
*wi_p
= (struct walk_stmt_info
*) data
;
906 copy_body_data
*id
= (copy_body_data
*) wi_p
->info
;
907 tree fn
= id
->src_fn
;
909 /* For recursive invocations this is no longer the LHS itself. */
910 bool is_lhs
= wi_p
->is_lhs
;
911 wi_p
->is_lhs
= false;
913 if (TREE_CODE (*tp
) == SSA_NAME
)
915 *tp
= remap_ssa_name (*tp
, id
);
918 SSA_NAME_DEF_STMT (*tp
) = wi_p
->stmt
;
921 else if (auto_var_in_fn_p (*tp
, fn
))
923 /* Local variables and labels need to be replaced by equivalent
924 variables. We don't want to copy static variables; there's
925 only one of those, no matter how many times we inline the
926 containing function. Similarly for globals from an outer
930 /* Remap the declaration. */
931 new_decl
= remap_decl (*tp
, id
);
932 gcc_assert (new_decl
);
933 /* Replace this variable with the copy. */
934 STRIP_TYPE_NOPS (new_decl
);
935 /* ??? The C++ frontend uses void * pointer zero to initialize
936 any other type. This confuses the middle-end type verification.
937 As cloned bodies do not go through gimplification again the fixup
938 there doesn't trigger. */
939 if (TREE_CODE (new_decl
) == INTEGER_CST
940 && !useless_type_conversion_p (TREE_TYPE (*tp
), TREE_TYPE (new_decl
)))
941 new_decl
= fold_convert (TREE_TYPE (*tp
), new_decl
);
945 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
947 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
949 else if (TREE_CODE (*tp
) == LABEL_DECL
950 && (!DECL_CONTEXT (*tp
)
951 || decl_function_context (*tp
) == id
->src_fn
))
952 /* These may need to be remapped for EH handling. */
953 *tp
= remap_decl (*tp
, id
);
954 else if (TREE_CODE (*tp
) == FIELD_DECL
)
956 /* If the enclosing record type is variably_modified_type_p, the field
957 has already been remapped. Otherwise, it need not be. */
958 tree
*n
= id
->decl_map
->get (*tp
);
963 else if (TYPE_P (*tp
))
964 /* Types may need remapping as well. */
965 *tp
= remap_type (*tp
, id
);
966 else if (CONSTANT_CLASS_P (*tp
))
968 /* If this is a constant, we have to copy the node iff the type
969 will be remapped. copy_tree_r will not copy a constant. */
970 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
972 if (new_type
== TREE_TYPE (*tp
))
975 else if (TREE_CODE (*tp
) == INTEGER_CST
)
976 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
979 *tp
= copy_node (*tp
);
980 TREE_TYPE (*tp
) = new_type
;
985 /* Otherwise, just copy the node. Note that copy_tree_r already
986 knows not to copy VAR_DECLs, etc., so this is safe. */
988 if (TREE_CODE (*tp
) == MEM_REF
)
990 /* We need to re-canonicalize MEM_REFs from inline substitutions
991 that can happen when a pointer argument is an ADDR_EXPR.
992 Recurse here manually to allow that. */
993 tree ptr
= TREE_OPERAND (*tp
, 0);
994 tree type
= remap_type (TREE_TYPE (*tp
), id
);
996 walk_tree (&ptr
, remap_gimple_op_r
, data
, NULL
);
997 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
998 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
999 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1000 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1001 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1003 MR_DEPENDENCE_CLIQUE (*tp
)
1004 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1005 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1007 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1008 remapped a parameter as the property might be valid only
1009 for the parameter itself. */
1010 if (TREE_THIS_NOTRAP (old
)
1011 && (!is_parm (TREE_OPERAND (old
, 0))
1012 || (!id
->transform_parameter
&& is_parm (ptr
))))
1013 TREE_THIS_NOTRAP (*tp
) = 1;
1014 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1019 /* Here is the "usual case". Copy this tree node, and then
1020 tweak some special cases. */
1021 copy_tree_r (tp
, walk_subtrees
, NULL
);
1023 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1024 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1026 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1028 /* The copied TARGET_EXPR has never been expanded, even if the
1029 original node was expanded already. */
1030 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1031 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1033 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1035 /* Variable substitution need not be simple. In particular,
1036 the MEM_REF substitution above. Make sure that
1037 TREE_CONSTANT and friends are up-to-date. */
1038 int invariant
= is_gimple_min_invariant (*tp
);
1039 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
1040 recompute_tree_invariant_for_addr_expr (*tp
);
1042 /* If this used to be invariant, but is not any longer,
1043 then regimplification is probably needed. */
1044 if (invariant
&& !is_gimple_min_invariant (*tp
))
1045 id
->regimplify
= true;
1051 /* Update the TREE_BLOCK for the cloned expr. */
1054 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1055 tree old_block
= TREE_BLOCK (*tp
);
1059 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1063 TREE_SET_BLOCK (*tp
, new_block
);
1066 /* Keep iterating. */
1071 /* Called from copy_body_id via walk_tree. DATA is really a
1072 `copy_body_data *'. */
1075 copy_tree_body_r (tree
*tp
, int *walk_subtrees
, void *data
)
1077 copy_body_data
*id
= (copy_body_data
*) data
;
1078 tree fn
= id
->src_fn
;
1081 /* Begin by recognizing trees that we'll completely rewrite for the
1082 inlining context. Our output for these trees is completely
1083 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1084 into an edge). Further down, we'll handle trees that get
1085 duplicated and/or tweaked. */
1087 /* When requested, RETURN_EXPRs should be transformed to just the
1088 contained MODIFY_EXPR. The branch semantics of the return will
1089 be handled elsewhere by manipulating the CFG rather than a statement. */
1090 if (TREE_CODE (*tp
) == RETURN_EXPR
&& id
->transform_return_to_modify
)
1092 tree assignment
= TREE_OPERAND (*tp
, 0);
1094 /* If we're returning something, just turn that into an
1095 assignment into the equivalent of the original RESULT_DECL.
1096 If the "assignment" is just the result decl, the result
1097 decl has already been set (e.g. a recent "foo (&result_decl,
1098 ...)"); just toss the entire RETURN_EXPR. */
1099 if (assignment
&& TREE_CODE (assignment
) == MODIFY_EXPR
)
1101 /* Replace the RETURN_EXPR with (a copy of) the
1102 MODIFY_EXPR hanging underneath. */
1103 *tp
= copy_node (assignment
);
1105 else /* Else the RETURN_EXPR returns no value. */
1108 return (tree
) (void *)1;
1111 else if (TREE_CODE (*tp
) == SSA_NAME
)
1113 *tp
= remap_ssa_name (*tp
, id
);
1118 /* Local variables and labels need to be replaced by equivalent
1119 variables. We don't want to copy static variables; there's only
1120 one of those, no matter how many times we inline the containing
1121 function. Similarly for globals from an outer function. */
1122 else if (auto_var_in_fn_p (*tp
, fn
))
1126 /* Remap the declaration. */
1127 new_decl
= remap_decl (*tp
, id
);
1128 gcc_assert (new_decl
);
1129 /* Replace this variable with the copy. */
1130 STRIP_TYPE_NOPS (new_decl
);
1134 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1135 copy_statement_list (tp
);
1136 else if (TREE_CODE (*tp
) == SAVE_EXPR
1137 || TREE_CODE (*tp
) == TARGET_EXPR
)
1138 remap_save_expr (tp
, id
->decl_map
, walk_subtrees
);
1139 else if (TREE_CODE (*tp
) == LABEL_DECL
1140 && (! DECL_CONTEXT (*tp
)
1141 || decl_function_context (*tp
) == id
->src_fn
))
1142 /* These may need to be remapped for EH handling. */
1143 *tp
= remap_decl (*tp
, id
);
1144 else if (TREE_CODE (*tp
) == BIND_EXPR
)
1145 copy_bind_expr (tp
, walk_subtrees
, id
);
1146 /* Types may need remapping as well. */
1147 else if (TYPE_P (*tp
))
1148 *tp
= remap_type (*tp
, id
);
1150 /* If this is a constant, we have to copy the node iff the type will be
1151 remapped. copy_tree_r will not copy a constant. */
1152 else if (CONSTANT_CLASS_P (*tp
))
1154 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1156 if (new_type
== TREE_TYPE (*tp
))
1159 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1160 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
1163 *tp
= copy_node (*tp
);
1164 TREE_TYPE (*tp
) = new_type
;
1168 /* Otherwise, just copy the node. Note that copy_tree_r already
1169 knows not to copy VAR_DECLs, etc., so this is safe. */
1172 /* Here we handle trees that are not completely rewritten.
1173 First we detect some inlining-induced bogosities for
1175 if (TREE_CODE (*tp
) == MODIFY_EXPR
1176 && TREE_OPERAND (*tp
, 0) == TREE_OPERAND (*tp
, 1)
1177 && (auto_var_in_fn_p (TREE_OPERAND (*tp
, 0), fn
)))
1179 /* Some assignments VAR = VAR; don't generate any rtl code
1180 and thus don't count as variable modification. Avoid
1181 keeping bogosities like 0 = 0. */
1182 tree decl
= TREE_OPERAND (*tp
, 0), value
;
1185 n
= id
->decl_map
->get (decl
);
1189 STRIP_TYPE_NOPS (value
);
1190 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1192 *tp
= build_empty_stmt (EXPR_LOCATION (*tp
));
1193 return copy_tree_body_r (tp
, walk_subtrees
, data
);
1197 else if (TREE_CODE (*tp
) == INDIRECT_REF
)
1199 /* Get rid of *& from inline substitutions that can happen when a
1200 pointer argument is an ADDR_EXPR. */
1201 tree decl
= TREE_OPERAND (*tp
, 0);
1202 tree
*n
= id
->decl_map
->get (decl
);
1205 /* If we happen to get an ADDR_EXPR in n->value, strip
1206 it manually here as we'll eventually get ADDR_EXPRs
1207 which lie about their types pointed to. In this case
1208 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1209 but we absolutely rely on that. As fold_indirect_ref
1210 does other useful transformations, try that first, though. */
1211 tree type
= TREE_TYPE (*tp
);
1212 tree ptr
= id
->do_not_unshare
? *n
: unshare_expr (*n
);
1214 *tp
= gimple_fold_indirect_ref (ptr
);
1217 type
= remap_type (type
, id
);
1218 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1221 = fold_indirect_ref_1 (EXPR_LOCATION (ptr
), type
, ptr
);
1222 /* ??? We should either assert here or build
1223 a VIEW_CONVERT_EXPR instead of blindly leaking
1224 incompatible types to our IL. */
1226 *tp
= TREE_OPERAND (ptr
, 0);
1230 *tp
= build1 (INDIRECT_REF
, type
, ptr
);
1231 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1232 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1233 TREE_READONLY (*tp
) = TREE_READONLY (old
);
1234 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1235 have remapped a parameter as the property might be
1236 valid only for the parameter itself. */
1237 if (TREE_THIS_NOTRAP (old
)
1238 && (!is_parm (TREE_OPERAND (old
, 0))
1239 || (!id
->transform_parameter
&& is_parm (ptr
))))
1240 TREE_THIS_NOTRAP (*tp
) = 1;
1247 else if (TREE_CODE (*tp
) == MEM_REF
)
1249 /* We need to re-canonicalize MEM_REFs from inline substitutions
1250 that can happen when a pointer argument is an ADDR_EXPR.
1251 Recurse here manually to allow that. */
1252 tree ptr
= TREE_OPERAND (*tp
, 0);
1253 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1255 walk_tree (&ptr
, copy_tree_body_r
, data
, NULL
);
1256 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1257 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1258 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1259 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1260 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1262 MR_DEPENDENCE_CLIQUE (*tp
)
1263 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1264 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1266 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1267 remapped a parameter as the property might be valid only
1268 for the parameter itself. */
1269 if (TREE_THIS_NOTRAP (old
)
1270 && (!is_parm (TREE_OPERAND (old
, 0))
1271 || (!id
->transform_parameter
&& is_parm (ptr
))))
1272 TREE_THIS_NOTRAP (*tp
) = 1;
1273 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1278 /* Here is the "usual case". Copy this tree node, and then
1279 tweak some special cases. */
1280 copy_tree_r (tp
, walk_subtrees
, NULL
);
1282 /* If EXPR has block defined, map it to newly constructed block.
1283 When inlining we want EXPRs without block appear in the block
1284 of function call if we are not remapping a type. */
1287 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1288 if (TREE_BLOCK (*tp
))
1291 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1295 TREE_SET_BLOCK (*tp
, new_block
);
1298 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1299 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1301 /* The copied TARGET_EXPR has never been expanded, even if the
1302 original node was expanded already. */
1303 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1305 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1306 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1309 /* Variable substitution need not be simple. In particular, the
1310 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1311 and friends are up-to-date. */
1312 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1314 int invariant
= is_gimple_min_invariant (*tp
);
1315 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1317 /* Handle the case where we substituted an INDIRECT_REF
1318 into the operand of the ADDR_EXPR. */
1319 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
)
1321 tree t
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1322 if (TREE_TYPE (t
) != TREE_TYPE (*tp
))
1323 t
= fold_convert (remap_type (TREE_TYPE (*tp
), id
), t
);
1327 recompute_tree_invariant_for_addr_expr (*tp
);
1329 /* If this used to be invariant, but is not any longer,
1330 then regimplification is probably needed. */
1331 if (invariant
&& !is_gimple_min_invariant (*tp
))
1332 id
->regimplify
= true;
1338 /* Keep iterating. */
1342 /* Helper for remap_gimple_stmt. Given an EH region number for the
1343 source function, map that to the duplicate EH region number in
1344 the destination function. */
1347 remap_eh_region_nr (int old_nr
, copy_body_data
*id
)
1349 eh_region old_r
, new_r
;
1351 old_r
= get_eh_region_from_number_fn (id
->src_cfun
, old_nr
);
1352 new_r
= static_cast<eh_region
> (*id
->eh_map
->get (old_r
));
1354 return new_r
->index
;
1357 /* Similar, but operate on INTEGER_CSTs. */
1360 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1364 old_nr
= tree_to_shwi (old_t_nr
);
1365 new_nr
= remap_eh_region_nr (old_nr
, id
);
1367 return build_int_cst (integer_type_node
, new_nr
);
1370 /* Helper for copy_bb. Remap statement STMT using the inlining
1371 information in ID. Return the new statement copy. */
1374 remap_gimple_stmt (gimple
*stmt
, copy_body_data
*id
)
1376 gimple
*copy
= NULL
;
1377 struct walk_stmt_info wi
;
1378 bool skip_first
= false;
1379 gimple_seq stmts
= NULL
;
1381 if (is_gimple_debug (stmt
)
1382 && (gimple_debug_nonbind_marker_p (stmt
)
1383 ? !DECL_STRUCT_FUNCTION (id
->dst_fn
)->debug_nonbind_markers
1384 : !opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
)))
1387 /* Begin by recognizing trees that we'll completely rewrite for the
1388 inlining context. Our output for these trees is completely
1389 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1390 into an edge). Further down, we'll handle trees that get
1391 duplicated and/or tweaked. */
1393 /* When requested, GIMPLE_RETURNs should be transformed to just the
1394 contained GIMPLE_ASSIGN. The branch semantics of the return will
1395 be handled elsewhere by manipulating the CFG rather than the
1397 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1399 tree retval
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1400 tree retbnd
= gimple_return_retbnd (stmt
);
1401 tree bndslot
= id
->retbnd
;
1403 if (retbnd
&& bndslot
)
1405 gimple
*bndcopy
= gimple_build_assign (bndslot
, retbnd
);
1406 memset (&wi
, 0, sizeof (wi
));
1408 walk_gimple_op (bndcopy
, remap_gimple_op_r
, &wi
);
1409 gimple_seq_add_stmt (&stmts
, bndcopy
);
1412 /* If we're returning something, just turn that into an
1413 assignment into the equivalent of the original RESULT_DECL.
1414 If RETVAL is just the result decl, the result decl has
1415 already been set (e.g. a recent "foo (&result_decl, ...)");
1416 just toss the entire GIMPLE_RETURN. */
1418 && (TREE_CODE (retval
) != RESULT_DECL
1419 && (TREE_CODE (retval
) != SSA_NAME
1420 || ! SSA_NAME_VAR (retval
)
1421 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1423 copy
= gimple_build_assign (id
->do_not_unshare
1424 ? id
->retvar
: unshare_expr (id
->retvar
),
1426 /* id->retvar is already substituted. Skip it on later remapping. */
1432 else if (gimple_has_substatements (stmt
))
1436 /* When cloning bodies from the C++ front end, we will be handed bodies
1437 in High GIMPLE form. Handle here all the High GIMPLE statements that
1438 have embedded statements. */
1439 switch (gimple_code (stmt
))
1442 copy
= copy_gimple_bind (as_a
<gbind
*> (stmt
), id
);
1447 gcatch
*catch_stmt
= as_a
<gcatch
*> (stmt
);
1448 s1
= remap_gimple_seq (gimple_catch_handler (catch_stmt
), id
);
1449 copy
= gimple_build_catch (gimple_catch_types (catch_stmt
), s1
);
1453 case GIMPLE_EH_FILTER
:
1454 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1455 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1459 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1460 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1461 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1464 case GIMPLE_WITH_CLEANUP_EXPR
:
1465 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1466 copy
= gimple_build_wce (s1
);
1469 case GIMPLE_OMP_PARALLEL
:
1471 gomp_parallel
*omp_par_stmt
= as_a
<gomp_parallel
*> (stmt
);
1472 s1
= remap_gimple_seq (gimple_omp_body (omp_par_stmt
), id
);
1473 copy
= gimple_build_omp_parallel
1475 gimple_omp_parallel_clauses (omp_par_stmt
),
1476 gimple_omp_parallel_child_fn (omp_par_stmt
),
1477 gimple_omp_parallel_data_arg (omp_par_stmt
));
1481 case GIMPLE_OMP_TASK
:
1482 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1483 copy
= gimple_build_omp_task
1485 gimple_omp_task_clauses (stmt
),
1486 gimple_omp_task_child_fn (stmt
),
1487 gimple_omp_task_data_arg (stmt
),
1488 gimple_omp_task_copy_fn (stmt
),
1489 gimple_omp_task_arg_size (stmt
),
1490 gimple_omp_task_arg_align (stmt
));
1493 case GIMPLE_OMP_FOR
:
1494 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1495 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1496 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1497 gimple_omp_for_clauses (stmt
),
1498 gimple_omp_for_collapse (stmt
), s2
);
1501 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1503 gimple_omp_for_set_index (copy
, i
,
1504 gimple_omp_for_index (stmt
, i
));
1505 gimple_omp_for_set_initial (copy
, i
,
1506 gimple_omp_for_initial (stmt
, i
));
1507 gimple_omp_for_set_final (copy
, i
,
1508 gimple_omp_for_final (stmt
, i
));
1509 gimple_omp_for_set_incr (copy
, i
,
1510 gimple_omp_for_incr (stmt
, i
));
1511 gimple_omp_for_set_cond (copy
, i
,
1512 gimple_omp_for_cond (stmt
, i
));
1517 case GIMPLE_OMP_MASTER
:
1518 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1519 copy
= gimple_build_omp_master (s1
);
1522 case GIMPLE_OMP_TASKGROUP
:
1523 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1524 copy
= gimple_build_omp_taskgroup (s1
);
1527 case GIMPLE_OMP_ORDERED
:
1528 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1529 copy
= gimple_build_omp_ordered
1531 gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
)));
1534 case GIMPLE_OMP_SECTION
:
1535 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1536 copy
= gimple_build_omp_section (s1
);
1539 case GIMPLE_OMP_SECTIONS
:
1540 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1541 copy
= gimple_build_omp_sections
1542 (s1
, gimple_omp_sections_clauses (stmt
));
1545 case GIMPLE_OMP_SINGLE
:
1546 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1547 copy
= gimple_build_omp_single
1548 (s1
, gimple_omp_single_clauses (stmt
));
1551 case GIMPLE_OMP_TARGET
:
1552 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1553 copy
= gimple_build_omp_target
1554 (s1
, gimple_omp_target_kind (stmt
),
1555 gimple_omp_target_clauses (stmt
));
1558 case GIMPLE_OMP_TEAMS
:
1559 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1560 copy
= gimple_build_omp_teams
1561 (s1
, gimple_omp_teams_clauses (stmt
));
1564 case GIMPLE_OMP_CRITICAL
:
1565 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1566 copy
= gimple_build_omp_critical (s1
,
1567 gimple_omp_critical_name
1568 (as_a
<gomp_critical
*> (stmt
)),
1569 gimple_omp_critical_clauses
1570 (as_a
<gomp_critical
*> (stmt
)));
1573 case GIMPLE_TRANSACTION
:
1575 gtransaction
*old_trans_stmt
= as_a
<gtransaction
*> (stmt
);
1576 gtransaction
*new_trans_stmt
;
1577 s1
= remap_gimple_seq (gimple_transaction_body (old_trans_stmt
),
1579 copy
= new_trans_stmt
= gimple_build_transaction (s1
);
1580 gimple_transaction_set_subcode (new_trans_stmt
,
1581 gimple_transaction_subcode (old_trans_stmt
));
1582 gimple_transaction_set_label_norm (new_trans_stmt
,
1583 gimple_transaction_label_norm (old_trans_stmt
));
1584 gimple_transaction_set_label_uninst (new_trans_stmt
,
1585 gimple_transaction_label_uninst (old_trans_stmt
));
1586 gimple_transaction_set_label_over (new_trans_stmt
,
1587 gimple_transaction_label_over (old_trans_stmt
));
1597 if (gimple_assign_copy_p (stmt
)
1598 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1599 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1601 /* Here we handle statements that are not completely rewritten.
1602 First we detect some inlining-induced bogosities for
1605 /* Some assignments VAR = VAR; don't generate any rtl code
1606 and thus don't count as variable modification. Avoid
1607 keeping bogosities like 0 = 0. */
1608 tree decl
= gimple_assign_lhs (stmt
), value
;
1611 n
= id
->decl_map
->get (decl
);
1615 STRIP_TYPE_NOPS (value
);
1616 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1621 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1622 in a block that we aren't copying during tree_function_versioning,
1623 just drop the clobber stmt. */
1624 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1626 tree lhs
= gimple_assign_lhs (stmt
);
1627 if (TREE_CODE (lhs
) == MEM_REF
1628 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1630 gimple
*def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1631 if (gimple_bb (def_stmt
)
1632 && !bitmap_bit_p (id
->blocks_to_copy
,
1633 gimple_bb (def_stmt
)->index
))
1638 if (gimple_debug_bind_p (stmt
))
1641 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt
),
1642 gimple_debug_bind_get_value (stmt
),
1644 id
->debug_stmts
.safe_push (copy
);
1645 gimple_seq_add_stmt (&stmts
, copy
);
1648 if (gimple_debug_source_bind_p (stmt
))
1650 gdebug
*copy
= gimple_build_debug_source_bind
1651 (gimple_debug_source_bind_get_var (stmt
),
1652 gimple_debug_source_bind_get_value (stmt
),
1654 id
->debug_stmts
.safe_push (copy
);
1655 gimple_seq_add_stmt (&stmts
, copy
);
1658 if (gimple_debug_nonbind_marker_p (stmt
))
1660 /* If the inlined function has too many debug markers,
1662 if (id
->src_cfun
->debug_marker_count
1663 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT
))
1666 gdebug
*copy
= as_a
<gdebug
*> (gimple_copy (stmt
));
1667 id
->debug_stmts
.safe_push (copy
);
1668 gimple_seq_add_stmt (&stmts
, copy
);
1671 gcc_checking_assert (!is_gimple_debug (stmt
));
1673 /* Create a new deep copy of the statement. */
1674 copy
= gimple_copy (stmt
);
1676 /* Clear flags that need revisiting. */
1677 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (copy
))
1679 if (gimple_call_tail_p (call_stmt
))
1680 gimple_call_set_tail (call_stmt
, false);
1681 if (gimple_call_from_thunk_p (call_stmt
))
1682 gimple_call_set_from_thunk (call_stmt
, false);
1683 if (gimple_call_internal_p (call_stmt
))
1684 switch (gimple_call_internal_fn (call_stmt
))
1686 case IFN_GOMP_SIMD_LANE
:
1687 case IFN_GOMP_SIMD_VF
:
1688 case IFN_GOMP_SIMD_LAST_LANE
:
1689 case IFN_GOMP_SIMD_ORDERED_START
:
1690 case IFN_GOMP_SIMD_ORDERED_END
:
1691 DECL_STRUCT_FUNCTION (id
->dst_fn
)->has_simduid_loops
= true;
1698 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1699 RESX and EH_DISPATCH. */
1701 switch (gimple_code (copy
))
1705 tree r
, fndecl
= gimple_call_fndecl (copy
);
1706 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
1707 switch (DECL_FUNCTION_CODE (fndecl
))
1709 case BUILT_IN_EH_COPY_VALUES
:
1710 r
= gimple_call_arg (copy
, 1);
1711 r
= remap_eh_region_tree_nr (r
, id
);
1712 gimple_call_set_arg (copy
, 1, r
);
1715 case BUILT_IN_EH_POINTER
:
1716 case BUILT_IN_EH_FILTER
:
1717 r
= gimple_call_arg (copy
, 0);
1718 r
= remap_eh_region_tree_nr (r
, id
);
1719 gimple_call_set_arg (copy
, 0, r
);
1726 /* Reset alias info if we didn't apply measures to
1727 keep it valid over inlining by setting DECL_PT_UID. */
1728 if (!id
->src_cfun
->gimple_df
1729 || !id
->src_cfun
->gimple_df
->ipa_pta
)
1730 gimple_call_reset_alias_info (as_a
<gcall
*> (copy
));
1736 gresx
*resx_stmt
= as_a
<gresx
*> (copy
);
1737 int r
= gimple_resx_region (resx_stmt
);
1738 r
= remap_eh_region_nr (r
, id
);
1739 gimple_resx_set_region (resx_stmt
, r
);
1743 case GIMPLE_EH_DISPATCH
:
1745 geh_dispatch
*eh_dispatch
= as_a
<geh_dispatch
*> (copy
);
1746 int r
= gimple_eh_dispatch_region (eh_dispatch
);
1747 r
= remap_eh_region_nr (r
, id
);
1748 gimple_eh_dispatch_set_region (eh_dispatch
, r
);
1757 /* If STMT has a block defined, map it to the newly constructed
1759 if (gimple_block (copy
))
1762 n
= id
->decl_map
->get (gimple_block (copy
));
1764 gimple_set_block (copy
, *n
);
1767 if (gimple_debug_bind_p (copy
) || gimple_debug_source_bind_p (copy
)
1768 || gimple_debug_nonbind_marker_p (copy
))
1770 gimple_seq_add_stmt (&stmts
, copy
);
1774 /* Remap all the operands in COPY. */
1775 memset (&wi
, 0, sizeof (wi
));
1778 walk_tree (gimple_op_ptr (copy
, 1), remap_gimple_op_r
, &wi
, NULL
);
1780 walk_gimple_op (copy
, remap_gimple_op_r
, &wi
);
1782 /* Clear the copied virtual operands. We are not remapping them here
1783 but are going to recreate them from scratch. */
1784 if (gimple_has_mem_ops (copy
))
1786 gimple_set_vdef (copy
, NULL_TREE
);
1787 gimple_set_vuse (copy
, NULL_TREE
);
1790 gimple_seq_add_stmt (&stmts
, copy
);
1795 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1799 copy_bb (copy_body_data
*id
, basic_block bb
,
1800 profile_count num
, profile_count den
)
1802 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1803 basic_block copy_basic_block
;
1807 profile_count::adjust_for_ipa_scaling (&num
, &den
);
1809 /* Search for previous copied basic block. */
1812 prev
= prev
->prev_bb
;
1814 /* create_basic_block() will append every new block to
1815 basic_block_info automatically. */
1816 copy_basic_block
= create_basic_block (NULL
, (basic_block
) prev
->aux
);
1817 copy_basic_block
->count
= bb
->count
.apply_scale (num
, den
);
1819 copy_gsi
= gsi_start_bb (copy_basic_block
);
1821 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1824 gimple
*stmt
= gsi_stmt (gsi
);
1825 gimple
*orig_stmt
= stmt
;
1826 gimple_stmt_iterator stmts_gsi
;
1827 bool stmt_added
= false;
1829 id
->regimplify
= false;
1830 stmts
= remap_gimple_stmt (stmt
, id
);
1832 if (gimple_seq_empty_p (stmts
))
1837 for (stmts_gsi
= gsi_start (stmts
);
1838 !gsi_end_p (stmts_gsi
); )
1840 stmt
= gsi_stmt (stmts_gsi
);
1842 /* Advance iterator now before stmt is moved to seq_gsi. */
1843 gsi_next (&stmts_gsi
);
1845 if (gimple_nop_p (stmt
))
1848 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
,
1851 /* With return slot optimization we can end up with
1852 non-gimple (foo *)&this->m, fix that here. */
1853 if (is_gimple_assign (stmt
)
1854 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
1855 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1858 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1859 gimple_assign_rhs1 (stmt
),
1861 GSI_CONTINUE_LINKING
);
1862 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1863 id
->regimplify
= false;
1866 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1869 gimple_regimplify_operands (stmt
, &seq_gsi
);
1877 /* If copy_basic_block has been empty at the start of this iteration,
1878 call gsi_start_bb again to get at the newly added statements. */
1879 if (gsi_end_p (copy_gsi
))
1880 copy_gsi
= gsi_start_bb (copy_basic_block
);
1882 gsi_next (©_gsi
);
1884 /* Process the new statement. The call to gimple_regimplify_operands
1885 possibly turned the statement into multiple statements, we
1886 need to process all of them. */
1892 stmt
= gsi_stmt (copy_gsi
);
1893 call_stmt
= dyn_cast
<gcall
*> (stmt
);
1895 && gimple_call_va_arg_pack_p (call_stmt
)
1897 && ! gimple_call_va_arg_pack_p (id
->call_stmt
))
1899 /* __builtin_va_arg_pack () should be replaced by
1900 all arguments corresponding to ... in the caller. */
1904 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1907 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1910 /* Create the new array of arguments. */
1911 n
= nargs
+ gimple_call_num_args (call_stmt
);
1912 argarray
.create (n
);
1913 argarray
.safe_grow_cleared (n
);
1915 /* Copy all the arguments before '...' */
1916 memcpy (argarray
.address (),
1917 gimple_call_arg_ptr (call_stmt
, 0),
1918 gimple_call_num_args (call_stmt
) * sizeof (tree
));
1920 /* Append the arguments passed in '...' */
1921 memcpy (argarray
.address () + gimple_call_num_args (call_stmt
),
1922 gimple_call_arg_ptr (id
->call_stmt
, 0)
1923 + (gimple_call_num_args (id
->call_stmt
) - nargs
),
1924 nargs
* sizeof (tree
));
1926 new_call
= gimple_build_call_vec (gimple_call_fn (call_stmt
),
1929 argarray
.release ();
1931 /* Copy all GIMPLE_CALL flags, location and block, except
1932 GF_CALL_VA_ARG_PACK. */
1933 gimple_call_copy_flags (new_call
, call_stmt
);
1934 gimple_call_set_va_arg_pack (new_call
, false);
1935 gimple_set_location (new_call
, gimple_location (stmt
));
1936 gimple_set_block (new_call
, gimple_block (stmt
));
1937 gimple_call_set_lhs (new_call
, gimple_call_lhs (call_stmt
));
1939 gsi_replace (©_gsi
, new_call
, false);
1944 && (decl
= gimple_call_fndecl (stmt
))
1945 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
1946 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_VA_ARG_PACK_LEN
1947 && ! gimple_call_va_arg_pack_p (id
->call_stmt
))
1949 /* __builtin_va_arg_pack_len () should be replaced by
1950 the number of anonymous arguments. */
1951 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1955 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1958 count
= build_int_cst (integer_type_node
, nargs
);
1959 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
), count
);
1960 gsi_replace (©_gsi
, new_stmt
, false);
1965 && gimple_call_internal_p (stmt
)
1966 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
1968 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1969 gsi_remove (©_gsi
, false);
1973 /* Statements produced by inlining can be unfolded, especially
1974 when we constant propagated some operands. We can't fold
1975 them right now for two reasons:
1976 1) folding require SSA_NAME_DEF_STMTs to be correct
1977 2) we can't change function calls to builtins.
1978 So we just mark statement for later folding. We mark
1979 all new statements, instead just statements that has changed
1980 by some nontrivial substitution so even statements made
1981 foldable indirectly are updated. If this turns out to be
1982 expensive, copy_body can be told to watch for nontrivial
1984 if (id
->statements_to_fold
)
1985 id
->statements_to_fold
->add (stmt
);
1987 /* We're duplicating a CALL_EXPR. Find any corresponding
1988 callgraph edges and update or duplicate them. */
1989 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
1991 struct cgraph_edge
*edge
;
1993 switch (id
->transform_call_graph_edges
)
1995 case CB_CGE_DUPLICATE
:
1996 edge
= id
->src_node
->get_edge (orig_stmt
);
1999 struct cgraph_edge
*old_edge
= edge
;
2000 profile_count old_cnt
= edge
->count
;
2001 edge
= edge
->clone (id
->dst_node
, call_stmt
,
2006 /* Speculative calls consist of two edges - direct and
2007 indirect. Duplicate the whole thing and distribute
2008 frequencies accordingly. */
2009 if (edge
->speculative
)
2011 struct cgraph_edge
*direct
, *indirect
;
2012 struct ipa_ref
*ref
;
2014 gcc_assert (!edge
->indirect_unknown_callee
);
2015 old_edge
->speculative_call_info (direct
, indirect
, ref
);
2017 profile_count indir_cnt
= indirect
->count
;
2018 indirect
= indirect
->clone (id
->dst_node
, call_stmt
,
2023 profile_probability prob
2024 = indir_cnt
.probability_in (old_cnt
+ indir_cnt
);
2026 = copy_basic_block
->count
.apply_probability (prob
);
2027 edge
->count
= copy_basic_block
->count
- indirect
->count
;
2028 id
->dst_node
->clone_reference (ref
, stmt
);
2031 edge
->count
= copy_basic_block
->count
;
2035 case CB_CGE_MOVE_CLONES
:
2036 id
->dst_node
->set_call_stmt_including_clones (orig_stmt
,
2038 edge
= id
->dst_node
->get_edge (stmt
);
2042 edge
= id
->dst_node
->get_edge (orig_stmt
);
2044 edge
->set_call_stmt (call_stmt
);
2051 /* Constant propagation on argument done during inlining
2052 may create new direct call. Produce an edge for it. */
2054 || (edge
->indirect_inlining_edge
2055 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
2056 && id
->dst_node
->definition
2057 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
2059 struct cgraph_node
*dest
= cgraph_node::get_create (fn
);
2061 /* We have missing edge in the callgraph. This can happen
2062 when previous inlining turned an indirect call into a
2063 direct call by constant propagating arguments or we are
2064 producing dead clone (for further cloning). In all
2065 other cases we hit a bug (incorrect node sharing is the
2066 most common reason for missing edges). */
2067 gcc_assert (!dest
->definition
2068 || dest
->address_taken
2069 || !id
->src_node
->definition
2070 || !id
->dst_node
->definition
);
2071 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
2072 id
->dst_node
->create_edge_including_clones
2073 (dest
, orig_stmt
, call_stmt
, bb
->count
,
2074 CIF_ORIGINALLY_INDIRECT_CALL
);
2076 id
->dst_node
->create_edge (dest
, call_stmt
,
2077 bb
->count
)->inline_failed
2078 = CIF_ORIGINALLY_INDIRECT_CALL
;
2081 fprintf (dump_file
, "Created new direct edge to %s\n",
2086 notice_special_calls (as_a
<gcall
*> (stmt
));
2089 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
2090 id
->eh_map
, id
->eh_lp_nr
);
2092 gsi_next (©_gsi
);
2094 while (!gsi_end_p (copy_gsi
));
2096 copy_gsi
= gsi_last_bb (copy_basic_block
);
2099 return copy_basic_block
;
2102 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2103 form is quite easy, since dominator relationship for old basic blocks does
2106 There is however exception where inlining might change dominator relation
2107 across EH edges from basic block within inlined functions destinating
2108 to landing pads in function we inline into.
2110 The function fills in PHI_RESULTs of such PHI nodes if they refer
2111 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2112 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2113 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2114 set, and this means that there will be no overlapping live ranges
2115 for the underlying symbol.
2117 This might change in future if we allow redirecting of EH edges and
2118 we might want to change way build CFG pre-inlining to include
2119 all the possible edges then. */
2121 update_ssa_across_abnormal_edges (basic_block bb
, basic_block ret_bb
,
2122 bool can_throw
, bool nonlocal_goto
)
2127 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2129 || ((basic_block
)e
->dest
->aux
)->index
== ENTRY_BLOCK
)
2135 gcc_assert (e
->flags
& EDGE_EH
);
2138 gcc_assert (!(e
->flags
& EDGE_EH
));
2140 for (si
= gsi_start_phis (e
->dest
); !gsi_end_p (si
); gsi_next (&si
))
2146 /* For abnormal goto/call edges the receiver can be the
2147 ENTRY_BLOCK. Do not assert this cannot happen. */
2149 gcc_assert ((e
->flags
& EDGE_EH
)
2150 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)));
2152 re
= find_edge (ret_bb
, e
->dest
);
2153 gcc_checking_assert (re
);
2154 gcc_assert ((re
->flags
& (EDGE_EH
| EDGE_ABNORMAL
))
2155 == (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
)));
2157 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
2158 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, re
)));
2164 /* Copy edges from BB into its copy constructed earlier, scale profile
2165 accordingly. Edges will be taken care of later. Assume aux
2166 pointers to point to the copies of each BB. Return true if any
2167 debug stmts are left after a statement that must end the basic block. */
2170 copy_edges_for_bb (basic_block bb
, profile_count num
, profile_count den
,
2171 basic_block ret_bb
, basic_block abnormal_goto_dest
,
2174 basic_block new_bb
= (basic_block
) bb
->aux
;
2177 gimple_stmt_iterator si
;
2178 bool need_debug_cleanup
= false;
2180 /* Use the indices from the original blocks to create edges for the
2182 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2183 if (!(old_edge
->flags
& EDGE_EH
))
2186 int flags
= old_edge
->flags
;
2187 location_t locus
= old_edge
->goto_locus
;
2189 /* Return edges do get a FALLTHRU flag when they get inlined. */
2190 if (old_edge
->dest
->index
== EXIT_BLOCK
2191 && !(flags
& (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
|EDGE_FAKE
))
2192 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2193 flags
|= EDGE_FALLTHRU
;
2196 = make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
2197 new_edge
->probability
= old_edge
->probability
;
2198 new_edge
->goto_locus
= remap_location (locus
, id
);
2201 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
2204 /* When doing function splitting, we must decreate count of the return block
2205 which was previously reachable by block we did not copy. */
2206 if (single_succ_p (bb
) && single_succ_edge (bb
)->dest
->index
== EXIT_BLOCK
)
2207 FOR_EACH_EDGE (old_edge
, ei
, bb
->preds
)
2208 if (old_edge
->src
->index
!= ENTRY_BLOCK
2209 && !old_edge
->src
->aux
)
2210 new_bb
->count
-= old_edge
->count ().apply_scale (num
, den
);
2212 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
2215 bool can_throw
, nonlocal_goto
;
2217 copy_stmt
= gsi_stmt (si
);
2218 if (!is_gimple_debug (copy_stmt
))
2219 update_stmt (copy_stmt
);
2221 /* Do this before the possible split_block. */
2224 /* If this tree could throw an exception, there are two
2225 cases where we need to add abnormal edge(s): the
2226 tree wasn't in a region and there is a "current
2227 region" in the caller; or the original tree had
2228 EH edges. In both cases split the block after the tree,
2229 and add abnormal edge(s) as needed; we need both
2230 those from the callee and the caller.
2231 We check whether the copy can throw, because the const
2232 propagation can change an INDIRECT_REF which throws
2233 into a COMPONENT_REF which doesn't. If the copy
2234 can throw, the original could also throw. */
2235 can_throw
= stmt_can_throw_internal (copy_stmt
);
2237 = (stmt_can_make_abnormal_goto (copy_stmt
)
2238 && !computed_goto_p (copy_stmt
));
2240 if (can_throw
|| nonlocal_goto
)
2242 if (!gsi_end_p (si
))
2244 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2247 need_debug_cleanup
= true;
2249 if (!gsi_end_p (si
))
2250 /* Note that bb's predecessor edges aren't necessarily
2251 right at this point; split_block doesn't care. */
2253 edge e
= split_block (new_bb
, copy_stmt
);
2256 new_bb
->aux
= e
->src
->aux
;
2257 si
= gsi_start_bb (new_bb
);
2261 bool update_probs
= false;
2263 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2265 make_eh_dispatch_edges (as_a
<geh_dispatch
*> (copy_stmt
));
2266 update_probs
= true;
2270 make_eh_edges (copy_stmt
);
2271 update_probs
= true;
2274 /* EH edges may not match old edges. Copy as much as possible. */
2279 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2281 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2282 if ((old_edge
->flags
& EDGE_EH
)
2283 && (e
= find_edge (copy_stmt_bb
,
2284 (basic_block
) old_edge
->dest
->aux
))
2285 && (e
->flags
& EDGE_EH
))
2286 e
->probability
= old_edge
->probability
;
2288 FOR_EACH_EDGE (e
, ei
, copy_stmt_bb
->succs
)
2289 if ((e
->flags
& EDGE_EH
) && !e
->probability
.initialized_p ())
2290 e
->probability
= profile_probability::never ();
2294 /* If the call we inline cannot make abnormal goto do not add
2295 additional abnormal edges but only retain those already present
2296 in the original function body. */
2297 if (abnormal_goto_dest
== NULL
)
2298 nonlocal_goto
= false;
2301 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2303 if (get_abnormal_succ_dispatcher (copy_stmt_bb
))
2304 nonlocal_goto
= false;
2305 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2306 in OpenMP regions which aren't allowed to be left abnormally.
2307 So, no need to add abnormal edge in that case. */
2308 else if (is_gimple_call (copy_stmt
)
2309 && gimple_call_internal_p (copy_stmt
)
2310 && (gimple_call_internal_fn (copy_stmt
)
2311 == IFN_ABNORMAL_DISPATCHER
)
2312 && gimple_call_arg (copy_stmt
, 0) == boolean_true_node
)
2313 nonlocal_goto
= false;
2315 make_single_succ_edge (copy_stmt_bb
, abnormal_goto_dest
,
2319 if ((can_throw
|| nonlocal_goto
)
2320 && gimple_in_ssa_p (cfun
))
2321 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2322 can_throw
, nonlocal_goto
);
2324 return need_debug_cleanup
;
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   was possibly split and new outgoing EH edges inserted.
   BB points to the block of original function and AUX pointers links
   the original and newly copied blocks.  */

/* NOTE(review): extraction dropped several structural lines; declarations
   and braces below were reconstructed from context — verify against
   upstream tree-inline.c.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  /* BB->aux was set by copy_cfg_body to point at the copied block.  */
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gphi *phi;
  gphi_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gphi *new_phi;

      phi = si.phi ();
      res = PHI_RESULT (phi);
      new_res = res;
      /* Virtual PHIs are not copied; the virtual SSA web is rebuilt
	 later by the caller.  */
      if (!virtual_operand_p (res))
	{
	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
	  if (EDGE_COUNT (new_bb->preds) == 0)
	    {
	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
	    }
	  else
	    {
	      new_phi = create_phi_node (new_res, new_bb);
	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
		{
		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
					     bb);
		  tree arg;
		  tree new_arg;
		  edge_iterator ei2;
		  location_t locus;

		  /* When doing partial cloning, we allow PHIs on the entry
		     block as long as all the arguments are the same.
		     Find any input edge to see argument to copy.  */
		  if (!old_edge)
		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
		      if (!old_edge->src->aux)
			break;

		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
		  new_arg = arg;
		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
		  gcc_assert (new_arg);
		  /* With return slot optimization we can end up with
		     non-gimple (foo *)&this->m, fix that here.  */
		  if (TREE_CODE (new_arg) != SSA_NAME
		      && TREE_CODE (new_arg) != FUNCTION_DECL
		      && !is_gimple_val (new_arg))
		    {
		      gimple_seq stmts = NULL;
		      new_arg = force_gimple_operand (new_arg, &stmts, true,
						      NULL);
		      /* Delay the actual insertion until all edges of the
			 new block were processed; committed below.  */
		      gsi_insert_seq_on_edge (new_edge, stmts);
		      inserted = true;
		    }
		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
		  locus = remap_location (locus, id);
		  add_phi_arg (new_phi, new_arg, new_edge, locus);
		}
	    }
	}
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}
/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  /* DATA is the copy_body_data of the current clone/inline operation;
     callers (e.g. duplicate_eh_regions) pass it through as void *.  */
  return remap_decl (decl, (copy_body_data *) data);
}
/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be build.  CALLEE_FNDECL is the original.  Function changes
   the cfun to the function of new_fndecl (and current_function_decl too).  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  /* Share arguments/result with the source decl unless the clone
     already has its own.  */
  if (!DECL_ARGUMENTS (new_fndecl))
    DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
  if (!DECL_RESULT (new_fndecl))
    DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  Note this pushes cfun and
     current_function_decl; the caller is responsible for popping.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;

  init_empty_tree_cfg ();

  profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);

  /* Scale the clone's entry/exit counts so the body keeps the same
     relative profile: new = src_count * (count / src_entry_count).  */
  profile_count num = count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
  profile_count::adjust_for_ipa_scaling (&num, &den);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
  /* NOTE(review): the guard below was lost in extraction and reconstructed
     — presumably `if (src_cfun->eh)`; confirm against upstream.  */
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
      if (cfun->gimple_df->in_ssa_p)
	init_ssa_operands (cfun);
    }
}
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge_iterator ei;
  edge e;

  /* Only act when the last real stmt may transfer control mid-block
     (internal throw or abnormal goto): debug stmts that ended up after
     it must follow the control flow into the successors.  */
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple *stmt = gsi_stmt (ssi);
	  gdebug *new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      /* A bind reaching a multi-predecessor block may be wrong on
		 other paths; reset its value rather than lie.  */
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		{
		  gimple_debug_bind_reset_value (stmt);
		  gimple_set_location (stmt, UNKNOWN_LOCATION);
		}
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  value = unshare_expr (value);
		  new_stmt = gimple_build_debug_bind (var, value, stmt);
		}
	      else
		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else if (gimple_debug_nonbind_marker_p (stmt))
	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
	  /* Record the copy so copy_debug_stmts remaps it later.  */
	  id->debug_stmts.safe_push (new_stmt);
	  gsi_prev (&ssi);
	}
    }
}
/* Make a copy of the sub-loops of SRC_PARENT and place them
   as siblings of DEST_PARENT.  */

static void
copy_loops (copy_body_data *id,
	    struct loop *dest_parent, struct loop *src_parent)
{
  struct loop *src_loop = src_parent->inner;
  while (src_loop)
    {
      /* Skip loops whose header was not copied (partial cloning).  */
      if (!id->blocks_to_copy
	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
	{
	  struct loop *dest_loop = alloc_loop ();

	  /* Assign the new loop its header and latch and associate
	     those with the new loop.  The ->aux pointers map original
	     blocks to their copies (set up in copy_cfg_body).  */
	  dest_loop->header = (basic_block)src_loop->header->aux;
	  dest_loop->header->loop_father = dest_loop;
	  if (src_loop->latch != NULL)
	    {
	      dest_loop->latch = (basic_block)src_loop->latch->aux;
	      dest_loop->latch->loop_father = dest_loop;
	    }

	  /* Copy loop meta-data.  */
	  copy_loop_info (src_loop, dest_loop);

	  /* Finally place it into the loop array and the loop tree.  */
	  place_new_loop (cfun, dest_loop);
	  flow_loop_tree_node_add (dest_parent, dest_loop);

	  dest_loop->safelen = src_loop->safelen;
	  if (src_loop->unroll)
	    {
	      dest_loop->unroll = src_loop->unroll;
	      cfun->has_unroll = true;
	    }
	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
	  if (src_loop->force_vectorize)
	    {
	      dest_loop->force_vectorize = true;
	      cfun->has_force_vectorize_loops = true;
	    }
	  if (src_loop->simduid)
	    {
	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
	      cfun->has_simduid_loops = true;
	    }

	  /* Recurse into the copied loop's children.  */
	  copy_loops (id, dest_loop, src_loop);
	}
      src_loop = src_loop->next;
    }
}
/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */

void
redirect_all_calls (copy_body_data * id, basic_block bb)
{
  gimple_stmt_iterator si;
  gimple *last = last_stmt (bb);
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      if (is_gimple_call (stmt))
	{
	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
	  if (edge)
	    {
	      edge->redirect_call_stmt_to_callee ();
	      /* If the redirected call at the end of the block can no
		 longer throw, its EH edges are dead; purge them.  */
	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
		gimple_purge_dead_eh_edges (bb);
	    }
	}
    }
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

/* NOTE(review): several guard/brace lines were lost in extraction and
   reconstructed from context — verify against upstream tree-inline.c.  */

static tree
copy_cfg_body (copy_body_data * id,
	       basic_block entry_block_map, basic_block exit_block_map,
	       basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  int last;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
  profile_count num = entry_block_map->count;

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just region of the function, make sure to connect
     new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since new entry can be
     part of loop, we must compute frequency and probability of
     ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;
      den = profile_count::zero ();

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  den += e->count ();
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
    }

  profile_count::adjust_for_ipa_scaling (&num, &den);

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  /* Link original blocks and their copies through the AUX fields,
     in both directions; torn down again at the end.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, num, den);
	bb->aux = new_bb;
	new_bb->aux = bb;
	new_bb->loop_father = entry_block_map->loop_father;
      }

  /* Remember the boundary: blocks created past this index are new
     (e.g. from EH edge insertion) and get their AUX cleared below.  */
  last = last_basic_block_for_fn (cfun);

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  basic_block abnormal_goto_dest = NULL;
  if (id->call_stmt
      && stmt_can_make_abnormal_goto (id->call_stmt))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);

      bb = gimple_bb (id->call_stmt);
      gsi_next (&gsi);
      if (gsi_end_p (gsi))
	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
    }
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
					       abnormal_goto_dest, id);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
			  EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
    }

  /* Duplicate the loop tree, if available and wanted.  */
  if (loops_for_fn (src_cfun) != NULL
      && current_loops != NULL)
    {
      copy_loops (id, entry_block_map->loop_father,
		  get_loop (src_cfun, 0));
      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* If the loop tree in the source function needed fixup, mark the
     destination loop tree for fixup, too.  */
  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!id->blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This can not be done before loop
	   info is updated, because we may split basic blocks.  */
	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  redirect_all_calls (id, (basic_block)bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created block during EH edge
     insertion.  */
  for (; last < last_basic_block_for_fn (cfun); last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id,
					      BASIC_BLOCK_FOR_FN (cfun, last));
      BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
      /* Update call edge destinations.  This can not be done before loop
	 info is updated, because we may split basic blocks.  */
      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      delete id->eh_map;
      id->eh_map = NULL;
    }
  if (id->dependence_map)
    {
      delete id->dependence_map;
      id->dependence_map = NULL;
    }

  return new_fndecl;
}
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  if (gimple_block (stmt))
    {
      n = id->decl_map->get (gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Nonbind markers carry no variable/value to remap.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    return;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  /* Flag consulted by remap_gimple_op_r: set to -1 there if a decl
     could not be remapped, checked below to drop the value.  */
  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else if (gimple_debug_bind_p (stmt))
    t = gimple_debug_bind_get_var (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      gcc_assert (VAR_P (*n));
      t = *n;
    }
  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and source bind refers to one of the optimized
	 away parameters, change the source bind into normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      /* DEBUG_ARGS alternates (origin decl, DEBUG_EXPR_DECL)
		 pairs, hence the stride of 2.  */
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
      /* Still a source bind (no DEBUG_EXPR_DECL found)?  Remap its
	 value normally.  */
      if (gimple_debug_source_bind_p (stmt))
	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}
/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gdebug *stmt;

  if (!id->debug_stmts.exists ())
    return;

  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  /* The queue is consumed; release its storage.  */
  id->debug_stmts.release ();
}
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  /* Remap the whole GENERIC body in place via the tree walker.  */
  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, entry_block_map, exit_block_map,
			new_entry);
  /* Debug stmts were queued during the CFG copy; remap them last so
     their referenced decls have the best chance of being mapped.  */
  copy_debug_stmts (id);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  Returns the built
   debug bind (or NULL when no bind is emitted).  */

static gimple *
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple *base_stmt)
{
  gimple *note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);

  if (bb)
    {
      /* Insert after the block's last stmt, or at its start when the
	 block is empty.  A NULL BB returns the bind to the caller.  */
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
/* Insert INIT_STMT at the end of BB, regimplifying it and emitting a
   matching debug bind for its LHS when appropriate.  */

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt))
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
/* Initialize parameter P with VALUE.  If needed, produce init statement
   at the end of BB.  When BB is NULL, we return init statement to be
   output later.  */

/* NOTE(review): a number of structural lines were lost in extraction and
   reconstructed from context — verify against upstream tree-inline.c.  */

static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple *init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not big deal to prohibit constant propagation here as
	 we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct map for the variable anyway as it might be used
     in different SSA names when parameter is set in function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other construct requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
         or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      /* A call may supply fewer args than the decl has parms (e.g. K&R
	 mismatches); missing ones get NULL.  */
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp && VAR_P (*varp))
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = id->decl_map->get (def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null is place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

/* NOTE(review): several guard/brace lines (including the `done:` label
   region) were lost in extraction and reconstructed from context —
   verify against upstream tree-inline.c.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  if (TREE_ADDRESSABLE (result))
	    mark_addressable (var);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	    DECL_GIMPLE_REG_P (var) = false;
	  else if (is_gimple_reg_type (TREE_TYPE (var)))
	    TREE_ADDRESSABLE (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 it's default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	{
	  temp = make_ssa_name (temp);
	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
	}
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;
  return use;
}
3470 /* Determine if the function can be copied. If so return NULL. If
3471 not return a string describng the reason for failure. */
3474 copy_forbidden (struct function
*fun
)
3476 const char *reason
= fun
->cannot_be_copied_reason
;
3478 /* Only examine the function once. */
3479 if (fun
->cannot_be_copied_set
)
3482 /* We cannot copy a function that receives a non-local goto
3483 because we cannot remap the destination label used in the
3484 function that is performing the non-local goto. */
3485 /* ??? Actually, this should be possible, if we work at it.
3486 No doubt there's just a handful of places that simply
3487 assume it doesn't happen and don't substitute properly. */
3488 if (fun
->has_nonlocal_label
)
3490 reason
= G_("function %q+F can never be copied "
3491 "because it receives a non-local goto");
3495 if (fun
->has_forced_label_in_static
)
3497 reason
= G_("function %q+F can never be copied because it saves "
3498 "address of local label in a static variable");
3503 fun
->cannot_be_copied_reason
= reason
;
3504 fun
->cannot_be_copied_set
= true;
3509 static const char *inline_forbidden_reason
;
3511 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3512 iff a function can not be inlined. Also sets the reason why. */
3515 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3516 struct walk_stmt_info
*wip
)
3518 tree fn
= (tree
) wip
->info
;
3520 gimple
*stmt
= gsi_stmt (*gsi
);
3522 switch (gimple_code (stmt
))
3525 /* Refuse to inline alloca call unless user explicitly forced so as
3526 this may change program's memory overhead drastically when the
3527 function using alloca is called in loop. In GCC present in
3528 SPEC2000 inlining into schedule_block cause it to require 2GB of
3529 RAM instead of 256MB. Don't do so for alloca calls emitted for
3530 VLA objects as those can't cause unbounded growth (they're always
3531 wrapped inside stack_save/stack_restore regions. */
3532 if (gimple_maybe_alloca_call_p (stmt
)
3533 && !gimple_call_alloca_for_var_p (as_a
<gcall
*> (stmt
))
3534 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3536 inline_forbidden_reason
3537 = G_("function %q+F can never be inlined because it uses "
3538 "alloca (override using the always_inline attribute)");
3539 *handled_ops_p
= true;
3543 t
= gimple_call_fndecl (stmt
);
3547 /* We cannot inline functions that call setjmp. */
3548 if (setjmp_call_p (t
))
3550 inline_forbidden_reason
3551 = G_("function %q+F can never be inlined because it uses setjmp");
3552 *handled_ops_p
= true;
3556 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3557 switch (DECL_FUNCTION_CODE (t
))
3559 /* We cannot inline functions that take a variable number of
3561 case BUILT_IN_VA_START
:
3562 case BUILT_IN_NEXT_ARG
:
3563 case BUILT_IN_VA_END
:
3564 inline_forbidden_reason
3565 = G_("function %q+F can never be inlined because it "
3566 "uses variable argument lists");
3567 *handled_ops_p
= true;
3570 case BUILT_IN_LONGJMP
:
3571 /* We can't inline functions that call __builtin_longjmp at
3572 all. The non-local goto machinery really requires the
3573 destination be in a different function. If we allow the
3574 function calling __builtin_longjmp to be inlined into the
3575 function calling __builtin_setjmp, Things will Go Awry. */
3576 inline_forbidden_reason
3577 = G_("function %q+F can never be inlined because "
3578 "it uses setjmp-longjmp exception handling");
3579 *handled_ops_p
= true;
3582 case BUILT_IN_NONLOCAL_GOTO
:
3584 inline_forbidden_reason
3585 = G_("function %q+F can never be inlined because "
3586 "it uses non-local goto");
3587 *handled_ops_p
= true;
3590 case BUILT_IN_RETURN
:
3591 case BUILT_IN_APPLY_ARGS
:
3592 /* If a __builtin_apply_args caller would be inlined,
3593 it would be saving arguments of the function it has
3594 been inlined into. Similarly __builtin_return would
3595 return from the function the inline has been inlined into. */
3596 inline_forbidden_reason
3597 = G_("function %q+F can never be inlined because "
3598 "it uses __builtin_return or __builtin_apply_args");
3599 *handled_ops_p
= true;
3608 t
= gimple_goto_dest (stmt
);
3610 /* We will not inline a function which uses computed goto. The
3611 addresses of its local labels, which may be tucked into
3612 global storage, are of course not constant across
3613 instantiations, which causes unexpected behavior. */
3614 if (TREE_CODE (t
) != LABEL_DECL
)
3616 inline_forbidden_reason
3617 = G_("function %q+F can never be inlined "
3618 "because it contains a computed goto");
3619 *handled_ops_p
= true;
3628 *handled_ops_p
= false;
3632 /* Return true if FNDECL is a function that cannot be inlined into
3636 inline_forbidden_p (tree fndecl
)
3638 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3639 struct walk_stmt_info wi
;
3641 bool forbidden_p
= false;
3643 /* First check for shared reasons not to copy the code. */
3644 inline_forbidden_reason
= copy_forbidden (fun
);
3645 if (inline_forbidden_reason
!= NULL
)
3648 /* Next, walk the statements of the function looking for
3649 constraucts we can't handle, or are non-optimal for inlining. */
3650 hash_set
<tree
> visited_nodes
;
3651 memset (&wi
, 0, sizeof (wi
));
3652 wi
.info
= (void *) fndecl
;
3653 wi
.pset
= &visited_nodes
;
3655 FOR_EACH_BB_FN (bb
, fun
)
3658 gimple_seq seq
= bb_seq (bb
);
3659 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3660 forbidden_p
= (ret
!= NULL
);
3668 /* Return false if the function FNDECL cannot be inlined on account of its
3669 attributes, true otherwise. */
3671 function_attribute_inlinable_p (const_tree fndecl
)
3673 if (targetm
.attribute_table
)
3677 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3679 const_tree name
= TREE_PURPOSE (a
);
3682 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3683 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3684 return targetm
.function_attribute_inlinable_p (fndecl
);
3691 /* Returns nonzero if FN is a function that does not have any
3692 fundamental inline blocking properties. */
3695 tree_inlinable_function_p (tree fn
)
3697 bool inlinable
= true;
3701 /* If we've already decided this function shouldn't be inlined,
3702 there's no need to check again. */
3703 if (DECL_UNINLINABLE (fn
))
3706 /* We only warn for functions declared `inline' by the user. */
3707 do_warning
= (warn_inline
3708 && DECL_DECLARED_INLINE_P (fn
)
3709 && !DECL_NO_INLINE_WARNING_P (fn
)
3710 && !DECL_IN_SYSTEM_HEADER (fn
));
3712 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3715 && always_inline
== NULL
)
3718 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3719 "is suppressed using -fno-inline", fn
);
3723 else if (!function_attribute_inlinable_p (fn
))
3726 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3727 "uses attributes conflicting with inlining", fn
);
3731 else if (inline_forbidden_p (fn
))
3733 /* See if we should warn about uninlinable functions. Previously,
3734 some of these warnings would be issued while trying to expand
3735 the function inline, but that would cause multiple warnings
3736 about functions that would for example call alloca. But since
3737 this a property of the function, just one warning is enough.
3738 As a bonus we can now give more details about the reason why a
3739 function is not inlinable. */
3741 error (inline_forbidden_reason
, fn
);
3742 else if (do_warning
)
3743 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3748 /* Squirrel away the result so that we don't have to check again. */
3749 DECL_UNINLINABLE (fn
) = !inlinable
;
3754 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
3755 word size and take possible memcpy call into account and return
3756 cost based on whether optimizing for size or speed according to SPEED_P. */
3759 estimate_move_cost (tree type
, bool ARG_UNUSED (speed_p
))
3763 gcc_assert (!VOID_TYPE_P (type
));
3765 if (TREE_CODE (type
) == VECTOR_TYPE
)
3767 scalar_mode inner
= SCALAR_TYPE_MODE (TREE_TYPE (type
));
3768 machine_mode simd
= targetm
.vectorize
.preferred_simd_mode (inner
);
3770 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type
)));
3771 int simd_mode_size
= estimated_poly_value (GET_MODE_SIZE (simd
));
3772 return ((orig_mode_size
+ simd_mode_size
- 1)
3776 size
= int_size_in_bytes (type
);
3778 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (speed_p
))
3779 /* Cost of a memcpy call, 3 arguments and the call. */
3782 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
3785 /* Returns cost of operation CODE, according to WEIGHTS */
3788 estimate_operator_cost (enum tree_code code
, eni_weights
*weights
,
3789 tree op1 ATTRIBUTE_UNUSED
, tree op2
)
3793 /* These are "free" conversions, or their presumed cost
3794 is folded into other operations. */
3799 case VIEW_CONVERT_EXPR
:
3802 /* Assign cost of 1 to usual operations.
3803 ??? We may consider mapping RTL costs to this. */
3809 case POINTER_PLUS_EXPR
:
3810 case POINTER_DIFF_EXPR
:
3813 case MULT_HIGHPART_EXPR
:
3815 case ADDR_SPACE_CONVERT_EXPR
:
3816 case FIXED_CONVERT_EXPR
:
3817 case FIX_TRUNC_EXPR
:
3836 case TRUTH_ANDIF_EXPR
:
3837 case TRUTH_ORIF_EXPR
:
3838 case TRUTH_AND_EXPR
:
3840 case TRUTH_XOR_EXPR
:
3841 case TRUTH_NOT_EXPR
:
3850 case UNORDERED_EXPR
:
3861 case PREDECREMENT_EXPR
:
3862 case PREINCREMENT_EXPR
:
3863 case POSTDECREMENT_EXPR
:
3864 case POSTINCREMENT_EXPR
:
3866 case REALIGN_LOAD_EXPR
:
3868 case WIDEN_SUM_EXPR
:
3869 case WIDEN_MULT_EXPR
:
3872 case WIDEN_MULT_PLUS_EXPR
:
3873 case WIDEN_MULT_MINUS_EXPR
:
3874 case WIDEN_LSHIFT_EXPR
:
3876 case VEC_WIDEN_MULT_HI_EXPR
:
3877 case VEC_WIDEN_MULT_LO_EXPR
:
3878 case VEC_WIDEN_MULT_EVEN_EXPR
:
3879 case VEC_WIDEN_MULT_ODD_EXPR
:
3880 case VEC_UNPACK_HI_EXPR
:
3881 case VEC_UNPACK_LO_EXPR
:
3882 case VEC_UNPACK_FLOAT_HI_EXPR
:
3883 case VEC_UNPACK_FLOAT_LO_EXPR
:
3884 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
3885 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
3886 case VEC_PACK_TRUNC_EXPR
:
3887 case VEC_PACK_SAT_EXPR
:
3888 case VEC_PACK_FIX_TRUNC_EXPR
:
3889 case VEC_PACK_FLOAT_EXPR
:
3890 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3891 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3892 case VEC_DUPLICATE_EXPR
:
3893 case VEC_SERIES_EXPR
:
3897 /* Few special cases of expensive operations. This is useful
3898 to avoid inlining on functions having too many of these. */
3899 case TRUNC_DIV_EXPR
:
3901 case FLOOR_DIV_EXPR
:
3902 case ROUND_DIV_EXPR
:
3903 case EXACT_DIV_EXPR
:
3904 case TRUNC_MOD_EXPR
:
3906 case FLOOR_MOD_EXPR
:
3907 case ROUND_MOD_EXPR
:
3909 if (TREE_CODE (op2
) != INTEGER_CST
)
3910 return weights
->div_mod_cost
;
3913 /* Bit-field insertion needs several shift and mask operations. */
3914 case BIT_INSERT_EXPR
:
3918 /* We expect a copy assignment with no operator. */
3919 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
);
3925 /* Estimate number of instructions that will be created by expanding
3926 the statements in the statement sequence STMTS.
3927 WEIGHTS contains weights attributed to various constructs. */
3930 estimate_num_insns_seq (gimple_seq stmts
, eni_weights
*weights
)
3933 gimple_stmt_iterator gsi
;
3936 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3937 cost
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
3943 /* Estimate number of instructions that will be created by expanding STMT.
3944 WEIGHTS contains weights attributed to various constructs. */
3947 estimate_num_insns (gimple
*stmt
, eni_weights
*weights
)
3950 enum gimple_code code
= gimple_code (stmt
);
3957 /* Try to estimate the cost of assignments. We have three cases to
3959 1) Simple assignments to registers;
3960 2) Stores to things that must live in memory. This includes
3961 "normal" stores to scalars, but also assignments of large
3962 structures, or constructors of big arrays;
3964 Let us look at the first two cases, assuming we have "a = b + C":
3965 <GIMPLE_ASSIGN <var_decl "a">
3966 <plus_expr <var_decl "b"> <constant C>>
3967 If "a" is a GIMPLE register, the assignment to it is free on almost
3968 any target, because "a" usually ends up in a real register. Hence
3969 the only cost of this expression comes from the PLUS_EXPR, and we
3970 can ignore the GIMPLE_ASSIGN.
3971 If "a" is not a GIMPLE register, the assignment to "a" will most
3972 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3973 of moving something into "a", which we compute using the function
3974 estimate_move_cost. */
3975 if (gimple_clobber_p (stmt
))
3976 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3978 lhs
= gimple_assign_lhs (stmt
);
3979 rhs
= gimple_assign_rhs1 (stmt
);
3983 /* Account for the cost of moving to / from memory. */
3984 if (gimple_store_p (stmt
))
3985 cost
+= estimate_move_cost (TREE_TYPE (lhs
), weights
->time_based
);
3986 if (gimple_assign_load_p (stmt
))
3987 cost
+= estimate_move_cost (TREE_TYPE (rhs
), weights
->time_based
);
3989 cost
+= estimate_operator_cost (gimple_assign_rhs_code (stmt
), weights
,
3990 gimple_assign_rhs1 (stmt
),
3991 get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
3992 == GIMPLE_BINARY_RHS
3993 ? gimple_assign_rhs2 (stmt
) : NULL
);
3997 cost
= 1 + estimate_operator_cost (gimple_cond_code (stmt
), weights
,
3998 gimple_op (stmt
, 0),
3999 gimple_op (stmt
, 1));
4004 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
4005 /* Take into account cost of the switch + guess 2 conditional jumps for
4008 TODO: once the switch expansion logic is sufficiently separated, we can
4009 do better job on estimating cost of the switch. */
4010 if (weights
->time_based
)
4011 cost
= floor_log2 (gimple_switch_num_labels (switch_stmt
)) * 2;
4013 cost
= gimple_switch_num_labels (switch_stmt
) * 2;
4021 if (gimple_call_internal_p (stmt
))
4023 else if ((decl
= gimple_call_fndecl (stmt
))
4024 && DECL_BUILT_IN (decl
))
4026 /* Do not special case builtins where we see the body.
4027 This just confuse inliner. */
4028 struct cgraph_node
*node
;
4029 if (!(node
= cgraph_node::get (decl
))
4030 || node
->definition
)
4032 /* For buitins that are likely expanded to nothing or
4033 inlined do not account operand costs. */
4034 else if (is_simple_builtin (decl
))
4036 else if (is_inexpensive_builtin (decl
))
4037 return weights
->target_builtin_call_cost
;
4038 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4040 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4041 specialize the cheap expansion we do here.
4042 ??? This asks for a more general solution. */
4043 switch (DECL_FUNCTION_CODE (decl
))
4048 if (TREE_CODE (gimple_call_arg (stmt
, 1)) == REAL_CST
4050 (&TREE_REAL_CST (gimple_call_arg (stmt
, 1)),
4052 return estimate_operator_cost
4053 (MULT_EXPR
, weights
, gimple_call_arg (stmt
, 0),
4054 gimple_call_arg (stmt
, 0));
4063 cost
= decl
? weights
->call_cost
: weights
->indirect_call_cost
;
4064 if (gimple_call_lhs (stmt
))
4065 cost
+= estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt
)),
4066 weights
->time_based
);
4067 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4069 tree arg
= gimple_call_arg (stmt
, i
);
4070 cost
+= estimate_move_cost (TREE_TYPE (arg
),
4071 weights
->time_based
);
4077 return weights
->return_cost
;
4083 case GIMPLE_PREDICT
:
4089 int count
= asm_str_count (gimple_asm_string (as_a
<gasm
*> (stmt
)));
4090 /* 1000 means infinity. This avoids overflows later
4091 with very long asm statements. */
4094 return MAX (1, count
);
4098 /* This is either going to be an external function call with one
4099 argument, or two register copy statements plus a goto. */
4102 case GIMPLE_EH_DISPATCH
:
4103 /* ??? This is going to turn into a switch statement. Ideally
4104 we'd have a look at the eh region and estimate the number of
4109 return estimate_num_insns_seq (
4110 gimple_bind_body (as_a
<gbind
*> (stmt
)),
4113 case GIMPLE_EH_FILTER
:
4114 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt
), weights
);
4117 return estimate_num_insns_seq (gimple_catch_handler (
4118 as_a
<gcatch
*> (stmt
)),
4122 return (estimate_num_insns_seq (gimple_try_eval (stmt
), weights
)
4123 + estimate_num_insns_seq (gimple_try_cleanup (stmt
), weights
));
4125 /* OMP directives are generally very expensive. */
4127 case GIMPLE_OMP_RETURN
:
4128 case GIMPLE_OMP_SECTIONS_SWITCH
:
4129 case GIMPLE_OMP_ATOMIC_STORE
:
4130 case GIMPLE_OMP_CONTINUE
:
4131 /* ...except these, which are cheap. */
4134 case GIMPLE_OMP_ATOMIC_LOAD
:
4135 return weights
->omp_cost
;
4137 case GIMPLE_OMP_FOR
:
4138 return (weights
->omp_cost
4139 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
)
4140 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt
), weights
));
4142 case GIMPLE_OMP_PARALLEL
:
4143 case GIMPLE_OMP_TASK
:
4144 case GIMPLE_OMP_CRITICAL
:
4145 case GIMPLE_OMP_MASTER
:
4146 case GIMPLE_OMP_TASKGROUP
:
4147 case GIMPLE_OMP_ORDERED
:
4148 case GIMPLE_OMP_SECTION
:
4149 case GIMPLE_OMP_SECTIONS
:
4150 case GIMPLE_OMP_SINGLE
:
4151 case GIMPLE_OMP_TARGET
:
4152 case GIMPLE_OMP_TEAMS
:
4153 return (weights
->omp_cost
4154 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
));
4156 case GIMPLE_TRANSACTION
:
4157 return (weights
->tm_cost
4158 + estimate_num_insns_seq (gimple_transaction_body (
4159 as_a
<gtransaction
*> (stmt
)),
4169 /* Estimate number of instructions that will be created by expanding
4170 function FNDECL. WEIGHTS contains weights attributed to various
4174 estimate_num_insns_fn (tree fndecl
, eni_weights
*weights
)
4176 struct function
*my_function
= DECL_STRUCT_FUNCTION (fndecl
);
4177 gimple_stmt_iterator bsi
;
4181 gcc_assert (my_function
&& my_function
->cfg
);
4182 FOR_EACH_BB_FN (bb
, my_function
)
4184 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
4185 n
+= estimate_num_insns (gsi_stmt (bsi
), weights
);
4192 /* Initializes weights used by estimate_num_insns. */
4195 init_inline_once (void)
4197 eni_size_weights
.call_cost
= 1;
4198 eni_size_weights
.indirect_call_cost
= 3;
4199 eni_size_weights
.target_builtin_call_cost
= 1;
4200 eni_size_weights
.div_mod_cost
= 1;
4201 eni_size_weights
.omp_cost
= 40;
4202 eni_size_weights
.tm_cost
= 10;
4203 eni_size_weights
.time_based
= false;
4204 eni_size_weights
.return_cost
= 1;
4206 /* Estimating time for call is difficult, since we have no idea what the
4207 called function does. In the current uses of eni_time_weights,
4208 underestimating the cost does less harm than overestimating it, so
4209 we choose a rather small value here. */
4210 eni_time_weights
.call_cost
= 10;
4211 eni_time_weights
.indirect_call_cost
= 15;
4212 eni_time_weights
.target_builtin_call_cost
= 1;
4213 eni_time_weights
.div_mod_cost
= 10;
4214 eni_time_weights
.omp_cost
= 40;
4215 eni_time_weights
.tm_cost
= 40;
4216 eni_time_weights
.time_based
= true;
4217 eni_time_weights
.return_cost
= 2;
4221 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4224 prepend_lexical_block (tree current_block
, tree new_block
)
4226 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (current_block
);
4227 BLOCK_SUBBLOCKS (current_block
) = new_block
;
4228 BLOCK_SUPERCONTEXT (new_block
) = current_block
;
4231 /* Add local variables from CALLEE to CALLER. */
4234 add_local_variables (struct function
*callee
, struct function
*caller
,
4240 FOR_EACH_LOCAL_DECL (callee
, ix
, var
)
4241 if (!can_be_nonlocal (var
, id
))
4243 tree new_var
= remap_decl (var
, id
);
4245 /* Remap debug-expressions. */
4247 && DECL_HAS_DEBUG_EXPR_P (var
)
4250 tree tem
= DECL_DEBUG_EXPR (var
);
4251 bool old_regimplify
= id
->regimplify
;
4252 id
->remapping_type_depth
++;
4253 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
4254 id
->remapping_type_depth
--;
4255 id
->regimplify
= old_regimplify
;
4256 SET_DECL_DEBUG_EXPR (new_var
, tem
);
4257 DECL_HAS_DEBUG_EXPR_P (new_var
) = 1;
4259 add_local_decl (caller
, new_var
);
4263 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4264 have brought in or introduced any debug stmts for SRCVAR. */
4267 reset_debug_binding (copy_body_data
*id
, tree srcvar
, gimple_seq
*bindings
)
4269 tree
*remappedvarp
= id
->decl_map
->get (srcvar
);
4274 if (!VAR_P (*remappedvarp
))
4277 if (*remappedvarp
== id
->retvar
|| *remappedvarp
== id
->retbnd
)
4280 tree tvar
= target_for_debug_bind (*remappedvarp
);
4284 gdebug
*stmt
= gimple_build_debug_bind (tvar
, NULL_TREE
,
4286 gimple_seq_add_stmt (bindings
, stmt
);
4289 /* For each inlined variable for which we may have debug bind stmts,
4290 add before GSI a final debug stmt resetting it, marking the end of
4291 its life, so that var-tracking knows it doesn't have to compute
4292 further locations for it. */
4295 reset_debug_bindings (copy_body_data
*id
, gimple_stmt_iterator gsi
)
4299 gimple_seq bindings
= NULL
;
4301 if (!gimple_in_ssa_p (id
->src_cfun
))
4304 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
4307 for (var
= DECL_ARGUMENTS (id
->src_fn
);
4308 var
; var
= DECL_CHAIN (var
))
4309 reset_debug_binding (id
, var
, &bindings
);
4311 FOR_EACH_LOCAL_DECL (id
->src_cfun
, ix
, var
)
4312 reset_debug_binding (id
, var
, &bindings
);
4314 gsi_insert_seq_before_without_update (&gsi
, bindings
, GSI_SAME_STMT
);
4317 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4320 expand_call_inline (basic_block bb
, gimple
*stmt
, copy_body_data
*id
)
4324 hash_map
<tree
, tree
> *dst
;
4325 hash_map
<tree
, tree
> *st
= NULL
;
4328 struct cgraph_edge
*cg_edge
;
4329 cgraph_inline_failed_t reason
;
4330 basic_block return_block
;
4332 gimple_stmt_iterator gsi
, stmt_gsi
;
4333 bool successfully_inlined
= false;
4334 bool purge_dead_abnormal_edges
;
4336 unsigned int prop_mask
, src_properties
;
4337 struct function
*dst_cfun
;
4340 gimple
*simtenter_stmt
= NULL
;
4341 vec
<tree
> *simtvars_save
;
4343 /* The gimplifier uses input_location in too many places, such as
4344 internal_get_tmp_var (). */
4345 location_t saved_location
= input_location
;
4346 input_location
= gimple_location (stmt
);
4348 /* From here on, we're only interested in CALL_EXPRs. */
4349 call_stmt
= dyn_cast
<gcall
*> (stmt
);
4353 cg_edge
= id
->dst_node
->get_edge (stmt
);
4354 gcc_checking_assert (cg_edge
);
4355 /* First, see if we can figure out what function is being called.
4356 If we cannot, then there is no hope of inlining the function. */
4357 if (cg_edge
->indirect_unknown_callee
)
4359 fn
= cg_edge
->callee
->decl
;
4360 gcc_checking_assert (fn
);
4362 /* If FN is a declaration of a function in a nested scope that was
4363 globally declared inline, we don't set its DECL_INITIAL.
4364 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4365 C++ front-end uses it for cdtors to refer to their internal
4366 declarations, that are not real functions. Fortunately those
4367 don't have trees to be saved, so we can tell by checking their
4369 if (!DECL_INITIAL (fn
)
4370 && DECL_ABSTRACT_ORIGIN (fn
)
4371 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn
)))
4372 fn
= DECL_ABSTRACT_ORIGIN (fn
);
4374 /* Don't try to inline functions that are not well-suited to inlining. */
4375 if (cg_edge
->inline_failed
)
4377 reason
= cg_edge
->inline_failed
;
4378 /* If this call was originally indirect, we do not want to emit any
4379 inlining related warnings or sorry messages because there are no
4380 guarantees regarding those. */
4381 if (cg_edge
->indirect_inlining_edge
)
4384 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
))
4385 /* For extern inline functions that get redefined we always
4386 silently ignored always_inline flag. Better behavior would
4387 be to be able to keep both bodies and use extern inline body
4388 for inlining, but we can't do that because frontends overwrite
4390 && !cg_edge
->callee
->local
.redefined_extern_inline
4391 /* During early inline pass, report only when optimization is
4393 && (symtab
->global_info_ready
4395 || cgraph_inline_failed_type (reason
) == CIF_FINAL_ERROR
)
4396 /* PR 20090218-1_0.c. Body can be provided by another module. */
4397 && (reason
!= CIF_BODY_NOT_AVAILABLE
|| !flag_generate_lto
))
4399 error ("inlining failed in call to always_inline %q+F: %s", fn
,
4400 cgraph_inline_failed_string (reason
));
4401 if (gimple_location (stmt
) != UNKNOWN_LOCATION
)
4402 inform (gimple_location (stmt
), "called from here");
4403 else if (DECL_SOURCE_LOCATION (cfun
->decl
) != UNKNOWN_LOCATION
)
4404 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
4405 "called from this function");
4407 else if (warn_inline
4408 && DECL_DECLARED_INLINE_P (fn
)
4409 && !DECL_NO_INLINE_WARNING_P (fn
)
4410 && !DECL_IN_SYSTEM_HEADER (fn
)
4411 && reason
!= CIF_UNSPECIFIED
4412 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn
))
4413 /* Do not warn about not inlined recursive calls. */
4414 && !cg_edge
->recursive_p ()
4415 /* Avoid warnings during early inline pass. */
4416 && symtab
->global_info_ready
)
4418 if (warning (OPT_Winline
, "inlining failed in call to %q+F: %s",
4419 fn
, _(cgraph_inline_failed_string (reason
))))
4421 if (gimple_location (stmt
) != UNKNOWN_LOCATION
)
4422 inform (gimple_location (stmt
), "called from here");
4423 else if (DECL_SOURCE_LOCATION (cfun
->decl
) != UNKNOWN_LOCATION
)
4424 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
4425 "called from this function");
4430 id
->src_node
= cg_edge
->callee
;
4432 /* If callee is thunk, all we need is to adjust the THIS pointer
4433 and redirect to function being thunked. */
4434 if (id
->src_node
->thunk
.thunk_p
)
4437 tree virtual_offset
= NULL
;
4438 profile_count count
= cg_edge
->count
;
4440 gimple_stmt_iterator iter
= gsi_for_stmt (stmt
);
4443 edge
= id
->src_node
->callees
->clone (id
->dst_node
, call_stmt
,
4445 profile_count::one (),
4446 profile_count::one (),
4448 edge
->count
= count
;
4449 if (id
->src_node
->thunk
.virtual_offset_p
)
4450 virtual_offset
= size_int (id
->src_node
->thunk
.virtual_value
);
4451 op
= create_tmp_reg_fn (cfun
, TREE_TYPE (gimple_call_arg (stmt
, 0)),
4453 gsi_insert_before (&iter
, gimple_build_assign (op
,
4454 gimple_call_arg (stmt
, 0)),
4456 gcc_assert (id
->src_node
->thunk
.this_adjusting
);
4457 op
= thunk_adjust (&iter
, op
, 1, id
->src_node
->thunk
.fixed_offset
,
4460 gimple_call_set_arg (stmt
, 0, op
);
4461 gimple_call_set_fndecl (stmt
, edge
->callee
->decl
);
4463 id
->src_node
->remove ();
4464 expand_call_inline (bb
, stmt
, id
);
4465 maybe_remove_unused_call_args (cfun
, stmt
);
4468 fn
= cg_edge
->callee
->decl
;
4469 cg_edge
->callee
->get_untransformed_body ();
4471 if (flag_checking
&& cg_edge
->callee
->decl
!= id
->dst_node
->decl
)
4472 cg_edge
->callee
->verify ();
4474 /* We will be inlining this callee. */
4475 id
->eh_lp_nr
= lookup_stmt_eh_lp (stmt
);
4476 id
->assign_stmts
.create (0);
4478 /* Update the callers EH personality. */
4479 if (DECL_FUNCTION_PERSONALITY (fn
))
4480 DECL_FUNCTION_PERSONALITY (cg_edge
->caller
->decl
)
4481 = DECL_FUNCTION_PERSONALITY (fn
);
4483 /* Split the block before the GIMPLE_CALL. */
4484 stmt_gsi
= gsi_for_stmt (stmt
);
4485 gsi_prev (&stmt_gsi
);
4486 e
= split_block (bb
, gsi_end_p (stmt_gsi
) ? NULL
: gsi_stmt (stmt_gsi
));
4488 return_block
= e
->dest
;
4491 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4492 been the source of abnormal edges. In this case, schedule
4493 the removal of dead abnormal edges. */
4494 gsi
= gsi_start_bb (return_block
);
4496 purge_dead_abnormal_edges
= gsi_end_p (gsi
);
4498 stmt_gsi
= gsi_start_bb (return_block
);
4500 /* Build a block containing code to initialize the arguments, the
4501 actual inline expansion of the body, and a label for the return
4502 statements within the function to jump to. The type of the
4503 statement expression is the return type of the function call.
4504 ??? If the call does not have an associated block then we will
4505 remap all callee blocks to NULL, effectively dropping most of
4506 its debug information. This should only happen for calls to
4507 artificial decls inserted by the compiler itself. We need to
4508 either link the inlined blocks into the caller block tree or
4509 not refer to them in any way to not break GC for locations. */
4510 if (gimple_block (stmt
))
4512 id
->block
= make_node (BLOCK
);
4513 BLOCK_ABSTRACT_ORIGIN (id
->block
) = fn
;
4514 BLOCK_SOURCE_LOCATION (id
->block
)
4515 = LOCATION_LOCUS (gimple_location (stmt
));
4516 prepend_lexical_block (gimple_block (stmt
), id
->block
);
4519 /* Local declarations will be replaced by their equivalents in this
4522 id
->decl_map
= new hash_map
<tree
, tree
>;
4523 dst
= id
->debug_map
;
4524 id
->debug_map
= NULL
;
4526 /* Record the function we are about to inline. */
4528 id
->src_cfun
= DECL_STRUCT_FUNCTION (fn
);
4529 id
->call_stmt
= call_stmt
;
4531 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4532 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4533 dst_cfun
= DECL_STRUCT_FUNCTION (id
->dst_fn
);
4534 simtvars_save
= id
->dst_simt_vars
;
4535 if (!(dst_cfun
->curr_properties
& PROP_gimple_lomp_dev
)
4536 && (simduid
= bb
->loop_father
->simduid
) != NULL_TREE
4537 && (simduid
= ssa_default_def (dst_cfun
, simduid
)) != NULL_TREE
4538 && single_imm_use (simduid
, &use
, &simtenter_stmt
)
4539 && is_gimple_call (simtenter_stmt
)
4540 && gimple_call_internal_p (simtenter_stmt
, IFN_GOMP_SIMT_ENTER
))
4541 vec_alloc (id
->dst_simt_vars
, 0);
4543 id
->dst_simt_vars
= NULL
;
4545 if (profile_status_for_fn (id
->src_cfun
) == PROFILE_ABSENT
)
4546 profile_status_for_fn (dst_cfun
) = PROFILE_ABSENT
;
4548 /* If the src function contains an IFN_VA_ARG, then so will the dst
4549 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4550 prop_mask
= PROP_gimple_lva
| PROP_gimple_lomp_dev
;
4551 src_properties
= id
->src_cfun
->curr_properties
& prop_mask
;
4552 if (src_properties
!= prop_mask
)
4553 dst_cfun
->curr_properties
&= src_properties
| ~prop_mask
;
4555 gcc_assert (!id
->src_cfun
->after_inlining
);
4558 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn
)))
4560 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4561 gsi_insert_after (&si
, gimple_build_predict (PRED_COLD_FUNCTION
,
4565 initialize_inlined_parameters (id
, stmt
, fn
, bb
);
4566 if (debug_nonbind_markers_p
&& debug_inline_points
&& id
->block
4567 && inlined_function_outer_scope_p (id
->block
))
4569 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4570 gsi_insert_after (&si
, gimple_build_debug_inline_entry
4571 (id
->block
, input_location
), GSI_NEW_STMT
);
4574 if (DECL_INITIAL (fn
))
4576 if (gimple_block (stmt
))
4580 prepend_lexical_block (id
->block
,
4581 remap_blocks (DECL_INITIAL (fn
), id
));
4582 gcc_checking_assert (BLOCK_SUBBLOCKS (id
->block
)
4583 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id
->block
))
4585 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4586 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4587 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4588 under it. The parameters can be then evaluated in the debugger,
4589 but don't show in backtraces. */
4590 for (var
= &BLOCK_VARS (BLOCK_SUBBLOCKS (id
->block
)); *var
; )
4591 if (TREE_CODE (DECL_ORIGIN (*var
)) == PARM_DECL
)
4594 *var
= TREE_CHAIN (v
);
4595 TREE_CHAIN (v
) = BLOCK_VARS (id
->block
);
4596 BLOCK_VARS (id
->block
) = v
;
4599 var
= &TREE_CHAIN (*var
);
4602 remap_blocks_to_null (DECL_INITIAL (fn
), id
);
4605 /* Return statements in the function body will be replaced by jumps
4606 to the RET_LABEL. */
4607 gcc_assert (DECL_INITIAL (fn
));
4608 gcc_assert (TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
);
4610 /* Find the LHS to which the result of this call is assigned. */
4612 if (gimple_call_lhs (stmt
))
4614 modify_dest
= gimple_call_lhs (stmt
);
4616 /* The function which we are inlining might not return a value,
4617 in which case we should issue a warning that the function
4618 does not return a value. In that case the optimizers will
4619 see that the variable to which the value is assigned was not
4620 initialized. We do not want to issue a warning about that
4621 uninitialized variable. */
4622 if (DECL_P (modify_dest
))
4623 TREE_NO_WARNING (modify_dest
) = 1;
4625 if (gimple_call_return_slot_opt_p (call_stmt
))
4627 return_slot
= modify_dest
;
4634 /* If we are inlining a call to the C++ operator new, we don't want
4635 to use type based alias analysis on the return value. Otherwise
4636 we may get confused if the compiler sees that the inlined new
4637 function returns a pointer which was just deleted. See bug
4639 if (DECL_IS_OPERATOR_NEW (fn
))
4645 /* Declare the return variable for the function. */
4646 use_retvar
= declare_return_variable (id
, return_slot
, modify_dest
, bb
);
4648 /* Add local vars in this inlined callee to caller. */
4649 add_local_variables (id
->src_cfun
, cfun
, id
);
4651 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4653 fprintf (dump_file
, "Inlining %s to %s with frequency %4.2f\n",
4654 id
->src_node
->dump_name (),
4655 id
->dst_node
->dump_name (),
4656 cg_edge
->sreal_frequency ().to_double ());
4657 id
->src_node
->dump (dump_file
);
4658 id
->dst_node
->dump (dump_file
);
4661 /* This is it. Duplicate the callee body. Assume callee is
4662 pre-gimplified. Note that we must not alter the caller
4663 function in any way before this point, as this CALL_EXPR may be
4664 a self-referential call; if we're calling ourselves, we need to
4665 duplicate our body before altering anything. */
4666 copy_body (id
, bb
, return_block
, NULL
);
4668 reset_debug_bindings (id
, stmt_gsi
);
4670 if (flag_stack_reuse
!= SR_NONE
)
4671 for (tree p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
4672 if (!TREE_THIS_VOLATILE (p
))
4674 tree
*varp
= id
->decl_map
->get (p
);
4675 if (varp
&& VAR_P (*varp
) && !is_gimple_reg (*varp
))
4677 tree clobber
= build_constructor (TREE_TYPE (*varp
), NULL
);
4678 gimple
*clobber_stmt
;
4679 TREE_THIS_VOLATILE (clobber
) = 1;
4680 clobber_stmt
= gimple_build_assign (*varp
, clobber
);
4681 gimple_set_location (clobber_stmt
, gimple_location (stmt
));
4682 gsi_insert_before (&stmt_gsi
, clobber_stmt
, GSI_SAME_STMT
);
4686 /* Reset the escaped solution. */
4687 if (cfun
->gimple_df
)
4688 pt_solution_reset (&cfun
->gimple_df
->escaped
);
4690 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4691 if (id
->dst_simt_vars
&& id
->dst_simt_vars
->length () > 0)
4693 size_t nargs
= gimple_call_num_args (simtenter_stmt
);
4694 vec
<tree
> *vars
= id
->dst_simt_vars
;
4695 auto_vec
<tree
> newargs (nargs
+ vars
->length ());
4696 for (size_t i
= 0; i
< nargs
; i
++)
4697 newargs
.quick_push (gimple_call_arg (simtenter_stmt
, i
));
4698 for (tree
*pvar
= vars
->begin (); pvar
!= vars
->end (); pvar
++)
4700 tree ptrtype
= build_pointer_type (TREE_TYPE (*pvar
));
4701 newargs
.quick_push (build1 (ADDR_EXPR
, ptrtype
, *pvar
));
4703 gcall
*g
= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, newargs
);
4704 gimple_call_set_lhs (g
, gimple_call_lhs (simtenter_stmt
));
4705 gimple_stmt_iterator gsi
= gsi_for_stmt (simtenter_stmt
);
4706 gsi_replace (&gsi
, g
, false);
4708 vec_free (id
->dst_simt_vars
);
4709 id
->dst_simt_vars
= simtvars_save
;
4714 delete id
->debug_map
;
4715 id
->debug_map
= dst
;
4717 delete id
->decl_map
;
4720 /* Unlink the calls virtual operands before replacing it. */
4721 unlink_stmt_vdef (stmt
);
4722 if (gimple_vdef (stmt
)
4723 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
4724 release_ssa_name (gimple_vdef (stmt
));
4726 /* If the inlined function returns a result that we care about,
4727 substitute the GIMPLE_CALL with an assignment of the return
4728 variable to the LHS of the call. That is, if STMT was
4729 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4730 if (use_retvar
&& gimple_call_lhs (stmt
))
4732 gimple
*old_stmt
= stmt
;
4733 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), use_retvar
);
4734 gimple_set_location (stmt
, gimple_location (old_stmt
));
4735 gsi_replace (&stmt_gsi
, stmt
, false);
4736 maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
);
4737 /* Append a clobber for id->retvar if easily possible. */
4738 if (flag_stack_reuse
!= SR_NONE
4740 && VAR_P (id
->retvar
)
4741 && id
->retvar
!= return_slot
4742 && id
->retvar
!= modify_dest
4743 && !TREE_THIS_VOLATILE (id
->retvar
)
4744 && !is_gimple_reg (id
->retvar
)
4745 && !stmt_ends_bb_p (stmt
))
4747 tree clobber
= build_constructor (TREE_TYPE (id
->retvar
), NULL
);
4748 gimple
*clobber_stmt
;
4749 TREE_THIS_VOLATILE (clobber
) = 1;
4750 clobber_stmt
= gimple_build_assign (id
->retvar
, clobber
);
4751 gimple_set_location (clobber_stmt
, gimple_location (old_stmt
));
4752 gsi_insert_after (&stmt_gsi
, clobber_stmt
, GSI_SAME_STMT
);
4757 /* Handle the case of inlining a function with no return
4758 statement, which causes the return value to become undefined. */
4759 if (gimple_call_lhs (stmt
)
4760 && TREE_CODE (gimple_call_lhs (stmt
)) == SSA_NAME
)
4762 tree name
= gimple_call_lhs (stmt
);
4763 tree var
= SSA_NAME_VAR (name
);
4764 tree def
= var
? ssa_default_def (cfun
, var
) : NULL
;
4768 /* If the variable is used undefined, make this name
4769 undefined via a move. */
4770 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), def
);
4771 gsi_replace (&stmt_gsi
, stmt
, true);
4777 var
= create_tmp_reg_fn (cfun
, TREE_TYPE (name
), NULL
);
4778 SET_SSA_NAME_VAR_OR_IDENTIFIER (name
, var
);
4780 /* Otherwise make this variable undefined. */
4781 gsi_remove (&stmt_gsi
, true);
4782 set_ssa_default_def (cfun
, var
, name
);
4783 SSA_NAME_DEF_STMT (name
) = gimple_build_nop ();
4786 /* Replace with a clobber for id->retvar. */
4787 else if (flag_stack_reuse
!= SR_NONE
4789 && VAR_P (id
->retvar
)
4790 && id
->retvar
!= return_slot
4791 && id
->retvar
!= modify_dest
4792 && !TREE_THIS_VOLATILE (id
->retvar
)
4793 && !is_gimple_reg (id
->retvar
))
4795 tree clobber
= build_constructor (TREE_TYPE (id
->retvar
), NULL
);
4796 gimple
*clobber_stmt
;
4797 TREE_THIS_VOLATILE (clobber
) = 1;
4798 clobber_stmt
= gimple_build_assign (id
->retvar
, clobber
);
4799 gimple_set_location (clobber_stmt
, gimple_location (stmt
));
4800 gsi_replace (&stmt_gsi
, clobber_stmt
, false);
4801 maybe_clean_or_replace_eh_stmt (stmt
, clobber_stmt
);
4804 gsi_remove (&stmt_gsi
, true);
4807 if (purge_dead_abnormal_edges
)
4809 gimple_purge_dead_eh_edges (return_block
);
4810 gimple_purge_dead_abnormal_call_edges (return_block
);
4813 /* If the value of the new expression is ignored, that's OK. We
4814 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4815 the equivalent inlined version either. */
4816 if (is_gimple_assign (stmt
))
4818 gcc_assert (gimple_assign_single_p (stmt
)
4819 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)));
4820 TREE_USED (gimple_assign_rhs1 (stmt
)) = 1;
4823 id
->assign_stmts
.release ();
4825 /* Output the inlining info for this abstract function, since it has been
4826 inlined. If we don't do this now, we can lose the information about the
4827 variables in the function when the blocks get blown away as soon as we
4828 remove the cgraph node. */
4829 if (gimple_block (stmt
))
4830 (*debug_hooks
->outlining_inline_function
) (fn
);
4832 /* Update callgraph if needed. */
4833 cg_edge
->callee
->remove ();
4835 id
->block
= NULL_TREE
;
4836 id
->retvar
= NULL_TREE
;
4837 id
->retbnd
= NULL_TREE
;
4838 successfully_inlined
= true;
4841 input_location
= saved_location
;
4842 return successfully_inlined
;
4845 /* Expand call statements reachable from STMT_P.
4846 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4847 in a MODIFY_EXPR. */
/* Walk the statements of BB (from the last one backwards) and try to
   inline every call that is a real GIMPLE call and not an internal
   function call, via expand_call_inline.  Accumulates whether any
   call was inlined in INLINED.
   NOTE(review): this extraction is line-mangled and elided — the loop
   braces, the iterator-decrement and the final return of INLINED are
   missing here; verify against upstream tree-inline.c before editing.  */
4850 gimple_expand_calls_inline (basic_block bb
, copy_body_data
*id
)
4852 gimple_stmt_iterator gsi
;
4853 bool inlined
= false;
4855 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);)
4857 gimple
*stmt
= gsi_stmt (gsi
);
/* Internal-function calls (IFN_*) have no fndecl and cannot be inlined.  */
4860 if (is_gimple_call (stmt
)
4861 && !gimple_call_internal_p (stmt
))
4862 inlined
|= expand_call_inline (bb
, stmt
, id
);
4869 /* Walk all basic blocks created after FIRST and try to fold every statement
4870 in the STATEMENTS pointer set. */
/* Walk all basic blocks with index >= FIRST (i.e. those created after
   inlining started) and fold every statement found in the STATEMENTS
   hash set.  Built-in calls get special handling because folding them
   can expand into multiple statements; after any fold, call-graph
   edges and EH edges are kept up to date.
   NOTE(review): extraction is line-mangled and elided (braces and some
   statements missing); compare with upstream before editing.  */
4873 fold_marked_statements (int first
, hash_set
<gimple
*> *statements
)
4875 for (; first
< n_basic_blocks_for_fn (cfun
); first
++)
4876 if (BASIC_BLOCK_FOR_FN (cfun
, first
))
4878 gimple_stmt_iterator gsi
;
4880 for (gsi
= gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, first
));
/* Only touch statements explicitly queued for folding.  */
4883 if (statements
->contains (gsi_stmt (gsi
)))
4885 gimple
*old_stmt
= gsi_stmt (gsi
);
4886 tree old_decl
= is_gimple_call (old_stmt
) ? gimple_call_fndecl (old_stmt
) : 0;
4888 if (old_decl
&& DECL_BUILT_IN (old_decl
))
4890 /* Folding builtins can create multiple instructions,
4891 we need to look at all of them. */
4892 gimple_stmt_iterator i2
= gsi
;
4894 if (fold_stmt (&gsi
))
4897 /* If a builtin at the end of a bb folded into nothing,
4898 the following loop won't work. */
4899 if (gsi_end_p (gsi
))
4901 cgraph_update_edges_for_call_stmt (old_stmt
,
4906 i2
= gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, first
));
4911 new_stmt
= gsi_stmt (i2
);
4912 update_stmt (new_stmt
);
4913 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4916 if (new_stmt
== gsi_stmt (gsi
))
4918 /* It is okay to check only for the very last
4919 of these statements. If it is a throwing
4920 statement nothing will change. If it isn't
4921 this can remove EH edges. If that weren't
4922 correct then because some intermediate stmts
4923 throw, but not the last one. That would mean
4924 we'd have to split the block, which we can't
4925 here and we'd loose anyway. And as builtins
4926 probably never throw, this all
4928 if (maybe_clean_or_replace_eh_stmt (old_stmt
,
4930 gimple_purge_dead_eh_edges (
4931 BASIC_BLOCK_FOR_FN (cfun
, first
));
/* Non-builtin statement: fold in place and fix up edges.  */
4938 else if (fold_stmt (&gsi
))
4940 /* Re-read the statement from GSI as fold_stmt() may
4942 gimple
*new_stmt
= gsi_stmt (gsi
);
4943 update_stmt (new_stmt
);
4945 if (is_gimple_call (old_stmt
)
4946 || is_gimple_call (new_stmt
))
4947 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4950 if (maybe_clean_or_replace_eh_stmt (old_stmt
, new_stmt
))
4951 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun
,
4958 /* Expand calls to inline functions in the body of FN. */
/* Expand calls to inline functions in the body of FN: set up a
   copy_body_data for FN, walk every basic block calling
   gimple_expand_calls_inline, then fold the statements queued during
   inlining and clean up unreachable blocks/callgraph.  Returns a TODO
   bitmask for the pass manager.
   NOTE(review): extraction is line-mangled and elided (declarations of
   `id` and `bb`, braces, and some statements are missing); verify
   against upstream tree-inline.c before editing.  */
4961 optimize_inline_calls (tree fn
)
4965 int last
= n_basic_blocks_for_fn (cfun
);
4966 bool inlined_p
= false;
/* Clear out ID.  */
4969 memset (&id
, 0, sizeof (id
));
4971 id
.src_node
= id
.dst_node
= cgraph_node::get (fn
);
4972 gcc_assert (id
.dst_node
->definition
);
4974 /* Or any functions that aren't finished yet. */
4975 if (current_function_decl
)
4976 id
.dst_fn
= current_function_decl
;
4978 id
.copy_decl
= copy_decl_maybe_to_var
;
4979 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
4980 id
.transform_new_cfg
= false;
4981 id
.transform_return_to_modify
= true;
4982 id
.transform_parameter
= true;
4983 id
.transform_lang_insert_block
= NULL
;
4984 id
.statements_to_fold
= new hash_set
<gimple
*>;
4986 push_gimplify_context ();
4988 /* We make no attempts to keep dominance info up-to-date. */
4989 free_dominance_info (CDI_DOMINATORS
);
4990 free_dominance_info (CDI_POST_DOMINATORS
);
4992 /* Register specific gimple functions. */
4993 gimple_register_cfg_hooks ();
4995 /* Reach the trees by walking over the CFG, and note the
4996 enclosing basic-blocks in the call edges. */
4997 /* We walk the blocks going forward, because inlined function bodies
4998 will split id->current_basic_block, and the new blocks will
4999 follow it; we'll trudge through them, processing their CALL_EXPRs
5001 FOR_EACH_BB_FN (bb
, cfun
)
5002 inlined_p
|= gimple_expand_calls_inline (bb
, &id
);
5004 pop_gimplify_context (NULL
);
5008 struct cgraph_edge
*e
;
5010 id
.dst_node
->verify ();
5012 /* Double check that we inlined everything we are supposed to inline. */
5013 for (e
= id
.dst_node
->callees
; e
; e
= e
->next_callee
)
5014 gcc_assert (e
->inline_failed
);
5017 /* Fold queued statements. */
5018 update_max_bb_count ();
5019 fold_marked_statements (last
, id
.statements_to_fold
);
5020 delete id
.statements_to_fold
;
5022 gcc_assert (!id
.debug_stmts
.exists ());
5024 /* If we didn't inline into the function there is nothing to do. */
5028 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5031 delete_unreachable_blocks_update_callgraph (&id
);
5033 id
.dst_node
->verify ();
5035 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5036 not possible yet - the IPA passes might make various functions to not
5037 throw and they don't care to proactively update local EH info. This is
5038 done later in fixup_cfg pass that also execute the verification. */
5039 return (TODO_update_ssa
5041 | (gimple_in_ssa_p (cfun
) ? TODO_remove_unused_locals
: 0)
5042 | (gimple_in_ssa_p (cfun
) ? TODO_update_address_taken
: 0)
5043 | (profile_status_for_fn (cfun
) != PROFILE_ABSENT
5044 ? TODO_rebuild_frequencies
: 0));
5047 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
/* walk_tree callback: copy the node pointed to by TP, when appropriate.
   Expressions, TREE_LISTs, TYPE_DECLs and OMP_CLAUSEs are copied with
   copy_node (preserving the chain so walk_tree descends into it);
   CONSTRUCTORs additionally get their element vector duplicated;
   STATEMENT_LISTs are copied via copy_statement_list; types,
   declarations and constants are shared, not copied.
   NOTE(review): extraction is line-mangled and elided (braces and the
   TREE_CHAIN save/restore details are partly missing); verify against
   upstream before editing.  */
5050 copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
5052 enum tree_code code
= TREE_CODE (*tp
);
5053 enum tree_code_class cl
= TREE_CODE_CLASS (code
);
5055 /* We make copies of most nodes. */
5056 if (IS_EXPR_CODE_CLASS (cl
)
5057 || code
== TREE_LIST
5059 || code
== TYPE_DECL
5060 || code
== OMP_CLAUSE
)
5062 /* Because the chain gets clobbered when we make a copy, we save it
5064 tree chain
= NULL_TREE
, new_tree
;
5066 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
5067 chain
= TREE_CHAIN (*tp
);
5069 /* Copy the node. */
5070 new_tree
= copy_node (*tp
);
5074 /* Now, restore the chain, if appropriate. That will cause
5075 walk_tree to walk into the chain as well. */
5076 if (code
== PARM_DECL
5077 || code
== TREE_LIST
5078 || code
== OMP_CLAUSE
)
5079 TREE_CHAIN (*tp
) = chain
;
5081 /* For now, we don't update BLOCKs when we make copies. So, we
5082 have to nullify all BIND_EXPRs. */
5083 if (TREE_CODE (*tp
) == BIND_EXPR
)
5084 BIND_EXPR_BLOCK (*tp
) = NULL_TREE
;
5086 else if (code
== CONSTRUCTOR
)
5088 /* CONSTRUCTOR nodes need special handling because
5089 we need to duplicate the vector of elements. */
5092 new_tree
= copy_node (*tp
);
5093 CONSTRUCTOR_ELTS (new_tree
) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp
));
5096 else if (code
== STATEMENT_LIST
)
5097 /* We used to just abort on STATEMENT_LIST, but we can run into them
5098 with statement-expressions (c++/40975). */
5099 copy_statement_list (tp
);
/* Types, declarations and constants are shared across copies.  */
5100 else if (TREE_CODE_CLASS (code
) == tcc_type
)
5102 else if (TREE_CODE_CLASS (code
) == tcc_declaration
)
5104 else if (TREE_CODE_CLASS (code
) == tcc_constant
)
5109 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5110 information indicating to what new SAVE_EXPR this one should be mapped,
5111 use that one. Otherwise, create a new node and enter it in ST. FN is
5112 the function into which the copy will be placed. */
/* Remap the SAVE_EXPR at *TP using the mapping table ST so that a
   single SAVE_EXPR in the source maps to a single copy in the
   destination; create and record a new copy the first time it is seen.
   NOTE(review): most of this function's body is missing from this
   extraction — only the comments and the copy_node call survive;
   consult upstream tree-inline.c for the real control flow.  */
5115 remap_save_expr (tree
*tp
, hash_map
<tree
, tree
> *st
, int *walk_subtrees
)
5120 /* See if we already encountered this SAVE_EXPR. */
5123 /* If we didn't already remap this SAVE_EXPR, do so now. */
5126 t
= copy_node (*tp
);
5128 /* Remember this SAVE_EXPR. */
5130 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5135 /* We've already walked into this SAVE_EXPR; don't do it again. */
5140 /* Replace this SAVE_EXPR with the copy. */
5144 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5145 label, copies the declaration and enters it in the splay_tree in DATA (which
5146 is really a 'copy_body_data *'. */
/* walk_gimple_seq callback: if *GSIP is a GIMPLE_LABEL for a local
   label, copy the label's LABEL_DECL via id->copy_decl and record the
   mapping in the decl map (WI->info is really a copy_body_data *).
   NOTE(review): line-mangled extraction; the dyn_cast null check and
   return are elided here — verify against upstream before editing.  */
5149 mark_local_labels_stmt (gimple_stmt_iterator
*gsip
,
5150 bool *handled_ops_p ATTRIBUTE_UNUSED
,
5151 struct walk_stmt_info
*wi
)
5153 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5154 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsip
));
5158 tree decl
= gimple_label_label (stmt
);
5160 /* Copy the decl and remember the copy. */
5161 insert_decl_map (id
, decl
, id
->copy_decl (decl
, id
));
5167 static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq
,
5168 struct walk_stmt_info
*wi
);
5170 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5171 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5172 remaps all local declarations to appropriate replacements in gimple
/* walk_gimple_seq operand callback for copy_gimple_seq_and_replace_locals:
   remap SSA names, local variables and labels found at *TP using the
   decl map in WI->info (a copy_body_data *).  TARGET_EXPRs that have
   not been expanded get their initializer moved from operand 3 to
   operand 1; OMP clauses carrying nested GIMPLE sequences have those
   sequences duplicated and remapped explicitly.
   NOTE(review): line-mangled and elided extraction (the declaration of
   `expr`, braces and lookup code are missing); verify upstream.  */
5176 replace_locals_op (tree
*tp
, int *walk_subtrees
, void *data
)
5178 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5179 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5180 hash_map
<tree
, tree
> *st
= id
->decl_map
;
5184 /* For recursive invocations this is no longer the LHS itself. */
5185 bool is_lhs
= wi
->is_lhs
;
5188 if (TREE_CODE (expr
) == SSA_NAME
)
5190 *tp
= remap_ssa_name (*tp
, id
);
/* The remapped name's defining statement is the one being walked.  */
5193 SSA_NAME_DEF_STMT (*tp
) = gsi_stmt (wi
->gsi
);
5195 /* Only a local declaration (variable or label). */
5196 else if ((VAR_P (expr
) && !TREE_STATIC (expr
))
5197 || TREE_CODE (expr
) == LABEL_DECL
)
5199 /* Lookup the declaration. */
5202 /* If it's there, remap it. */
5207 else if (TREE_CODE (expr
) == STATEMENT_LIST
5208 || TREE_CODE (expr
) == BIND_EXPR
5209 || TREE_CODE (expr
) == SAVE_EXPR
)
5211 else if (TREE_CODE (expr
) == TARGET_EXPR
)
5213 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5214 It's OK for this to happen if it was part of a subtree that
5215 isn't immediately expanded, such as operand 2 of another
5217 if (!TREE_OPERAND (expr
, 1))
5219 TREE_OPERAND (expr
, 1) = TREE_OPERAND (expr
, 3);
5220 TREE_OPERAND (expr
, 3) = NULL_TREE
;
5223 else if (TREE_CODE (expr
) == OMP_CLAUSE
)
5225 /* Before the omplower pass completes, some OMP clauses can contain
5226 sequences that are neither copied by gimple_seq_copy nor walked by
5227 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5228 in those situations, we have to copy and process them explicitely. */
5230 if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_LASTPRIVATE
)
5232 gimple_seq seq
= OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr
);
5233 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5234 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr
) = seq
;
5236 else if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_LINEAR
)
5238 gimple_seq seq
= OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr
);
5239 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5240 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr
) = seq
;
5242 else if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_REDUCTION
)
5244 gimple_seq seq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr
);
5245 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5246 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr
) = seq
;
5247 seq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr
);
5248 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5249 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr
) = seq
;
5253 /* Keep iterating. */
5258 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5259 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5260 remaps all local declarations to appropriate replacements in gimple
/* walk_gimple_seq statement callback for
   copy_gimple_seq_and_replace_locals: for GIMPLE_BIND statements,
   remap the bind's BLOCK and its variable chain using the
   copy_body_data in WI->info.  prevent_decl_creation_for_types is set
   around remap_decls so type remapping does not create new decls.
   NOTE(review): line-mangled/elided extraction (braces and return are
   missing); verify against upstream tree-inline.c before editing.  */
5264 replace_locals_stmt (gimple_stmt_iterator
*gsip
,
5265 bool *handled_ops_p ATTRIBUTE_UNUSED
,
5266 struct walk_stmt_info
*wi
)
5268 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5269 gimple
*gs
= gsi_stmt (*gsip
);
5271 if (gbind
*stmt
= dyn_cast
<gbind
*> (gs
))
5273 tree block
= gimple_bind_block (stmt
);
5277 remap_block (&block
, id
);
5278 gimple_bind_set_block (stmt
, block
);
5281 /* This will remap a lot of the same decls again, but this should be
5283 if (gimple_bind_vars (stmt
))
5285 tree old_var
, decls
= gimple_bind_vars (stmt
);
5287 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
5288 if (!can_be_nonlocal (old_var
, id
)
5289 && ! variably_modified_type_p (TREE_TYPE (old_var
), id
->src_fn
))
5290 remap_decl (old_var
, id
);
5292 gcc_checking_assert (!id
->prevent_decl_creation_for_types
);
5293 id
->prevent_decl_creation_for_types
= true;
5294 gimple_bind_set_vars (stmt
, remap_decls (decls
, NULL
, id
));
5295 id
->prevent_decl_creation_for_types
= false;
5299 /* Keep iterating. */
5303 /* Create a copy of SEQ and remap all decls in it. */
/* Create a copy of SEQ and remap all decls in it: first walk SEQ to
   record local labels, then copy it with gimple_seq_copy and walk the
   copy replacing locals via replace_locals_stmt/replace_locals_op.
   NOTE(review): the `return copy;` tail is elided in this extraction.  */
5306 duplicate_remap_omp_clause_seq (gimple_seq seq
, struct walk_stmt_info
*wi
)
5311 /* If there are any labels in OMP sequences, they can be only referred to in
5312 the sequence itself and therefore we can do both here. */
5313 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, wi
);
5314 gimple_seq copy
= gimple_seq_copy (seq
);
5315 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, wi
);
5319 /* Copies everything in SEQ and replaces variables and labels local to
5320 current_function_decl. */
/* Copy everything in SEQ, replacing variables and labels local to
   current_function_decl: set up a copy_body_data mapping the function
   to itself with copy_decl_no_change, walk SEQ once to record local
   labels, copy it, then walk the copy remapping decls; finally free
   the maps.
   NOTE(review): line-mangled/elided extraction (declarations of `id`
   and `copy`, decl_map cleanup and the return are partly missing);
   verify against upstream tree-inline.c before editing.  */
5323 copy_gimple_seq_and_replace_locals (gimple_seq seq
)
5326 struct walk_stmt_info wi
;
5329 /* There's nothing to do for NULL_TREE. */
/* Set up ID to remap within the current function.  */
5334 memset (&id
, 0, sizeof (id
));
5335 id
.src_fn
= current_function_decl
;
5336 id
.dst_fn
= current_function_decl
;
5338 id
.decl_map
= new hash_map
<tree
, tree
>;
5339 id
.debug_map
= NULL
;
5341 id
.copy_decl
= copy_decl_no_change
;
5342 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
5343 id
.transform_new_cfg
= false;
5344 id
.transform_return_to_modify
= false;
5345 id
.transform_parameter
= false;
5346 id
.transform_lang_insert_block
= NULL
;
5348 /* Walk the tree once to find local labels. */
5349 memset (&wi
, 0, sizeof (wi
));
5350 hash_set
<tree
> visited
;
5353 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, &wi
);
5355 copy
= gimple_seq_copy (seq
);
5357 /* Walk the copy, remapping decls. */
5358 memset (&wi
, 0, sizeof (wi
));
5360 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, &wi
);
/* Clean up the auxiliary maps.  */
5365 delete id
.debug_map
;
5366 if (id
.dependence_map
)
5368 delete id
.dependence_map
;
5369 id
.dependence_map
= NULL
;
5376 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
/* walk_tree callback used by debug_find_tree (a gdb helper).
   NOTE(review): the function body is entirely elided in this
   extraction; upstream it compares *TP against DATA.  */
5379 debug_find_tree_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
, void *data
)
/* gdb helper: return nonzero if SEARCH occurs anywhere within TOP,
   using walk_tree_without_duplicates with debug_find_tree_1.  */
5388 debug_find_tree (tree top
, tree search
)
5390 return walk_tree_without_duplicates (&top
, debug_find_tree_1
, search
) != 0;
5394 /* Declare the variables created by the inliner. Add all the variables in
5395 VARS to BIND_EXPR. */
/* Declare the variables created by the inliner: mark each decl in the
   VARS chain as seen in a BIND_EXPR, assert it is neither static nor
   already emitted, add it to cfun's local decls, and finally chain
   VARS onto BLOCK's variable list (when BLOCK is non-null upstream).
   NOTE(review): line-mangled extraction; the declaration of `t` and
   the guard around BLOCK are elided here.  */
5398 declare_inline_vars (tree block
, tree vars
)
5401 for (t
= vars
; t
; t
= DECL_CHAIN (t
))
5403 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
5404 gcc_assert (!TREE_STATIC (t
) && !TREE_ASM_WRITTEN (t
));
5405 add_local_decl (cfun
, t
);
5409 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), vars
);
5412 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5413 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5414 VAR_DECL translation. */
/* Finish a decl COPY made from DECL for duplication into ID->dst_fn:
   propagate debug-related flags, set DECL_ABSTRACT_ORIGIN, clear any
   RTL, mark the copy used, fix up DECL_CONTEXT, and register non-reg
   VAR_DECL copies with the SIMT-private machinery when inlining into
   a SIMT region (id->dst_simt_vars non-null).  Returns COPY (return
   statement elided in this extraction).
   NOTE(review): line-mangled/elided — braces and some context-setting
   branches are missing; verify against upstream before editing.  */
5417 copy_decl_for_dup_finish (copy_body_data
*id
, tree decl
, tree copy
)
5419 /* Don't generate debug information for the copy if we wouldn't have
5420 generated it for the copy either. */
5421 DECL_ARTIFICIAL (copy
) = DECL_ARTIFICIAL (decl
);
5422 DECL_IGNORED_P (copy
) = DECL_IGNORED_P (decl
);
5424 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5425 declaration inspired this copy. */
5426 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
5428 /* The new variable/label has no RTL, yet. */
5429 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy
), TS_DECL_WRTL
)
5430 && !TREE_STATIC (copy
) && !DECL_EXTERNAL (copy
))
5431 SET_DECL_RTL (copy
, 0);
5433 /* These args would always appear unused, if not for this. */
5434 TREE_USED (copy
) = 1;
5436 /* Set the context for the new declaration. */
5437 if (!DECL_CONTEXT (decl
))
5438 /* Globals stay global. */
5440 else if (DECL_CONTEXT (decl
) != id
->src_fn
)
5441 /* Things that weren't in the scope of the function we're inlining
5442 from aren't in the scope we're inlining to, either. */
5444 else if (TREE_STATIC (decl
))
5445 /* Function-scoped static variables should stay in the original
5450 /* Ordinary automatic local variables are now in the scope of the
5452 DECL_CONTEXT (copy
) = id
->dst_fn
;
5453 if (VAR_P (copy
) && id
->dst_simt_vars
&& !is_gimple_reg (copy
))
5455 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy
)))
5456 DECL_ATTRIBUTES (copy
)
5457 = tree_cons (get_identifier ("omp simt private"), NULL
,
5458 DECL_ATTRIBUTES (copy
));
5459 id
->dst_simt_vars
->safe_push (copy
);
/* Copy DECL (a PARM_DECL or RESULT_DECL) into a fresh VAR_DECL located
   in ID->dst_fn, carrying over addressability, readonly, volatility
   and gimple-register flags, then finish the copy via
   copy_decl_for_dup_finish.
   NOTE(review): line-mangled extraction; declarations of `copy` and
   `type` are elided here.  */
5467 copy_decl_to_var (tree decl
, copy_body_data
*id
)
5471 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5472 || TREE_CODE (decl
) == RESULT_DECL
);
5474 type
= TREE_TYPE (decl
);
5476 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5477 VAR_DECL
, DECL_NAME (decl
), type
);
5478 if (DECL_PT_UID_SET_P (decl
))
5479 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5480 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5481 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5482 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5483 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5485 return copy_decl_for_dup_finish (id
, decl
, copy
);
5488 /* Like copy_decl_to_var, but create a return slot object instead of a
5489 pointer variable for return by invisible reference. */
/* Like copy_decl_to_var, but for return-by-invisible-reference: when
   DECL is DECL_BY_REFERENCE, strip one level of pointer type so the
   new VAR_DECL is the return slot object itself rather than a pointer,
   and skip propagating addressability/gimple-reg in that case.
   NOTE(review): line-mangled extraction; declarations of `copy` and
   `type` are elided here.  */
5492 copy_result_decl_to_var (tree decl
, copy_body_data
*id
)
5496 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5497 || TREE_CODE (decl
) == RESULT_DECL
);
5499 type
= TREE_TYPE (decl
);
5500 if (DECL_BY_REFERENCE (decl
))
5501 type
= TREE_TYPE (type
);
5503 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5504 VAR_DECL
, DECL_NAME (decl
), type
);
5505 if (DECL_PT_UID_SET_P (decl
))
5506 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5507 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5508 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5509 if (!DECL_BY_REFERENCE (decl
))
5511 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5512 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5515 return copy_decl_for_dup_finish (id
, decl
, copy
);
/* Copy DECL unchanged (same kind of decl): duplicate the node, clear
   its abstractness, duplicate language-specific parts, and for labels
   reset TREE_ADDRESSABLE and the label UID; finish via
   copy_decl_for_dup_finish.
   NOTE(review): line-mangled extraction; the declaration of `copy`
   is elided here.  */
5519 copy_decl_no_change (tree decl
, copy_body_data
*id
)
5523 copy
= copy_node (decl
);
5525 /* The COPY is not abstract; it will be generated in DST_FN. */
5526 DECL_ABSTRACT_P (copy
) = false;
5527 lang_hooks
.dup_lang_specific_decl (copy
);
5529 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5530 been taken; it's for internal bookkeeping in expand_goto_internal. */
5531 if (TREE_CODE (copy
) == LABEL_DECL
)
5533 TREE_ADDRESSABLE (copy
) = 0;
5534 LABEL_DECL_UID (copy
) = -1;
5537 return copy_decl_for_dup_finish (id
, decl
, copy
);
/* Dispatch used as id->copy_decl for inlining: PARM_DECLs and
   RESULT_DECLs are turned into VAR_DECLs via copy_decl_to_var;
   anything else is copied unchanged via copy_decl_no_change.  */
5541 copy_decl_maybe_to_var (tree decl
, copy_body_data
*id
)
5543 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
5544 return copy_decl_to_var (decl
, id
);
5546 return copy_decl_no_change (decl
, id
);
5549 /* Return a copy of the function's argument tree. */
/* Return a copy of the function's argument tree for versioning:
   parameters whose index is set in ARGS_TO_SKIP are not copied as
   PARM_DECLs — instead an equivalent VAR_DECL is created, recorded in
   the decl map, and chained onto *VARS so later uses can be rewritten.
   NOTE(review): line-mangled/elided extraction (declarations of `arg`,
   `i` and `parg`, chain linking of new_parm, and the return are
   missing); verify against upstream before editing.  */
5551 copy_arguments_for_versioning (tree orig_parm
, copy_body_data
* id
,
5552 bitmap args_to_skip
, tree
*vars
)
5555 tree new_parm
= NULL
;
5560 for (arg
= orig_parm
; arg
; arg
= DECL_CHAIN (arg
), i
++)
5561 if (!args_to_skip
|| !bitmap_bit_p (args_to_skip
, i
))
5563 tree new_tree
= remap_decl (arg
, id
);
5564 if (TREE_CODE (new_tree
) != PARM_DECL
)
5565 new_tree
= id
->copy_decl (arg
, id
);
5566 lang_hooks
.dup_lang_specific_decl (new_tree
);
5568 parg
= &DECL_CHAIN (new_tree
);
5570 else if (!id
->decl_map
->get (arg
))
5572 /* Make an equivalent VAR_DECL. If the argument was used
5573 as temporary variable later in function, the uses will be
5574 replaced by local variable. */
5575 tree var
= copy_decl_to_var (arg
, id
);
5576 insert_decl_map (id
, arg
, var
);
5577 /* Declare this new variable. */
5578 DECL_CHAIN (var
) = *vars
;
5584 /* Return a copy of the function's static chain. */
/* Return a copy of the function's static chain: walk the DECL_CHAIN
   starting at STATIC_CHAIN, remapping each decl via remap_decl and
   duplicating language-specific parts, relinking the copies into a
   chain headed by the (possibly remapped) STATIC_CHAIN.
   NOTE(review): line-mangled extraction; the `*pvar = new_tree` store
   inside the loop is elided here — verify against upstream.  */
5586 copy_static_chain (tree static_chain
, copy_body_data
* id
)
5588 tree
*chain_copy
, *pvar
;
5590 chain_copy
= &static_chain
;
5591 for (pvar
= chain_copy
; *pvar
; pvar
= &DECL_CHAIN (*pvar
))
5593 tree new_tree
= remap_decl (*pvar
, id
);
5594 lang_hooks
.dup_lang_specific_decl (new_tree
);
5595 DECL_CHAIN (new_tree
) = DECL_CHAIN (*pvar
);
5598 return static_chain
;
5601 /* Return true if the function is allowed to be versioned.
5602 This is a guard for the versioning functionality. */
/* Return true if FNDECL may be versioned/cloned: it must not carry the
   "noclone" attribute and copy_forbidden must find no obstacle in its
   struct function.  */
5605 tree_versionable_function_p (tree fndecl
)
5607 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl
))
5608 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl
)) == NULL
);
5611 /* Delete all unreachable basic blocks and update callgraph.
5612 Doing so is somewhat nontrivial because we need to update all clones and
5613 remove inline function that become unreachable. */
/* Delete all unreachable basic blocks and keep the callgraph in sync:
   for every statement in an unreachable block, drop the destination
   node's references to it, remove fully-inlined callees reached via
   its call edges, and — when clones are being moved (CB_CGE_MOVE_CLONES)
   — do the same for every clone of id->dst_node, walking the clone
   tree with the clones / next_sibling_clone / clone_of links.
   Finally delete the block itself.
   NOTE(review): line-mangled/elided extraction (braces, the `changed`
   update and the return are missing); verify against upstream.  */
5616 delete_unreachable_blocks_update_callgraph (copy_body_data
*id
)
5618 bool changed
= false;
5619 basic_block b
, next_bb
;
5621 find_unreachable_blocks ();
5623 /* Delete all unreachable basic blocks. */
5625 for (b
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; b
5626 != EXIT_BLOCK_PTR_FOR_FN (cfun
); b
= next_bb
)
5628 next_bb
= b
->next_bb
;
5630 if (!(b
->flags
& BB_REACHABLE
))
5632 gimple_stmt_iterator bsi
;
5634 for (bsi
= gsi_start_bb (b
); !gsi_end_p (bsi
); gsi_next (&bsi
))
5636 struct cgraph_edge
*e
;
5637 struct cgraph_node
*node
;
5639 id
->dst_node
->remove_stmt_references (gsi_stmt (bsi
));
5641 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5642 &&(e
= id
->dst_node
->get_edge (gsi_stmt (bsi
))) != NULL
)
5644 if (!e
->inline_failed
)
5645 e
->callee
->remove_symbol_and_inline_clones (id
->dst_node
);
/* Mirror the cleanup in every clone of the destination node.  */
5649 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
5650 && id
->dst_node
->clones
)
5651 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5653 node
->remove_stmt_references (gsi_stmt (bsi
));
5654 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5655 && (e
= node
->get_edge (gsi_stmt (bsi
))) != NULL
)
5657 if (!e
->inline_failed
)
5658 e
->callee
->remove_symbol_and_inline_clones (id
->dst_node
);
/* Depth-first traversal of the clone tree.  */
5664 node
= node
->clones
;
5665 else if (node
->next_sibling_clone
)
5666 node
= node
->next_sibling_clone
;
5669 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5670 node
= node
->clone_of
;
5671 if (node
!= id
->dst_node
)
5672 node
= node
->next_sibling_clone
;
5676 delete_basic_block (b
);
5684 /* Update clone info after duplication. */
/* Update clone info after duplication: for each clone of id->dst_node,
   rewrite the old_tree/new_tree of every ipa_replace_map entry through
   copy_tree_body_r so the replace maps match the new body.  Uses the
   same clones / next_sibling_clone / clone_of walk as
   delete_unreachable_blocks_update_callgraph.
   NOTE(review): line-mangled/elided extraction (declaration of `i`,
   braces and the early return are missing); verify against upstream.  */
5687 update_clone_info (copy_body_data
* id
)
5689 struct cgraph_node
*node
;
5690 if (!id
->dst_node
->clones
)
5692 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5694 /* First update replace maps to match the new body. */
5695 if (node
->clone
.tree_map
)
5698 for (i
= 0; i
< vec_safe_length (node
->clone
.tree_map
); i
++)
5700 struct ipa_replace_map
*replace_info
;
5701 replace_info
= (*node
->clone
.tree_map
)[i
];
5702 walk_tree (&replace_info
->old_tree
, copy_tree_body_r
, id
, NULL
);
5703 walk_tree (&replace_info
->new_tree
, copy_tree_body_r
, id
, NULL
);
/* Depth-first traversal of the clone tree.  */
5707 node
= node
->clones
;
5708 else if (node
->next_sibling_clone
)
5709 node
= node
->next_sibling_clone
;
5712 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5713 node
= node
->clone_of
;
5714 if (node
!= id
->dst_node
)
5715 node
= node
->next_sibling_clone
;
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.
*/
void
tree_function_versioning (tree old_decl, tree new_decl,
			  vec<ipa_replace_map *, va_gc> *tree_map,
			  bool update_clones, bitmap args_to_skip,
			  bool skip_return, bitmap blocks_to_copy,
			  basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;
  /* By default skip the same params in debug info as in the code;
     entries are cleared below for params given explicit replacements.  */
  bitmap debug_args_to_skip = args_to_skip;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
	{
	  new_debug_args = decl_debug_args_insert (new_decl);
	  *new_debug_args = vec_safe_copy (*old_debug_args);
	}
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version. */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   new_entry ? new_entry->count : old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple *init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    int parm_num = -1;
	    if (!replace_info->old_tree)
	      {
		/* Only parm_num was given; look the PARM_DECL up by
		   position in OLD_DECL's argument chain.  */
		int p = replace_info->parm_num;
		tree parm;
		tree req_type, new_type;

		for (parm = DECL_ARGUMENTS (old_decl); p;
		     parm = DECL_CHAIN (parm))
		  p--;
		replace_info->old_tree = parm;
		parm_num = replace_info->parm_num;
		req_type = TREE_TYPE (parm);
		new_type = TREE_TYPE (replace_info->new_tree);
		if (!useless_type_conversion_p (req_type, new_type))
		  {
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			/* Replacement value can't be converted; drop the
			   substitution for this param.  */
			if (dump_file)
			  {
			    fprintf (dump_file, " const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree);
			    fprintf (dump_file,
				     " can't be converted to param ");
			    print_generic_expr (dump_file, parm);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree, id.src_fn,
					    NULL,
					    &vars);
		if (init)
		  init_stmts.safe_push (init);
		/* A param with an explicit replacement should still be
		   described in debug info, so clear its skip bit.  */
		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
		  {
		    if (parm_num == -1)
		      {
			tree parm;
			int p;
			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
			     parm = DECL_CHAIN (parm), p++)
			  if (parm == replace_info->old_tree)
			    {
			      parm_num = p;
			      break;
			    }
		      }
		    if (parm_num != -1)
		      {
			if (debug_args_to_skip == args_to_skip)
			  {
			    /* Copy-on-write: don't modify the caller's
			       ARGS_TO_SKIP bitmap.  */
			    debug_args_to_skip = BITMAP_ALLOC (NULL);
			    bitmap_copy (debug_args_to_skip, args_to_skip);
			  }
			bitmap_clear_bit (debug_args_to_skip, parm_num);
		      }
		  }
	      }
	  }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      /* The clone returns void: give it a fresh void RESULT_DECL.  */
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun,
					  DECL_RESULT (old_decl))))
	{
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination functions loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the Function's body.  */
  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  update_max_bb_count ();
  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      /* For each skipped register param, record a DEBUG_EXPR_DECL
	 alongside the param's DECL_ORIGIN in the decl's debug args.  */
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
	   parm; parm = DECL_CHAIN (parm), i++)
	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
	  {
	    tree ddecl;

	    if (debug_args == NULL)
	      {
		debug_args = decl_debug_args_insert (new_decl);
		len = vec_safe_length (*debug_args);
	      }
	    ddecl = make_node (DEBUG_EXPR_DECL);
	    DECL_ARTIFICIAL (ddecl) = 1;
	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	    vec_safe_push (*debug_args, ddecl);
	  }
      if (debug_args != NULL)
	{
	  /* On the callee side, add
	     DEBUG D#Y s=> parm
	     DEBUG var => D#Y
	     stmts to the first bb where var is a VAR_DECL created for the
	     optimized away parameter in DECL_INITIAL block.  This hints
	     in the debug info that var (whole DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but could be looked up at the
	     call site as value of D#X there.  */
	  tree var = vars, vexpr;
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;
	  tree parm;
	  i = vec_safe_length (*debug_args);
	  /* Walk the (origin, debug-decl) pairs pushed above, newest
	     first, back down to the entries that pre-existed (LEN).  */
	  do
	    {
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      vexpr = make_node (DEBUG_EXPR_DECL);
	      parm = (**debug_args)[i];
	      DECL_ARTIFICIAL (vexpr) = 1;
	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
/* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  Returns NULL_TREE if
   the callee is not inlinable this way (not "const", or no saved tree, or
   the copied body is not a plain value assignment).  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  Each PARM_DECL maps directly to the
	 corresponding call argument.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  With transform_return_to_modify set, the body
	 comes back as "retval = value"; hand back just the value.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
6152 /* Duplicate a type, fields and all. */
6155 build_duplicate_type (tree type
)
6157 struct copy_body_data id
;
6159 memset (&id
, 0, sizeof (id
));
6160 id
.src_fn
= current_function_decl
;
6161 id
.dst_fn
= current_function_decl
;
6163 id
.decl_map
= new hash_map
<tree
, tree
>;
6164 id
.debug_map
= NULL
;
6165 id
.copy_decl
= copy_decl_no_change
;
6167 type
= remap_type_1 (type
, &id
);
6171 delete id
.debug_map
;
6173 TYPE_CANONICAL (type
) = type
;
/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  PARMS is rebuilt as a fresh DECL_CHAIN of remapped
   PARM_DECLs; RESULT receives the remapped RESULT_DECL or NULL_TREE.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  /* P walks the tail of the chain being built in PARMS.  */
  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}