2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
42 #include "tree-iterator.h"
44 #include "gimple-fold.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
51 #include "tree-into-ssa.h"
57 #include "value-prof.h"
60 #include "stringpool.h"
64 /* I'm not real happy about this, but we need to handle gimple and
67 /* Inlining, Cloning, Versioning, Parallelization
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements are adjusted accordingly.
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
81 Versioning: a function body is duplicated and the result is a new
82 function rather than into blocks of an existing function as with
83 inlining. Some parameters will become constants.
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
89 All of these will simultaneously lookup any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined) those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
108 o Provide heuristics to clamp inlining of recursive template
112 /* Weights that estimate_num_insns uses to estimate the size of the
115 eni_weights eni_size_weights
;
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
120 eni_weights eni_time_weights
;
124 static tree
declare_return_variable (copy_body_data
*, tree
, tree
,
126 static void remap_block (tree
*, copy_body_data
*);
127 static void copy_bind_expr (tree
*, int *, copy_body_data
*);
128 static void declare_inline_vars (tree
, tree
);
129 static void remap_save_expr (tree
*, hash_map
<tree
, tree
> *, int *);
130 static void prepend_lexical_block (tree current_block
, tree new_block
);
131 static tree
copy_decl_to_var (tree
, copy_body_data
*);
132 static tree
copy_result_decl_to_var (tree
, copy_body_data
*);
133 static tree
copy_decl_maybe_to_var (tree
, copy_body_data
*);
134 static gimple_seq
remap_gimple_stmt (gimple
*, copy_body_data
*);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data
*id
);
136 static void insert_init_stmt (copy_body_data
*, basic_block
, gimple
*);
138 /* Insert a tree->tree mapping for ID. Despite the name suggests
139 that the trees should be variables, it is used for more than that. */
142 insert_decl_map (copy_body_data
*id
, tree key
, tree value
)
144 id
->decl_map
->put (key
, value
);
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
149 id
->decl_map
->put (value
, value
);
152 /* Insert a tree->tree mapping for ID. This is only used for
156 insert_debug_decl_map (copy_body_data
*id
, tree key
, tree value
)
158 if (!gimple_in_ssa_p (id
->src_cfun
))
161 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
164 if (!target_for_debug_bind (key
))
167 gcc_assert (TREE_CODE (key
) == PARM_DECL
);
168 gcc_assert (VAR_P (value
));
171 id
->debug_map
= new hash_map
<tree
, tree
>;
173 id
->debug_map
->put (key
, value
);
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;
182 /* Construct new SSA name for old NAME. ID is the inline context. */
185 remap_ssa_name (tree name
, copy_body_data
*id
)
190 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
192 n
= id
->decl_map
->get (name
);
194 return unshare_expr (*n
);
196 if (processing_debug_stmt
)
198 if (SSA_NAME_IS_DEFAULT_DEF (name
)
199 && TREE_CODE (SSA_NAME_VAR (name
)) == PARM_DECL
200 && id
->entry_bb
== NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)))
203 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
205 gimple_stmt_iterator gsi
;
206 tree val
= SSA_NAME_VAR (name
);
208 n
= id
->decl_map
->get (val
);
211 if (TREE_CODE (val
) != PARM_DECL
212 && !(VAR_P (val
) && DECL_ABSTRACT_ORIGIN (val
)))
214 processing_debug_stmt
= -1;
217 n
= id
->decl_map
->get (val
);
218 if (n
&& TREE_CODE (*n
) == DEBUG_EXPR_DECL
)
220 def_temp
= gimple_build_debug_source_bind (vexpr
, val
, NULL
);
221 DECL_ARTIFICIAL (vexpr
) = 1;
222 TREE_TYPE (vexpr
) = TREE_TYPE (name
);
223 SET_DECL_MODE (vexpr
, DECL_MODE (SSA_NAME_VAR (name
)));
224 gsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
225 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
226 insert_decl_map (id
, val
, vexpr
);
230 processing_debug_stmt
= -1;
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var
= SSA_NAME_VAR (name
);
237 || (!SSA_NAME_IS_DEFAULT_DEF (name
)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var
)
240 && DECL_ARTIFICIAL (var
)
241 && DECL_IGNORED_P (var
)
242 && !DECL_NAME (var
)))
244 struct ptr_info_def
*pi
;
245 new_tree
= make_ssa_name (remap_type (TREE_TYPE (name
), id
));
246 if (!var
&& SSA_NAME_IDENTIFIER (name
))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree
, SSA_NAME_IDENTIFIER (name
));
248 insert_decl_map (id
, name
, new_tree
);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id
->src_cfun
->gimple_df
253 && id
->src_cfun
->gimple_df
->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name
))
255 && (pi
= SSA_NAME_PTR_INFO (name
))
258 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
264 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
266 new_tree
= remap_decl (var
, id
);
268 /* We might've substituted constant or another SSA_NAME for
271 Replace the SSA name representing RESULT_DECL by variable during
272 inlining: this saves us from need to introduce PHI node in a case
273 return value is just partly initialized. */
274 if ((VAR_P (new_tree
) || TREE_CODE (new_tree
) == PARM_DECL
)
275 && (!SSA_NAME_VAR (name
)
276 || TREE_CODE (SSA_NAME_VAR (name
)) != RESULT_DECL
277 || !id
->transform_return_to_modify
))
279 struct ptr_info_def
*pi
;
280 new_tree
= make_ssa_name (new_tree
);
281 insert_decl_map (id
, name
, new_tree
);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id
->src_cfun
->gimple_df
286 && id
->src_cfun
->gimple_df
->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name
))
288 && (pi
= SSA_NAME_PTR_INFO (name
))
291 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
294 if (SSA_NAME_IS_DEFAULT_DEF (name
))
296 /* By inlining function having uninitialized variable, we might
297 extend the lifetime (variable might get reused). This cause
298 ICE in the case we end up extending lifetime of SSA name across
299 abnormal edge, but also increase register pressure.
301 We simply initialize all uninitialized vars by 0 except
302 for case we are inlining to very first BB. We can avoid
303 this for all BBs that are not inside strongly connected
304 regions of the CFG, but this is expensive to test. */
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
)
307 && (!SSA_NAME_VAR (name
)
308 || TREE_CODE (SSA_NAME_VAR (name
)) != PARM_DECL
)
309 && (id
->entry_bb
!= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
311 || EDGE_COUNT (id
->entry_bb
->preds
) != 1))
313 gimple_stmt_iterator gsi
= gsi_last_bb (id
->entry_bb
);
315 tree zero
= build_zero_cst (TREE_TYPE (new_tree
));
317 init_stmt
= gimple_build_assign (new_tree
, zero
);
318 gsi_insert_after (&gsi
, init_stmt
, GSI_NEW_STMT
);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree
) = 0;
323 SSA_NAME_DEF_STMT (new_tree
) = gimple_build_nop ();
324 set_ssa_default_def (cfun
, SSA_NAME_VAR (new_tree
), new_tree
);
329 insert_decl_map (id
, name
, new_tree
);
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
336 remap_decl (tree decl
, copy_body_data
*id
)
340 /* We only remap local variables in the current function. */
342 /* See if we have remapped this declaration. */
344 n
= id
->decl_map
->get (decl
);
346 if (!n
&& processing_debug_stmt
)
348 processing_debug_stmt
= -1;
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
356 && id
->prevent_decl_creation_for_types
357 && id
->remapping_type_depth
> 0
358 && (VAR_P (decl
) || TREE_CODE (decl
) == PARM_DECL
))
361 /* If we didn't already have an equivalent for this declaration, create one
365 /* Make a copy of the variable or label. */
366 tree t
= id
->copy_decl (decl
, id
);
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id
, decl
, t
);
376 /* Remap types, if necessary. */
377 TREE_TYPE (t
) = remap_type (TREE_TYPE (t
), id
);
378 if (TREE_CODE (t
) == TYPE_DECL
)
380 DECL_ORIGINAL_TYPE (t
) = remap_type (DECL_ORIGINAL_TYPE (t
), id
);
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t
) == TREE_TYPE (t
))
387 tree x
= build_variant_type_copy (TREE_TYPE (t
));
388 TYPE_STUB_DECL (x
) = TYPE_STUB_DECL (TREE_TYPE (t
));
389 TYPE_NAME (x
) = TYPE_NAME (TREE_TYPE (t
));
390 DECL_ORIGINAL_TYPE (t
) = x
;
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t
), copy_tree_body_r
, id
, NULL
);
396 walk_tree (&DECL_SIZE_UNIT (t
), copy_tree_body_r
, id
, NULL
);
398 /* If fields, do likewise for offset and qualifier. */
399 if (TREE_CODE (t
) == FIELD_DECL
)
401 walk_tree (&DECL_FIELD_OFFSET (t
), copy_tree_body_r
, id
, NULL
);
402 if (TREE_CODE (DECL_CONTEXT (t
)) == QUAL_UNION_TYPE
)
403 walk_tree (&DECL_QUALIFIER (t
), copy_tree_body_r
, id
, NULL
);
409 if (id
->do_not_unshare
)
412 return unshare_expr (*n
);
416 remap_type_1 (tree type
, copy_body_data
*id
)
420 /* We do need a copy. build and register it now. If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
423 if (TREE_CODE (type
) == POINTER_TYPE
)
425 new_tree
= build_pointer_type_for_mode (remap_type (TREE_TYPE (type
), id
),
427 TYPE_REF_CAN_ALIAS_ALL (type
));
428 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
429 new_tree
= build_type_attribute_qual_variant (new_tree
,
430 TYPE_ATTRIBUTES (type
),
432 insert_decl_map (id
, type
, new_tree
);
435 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
437 new_tree
= build_reference_type_for_mode (remap_type (TREE_TYPE (type
), id
),
439 TYPE_REF_CAN_ALIAS_ALL (type
));
440 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
441 new_tree
= build_type_attribute_qual_variant (new_tree
,
442 TYPE_ATTRIBUTES (type
),
444 insert_decl_map (id
, type
, new_tree
);
448 new_tree
= copy_node (type
);
450 insert_decl_map (id
, type
, new_tree
);
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t
= TYPE_MAIN_VARIANT (type
);
457 t
= remap_type (t
, id
);
458 TYPE_MAIN_VARIANT (new_tree
) = t
;
459 TYPE_NEXT_VARIANT (new_tree
) = TYPE_NEXT_VARIANT (t
);
460 TYPE_NEXT_VARIANT (t
) = new_tree
;
464 TYPE_MAIN_VARIANT (new_tree
) = new_tree
;
465 TYPE_NEXT_VARIANT (new_tree
) = NULL
;
468 if (TYPE_STUB_DECL (type
))
469 TYPE_STUB_DECL (new_tree
) = remap_decl (TYPE_STUB_DECL (type
), id
);
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree
) = NULL
;
473 TYPE_REFERENCE_TO (new_tree
) = NULL
;
475 /* Copy all types that may contain references to local variables; be sure to
476 preserve sharing in between type and its main variant when possible. */
477 switch (TREE_CODE (new_tree
))
481 case FIXED_POINT_TYPE
:
484 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
486 gcc_checking_assert (TYPE_MIN_VALUE (type
) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type
)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type
) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type
)));
489 TYPE_MIN_VALUE (new_tree
) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree
));
490 TYPE_MAX_VALUE (new_tree
) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree
));
494 t
= TYPE_MIN_VALUE (new_tree
);
495 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
496 walk_tree (&TYPE_MIN_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
498 t
= TYPE_MAX_VALUE (new_tree
);
499 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
500 walk_tree (&TYPE_MAX_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
505 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
506 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
507 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
509 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
510 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
511 && TYPE_ARG_TYPES (type
) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type
)))
512 TYPE_ARG_TYPES (new_tree
) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree
));
514 walk_tree (&TYPE_ARG_TYPES (new_tree
), copy_tree_body_r
, id
, NULL
);
518 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
519 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
520 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
522 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
524 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
526 gcc_checking_assert (TYPE_DOMAIN (type
)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type
)));
528 TYPE_DOMAIN (new_tree
) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree
));
532 TYPE_DOMAIN (new_tree
) = remap_type (TYPE_DOMAIN (new_tree
), id
);
533 /* For array bounds where we have decided not to copy over the bounds
534 variable which isn't used in OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree
)) == error_mark_node
537 && id
->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) != error_mark_node
)
540 tree v
= create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree
)));
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE
,
543 DECL_ATTRIBUTES (v
));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree
)) = v
;
551 case QUAL_UNION_TYPE
:
552 if (TYPE_MAIN_VARIANT (type
) != type
553 && TYPE_FIELDS (type
) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type
)))
554 TYPE_FIELDS (new_tree
) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree
));
559 for (f
= TYPE_FIELDS (new_tree
); f
; f
= DECL_CHAIN (f
))
561 t
= remap_decl (f
, id
);
562 DECL_CONTEXT (t
) = new_tree
;
566 TYPE_FIELDS (new_tree
) = nreverse (nf
);
572 /* Shouldn't have been thought variable sized. */
576 /* All variants of type share the same size, so use the already remaped data. */
577 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
579 tree s
= TYPE_SIZE (type
);
580 tree mvs
= TYPE_SIZE (TYPE_MAIN_VARIANT (type
));
581 tree su
= TYPE_SIZE_UNIT (type
);
582 tree mvsu
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
583 gcc_checking_assert ((TREE_CODE (s
) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs
) == PLACEHOLDER_EXPR
))
586 gcc_checking_assert ((TREE_CODE (su
) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu
) == PLACEHOLDER_EXPR
))
589 TYPE_SIZE (new_tree
) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree
));
590 TYPE_SIZE_UNIT (new_tree
) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree
));
594 walk_tree (&TYPE_SIZE (new_tree
), copy_tree_body_r
, id
, NULL
);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree
), copy_tree_body_r
, id
, NULL
);
602 remap_type (tree type
, copy_body_data
*id
)
610 /* See if we have remapped this type. */
611 node
= id
->decl_map
->get (type
);
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type
, id
->src_fn
))
618 insert_decl_map (id
, type
, type
);
622 id
->remapping_type_depth
++;
623 tmp
= remap_type_1 (type
, id
);
624 id
->remapping_type_depth
--;
629 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
632 can_be_nonlocal (tree decl
, copy_body_data
*id
)
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl
) == FUNCTION_DECL
)
638 /* Local static vars must be non-local or we get multiple declaration
640 if (VAR_P (decl
) && !auto_var_in_fn_p (decl
, id
->src_fn
))
647 remap_decls (tree decls
, vec
<tree
, va_gc
> **nonlocalized_list
,
651 tree new_decls
= NULL_TREE
;
653 /* Remap its variables. */
654 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
658 if (can_be_nonlocal (old_var
, id
))
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var
) && ! DECL_EXTERNAL (old_var
) && cfun
)
663 add_local_decl (cfun
, old_var
);
664 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
665 && !DECL_IGNORED_P (old_var
)
666 && nonlocalized_list
)
667 vec_safe_push (*nonlocalized_list
, old_var
);
671 /* Remap the variable. */
672 new_var
= remap_decl (old_var
, id
);
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
678 if (new_var
== id
->retvar
)
682 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
683 && !DECL_IGNORED_P (old_var
)
684 && nonlocalized_list
)
685 vec_safe_push (*nonlocalized_list
, old_var
);
689 gcc_assert (DECL_P (new_var
));
690 DECL_CHAIN (new_var
) = new_decls
;
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var
) && DECL_HAS_VALUE_EXPR_P (new_var
))
696 tree tem
= DECL_VALUE_EXPR (new_var
);
697 bool old_regimplify
= id
->regimplify
;
698 id
->remapping_type_depth
++;
699 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
700 id
->remapping_type_depth
--;
701 id
->regimplify
= old_regimplify
;
702 SET_DECL_VALUE_EXPR (new_var
, tem
);
707 return nreverse (new_decls
);
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein. And hook the new block into the block-tree. */
714 remap_block (tree
*block
, copy_body_data
*id
)
719 /* Make the new block. */
721 new_block
= make_node (BLOCK
);
722 TREE_USED (new_block
) = TREE_USED (old_block
);
723 BLOCK_ABSTRACT_ORIGIN (new_block
) = BLOCK_ORIGIN (old_block
);
724 BLOCK_SOURCE_LOCATION (new_block
) = BLOCK_SOURCE_LOCATION (old_block
);
725 BLOCK_NONLOCALIZED_VARS (new_block
)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block
));
729 /* Remap its variables. */
730 BLOCK_VARS (new_block
) = remap_decls (BLOCK_VARS (old_block
),
731 &BLOCK_NONLOCALIZED_VARS (new_block
),
734 if (id
->transform_lang_insert_block
)
735 id
->transform_lang_insert_block (new_block
);
737 /* Remember the remapped block. */
738 insert_decl_map (id
, old_block
, new_block
);
741 /* Copy the whole block tree and root it in id->block. */
744 remap_blocks (tree block
, copy_body_data
*id
)
747 tree new_tree
= block
;
752 remap_block (&new_tree
, id
);
753 gcc_assert (new_tree
!= block
);
754 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
755 prepend_lexical_block (new_tree
, remap_blocks (t
, id
));
756 /* Blocks are in arbitrary order, but make things slightly prettier and do
757 not swap order when producing a copy. */
758 BLOCK_SUBBLOCKS (new_tree
) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree
));
762 /* Remap the block tree rooted at BLOCK to nothing. */
765 remap_blocks_to_null (tree block
, copy_body_data
*id
)
768 insert_decl_map (id
, block
, NULL_TREE
);
769 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
770 remap_blocks_to_null (t
, id
);
773 /* Remap the location info pointed to by LOCUS. */
776 remap_location (location_t locus
, copy_body_data
*id
)
778 if (LOCATION_BLOCK (locus
))
780 tree
*n
= id
->decl_map
->get (LOCATION_BLOCK (locus
));
783 return set_block (locus
, *n
);
786 locus
= LOCATION_LOCUS (locus
);
788 if (locus
!= UNKNOWN_LOCATION
&& id
->block
)
789 return set_block (locus
, id
->block
);
795 copy_statement_list (tree
*tp
)
797 tree_stmt_iterator oi
, ni
;
800 new_tree
= alloc_stmt_list ();
801 ni
= tsi_start (new_tree
);
802 oi
= tsi_start (*tp
);
803 TREE_TYPE (new_tree
) = TREE_TYPE (*tp
);
806 for (; !tsi_end_p (oi
); tsi_next (&oi
))
808 tree stmt
= tsi_stmt (oi
);
809 if (TREE_CODE (stmt
) == STATEMENT_LIST
)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt
);
814 tsi_link_after (&ni
, stmt
, TSI_CONTINUE_LINKING
);
819 copy_bind_expr (tree
*tp
, int *walk_subtrees
, copy_body_data
*id
)
821 tree block
= BIND_EXPR_BLOCK (*tp
);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp
, walk_subtrees
, NULL
);
826 remap_block (&block
, id
);
827 BIND_EXPR_BLOCK (*tp
) = block
;
830 if (BIND_EXPR_VARS (*tp
))
831 /* This will remap a lot of the same decls again, but this should be
833 BIND_EXPR_VARS (*tp
) = remap_decls (BIND_EXPR_VARS (*tp
), NULL
, id
);
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
841 remap_gimple_seq (gimple_seq body
, copy_body_data
*id
)
843 gimple_stmt_iterator si
;
844 gimple_seq new_body
= NULL
;
846 for (si
= gsi_start (body
); !gsi_end_p (si
); gsi_next (&si
))
848 gimple_seq new_stmts
= remap_gimple_stmt (gsi_stmt (si
), id
);
849 gimple_seq_add_seq (&new_body
, new_stmts
);
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
860 copy_gimple_bind (gbind
*stmt
, copy_body_data
*id
)
863 tree new_block
, new_vars
;
864 gimple_seq body
, new_body
;
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body
= gimple_bind_body (stmt
);
869 new_body
= remap_gimple_seq (body
, id
);
871 new_block
= gimple_bind_block (stmt
);
873 remap_block (&new_block
, id
);
875 /* This will remap a lot of the same decls again, but this should be
877 new_vars
= gimple_bind_vars (stmt
);
879 new_vars
= remap_decls (new_vars
, NULL
, id
);
881 new_bind
= gimple_build_bind (new_vars
, new_body
, new_block
);
886 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
891 if (TREE_CODE (decl
) == SSA_NAME
)
893 decl
= SSA_NAME_VAR (decl
);
898 return (TREE_CODE (decl
) == PARM_DECL
);
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
904 static unsigned short
905 remap_dependence_clique (copy_body_data
*id
, unsigned short clique
)
907 if (clique
== 0 || processing_debug_stmt
)
909 if (!id
->dependence_map
)
910 id
->dependence_map
= new hash_map
<dependence_hash
, unsigned short>;
912 unsigned short &newc
= id
->dependence_map
->get_or_insert (clique
, &existed
);
914 newc
= ++cfun
->last_clique
;
918 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
919 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
920 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
921 recursing into the children nodes of *TP. */
924 remap_gimple_op_r (tree
*tp
, int *walk_subtrees
, void *data
)
926 struct walk_stmt_info
*wi_p
= (struct walk_stmt_info
*) data
;
927 copy_body_data
*id
= (copy_body_data
*) wi_p
->info
;
928 tree fn
= id
->src_fn
;
930 /* For recursive invocations this is no longer the LHS itself. */
931 bool is_lhs
= wi_p
->is_lhs
;
932 wi_p
->is_lhs
= false;
934 if (TREE_CODE (*tp
) == SSA_NAME
)
936 *tp
= remap_ssa_name (*tp
, id
);
939 SSA_NAME_DEF_STMT (*tp
) = wi_p
->stmt
;
942 else if (auto_var_in_fn_p (*tp
, fn
))
944 /* Local variables and labels need to be replaced by equivalent
945 variables. We don't want to copy static variables; there's
946 only one of those, no matter how many times we inline the
947 containing function. Similarly for globals from an outer
951 /* Remap the declaration. */
952 new_decl
= remap_decl (*tp
, id
);
953 gcc_assert (new_decl
);
954 /* Replace this variable with the copy. */
955 STRIP_TYPE_NOPS (new_decl
);
956 /* ??? The C++ frontend uses void * pointer zero to initialize
957 any other type. This confuses the middle-end type verification.
958 As cloned bodies do not go through gimplification again the fixup
959 there doesn't trigger. */
960 if (TREE_CODE (new_decl
) == INTEGER_CST
961 && !useless_type_conversion_p (TREE_TYPE (*tp
), TREE_TYPE (new_decl
)))
962 new_decl
= fold_convert (TREE_TYPE (*tp
), new_decl
);
966 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
968 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
970 else if (TREE_CODE (*tp
) == LABEL_DECL
971 && (!DECL_CONTEXT (*tp
)
972 || decl_function_context (*tp
) == id
->src_fn
))
973 /* These may need to be remapped for EH handling. */
974 *tp
= remap_decl (*tp
, id
);
975 else if (TREE_CODE (*tp
) == FIELD_DECL
)
977 /* If the enclosing record type is variably_modified_type_p, the field
978 has already been remapped. Otherwise, it need not be. */
979 tree
*n
= id
->decl_map
->get (*tp
);
984 else if (TYPE_P (*tp
))
985 /* Types may need remapping as well. */
986 *tp
= remap_type (*tp
, id
);
987 else if (CONSTANT_CLASS_P (*tp
))
989 /* If this is a constant, we have to copy the node iff the type
990 will be remapped. copy_tree_r will not copy a constant. */
991 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
993 if (new_type
== TREE_TYPE (*tp
))
996 else if (TREE_CODE (*tp
) == INTEGER_CST
)
997 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
1000 *tp
= copy_node (*tp
);
1001 TREE_TYPE (*tp
) = new_type
;
1006 /* Otherwise, just copy the node. Note that copy_tree_r already
1007 knows not to copy VAR_DECLs, etc., so this is safe. */
1009 if (TREE_CODE (*tp
) == MEM_REF
)
1011 /* We need to re-canonicalize MEM_REFs from inline substitutions
1012 that can happen when a pointer argument is an ADDR_EXPR.
1013 Recurse here manually to allow that. */
1014 tree ptr
= TREE_OPERAND (*tp
, 0);
1015 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1017 walk_tree (&ptr
, remap_gimple_op_r
, data
, NULL
);
1018 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1019 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1020 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1021 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1022 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1024 MR_DEPENDENCE_CLIQUE (*tp
)
1025 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1026 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1028 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1029 remapped a parameter as the property might be valid only
1030 for the parameter itself. */
1031 if (TREE_THIS_NOTRAP (old
)
1032 && (!is_parm (TREE_OPERAND (old
, 0))
1033 || (!id
->transform_parameter
&& is_parm (ptr
))))
1034 TREE_THIS_NOTRAP (*tp
) = 1;
1035 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1040 /* Here is the "usual case". Copy this tree node, and then
1041 tweak some special cases. */
1042 copy_tree_r (tp
, walk_subtrees
, NULL
);
1044 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1045 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1047 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1049 /* The copied TARGET_EXPR has never been expanded, even if the
1050 original node was expanded already. */
1051 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1052 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1054 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1056 /* Variable substitution need not be simple. In particular,
1057 the MEM_REF substitution above. Make sure that
1058 TREE_CONSTANT and friends are up-to-date. */
1059 int invariant
= is_gimple_min_invariant (*tp
);
1060 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
1061 recompute_tree_invariant_for_addr_expr (*tp
);
1063 /* If this used to be invariant, but is not any longer,
1064 then regimplification is probably needed. */
1065 if (invariant
&& !is_gimple_min_invariant (*tp
))
1066 id
->regimplify
= true;
1072 /* Update the TREE_BLOCK for the cloned expr. */
1075 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1076 tree old_block
= TREE_BLOCK (*tp
);
1080 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1084 TREE_SET_BLOCK (*tp
, new_block
);
1087 /* Keep iterating. */
1092 /* Called from copy_body_id via walk_tree. DATA is really a
1093 `copy_body_data *'. */
1096 copy_tree_body_r (tree
*tp
, int *walk_subtrees
, void *data
)
1098 copy_body_data
*id
= (copy_body_data
*) data
;
1099 tree fn
= id
->src_fn
;
1102 /* Begin by recognizing trees that we'll completely rewrite for the
1103 inlining context. Our output for these trees is completely
1104 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1105 into an edge). Further down, we'll handle trees that get
1106 duplicated and/or tweaked. */
1108 /* When requested, RETURN_EXPRs should be transformed to just the
1109 contained MODIFY_EXPR. The branch semantics of the return will
1110 be handled elsewhere by manipulating the CFG rather than a statement. */
1111 if (TREE_CODE (*tp
) == RETURN_EXPR
&& id
->transform_return_to_modify
)
1113 tree assignment
= TREE_OPERAND (*tp
, 0);
1115 /* If we're returning something, just turn that into an
1116 assignment into the equivalent of the original RESULT_DECL.
1117 If the "assignment" is just the result decl, the result
1118 decl has already been set (e.g. a recent "foo (&result_decl,
1119 ...)"); just toss the entire RETURN_EXPR. */
1120 if (assignment
&& TREE_CODE (assignment
) == MODIFY_EXPR
)
1122 /* Replace the RETURN_EXPR with (a copy of) the
1123 MODIFY_EXPR hanging underneath. */
1124 *tp
= copy_node (assignment
);
1126 else /* Else the RETURN_EXPR returns no value. */
1129 return (tree
) (void *)1;
1132 else if (TREE_CODE (*tp
) == SSA_NAME
)
1134 *tp
= remap_ssa_name (*tp
, id
);
1139 /* Local variables and labels need to be replaced by equivalent
1140 variables. We don't want to copy static variables; there's only
1141 one of those, no matter how many times we inline the containing
1142 function. Similarly for globals from an outer function. */
1143 else if (auto_var_in_fn_p (*tp
, fn
))
1147 /* Remap the declaration. */
1148 new_decl
= remap_decl (*tp
, id
);
1149 gcc_assert (new_decl
);
1150 /* Replace this variable with the copy. */
1151 STRIP_TYPE_NOPS (new_decl
);
1155 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1156 copy_statement_list (tp
);
1157 else if (TREE_CODE (*tp
) == SAVE_EXPR
1158 || TREE_CODE (*tp
) == TARGET_EXPR
)
1159 remap_save_expr (tp
, id
->decl_map
, walk_subtrees
);
1160 else if (TREE_CODE (*tp
) == LABEL_DECL
1161 && (! DECL_CONTEXT (*tp
)
1162 || decl_function_context (*tp
) == id
->src_fn
))
1163 /* These may need to be remapped for EH handling. */
1164 *tp
= remap_decl (*tp
, id
);
1165 else if (TREE_CODE (*tp
) == BIND_EXPR
)
1166 copy_bind_expr (tp
, walk_subtrees
, id
);
1167 /* Types may need remapping as well. */
1168 else if (TYPE_P (*tp
))
1169 *tp
= remap_type (*tp
, id
);
1171 /* If this is a constant, we have to copy the node iff the type will be
1172 remapped. copy_tree_r will not copy a constant. */
1173 else if (CONSTANT_CLASS_P (*tp
))
1175 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1177 if (new_type
== TREE_TYPE (*tp
))
1180 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1181 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
1184 *tp
= copy_node (*tp
);
1185 TREE_TYPE (*tp
) = new_type
;
1189 /* Otherwise, just copy the node. Note that copy_tree_r already
1190 knows not to copy VAR_DECLs, etc., so this is safe. */
1193 /* Here we handle trees that are not completely rewritten.
1194 First we detect some inlining-induced bogosities for
1196 if (TREE_CODE (*tp
) == MODIFY_EXPR
1197 && TREE_OPERAND (*tp
, 0) == TREE_OPERAND (*tp
, 1)
1198 && (auto_var_in_fn_p (TREE_OPERAND (*tp
, 0), fn
)))
1200 /* Some assignments VAR = VAR; don't generate any rtl code
1201 and thus don't count as variable modification. Avoid
1202 keeping bogosities like 0 = 0. */
1203 tree decl
= TREE_OPERAND (*tp
, 0), value
;
1206 n
= id
->decl_map
->get (decl
);
1210 STRIP_TYPE_NOPS (value
);
1211 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1213 *tp
= build_empty_stmt (EXPR_LOCATION (*tp
));
1214 return copy_tree_body_r (tp
, walk_subtrees
, data
);
1218 else if (TREE_CODE (*tp
) == INDIRECT_REF
)
1220 /* Get rid of *& from inline substitutions that can happen when a
1221 pointer argument is an ADDR_EXPR. */
1222 tree decl
= TREE_OPERAND (*tp
, 0);
1223 tree
*n
= id
->decl_map
->get (decl
);
1226 /* If we happen to get an ADDR_EXPR in n->value, strip
1227 it manually here as we'll eventually get ADDR_EXPRs
1228 which lie about their types pointed to. In this case
1229 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1230 but we absolutely rely on that. As fold_indirect_ref
1231 does other useful transformations, try that first, though. */
1232 tree type
= TREE_TYPE (*tp
);
1233 tree ptr
= id
->do_not_unshare
? *n
: unshare_expr (*n
);
1235 *tp
= gimple_fold_indirect_ref (ptr
);
1238 type
= remap_type (type
, id
);
1239 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1242 = fold_indirect_ref_1 (EXPR_LOCATION (ptr
), type
, ptr
);
1243 /* ??? We should either assert here or build
1244 a VIEW_CONVERT_EXPR instead of blindly leaking
1245 incompatible types to our IL. */
1247 *tp
= TREE_OPERAND (ptr
, 0);
1251 *tp
= build1 (INDIRECT_REF
, type
, ptr
);
1252 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1253 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1254 TREE_READONLY (*tp
) = TREE_READONLY (old
);
1255 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1256 have remapped a parameter as the property might be
1257 valid only for the parameter itself. */
1258 if (TREE_THIS_NOTRAP (old
)
1259 && (!is_parm (TREE_OPERAND (old
, 0))
1260 || (!id
->transform_parameter
&& is_parm (ptr
))))
1261 TREE_THIS_NOTRAP (*tp
) = 1;
1268 else if (TREE_CODE (*tp
) == MEM_REF
)
1270 /* We need to re-canonicalize MEM_REFs from inline substitutions
1271 that can happen when a pointer argument is an ADDR_EXPR.
1272 Recurse here manually to allow that. */
1273 tree ptr
= TREE_OPERAND (*tp
, 0);
1274 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1276 walk_tree (&ptr
, copy_tree_body_r
, data
, NULL
);
1277 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1278 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1279 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1280 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1281 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1283 MR_DEPENDENCE_CLIQUE (*tp
)
1284 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1285 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1287 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1288 remapped a parameter as the property might be valid only
1289 for the parameter itself. */
1290 if (TREE_THIS_NOTRAP (old
)
1291 && (!is_parm (TREE_OPERAND (old
, 0))
1292 || (!id
->transform_parameter
&& is_parm (ptr
))))
1293 TREE_THIS_NOTRAP (*tp
) = 1;
1294 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1299 /* Here is the "usual case". Copy this tree node, and then
1300 tweak some special cases. */
1301 copy_tree_r (tp
, walk_subtrees
, NULL
);
1303 /* If EXPR has block defined, map it to newly constructed block.
1304 When inlining we want EXPRs without block appear in the block
1305 of function call if we are not remapping a type. */
1308 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1309 if (TREE_BLOCK (*tp
))
1312 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1316 TREE_SET_BLOCK (*tp
, new_block
);
1319 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1320 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1322 /* The copied TARGET_EXPR has never been expanded, even if the
1323 original node was expanded already. */
1324 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1326 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1327 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1330 /* Variable substitution need not be simple. In particular, the
1331 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1332 and friends are up-to-date. */
1333 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1335 int invariant
= is_gimple_min_invariant (*tp
);
1336 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1338 /* Handle the case where we substituted an INDIRECT_REF
1339 into the operand of the ADDR_EXPR. */
1340 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
)
1342 tree t
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1343 if (TREE_TYPE (t
) != TREE_TYPE (*tp
))
1344 t
= fold_convert (remap_type (TREE_TYPE (*tp
), id
), t
);
1348 recompute_tree_invariant_for_addr_expr (*tp
);
1350 /* If this used to be invariant, but is not any longer,
1351 then regimplification is probably needed. */
1352 if (invariant
&& !is_gimple_min_invariant (*tp
))
1353 id
->regimplify
= true;
1359 /* Keep iterating. */
1363 /* Helper for remap_gimple_stmt. Given an EH region number for the
1364 source function, map that to the duplicate EH region number in
1365 the destination function. */
1368 remap_eh_region_nr (int old_nr
, copy_body_data
*id
)
1370 eh_region old_r
, new_r
;
1372 old_r
= get_eh_region_from_number_fn (id
->src_cfun
, old_nr
);
1373 new_r
= static_cast<eh_region
> (*id
->eh_map
->get (old_r
));
1375 return new_r
->index
;
1378 /* Similar, but operate on INTEGER_CSTs. */
1381 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1385 old_nr
= tree_to_shwi (old_t_nr
);
1386 new_nr
= remap_eh_region_nr (old_nr
, id
);
1388 return build_int_cst (integer_type_node
, new_nr
);
1391 /* Helper for copy_bb. Remap statement STMT using the inlining
1392 information in ID. Return the new statement copy. */
1395 remap_gimple_stmt (gimple
*stmt
, copy_body_data
*id
)
1397 gimple
*copy
= NULL
;
1398 struct walk_stmt_info wi
;
1399 bool skip_first
= false;
1400 gimple_seq stmts
= NULL
;
1402 if (is_gimple_debug (stmt
)
1403 && (gimple_debug_nonbind_marker_p (stmt
)
1404 ? !DECL_STRUCT_FUNCTION (id
->dst_fn
)->debug_nonbind_markers
1405 : !opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
)))
1408 /* Begin by recognizing trees that we'll completely rewrite for the
1409 inlining context. Our output for these trees is completely
1410 different from our input (e.g. RETURN_EXPR is deleted and morphs
1411 into an edge). Further down, we'll handle trees that get
1412 duplicated and/or tweaked. */
1414 /* When requested, GIMPLE_RETURN should be transformed to just the
1415 contained GIMPLE_ASSIGN. The branch semantics of the return will
1416 be handled elsewhere by manipulating the CFG rather than the
1418 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1420 tree retval
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1422 /* If we're returning something, just turn that into an
1423 assignment to the equivalent of the original RESULT_DECL.
1424 If RETVAL is just the result decl, the result decl has
1425 already been set (e.g. a recent "foo (&result_decl, ...)");
1426 just toss the entire GIMPLE_RETURN. */
1428 && (TREE_CODE (retval
) != RESULT_DECL
1429 && (TREE_CODE (retval
) != SSA_NAME
1430 || ! SSA_NAME_VAR (retval
)
1431 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1433 copy
= gimple_build_assign (id
->do_not_unshare
1434 ? id
->retvar
: unshare_expr (id
->retvar
),
1436 /* id->retvar is already substituted. Skip it on later remapping. */
1442 else if (gimple_has_substatements (stmt
))
1446 /* When cloning bodies from the C++ front end, we will be handed bodies
1447 in High GIMPLE form. Handle here all the High GIMPLE statements that
1448 have embedded statements. */
1449 switch (gimple_code (stmt
))
1452 copy
= copy_gimple_bind (as_a
<gbind
*> (stmt
), id
);
1457 gcatch
*catch_stmt
= as_a
<gcatch
*> (stmt
);
1458 s1
= remap_gimple_seq (gimple_catch_handler (catch_stmt
), id
);
1459 copy
= gimple_build_catch (gimple_catch_types (catch_stmt
), s1
);
1463 case GIMPLE_EH_FILTER
:
1464 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1465 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1469 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1470 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1471 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1474 case GIMPLE_WITH_CLEANUP_EXPR
:
1475 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1476 copy
= gimple_build_wce (s1
);
1479 case GIMPLE_OMP_PARALLEL
:
1481 gomp_parallel
*omp_par_stmt
= as_a
<gomp_parallel
*> (stmt
);
1482 s1
= remap_gimple_seq (gimple_omp_body (omp_par_stmt
), id
);
1483 copy
= gimple_build_omp_parallel
1485 gimple_omp_parallel_clauses (omp_par_stmt
),
1486 gimple_omp_parallel_child_fn (omp_par_stmt
),
1487 gimple_omp_parallel_data_arg (omp_par_stmt
));
1491 case GIMPLE_OMP_TASK
:
1492 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1493 copy
= gimple_build_omp_task
1495 gimple_omp_task_clauses (stmt
),
1496 gimple_omp_task_child_fn (stmt
),
1497 gimple_omp_task_data_arg (stmt
),
1498 gimple_omp_task_copy_fn (stmt
),
1499 gimple_omp_task_arg_size (stmt
),
1500 gimple_omp_task_arg_align (stmt
));
1503 case GIMPLE_OMP_FOR
:
1504 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1505 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1506 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1507 gimple_omp_for_clauses (stmt
),
1508 gimple_omp_for_collapse (stmt
), s2
);
1511 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1513 gimple_omp_for_set_index (copy
, i
,
1514 gimple_omp_for_index (stmt
, i
));
1515 gimple_omp_for_set_initial (copy
, i
,
1516 gimple_omp_for_initial (stmt
, i
));
1517 gimple_omp_for_set_final (copy
, i
,
1518 gimple_omp_for_final (stmt
, i
));
1519 gimple_omp_for_set_incr (copy
, i
,
1520 gimple_omp_for_incr (stmt
, i
));
1521 gimple_omp_for_set_cond (copy
, i
,
1522 gimple_omp_for_cond (stmt
, i
));
1527 case GIMPLE_OMP_MASTER
:
1528 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1529 copy
= gimple_build_omp_master (s1
);
1532 case GIMPLE_OMP_TASKGROUP
:
1533 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1534 copy
= gimple_build_omp_taskgroup
1535 (s1
, gimple_omp_taskgroup_clauses (stmt
));
1538 case GIMPLE_OMP_ORDERED
:
1539 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1540 copy
= gimple_build_omp_ordered
1542 gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
)));
1545 case GIMPLE_OMP_SECTION
:
1546 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1547 copy
= gimple_build_omp_section (s1
);
1550 case GIMPLE_OMP_SECTIONS
:
1551 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1552 copy
= gimple_build_omp_sections
1553 (s1
, gimple_omp_sections_clauses (stmt
));
1556 case GIMPLE_OMP_SINGLE
:
1557 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1558 copy
= gimple_build_omp_single
1559 (s1
, gimple_omp_single_clauses (stmt
));
1562 case GIMPLE_OMP_TARGET
:
1563 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1564 copy
= gimple_build_omp_target
1565 (s1
, gimple_omp_target_kind (stmt
),
1566 gimple_omp_target_clauses (stmt
));
1569 case GIMPLE_OMP_TEAMS
:
1570 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1571 copy
= gimple_build_omp_teams
1572 (s1
, gimple_omp_teams_clauses (stmt
));
1575 case GIMPLE_OMP_CRITICAL
:
1576 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1577 copy
= gimple_build_omp_critical (s1
,
1578 gimple_omp_critical_name
1579 (as_a
<gomp_critical
*> (stmt
)),
1580 gimple_omp_critical_clauses
1581 (as_a
<gomp_critical
*> (stmt
)));
1584 case GIMPLE_TRANSACTION
:
1586 gtransaction
*old_trans_stmt
= as_a
<gtransaction
*> (stmt
);
1587 gtransaction
*new_trans_stmt
;
1588 s1
= remap_gimple_seq (gimple_transaction_body (old_trans_stmt
),
1590 copy
= new_trans_stmt
= gimple_build_transaction (s1
);
1591 gimple_transaction_set_subcode (new_trans_stmt
,
1592 gimple_transaction_subcode (old_trans_stmt
));
1593 gimple_transaction_set_label_norm (new_trans_stmt
,
1594 gimple_transaction_label_norm (old_trans_stmt
));
1595 gimple_transaction_set_label_uninst (new_trans_stmt
,
1596 gimple_transaction_label_uninst (old_trans_stmt
));
1597 gimple_transaction_set_label_over (new_trans_stmt
,
1598 gimple_transaction_label_over (old_trans_stmt
));
1608 if (gimple_assign_copy_p (stmt
)
1609 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1610 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1612 /* Here we handle statements that are not completely rewritten.
1613 First we detect some inlining-induced bogosities for
1616 /* Some assignments VAR = VAR; don't generate any rtl code
1617 and thus don't count as variable modification. Avoid
1618 keeping bogosities like 0 = 0. */
1619 tree decl
= gimple_assign_lhs (stmt
), value
;
1622 n
= id
->decl_map
->get (decl
);
1626 STRIP_TYPE_NOPS (value
);
1627 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1632 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1633 in a block that we aren't copying during tree_function_versioning,
1634 just drop the clobber stmt. */
1635 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1637 tree lhs
= gimple_assign_lhs (stmt
);
1638 if (TREE_CODE (lhs
) == MEM_REF
1639 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1641 gimple
*def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1642 if (gimple_bb (def_stmt
)
1643 && !bitmap_bit_p (id
->blocks_to_copy
,
1644 gimple_bb (def_stmt
)->index
))
1649 if (gimple_debug_bind_p (stmt
))
1652 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt
),
1653 gimple_debug_bind_get_value (stmt
),
1655 if (id
->reset_location
)
1656 gimple_set_location (copy
, input_location
);
1657 id
->debug_stmts
.safe_push (copy
);
1658 gimple_seq_add_stmt (&stmts
, copy
);
1661 if (gimple_debug_source_bind_p (stmt
))
1663 gdebug
*copy
= gimple_build_debug_source_bind
1664 (gimple_debug_source_bind_get_var (stmt
),
1665 gimple_debug_source_bind_get_value (stmt
),
1667 if (id
->reset_location
)
1668 gimple_set_location (copy
, input_location
);
1669 id
->debug_stmts
.safe_push (copy
);
1670 gimple_seq_add_stmt (&stmts
, copy
);
1673 if (gimple_debug_nonbind_marker_p (stmt
))
1675 /* If the inlined function has too many debug markers,
1677 if (id
->src_cfun
->debug_marker_count
1678 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT
))
1681 gdebug
*copy
= as_a
<gdebug
*> (gimple_copy (stmt
));
1682 if (id
->reset_location
)
1683 gimple_set_location (copy
, input_location
);
1684 id
->debug_stmts
.safe_push (copy
);
1685 gimple_seq_add_stmt (&stmts
, copy
);
1689 /* Create a new deep copy of the statement. */
1690 copy
= gimple_copy (stmt
);
1692 /* Clear flags that need revisiting. */
1693 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (copy
))
1695 if (gimple_call_tail_p (call_stmt
))
1696 gimple_call_set_tail (call_stmt
, false);
1697 if (gimple_call_from_thunk_p (call_stmt
))
1698 gimple_call_set_from_thunk (call_stmt
, false);
1699 if (gimple_call_internal_p (call_stmt
))
1700 switch (gimple_call_internal_fn (call_stmt
))
1702 case IFN_GOMP_SIMD_LANE
:
1703 case IFN_GOMP_SIMD_VF
:
1704 case IFN_GOMP_SIMD_LAST_LANE
:
1705 case IFN_GOMP_SIMD_ORDERED_START
:
1706 case IFN_GOMP_SIMD_ORDERED_END
:
1707 DECL_STRUCT_FUNCTION (id
->dst_fn
)->has_simduid_loops
= true;
1714 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1715 RESX and EH_DISPATCH. */
1717 switch (gimple_code (copy
))
1721 tree r
, fndecl
= gimple_call_fndecl (copy
);
1722 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
1723 switch (DECL_FUNCTION_CODE (fndecl
))
1725 case BUILT_IN_EH_COPY_VALUES
:
1726 r
= gimple_call_arg (copy
, 1);
1727 r
= remap_eh_region_tree_nr (r
, id
);
1728 gimple_call_set_arg (copy
, 1, r
);
1731 case BUILT_IN_EH_POINTER
:
1732 case BUILT_IN_EH_FILTER
:
1733 r
= gimple_call_arg (copy
, 0);
1734 r
= remap_eh_region_tree_nr (r
, id
);
1735 gimple_call_set_arg (copy
, 0, r
);
1742 /* Reset alias info if we didn't apply measures to
1743 keep it valid over inlining by setting DECL_PT_UID. */
1744 if (!id
->src_cfun
->gimple_df
1745 || !id
->src_cfun
->gimple_df
->ipa_pta
)
1746 gimple_call_reset_alias_info (as_a
<gcall
*> (copy
));
1752 gresx
*resx_stmt
= as_a
<gresx
*> (copy
);
1753 int r
= gimple_resx_region (resx_stmt
);
1754 r
= remap_eh_region_nr (r
, id
);
1755 gimple_resx_set_region (resx_stmt
, r
);
1759 case GIMPLE_EH_DISPATCH
:
1761 geh_dispatch
*eh_dispatch
= as_a
<geh_dispatch
*> (copy
);
1762 int r
= gimple_eh_dispatch_region (eh_dispatch
);
1763 r
= remap_eh_region_nr (r
, id
);
1764 gimple_eh_dispatch_set_region (eh_dispatch
, r
);
1773 /* If STMT has a block defined, map it to the newly constructed block. */
1774 if (gimple_block (copy
))
1777 n
= id
->decl_map
->get (gimple_block (copy
));
1779 gimple_set_block (copy
, *n
);
1782 if (id
->reset_location
)
1783 gimple_set_location (copy
, input_location
);
1785 /* Debug statements ought to be rebuilt and not copied. */
1786 gcc_checking_assert (!is_gimple_debug (copy
));
1788 /* Remap all the operands in COPY. */
1789 memset (&wi
, 0, sizeof (wi
));
1792 walk_tree (gimple_op_ptr (copy
, 1), remap_gimple_op_r
, &wi
, NULL
);
1794 walk_gimple_op (copy
, remap_gimple_op_r
, &wi
);
1796 /* Clear the copied virtual operands. We are not remapping them here
1797 but are going to recreate them from scratch. */
1798 if (gimple_has_mem_ops (copy
))
1800 gimple_set_vdef (copy
, NULL_TREE
);
1801 gimple_set_vuse (copy
, NULL_TREE
);
1804 gimple_seq_add_stmt (&stmts
, copy
);
1809 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1813 copy_bb (copy_body_data
*id
, basic_block bb
,
1814 profile_count num
, profile_count den
)
1816 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1817 basic_block copy_basic_block
;
1821 profile_count::adjust_for_ipa_scaling (&num
, &den
);
1823 /* Search for previous copied basic block. */
1826 prev
= prev
->prev_bb
;
1828 /* create_basic_block() will append every new block to
1829 basic_block_info automatically. */
1830 copy_basic_block
= create_basic_block (NULL
, (basic_block
) prev
->aux
);
1831 copy_basic_block
->count
= bb
->count
.apply_scale (num
, den
);
1833 copy_gsi
= gsi_start_bb (copy_basic_block
);
1835 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1838 gimple
*stmt
= gsi_stmt (gsi
);
1839 gimple
*orig_stmt
= stmt
;
1840 gimple_stmt_iterator stmts_gsi
;
1841 bool stmt_added
= false;
1843 id
->regimplify
= false;
1844 stmts
= remap_gimple_stmt (stmt
, id
);
1846 if (gimple_seq_empty_p (stmts
))
1851 for (stmts_gsi
= gsi_start (stmts
);
1852 !gsi_end_p (stmts_gsi
); )
1854 stmt
= gsi_stmt (stmts_gsi
);
1856 /* Advance iterator now before stmt is moved to seq_gsi. */
1857 gsi_next (&stmts_gsi
);
1859 if (gimple_nop_p (stmt
))
1862 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
,
1865 /* With return slot optimization we can end up with
1866 non-gimple (foo *)&this->m, fix that here. */
1867 if (is_gimple_assign (stmt
)
1868 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
1869 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1872 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1873 gimple_assign_rhs1 (stmt
),
1875 GSI_CONTINUE_LINKING
);
1876 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1877 id
->regimplify
= false;
1880 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1883 gimple_regimplify_operands (stmt
, &seq_gsi
);
1891 /* If copy_basic_block has been empty at the start of this iteration,
1892 call gsi_start_bb again to get at the newly added statements. */
1893 if (gsi_end_p (copy_gsi
))
1894 copy_gsi
= gsi_start_bb (copy_basic_block
);
1896 gsi_next (©_gsi
);
1898 /* Process the new statement. The call to gimple_regimplify_operands
1899 possibly turned the statement into multiple statements, we
1900 need to process all of them. */
1906 stmt
= gsi_stmt (copy_gsi
);
1907 call_stmt
= dyn_cast
<gcall
*> (stmt
);
1909 && gimple_call_va_arg_pack_p (call_stmt
)
1911 && ! gimple_call_va_arg_pack_p (id
->call_stmt
))
1913 /* __builtin_va_arg_pack () should be replaced by
1914 all arguments corresponding to ... in the caller. */
1918 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1921 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1924 /* Create the new array of arguments. */
1925 n
= nargs
+ gimple_call_num_args (call_stmt
);
1926 argarray
.create (n
);
1927 argarray
.safe_grow_cleared (n
);
1929 /* Copy all the arguments before '...' */
1930 memcpy (argarray
.address (),
1931 gimple_call_arg_ptr (call_stmt
, 0),
1932 gimple_call_num_args (call_stmt
) * sizeof (tree
));
1934 /* Append the arguments passed in '...' */
1935 memcpy (argarray
.address () + gimple_call_num_args (call_stmt
),
1936 gimple_call_arg_ptr (id
->call_stmt
, 0)
1937 + (gimple_call_num_args (id
->call_stmt
) - nargs
),
1938 nargs
* sizeof (tree
));
1940 new_call
= gimple_build_call_vec (gimple_call_fn (call_stmt
),
1943 argarray
.release ();
1945 /* Copy all GIMPLE_CALL flags, location and block, except
1946 GF_CALL_VA_ARG_PACK. */
1947 gimple_call_copy_flags (new_call
, call_stmt
);
1948 gimple_call_set_va_arg_pack (new_call
, false);
1949 gimple_set_location (new_call
, gimple_location (stmt
));
1950 gimple_set_block (new_call
, gimple_block (stmt
));
1951 gimple_call_set_lhs (new_call
, gimple_call_lhs (call_stmt
));
1953 gsi_replace (©_gsi
, new_call
, false);
1958 && (decl
= gimple_call_fndecl (stmt
))
1959 && fndecl_built_in_p (decl
, BUILT_IN_VA_ARG_PACK_LEN
))
1961 /* __builtin_va_arg_pack_len () should be replaced by
1962 the number of anonymous arguments. */
1963 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1967 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1970 if (!gimple_call_lhs (stmt
))
1972 /* Drop unused calls. */
1973 gsi_remove (©_gsi
, false);
1976 else if (!gimple_call_va_arg_pack_p (id
->call_stmt
))
1978 count
= build_int_cst (integer_type_node
, nargs
);
1979 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
), count
);
1980 gsi_replace (©_gsi
, new_stmt
, false);
1983 else if (nargs
!= 0)
1985 tree newlhs
= create_tmp_reg_or_ssa_name (integer_type_node
);
1986 count
= build_int_cst (integer_type_node
, nargs
);
1987 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1988 PLUS_EXPR
, newlhs
, count
);
1989 gimple_call_set_lhs (stmt
, newlhs
);
1990 gsi_insert_after (©_gsi
, new_stmt
, GSI_NEW_STMT
);
1995 && gimple_call_internal_p (stmt
)
1996 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
1998 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1999 gsi_remove (©_gsi
, false);
2003 /* Statements produced by inlining can be unfolded, especially
2004 when we constant propagated some operands. We can't fold
2005 them right now for two reasons:
2006 1) folding require SSA_NAME_DEF_STMTs to be correct
2007 2) we can't change function calls to builtins.
2008 So we just mark statement for later folding. We mark
2009 all new statements, instead just statements that has changed
2010 by some nontrivial substitution so even statements made
2011 foldable indirectly are updated. If this turns out to be
2012 expensive, copy_body can be told to watch for nontrivial
2014 if (id
->statements_to_fold
)
2015 id
->statements_to_fold
->add (stmt
);
2017 /* We're duplicating a CALL_EXPR. Find any corresponding
2018 callgraph edges and update or duplicate them. */
2019 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
2021 struct cgraph_edge
*edge
;
2023 switch (id
->transform_call_graph_edges
)
2025 case CB_CGE_DUPLICATE
:
2026 edge
= id
->src_node
->get_edge (orig_stmt
);
2029 struct cgraph_edge
*old_edge
= edge
;
2030 profile_count old_cnt
= edge
->count
;
2031 edge
= edge
->clone (id
->dst_node
, call_stmt
,
2036 /* Speculative calls consist of two edges - direct and
2037 indirect. Duplicate the whole thing and distribute
2038 frequencies accordingly. */
2039 if (edge
->speculative
)
2041 struct cgraph_edge
*direct
, *indirect
;
2042 struct ipa_ref
*ref
;
2044 gcc_assert (!edge
->indirect_unknown_callee
);
2045 old_edge
->speculative_call_info (direct
, indirect
, ref
);
2047 profile_count indir_cnt
= indirect
->count
;
2048 indirect
= indirect
->clone (id
->dst_node
, call_stmt
,
2053 profile_probability prob
2054 = indir_cnt
.probability_in (old_cnt
+ indir_cnt
);
2056 = copy_basic_block
->count
.apply_probability (prob
);
2057 edge
->count
= copy_basic_block
->count
- indirect
->count
;
2058 id
->dst_node
->clone_reference (ref
, stmt
);
2061 edge
->count
= copy_basic_block
->count
;
2065 case CB_CGE_MOVE_CLONES
:
2066 id
->dst_node
->set_call_stmt_including_clones (orig_stmt
,
2068 edge
= id
->dst_node
->get_edge (stmt
);
2072 edge
= id
->dst_node
->get_edge (orig_stmt
);
2074 edge
->set_call_stmt (call_stmt
);
2081 /* Constant propagation on argument done during inlining
2082 may create new direct call. Produce an edge for it. */
2084 || (edge
->indirect_inlining_edge
2085 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
2086 && id
->dst_node
->definition
2087 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
2089 struct cgraph_node
*dest
= cgraph_node::get_create (fn
);
2091 /* We have missing edge in the callgraph. This can happen
2092 when previous inlining turned an indirect call into a
2093 direct call by constant propagating arguments or we are
2094 producing dead clone (for further cloning). In all
2095 other cases we hit a bug (incorrect node sharing is the
2096 most common reason for missing edges). */
2097 gcc_assert (!dest
->definition
2098 || dest
->address_taken
2099 || !id
->src_node
->definition
2100 || !id
->dst_node
->definition
);
2101 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
2102 id
->dst_node
->create_edge_including_clones
2103 (dest
, orig_stmt
, call_stmt
, bb
->count
,
2104 CIF_ORIGINALLY_INDIRECT_CALL
);
2106 id
->dst_node
->create_edge (dest
, call_stmt
,
2107 bb
->count
)->inline_failed
2108 = CIF_ORIGINALLY_INDIRECT_CALL
;
2111 fprintf (dump_file
, "Created new direct edge to %s\n",
2116 notice_special_calls (as_a
<gcall
*> (stmt
));
2119 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
2120 id
->eh_map
, id
->eh_lp_nr
);
2122 gsi_next (©_gsi
);
2124 while (!gsi_end_p (copy_gsi
));
2126 copy_gsi
= gsi_last_bb (copy_basic_block
);
2129 return copy_basic_block
;
2132 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2133 form is quite easy, since dominator relationship for old basic blocks does
2136 There is however exception where inlining might change dominator relation
2137 across EH edges from basic block within inlined functions destinating
2138 to landing pads in function we inline into.
2140 The function fills in PHI_RESULTs of such PHI nodes if they refer
2141 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2142 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2143 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2144 set, and this means that there will be no overlapping live ranges
2145 for the underlying symbol.
2147 This might change in future if we allow redirecting of EH edges and
2148 we might want to change way build CFG pre-inlining to include
2149 all the possible edges then. */
2151 update_ssa_across_abnormal_edges (basic_block bb
, basic_block ret_bb
,
2152 bool can_throw
, bool nonlocal_goto
)
2157 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2159 || ((basic_block
)e
->dest
->aux
)->index
== ENTRY_BLOCK
)
2165 gcc_assert (e
->flags
& EDGE_EH
);
2168 gcc_assert (!(e
->flags
& EDGE_EH
));
2170 for (si
= gsi_start_phis (e
->dest
); !gsi_end_p (si
); gsi_next (&si
))
2176 /* For abnormal goto/call edges the receiver can be the
2177 ENTRY_BLOCK. Do not assert this cannot happen. */
2179 gcc_assert ((e
->flags
& EDGE_EH
)
2180 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)));
2182 re
= find_edge (ret_bb
, e
->dest
);
2183 gcc_checking_assert (re
);
2184 gcc_assert ((re
->flags
& (EDGE_EH
| EDGE_ABNORMAL
))
2185 == (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
)));
2187 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
2188 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, re
)));
2193 /* Insert clobbers for automatic variables of inlined ID->src_fn
2194 function at the start of basic block BB. */
2197 add_clobbers_to_eh_landing_pad (basic_block bb
, copy_body_data
*id
)
2201 FOR_EACH_VEC_SAFE_ELT (id
->src_cfun
->local_decls
, i
, var
)
2203 && !DECL_HARD_REGISTER (var
)
2204 && !TREE_THIS_VOLATILE (var
)
2205 && !DECL_HAS_VALUE_EXPR_P (var
)
2206 && !is_gimple_reg (var
)
2207 && auto_var_in_fn_p (var
, id
->src_fn
))
2209 tree
*t
= id
->decl_map
->get (var
);
2214 && !DECL_HARD_REGISTER (new_var
)
2215 && !TREE_THIS_VOLATILE (new_var
)
2216 && !DECL_HAS_VALUE_EXPR_P (new_var
)
2217 && !is_gimple_reg (new_var
)
2218 && auto_var_in_fn_p (new_var
, id
->dst_fn
))
2220 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
2221 tree clobber
= build_clobber (TREE_TYPE (new_var
));
2222 gimple
*clobber_stmt
= gimple_build_assign (new_var
, clobber
);
2223 gsi_insert_before (&gsi
, clobber_stmt
, GSI_NEW_STMT
);
2228 /* Copy edges from BB into its copy constructed earlier, scale profile
2229 accordingly. Edges will be taken care of later. Assume aux
2230 pointers to point to the copies of each BB. Return true if any
2231 debug stmts are left after a statement that must end the basic block. */
2234 copy_edges_for_bb (basic_block bb
, profile_count num
, profile_count den
,
2235 basic_block ret_bb
, basic_block abnormal_goto_dest
,
2238 basic_block new_bb
= (basic_block
) bb
->aux
;
2241 gimple_stmt_iterator si
;
2242 bool need_debug_cleanup
= false;
2244 /* Use the indices from the original blocks to create edges for the
2246 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2247 if (!(old_edge
->flags
& EDGE_EH
))
2250 int flags
= old_edge
->flags
;
2251 location_t locus
= old_edge
->goto_locus
;
2253 /* Return edges do get a FALLTHRU flag when they get inlined. */
2254 if (old_edge
->dest
->index
== EXIT_BLOCK
2255 && !(flags
& (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
|EDGE_FAKE
))
2256 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2257 flags
|= EDGE_FALLTHRU
;
2260 = make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
2261 new_edge
->probability
= old_edge
->probability
;
2262 if (!id
->reset_location
)
2263 new_edge
->goto_locus
= remap_location (locus
, id
);
2266 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
2269 /* When doing function splitting, we must decrease count of the return block
2270 which was previously reachable by block we did not copy. */
2271 if (single_succ_p (bb
) && single_succ_edge (bb
)->dest
->index
== EXIT_BLOCK
)
2272 FOR_EACH_EDGE (old_edge
, ei
, bb
->preds
)
2273 if (old_edge
->src
->index
!= ENTRY_BLOCK
2274 && !old_edge
->src
->aux
)
2275 new_bb
->count
-= old_edge
->count ().apply_scale (num
, den
);
2277 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
2280 bool can_throw
, nonlocal_goto
;
2282 copy_stmt
= gsi_stmt (si
);
2283 if (!is_gimple_debug (copy_stmt
))
2284 update_stmt (copy_stmt
);
2286 /* Do this before the possible split_block. */
2289 /* If this tree could throw an exception, there are two
2290 cases where we need to add abnormal edge(s): the
2291 tree wasn't in a region and there is a "current
2292 region" in the caller; or the original tree had
2293 EH edges. In both cases split the block after the tree,
2294 and add abnormal edge(s) as needed; we need both
2295 those from the callee and the caller.
2296 We check whether the copy can throw, because the const
2297 propagation can change an INDIRECT_REF which throws
2298 into a COMPONENT_REF which doesn't. If the copy
2299 can throw, the original could also throw. */
2300 can_throw
= stmt_can_throw_internal (cfun
, copy_stmt
);
2302 = (stmt_can_make_abnormal_goto (copy_stmt
)
2303 && !computed_goto_p (copy_stmt
));
2305 if (can_throw
|| nonlocal_goto
)
2307 if (!gsi_end_p (si
))
2309 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2312 need_debug_cleanup
= true;
2314 if (!gsi_end_p (si
))
2315 /* Note that bb's predecessor edges aren't necessarily
2316 right at this point; split_block doesn't care. */
2318 edge e
= split_block (new_bb
, copy_stmt
);
2321 new_bb
->aux
= e
->src
->aux
;
2322 si
= gsi_start_bb (new_bb
);
2326 bool update_probs
= false;
2328 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2330 make_eh_dispatch_edges (as_a
<geh_dispatch
*> (copy_stmt
));
2331 update_probs
= true;
2335 make_eh_edges (copy_stmt
);
2336 update_probs
= true;
2339 /* EH edges may not match old edges. Copy as much as possible. */
2344 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2346 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2347 if ((old_edge
->flags
& EDGE_EH
)
2348 && (e
= find_edge (copy_stmt_bb
,
2349 (basic_block
) old_edge
->dest
->aux
))
2350 && (e
->flags
& EDGE_EH
))
2351 e
->probability
= old_edge
->probability
;
2353 FOR_EACH_EDGE (e
, ei
, copy_stmt_bb
->succs
)
2354 if (e
->flags
& EDGE_EH
)
2356 if (!e
->probability
.initialized_p ())
2357 e
->probability
= profile_probability::never ();
2358 if (e
->dest
->index
< id
->add_clobbers_to_eh_landing_pads
)
2360 add_clobbers_to_eh_landing_pad (e
->dest
, id
);
2361 id
->add_clobbers_to_eh_landing_pads
= 0;
2367 /* If the call we inline cannot make abnormal goto do not add
2368 additional abnormal edges but only retain those already present
2369 in the original function body. */
2370 if (abnormal_goto_dest
== NULL
)
2371 nonlocal_goto
= false;
2374 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2376 if (get_abnormal_succ_dispatcher (copy_stmt_bb
))
2377 nonlocal_goto
= false;
2378 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2379 in OpenMP regions which aren't allowed to be left abnormally.
2380 So, no need to add abnormal edge in that case. */
2381 else if (is_gimple_call (copy_stmt
)
2382 && gimple_call_internal_p (copy_stmt
)
2383 && (gimple_call_internal_fn (copy_stmt
)
2384 == IFN_ABNORMAL_DISPATCHER
)
2385 && gimple_call_arg (copy_stmt
, 0) == boolean_true_node
)
2386 nonlocal_goto
= false;
2388 make_single_succ_edge (copy_stmt_bb
, abnormal_goto_dest
,
2392 if ((can_throw
|| nonlocal_goto
)
2393 && gimple_in_ssa_p (cfun
))
2394 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2395 can_throw
, nonlocal_goto
);
2397 return need_debug_cleanup
;
2400 /* Copy the PHIs. All blocks and edges are copied, some blocks
2401 was possibly split and new outgoing EH edges inserted.
2402 BB points to the block of original function and AUX pointers links
2403 the original and newly copied blocks. */
2406 copy_phis_for_bb (basic_block bb
, copy_body_data
*id
)
2408 basic_block
const new_bb
= (basic_block
) bb
->aux
;
2413 bool inserted
= false;
2415 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
2421 res
= PHI_RESULT (phi
);
2423 if (!virtual_operand_p (res
))
2425 walk_tree (&new_res
, copy_tree_body_r
, id
, NULL
);
2426 if (EDGE_COUNT (new_bb
->preds
) == 0)
2428 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2429 SSA_NAME_DEF_STMT (new_res
) = gimple_build_nop ();
2433 new_phi
= create_phi_node (new_res
, new_bb
);
2434 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2436 edge old_edge
= find_edge ((basic_block
) new_edge
->src
->aux
,
2443 /* When doing partial cloning, we allow PHIs on the entry
2444 block as long as all the arguments are the same.
2445 Find any input edge to see argument to copy. */
2447 FOR_EACH_EDGE (old_edge
, ei2
, bb
->preds
)
2448 if (!old_edge
->src
->aux
)
2451 arg
= PHI_ARG_DEF_FROM_EDGE (phi
, old_edge
);
2453 walk_tree (&new_arg
, copy_tree_body_r
, id
, NULL
);
2454 gcc_assert (new_arg
);
2455 /* With return slot optimization we can end up with
2456 non-gimple (foo *)&this->m, fix that here. */
2457 if (TREE_CODE (new_arg
) != SSA_NAME
2458 && TREE_CODE (new_arg
) != FUNCTION_DECL
2459 && !is_gimple_val (new_arg
))
2461 gimple_seq stmts
= NULL
;
2462 new_arg
= force_gimple_operand (new_arg
, &stmts
, true,
2464 gsi_insert_seq_on_edge (new_edge
, stmts
);
2467 locus
= gimple_phi_arg_location_from_edge (phi
, old_edge
);
2468 if (id
->reset_location
)
2469 locus
= input_location
;
2471 locus
= remap_location (locus
, id
);
2472 add_phi_arg (new_phi
, new_arg
, new_edge
, locus
);
2478 /* Commit the delayed edge insertions. */
2480 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2481 gsi_commit_one_edge_insert (new_edge
, NULL
);
2485 /* Wrapper for remap_decl so it can be used as a callback. */
2488 remap_decl_1 (tree decl
, void *data
)
2490 return remap_decl (decl
, (copy_body_data
*) data
);
2493 /* Build struct function and associated datastructures for the new clone
2494 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2495 the cfun to the function of new_fndecl (and current_function_decl too). */
2498 initialize_cfun (tree new_fndecl
, tree callee_fndecl
, profile_count count
)
2500 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2502 if (!DECL_ARGUMENTS (new_fndecl
))
2503 DECL_ARGUMENTS (new_fndecl
) = DECL_ARGUMENTS (callee_fndecl
);
2504 if (!DECL_RESULT (new_fndecl
))
2505 DECL_RESULT (new_fndecl
) = DECL_RESULT (callee_fndecl
);
2507 /* Register specific tree functions. */
2508 gimple_register_cfg_hooks ();
2510 /* Get clean struct function. */
2511 push_struct_function (new_fndecl
);
2513 /* We will rebuild these, so just sanity check that they are empty. */
2514 gcc_assert (VALUE_HISTOGRAMS (cfun
) == NULL
);
2515 gcc_assert (cfun
->local_decls
== NULL
);
2516 gcc_assert (cfun
->cfg
== NULL
);
2517 gcc_assert (cfun
->decl
== new_fndecl
);
2519 /* Copy items we preserve during cloning. */
2520 cfun
->static_chain_decl
= src_cfun
->static_chain_decl
;
2521 cfun
->nonlocal_goto_save_area
= src_cfun
->nonlocal_goto_save_area
;
2522 cfun
->function_end_locus
= src_cfun
->function_end_locus
;
2523 cfun
->curr_properties
= src_cfun
->curr_properties
;
2524 cfun
->last_verified
= src_cfun
->last_verified
;
2525 cfun
->va_list_gpr_size
= src_cfun
->va_list_gpr_size
;
2526 cfun
->va_list_fpr_size
= src_cfun
->va_list_fpr_size
;
2527 cfun
->has_nonlocal_label
= src_cfun
->has_nonlocal_label
;
2528 cfun
->stdarg
= src_cfun
->stdarg
;
2529 cfun
->after_inlining
= src_cfun
->after_inlining
;
2530 cfun
->can_throw_non_call_exceptions
2531 = src_cfun
->can_throw_non_call_exceptions
;
2532 cfun
->can_delete_dead_exceptions
= src_cfun
->can_delete_dead_exceptions
;
2533 cfun
->returns_struct
= src_cfun
->returns_struct
;
2534 cfun
->returns_pcc_struct
= src_cfun
->returns_pcc_struct
;
2536 init_empty_tree_cfg ();
2538 profile_status_for_fn (cfun
) = profile_status_for_fn (src_cfun
);
2540 profile_count num
= count
;
2541 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2542 profile_count::adjust_for_ipa_scaling (&num
, &den
);
2544 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
2545 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2546 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2547 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
2548 EXIT_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2549 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2551 init_eh_for_function ();
2553 if (src_cfun
->gimple_df
)
2555 init_tree_ssa (cfun
);
2556 cfun
->gimple_df
->in_ssa_p
= src_cfun
->gimple_df
->in_ssa_p
;
2557 if (cfun
->gimple_df
->in_ssa_p
)
2558 init_ssa_operands (cfun
);
2562 /* Helper function for copy_cfg_body. Move debug stmts from the end
2563 of NEW_BB to the beginning of successor basic blocks when needed. If the
2564 successor has multiple predecessors, reset them, otherwise keep
2568 maybe_move_debug_stmts_to_successors (copy_body_data
*id
, basic_block new_bb
)
2572 gimple_stmt_iterator si
= gsi_last_nondebug_bb (new_bb
);
2575 || gsi_one_before_end_p (si
)
2576 || !(stmt_can_throw_internal (cfun
, gsi_stmt (si
))
2577 || stmt_can_make_abnormal_goto (gsi_stmt (si
))))
2580 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
2582 gimple_stmt_iterator ssi
= gsi_last_bb (new_bb
);
2583 gimple_stmt_iterator dsi
= gsi_after_labels (e
->dest
);
2584 while (is_gimple_debug (gsi_stmt (ssi
)))
2586 gimple
*stmt
= gsi_stmt (ssi
);
2591 /* For the last edge move the debug stmts instead of copying
2593 if (ei_one_before_end_p (ei
))
2597 if (!single_pred_p (e
->dest
) && gimple_debug_bind_p (stmt
))
2599 gimple_debug_bind_reset_value (stmt
);
2600 gimple_set_location (stmt
, UNKNOWN_LOCATION
);
2602 gsi_remove (&si
, false);
2603 gsi_insert_before (&dsi
, stmt
, GSI_SAME_STMT
);
2607 if (gimple_debug_bind_p (stmt
))
2609 var
= gimple_debug_bind_get_var (stmt
);
2610 if (single_pred_p (e
->dest
))
2612 value
= gimple_debug_bind_get_value (stmt
);
2613 value
= unshare_expr (value
);
2614 new_stmt
= gimple_build_debug_bind (var
, value
, stmt
);
2617 new_stmt
= gimple_build_debug_bind (var
, NULL_TREE
, NULL
);
2619 else if (gimple_debug_source_bind_p (stmt
))
2621 var
= gimple_debug_source_bind_get_var (stmt
);
2622 value
= gimple_debug_source_bind_get_value (stmt
);
2623 new_stmt
= gimple_build_debug_source_bind (var
, value
, stmt
);
2625 else if (gimple_debug_nonbind_marker_p (stmt
))
2626 new_stmt
= as_a
<gdebug
*> (gimple_copy (stmt
));
2629 gsi_insert_before (&dsi
, new_stmt
, GSI_SAME_STMT
);
2630 id
->debug_stmts
.safe_push (new_stmt
);
2636 /* Make a copy of the sub-loops of SRC_PARENT and place them
2637 as siblings of DEST_PARENT. */
2640 copy_loops (copy_body_data
*id
,
2641 struct loop
*dest_parent
, struct loop
*src_parent
)
2643 struct loop
*src_loop
= src_parent
->inner
;
2646 if (!id
->blocks_to_copy
2647 || bitmap_bit_p (id
->blocks_to_copy
, src_loop
->header
->index
))
2649 struct loop
*dest_loop
= alloc_loop ();
2651 /* Assign the new loop its header and latch and associate
2652 those with the new loop. */
2653 dest_loop
->header
= (basic_block
)src_loop
->header
->aux
;
2654 dest_loop
->header
->loop_father
= dest_loop
;
2655 if (src_loop
->latch
!= NULL
)
2657 dest_loop
->latch
= (basic_block
)src_loop
->latch
->aux
;
2658 dest_loop
->latch
->loop_father
= dest_loop
;
2661 /* Copy loop meta-data. */
2662 copy_loop_info (src_loop
, dest_loop
);
2664 /* Finally place it into the loop array and the loop tree. */
2665 place_new_loop (cfun
, dest_loop
);
2666 flow_loop_tree_node_add (dest_parent
, dest_loop
);
2668 dest_loop
->safelen
= src_loop
->safelen
;
2669 if (src_loop
->unroll
)
2671 dest_loop
->unroll
= src_loop
->unroll
;
2672 cfun
->has_unroll
= true;
2674 dest_loop
->dont_vectorize
= src_loop
->dont_vectorize
;
2675 if (src_loop
->force_vectorize
)
2677 dest_loop
->force_vectorize
= true;
2678 cfun
->has_force_vectorize_loops
= true;
2680 if (src_loop
->simduid
)
2682 dest_loop
->simduid
= remap_decl (src_loop
->simduid
, id
);
2683 cfun
->has_simduid_loops
= true;
2687 copy_loops (id
, dest_loop
, src_loop
);
2689 src_loop
= src_loop
->next
;
2693 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2696 redirect_all_calls (copy_body_data
* id
, basic_block bb
)
2698 gimple_stmt_iterator si
;
2699 gimple
*last
= last_stmt (bb
);
2700 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
2702 gimple
*stmt
= gsi_stmt (si
);
2703 if (is_gimple_call (stmt
))
2705 struct cgraph_edge
*edge
= id
->dst_node
->get_edge (stmt
);
2708 edge
->redirect_call_stmt_to_callee ();
2709 if (stmt
== last
&& id
->call_stmt
&& maybe_clean_eh_stmt (stmt
))
2710 gimple_purge_dead_eh_edges (bb
);
2716 /* Make a copy of the body of FN so that it can be inserted inline in
2717 another function. Walks FN via CFG, returns new fndecl. */
2720 copy_cfg_body (copy_body_data
* id
,
2721 basic_block entry_block_map
, basic_block exit_block_map
,
2722 basic_block new_entry
)
2724 tree callee_fndecl
= id
->src_fn
;
2725 /* Original cfun for the callee, doesn't change. */
2726 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2727 struct function
*cfun_to_copy
;
2729 tree new_fndecl
= NULL
;
2730 bool need_debug_cleanup
= false;
2732 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2733 profile_count num
= entry_block_map
->count
;
2735 cfun_to_copy
= id
->src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2737 /* Register specific tree functions. */
2738 gimple_register_cfg_hooks ();
2740 /* If we are inlining just region of the function, make sure to connect
2741 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2742 part of loop, we must compute frequency and probability of
2743 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2744 probabilities of edges incoming from nonduplicated region. */
2749 den
= profile_count::zero ();
2751 FOR_EACH_EDGE (e
, ei
, new_entry
->preds
)
2754 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= den
;
2757 profile_count::adjust_for_ipa_scaling (&num
, &den
);
2759 /* Must have a CFG here at this point. */
2760 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2761 (DECL_STRUCT_FUNCTION (callee_fndecl
)));
2764 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= entry_block_map
;
2765 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= exit_block_map
;
2766 entry_block_map
->aux
= ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2767 exit_block_map
->aux
= EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2769 /* Duplicate any exception-handling regions. */
2771 id
->eh_map
= duplicate_eh_regions (cfun_to_copy
, NULL
, id
->eh_lp_nr
,
2774 /* Use aux pointers to map the original blocks to copy. */
2775 FOR_EACH_BB_FN (bb
, cfun_to_copy
)
2776 if (!id
->blocks_to_copy
|| bitmap_bit_p (id
->blocks_to_copy
, bb
->index
))
2778 basic_block new_bb
= copy_bb (id
, bb
, num
, den
);
2781 new_bb
->loop_father
= entry_block_map
->loop_father
;
2784 last
= last_basic_block_for_fn (cfun
);
2786 /* Now that we've duplicated the blocks, duplicate their edges. */
2787 basic_block abnormal_goto_dest
= NULL
;
2789 && stmt_can_make_abnormal_goto (id
->call_stmt
))
2791 gimple_stmt_iterator gsi
= gsi_for_stmt (id
->call_stmt
);
2793 bb
= gimple_bb (id
->call_stmt
);
2795 if (gsi_end_p (gsi
))
2796 abnormal_goto_dest
= get_abnormal_succ_dispatcher (bb
);
2798 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2799 if (!id
->blocks_to_copy
2800 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2801 need_debug_cleanup
|= copy_edges_for_bb (bb
, num
, den
, exit_block_map
,
2802 abnormal_goto_dest
, id
);
2806 edge e
= make_edge (entry_block_map
, (basic_block
)new_entry
->aux
,
2808 e
->probability
= profile_probability::always ();
2811 /* Duplicate the loop tree, if available and wanted. */
2812 if (loops_for_fn (src_cfun
) != NULL
2813 && current_loops
!= NULL
)
2815 copy_loops (id
, entry_block_map
->loop_father
,
2816 get_loop (src_cfun
, 0));
2817 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2818 loops_state_set (LOOPS_NEED_FIXUP
);
2821 /* If the loop tree in the source function needed fixup, mark the
2822 destination loop tree for fixup, too. */
2823 if (loops_for_fn (src_cfun
)->state
& LOOPS_NEED_FIXUP
)
2824 loops_state_set (LOOPS_NEED_FIXUP
);
2826 if (gimple_in_ssa_p (cfun
))
2827 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2828 if (!id
->blocks_to_copy
2829 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2830 copy_phis_for_bb (bb
, id
);
2832 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2835 if (need_debug_cleanup
2836 && bb
->index
!= ENTRY_BLOCK
2837 && bb
->index
!= EXIT_BLOCK
)
2838 maybe_move_debug_stmts_to_successors (id
, (basic_block
) bb
->aux
);
2839 /* Update call edge destinations. This cannot be done before loop
2840 info is updated, because we may split basic blocks. */
2841 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
2842 && bb
->index
!= ENTRY_BLOCK
2843 && bb
->index
!= EXIT_BLOCK
)
2844 redirect_all_calls (id
, (basic_block
)bb
->aux
);
2845 ((basic_block
)bb
->aux
)->aux
= NULL
;
2849 /* Zero out AUX fields of newly created block during EH edge
2851 for (; last
< last_basic_block_for_fn (cfun
); last
++)
2853 if (need_debug_cleanup
)
2854 maybe_move_debug_stmts_to_successors (id
,
2855 BASIC_BLOCK_FOR_FN (cfun
, last
));
2856 BASIC_BLOCK_FOR_FN (cfun
, last
)->aux
= NULL
;
2857 /* Update call edge destinations. This cannot be done before loop
2858 info is updated, because we may split basic blocks. */
2859 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
)
2860 redirect_all_calls (id
, BASIC_BLOCK_FOR_FN (cfun
, last
));
2862 entry_block_map
->aux
= NULL
;
2863 exit_block_map
->aux
= NULL
;
2870 if (id
->dependence_map
)
2872 delete id
->dependence_map
;
2873 id
->dependence_map
= NULL
;
2879 /* Copy the debug STMT using ID. We deal with these statements in a
2880 special way: if any variable in their VALUE expression wasn't
2881 remapped yet, we won't remap it, because that would get decl uids
2882 out of sync, causing codegen differences between -g and -g0. If
2883 this arises, we drop the VALUE expression altogether. */
2886 copy_debug_stmt (gdebug
*stmt
, copy_body_data
*id
)
2889 struct walk_stmt_info wi
;
2891 if (gimple_block (stmt
))
2893 n
= id
->decl_map
->get (gimple_block (stmt
));
2894 gimple_set_block (stmt
, n
? *n
: id
->block
);
2897 if (gimple_debug_nonbind_marker_p (stmt
))
2900 /* Remap all the operands in COPY. */
2901 memset (&wi
, 0, sizeof (wi
));
2904 processing_debug_stmt
= 1;
2906 if (gimple_debug_source_bind_p (stmt
))
2907 t
= gimple_debug_source_bind_get_var (stmt
);
2908 else if (gimple_debug_bind_p (stmt
))
2909 t
= gimple_debug_bind_get_var (stmt
);
2913 if (TREE_CODE (t
) == PARM_DECL
&& id
->debug_map
2914 && (n
= id
->debug_map
->get (t
)))
2916 gcc_assert (VAR_P (*n
));
2919 else if (VAR_P (t
) && !is_global_var (t
) && !id
->decl_map
->get (t
))
2920 /* T is a non-localized variable. */;
2922 walk_tree (&t
, remap_gimple_op_r
, &wi
, NULL
);
2924 if (gimple_debug_bind_p (stmt
))
2926 gimple_debug_bind_set_var (stmt
, t
);
2928 if (gimple_debug_bind_has_value_p (stmt
))
2929 walk_tree (gimple_debug_bind_get_value_ptr (stmt
),
2930 remap_gimple_op_r
, &wi
, NULL
);
2932 /* Punt if any decl couldn't be remapped. */
2933 if (processing_debug_stmt
< 0)
2934 gimple_debug_bind_reset_value (stmt
);
2936 else if (gimple_debug_source_bind_p (stmt
))
2938 gimple_debug_source_bind_set_var (stmt
, t
);
2939 /* When inlining and source bind refers to one of the optimized
2940 away parameters, change the source bind into normal debug bind
2941 referring to the corresponding DEBUG_EXPR_DECL that should have
2942 been bound before the call stmt. */
2943 t
= gimple_debug_source_bind_get_value (stmt
);
2945 && TREE_CODE (t
) == PARM_DECL
2948 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (id
->src_fn
);
2950 if (debug_args
!= NULL
)
2952 for (i
= 0; i
< vec_safe_length (*debug_args
); i
+= 2)
2953 if ((**debug_args
)[i
] == DECL_ORIGIN (t
)
2954 && TREE_CODE ((**debug_args
)[i
+ 1]) == DEBUG_EXPR_DECL
)
2956 t
= (**debug_args
)[i
+ 1];
2957 stmt
->subcode
= GIMPLE_DEBUG_BIND
;
2958 gimple_debug_bind_set_value (stmt
, t
);
2963 if (gimple_debug_source_bind_p (stmt
))
2964 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt
),
2965 remap_gimple_op_r
, &wi
, NULL
);
2968 processing_debug_stmt
= 0;
2973 /* Process deferred debug stmts. In order to give values better odds
2974 of being successfully remapped, we delay the processing of debug
2975 stmts until all other stmts that might require remapping are
2979 copy_debug_stmts (copy_body_data
*id
)
2984 if (!id
->debug_stmts
.exists ())
2987 FOR_EACH_VEC_ELT (id
->debug_stmts
, i
, stmt
)
2988 copy_debug_stmt (stmt
, id
);
2990 id
->debug_stmts
.release ();
2993 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2994 another function. */
2997 copy_tree_body (copy_body_data
*id
)
2999 tree fndecl
= id
->src_fn
;
3000 tree body
= DECL_SAVED_TREE (fndecl
);
3002 walk_tree (&body
, copy_tree_body_r
, id
, NULL
);
3007 /* Make a copy of the body of FN so that it can be inserted inline in
3008 another function. */
3011 copy_body (copy_body_data
*id
,
3012 basic_block entry_block_map
, basic_block exit_block_map
,
3013 basic_block new_entry
)
3015 tree fndecl
= id
->src_fn
;
3018 /* If this body has a CFG, walk CFG and copy. */
3019 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl
)));
3020 body
= copy_cfg_body (id
, entry_block_map
, exit_block_map
,
3022 copy_debug_stmts (id
);
3027 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3028 defined in function FN, or of a data member thereof. */
3031 self_inlining_addr_expr (tree value
, tree fn
)
3035 if (TREE_CODE (value
) != ADDR_EXPR
)
3038 var
= get_base_address (TREE_OPERAND (value
, 0));
3040 return var
&& auto_var_in_fn_p (var
, fn
);
3043 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3044 lexical block and line number information from base_stmt, if given,
3045 or from the last stmt of the block otherwise. */
3048 insert_init_debug_bind (copy_body_data
*id
,
3049 basic_block bb
, tree var
, tree value
,
3053 gimple_stmt_iterator gsi
;
3056 if (!gimple_in_ssa_p (id
->src_cfun
))
3059 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
3062 tracked_var
= target_for_debug_bind (var
);
3068 gsi
= gsi_last_bb (bb
);
3069 if (!base_stmt
&& !gsi_end_p (gsi
))
3070 base_stmt
= gsi_stmt (gsi
);
3073 note
= gimple_build_debug_bind (tracked_var
, unshare_expr (value
), base_stmt
);
3077 if (!gsi_end_p (gsi
))
3078 gsi_insert_after (&gsi
, note
, GSI_SAME_STMT
);
3080 gsi_insert_before (&gsi
, note
, GSI_SAME_STMT
);
3087 insert_init_stmt (copy_body_data
*id
, basic_block bb
, gimple
*init_stmt
)
3089 /* If VAR represents a zero-sized variable, it's possible that the
3090 assignment statement may result in no gimple statements. */
3093 gimple_stmt_iterator si
= gsi_last_bb (bb
);
3095 /* We can end up with init statements that store to a non-register
3096 from a rhs with a conversion. Handle that here by forcing the
3097 rhs into a temporary. gimple_regimplify_operands is not
3098 prepared to do this for us. */
3099 if (!is_gimple_debug (init_stmt
)
3100 && !is_gimple_reg (gimple_assign_lhs (init_stmt
))
3101 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt
)))
3102 && gimple_assign_rhs_class (init_stmt
) == GIMPLE_UNARY_RHS
)
3104 tree rhs
= build1 (gimple_assign_rhs_code (init_stmt
),
3105 gimple_expr_type (init_stmt
),
3106 gimple_assign_rhs1 (init_stmt
));
3107 rhs
= force_gimple_operand_gsi (&si
, rhs
, true, NULL_TREE
, false,
3109 gimple_assign_set_rhs_code (init_stmt
, TREE_CODE (rhs
));
3110 gimple_assign_set_rhs1 (init_stmt
, rhs
);
3112 gsi_insert_after (&si
, init_stmt
, GSI_NEW_STMT
);
3113 gimple_regimplify_operands (init_stmt
, &si
);
3115 if (!is_gimple_debug (init_stmt
))
3117 tree def
= gimple_assign_lhs (init_stmt
);
3118 insert_init_debug_bind (id
, bb
, def
, def
, init_stmt
);
3123 /* Initialize parameter P with VALUE. If needed, produce init statement
3124 at the end of BB. When BB is NULL, we return init statement to be
3127 setup_one_parameter (copy_body_data
*id
, tree p
, tree value
, tree fn
,
3128 basic_block bb
, tree
*vars
)
3130 gimple
*init_stmt
= NULL
;
3133 tree def
= (gimple_in_ssa_p (cfun
)
3134 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3137 && value
!= error_mark_node
3138 && !useless_type_conversion_p (TREE_TYPE (p
), TREE_TYPE (value
)))
3140 /* If we can match up types by promotion/demotion do so. */
3141 if (fold_convertible_p (TREE_TYPE (p
), value
))
3142 rhs
= fold_convert (TREE_TYPE (p
), value
);
3145 /* ??? For valid programs we should not end up here.
3146 Still if we end up with truly mismatched types here, fall back
3147 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3148 GIMPLE to the following passes. */
3149 if (!is_gimple_reg_type (TREE_TYPE (value
))
3150 || TYPE_SIZE (TREE_TYPE (p
)) == TYPE_SIZE (TREE_TYPE (value
)))
3151 rhs
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (p
), value
);
3153 rhs
= build_zero_cst (TREE_TYPE (p
));
3157 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3158 here since the type of this decl must be visible to the calling
3160 var
= copy_decl_to_var (p
, id
);
3162 /* Declare this new variable. */
3163 DECL_CHAIN (var
) = *vars
;
3166 /* Make gimplifier happy about this variable. */
3167 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3169 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3170 we would not need to create a new variable here at all, if it
3171 weren't for debug info. Still, we can just use the argument
3173 if (TREE_READONLY (p
)
3174 && !TREE_ADDRESSABLE (p
)
3175 && value
&& !TREE_SIDE_EFFECTS (value
)
3178 /* We may produce non-gimple trees by adding NOPs or introduce
3179 invalid sharing when operand is not really constant.
3180 It is not big deal to prohibit constant propagation here as
3181 we will constant propagate in DOM1 pass anyway. */
3182 if (is_gimple_min_invariant (value
)
3183 && useless_type_conversion_p (TREE_TYPE (p
),
3185 /* We have to be very careful about ADDR_EXPR. Make sure
3186 the base variable isn't a local variable of the inlined
3187 function, e.g., when doing recursive inlining, direct or
3188 mutually-recursive or whatever, which is why we don't
3189 just test whether fn == current_function_decl. */
3190 && ! self_inlining_addr_expr (value
, fn
))
3192 insert_decl_map (id
, p
, value
);
3193 insert_debug_decl_map (id
, p
, var
);
3194 return insert_init_debug_bind (id
, bb
, var
, value
, NULL
);
3198 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3199 that way, when the PARM_DECL is encountered, it will be
3200 automatically replaced by the VAR_DECL. */
3201 insert_decl_map (id
, p
, var
);
3203 /* Even if P was TREE_READONLY, the new VAR should not be.
3204 In the original code, we would have constructed a
3205 temporary, and then the function body would have never
3206 changed the value of P. However, now, we will be
3207 constructing VAR directly. The constructor body may
3208 change its value multiple times as it is being
3209 constructed. Therefore, it must not be TREE_READONLY;
3210 the back-end assumes that TREE_READONLY variable is
3211 assigned to only once. */
3212 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p
)))
3213 TREE_READONLY (var
) = 0;
3215 /* If there is no setup required and we are in SSA, take the easy route
3216 replacing all SSA names representing the function parameter by the
3217 SSA name passed to function.
3219 We need to construct map for the variable anyway as it might be used
3220 in different SSA names when parameter is set in function.
3222 Do replacement at -O0 for const arguments replaced by constant.
3223 This is important for builtin_constant_p and other construct requiring
3224 constant argument to be visible in inlined function body. */
3225 if (gimple_in_ssa_p (cfun
) && rhs
&& def
&& is_gimple_reg (p
)
3227 || (TREE_READONLY (p
)
3228 && is_gimple_min_invariant (rhs
)))
3229 && (TREE_CODE (rhs
) == SSA_NAME
3230 || is_gimple_min_invariant (rhs
))
3231 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
3233 insert_decl_map (id
, def
, rhs
);
3234 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3237 /* If the value of argument is never used, don't care about initializing
3239 if (optimize
&& gimple_in_ssa_p (cfun
) && !def
&& is_gimple_reg (p
))
3241 gcc_assert (!value
|| !TREE_SIDE_EFFECTS (value
));
3242 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3245 /* Initialize this VAR_DECL from the equivalent argument. Convert
3246 the argument to the proper type in case it was promoted. */
3249 if (rhs
== error_mark_node
)
3251 insert_decl_map (id
, p
, var
);
3252 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3255 STRIP_USELESS_TYPE_CONVERSION (rhs
);
3257 /* If we are in SSA form properly remap the default definition
3258 or assign to a dummy SSA name if the parameter is unused and
3259 we are not optimizing. */
3260 if (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
))
3264 def
= remap_ssa_name (def
, id
);
3265 init_stmt
= gimple_build_assign (def
, rhs
);
3266 SSA_NAME_IS_DEFAULT_DEF (def
) = 0;
3267 set_ssa_default_def (cfun
, var
, NULL
);
3271 def
= make_ssa_name (var
);
3272 init_stmt
= gimple_build_assign (def
, rhs
);
3276 init_stmt
= gimple_build_assign (var
, rhs
);
3278 if (bb
&& init_stmt
)
3279 insert_init_stmt (id
, bb
, init_stmt
);
3284 /* Generate code to initialize the parameters of the function at the
3285 top of the stack in ID from the GIMPLE_CALL STMT. */
3288 initialize_inlined_parameters (copy_body_data
*id
, gimple
*stmt
,
3289 tree fn
, basic_block bb
)
3294 tree vars
= NULL_TREE
;
3295 tree static_chain
= gimple_call_chain (stmt
);
3297 /* Figure out what the parameters are. */
3298 parms
= DECL_ARGUMENTS (fn
);
3300 /* Loop through the parameter declarations, replacing each with an
3301 equivalent VAR_DECL, appropriately initialized. */
3302 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3305 val
= i
< gimple_call_num_args (stmt
) ? gimple_call_arg (stmt
, i
) : NULL
;
3306 setup_one_parameter (id
, p
, val
, fn
, bb
, &vars
);
3308 /* After remapping parameters remap their types. This has to be done
3309 in a second loop over all parameters to appropriately remap
3310 variable sized arrays when the size is specified in a
3311 parameter following the array. */
3312 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3314 tree
*varp
= id
->decl_map
->get (p
);
3315 if (varp
&& VAR_P (*varp
))
3317 tree def
= (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
)
3318 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3320 TREE_TYPE (var
) = remap_type (TREE_TYPE (var
), id
);
3321 /* Also remap the default definition if it was remapped
3322 to the default definition of the parameter replacement
3323 by the parameter setup. */
3326 tree
*defp
= id
->decl_map
->get (def
);
3328 && TREE_CODE (*defp
) == SSA_NAME
3329 && SSA_NAME_VAR (*defp
) == var
)
3330 TREE_TYPE (*defp
) = TREE_TYPE (var
);
3335 /* Initialize the static chain. */
3336 p
= DECL_STRUCT_FUNCTION (fn
)->static_chain_decl
;
3337 gcc_assert (fn
!= current_function_decl
);
3340 /* No static chain? Seems like a bug in tree-nested.c. */
3341 gcc_assert (static_chain
);
3343 setup_one_parameter (id
, p
, static_chain
, fn
, bb
, &vars
);
3346 declare_inline_vars (id
->block
, vars
);
3350 /* Declare a return variable to replace the RESULT_DECL for the
3351 function we are calling. An appropriate DECL_STMT is returned.
3352 The USE_STMT is filled to contain a use of the declaration to
3353 indicate the return value of the function.
3355 RETURN_SLOT, if non-null is place where to store the result. It
3356 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3357 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3359 The return value is a (possibly null) value that holds the result
3360 as seen by the caller. */
3363 declare_return_variable (copy_body_data
*id
, tree return_slot
, tree modify_dest
,
3364 basic_block entry_bb
)
3366 tree callee
= id
->src_fn
;
3367 tree result
= DECL_RESULT (callee
);
3368 tree callee_type
= TREE_TYPE (result
);
3372 /* Handle type-mismatches in the function declaration return type
3373 vs. the call expression. */
3375 caller_type
= TREE_TYPE (modify_dest
);
3377 caller_type
= TREE_TYPE (TREE_TYPE (callee
));
3379 /* We don't need to do anything for functions that don't return anything. */
3380 if (VOID_TYPE_P (callee_type
))
3383 /* If there was a return slot, then the return value is the
3384 dereferenced address of that object. */
3387 /* The front end shouldn't have used both return_slot and
3388 a modify expression. */
3389 gcc_assert (!modify_dest
);
3390 if (DECL_BY_REFERENCE (result
))
3392 tree return_slot_addr
= build_fold_addr_expr (return_slot
);
3393 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr
);
3395 /* We are going to construct *&return_slot and we can't do that
3396 for variables believed to be not addressable.
3398 FIXME: This check possibly can match, because values returned
3399 via return slot optimization are not believed to have address
3400 taken by alias analysis. */
3401 gcc_assert (TREE_CODE (return_slot
) != SSA_NAME
);
3402 var
= return_slot_addr
;
3407 gcc_assert (TREE_CODE (var
) != SSA_NAME
);
3408 if (TREE_ADDRESSABLE (result
))
3409 mark_addressable (var
);
3411 if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3412 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3413 && !DECL_GIMPLE_REG_P (result
)
3415 DECL_GIMPLE_REG_P (var
) = 0;
3420 /* All types requiring non-trivial constructors should have been handled. */
3421 gcc_assert (!TREE_ADDRESSABLE (callee_type
));
3423 /* Attempt to avoid creating a new temporary variable. */
3425 && TREE_CODE (modify_dest
) != SSA_NAME
)
3427 bool use_it
= false;
3429 /* We can't use MODIFY_DEST if there's type promotion involved. */
3430 if (!useless_type_conversion_p (callee_type
, caller_type
))
3433 /* ??? If we're assigning to a variable sized type, then we must
3434 reuse the destination variable, because we've no good way to
3435 create variable sized temporaries at this point. */
3436 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type
)) != INTEGER_CST
)
3439 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3440 reuse it as the result of the call directly. Don't do this if
3441 it would promote MODIFY_DEST to addressable. */
3442 else if (TREE_ADDRESSABLE (result
))
3446 tree base_m
= get_base_address (modify_dest
);
3448 /* If the base isn't a decl, then it's a pointer, and we don't
3449 know where that's going to go. */
3450 if (!DECL_P (base_m
))
3452 else if (is_global_var (base_m
))
3454 else if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3455 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3456 && !DECL_GIMPLE_REG_P (result
)
3457 && DECL_GIMPLE_REG_P (base_m
))
3459 else if (!TREE_ADDRESSABLE (base_m
))
3471 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type
)) == INTEGER_CST
);
3473 var
= copy_result_decl_to_var (result
, id
);
3474 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3476 /* Do not have the rest of GCC warn about this variable as it should
3477 not be visible to the user. */
3478 TREE_NO_WARNING (var
) = 1;
3480 declare_inline_vars (id
->block
, var
);
3482 /* Build the use expr. If the return type of the function was
3483 promoted, convert it back to the expected type. */
3485 if (!useless_type_conversion_p (caller_type
, TREE_TYPE (var
)))
3487 /* If we can match up types by promotion/demotion do so. */
3488 if (fold_convertible_p (caller_type
, var
))
3489 use
= fold_convert (caller_type
, var
);
3492 /* ??? For valid programs we should not end up here.
3493 Still if we end up with truly mismatched types here, fall back
3494 to using a MEM_REF to not leak invalid GIMPLE to the following
3496 /* Prevent var from being written into SSA form. */
3497 if (TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
3498 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
)
3499 DECL_GIMPLE_REG_P (var
) = false;
3500 else if (is_gimple_reg_type (TREE_TYPE (var
)))
3501 TREE_ADDRESSABLE (var
) = true;
3502 use
= fold_build2 (MEM_REF
, caller_type
,
3503 build_fold_addr_expr (var
),
3504 build_int_cst (ptr_type_node
, 0));
3508 STRIP_USELESS_TYPE_CONVERSION (use
);
3510 if (DECL_BY_REFERENCE (result
))
3512 TREE_ADDRESSABLE (var
) = 1;
3513 var
= build_fold_addr_expr (var
);
3517 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3518 way, when the RESULT_DECL is encountered, it will be
3519 automatically replaced by the VAR_DECL.
3521 When returning by reference, ensure that RESULT_DECL remaps to
3523 if (DECL_BY_REFERENCE (result
)
3524 && !is_gimple_val (var
))
3526 tree temp
= create_tmp_var (TREE_TYPE (result
), "retvalptr");
3527 insert_decl_map (id
, result
, temp
);
3528 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3529 it's default_def SSA_NAME. */
3530 if (gimple_in_ssa_p (id
->src_cfun
)
3531 && is_gimple_reg (result
))
3533 temp
= make_ssa_name (temp
);
3534 insert_decl_map (id
, ssa_default_def (id
->src_cfun
, result
), temp
);
3536 insert_init_stmt (id
, entry_bb
, gimple_build_assign (temp
, var
));
3539 insert_decl_map (id
, result
, var
);
3541 /* Remember this so we can ignore it in remap_decls. */
3546 /* Determine if the function can be copied. If so return NULL. If
3547 not return a string describng the reason for failure. */
3550 copy_forbidden (struct function
*fun
)
3552 const char *reason
= fun
->cannot_be_copied_reason
;
3554 /* Only examine the function once. */
3555 if (fun
->cannot_be_copied_set
)
3558 /* We cannot copy a function that receives a non-local goto
3559 because we cannot remap the destination label used in the
3560 function that is performing the non-local goto. */
3561 /* ??? Actually, this should be possible, if we work at it.
3562 No doubt there's just a handful of places that simply
3563 assume it doesn't happen and don't substitute properly. */
3564 if (fun
->has_nonlocal_label
)
3566 reason
= G_("function %q+F can never be copied "
3567 "because it receives a non-local goto");
3571 if (fun
->has_forced_label_in_static
)
3573 reason
= G_("function %q+F can never be copied because it saves "
3574 "address of local label in a static variable");
3579 fun
->cannot_be_copied_reason
= reason
;
3580 fun
->cannot_be_copied_set
= true;
3585 static const char *inline_forbidden_reason
;
3587 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3588 iff a function cannot be inlined. Also sets the reason why. */
3591 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3592 struct walk_stmt_info
*wip
)
3594 tree fn
= (tree
) wip
->info
;
3596 gimple
*stmt
= gsi_stmt (*gsi
);
3598 switch (gimple_code (stmt
))
3601 /* Refuse to inline alloca call unless user explicitly forced so as
3602 this may change program's memory overhead drastically when the
3603 function using alloca is called in loop. In GCC present in
3604 SPEC2000 inlining into schedule_block cause it to require 2GB of
3605 RAM instead of 256MB. Don't do so for alloca calls emitted for
3606 VLA objects as those can't cause unbounded growth (they're always
3607 wrapped inside stack_save/stack_restore regions. */
3608 if (gimple_maybe_alloca_call_p (stmt
)
3609 && !gimple_call_alloca_for_var_p (as_a
<gcall
*> (stmt
))
3610 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3612 inline_forbidden_reason
3613 = G_("function %q+F can never be inlined because it uses "
3614 "alloca (override using the always_inline attribute)");
3615 *handled_ops_p
= true;
3619 t
= gimple_call_fndecl (stmt
);
3623 /* We cannot inline functions that call setjmp. */
3624 if (setjmp_call_p (t
))
3626 inline_forbidden_reason
3627 = G_("function %q+F can never be inlined because it uses setjmp");
3628 *handled_ops_p
= true;
3632 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3633 switch (DECL_FUNCTION_CODE (t
))
3635 /* We cannot inline functions that take a variable number of
3637 case BUILT_IN_VA_START
:
3638 case BUILT_IN_NEXT_ARG
:
3639 case BUILT_IN_VA_END
:
3640 inline_forbidden_reason
3641 = G_("function %q+F can never be inlined because it "
3642 "uses variable argument lists");
3643 *handled_ops_p
= true;
3646 case BUILT_IN_LONGJMP
:
3647 /* We can't inline functions that call __builtin_longjmp at
3648 all. The non-local goto machinery really requires the
3649 destination be in a different function. If we allow the
3650 function calling __builtin_longjmp to be inlined into the
3651 function calling __builtin_setjmp, Things will Go Awry. */
3652 inline_forbidden_reason
3653 = G_("function %q+F can never be inlined because "
3654 "it uses setjmp-longjmp exception handling");
3655 *handled_ops_p
= true;
3658 case BUILT_IN_NONLOCAL_GOTO
:
3660 inline_forbidden_reason
3661 = G_("function %q+F can never be inlined because "
3662 "it uses non-local goto");
3663 *handled_ops_p
= true;
3666 case BUILT_IN_RETURN
:
3667 case BUILT_IN_APPLY_ARGS
:
3668 /* If a __builtin_apply_args caller would be inlined,
3669 it would be saving arguments of the function it has
3670 been inlined into. Similarly __builtin_return would
3671 return from the function the inline has been inlined into. */
3672 inline_forbidden_reason
3673 = G_("function %q+F can never be inlined because "
3674 "it uses __builtin_return or __builtin_apply_args");
3675 *handled_ops_p
= true;
3684 t
= gimple_goto_dest (stmt
);
3686 /* We will not inline a function which uses computed goto. The
3687 addresses of its local labels, which may be tucked into
3688 global storage, are of course not constant across
3689 instantiations, which causes unexpected behavior. */
3690 if (TREE_CODE (t
) != LABEL_DECL
)
3692 inline_forbidden_reason
3693 = G_("function %q+F can never be inlined "
3694 "because it contains a computed goto");
3695 *handled_ops_p
= true;
3704 *handled_ops_p
= false;
3708 /* Return true if FNDECL is a function that cannot be inlined into
3712 inline_forbidden_p (tree fndecl
)
3714 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3715 struct walk_stmt_info wi
;
3717 bool forbidden_p
= false;
3719 /* First check for shared reasons not to copy the code. */
3720 inline_forbidden_reason
= copy_forbidden (fun
);
3721 if (inline_forbidden_reason
!= NULL
)
3724 /* Next, walk the statements of the function looking for
3725 constraucts we can't handle, or are non-optimal for inlining. */
3726 hash_set
<tree
> visited_nodes
;
3727 memset (&wi
, 0, sizeof (wi
));
3728 wi
.info
= (void *) fndecl
;
3729 wi
.pset
= &visited_nodes
;
3731 FOR_EACH_BB_FN (bb
, fun
)
3734 gimple_seq seq
= bb_seq (bb
);
3735 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3736 forbidden_p
= (ret
!= NULL
);
3744 /* Return false if the function FNDECL cannot be inlined on account of its
3745 attributes, true otherwise. */
3747 function_attribute_inlinable_p (const_tree fndecl
)
3749 if (targetm
.attribute_table
)
3753 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3755 const_tree name
= TREE_PURPOSE (a
);
3758 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3759 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3760 return targetm
.function_attribute_inlinable_p (fndecl
);
3767 /* Returns nonzero if FN is a function that does not have any
3768 fundamental inline blocking properties. */
3771 tree_inlinable_function_p (tree fn
)
3773 bool inlinable
= true;
3777 /* If we've already decided this function shouldn't be inlined,
3778 there's no need to check again. */
3779 if (DECL_UNINLINABLE (fn
))
3782 /* We only warn for functions declared `inline' by the user. */
3783 do_warning
= (warn_inline
3784 && DECL_DECLARED_INLINE_P (fn
)
3785 && !DECL_NO_INLINE_WARNING_P (fn
)
3786 && !DECL_IN_SYSTEM_HEADER (fn
));
3788 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3791 && always_inline
== NULL
)
3794 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3795 "is suppressed using -fno-inline", fn
);
3799 else if (!function_attribute_inlinable_p (fn
))
3802 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3803 "uses attributes conflicting with inlining", fn
);
3807 else if (inline_forbidden_p (fn
))
3809 /* See if we should warn about uninlinable functions. Previously,
3810 some of these warnings would be issued while trying to expand
3811 the function inline, but that would cause multiple warnings
3812 about functions that would for example call alloca. But since
3813 this a property of the function, just one warning is enough.
3814 As a bonus we can now give more details about the reason why a
3815 function is not inlinable. */
3817 error (inline_forbidden_reason
, fn
);
3818 else if (do_warning
)
3819 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3824 /* Squirrel away the result so that we don't have to check again. */
3825 DECL_UNINLINABLE (fn
) = !inlinable
;
3830 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
3831 word size and take possible memcpy call into account and return
3832 cost based on whether optimizing for size or speed according to SPEED_P. */
3835 estimate_move_cost (tree type
, bool ARG_UNUSED (speed_p
))
3839 gcc_assert (!VOID_TYPE_P (type
));
3841 if (TREE_CODE (type
) == VECTOR_TYPE
)
3843 scalar_mode inner
= SCALAR_TYPE_MODE (TREE_TYPE (type
));
3844 machine_mode simd
= targetm
.vectorize
.preferred_simd_mode (inner
);
3846 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type
)));
3847 int simd_mode_size
= estimated_poly_value (GET_MODE_SIZE (simd
));
3848 return ((orig_mode_size
+ simd_mode_size
- 1)
3852 size
= int_size_in_bytes (type
);
3854 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (speed_p
))
3855 /* Cost of a memcpy call, 3 arguments and the call. */
3858 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
3861 /* Returns cost of operation CODE, according to WEIGHTS */
3864 estimate_operator_cost (enum tree_code code
, eni_weights
*weights
,
3865 tree op1 ATTRIBUTE_UNUSED
, tree op2
)
3869 /* These are "free" conversions, or their presumed cost
3870 is folded into other operations. */
3875 case VIEW_CONVERT_EXPR
:
3878 /* Assign cost of 1 to usual operations.
3879 ??? We may consider mapping RTL costs to this. */
3885 case POINTER_PLUS_EXPR
:
3886 case POINTER_DIFF_EXPR
:
3889 case MULT_HIGHPART_EXPR
:
3891 case ADDR_SPACE_CONVERT_EXPR
:
3892 case FIXED_CONVERT_EXPR
:
3893 case FIX_TRUNC_EXPR
:
3912 case TRUTH_ANDIF_EXPR
:
3913 case TRUTH_ORIF_EXPR
:
3914 case TRUTH_AND_EXPR
:
3916 case TRUTH_XOR_EXPR
:
3917 case TRUTH_NOT_EXPR
:
3926 case UNORDERED_EXPR
:
3937 case PREDECREMENT_EXPR
:
3938 case PREINCREMENT_EXPR
:
3939 case POSTDECREMENT_EXPR
:
3940 case POSTINCREMENT_EXPR
:
3942 case REALIGN_LOAD_EXPR
:
3944 case WIDEN_SUM_EXPR
:
3945 case WIDEN_MULT_EXPR
:
3948 case WIDEN_MULT_PLUS_EXPR
:
3949 case WIDEN_MULT_MINUS_EXPR
:
3950 case WIDEN_LSHIFT_EXPR
:
3952 case VEC_WIDEN_MULT_HI_EXPR
:
3953 case VEC_WIDEN_MULT_LO_EXPR
:
3954 case VEC_WIDEN_MULT_EVEN_EXPR
:
3955 case VEC_WIDEN_MULT_ODD_EXPR
:
3956 case VEC_UNPACK_HI_EXPR
:
3957 case VEC_UNPACK_LO_EXPR
:
3958 case VEC_UNPACK_FLOAT_HI_EXPR
:
3959 case VEC_UNPACK_FLOAT_LO_EXPR
:
3960 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
3961 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
3962 case VEC_PACK_TRUNC_EXPR
:
3963 case VEC_PACK_SAT_EXPR
:
3964 case VEC_PACK_FIX_TRUNC_EXPR
:
3965 case VEC_PACK_FLOAT_EXPR
:
3966 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3967 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3968 case VEC_DUPLICATE_EXPR
:
3969 case VEC_SERIES_EXPR
:
3973 /* Few special cases of expensive operations. This is useful
3974 to avoid inlining on functions having too many of these. */
3975 case TRUNC_DIV_EXPR
:
3977 case FLOOR_DIV_EXPR
:
3978 case ROUND_DIV_EXPR
:
3979 case EXACT_DIV_EXPR
:
3980 case TRUNC_MOD_EXPR
:
3982 case FLOOR_MOD_EXPR
:
3983 case ROUND_MOD_EXPR
:
3985 if (TREE_CODE (op2
) != INTEGER_CST
)
3986 return weights
->div_mod_cost
;
3989 /* Bit-field insertion needs several shift and mask operations. */
3990 case BIT_INSERT_EXPR
:
3994 /* We expect a copy assignment with no operator. */
3995 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
);
4001 /* Estimate number of instructions that will be created by expanding
4002 the statements in the statement sequence STMTS.
4003 WEIGHTS contains weights attributed to various constructs. */
4006 estimate_num_insns_seq (gimple_seq stmts
, eni_weights
*weights
)
4009 gimple_stmt_iterator gsi
;
4012 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4013 cost
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
4019 /* Estimate number of instructions that will be created by expanding STMT.
4020 WEIGHTS contains weights attributed to various constructs. */
4023 estimate_num_insns (gimple
*stmt
, eni_weights
*weights
)
4026 enum gimple_code code
= gimple_code (stmt
);
4033 /* Try to estimate the cost of assignments. We have three cases to
4035 1) Simple assignments to registers;
4036 2) Stores to things that must live in memory. This includes
4037 "normal" stores to scalars, but also assignments of large
4038 structures, or constructors of big arrays;
4040 Let us look at the first two cases, assuming we have "a = b + C":
4041 <GIMPLE_ASSIGN <var_decl "a">
4042 <plus_expr <var_decl "b"> <constant C>>
4043 If "a" is a GIMPLE register, the assignment to it is free on almost
4044 any target, because "a" usually ends up in a real register. Hence
4045 the only cost of this expression comes from the PLUS_EXPR, and we
4046 can ignore the GIMPLE_ASSIGN.
4047 If "a" is not a GIMPLE register, the assignment to "a" will most
4048 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4049 of moving something into "a", which we compute using the function
4050 estimate_move_cost. */
4051 if (gimple_clobber_p (stmt
))
4052 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4054 lhs
= gimple_assign_lhs (stmt
);
4055 rhs
= gimple_assign_rhs1 (stmt
);
4059 /* Account for the cost of moving to / from memory. */
4060 if (gimple_store_p (stmt
))
4061 cost
+= estimate_move_cost (TREE_TYPE (lhs
), weights
->time_based
);
4062 if (gimple_assign_load_p (stmt
))
4063 cost
+= estimate_move_cost (TREE_TYPE (rhs
), weights
->time_based
);
4065 cost
+= estimate_operator_cost (gimple_assign_rhs_code (stmt
), weights
,
4066 gimple_assign_rhs1 (stmt
),
4067 get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
4068 == GIMPLE_BINARY_RHS
4069 ? gimple_assign_rhs2 (stmt
) : NULL
);
4073 cost
= 1 + estimate_operator_cost (gimple_cond_code (stmt
), weights
,
4074 gimple_op (stmt
, 0),
4075 gimple_op (stmt
, 1));
4080 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
4081 /* Take into account cost of the switch + guess 2 conditional jumps for
4084 TODO: once the switch expansion logic is sufficiently separated, we can
4085 do better job on estimating cost of the switch. */
4086 if (weights
->time_based
)
4087 cost
= floor_log2 (gimple_switch_num_labels (switch_stmt
)) * 2;
4089 cost
= gimple_switch_num_labels (switch_stmt
) * 2;
4097 if (gimple_call_internal_p (stmt
))
4099 else if ((decl
= gimple_call_fndecl (stmt
))
4100 && fndecl_built_in_p (decl
))
4102 /* Do not special case builtins where we see the body.
4103 This just confuse inliner. */
4104 struct cgraph_node
*node
;
4105 if (!(node
= cgraph_node::get (decl
))
4106 || node
->definition
)
4108 /* For buitins that are likely expanded to nothing or
4109 inlined do not account operand costs. */
4110 else if (is_simple_builtin (decl
))
4112 else if (is_inexpensive_builtin (decl
))
4113 return weights
->target_builtin_call_cost
;
4114 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4116 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4117 specialize the cheap expansion we do here.
4118 ??? This asks for a more general solution. */
4119 switch (DECL_FUNCTION_CODE (decl
))
4124 if (TREE_CODE (gimple_call_arg (stmt
, 1)) == REAL_CST
4126 (&TREE_REAL_CST (gimple_call_arg (stmt
, 1)),
4128 return estimate_operator_cost
4129 (MULT_EXPR
, weights
, gimple_call_arg (stmt
, 0),
4130 gimple_call_arg (stmt
, 0));
4139 cost
= decl
? weights
->call_cost
: weights
->indirect_call_cost
;
4140 if (gimple_call_lhs (stmt
))
4141 cost
+= estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt
)),
4142 weights
->time_based
);
4143 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4145 tree arg
= gimple_call_arg (stmt
, i
);
4146 cost
+= estimate_move_cost (TREE_TYPE (arg
),
4147 weights
->time_based
);
4153 return weights
->return_cost
;
4159 case GIMPLE_PREDICT
:
4165 int count
= asm_str_count (gimple_asm_string (as_a
<gasm
*> (stmt
)));
4166 /* 1000 means infinity. This avoids overflows later
4167 with very long asm statements. */
4170 /* If this asm is asm inline, count anything as minimum size. */
4171 if (gimple_asm_inline_p (as_a
<gasm
*> (stmt
)))
4172 count
= MIN (1, count
);
4173 return MAX (1, count
);
4177 /* This is either going to be an external function call with one
4178 argument, or two register copy statements plus a goto. */
4181 case GIMPLE_EH_DISPATCH
:
4182 /* ??? This is going to turn into a switch statement. Ideally
4183 we'd have a look at the eh region and estimate the number of
4188 return estimate_num_insns_seq (
4189 gimple_bind_body (as_a
<gbind
*> (stmt
)),
4192 case GIMPLE_EH_FILTER
:
4193 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt
), weights
);
4196 return estimate_num_insns_seq (gimple_catch_handler (
4197 as_a
<gcatch
*> (stmt
)),
4201 return (estimate_num_insns_seq (gimple_try_eval (stmt
), weights
)
4202 + estimate_num_insns_seq (gimple_try_cleanup (stmt
), weights
));
4204 /* OMP directives are generally very expensive. */
4206 case GIMPLE_OMP_RETURN
:
4207 case GIMPLE_OMP_SECTIONS_SWITCH
:
4208 case GIMPLE_OMP_ATOMIC_STORE
:
4209 case GIMPLE_OMP_CONTINUE
:
4210 /* ...except these, which are cheap. */
4213 case GIMPLE_OMP_ATOMIC_LOAD
:
4214 return weights
->omp_cost
;
4216 case GIMPLE_OMP_FOR
:
4217 return (weights
->omp_cost
4218 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
)
4219 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt
), weights
));
4221 case GIMPLE_OMP_PARALLEL
:
4222 case GIMPLE_OMP_TASK
:
4223 case GIMPLE_OMP_CRITICAL
:
4224 case GIMPLE_OMP_MASTER
:
4225 case GIMPLE_OMP_TASKGROUP
:
4226 case GIMPLE_OMP_ORDERED
:
4227 case GIMPLE_OMP_SECTION
:
4228 case GIMPLE_OMP_SECTIONS
:
4229 case GIMPLE_OMP_SINGLE
:
4230 case GIMPLE_OMP_TARGET
:
4231 case GIMPLE_OMP_TEAMS
:
4232 return (weights
->omp_cost
4233 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
));
4235 case GIMPLE_TRANSACTION
:
4236 return (weights
->tm_cost
4237 + estimate_num_insns_seq (gimple_transaction_body (
4238 as_a
<gtransaction
*> (stmt
)),
4248 /* Estimate number of instructions that will be created by expanding
4249 function FNDECL. WEIGHTS contains weights attributed to various
4253 estimate_num_insns_fn (tree fndecl
, eni_weights
*weights
)
4255 struct function
*my_function
= DECL_STRUCT_FUNCTION (fndecl
);
4256 gimple_stmt_iterator bsi
;
4260 gcc_assert (my_function
&& my_function
->cfg
);
4261 FOR_EACH_BB_FN (bb
, my_function
)
4263 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
4264 n
+= estimate_num_insns (gsi_stmt (bsi
), weights
);
4271 /* Initializes weights used by estimate_num_insns. */
4274 init_inline_once (void)
4276 eni_size_weights
.call_cost
= 1;
4277 eni_size_weights
.indirect_call_cost
= 3;
4278 eni_size_weights
.target_builtin_call_cost
= 1;
4279 eni_size_weights
.div_mod_cost
= 1;
4280 eni_size_weights
.omp_cost
= 40;
4281 eni_size_weights
.tm_cost
= 10;
4282 eni_size_weights
.time_based
= false;
4283 eni_size_weights
.return_cost
= 1;
4285 /* Estimating time for call is difficult, since we have no idea what the
4286 called function does. In the current uses of eni_time_weights,
4287 underestimating the cost does less harm than overestimating it, so
4288 we choose a rather small value here. */
4289 eni_time_weights
.call_cost
= 10;
4290 eni_time_weights
.indirect_call_cost
= 15;
4291 eni_time_weights
.target_builtin_call_cost
= 1;
4292 eni_time_weights
.div_mod_cost
= 10;
4293 eni_time_weights
.omp_cost
= 40;
4294 eni_time_weights
.tm_cost
= 40;
4295 eni_time_weights
.time_based
= true;
4296 eni_time_weights
.return_cost
= 2;
4300 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4303 prepend_lexical_block (tree current_block
, tree new_block
)
4305 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (current_block
);
4306 BLOCK_SUBBLOCKS (current_block
) = new_block
;
4307 BLOCK_SUPERCONTEXT (new_block
) = current_block
;
4310 /* Add local variables from CALLEE to CALLER. */
4313 add_local_variables (struct function
*callee
, struct function
*caller
,
4319 FOR_EACH_LOCAL_DECL (callee
, ix
, var
)
4320 if (!can_be_nonlocal (var
, id
))
4322 tree new_var
= remap_decl (var
, id
);
4324 /* Remap debug-expressions. */
4326 && DECL_HAS_DEBUG_EXPR_P (var
)
4329 tree tem
= DECL_DEBUG_EXPR (var
);
4330 bool old_regimplify
= id
->regimplify
;
4331 id
->remapping_type_depth
++;
4332 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
4333 id
->remapping_type_depth
--;
4334 id
->regimplify
= old_regimplify
;
4335 SET_DECL_DEBUG_EXPR (new_var
, tem
);
4336 DECL_HAS_DEBUG_EXPR_P (new_var
) = 1;
4338 add_local_decl (caller
, new_var
);
4342 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4343 have brought in or introduced any debug stmts for SRCVAR. */
4346 reset_debug_binding (copy_body_data
*id
, tree srcvar
, gimple_seq
*bindings
)
4348 tree
*remappedvarp
= id
->decl_map
->get (srcvar
);
4353 if (!VAR_P (*remappedvarp
))
4356 if (*remappedvarp
== id
->retvar
)
4359 tree tvar
= target_for_debug_bind (*remappedvarp
);
4363 gdebug
*stmt
= gimple_build_debug_bind (tvar
, NULL_TREE
,
4365 gimple_seq_add_stmt (bindings
, stmt
);
4368 /* For each inlined variable for which we may have debug bind stmts,
4369 add before GSI a final debug stmt resetting it, marking the end of
4370 its life, so that var-tracking knows it doesn't have to compute
4371 further locations for it. */
4374 reset_debug_bindings (copy_body_data
*id
, gimple_stmt_iterator gsi
)
4378 gimple_seq bindings
= NULL
;
4380 if (!gimple_in_ssa_p (id
->src_cfun
))
4383 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
4386 for (var
= DECL_ARGUMENTS (id
->src_fn
);
4387 var
; var
= DECL_CHAIN (var
))
4388 reset_debug_binding (id
, var
, &bindings
);
4390 FOR_EACH_LOCAL_DECL (id
->src_cfun
, ix
, var
)
4391 reset_debug_binding (id
, var
, &bindings
);
4393 gsi_insert_seq_before_without_update (&gsi
, bindings
, GSI_SAME_STMT
);
4396 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4399 expand_call_inline (basic_block bb
, gimple
*stmt
, copy_body_data
*id
)
4403 hash_map
<tree
, tree
> *dst
;
4404 hash_map
<tree
, tree
> *st
= NULL
;
4407 struct cgraph_edge
*cg_edge
;
4408 cgraph_inline_failed_t reason
;
4409 basic_block return_block
;
4411 gimple_stmt_iterator gsi
, stmt_gsi
;
4412 bool successfully_inlined
= false;
4413 bool purge_dead_abnormal_edges
;
4415 unsigned int prop_mask
, src_properties
;
4416 struct function
*dst_cfun
;
4419 gimple
*simtenter_stmt
= NULL
;
4420 vec
<tree
> *simtvars_save
;
4422 /* The gimplifier uses input_location in too many places, such as
4423 internal_get_tmp_var (). */
4424 location_t saved_location
= input_location
;
4425 input_location
= gimple_location (stmt
);
4427 /* From here on, we're only interested in CALL_EXPRs. */
4428 call_stmt
= dyn_cast
<gcall
*> (stmt
);
4432 cg_edge
= id
->dst_node
->get_edge (stmt
);
4433 gcc_checking_assert (cg_edge
);
4434 /* First, see if we can figure out what function is being called.
4435 If we cannot, then there is no hope of inlining the function. */
4436 if (cg_edge
->indirect_unknown_callee
)
4438 fn
= cg_edge
->callee
->decl
;
4439 gcc_checking_assert (fn
);
4441 /* If FN is a declaration of a function in a nested scope that was
4442 globally declared inline, we don't set its DECL_INITIAL.
4443 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4444 C++ front-end uses it for cdtors to refer to their internal
4445 declarations, that are not real functions. Fortunately those
4446 don't have trees to be saved, so we can tell by checking their
4448 if (!DECL_INITIAL (fn
)
4449 && DECL_ABSTRACT_ORIGIN (fn
)
4450 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn
)))
4451 fn
= DECL_ABSTRACT_ORIGIN (fn
);
4453 /* Don't try to inline functions that are not well-suited to inlining. */
4454 if (cg_edge
->inline_failed
)
4456 reason
= cg_edge
->inline_failed
;
4457 /* If this call was originally indirect, we do not want to emit any
4458 inlining related warnings or sorry messages because there are no
4459 guarantees regarding those. */
4460 if (cg_edge
->indirect_inlining_edge
)
4463 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
))
4464 /* For extern inline functions that get redefined we always
4465 silently ignored always_inline flag. Better behavior would
4466 be to be able to keep both bodies and use extern inline body
4467 for inlining, but we can't do that because frontends overwrite
4469 && !cg_edge
->callee
->local
.redefined_extern_inline
4470 /* During early inline pass, report only when optimization is
4472 && (symtab
->global_info_ready
4474 || cgraph_inline_failed_type (reason
) == CIF_FINAL_ERROR
)
4475 /* PR 20090218-1_0.c. Body can be provided by another module. */
4476 && (reason
!= CIF_BODY_NOT_AVAILABLE
|| !flag_generate_lto
))
4478 error ("inlining failed in call to always_inline %q+F: %s", fn
,
4479 cgraph_inline_failed_string (reason
));
4480 if (gimple_location (stmt
) != UNKNOWN_LOCATION
)
4481 inform (gimple_location (stmt
), "called from here");
4482 else if (DECL_SOURCE_LOCATION (cfun
->decl
) != UNKNOWN_LOCATION
)
4483 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
4484 "called from this function");
4486 else if (warn_inline
4487 && DECL_DECLARED_INLINE_P (fn
)
4488 && !DECL_NO_INLINE_WARNING_P (fn
)
4489 && !DECL_IN_SYSTEM_HEADER (fn
)
4490 && reason
!= CIF_UNSPECIFIED
4491 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn
))
4492 /* Do not warn about not inlined recursive calls. */
4493 && !cg_edge
->recursive_p ()
4494 /* Avoid warnings during early inline pass. */
4495 && symtab
->global_info_ready
)
4497 auto_diagnostic_group d
;
4498 if (warning (OPT_Winline
, "inlining failed in call to %q+F: %s",
4499 fn
, _(cgraph_inline_failed_string (reason
))))
4501 if (gimple_location (stmt
) != UNKNOWN_LOCATION
)
4502 inform (gimple_location (stmt
), "called from here");
4503 else if (DECL_SOURCE_LOCATION (cfun
->decl
) != UNKNOWN_LOCATION
)
4504 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
4505 "called from this function");
4510 id
->src_node
= cg_edge
->callee
;
4512 /* If callee is thunk, all we need is to adjust the THIS pointer
4513 and redirect to function being thunked. */
4514 if (id
->src_node
->thunk
.thunk_p
)
4517 tree virtual_offset
= NULL
;
4518 profile_count count
= cg_edge
->count
;
4520 gimple_stmt_iterator iter
= gsi_for_stmt (stmt
);
4523 edge
= id
->src_node
->callees
->clone (id
->dst_node
, call_stmt
,
4525 profile_count::one (),
4526 profile_count::one (),
4528 edge
->count
= count
;
4529 if (id
->src_node
->thunk
.virtual_offset_p
)
4530 virtual_offset
= size_int (id
->src_node
->thunk
.virtual_value
);
4531 op
= create_tmp_reg_fn (cfun
, TREE_TYPE (gimple_call_arg (stmt
, 0)),
4533 gsi_insert_before (&iter
, gimple_build_assign (op
,
4534 gimple_call_arg (stmt
, 0)),
4536 gcc_assert (id
->src_node
->thunk
.this_adjusting
);
4537 op
= thunk_adjust (&iter
, op
, 1, id
->src_node
->thunk
.fixed_offset
,
4538 virtual_offset
, id
->src_node
->thunk
.indirect_offset
);
4540 gimple_call_set_arg (stmt
, 0, op
);
4541 gimple_call_set_fndecl (stmt
, edge
->callee
->decl
);
4543 id
->src_node
->remove ();
4544 expand_call_inline (bb
, stmt
, id
);
4545 maybe_remove_unused_call_args (cfun
, stmt
);
4548 fn
= cg_edge
->callee
->decl
;
4549 cg_edge
->callee
->get_untransformed_body ();
4551 if (flag_checking
&& cg_edge
->callee
->decl
!= id
->dst_node
->decl
)
4552 cg_edge
->callee
->verify ();
4554 /* We will be inlining this callee. */
4555 id
->eh_lp_nr
= lookup_stmt_eh_lp (stmt
);
4556 id
->assign_stmts
.create (0);
4558 /* Update the callers EH personality. */
4559 if (DECL_FUNCTION_PERSONALITY (fn
))
4560 DECL_FUNCTION_PERSONALITY (cg_edge
->caller
->decl
)
4561 = DECL_FUNCTION_PERSONALITY (fn
);
4563 /* Split the block before the GIMPLE_CALL. */
4564 stmt_gsi
= gsi_for_stmt (stmt
);
4565 gsi_prev (&stmt_gsi
);
4566 e
= split_block (bb
, gsi_end_p (stmt_gsi
) ? NULL
: gsi_stmt (stmt_gsi
));
4568 return_block
= e
->dest
;
4571 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4572 been the source of abnormal edges. In this case, schedule
4573 the removal of dead abnormal edges. */
4574 gsi
= gsi_start_bb (return_block
);
4576 purge_dead_abnormal_edges
= gsi_end_p (gsi
);
4578 stmt_gsi
= gsi_start_bb (return_block
);
4580 /* Build a block containing code to initialize the arguments, the
4581 actual inline expansion of the body, and a label for the return
4582 statements within the function to jump to. The type of the
4583 statement expression is the return type of the function call.
4584 ??? If the call does not have an associated block then we will
4585 remap all callee blocks to NULL, effectively dropping most of
4586 its debug information. This should only happen for calls to
4587 artificial decls inserted by the compiler itself. We need to
4588 either link the inlined blocks into the caller block tree or
4589 not refer to them in any way to not break GC for locations. */
4590 if (gimple_block (stmt
))
4592 /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4593 to make inlined_function_outer_scope_p return true on this BLOCK. */
4594 location_t loc
= LOCATION_LOCUS (gimple_location (stmt
));
4595 if (loc
== UNKNOWN_LOCATION
)
4596 loc
= LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn
));
4597 if (loc
== UNKNOWN_LOCATION
)
4598 loc
= BUILTINS_LOCATION
;
4599 id
->block
= make_node (BLOCK
);
4600 BLOCK_ABSTRACT_ORIGIN (id
->block
) = DECL_ORIGIN (fn
);
4601 BLOCK_SOURCE_LOCATION (id
->block
) = loc
;
4602 prepend_lexical_block (gimple_block (stmt
), id
->block
);
4605 /* Local declarations will be replaced by their equivalents in this map. */
4607 id
->decl_map
= new hash_map
<tree
, tree
>;
4608 dst
= id
->debug_map
;
4609 id
->debug_map
= NULL
;
4610 if (flag_stack_reuse
!= SR_NONE
)
4611 id
->add_clobbers_to_eh_landing_pads
= last_basic_block_for_fn (cfun
);
4613 /* Record the function we are about to inline. */
4615 id
->src_cfun
= DECL_STRUCT_FUNCTION (fn
);
4616 id
->reset_location
= DECL_IGNORED_P (fn
);
4617 id
->call_stmt
= call_stmt
;
4619 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4620 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4621 dst_cfun
= DECL_STRUCT_FUNCTION (id
->dst_fn
);
4622 simtvars_save
= id
->dst_simt_vars
;
4623 if (!(dst_cfun
->curr_properties
& PROP_gimple_lomp_dev
)
4624 && (simduid
= bb
->loop_father
->simduid
) != NULL_TREE
4625 && (simduid
= ssa_default_def (dst_cfun
, simduid
)) != NULL_TREE
4626 && single_imm_use (simduid
, &use
, &simtenter_stmt
)
4627 && is_gimple_call (simtenter_stmt
)
4628 && gimple_call_internal_p (simtenter_stmt
, IFN_GOMP_SIMT_ENTER
))
4629 vec_alloc (id
->dst_simt_vars
, 0);
4631 id
->dst_simt_vars
= NULL
;
4633 if (profile_status_for_fn (id
->src_cfun
) == PROFILE_ABSENT
)
4634 profile_status_for_fn (dst_cfun
) = PROFILE_ABSENT
;
4636 /* If the src function contains an IFN_VA_ARG, then so will the dst
4637 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4638 prop_mask
= PROP_gimple_lva
| PROP_gimple_lomp_dev
;
4639 src_properties
= id
->src_cfun
->curr_properties
& prop_mask
;
4640 if (src_properties
!= prop_mask
)
4641 dst_cfun
->curr_properties
&= src_properties
| ~prop_mask
;
4643 gcc_assert (!id
->src_cfun
->after_inlining
);
4646 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn
)))
4648 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4649 gsi_insert_after (&si
, gimple_build_predict (PRED_COLD_FUNCTION
,
4653 initialize_inlined_parameters (id
, stmt
, fn
, bb
);
4654 if (debug_nonbind_markers_p
&& debug_inline_points
&& id
->block
4655 && inlined_function_outer_scope_p (id
->block
))
4657 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4658 gsi_insert_after (&si
, gimple_build_debug_inline_entry
4659 (id
->block
, DECL_SOURCE_LOCATION (id
->src_fn
)),
4663 if (DECL_INITIAL (fn
))
4665 if (gimple_block (stmt
))
4669 prepend_lexical_block (id
->block
,
4670 remap_blocks (DECL_INITIAL (fn
), id
));
4671 gcc_checking_assert (BLOCK_SUBBLOCKS (id
->block
)
4672 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id
->block
))
4674 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4675 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4676 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4677 under it. The parameters can be then evaluated in the debugger,
4678 but don't show in backtraces. */
4679 for (var
= &BLOCK_VARS (BLOCK_SUBBLOCKS (id
->block
)); *var
; )
4680 if (TREE_CODE (DECL_ORIGIN (*var
)) == PARM_DECL
)
4683 *var
= TREE_CHAIN (v
);
4684 TREE_CHAIN (v
) = BLOCK_VARS (id
->block
);
4685 BLOCK_VARS (id
->block
) = v
;
4688 var
= &TREE_CHAIN (*var
);
4691 remap_blocks_to_null (DECL_INITIAL (fn
), id
);
4694 /* Return statements in the function body will be replaced by jumps
4695 to the RET_LABEL. */
4696 gcc_assert (DECL_INITIAL (fn
));
4697 gcc_assert (TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
);
4699 /* Find the LHS to which the result of this call is assigned. */
4701 if (gimple_call_lhs (stmt
))
4703 modify_dest
= gimple_call_lhs (stmt
);
4705 /* The function which we are inlining might not return a value,
4706 in which case we should issue a warning that the function
4707 does not return a value. In that case the optimizers will
4708 see that the variable to which the value is assigned was not
4709 initialized. We do not want to issue a warning about that
4710 uninitialized variable. */
4711 if (DECL_P (modify_dest
))
4712 TREE_NO_WARNING (modify_dest
) = 1;
4714 if (gimple_call_return_slot_opt_p (call_stmt
))
4716 return_slot
= modify_dest
;
4723 /* If we are inlining a call to the C++ operator new, we don't want
4724 to use type based alias analysis on the return value. Otherwise
4725 we may get confused if the compiler sees that the inlined new
4726 function returns a pointer which was just deleted. See bug
4728 if (DECL_IS_OPERATOR_NEW (fn
))
4734 /* Declare the return variable for the function. */
4735 use_retvar
= declare_return_variable (id
, return_slot
, modify_dest
, bb
);
4737 /* Add local vars in this inlined callee to caller. */
4738 add_local_variables (id
->src_cfun
, cfun
, id
);
4740 if (dump_enabled_p ())
4743 snprintf (buf
, sizeof(buf
), "%4.2f",
4744 cg_edge
->sreal_frequency ().to_double ());
4745 dump_printf_loc (MSG_NOTE
| MSG_PRIORITY_INTERNALS
,
4747 "Inlining %C to %C with frequency %s\n",
4748 id
->src_node
, id
->dst_node
, buf
);
4749 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4751 id
->src_node
->dump (dump_file
);
4752 id
->dst_node
->dump (dump_file
);
4756 /* This is it. Duplicate the callee body. Assume callee is
4757 pre-gimplified. Note that we must not alter the caller
4758 function in any way before this point, as this CALL_EXPR may be
4759 a self-referential call; if we're calling ourselves, we need to
4760 duplicate our body before altering anything. */
4761 copy_body (id
, bb
, return_block
, NULL
);
4763 reset_debug_bindings (id
, stmt_gsi
);
4765 if (flag_stack_reuse
!= SR_NONE
)
4766 for (tree p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
4767 if (!TREE_THIS_VOLATILE (p
))
4769 tree
*varp
= id
->decl_map
->get (p
);
4770 if (varp
&& VAR_P (*varp
) && !is_gimple_reg (*varp
))
4772 tree clobber
= build_constructor (TREE_TYPE (*varp
), NULL
);
4773 gimple
*clobber_stmt
;
4774 TREE_THIS_VOLATILE (clobber
) = 1;
4775 clobber_stmt
= gimple_build_assign (*varp
, clobber
);
4776 gimple_set_location (clobber_stmt
, gimple_location (stmt
));
4777 gsi_insert_before (&stmt_gsi
, clobber_stmt
, GSI_SAME_STMT
);
4781 /* Reset the escaped solution. */
4782 if (cfun
->gimple_df
)
4783 pt_solution_reset (&cfun
->gimple_df
->escaped
);
4785 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4786 if (id
->dst_simt_vars
&& id
->dst_simt_vars
->length () > 0)
4788 size_t nargs
= gimple_call_num_args (simtenter_stmt
);
4789 vec
<tree
> *vars
= id
->dst_simt_vars
;
4790 auto_vec
<tree
> newargs (nargs
+ vars
->length ());
4791 for (size_t i
= 0; i
< nargs
; i
++)
4792 newargs
.quick_push (gimple_call_arg (simtenter_stmt
, i
));
4793 for (tree
*pvar
= vars
->begin (); pvar
!= vars
->end (); pvar
++)
4795 tree ptrtype
= build_pointer_type (TREE_TYPE (*pvar
));
4796 newargs
.quick_push (build1 (ADDR_EXPR
, ptrtype
, *pvar
));
4798 gcall
*g
= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, newargs
);
4799 gimple_call_set_lhs (g
, gimple_call_lhs (simtenter_stmt
));
4800 gimple_stmt_iterator gsi
= gsi_for_stmt (simtenter_stmt
);
4801 gsi_replace (&gsi
, g
, false);
4803 vec_free (id
->dst_simt_vars
);
4804 id
->dst_simt_vars
= simtvars_save
;
4809 delete id
->debug_map
;
4810 id
->debug_map
= dst
;
4812 delete id
->decl_map
;
4815 /* Unlink the calls virtual operands before replacing it. */
4816 unlink_stmt_vdef (stmt
);
4817 if (gimple_vdef (stmt
)
4818 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
4819 release_ssa_name (gimple_vdef (stmt
));
4821 /* If the inlined function returns a result that we care about,
4822 substitute the GIMPLE_CALL with an assignment of the return
4823 variable to the LHS of the call. That is, if STMT was
4824 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4825 if (use_retvar
&& gimple_call_lhs (stmt
))
4827 gimple
*old_stmt
= stmt
;
4828 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), use_retvar
);
4829 gimple_set_location (stmt
, gimple_location (old_stmt
));
4830 gsi_replace (&stmt_gsi
, stmt
, false);
4831 maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
);
4832 /* Append a clobber for id->retvar if easily possible. */
4833 if (flag_stack_reuse
!= SR_NONE
4835 && VAR_P (id
->retvar
)
4836 && id
->retvar
!= return_slot
4837 && id
->retvar
!= modify_dest
4838 && !TREE_THIS_VOLATILE (id
->retvar
)
4839 && !is_gimple_reg (id
->retvar
)
4840 && !stmt_ends_bb_p (stmt
))
4842 tree clobber
= build_constructor (TREE_TYPE (id
->retvar
), NULL
);
4843 gimple
*clobber_stmt
;
4844 TREE_THIS_VOLATILE (clobber
) = 1;
4845 clobber_stmt
= gimple_build_assign (id
->retvar
, clobber
);
4846 gimple_set_location (clobber_stmt
, gimple_location (old_stmt
));
4847 gsi_insert_after (&stmt_gsi
, clobber_stmt
, GSI_SAME_STMT
);
4852 /* Handle the case of inlining a function with no return
4853 statement, which causes the return value to become undefined. */
4854 if (gimple_call_lhs (stmt
)
4855 && TREE_CODE (gimple_call_lhs (stmt
)) == SSA_NAME
)
4857 tree name
= gimple_call_lhs (stmt
);
4858 tree var
= SSA_NAME_VAR (name
);
4859 tree def
= var
? ssa_default_def (cfun
, var
) : NULL
;
4863 /* If the variable is used undefined, make this name
4864 undefined via a move. */
4865 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), def
);
4866 gsi_replace (&stmt_gsi
, stmt
, true);
4872 var
= create_tmp_reg_fn (cfun
, TREE_TYPE (name
), NULL
);
4873 SET_SSA_NAME_VAR_OR_IDENTIFIER (name
, var
);
4875 /* Otherwise make this variable undefined. */
4876 gsi_remove (&stmt_gsi
, true);
4877 set_ssa_default_def (cfun
, var
, name
);
4878 SSA_NAME_DEF_STMT (name
) = gimple_build_nop ();
4881 /* Replace with a clobber for id->retvar. */
4882 else if (flag_stack_reuse
!= SR_NONE
4884 && VAR_P (id
->retvar
)
4885 && id
->retvar
!= return_slot
4886 && id
->retvar
!= modify_dest
4887 && !TREE_THIS_VOLATILE (id
->retvar
)
4888 && !is_gimple_reg (id
->retvar
))
4890 tree clobber
= build_constructor (TREE_TYPE (id
->retvar
), NULL
);
4891 gimple
*clobber_stmt
;
4892 TREE_THIS_VOLATILE (clobber
) = 1;
4893 clobber_stmt
= gimple_build_assign (id
->retvar
, clobber
);
4894 gimple_set_location (clobber_stmt
, gimple_location (stmt
));
4895 gsi_replace (&stmt_gsi
, clobber_stmt
, false);
4896 maybe_clean_or_replace_eh_stmt (stmt
, clobber_stmt
);
4899 gsi_remove (&stmt_gsi
, true);
4902 if (purge_dead_abnormal_edges
)
4904 gimple_purge_dead_eh_edges (return_block
);
4905 gimple_purge_dead_abnormal_call_edges (return_block
);
4908 /* If the value of the new expression is ignored, that's OK. We
4909 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4910 the equivalent inlined version either. */
4911 if (is_gimple_assign (stmt
))
4913 gcc_assert (gimple_assign_single_p (stmt
)
4914 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)));
4915 TREE_USED (gimple_assign_rhs1 (stmt
)) = 1;
4918 id
->assign_stmts
.release ();
4919 id
->add_clobbers_to_eh_landing_pads
= 0;
4921 /* Output the inlining info for this abstract function, since it has been
4922 inlined. If we don't do this now, we can lose the information about the
4923 variables in the function when the blocks get blown away as soon as we
4924 remove the cgraph node. */
4925 if (gimple_block (stmt
))
4926 (*debug_hooks
->outlining_inline_function
) (fn
);
4928 /* Update callgraph if needed. */
4929 cg_edge
->callee
->remove ();
4931 id
->block
= NULL_TREE
;
4932 id
->retvar
= NULL_TREE
;
4933 successfully_inlined
= true;
4936 input_location
= saved_location
;
4937 return successfully_inlined
;
4940 /* Expand call statements reachable from STMT_P.
4941 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4942 in a MODIFY_EXPR. */
4945 gimple_expand_calls_inline (basic_block bb
, copy_body_data
*id
)
4947 gimple_stmt_iterator gsi
;
4948 bool inlined
= false;
4950 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);)
4952 gimple
*stmt
= gsi_stmt (gsi
);
4955 if (is_gimple_call (stmt
)
4956 && !gimple_call_internal_p (stmt
))
4957 inlined
|= expand_call_inline (bb
, stmt
, id
);
4964 /* Walk all basic blocks created after FIRST and try to fold every statement
4965 in the STATEMENTS pointer set. */
4968 fold_marked_statements (int first
, hash_set
<gimple
*> *statements
)
4970 for (; first
< last_basic_block_for_fn (cfun
); first
++)
4971 if (BASIC_BLOCK_FOR_FN (cfun
, first
))
4973 gimple_stmt_iterator gsi
;
4975 for (gsi
= gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, first
));
4978 if (statements
->contains (gsi_stmt (gsi
)))
4980 gimple
*old_stmt
= gsi_stmt (gsi
);
4981 tree old_decl
= is_gimple_call (old_stmt
) ? gimple_call_fndecl (old_stmt
) : 0;
4983 if (old_decl
&& fndecl_built_in_p (old_decl
))
4985 /* Folding builtins can create multiple instructions,
4986 we need to look at all of them. */
4987 gimple_stmt_iterator i2
= gsi
;
4989 if (fold_stmt (&gsi
))
4992 /* If a builtin at the end of a bb folded into nothing,
4993 the following loop won't work. */
4994 if (gsi_end_p (gsi
))
4996 cgraph_update_edges_for_call_stmt (old_stmt
,
5001 i2
= gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, first
));
5006 new_stmt
= gsi_stmt (i2
);
5007 update_stmt (new_stmt
);
5008 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
5011 if (new_stmt
== gsi_stmt (gsi
))
5013 /* It is okay to check only for the very last
5014 of these statements. If it is a throwing
5015 statement nothing will change. If it isn't
5016 this can remove EH edges. If that weren't
5017 correct then because some intermediate stmts
5018 throw, but not the last one. That would mean
5019 we'd have to split the block, which we can't
5020 here and we'd loose anyway. And as builtins
5021 probably never throw, this all
5023 if (maybe_clean_or_replace_eh_stmt (old_stmt
,
5025 gimple_purge_dead_eh_edges (
5026 BASIC_BLOCK_FOR_FN (cfun
, first
));
5033 else if (fold_stmt (&gsi
))
5035 /* Re-read the statement from GSI as fold_stmt() may
5037 gimple
*new_stmt
= gsi_stmt (gsi
);
5038 update_stmt (new_stmt
);
5040 if (is_gimple_call (old_stmt
)
5041 || is_gimple_call (new_stmt
))
5042 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
5045 if (maybe_clean_or_replace_eh_stmt (old_stmt
, new_stmt
))
5046 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun
,
5053 /* Expand calls to inline functions in the body of FN. */
5056 optimize_inline_calls (tree fn
)
5060 int last
= n_basic_blocks_for_fn (cfun
);
5061 bool inlined_p
= false;
5064 memset (&id
, 0, sizeof (id
));
5066 id
.src_node
= id
.dst_node
= cgraph_node::get (fn
);
5067 gcc_assert (id
.dst_node
->definition
);
5069 /* Or any functions that aren't finished yet. */
5070 if (current_function_decl
)
5071 id
.dst_fn
= current_function_decl
;
5073 id
.copy_decl
= copy_decl_maybe_to_var
;
5074 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
5075 id
.transform_new_cfg
= false;
5076 id
.transform_return_to_modify
= true;
5077 id
.transform_parameter
= true;
5078 id
.transform_lang_insert_block
= NULL
;
5079 id
.statements_to_fold
= new hash_set
<gimple
*>;
5081 push_gimplify_context ();
5083 /* We make no attempts to keep dominance info up-to-date. */
5084 free_dominance_info (CDI_DOMINATORS
);
5085 free_dominance_info (CDI_POST_DOMINATORS
);
5087 /* Register specific gimple functions. */
5088 gimple_register_cfg_hooks ();
5090 /* Reach the trees by walking over the CFG, and note the
5091 enclosing basic-blocks in the call edges. */
5092 /* We walk the blocks going forward, because inlined function bodies
5093 will split id->current_basic_block, and the new blocks will
5094 follow it; we'll trudge through them, processing their CALL_EXPRs
5096 FOR_EACH_BB_FN (bb
, cfun
)
5097 inlined_p
|= gimple_expand_calls_inline (bb
, &id
);
5099 pop_gimplify_context (NULL
);
5103 struct cgraph_edge
*e
;
5105 id
.dst_node
->verify ();
5107 /* Double check that we inlined everything we are supposed to inline. */
5108 for (e
= id
.dst_node
->callees
; e
; e
= e
->next_callee
)
5109 gcc_assert (e
->inline_failed
);
5112 /* Fold queued statements. */
5113 update_max_bb_count ();
5114 fold_marked_statements (last
, id
.statements_to_fold
);
5115 delete id
.statements_to_fold
;
5117 gcc_assert (!id
.debug_stmts
.exists ());
5119 /* If we didn't inline into the function there is nothing to do. */
5123 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5126 delete_unreachable_blocks_update_callgraph (&id
);
5128 id
.dst_node
->verify ();
5130 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5131 not possible yet - the IPA passes might make various functions to not
5132 throw and they don't care to proactively update local EH info. This is
5133 done later in fixup_cfg pass that also execute the verification. */
5134 return (TODO_update_ssa
5136 | (gimple_in_ssa_p (cfun
) ? TODO_remove_unused_locals
: 0)
5137 | (gimple_in_ssa_p (cfun
) ? TODO_update_address_taken
: 0)
5138 | (profile_status_for_fn (cfun
) != PROFILE_ABSENT
5139 ? TODO_rebuild_frequencies
: 0));
5142 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5145 copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
5147 enum tree_code code
= TREE_CODE (*tp
);
5148 enum tree_code_class cl
= TREE_CODE_CLASS (code
);
5150 /* We make copies of most nodes. */
5151 if (IS_EXPR_CODE_CLASS (cl
)
5152 || code
== TREE_LIST
5154 || code
== TYPE_DECL
5155 || code
== OMP_CLAUSE
)
5157 /* Because the chain gets clobbered when we make a copy, we save it
5159 tree chain
= NULL_TREE
, new_tree
;
5161 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
5162 chain
= TREE_CHAIN (*tp
);
5164 /* Copy the node. */
5165 new_tree
= copy_node (*tp
);
5169 /* Now, restore the chain, if appropriate. That will cause
5170 walk_tree to walk into the chain as well. */
5171 if (code
== PARM_DECL
5172 || code
== TREE_LIST
5173 || code
== OMP_CLAUSE
)
5174 TREE_CHAIN (*tp
) = chain
;
5176 /* For now, we don't update BLOCKs when we make copies. So, we
5177 have to nullify all BIND_EXPRs. */
5178 if (TREE_CODE (*tp
) == BIND_EXPR
)
5179 BIND_EXPR_BLOCK (*tp
) = NULL_TREE
;
5181 else if (code
== CONSTRUCTOR
)
5183 /* CONSTRUCTOR nodes need special handling because
5184 we need to duplicate the vector of elements. */
5187 new_tree
= copy_node (*tp
);
5188 CONSTRUCTOR_ELTS (new_tree
) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp
));
5191 else if (code
== STATEMENT_LIST
)
5192 /* We used to just abort on STATEMENT_LIST, but we can run into them
5193 with statement-expressions (c++/40975). */
5194 copy_statement_list (tp
);
5195 else if (TREE_CODE_CLASS (code
) == tcc_type
)
5197 else if (TREE_CODE_CLASS (code
) == tcc_declaration
)
5199 else if (TREE_CODE_CLASS (code
) == tcc_constant
)
5204 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5205 information indicating to what new SAVE_EXPR this one should be mapped,
5206 use that one. Otherwise, create a new node and enter it in ST. FN is
5207 the function into which the copy will be placed. */
5210 remap_save_expr (tree
*tp
, hash_map
<tree
, tree
> *st
, int *walk_subtrees
)
5215 /* See if we already encountered this SAVE_EXPR. */
5218 /* If we didn't already remap this SAVE_EXPR, do so now. */
5221 t
= copy_node (*tp
);
5223 /* Remember this SAVE_EXPR. */
5225 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5230 /* We've already walked into this SAVE_EXPR; don't do it again. */
5235 /* Replace this SAVE_EXPR with the copy. */
5239 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5240 label, copies the declaration and enters it in the splay_tree in DATA (which
5241 is really a 'copy_body_data *'. */
5244 mark_local_labels_stmt (gimple_stmt_iterator
*gsip
,
5245 bool *handled_ops_p ATTRIBUTE_UNUSED
,
5246 struct walk_stmt_info
*wi
)
5248 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5249 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsip
));
5253 tree decl
= gimple_label_label (stmt
);
5255 /* Copy the decl and remember the copy. */
5256 insert_decl_map (id
, decl
, id
->copy_decl (decl
, id
));
5262 static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq
,
5263 struct walk_stmt_info
*wi
);
5265 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5266 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5267 remaps all local declarations to appropriate replacements in gimple
5271 replace_locals_op (tree
*tp
, int *walk_subtrees
, void *data
)
5273 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5274 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5275 hash_map
<tree
, tree
> *st
= id
->decl_map
;
5279 /* For recursive invocations this is no longer the LHS itself. */
5280 bool is_lhs
= wi
->is_lhs
;
5283 if (TREE_CODE (expr
) == SSA_NAME
)
5285 *tp
= remap_ssa_name (*tp
, id
);
5288 SSA_NAME_DEF_STMT (*tp
) = gsi_stmt (wi
->gsi
);
5290 /* Only a local declaration (variable or label). */
5291 else if ((VAR_P (expr
) && !TREE_STATIC (expr
))
5292 || TREE_CODE (expr
) == LABEL_DECL
)
5294 /* Lookup the declaration. */
5297 /* If it's there, remap it. */
5302 else if (TREE_CODE (expr
) == STATEMENT_LIST
5303 || TREE_CODE (expr
) == BIND_EXPR
5304 || TREE_CODE (expr
) == SAVE_EXPR
)
5306 else if (TREE_CODE (expr
) == TARGET_EXPR
)
5308 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5309 It's OK for this to happen if it was part of a subtree that
5310 isn't immediately expanded, such as operand 2 of another
5312 if (!TREE_OPERAND (expr
, 1))
5314 TREE_OPERAND (expr
, 1) = TREE_OPERAND (expr
, 3);
5315 TREE_OPERAND (expr
, 3) = NULL_TREE
;
5318 else if (TREE_CODE (expr
) == OMP_CLAUSE
)
5320 /* Before the omplower pass completes, some OMP clauses can contain
5321 sequences that are neither copied by gimple_seq_copy nor walked by
5322 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5323 in those situations, we have to copy and process them explicitely. */
5325 if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_LASTPRIVATE
)
5327 gimple_seq seq
= OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr
);
5328 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5329 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr
) = seq
;
5331 else if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_LINEAR
)
5333 gimple_seq seq
= OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr
);
5334 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5335 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr
) = seq
;
5337 else if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_REDUCTION
)
5339 gimple_seq seq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr
);
5340 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5341 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr
) = seq
;
5342 seq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr
);
5343 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr
) = seq
;
5348 /* Keep iterating. */
5353 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5354 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5355 remaps all local declarations to appropriate replacements in gimple
5359 replace_locals_stmt (gimple_stmt_iterator
*gsip
,
5360 bool *handled_ops_p ATTRIBUTE_UNUSED
,
5361 struct walk_stmt_info
*wi
)
5363 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5364 gimple
*gs
= gsi_stmt (*gsip
);
5366 if (gbind
*stmt
= dyn_cast
<gbind
*> (gs
))
5368 tree block
= gimple_bind_block (stmt
);
5372 remap_block (&block
, id
);
5373 gimple_bind_set_block (stmt
, block
);
5376 /* This will remap a lot of the same decls again, but this should be
5378 if (gimple_bind_vars (stmt
))
5380 tree old_var
, decls
= gimple_bind_vars (stmt
);
5382 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
5383 if (!can_be_nonlocal (old_var
, id
)
5384 && ! variably_modified_type_p (TREE_TYPE (old_var
), id
->src_fn
))
5385 remap_decl (old_var
, id
);
5387 gcc_checking_assert (!id
->prevent_decl_creation_for_types
);
5388 id
->prevent_decl_creation_for_types
= true;
5389 gimple_bind_set_vars (stmt
, remap_decls (decls
, NULL
, id
));
5390 id
->prevent_decl_creation_for_types
= false;
5394 /* Keep iterating. */
5398 /* Create a copy of SEQ and remap all decls in it. */
5401 duplicate_remap_omp_clause_seq (gimple_seq seq
, struct walk_stmt_info
*wi
)
5406 /* If there are any labels in OMP sequences, they can be only referred to in
5407 the sequence itself and therefore we can do both here. */
5408 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, wi
);
5409 gimple_seq copy
= gimple_seq_copy (seq
);
5410 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, wi
);
5414 /* Copies everything in SEQ and replaces variables and labels local to
5415 current_function_decl. */
5418 copy_gimple_seq_and_replace_locals (gimple_seq seq
)
5421 struct walk_stmt_info wi
;
5424 /* There's nothing to do for NULL_TREE. */
5429 memset (&id
, 0, sizeof (id
));
5430 id
.src_fn
= current_function_decl
;
5431 id
.dst_fn
= current_function_decl
;
5433 id
.decl_map
= new hash_map
<tree
, tree
>;
5434 id
.debug_map
= NULL
;
5436 id
.copy_decl
= copy_decl_no_change
;
5437 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
5438 id
.transform_new_cfg
= false;
5439 id
.transform_return_to_modify
= false;
5440 id
.transform_parameter
= false;
5441 id
.transform_lang_insert_block
= NULL
;
5443 /* Walk the tree once to find local labels. */
5444 memset (&wi
, 0, sizeof (wi
));
5445 hash_set
<tree
> visited
;
5448 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, &wi
);
5450 copy
= gimple_seq_copy (seq
);
5452 /* Walk the copy, remapping decls. */
5453 memset (&wi
, 0, sizeof (wi
));
5455 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, &wi
);
5460 delete id
.debug_map
;
5461 if (id
.dependence_map
)
5463 delete id
.dependence_map
;
5464 id
.dependence_map
= NULL
;
5471 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5474 debug_find_tree_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
, void *data
)
5483 debug_find_tree (tree top
, tree search
)
5485 return walk_tree_without_duplicates (&top
, debug_find_tree_1
, search
) != 0;
5489 /* Declare the variables created by the inliner. Add all the variables in
5490 VARS to BIND_EXPR. */
5493 declare_inline_vars (tree block
, tree vars
)
5496 for (t
= vars
; t
; t
= DECL_CHAIN (t
))
5498 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
5499 gcc_assert (!TREE_STATIC (t
) && !TREE_ASM_WRITTEN (t
));
5500 add_local_decl (cfun
, t
);
5504 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), vars
);
5507 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5508 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5509 VAR_DECL translation. */
5512 copy_decl_for_dup_finish (copy_body_data
*id
, tree decl
, tree copy
)
5514 /* Don't generate debug information for the copy if we wouldn't have
5515 generated it for the copy either. */
5516 DECL_ARTIFICIAL (copy
) = DECL_ARTIFICIAL (decl
);
5517 DECL_IGNORED_P (copy
) = DECL_IGNORED_P (decl
);
5519 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5520 declaration inspired this copy. */
5521 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
5523 /* The new variable/label has no RTL, yet. */
5524 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy
), TS_DECL_WRTL
)
5525 && !TREE_STATIC (copy
) && !DECL_EXTERNAL (copy
))
5526 SET_DECL_RTL (copy
, 0);
5527 /* For vector typed decls make sure to update DECL_MODE according
5528 to the new function context. */
5529 if (VECTOR_TYPE_P (TREE_TYPE (copy
)))
5530 SET_DECL_MODE (copy
, TYPE_MODE (TREE_TYPE (copy
)));
5532 /* These args would always appear unused, if not for this. */
5533 TREE_USED (copy
) = 1;
5535 /* Set the context for the new declaration. */
5536 if (!DECL_CONTEXT (decl
))
5537 /* Globals stay global. */
5539 else if (DECL_CONTEXT (decl
) != id
->src_fn
)
5540 /* Things that weren't in the scope of the function we're inlining
5541 from aren't in the scope we're inlining to, either. */
5543 else if (TREE_STATIC (decl
))
5544 /* Function-scoped static variables should stay in the original
5549 /* Ordinary automatic local variables are now in the scope of the
5551 DECL_CONTEXT (copy
) = id
->dst_fn
;
5552 if (VAR_P (copy
) && id
->dst_simt_vars
&& !is_gimple_reg (copy
))
5554 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy
)))
5555 DECL_ATTRIBUTES (copy
)
5556 = tree_cons (get_identifier ("omp simt private"), NULL
,
5557 DECL_ATTRIBUTES (copy
));
5558 id
->dst_simt_vars
->safe_push (copy
);
5566 copy_decl_to_var (tree decl
, copy_body_data
*id
)
5570 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5571 || TREE_CODE (decl
) == RESULT_DECL
);
5573 type
= TREE_TYPE (decl
);
5575 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5576 VAR_DECL
, DECL_NAME (decl
), type
);
5577 if (DECL_PT_UID_SET_P (decl
))
5578 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5579 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5580 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5581 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5582 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5584 return copy_decl_for_dup_finish (id
, decl
, copy
);
5587 /* Like copy_decl_to_var, but create a return slot object instead of a
5588 pointer variable for return by invisible reference. */
5591 copy_result_decl_to_var (tree decl
, copy_body_data
*id
)
5595 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5596 || TREE_CODE (decl
) == RESULT_DECL
);
5598 type
= TREE_TYPE (decl
);
5599 if (DECL_BY_REFERENCE (decl
))
5600 type
= TREE_TYPE (type
);
5602 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5603 VAR_DECL
, DECL_NAME (decl
), type
);
5604 if (DECL_PT_UID_SET_P (decl
))
5605 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5606 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5607 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5608 if (!DECL_BY_REFERENCE (decl
))
5610 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5611 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5614 return copy_decl_for_dup_finish (id
, decl
, copy
);
5618 copy_decl_no_change (tree decl
, copy_body_data
*id
)
5622 copy
= copy_node (decl
);
5624 /* The COPY is not abstract; it will be generated in DST_FN. */
5625 DECL_ABSTRACT_P (copy
) = false;
5626 lang_hooks
.dup_lang_specific_decl (copy
);
5628 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5629 been taken; it's for internal bookkeeping in expand_goto_internal. */
5630 if (TREE_CODE (copy
) == LABEL_DECL
)
5632 TREE_ADDRESSABLE (copy
) = 0;
5633 LABEL_DECL_UID (copy
) = -1;
5636 return copy_decl_for_dup_finish (id
, decl
, copy
);
5640 copy_decl_maybe_to_var (tree decl
, copy_body_data
*id
)
5642 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
5643 return copy_decl_to_var (decl
, id
);
5645 return copy_decl_no_change (decl
, id
);
5648 /* Return a copy of the function's argument tree. */
5650 copy_arguments_for_versioning (tree orig_parm
, copy_body_data
* id
,
5651 bitmap args_to_skip
, tree
*vars
)
5654 tree new_parm
= NULL
;
5659 for (arg
= orig_parm
; arg
; arg
= DECL_CHAIN (arg
), i
++)
5660 if (!args_to_skip
|| !bitmap_bit_p (args_to_skip
, i
))
5662 tree new_tree
= remap_decl (arg
, id
);
5663 if (TREE_CODE (new_tree
) != PARM_DECL
)
5664 new_tree
= id
->copy_decl (arg
, id
);
5665 lang_hooks
.dup_lang_specific_decl (new_tree
);
5667 parg
= &DECL_CHAIN (new_tree
);
5669 else if (!id
->decl_map
->get (arg
))
5671 /* Make an equivalent VAR_DECL. If the argument was used
5672 as temporary variable later in function, the uses will be
5673 replaced by local variable. */
5674 tree var
= copy_decl_to_var (arg
, id
);
5675 insert_decl_map (id
, arg
, var
);
5676 /* Declare this new variable. */
5677 DECL_CHAIN (var
) = *vars
;
5683 /* Return a copy of the function's static chain. */
5685 copy_static_chain (tree static_chain
, copy_body_data
* id
)
5687 tree
*chain_copy
, *pvar
;
5689 chain_copy
= &static_chain
;
5690 for (pvar
= chain_copy
; *pvar
; pvar
= &DECL_CHAIN (*pvar
))
5692 tree new_tree
= remap_decl (*pvar
, id
);
5693 lang_hooks
.dup_lang_specific_decl (new_tree
);
5694 DECL_CHAIN (new_tree
) = DECL_CHAIN (*pvar
);
5697 return static_chain
;
5700 /* Return true if the function is allowed to be versioned.
5701 This is a guard for the versioning functionality. */
5704 tree_versionable_function_p (tree fndecl
)
5706 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl
))
5707 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl
)) == NULL
);
5710 /* Delete all unreachable basic blocks and update callgraph.
5711 Doing so is somewhat nontrivial because we need to update all clones and
5712 remove inline function that become unreachable. */
5715 delete_unreachable_blocks_update_callgraph (copy_body_data
*id
)
5717 bool changed
= false;
5718 basic_block b
, next_bb
;
5720 find_unreachable_blocks ();
5722 /* Delete all unreachable basic blocks. */
5724 for (b
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; b
5725 != EXIT_BLOCK_PTR_FOR_FN (cfun
); b
= next_bb
)
5727 next_bb
= b
->next_bb
;
5729 if (!(b
->flags
& BB_REACHABLE
))
5731 gimple_stmt_iterator bsi
;
5733 for (bsi
= gsi_start_bb (b
); !gsi_end_p (bsi
); gsi_next (&bsi
))
5735 struct cgraph_edge
*e
;
5736 struct cgraph_node
*node
;
5738 id
->dst_node
->remove_stmt_references (gsi_stmt (bsi
));
5740 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5741 &&(e
= id
->dst_node
->get_edge (gsi_stmt (bsi
))) != NULL
)
5743 if (!e
->inline_failed
)
5744 e
->callee
->remove_symbol_and_inline_clones (id
->dst_node
);
5748 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
5749 && id
->dst_node
->clones
)
5750 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5752 node
->remove_stmt_references (gsi_stmt (bsi
));
5753 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5754 && (e
= node
->get_edge (gsi_stmt (bsi
))) != NULL
)
5756 if (!e
->inline_failed
)
5757 e
->callee
->remove_symbol_and_inline_clones (id
->dst_node
);
5763 node
= node
->clones
;
5764 else if (node
->next_sibling_clone
)
5765 node
= node
->next_sibling_clone
;
5768 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5769 node
= node
->clone_of
;
5770 if (node
!= id
->dst_node
)
5771 node
= node
->next_sibling_clone
;
5775 delete_basic_block (b
);
5783 /* Update clone info after duplication. */
5786 update_clone_info (copy_body_data
* id
)
5788 struct cgraph_node
*node
;
5789 if (!id
->dst_node
->clones
)
5791 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5793 /* First update replace maps to match the new body. */
5794 if (node
->clone
.tree_map
)
5797 for (i
= 0; i
< vec_safe_length (node
->clone
.tree_map
); i
++)
5799 struct ipa_replace_map
*replace_info
;
5800 replace_info
= (*node
->clone
.tree_map
)[i
];
5801 walk_tree (&replace_info
->old_tree
, copy_tree_body_r
, id
, NULL
);
5802 walk_tree (&replace_info
->new_tree
, copy_tree_body_r
, id
, NULL
);
5806 node
= node
->clones
;
5807 else if (node
->next_sibling_clone
)
5808 node
= node
->next_sibling_clone
;
5811 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5812 node
= node
->clone_of
;
5813 if (node
!= id
->dst_node
)
5814 node
= node
->next_sibling_clone
;
5819 /* Create a copy of a function's tree.
5820 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5821 of the original function and the new copied function
5822 respectively. In case we want to replace a DECL
5823 tree with another tree while duplicating the function's
5824 body, TREE_MAP represents the mapping between these
5825 trees. If UPDATE_CLONES is set, the call_stmt fields
5826 of edges of clones of the function will be updated.
5828 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5830 If SKIP_RETURN is true, the new version will return void.
5831 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5832 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5835 tree_function_versioning (tree old_decl
, tree new_decl
,
5836 vec
<ipa_replace_map
*, va_gc
> *tree_map
,
5837 bool update_clones
, bitmap args_to_skip
,
5838 bool skip_return
, bitmap blocks_to_copy
,
5839 basic_block new_entry
)
5841 struct cgraph_node
*old_version_node
;
5842 struct cgraph_node
*new_version_node
;
5846 struct ipa_replace_map
*replace_info
;
5847 basic_block old_entry_block
, bb
;
5848 auto_vec
<gimple
*, 10> init_stmts
;
5849 tree vars
= NULL_TREE
;
5850 bitmap debug_args_to_skip
= args_to_skip
;
5852 gcc_assert (TREE_CODE (old_decl
) == FUNCTION_DECL
5853 && TREE_CODE (new_decl
) == FUNCTION_DECL
);
5854 DECL_POSSIBLY_INLINED (old_decl
) = 1;
5856 old_version_node
= cgraph_node::get (old_decl
);
5857 gcc_checking_assert (old_version_node
);
5858 new_version_node
= cgraph_node::get (new_decl
);
5859 gcc_checking_assert (new_version_node
);
5861 /* Copy over debug args. */
5862 if (DECL_HAS_DEBUG_ARGS_P (old_decl
))
5864 vec
<tree
, va_gc
> **new_debug_args
, **old_debug_args
;
5865 gcc_checking_assert (decl_debug_args_lookup (new_decl
) == NULL
);
5866 DECL_HAS_DEBUG_ARGS_P (new_decl
) = 0;
5867 old_debug_args
= decl_debug_args_lookup (old_decl
);
5870 new_debug_args
= decl_debug_args_insert (new_decl
);
5871 *new_debug_args
= vec_safe_copy (*old_debug_args
);
5875 /* Output the inlining info for this abstract function, since it has been
5876 inlined. If we don't do this now, we can lose the information about the
5877 variables in the function when the blocks get blown away as soon as we
5878 remove the cgraph node. */
5879 (*debug_hooks
->outlining_inline_function
) (old_decl
);
5881 DECL_ARTIFICIAL (new_decl
) = 1;
5882 DECL_ABSTRACT_ORIGIN (new_decl
) = DECL_ORIGIN (old_decl
);
5883 if (DECL_ORIGIN (old_decl
) == old_decl
)
5884 old_version_node
->used_as_abstract_origin
= true;
5885 DECL_FUNCTION_PERSONALITY (new_decl
) = DECL_FUNCTION_PERSONALITY (old_decl
);
5887 /* Prepare the data structures for the tree copy. */
5888 memset (&id
, 0, sizeof (id
));
5890 /* Generate a new name for the new version. */
5891 id
.statements_to_fold
= new hash_set
<gimple
*>;
5893 id
.decl_map
= new hash_map
<tree
, tree
>;
5894 id
.debug_map
= NULL
;
5895 id
.src_fn
= old_decl
;
5896 id
.dst_fn
= new_decl
;
5897 id
.src_node
= old_version_node
;
5898 id
.dst_node
= new_version_node
;
5899 id
.src_cfun
= DECL_STRUCT_FUNCTION (old_decl
);
5900 id
.blocks_to_copy
= blocks_to_copy
;
5902 id
.copy_decl
= copy_decl_no_change
;
5903 id
.transform_call_graph_edges
5904 = update_clones
? CB_CGE_MOVE_CLONES
: CB_CGE_MOVE
;
5905 id
.transform_new_cfg
= true;
5906 id
.transform_return_to_modify
= false;
5907 id
.transform_parameter
= false;
5908 id
.transform_lang_insert_block
= NULL
;
5910 old_entry_block
= ENTRY_BLOCK_PTR_FOR_FN
5911 (DECL_STRUCT_FUNCTION (old_decl
));
5912 DECL_RESULT (new_decl
) = DECL_RESULT (old_decl
);
5913 DECL_ARGUMENTS (new_decl
) = DECL_ARGUMENTS (old_decl
);
5914 initialize_cfun (new_decl
, old_decl
,
5915 new_entry
? new_entry
->count
: old_entry_block
->count
);
5916 if (DECL_STRUCT_FUNCTION (new_decl
)->gimple_df
)
5917 DECL_STRUCT_FUNCTION (new_decl
)->gimple_df
->ipa_pta
5918 = id
.src_cfun
->gimple_df
->ipa_pta
;
5920 /* Copy the function's static chain. */
5921 p
= DECL_STRUCT_FUNCTION (old_decl
)->static_chain_decl
;
5923 DECL_STRUCT_FUNCTION (new_decl
)->static_chain_decl
5924 = copy_static_chain (p
, &id
);
5926 /* If there's a tree_map, prepare for substitution. */
5928 for (i
= 0; i
< tree_map
->length (); i
++)
5931 replace_info
= (*tree_map
)[i
];
5932 if (replace_info
->replace_p
)
5935 if (!replace_info
->old_tree
)
5937 int p
= replace_info
->parm_num
;
5939 tree req_type
, new_type
;
5941 for (parm
= DECL_ARGUMENTS (old_decl
); p
;
5942 parm
= DECL_CHAIN (parm
))
5944 replace_info
->old_tree
= parm
;
5945 parm_num
= replace_info
->parm_num
;
5946 req_type
= TREE_TYPE (parm
);
5947 new_type
= TREE_TYPE (replace_info
->new_tree
);
5948 if (!useless_type_conversion_p (req_type
, new_type
))
5950 if (fold_convertible_p (req_type
, replace_info
->new_tree
))
5951 replace_info
->new_tree
5952 = fold_build1 (NOP_EXPR
, req_type
,
5953 replace_info
->new_tree
);
5954 else if (TYPE_SIZE (req_type
) == TYPE_SIZE (new_type
))
5955 replace_info
->new_tree
5956 = fold_build1 (VIEW_CONVERT_EXPR
, req_type
,
5957 replace_info
->new_tree
);
5962 fprintf (dump_file
, " const ");
5963 print_generic_expr (dump_file
,
5964 replace_info
->new_tree
);
5966 " can't be converted to param ");
5967 print_generic_expr (dump_file
, parm
);
5968 fprintf (dump_file
, "\n");
5970 replace_info
->old_tree
= NULL
;
5975 gcc_assert (TREE_CODE (replace_info
->old_tree
) == PARM_DECL
);
5976 if (replace_info
->old_tree
)
5978 init
= setup_one_parameter (&id
, replace_info
->old_tree
,
5979 replace_info
->new_tree
, id
.src_fn
,
5983 init_stmts
.safe_push (init
);
5984 if (MAY_HAVE_DEBUG_BIND_STMTS
&& args_to_skip
)
5990 for (parm
= DECL_ARGUMENTS (old_decl
), p
= 0; parm
;
5991 parm
= DECL_CHAIN (parm
), p
++)
5992 if (parm
== replace_info
->old_tree
)
6000 if (debug_args_to_skip
== args_to_skip
)
6002 debug_args_to_skip
= BITMAP_ALLOC (NULL
);
6003 bitmap_copy (debug_args_to_skip
, args_to_skip
);
6005 bitmap_clear_bit (debug_args_to_skip
, parm_num
);
6011 /* Copy the function's arguments. */
6012 if (DECL_ARGUMENTS (old_decl
) != NULL_TREE
)
6013 DECL_ARGUMENTS (new_decl
)
6014 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl
), &id
,
6015 args_to_skip
, &vars
);
6017 DECL_INITIAL (new_decl
) = remap_blocks (DECL_INITIAL (id
.src_fn
), &id
);
6018 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl
)) = new_decl
;
6020 declare_inline_vars (DECL_INITIAL (new_decl
), vars
);
6022 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl
)->local_decls
))
6023 /* Add local vars. */
6024 add_local_variables (DECL_STRUCT_FUNCTION (old_decl
), cfun
, &id
);
6026 if (DECL_RESULT (old_decl
) == NULL_TREE
)
6028 else if (skip_return
&& !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl
))))
6030 DECL_RESULT (new_decl
)
6031 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl
)),
6032 RESULT_DECL
, NULL_TREE
, void_type_node
);
6033 DECL_CONTEXT (DECL_RESULT (new_decl
)) = new_decl
;
6034 cfun
->returns_struct
= 0;
6035 cfun
->returns_pcc_struct
= 0;
6040 DECL_RESULT (new_decl
) = remap_decl (DECL_RESULT (old_decl
), &id
);
6041 lang_hooks
.dup_lang_specific_decl (DECL_RESULT (new_decl
));
6042 if (gimple_in_ssa_p (id
.src_cfun
)
6043 && DECL_BY_REFERENCE (DECL_RESULT (old_decl
))
6044 && (old_name
= ssa_default_def (id
.src_cfun
, DECL_RESULT (old_decl
))))
6046 tree new_name
= make_ssa_name (DECL_RESULT (new_decl
));
6047 insert_decl_map (&id
, old_name
, new_name
);
6048 SSA_NAME_DEF_STMT (new_name
) = gimple_build_nop ();
6049 set_ssa_default_def (cfun
, DECL_RESULT (new_decl
), new_name
);
6053 /* Set up the destination functions loop tree. */
6054 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl
)) != NULL
)
6056 cfun
->curr_properties
&= ~PROP_loops
;
6057 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
6058 cfun
->curr_properties
|= PROP_loops
;
6061 /* Copy the Function's body. */
6062 copy_body (&id
, ENTRY_BLOCK_PTR_FOR_FN (cfun
), EXIT_BLOCK_PTR_FOR_FN (cfun
),
6065 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6066 number_blocks (new_decl
);
6068 /* We want to create the BB unconditionally, so that the addition of
6069 debug stmts doesn't affect BB count, which may in the end cause
6070 codegen differences. */
6071 bb
= split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
6072 while (init_stmts
.length ())
6073 insert_init_stmt (&id
, bb
, init_stmts
.pop ());
6074 update_clone_info (&id
);
6076 /* Remap the nonlocal_goto_save_area, if any. */
6077 if (cfun
->nonlocal_goto_save_area
)
6079 struct walk_stmt_info wi
;
6081 memset (&wi
, 0, sizeof (wi
));
6083 walk_tree (&cfun
->nonlocal_goto_save_area
, remap_gimple_op_r
, &wi
, NULL
);
6089 delete id
.debug_map
;
6090 free_dominance_info (CDI_DOMINATORS
);
6091 free_dominance_info (CDI_POST_DOMINATORS
);
6093 update_max_bb_count ();
6094 fold_marked_statements (0, id
.statements_to_fold
);
6095 delete id
.statements_to_fold
;
6096 delete_unreachable_blocks_update_callgraph (&id
);
6097 if (id
.dst_node
->definition
)
6098 cgraph_edge::rebuild_references ();
6099 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP
))
6101 calculate_dominance_info (CDI_DOMINATORS
);
6102 fix_loop_structure (NULL
);
6104 update_ssa (TODO_update_ssa
);
6106 /* After partial cloning we need to rescale frequencies, so they are
6107 within proper range in the cloned function. */
6110 struct cgraph_edge
*e
;
6111 rebuild_frequencies ();
6113 new_version_node
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
6114 for (e
= new_version_node
->callees
; e
; e
= e
->next_callee
)
6116 basic_block bb
= gimple_bb (e
->call_stmt
);
6117 e
->count
= bb
->count
;
6119 for (e
= new_version_node
->indirect_calls
; e
; e
= e
->next_callee
)
6121 basic_block bb
= gimple_bb (e
->call_stmt
);
6122 e
->count
= bb
->count
;
6126 if (debug_args_to_skip
&& MAY_HAVE_DEBUG_BIND_STMTS
)
6129 vec
<tree
, va_gc
> **debug_args
= NULL
;
6130 unsigned int len
= 0;
6131 for (parm
= DECL_ARGUMENTS (old_decl
), i
= 0;
6132 parm
; parm
= DECL_CHAIN (parm
), i
++)
6133 if (bitmap_bit_p (debug_args_to_skip
, i
) && is_gimple_reg (parm
))
6137 if (debug_args
== NULL
)
6139 debug_args
= decl_debug_args_insert (new_decl
);
6140 len
= vec_safe_length (*debug_args
);
6142 ddecl
= make_node (DEBUG_EXPR_DECL
);
6143 DECL_ARTIFICIAL (ddecl
) = 1;
6144 TREE_TYPE (ddecl
) = TREE_TYPE (parm
);
6145 SET_DECL_MODE (ddecl
, DECL_MODE (parm
));
6146 vec_safe_push (*debug_args
, DECL_ORIGIN (parm
));
6147 vec_safe_push (*debug_args
, ddecl
);
6149 if (debug_args
!= NULL
)
6151 /* On the callee side, add
6154 stmts to the first bb where var is a VAR_DECL created for the
6155 optimized away parameter in DECL_INITIAL block. This hints
6156 in the debug info that var (whole DECL_ORIGIN is the parm
6157 PARM_DECL) is optimized away, but could be looked up at the
6158 call site as value of D#X there. */
6159 tree var
= vars
, vexpr
;
6160 gimple_stmt_iterator cgsi
6161 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
6164 i
= vec_safe_length (*debug_args
);
6168 while (var
!= NULL_TREE
6169 && DECL_ABSTRACT_ORIGIN (var
) != (**debug_args
)[i
])
6170 var
= TREE_CHAIN (var
);
6171 if (var
== NULL_TREE
)
6173 vexpr
= make_node (DEBUG_EXPR_DECL
);
6174 parm
= (**debug_args
)[i
];
6175 DECL_ARTIFICIAL (vexpr
) = 1;
6176 TREE_TYPE (vexpr
) = TREE_TYPE (parm
);
6177 SET_DECL_MODE (vexpr
, DECL_MODE (parm
));
6178 def_temp
= gimple_build_debug_bind (var
, vexpr
, NULL
);
6179 gsi_insert_before (&cgsi
, def_temp
, GSI_NEW_STMT
);
6180 def_temp
= gimple_build_debug_source_bind (vexpr
, parm
, NULL
);
6181 gsi_insert_before (&cgsi
, def_temp
, GSI_NEW_STMT
);
6187 if (debug_args_to_skip
&& debug_args_to_skip
!= args_to_skip
)
6188 BITMAP_FREE (debug_args_to_skip
);
6189 free_dominance_info (CDI_DOMINATORS
);
6190 free_dominance_info (CDI_POST_DOMINATORS
);
6192 gcc_assert (!id
.debug_stmts
.exists ());
6197 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6198 the callee and return the inlined body on success. */
6201 maybe_inline_call_in_expr (tree exp
)
6203 tree fn
= get_callee_fndecl (exp
);
6205 /* We can only try to inline "const" functions. */
6206 if (fn
&& TREE_READONLY (fn
) && DECL_SAVED_TREE (fn
))
6208 call_expr_arg_iterator iter
;
6211 hash_map
<tree
, tree
> decl_map
;
6213 /* Remap the parameters. */
6214 for (param
= DECL_ARGUMENTS (fn
), arg
= first_call_expr_arg (exp
, &iter
);
6216 param
= DECL_CHAIN (param
), arg
= next_call_expr_arg (&iter
))
6217 decl_map
.put (param
, arg
);
6219 memset (&id
, 0, sizeof (id
));
6221 id
.dst_fn
= current_function_decl
;
6222 id
.src_cfun
= DECL_STRUCT_FUNCTION (fn
);
6223 id
.decl_map
= &decl_map
;
6225 id
.copy_decl
= copy_decl_no_change
;
6226 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
6227 id
.transform_new_cfg
= false;
6228 id
.transform_return_to_modify
= true;
6229 id
.transform_parameter
= true;
6230 id
.transform_lang_insert_block
= NULL
;
6232 /* Make sure not to unshare trees behind the front-end's back
6233 since front-end specific mechanisms may rely on sharing. */
6234 id
.regimplify
= false;
6235 id
.do_not_unshare
= true;
6237 /* We're not inside any EH region. */
6240 t
= copy_tree_body (&id
);
6242 /* We can only return something suitable for use in a GENERIC
6244 if (TREE_CODE (t
) == MODIFY_EXPR
)
6245 return TREE_OPERAND (t
, 1);
6251 /* Duplicate a type, fields and all. */
6254 build_duplicate_type (tree type
)
6256 struct copy_body_data id
;
6258 memset (&id
, 0, sizeof (id
));
6259 id
.src_fn
= current_function_decl
;
6260 id
.dst_fn
= current_function_decl
;
6262 id
.decl_map
= new hash_map
<tree
, tree
>;
6263 id
.debug_map
= NULL
;
6264 id
.copy_decl
= copy_decl_no_change
;
6266 type
= remap_type_1 (type
, &id
);
6270 delete id
.debug_map
;
6272 TYPE_CANONICAL (type
) = type
;
6277 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6278 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6282 copy_fn (tree fn
, tree
& parms
, tree
& result
)
6286 hash_map
<tree
, tree
> decl_map
;
6291 memset (&id
, 0, sizeof (id
));
6293 id
.dst_fn
= current_function_decl
;
6294 id
.src_cfun
= DECL_STRUCT_FUNCTION (fn
);
6295 id
.decl_map
= &decl_map
;
6297 id
.copy_decl
= copy_decl_no_change
;
6298 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
6299 id
.transform_new_cfg
= false;
6300 id
.transform_return_to_modify
= false;
6301 id
.transform_parameter
= true;
6302 id
.transform_lang_insert_block
= NULL
;
6304 /* Make sure not to unshare trees behind the front-end's back
6305 since front-end specific mechanisms may rely on sharing. */
6306 id
.regimplify
= false;
6307 id
.do_not_unshare
= true;
6309 /* We're not inside any EH region. */
6312 /* Remap the parameters and result and return them to the caller. */
6313 for (param
= DECL_ARGUMENTS (fn
);
6315 param
= DECL_CHAIN (param
))
6317 *p
= remap_decl (param
, &id
);
6318 p
= &DECL_CHAIN (*p
);
6321 if (DECL_RESULT (fn
))
6322 result
= remap_decl (DECL_RESULT (fn
), &id
);
6326 return copy_tree_body (&id
);