1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "diagnostic-core.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "insn-config.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-iterator.h"
37 #include "cgraph.h"
38 #include "intl.h"
39 #include "tree-mudflap.h"
40 #include "tree-flow.h"
41 #include "function.h"
42 #include "tree-flow.h"
43 #include "tree-pretty-print.h"
44 #include "except.h"
45 #include "debug.h"
46 #include "pointer-set.h"
47 #include "ipa-prop.h"
48 #include "value-prof.h"
49 #include "tree-pass.h"
50 #include "target.h"
51 #include "integrate.h"
52
53 #include "rtl.h" /* FIXME: For asm_str_count. */
54
55 /* I'm not really happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"
58
59 /* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements is adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
73 Versioning: a function body is duplicated, and the result is a new
74 function in its own right, rather than being inserted into an existing
75 function's blocks as with inlining. Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined), those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
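/* As a rough illustration of the inlining transformation: given a
   hypothetical callee

     int square (int x) { return x * x; }

   inlined at a call "y = square (a);", the PARM_DECL x is remapped to
   a new local VAR_DECL initialized from a, and the RETURN_EXPR turns
   into a MODIFY_EXPR storing x * x into the variable standing for the
   returned value, which then feeds the use of y at the call site. */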
90
91 /* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104 /* Weights that estimate_num_insns uses to estimate the size of the
105 produced code. */
106
107 eni_weights eni_size_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the time necessary
110 to execute the produced code. */
111
112 eni_weights eni_time_weights;
113
114 /* Prototypes. */
115
116 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
117 static void remap_block (tree *, copy_body_data *);
118 static void copy_bind_expr (tree *, int *, copy_body_data *);
119 static tree mark_local_for_remap_r (tree *, int *, void *);
120 static void unsave_expr_1 (tree);
121 static tree unsave_r (tree *, int *, void *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130
131 /* Insert a tree->tree mapping for ID. Although the name suggests
132 that the trees should be variables, it is used for more than that. */
133
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
136 {
137 *pointer_map_insert (id->decl_map, key) = value;
138
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
143 }
144
145 /* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
147
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150 {
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
153
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
156
157 if (!target_for_debug_bind (key))
158 return;
159
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
162
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
165
166 *pointer_map_insert (id->debug_map, key) = value;
167 }
168
169 /* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173 static int processing_debug_stmt = 0;
174
175 /* Construct new SSA name for old NAME. ID is the inline context. */
176
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
179 {
180 tree new_tree;
181 tree *n;
182
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
184
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
188
189 if (processing_debug_stmt)
190 {
191 processing_debug_stmt = -1;
192 return name;
193 }
194
195 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
196 in copy_bb. */
197 new_tree = remap_decl (SSA_NAME_VAR (name), id);
198
199 /* We might've substituted a constant or another SSA_NAME for
200 the variable.
201
202 Replace the SSA name representing the RESULT_DECL by the variable during
203 inlining: this saves us from needing to introduce a PHI node in case
204 the return value is only partly initialized. */
205 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
206 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
207 || !id->transform_return_to_modify))
208 {
209 struct ptr_info_def *pi;
210 new_tree = make_ssa_name (new_tree, NULL);
211 insert_decl_map (id, name, new_tree);
212 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
213 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
214 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
215 /* At least IPA points-to info can be directly transferred. */
216 if (id->src_cfun->gimple_df
217 && id->src_cfun->gimple_df->ipa_pta
218 && (pi = SSA_NAME_PTR_INFO (name))
219 && !pi->pt.anything)
220 {
221 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
222 new_pi->pt = pi->pt;
223 }
224 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
225 {
226 /* By inlining a function having an uninitialized variable, we might
227 extend its lifetime (the variable might get reused). This causes an
228 ICE in case we end up extending the lifetime of an SSA name across an
229 abnormal edge, and it also increases register pressure.
230
231 We simply initialize all uninitialized vars to 0, except
232 when we are inlining into the very first BB. We could avoid
233 this for all BBs that are not inside strongly connected
234 regions of the CFG, but that is expensive to test. */
235 if (id->entry_bb
236 && is_gimple_reg (SSA_NAME_VAR (name))
237 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
238 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
239 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
240 || EDGE_COUNT (id->entry_bb->preds) != 1))
241 {
242 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
243 gimple init_stmt;
244 tree zero = build_zero_cst (TREE_TYPE (new_tree));
245
246 init_stmt = gimple_build_assign (new_tree, zero);
247 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
248 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
249 }
250 else
251 {
252 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
253 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
254 == name)
255 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
256 }
257 }
258 }
259 else
260 insert_decl_map (id, name, new_tree);
261 return new_tree;
262 }
263
264 /* Remap DECL during the copying of the BLOCK tree for the function. */
265
266 tree
267 remap_decl (tree decl, copy_body_data *id)
268 {
269 tree *n;
270
271 /* We only remap local variables in the current function. */
272
273 /* See if we have remapped this declaration. */
274
275 n = (tree *) pointer_map_contains (id->decl_map, decl);
276
277 if (!n && processing_debug_stmt)
278 {
279 processing_debug_stmt = -1;
280 return decl;
281 }
282
283 /* If we didn't already have an equivalent for this declaration,
284 create one now. */
285 if (!n)
286 {
287 /* Make a copy of the variable or label. */
288 tree t = id->copy_decl (decl, id);
289
290 /* Remember it, so that if we encounter this local entity again
291 we can reuse this copy. Do this early because remap_type may
292 need this decl for TYPE_STUB_DECL. */
293 insert_decl_map (id, decl, t);
294
295 if (!DECL_P (t))
296 return t;
297
298 /* Remap types, if necessary. */
299 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
300 if (TREE_CODE (t) == TYPE_DECL)
301 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
302
303 /* Remap sizes as necessary. */
304 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
305 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
306
307 /* If fields, do likewise for offset and qualifier. */
308 if (TREE_CODE (t) == FIELD_DECL)
309 {
310 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
311 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
312 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
313 }
314
315 if ((TREE_CODE (t) == VAR_DECL
316 || TREE_CODE (t) == RESULT_DECL
317 || TREE_CODE (t) == PARM_DECL)
318 && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
319 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
320 /* We don't want to mark as referenced VAR_DECLs that were
321 not marked as such in the src function. */
322 && (TREE_CODE (decl) != VAR_DECL
323 || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
324 DECL_UID (decl))))
325 add_referenced_var (t);
326 return t;
327 }
328
329 if (id->do_not_unshare)
330 return *n;
331 else
332 return unshare_expr (*n);
333 }
334
335 static tree
336 remap_type_1 (tree type, copy_body_data *id)
337 {
338 tree new_tree, t;
339
340 /* We do need a copy. Build and register it now. If this is a pointer or
341 reference type, remap the designated type and make a new pointer or
342 reference type. */
343 if (TREE_CODE (type) == POINTER_TYPE)
344 {
345 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
346 TYPE_MODE (type),
347 TYPE_REF_CAN_ALIAS_ALL (type));
348 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
349 new_tree = build_type_attribute_qual_variant (new_tree,
350 TYPE_ATTRIBUTES (type),
351 TYPE_QUALS (type));
352 insert_decl_map (id, type, new_tree);
353 return new_tree;
354 }
355 else if (TREE_CODE (type) == REFERENCE_TYPE)
356 {
357 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
358 TYPE_MODE (type),
359 TYPE_REF_CAN_ALIAS_ALL (type));
360 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
361 new_tree = build_type_attribute_qual_variant (new_tree,
362 TYPE_ATTRIBUTES (type),
363 TYPE_QUALS (type));
364 insert_decl_map (id, type, new_tree);
365 return new_tree;
366 }
367 else
368 new_tree = copy_node (type);
369
370 insert_decl_map (id, type, new_tree);
371
372 /* This is a new type, not a copy of an old type. Need to reassociate
373 variants. We can handle everything except the main variant lazily. */
374 t = TYPE_MAIN_VARIANT (type);
375 if (type != t)
376 {
377 t = remap_type (t, id);
378 TYPE_MAIN_VARIANT (new_tree) = t;
379 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
380 TYPE_NEXT_VARIANT (t) = new_tree;
381 }
382 else
383 {
384 TYPE_MAIN_VARIANT (new_tree) = new_tree;
385 TYPE_NEXT_VARIANT (new_tree) = NULL;
386 }
387
388 if (TYPE_STUB_DECL (type))
389 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
390
391 /* Lazily create pointer and reference types. */
392 TYPE_POINTER_TO (new_tree) = NULL;
393 TYPE_REFERENCE_TO (new_tree) = NULL;
394
395 switch (TREE_CODE (new_tree))
396 {
397 case INTEGER_TYPE:
398 case REAL_TYPE:
399 case FIXED_POINT_TYPE:
400 case ENUMERAL_TYPE:
401 case BOOLEAN_TYPE:
402 t = TYPE_MIN_VALUE (new_tree);
403 if (t && TREE_CODE (t) != INTEGER_CST)
404 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
405
406 t = TYPE_MAX_VALUE (new_tree);
407 if (t && TREE_CODE (t) != INTEGER_CST)
408 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
409 return new_tree;
410
411 case FUNCTION_TYPE:
412 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
413 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
414 return new_tree;
415
416 case ARRAY_TYPE:
417 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
418 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
419 break;
420
421 case RECORD_TYPE:
422 case UNION_TYPE:
423 case QUAL_UNION_TYPE:
424 {
425 tree f, nf = NULL;
426
427 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
428 {
429 t = remap_decl (f, id);
430 DECL_CONTEXT (t) = new_tree;
431 DECL_CHAIN (t) = nf;
432 nf = t;
433 }
434 TYPE_FIELDS (new_tree) = nreverse (nf);
435 }
436 break;
437
438 case OFFSET_TYPE:
439 default:
440 /* Shouldn't have been thought variable sized. */
441 gcc_unreachable ();
442 }
443
444 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
445 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
446
447 return new_tree;
448 }
449
450 tree
451 remap_type (tree type, copy_body_data *id)
452 {
453 tree *node;
454 tree tmp;
455
456 if (type == NULL)
457 return type;
458
459 /* See if we have remapped this type. */
460 node = (tree *) pointer_map_contains (id->decl_map, type);
461 if (node)
462 return *node;
463
464 /* The type only needs remapping if it's variably modified. */
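/* For instance, the type of a C99 variable-length array whose bound
   refers to a local variable of the source function is variably
   modified and must be remapped; ordinary global types are not. */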
465 if (! variably_modified_type_p (type, id->src_fn))
466 {
467 insert_decl_map (id, type, type);
468 return type;
469 }
470
471 id->remapping_type_depth++;
472 tmp = remap_type_1 (type, id);
473 id->remapping_type_depth--;
474
475 return tmp;
476 }
477
478 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
479 is NULL or TYPE has not been remapped before. */
480
481 static tree
482 remapped_type (tree type, copy_body_data *id)
483 {
484 tree *node;
485
486 if (type == NULL)
487 return type;
488
489 /* See if we have remapped this type. */
490 node = (tree *) pointer_map_contains (id->decl_map, type);
491 if (node)
492 return *node;
493 else
494 return NULL;
495 }
496
498 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
499
500 static bool
501 can_be_nonlocal (tree decl, copy_body_data *id)
502 {
503 /* We cannot duplicate function decls. */
504 if (TREE_CODE (decl) == FUNCTION_DECL)
505 return true;
506
507 /* Local static vars must be non-local or we get multiple declaration
508 problems. */
509 if (TREE_CODE (decl) == VAR_DECL
510 && !auto_var_in_fn_p (decl, id->src_fn))
511 return true;
512
513 /* At the moment dwarf2out can handle only these types of nodes. We
514 can support more later. */
515 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
516 return false;
517
518 /* We must use the global type. We call remapped_type instead of
519 remap_type since we don't want to remap this type here if it
520 hasn't been remapped before. */
521 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
522 return false;
523
524 /* Without SSA we can't tell if the variable is used. */
525 if (!gimple_in_ssa_p (cfun))
526 return false;
527
528 /* Live variables must be copied so we can attach DECL_RTL. */
529 if (var_ann (decl))
530 return false;
531
532 return true;
533 }
534
535 static tree
536 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
537 {
538 tree old_var;
539 tree new_decls = NULL_TREE;
540
541 /* Remap its variables. */
542 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
543 {
544 tree new_var;
545
546 if (can_be_nonlocal (old_var, id))
547 {
548 if (TREE_CODE (old_var) == VAR_DECL
549 && ! DECL_EXTERNAL (old_var)
550 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
551 add_local_decl (cfun, old_var);
552 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
553 && !DECL_IGNORED_P (old_var)
554 && nonlocalized_list)
555 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
556 continue;
557 }
558
559 /* Remap the variable. */
560 new_var = remap_decl (old_var, id);
561
562 /* If we didn't remap this variable, we can't mess with its
563 TREE_CHAIN. If we remapped this variable to the return slot, it's
564 already declared somewhere else, so don't declare it here. */
565
566 if (new_var == id->retvar)
567 ;
568 else if (!new_var)
569 {
570 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
571 && !DECL_IGNORED_P (old_var)
572 && nonlocalized_list)
573 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
574 }
575 else
576 {
577 gcc_assert (DECL_P (new_var));
578 DECL_CHAIN (new_var) = new_decls;
579 new_decls = new_var;
580
581 /* Also copy value-expressions. */
582 if (TREE_CODE (new_var) == VAR_DECL
583 && DECL_HAS_VALUE_EXPR_P (new_var))
584 {
585 tree tem = DECL_VALUE_EXPR (new_var);
586 bool old_regimplify = id->regimplify;
587 id->remapping_type_depth++;
588 walk_tree (&tem, copy_tree_body_r, id, NULL);
589 id->remapping_type_depth--;
590 id->regimplify = old_regimplify;
591 SET_DECL_VALUE_EXPR (new_var, tem);
592 }
593 }
594 }
595
596 return nreverse (new_decls);
597 }
598
599 /* Copy the BLOCK to contain remapped versions of the variables
600 therein. And hook the new block into the block-tree. */
601
602 static void
603 remap_block (tree *block, copy_body_data *id)
604 {
605 tree old_block;
606 tree new_block;
607
608 /* Make the new block. */
609 old_block = *block;
610 new_block = make_node (BLOCK);
611 TREE_USED (new_block) = TREE_USED (old_block);
612 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
613 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
614 BLOCK_NONLOCALIZED_VARS (new_block)
615 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
616 *block = new_block;
617
618 /* Remap its variables. */
619 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
620 &BLOCK_NONLOCALIZED_VARS (new_block),
621 id);
622
623 if (id->transform_lang_insert_block)
624 id->transform_lang_insert_block (new_block);
625
626 /* Remember the remapped block. */
627 insert_decl_map (id, old_block, new_block);
628 }
629
630 /* Copy the whole block tree and root it in id->block. */
631 static tree
632 remap_blocks (tree block, copy_body_data *id)
633 {
634 tree t;
635 tree new_tree = block;
636
637 if (!block)
638 return NULL;
639
640 remap_block (&new_tree, id);
641 gcc_assert (new_tree != block);
642 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
643 prepend_lexical_block (new_tree, remap_blocks (t, id));
644 /* Blocks are in arbitrary order, but make things slightly prettier and do
645 not swap order when producing a copy. */
646 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
647 return new_tree;
648 }
649
650 static void
651 copy_statement_list (tree *tp)
652 {
653 tree_stmt_iterator oi, ni;
654 tree new_tree;
655
656 new_tree = alloc_stmt_list ();
657 ni = tsi_start (new_tree);
658 oi = tsi_start (*tp);
659 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
660 *tp = new_tree;
661
662 for (; !tsi_end_p (oi); tsi_next (&oi))
663 {
664 tree stmt = tsi_stmt (oi);
665 if (TREE_CODE (stmt) == STATEMENT_LIST)
666 /* This copy is not redundant; tsi_link_after will smash this
667 STATEMENT_LIST into the end of the one we're building, and we
668 don't want to do that with the original. */
669 copy_statement_list (&stmt);
670 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
671 }
672 }
673
674 static void
675 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
676 {
677 tree block = BIND_EXPR_BLOCK (*tp);
678 /* Copy (and replace) the statement. */
679 copy_tree_r (tp, walk_subtrees, NULL);
680 if (block)
681 {
682 remap_block (&block, id);
683 BIND_EXPR_BLOCK (*tp) = block;
684 }
685
686 if (BIND_EXPR_VARS (*tp))
687 /* This will remap a lot of the same decls again, but this should be
688 harmless. */
689 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
690 }
691
692
693 /* Create a new gimple_seq by remapping all the statements in BODY
694 using the inlining information in ID. */
695
696 static gimple_seq
697 remap_gimple_seq (gimple_seq body, copy_body_data *id)
698 {
699 gimple_stmt_iterator si;
700 gimple_seq new_body = NULL;
701
702 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
703 {
704 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
705 gimple_seq_add_stmt (&new_body, new_stmt);
706 }
707
708 return new_body;
709 }
710
711
712 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
713 block using the mapping information in ID. */
714
715 static gimple
716 copy_gimple_bind (gimple stmt, copy_body_data *id)
717 {
718 gimple new_bind;
719 tree new_block, new_vars;
720 gimple_seq body, new_body;
721
722 /* Copy the statement. Note that we purposely don't use copy_stmt
723 here because we need to remap statements as we copy. */
724 body = gimple_bind_body (stmt);
725 new_body = remap_gimple_seq (body, id);
726
727 new_block = gimple_bind_block (stmt);
728 if (new_block)
729 remap_block (&new_block, id);
730
731 /* This will remap a lot of the same decls again, but this should be
732 harmless. */
733 new_vars = gimple_bind_vars (stmt);
734 if (new_vars)
735 new_vars = remap_decls (new_vars, NULL, id);
736
737 new_bind = gimple_build_bind (new_vars, new_body, new_block);
738
739 return new_bind;
740 }
741
742
743 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
744 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
745 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
746 recursing into the child nodes of *TP. */
747
748 static tree
749 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
750 {
751 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
752 copy_body_data *id = (copy_body_data *) wi_p->info;
753 tree fn = id->src_fn;
754
755 if (TREE_CODE (*tp) == SSA_NAME)
756 {
757 *tp = remap_ssa_name (*tp, id);
758 *walk_subtrees = 0;
759 return NULL;
760 }
761 else if (auto_var_in_fn_p (*tp, fn))
762 {
763 /* Local variables and labels need to be replaced by equivalent
764 variables. We don't want to copy static variables; there's
765 only one of those, no matter how many times we inline the
766 containing function. Similarly for globals from an outer
767 function. */
768 tree new_decl;
769
770 /* Remap the declaration. */
771 new_decl = remap_decl (*tp, id);
772 gcc_assert (new_decl);
773 /* Replace this variable with the copy. */
774 STRIP_TYPE_NOPS (new_decl);
775 /* ??? The C++ frontend uses void * pointer zero to initialize
776 any other type. This confuses the middle-end type verification.
777 As cloned bodies do not go through gimplification again, the fixup
778 there doesn't trigger. */
779 if (TREE_CODE (new_decl) == INTEGER_CST
780 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
781 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
782 *tp = new_decl;
783 *walk_subtrees = 0;
784 }
785 else if (TREE_CODE (*tp) == STATEMENT_LIST)
786 gcc_unreachable ();
787 else if (TREE_CODE (*tp) == SAVE_EXPR)
788 gcc_unreachable ();
789 else if (TREE_CODE (*tp) == LABEL_DECL
790 && (!DECL_CONTEXT (*tp)
791 || decl_function_context (*tp) == id->src_fn))
792 /* These may need to be remapped for EH handling. */
793 *tp = remap_decl (*tp, id);
794 else if (TYPE_P (*tp))
795 /* Types may need remapping as well. */
796 *tp = remap_type (*tp, id);
797 else if (CONSTANT_CLASS_P (*tp))
798 {
799 /* If this is a constant, we have to copy the node iff the type
800 will be remapped. copy_tree_r will not copy a constant. */
801 tree new_type = remap_type (TREE_TYPE (*tp), id);
802
803 if (new_type == TREE_TYPE (*tp))
804 *walk_subtrees = 0;
805
806 else if (TREE_CODE (*tp) == INTEGER_CST)
807 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
808 TREE_INT_CST_HIGH (*tp));
809 else
810 {
811 *tp = copy_node (*tp);
812 TREE_TYPE (*tp) = new_type;
813 }
814 }
815 else
816 {
817 /* Otherwise, just copy the node. Note that copy_tree_r already
818 knows not to copy VAR_DECLs, etc., so this is safe. */
819
820 /* We should never have TREE_BLOCK set on non-statements. */
821 if (EXPR_P (*tp))
822 gcc_assert (!TREE_BLOCK (*tp));
823
824 if (TREE_CODE (*tp) == MEM_REF)
825 {
826 tree ptr = TREE_OPERAND (*tp, 0);
827 tree type = remap_type (TREE_TYPE (*tp), id);
828 tree old = *tp;
829 tree tem;
830
831 /* We need to re-canonicalize MEM_REFs from inline substitutions
832 that can happen when a pointer argument is an ADDR_EXPR.
833 Recurse here manually to allow that. */
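/* E.g. if the caller passed &s.f for the pointer this MEM_REF is based
   on (s and f being a hypothetical struct and field), the base becomes
   an ADDR_EXPR and the reference can be folded back to plain s.f. */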
834 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
835 if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
836 ptr,
837 TREE_OPERAND (*tp, 1),
838 type))
839 && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
840 {
841 tree *tem_basep = &tem;
842 while (handled_component_p (*tem_basep))
843 tem_basep = &TREE_OPERAND (*tem_basep, 0);
844 if (TREE_CODE (*tem_basep) == MEM_REF)
845 *tem_basep
846 = build2 (MEM_REF, TREE_TYPE (*tem_basep),
847 TREE_OPERAND (*tem_basep, 0),
848 fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
849 TREE_OPERAND (*tem_basep, 1)));
850 else
851 *tem_basep
852 = build2 (MEM_REF, TREE_TYPE (*tem_basep),
853 build_fold_addr_expr (*tem_basep),
854 build_int_cst
855 (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
856 *tp = tem;
857 }
858 else
859 {
860 *tp = fold_build2 (MEM_REF, type,
861 ptr, TREE_OPERAND (*tp, 1));
862 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
863 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
864 }
865 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
866 *walk_subtrees = 0;
867 return NULL;
868 }
869
870 /* Here is the "usual case". Copy this tree node, and then
871 tweak some special cases. */
872 copy_tree_r (tp, walk_subtrees, NULL);
873
874 if (TREE_CODE (*tp) != OMP_CLAUSE)
875 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
876
877 /* Global variables we haven't seen yet need to go into referenced
878 vars, unless they are referenced only from types. */
879 if (gimple_in_ssa_p (cfun)
880 && TREE_CODE (*tp) == VAR_DECL
881 && id->remapping_type_depth == 0
882 && !processing_debug_stmt)
883 add_referenced_var (*tp);
884
885 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
886 {
887 /* The copied TARGET_EXPR has never been expanded, even if the
888 original node was expanded already. */
889 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
890 TREE_OPERAND (*tp, 3) = NULL_TREE;
891 }
892 else if (TREE_CODE (*tp) == ADDR_EXPR)
893 {
894 /* Variable substitution need not be simple. In particular,
895 the MEM_REF substitution above. Make sure that
896 TREE_CONSTANT and friends are up-to-date. But make sure
897 to not improperly set TREE_BLOCK on some sub-expressions. */
898 int invariant = is_gimple_min_invariant (*tp);
899 tree block = id->block;
900 id->block = NULL_TREE;
901 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
902 id->block = block;
903 recompute_tree_invariant_for_addr_expr (*tp);
904
905 /* If this used to be invariant, but is not any longer,
906 then regimplification is probably needed. */
907 if (invariant && !is_gimple_min_invariant (*tp))
908 id->regimplify = true;
909
910 *walk_subtrees = 0;
911 }
912 }
913
914 /* Keep iterating. */
915 return NULL_TREE;
916 }
917
918
919 /* Called from copy_body_id via walk_tree. DATA is really a
920 `copy_body_data *'. */
921
922 tree
923 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
924 {
925 copy_body_data *id = (copy_body_data *) data;
926 tree fn = id->src_fn;
927 tree new_block;
928
929 /* Begin by recognizing trees that we'll completely rewrite for the
930 inlining context. Our output for these trees is completely
931 different from our input (e.g. RETURN_EXPR is deleted and morphs
932 into an edge). Further down, we'll handle trees that get
933 duplicated and/or tweaked. */
934
935 /* When requested, RETURN_EXPRs should be transformed to just the
936 contained MODIFY_EXPR. The branch semantics of the return will
937 be handled elsewhere by manipulating the CFG rather than a statement. */
938 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
939 {
940 tree assignment = TREE_OPERAND (*tp, 0);
941
942 /* If we're returning something, just turn that into an
943 assignment into the equivalent of the original RESULT_DECL.
944 If the "assignment" is just the result decl, the result
945 decl has already been set (e.g. a recent "foo (&result_decl,
946 ...)"); just toss the entire RETURN_EXPR. */
947 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
948 {
949 /* Replace the RETURN_EXPR with (a copy of) the
950 MODIFY_EXPR hanging underneath. */
951 *tp = copy_node (assignment);
952 }
953 else /* Else the RETURN_EXPR returns no value. */
954 {
955 *tp = NULL;
956 return (tree) (void *)1;
957 }
958 }
959 else if (TREE_CODE (*tp) == SSA_NAME)
960 {
961 *tp = remap_ssa_name (*tp, id);
962 *walk_subtrees = 0;
963 return NULL;
964 }
965
966 /* Local variables and labels need to be replaced by equivalent
967 variables. We don't want to copy static variables; there's only
968 one of those, no matter how many times we inline the containing
969 function. Similarly for globals from an outer function. */
970 else if (auto_var_in_fn_p (*tp, fn))
971 {
972 tree new_decl;
973
974 /* Remap the declaration. */
975 new_decl = remap_decl (*tp, id);
976 gcc_assert (new_decl);
977 /* Replace this variable with the copy. */
978 STRIP_TYPE_NOPS (new_decl);
979 *tp = new_decl;
980 *walk_subtrees = 0;
981 }
982 else if (TREE_CODE (*tp) == STATEMENT_LIST)
983 copy_statement_list (tp);
984 else if (TREE_CODE (*tp) == SAVE_EXPR
985 || TREE_CODE (*tp) == TARGET_EXPR)
986 remap_save_expr (tp, id->decl_map, walk_subtrees);
987 else if (TREE_CODE (*tp) == LABEL_DECL
988 && (! DECL_CONTEXT (*tp)
989 || decl_function_context (*tp) == id->src_fn))
990 /* These may need to be remapped for EH handling. */
991 *tp = remap_decl (*tp, id);
992 else if (TREE_CODE (*tp) == BIND_EXPR)
993 copy_bind_expr (tp, walk_subtrees, id);
994 /* Types may need remapping as well. */
995 else if (TYPE_P (*tp))
996 *tp = remap_type (*tp, id);
997
998 /* If this is a constant, we have to copy the node iff the type will be
999 remapped. copy_tree_r will not copy a constant. */
1000 else if (CONSTANT_CLASS_P (*tp))
1001 {
1002 tree new_type = remap_type (TREE_TYPE (*tp), id);
1003
1004 if (new_type == TREE_TYPE (*tp))
1005 *walk_subtrees = 0;
1006
1007 else if (TREE_CODE (*tp) == INTEGER_CST)
1008 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1009 TREE_INT_CST_HIGH (*tp));
1010 else
1011 {
1012 *tp = copy_node (*tp);
1013 TREE_TYPE (*tp) = new_type;
1014 }
1015 }
1016
1017 /* Otherwise, just copy the node. Note that copy_tree_r already
1018 knows not to copy VAR_DECLs, etc., so this is safe. */
1019 else
1020 {
1021 /* Here we handle trees that are not completely rewritten.
1022 First we detect some inlining-induced bogosities for
1023 discarding. */
1024 if (TREE_CODE (*tp) == MODIFY_EXPR
1025 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1026 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1027 {
1028 /* Some assignments VAR = VAR; don't generate any rtl code
1029 and thus don't count as variable modification. Avoid
1030 keeping bogosities like 0 = 0. */
1031 tree decl = TREE_OPERAND (*tp, 0), value;
1032 tree *n;
1033
1034 n = (tree *) pointer_map_contains (id->decl_map, decl);
1035 if (n)
1036 {
1037 value = *n;
1038 STRIP_TYPE_NOPS (value);
1039 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1040 {
1041 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1042 return copy_tree_body_r (tp, walk_subtrees, data);
1043 }
1044 }
1045 }
1046 else if (TREE_CODE (*tp) == INDIRECT_REF)
1047 {
1048 /* Get rid of *& from inline substitutions that can happen when a
1049 pointer argument is an ADDR_EXPR. */
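/* E.g. a caller passing &x for a pointer parameter p turns a use of *p
   in the inlined body into *&x, which should simply become x again. */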
1050 tree decl = TREE_OPERAND (*tp, 0);
1051 tree *n;
1052
1053 n = (tree *) pointer_map_contains (id->decl_map, decl);
1054 if (n)
1055 {
1056 tree new_tree;
1057 tree old;
1058 /* If we happen to get an ADDR_EXPR in n->value, strip
1059 it manually here as we'll eventually get ADDR_EXPRs
1060 which lie about their types pointed to. In this case
1061 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1062 but we absolutely rely on that. As fold_indirect_ref
1063 does other useful transformations, try that first, though. */
1064 tree type = TREE_TYPE (TREE_TYPE (*n));
1065 if (id->do_not_unshare)
1066 new_tree = *n;
1067 else
1068 new_tree = unshare_expr (*n);
1069 old = *tp;
1070 *tp = gimple_fold_indirect_ref (new_tree);
1071 if (! *tp)
1072 {
1073 if (TREE_CODE (new_tree) == ADDR_EXPR)
1074 {
1075 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1076 type, new_tree);
1077 /* ??? We should either assert here or build
1078 a VIEW_CONVERT_EXPR instead of blindly leaking
1079 incompatible types to our IL. */
1080 if (! *tp)
1081 *tp = TREE_OPERAND (new_tree, 0);
1082 }
1083 else
1084 {
1085 *tp = build1 (INDIRECT_REF, type, new_tree);
1086 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1087 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1088 TREE_READONLY (*tp) = TREE_READONLY (old);
1089 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
1090 }
1091 }
1092 *walk_subtrees = 0;
1093 return NULL;
1094 }
1095 }
1096 else if (TREE_CODE (*tp) == MEM_REF)
1097 {
1098 /* We need to re-canonicalize MEM_REFs from inline substitutions
1099 that can happen when a pointer argument is an ADDR_EXPR. */
1100 tree decl = TREE_OPERAND (*tp, 0);
1101 tree *n;
1102
1103 n = (tree *) pointer_map_contains (id->decl_map, decl);
1104 if (n)
1105 {
1106 tree old = *tp;
1107 *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
1108 unshare_expr (*n), TREE_OPERAND (*tp, 1));
1109 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1110 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1111 *walk_subtrees = 0;
1112 return NULL;
1113 }
1114 }
1115
1116 /* Here is the "usual case". Copy this tree node, and then
1117 tweak some special cases. */
1118 copy_tree_r (tp, walk_subtrees, NULL);
1119
1120 /* Global variables we haven't seen yet need to go into referenced
1121 vars, unless they are referenced only from types or debug stmts. */
1122 if (gimple_in_ssa_p (cfun)
1123 && TREE_CODE (*tp) == VAR_DECL
1124 && id->remapping_type_depth == 0
1125 && !processing_debug_stmt)
1126 add_referenced_var (*tp);
1127
1128 /* If EXPR has a block defined, map it to the newly constructed block.
1129 When inlining we want EXPRs without a block to appear in the block
1130 of the function call if we are not remapping a type. */
1131 if (EXPR_P (*tp))
1132 {
1133 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1134 if (TREE_BLOCK (*tp))
1135 {
1136 tree *n;
1137 n = (tree *) pointer_map_contains (id->decl_map,
1138 TREE_BLOCK (*tp));
1139 gcc_assert (n || id->remapping_type_depth != 0);
1140 if (n)
1141 new_block = *n;
1142 }
1143 TREE_BLOCK (*tp) = new_block;
1144 }
1145
1146 if (TREE_CODE (*tp) != OMP_CLAUSE)
1147 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1148
1149 /* The copied TARGET_EXPR has never been expanded, even if the
1150 original node was expanded already. */
1151 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1152 {
1153 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1154 TREE_OPERAND (*tp, 3) = NULL_TREE;
1155 }
1156
1157 /* Variable substitution need not be simple. In particular, the
1158 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1159 and friends are up-to-date. */
1160 else if (TREE_CODE (*tp) == ADDR_EXPR)
1161 {
1162 int invariant = is_gimple_min_invariant (*tp);
1163 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1164
1165 /* Handle the case where we substituted an INDIRECT_REF
1166 into the operand of the ADDR_EXPR. */
1167 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1168 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1169 else
1170 recompute_tree_invariant_for_addr_expr (*tp);
1171
1172 /* If this used to be invariant, but is not any longer,
1173 then regimplification is probably needed. */
1174 if (invariant && !is_gimple_min_invariant (*tp))
1175 id->regimplify = true;
1176
1177 *walk_subtrees = 0;
1178 }
1179 }
1180
1181 /* Keep iterating. */
1182 return NULL_TREE;
1183 }
1184
1185 /* Helper for remap_gimple_stmt. Given an EH region number for the
1186 source function, map that to the duplicate EH region number in
1187 the destination function. */
1188
1189 static int
1190 remap_eh_region_nr (int old_nr, copy_body_data *id)
1191 {
1192 eh_region old_r, new_r;
1193 void **slot;
1194
1195 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1196 slot = pointer_map_contains (id->eh_map, old_r);
1197 new_r = (eh_region) *slot;
1198
1199 return new_r->index;
1200 }
1201
1202 /* Similar, but operate on INTEGER_CSTs. */
1203
1204 static tree
1205 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1206 {
1207 int old_nr, new_nr;
1208
1209 old_nr = tree_low_cst (old_t_nr, 0);
1210 new_nr = remap_eh_region_nr (old_nr, id);
1211
1212 return build_int_cst (integer_type_node, new_nr);
1213 }
1214
1215 /* Helper for copy_bb. Remap statement STMT using the inlining
1216 information in ID. Return the new statement copy. */
1217
1218 static gimple
1219 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1220 {
1221 gimple copy = NULL;
1222 struct walk_stmt_info wi;
1223 tree new_block;
1224 bool skip_first = false;
1225
1226 /* Begin by recognizing trees that we'll completely rewrite for the
1227 inlining context. Our output for these trees is completely
1228 different from our input (e.g. RETURN_EXPR is deleted and morphs
1229 into an edge). Further down, we'll handle trees that get
1230 duplicated and/or tweaked. */
1231
1232 /* When requested, GIMPLE_RETURNs should be transformed to just the
1233 contained GIMPLE_ASSIGN. The branch semantics of the return will
1234 be handled elsewhere by manipulating the CFG rather than the
1235 statement. */
1236 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1237 {
1238 tree retval = gimple_return_retval (stmt);
1239
1240 /* If we're returning something, just turn that into an
1241 assignment into the equivalent of the original RESULT_DECL.
1242 If RETVAL is just the result decl, the result decl has
1243 already been set (e.g. a recent "foo (&result_decl, ...)");
1244 just toss the entire GIMPLE_RETURN. */
1245 if (retval
1246 && (TREE_CODE (retval) != RESULT_DECL
1247 && (TREE_CODE (retval) != SSA_NAME
1248 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1249 {
1250 copy = gimple_build_assign (id->retvar, retval);
1251 /* id->retvar is already substituted. Skip it on later remapping. */
1252 skip_first = true;
1253 }
1254 else
1255 return gimple_build_nop ();
1256 }
1257 else if (gimple_has_substatements (stmt))
1258 {
1259 gimple_seq s1, s2;
1260
1261 /* When cloning bodies from the C++ front end, we will be handed bodies
1262 in High GIMPLE form. Handle here all the High GIMPLE statements that
1263 have embedded statements. */
1264 switch (gimple_code (stmt))
1265 {
1266 case GIMPLE_BIND:
1267 copy = copy_gimple_bind (stmt, id);
1268 break;
1269
1270 case GIMPLE_CATCH:
1271 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1272 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1273 break;
1274
1275 case GIMPLE_EH_FILTER:
1276 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1277 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1278 break;
1279
1280 case GIMPLE_TRY:
1281 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1282 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1283 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1284 break;
1285
1286 case GIMPLE_WITH_CLEANUP_EXPR:
1287 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1288 copy = gimple_build_wce (s1);
1289 break;
1290
1291 case GIMPLE_OMP_PARALLEL:
1292 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1293 copy = gimple_build_omp_parallel
1294 (s1,
1295 gimple_omp_parallel_clauses (stmt),
1296 gimple_omp_parallel_child_fn (stmt),
1297 gimple_omp_parallel_data_arg (stmt));
1298 break;
1299
1300 case GIMPLE_OMP_TASK:
1301 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1302 copy = gimple_build_omp_task
1303 (s1,
1304 gimple_omp_task_clauses (stmt),
1305 gimple_omp_task_child_fn (stmt),
1306 gimple_omp_task_data_arg (stmt),
1307 gimple_omp_task_copy_fn (stmt),
1308 gimple_omp_task_arg_size (stmt),
1309 gimple_omp_task_arg_align (stmt));
1310 break;
1311
1312 case GIMPLE_OMP_FOR:
1313 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1314 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1315 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1316 gimple_omp_for_collapse (stmt), s2);
1317 {
1318 size_t i;
1319 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1320 {
1321 gimple_omp_for_set_index (copy, i,
1322 gimple_omp_for_index (stmt, i));
1323 gimple_omp_for_set_initial (copy, i,
1324 gimple_omp_for_initial (stmt, i));
1325 gimple_omp_for_set_final (copy, i,
1326 gimple_omp_for_final (stmt, i));
1327 gimple_omp_for_set_incr (copy, i,
1328 gimple_omp_for_incr (stmt, i));
1329 gimple_omp_for_set_cond (copy, i,
1330 gimple_omp_for_cond (stmt, i));
1331 }
1332 }
1333 break;
1334
1335 case GIMPLE_OMP_MASTER:
1336 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1337 copy = gimple_build_omp_master (s1);
1338 break;
1339
1340 case GIMPLE_OMP_ORDERED:
1341 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1342 copy = gimple_build_omp_ordered (s1);
1343 break;
1344
1345 case GIMPLE_OMP_SECTION:
1346 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1347 copy = gimple_build_omp_section (s1);
1348 break;
1349
1350 case GIMPLE_OMP_SECTIONS:
1351 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1352 copy = gimple_build_omp_sections
1353 (s1, gimple_omp_sections_clauses (stmt));
1354 break;
1355
1356 case GIMPLE_OMP_SINGLE:
1357 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1358 copy = gimple_build_omp_single
1359 (s1, gimple_omp_single_clauses (stmt));
1360 break;
1361
1362 case GIMPLE_OMP_CRITICAL:
1363 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1364 copy
1365 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1366 break;
1367
1368 default:
1369 gcc_unreachable ();
1370 }
1371 }
1372 else
1373 {
1374 if (gimple_assign_copy_p (stmt)
1375 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1376 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1377 {
1378 /* Here we handle statements that are not completely rewritten.
1379 First we detect some inlining-induced bogosities for
1380 discarding. */
1381
1382 /* Some assignments VAR = VAR; don't generate any rtl code
1383 and thus don't count as variable modification. Avoid
1384 keeping bogosities like 0 = 0. */
1385 tree decl = gimple_assign_lhs (stmt), value;
1386 tree *n;
1387
1388 n = (tree *) pointer_map_contains (id->decl_map, decl);
1389 if (n)
1390 {
1391 value = *n;
1392 STRIP_TYPE_NOPS (value);
1393 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1394 return gimple_build_nop ();
1395 }
1396 }
1397
1398 if (gimple_debug_bind_p (stmt))
1399 {
1400 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1401 gimple_debug_bind_get_value (stmt),
1402 stmt);
1403 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1404 return copy;
1405 }
1406
1407 /* Create a new deep copy of the statement. */
1408 copy = gimple_copy (stmt);
1409
1410 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1411 RESX and EH_DISPATCH. */
1412 if (id->eh_map)
1413 switch (gimple_code (copy))
1414 {
1415 case GIMPLE_CALL:
1416 {
1417 tree r, fndecl = gimple_call_fndecl (copy);
1418 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1419 switch (DECL_FUNCTION_CODE (fndecl))
1420 {
1421 case BUILT_IN_EH_COPY_VALUES:
1422 r = gimple_call_arg (copy, 1);
1423 r = remap_eh_region_tree_nr (r, id);
1424 gimple_call_set_arg (copy, 1, r);
1425 /* FALLTHRU */
1426
1427 case BUILT_IN_EH_POINTER:
1428 case BUILT_IN_EH_FILTER:
1429 r = gimple_call_arg (copy, 0);
1430 r = remap_eh_region_tree_nr (r, id);
1431 gimple_call_set_arg (copy, 0, r);
1432 break;
1433
1434 default:
1435 break;
1436 }
1437
1438 /* Reset alias info if we didn't apply measures to
1439 keep it valid over inlining by setting DECL_PT_UID. */
1440 if (!id->src_cfun->gimple_df
1441 || !id->src_cfun->gimple_df->ipa_pta)
1442 gimple_call_reset_alias_info (copy);
1443 }
1444 break;
1445
1446 case GIMPLE_RESX:
1447 {
1448 int r = gimple_resx_region (copy);
1449 r = remap_eh_region_nr (r, id);
1450 gimple_resx_set_region (copy, r);
1451 }
1452 break;
1453
1454 case GIMPLE_EH_DISPATCH:
1455 {
1456 int r = gimple_eh_dispatch_region (copy);
1457 r = remap_eh_region_nr (r, id);
1458 gimple_eh_dispatch_set_region (copy, r);
1459 }
1460 break;
1461
1462 default:
1463 break;
1464 }
1465 }
1466
1467 /* If STMT has a block defined, map it to the newly constructed
1468 block. When inlining we want statements without a block to
1469 appear in the block of the function call. */
1470 new_block = id->block;
1471 if (gimple_block (copy))
1472 {
1473 tree *n;
1474 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1475 gcc_assert (n);
1476 new_block = *n;
1477 }
1478
1479 gimple_set_block (copy, new_block);
1480
1481 if (gimple_debug_bind_p (copy))
1482 return copy;
1483
1484 /* Remap all the operands in COPY. */
1485 memset (&wi, 0, sizeof (wi));
1486 wi.info = id;
1487 if (skip_first)
1488 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1489 else
1490 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1491
1492 /* Clear the copied virtual operands. We are not remapping them here
1493 but are going to recreate them from scratch. */
1494 if (gimple_has_mem_ops (copy))
1495 {
1496 gimple_set_vdef (copy, NULL_TREE);
1497 gimple_set_vuse (copy, NULL_TREE);
1498 }
1499
1500 return copy;
1501 }
1502
1503
1504 /* Copy a basic block, scaling its profile accordingly. Edges will be taken
1505 care of later. */
1506
1507 static basic_block
1508 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1509 gcov_type count_scale)
1510 {
1511 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1512 basic_block copy_basic_block;
1513 tree decl;
1514 gcov_type freq;
1515 basic_block prev;
1516
1517 /* Search for previous copied basic block. */
1518 prev = bb->prev_bb;
1519 while (!prev->aux)
1520 prev = prev->prev_bb;
1521
1522 /* create_basic_block() will append every new block to
1523 basic_block_info automatically. */
1524 copy_basic_block = create_basic_block (NULL, (void *) 0,
1525 (basic_block) prev->aux);
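/* The block count is scaled by count_scale / REG_BR_PROB_BASE, roughly
   the ratio of the call site's count to the callee's entry count; e.g.
   with count_scale at half of REG_BR_PROB_BASE, a block with count 1000
   gets count 500 in the copy. */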
1526 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1527
1528 /* We are going to rebuild frequencies from scratch. These values
1529 matter only a little for driving canonicalize_loop_headers. */
1530 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1531
1532 /* We recompute frequencies after inlining, so this is quite safe. */
1533 if (freq > BB_FREQ_MAX)
1534 freq = BB_FREQ_MAX;
1535 copy_basic_block->frequency = freq;
1536
1537 copy_gsi = gsi_start_bb (copy_basic_block);
1538
1539 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1540 {
1541 gimple stmt = gsi_stmt (gsi);
1542 gimple orig_stmt = stmt;
1543
1544 id->regimplify = false;
1545 stmt = remap_gimple_stmt (stmt, id);
1546 if (gimple_nop_p (stmt))
1547 continue;
1548
1549 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1550 seq_gsi = copy_gsi;
1551
1552 /* With return slot optimization we can end up with
1553 non-gimple (foo *)&this->m, fix that here. */
1554 if (is_gimple_assign (stmt)
1555 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1556 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1557 {
1558 tree new_rhs;
1559 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1560 gimple_assign_rhs1 (stmt),
1561 true, NULL, false,
1562 GSI_CONTINUE_LINKING);
1563 gimple_assign_set_rhs1 (stmt, new_rhs);
1564 id->regimplify = false;
1565 }
1566
1567 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1568
1569 if (id->regimplify)
1570 gimple_regimplify_operands (stmt, &seq_gsi);
1571
1572 /* If copy_basic_block has been empty at the start of this iteration,
1573 call gsi_start_bb again to get at the newly added statements. */
1574 if (gsi_end_p (copy_gsi))
1575 copy_gsi = gsi_start_bb (copy_basic_block);
1576 else
1577 gsi_next (&copy_gsi);
1578
1579 /* Process the new statement. The call to gimple_regimplify_operands
1580 possibly turned the statement into multiple statements, we
1581 need to process all of them. */
1582 do
1583 {
1584 tree fn;
1585
1586 stmt = gsi_stmt (copy_gsi);
1587 if (is_gimple_call (stmt)
1588 && gimple_call_va_arg_pack_p (stmt)
1589 && id->gimple_call)
1590 {
1591 /* __builtin_va_arg_pack () should be replaced by
1592 all arguments corresponding to ... in the caller. */
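/* For instance, inlining a hypothetical "int f (int a, ...)" whose body
   contains "g (a, __builtin_va_arg_pack ())" at the call "f (1, 2, 3)"
   turns the copied call into "g (a, 2, 3)". */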
1593 tree p;
1594 gimple new_call;
1595 VEC(tree, heap) *argarray;
1596 size_t nargs = gimple_call_num_args (id->gimple_call);
1597 size_t n;
1598
1599 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1600 nargs--;
1601
1602 /* Create the new array of arguments. */
1603 n = nargs + gimple_call_num_args (stmt);
1604 argarray = VEC_alloc (tree, heap, n);
1605 VEC_safe_grow (tree, heap, argarray, n);
1606
1607 /* Copy all the arguments before '...' */
1608 memcpy (VEC_address (tree, argarray),
1609 gimple_call_arg_ptr (stmt, 0),
1610 gimple_call_num_args (stmt) * sizeof (tree));
1611
1612 /* Append the arguments passed in '...' */
1613 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1614 gimple_call_arg_ptr (id->gimple_call, 0)
1615 + (gimple_call_num_args (id->gimple_call) - nargs),
1616 nargs * sizeof (tree));
1617
1618 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1619 argarray);
1620
1621 VEC_free (tree, heap, argarray);
1622
1623 /* Copy all GIMPLE_CALL flags, location and block, except
1624 GF_CALL_VA_ARG_PACK. */
1625 gimple_call_copy_flags (new_call, stmt);
1626 gimple_call_set_va_arg_pack (new_call, false);
1627 gimple_set_location (new_call, gimple_location (stmt));
1628 gimple_set_block (new_call, gimple_block (stmt));
1629 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1630
1631 gsi_replace (&copy_gsi, new_call, false);
1632 stmt = new_call;
1633 }
1634 else if (is_gimple_call (stmt)
1635 && id->gimple_call
1636 && (decl = gimple_call_fndecl (stmt))
1637 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1638 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1639 {
1640 /* __builtin_va_arg_pack_len () should be replaced by
1641 the number of anonymous arguments. */
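/* E.g. for a hypothetical "int f (int a, ...)" inlined at the call
   "f (1, 2, 3)", the two anonymous arguments make this constant 2. */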
1642 size_t nargs = gimple_call_num_args (id->gimple_call);
1643 tree count, p;
1644 gimple new_stmt;
1645
1646 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1647 nargs--;
1648
1649 count = build_int_cst (integer_type_node, nargs);
1650 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1651 gsi_replace (&copy_gsi, new_stmt, false);
1652 stmt = new_stmt;
1653 }
1654
1655 /* Statements produced by inlining can be unfolded, especially
1656 when we constant propagated some operands. We can't fold
1657 them right now for two reasons:
1658 1) folding requires SSA_NAME_DEF_STMTs to be correct
1659 2) we can't change function calls to builtins.
1660 So we just mark the statement for later folding. We mark
1661 all new statements, instead of just the statements that have changed
1662 by some nontrivial substitution, so that even statements made
1663 foldable indirectly are updated. If this turns out to be
1664 expensive, copy_body can be told to watch for nontrivial
1665 changes. */
1666 if (id->statements_to_fold)
1667 pointer_set_insert (id->statements_to_fold, stmt);
1668
1669 /* We're duplicating a CALL_EXPR. Find any corresponding
1670 callgraph edges and update or duplicate them. */
1671 if (is_gimple_call (stmt))
1672 {
1673 struct cgraph_edge *edge;
1674 int flags;
1675
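/* How the edge is updated depends on the copy mode: CB_CGE_DUPLICATE
   clones the original edge for the new call statement, CB_CGE_MOVE
   redirects the existing edge to the new statement, and
   CB_CGE_MOVE_CLONES does the same in all clones of the destination
   node as well. */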
1676 switch (id->transform_call_graph_edges)
1677 {
1678 case CB_CGE_DUPLICATE:
1679 edge = cgraph_edge (id->src_node, orig_stmt);
1680 if (edge)
1681 {
1682 int edge_freq = edge->frequency;
1683 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1684 gimple_uid (stmt),
1685 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1686 true);
1687 /* We could also just rescale the frequency, but
1688 doing so would introduce roundoff errors and make
1689 the verifier unhappy. */
1690 edge->frequency
1691 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1692 copy_basic_block);
1693 if (dump_file
1694 && profile_status_for_function (cfun) != PROFILE_ABSENT
1695 && (edge_freq > edge->frequency + 10
1696 || edge_freq < edge->frequency - 10))
1697 {
1698 fprintf (dump_file, "Edge frequency estimated by "
1699 "cgraph %i diverge from inliner's estimate %i\n",
1700 edge_freq,
1701 edge->frequency);
1702 fprintf (dump_file,
1703 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1704 bb->index,
1705 bb->frequency,
1706 copy_basic_block->frequency);
1707 }
1708 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1709 }
1710 break;
1711
1712 case CB_CGE_MOVE_CLONES:
1713 cgraph_set_call_stmt_including_clones (id->dst_node,
1714 orig_stmt, stmt);
1715 edge = cgraph_edge (id->dst_node, stmt);
1716 break;
1717
1718 case CB_CGE_MOVE:
1719 edge = cgraph_edge (id->dst_node, orig_stmt);
1720 if (edge)
1721 cgraph_set_call_stmt (edge, stmt);
1722 break;
1723
1724 default:
1725 gcc_unreachable ();
1726 }
1727
1728 /* Constant propagation on arguments done during inlining
1729 may create a new direct call. Produce an edge for it. */
1730 if ((!edge
1731 || (edge->indirect_inlining_edge
1732 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1733 && id->dst_node->analyzed
1734 && (fn = gimple_call_fndecl (stmt)) != NULL)
1735 {
1736 struct cgraph_node *dest = cgraph_get_node (fn);
1737
1738 /* We have a missing edge in the callgraph. This can happen
1739 when previous inlining turned an indirect call into a
1740 direct call by constant propagating arguments, or when we are
1741 producing a dead clone (for further cloning). In all
1742 other cases we hit a bug (incorrect node sharing is the
1743 most common reason for missing edges). */
1744 gcc_assert (dest->needed || !dest->analyzed
1745 || dest->address_taken
1746 || !id->src_node->analyzed
1747 || !id->dst_node->analyzed);
1748 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1749 cgraph_create_edge_including_clones
1750 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1751 compute_call_stmt_bb_frequency (id->dst_node->decl,
1752 copy_basic_block),
1753 CIF_ORIGINALLY_INDIRECT_CALL);
1754 else
1755 cgraph_create_edge (id->dst_node, dest, stmt,
1756 bb->count,
1757 compute_call_stmt_bb_frequency
1758 (id->dst_node->decl, copy_basic_block))->inline_failed
1759 = CIF_ORIGINALLY_INDIRECT_CALL;
1760 if (dump_file)
1761 {
1762 fprintf (dump_file, "Created new direct edge to %s\n",
1763 cgraph_node_name (dest));
1764 }
1765 }
1766
1767 flags = gimple_call_flags (stmt);
1768 if (flags & ECF_MAY_BE_ALLOCA)
1769 cfun->calls_alloca = true;
1770 if (flags & ECF_RETURNS_TWICE)
1771 cfun->calls_setjmp = true;
1772 }
1773
1774 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1775 id->eh_map, id->eh_lp_nr);
1776
1777 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1778 {
1779 ssa_op_iter i;
1780 tree def;
1781
1782 find_new_referenced_vars (gsi_stmt (copy_gsi));
1783 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1784 if (TREE_CODE (def) == SSA_NAME)
1785 SSA_NAME_DEF_STMT (def) = stmt;
1786 }
1787
1788 gsi_next (&copy_gsi);
1789 }
1790 while (!gsi_end_p (copy_gsi));
1791
1792 copy_gsi = gsi_last_bb (copy_basic_block);
1793 }
1794
1795 return copy_basic_block;
1796 }
1797
1798 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1799 form is quite easy, since the dominator relationship for the old basic blocks
1800 does not change.
1801
1802 There is, however, an exception where inlining might change the dominator
1803 relation across EH edges from basic blocks within the inlined function
1804 to landing pads in the function we inline into.
1805
1806 The function fills in PHI_RESULTs of such PHI nodes if they refer
1807 to gimple regs. Otherwise, the function marks PHI_RESULT of such
1808 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1809 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1810 set, and this means that there will be no overlapping live ranges
1811 for the underlying symbol.
1812
1813 This might change in the future if we allow redirecting of EH edges and
1814 we might want to change the way we build the CFG pre-inlining to include
1815 all the possible edges then. */
1816 static void
1817 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1818 bool can_throw, bool nonlocal_goto)
1819 {
1820 edge e;
1821 edge_iterator ei;
1822
1823 FOR_EACH_EDGE (e, ei, bb->succs)
1824 if (!e->dest->aux
1825 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1826 {
1827 gimple phi;
1828 gimple_stmt_iterator si;
1829
1830 if (!nonlocal_goto)
1831 gcc_assert (e->flags & EDGE_EH);
1832
1833 if (!can_throw)
1834 gcc_assert (!(e->flags & EDGE_EH));
1835
1836 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1837 {
1838 edge re;
1839
1840 phi = gsi_stmt (si);
1841
1842 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1843 gcc_assert (!e->dest->aux);
1844
1845 gcc_assert ((e->flags & EDGE_EH)
1846 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1847
1848 if (!is_gimple_reg (PHI_RESULT (phi)))
1849 {
1850 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1851 continue;
1852 }
1853
1854 re = find_edge (ret_bb, e->dest);
1855 gcc_assert (re);
1856 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1857 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1858
1859 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1860 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1861 }
1862 }
1863 }
1864
1865
1866 /* Copy edges from BB into its copy constructed earlier, scale profile
1867 accordingly. Edges will be taken care of later. Assume aux
1868 pointers point to the copies of each BB. Return true if any
1869 debug stmts are left after a statement that must end the basic block. */
1870
1871 static bool
1872 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1873 {
1874 basic_block new_bb = (basic_block) bb->aux;
1875 edge_iterator ei;
1876 edge old_edge;
1877 gimple_stmt_iterator si;
1878 int flags;
1879 bool need_debug_cleanup = false;
1880
1881 /* Use the indices from the original blocks to create edges for the
1882 new ones. */
1883 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1884 if (!(old_edge->flags & EDGE_EH))
1885 {
1886 edge new_edge;
1887
1888 flags = old_edge->flags;
1889
1890 /* Return edges do get a FALLTHRU flag when they get inlined. */
1891 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1892 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1893 flags |= EDGE_FALLTHRU;
1894 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1895 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1896 new_edge->probability = old_edge->probability;
1897 }
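/* Illustrative arithmetic (assuming REG_BR_PROB_BASE is 10000; the numbers
here are made up): if the callee's entry count is 1000 and the call site
executes 250 times, count_scale is 10000 * 250 / 1000 = 2500, so a callee
edge with count 400 gets 400 * 2500 / 10000 = 100 in the copy. */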
1898
1899 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1900 return false;
1901
1902 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1903 {
1904 gimple copy_stmt;
1905 bool can_throw, nonlocal_goto;
1906
1907 copy_stmt = gsi_stmt (si);
1908 if (!is_gimple_debug (copy_stmt))
1909 {
1910 update_stmt (copy_stmt);
1911 if (gimple_in_ssa_p (cfun))
1912 mark_symbols_for_renaming (copy_stmt);
1913 }
1914
1915 /* Do this before the possible split_block. */
1916 gsi_next (&si);
1917
1918 /* If this tree could throw an exception, there are two
1919 cases where we need to add abnormal edge(s): the
1920 tree wasn't in a region and there is a "current
1921 region" in the caller; or the original tree had
1922 EH edges. In both cases split the block after the tree,
1923 and add abnormal edge(s) as needed; we need both
1924 those from the callee and the caller.
1925 We check whether the copy can throw, because the const
1926 propagation can change an INDIRECT_REF which throws
1927 into a COMPONENT_REF which doesn't. If the copy
1928 can throw, the original could also throw. */
1929 can_throw = stmt_can_throw_internal (copy_stmt);
1930 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1931
1932 if (can_throw || nonlocal_goto)
1933 {
1934 if (!gsi_end_p (si))
1935 {
1936 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1937 gsi_next (&si);
1938 if (gsi_end_p (si))
1939 need_debug_cleanup = true;
1940 }
1941 if (!gsi_end_p (si))
1942 /* Note that bb's predecessor edges aren't necessarily
1943 right at this point; split_block doesn't care. */
1944 {
1945 edge e = split_block (new_bb, copy_stmt);
1946
1947 new_bb = e->dest;
1948 new_bb->aux = e->src->aux;
1949 si = gsi_start_bb (new_bb);
1950 }
1951 }
1952
1953 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1954 make_eh_dispatch_edges (copy_stmt);
1955 else if (can_throw)
1956 make_eh_edges (copy_stmt);
1957
1958 if (nonlocal_goto)
1959 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1960
1961 if ((can_throw || nonlocal_goto)
1962 && gimple_in_ssa_p (cfun))
1963 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1964 can_throw, nonlocal_goto);
1965 }
1966 return need_debug_cleanup;
1967 }
1968
1969 /* Copy the PHIs. All blocks and edges are copied, some blocks
1970 were possibly split and new outgoing EH edges inserted.
1971 BB points to the block of the original function and AUX pointers link
1972 the original and newly copied blocks. */
1973
1974 static void
1975 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1976 {
1977 basic_block const new_bb = (basic_block) bb->aux;
1978 edge_iterator ei;
1979 gimple phi;
1980 gimple_stmt_iterator si;
1981 edge new_edge;
1982 bool inserted = false;
1983
1984 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1985 {
1986 tree res, new_res;
1987 gimple new_phi;
1988
1989 phi = gsi_stmt (si);
1990 res = PHI_RESULT (phi);
1991 new_res = res;
1992 if (is_gimple_reg (res))
1993 {
1994 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1995 SSA_NAME_DEF_STMT (new_res)
1996 = new_phi = create_phi_node (new_res, new_bb);
1997 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1998 {
1999 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2000 tree arg;
2001 tree new_arg;
2002 tree block = id->block;
2003 edge_iterator ei2;
2004
2005 /* When doing partial cloning, we allow PHIs on the entry block
2006 as long as all the arguments are the same. Find any input
2007 edge to see the argument to copy. */
2008 if (!old_edge)
2009 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2010 if (!old_edge->src->aux)
2011 break;
2012
2013 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2014 new_arg = arg;
2015 id->block = NULL_TREE;
2016 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2017 id->block = block;
2018 gcc_assert (new_arg);
2019 /* With return slot optimization we can end up with
2020 non-gimple (foo *)&this->m, fix that here. */
2021 if (TREE_CODE (new_arg) != SSA_NAME
2022 && TREE_CODE (new_arg) != FUNCTION_DECL
2023 && !is_gimple_val (new_arg))
2024 {
2025 gimple_seq stmts = NULL;
2026 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2027 gsi_insert_seq_on_edge (new_edge, stmts);
2028 inserted = true;
2029 }
2030 add_phi_arg (new_phi, new_arg, new_edge,
2031 gimple_phi_arg_location_from_edge (phi, old_edge));
2032 }
2033 }
2034 }
2035
2036 /* Commit the delayed edge insertions. */
2037 if (inserted)
2038 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2039 gsi_commit_one_edge_insert (new_edge, NULL);
2040 }
2041
2042
2043 /* Wrapper for remap_decl so it can be used as a callback. */
2044
2045 static tree
2046 remap_decl_1 (tree decl, void *data)
2047 {
2048 return remap_decl (decl, (copy_body_data *) data);
2049 }
2050
2051 /* Build struct function and associated data structures for the new clone
2052 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2053
2054 static void
2055 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2056 {
2057 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2058 gcov_type count_scale;
2059
2060 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2061 count_scale = (REG_BR_PROB_BASE * count
2062 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2063 else
2064 count_scale = REG_BR_PROB_BASE;
2065
2066 /* Register specific tree functions. */
2067 gimple_register_cfg_hooks ();
2068
2069 /* Get clean struct function. */
2070 push_struct_function (new_fndecl);
2071
2072 /* We will rebuild these, so just sanity check that they are empty. */
2073 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2074 gcc_assert (cfun->local_decls == NULL);
2075 gcc_assert (cfun->cfg == NULL);
2076 gcc_assert (cfun->decl == new_fndecl);
2077
2078 /* Copy items we preserve during cloning. */
2079 cfun->static_chain_decl = src_cfun->static_chain_decl;
2080 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2081 cfun->function_end_locus = src_cfun->function_end_locus;
2082 cfun->curr_properties = src_cfun->curr_properties;
2083 cfun->last_verified = src_cfun->last_verified;
2084 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2085 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2086 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2087 cfun->stdarg = src_cfun->stdarg;
2088 cfun->after_inlining = src_cfun->after_inlining;
2089 cfun->can_throw_non_call_exceptions
2090 = src_cfun->can_throw_non_call_exceptions;
2091 cfun->returns_struct = src_cfun->returns_struct;
2092 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2093 cfun->after_tree_profile = src_cfun->after_tree_profile;
2094
2095 init_empty_tree_cfg ();
2096
2097 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2098 ENTRY_BLOCK_PTR->count =
2099 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2100 REG_BR_PROB_BASE);
2101 ENTRY_BLOCK_PTR->frequency
2102 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2103 EXIT_BLOCK_PTR->count =
2104 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2105 REG_BR_PROB_BASE);
2106 EXIT_BLOCK_PTR->frequency =
2107 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2108 if (src_cfun->eh)
2109 init_eh_for_function ();
2110
2111 if (src_cfun->gimple_df)
2112 {
2113 init_tree_ssa (cfun);
2114 cfun->gimple_df->in_ssa_p = true;
2115 init_ssa_operands ();
2116 }
2117 pop_cfun ();
2118 }
2119
2120 /* Helper function for copy_cfg_body. Move debug stmts from the end
2121 of NEW_BB to the beginning of successor basic blocks when needed. If the
2122 successor has multiple predecessors, reset the debug stmts' values,
2123 otherwise keep them. */
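/* A sketch of the transformation (hypothetical GIMPLE, for illustration):
a copied block ending in

foo ();               <-- may throw, so it must end the block
# DEBUG x => x_1

has the trailing debug bind moved (on the last outgoing edge) or copied
(on the other edges) to the start of each successor; when a successor has
several predecessors the bound value is reset rather than duplicated. */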
2124
2125 static void
2126 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2127 {
2128 edge e;
2129 edge_iterator ei;
2130 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2131
2132 if (gsi_end_p (si)
2133 || gsi_one_before_end_p (si)
2134 || !(stmt_can_throw_internal (gsi_stmt (si))
2135 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2136 return;
2137
2138 FOR_EACH_EDGE (e, ei, new_bb->succs)
2139 {
2140 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2141 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2142 while (is_gimple_debug (gsi_stmt (ssi)))
2143 {
2144 gimple stmt = gsi_stmt (ssi), new_stmt;
2145 tree var;
2146 tree value;
2147
2148 /* For the last edge move the debug stmts instead of copying
2149 them. */
2150 if (ei_one_before_end_p (ei))
2151 {
2152 si = ssi;
2153 gsi_prev (&ssi);
2154 if (!single_pred_p (e->dest))
2155 gimple_debug_bind_reset_value (stmt);
2156 gsi_remove (&si, false);
2157 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2158 continue;
2159 }
2160
2161 var = gimple_debug_bind_get_var (stmt);
2162 if (single_pred_p (e->dest))
2163 {
2164 value = gimple_debug_bind_get_value (stmt);
2165 value = unshare_expr (value);
2166 }
2167 else
2168 value = NULL_TREE;
2169 new_stmt = gimple_build_debug_bind (var, value, stmt);
2170 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2171 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2172 gsi_prev (&ssi);
2173 }
2174 }
2175 }
2176
2177 /* Make a copy of the body of FN so that it can be inserted inline in
2178 another function. Walks FN via CFG, returns new fndecl. */
2179
2180 static tree
2181 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2182 basic_block entry_block_map, basic_block exit_block_map,
2183 bitmap blocks_to_copy, basic_block new_entry)
2184 {
2185 tree callee_fndecl = id->src_fn;
2186 /* Original cfun for the callee, doesn't change. */
2187 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2188 struct function *cfun_to_copy;
2189 basic_block bb;
2190 tree new_fndecl = NULL;
2191 bool need_debug_cleanup = false;
2192 gcov_type count_scale;
2193 int last;
2194 int incoming_frequency = 0;
2195 gcov_type incoming_count = 0;
2196
2197 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2198 count_scale = (REG_BR_PROB_BASE * count
2199 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2200 else
2201 count_scale = REG_BR_PROB_BASE;
2202
2203 /* Register specific tree functions. */
2204 gimple_register_cfg_hooks ();
2205
2206 /* If we are inlining just a region of the function, make sure to connect the
2207 new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we
2208 must compute the frequency and probability of ENTRY_BLOCK_PTR based on the
2209 frequencies and probabilities of edges incoming from the nonduplicated region. */
2210 if (new_entry)
2211 {
2212 edge e;
2213 edge_iterator ei;
2214
2215 FOR_EACH_EDGE (e, ei, new_entry->preds)
2216 if (!e->src->aux)
2217 {
2218 incoming_frequency += EDGE_FREQUENCY (e);
2219 incoming_count += e->count;
2220 }
2221 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2222 incoming_frequency
2223 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2224 ENTRY_BLOCK_PTR->count = incoming_count;
2225 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2226 }
2227
2228 /* Must have a CFG at this point. */
2229 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2230 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2231
2232 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2233
2234 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2235 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2236 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2237 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2238
2239 /* Duplicate any exception-handling regions. */
2240 if (cfun->eh)
2241 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2242 remap_decl_1, id);
2243
2244 /* Use aux pointers to map the original blocks to their copies. */
2245 FOR_EACH_BB_FN (bb, cfun_to_copy)
2246 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2247 {
2248 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2249 bb->aux = new_bb;
2250 new_bb->aux = bb;
2251 }
2252
2253 last = last_basic_block;
2254
2255 /* Now that we've duplicated the blocks, duplicate their edges. */
2256 FOR_ALL_BB_FN (bb, cfun_to_copy)
2257 if (!blocks_to_copy
2258 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2259 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2260
2261 if (new_entry)
2262 {
2263 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2264 e->probability = REG_BR_PROB_BASE;
2265 e->count = incoming_count;
2266 }
2267
2268 if (gimple_in_ssa_p (cfun))
2269 FOR_ALL_BB_FN (bb, cfun_to_copy)
2270 if (!blocks_to_copy
2271 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2272 copy_phis_for_bb (bb, id);
2273
2274 FOR_ALL_BB_FN (bb, cfun_to_copy)
2275 if (bb->aux)
2276 {
2277 if (need_debug_cleanup
2278 && bb->index != ENTRY_BLOCK
2279 && bb->index != EXIT_BLOCK)
2280 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2281 ((basic_block)bb->aux)->aux = NULL;
2282 bb->aux = NULL;
2283 }
2284
2285 /* Zero out the AUX fields of blocks newly created during EH edge
2286 insertion. */
2287 for (; last < last_basic_block; last++)
2288 {
2289 if (need_debug_cleanup)
2290 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2291 BASIC_BLOCK (last)->aux = NULL;
2292 }
2293 entry_block_map->aux = NULL;
2294 exit_block_map->aux = NULL;
2295
2296 if (id->eh_map)
2297 {
2298 pointer_map_destroy (id->eh_map);
2299 id->eh_map = NULL;
2300 }
2301
2302 return new_fndecl;
2303 }
2304
2305 /* Copy the debug STMT using ID. We deal with these statements in a
2306 special way: if any variable in their VALUE expression wasn't
2307 remapped yet, we won't remap it, because that would get decl uids
2308 out of sync, causing codegen differences between -g and -g0. If
2309 this arises, we drop the VALUE expression altogether. */
2310
2311 static void
2312 copy_debug_stmt (gimple stmt, copy_body_data *id)
2313 {
2314 tree t, *n;
2315 struct walk_stmt_info wi;
2316
2317 t = id->block;
2318 if (gimple_block (stmt))
2319 {
2320 tree *n;
2321 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2322 if (n)
2323 t = *n;
2324 }
2325 gimple_set_block (stmt, t);
2326
2327 /* Remap all the operands in COPY. */
2328 memset (&wi, 0, sizeof (wi));
2329 wi.info = id;
2330
2331 processing_debug_stmt = 1;
2332
2333 t = gimple_debug_bind_get_var (stmt);
2334
2335 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2336 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2337 {
2338 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2339 t = *n;
2340 }
2341 else if (TREE_CODE (t) == VAR_DECL
2342 && !TREE_STATIC (t)
2343 && gimple_in_ssa_p (cfun)
2344 && !pointer_map_contains (id->decl_map, t)
2345 && !var_ann (t))
2346 /* T is a non-localized variable. */;
2347 else
2348 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2349
2350 gimple_debug_bind_set_var (stmt, t);
2351
2352 if (gimple_debug_bind_has_value_p (stmt))
2353 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2354 remap_gimple_op_r, &wi, NULL);
2355
2356 /* Punt if any decl couldn't be remapped. */
2357 if (processing_debug_stmt < 0)
2358 gimple_debug_bind_reset_value (stmt);
2359
2360 processing_debug_stmt = 0;
2361
2362 update_stmt (stmt);
2363 if (gimple_in_ssa_p (cfun))
2364 mark_symbols_for_renaming (stmt);
2365 }
2366
2367 /* Process deferred debug stmts. In order to give values better odds
2368 of being successfully remapped, we delay the processing of debug
2369 stmts until all other stmts that might require remapping are
2370 processed. */
2371
2372 static void
2373 copy_debug_stmts (copy_body_data *id)
2374 {
2375 size_t i;
2376 gimple stmt;
2377
2378 if (!id->debug_stmts)
2379 return;
2380
2381 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2382 copy_debug_stmt (stmt, id);
2383
2384 VEC_free (gimple, heap, id->debug_stmts);
2385 }
2386
2387 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2388 another function. */
2389
2390 static tree
2391 copy_tree_body (copy_body_data *id)
2392 {
2393 tree fndecl = id->src_fn;
2394 tree body = DECL_SAVED_TREE (fndecl);
2395
2396 walk_tree (&body, copy_tree_body_r, id, NULL);
2397
2398 return body;
2399 }
2400
2401 /* Make a copy of the body of FN so that it can be inserted inline in
2402 another function. */
2403
2404 static tree
2405 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2406 basic_block entry_block_map, basic_block exit_block_map,
2407 bitmap blocks_to_copy, basic_block new_entry)
2408 {
2409 tree fndecl = id->src_fn;
2410 tree body;
2411
2412 /* If this body has a CFG, walk CFG and copy. */
2413 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2414 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2415 blocks_to_copy, new_entry);
2416 copy_debug_stmts (id);
2417
2418 return body;
2419 }
2420
2421 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2422 defined in function FN, or of a data member thereof. */
2423
2424 static bool
2425 self_inlining_addr_expr (tree value, tree fn)
2426 {
2427 tree var;
2428
2429 if (TREE_CODE (value) != ADDR_EXPR)
2430 return false;
2431
2432 var = get_base_address (TREE_OPERAND (value, 0));
2433
2434 return var && auto_var_in_fn_p (var, fn);
2435 }
2436
2437 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2438 lexical block and line number information from base_stmt, if given,
2439 or from the last stmt of the block otherwise. */
2440
2441 static gimple
2442 insert_init_debug_bind (copy_body_data *id,
2443 basic_block bb, tree var, tree value,
2444 gimple base_stmt)
2445 {
2446 gimple note;
2447 gimple_stmt_iterator gsi;
2448 tree tracked_var;
2449
2450 if (!gimple_in_ssa_p (id->src_cfun))
2451 return NULL;
2452
2453 if (!MAY_HAVE_DEBUG_STMTS)
2454 return NULL;
2455
2456 tracked_var = target_for_debug_bind (var);
2457 if (!tracked_var)
2458 return NULL;
2459
2460 if (bb)
2461 {
2462 gsi = gsi_last_bb (bb);
2463 if (!base_stmt && !gsi_end_p (gsi))
2464 base_stmt = gsi_stmt (gsi);
2465 }
2466
2467 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2468
2469 if (bb)
2470 {
2471 if (!gsi_end_p (gsi))
2472 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2473 else
2474 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2475 }
2476
2477 return note;
2478 }
2479
2480 static void
2481 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2482 {
2483 /* If VAR represents a zero-sized variable, it's possible that the
2484 assignment statement may result in no gimple statements. */
2485 if (init_stmt)
2486 {
2487 gimple_stmt_iterator si = gsi_last_bb (bb);
2488
2489 /* We can end up with init statements that store to a non-register
2490 from a rhs with a conversion. Handle that here by forcing the
2491 rhs into a temporary. gimple_regimplify_operands is not
2492 prepared to do this for us. */
2493 if (!is_gimple_debug (init_stmt)
2494 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2495 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2496 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2497 {
2498 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2499 gimple_expr_type (init_stmt),
2500 gimple_assign_rhs1 (init_stmt));
2501 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2502 GSI_NEW_STMT);
2503 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2504 gimple_assign_set_rhs1 (init_stmt, rhs);
2505 }
2506 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2507 gimple_regimplify_operands (init_stmt, &si);
2508 mark_symbols_for_renaming (init_stmt);
2509
2510 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2511 {
2512 tree var, def = gimple_assign_lhs (init_stmt);
2513
2514 if (TREE_CODE (def) == SSA_NAME)
2515 var = SSA_NAME_VAR (def);
2516 else
2517 var = def;
2518
2519 insert_init_debug_bind (id, bb, var, def, init_stmt);
2520 }
2521 }
2522 }
2523
2524 /* Initialize parameter P with VALUE. If needed, produce an init statement
2525 at the end of BB. When BB is NULL, we return the init statement to be
2526 output later. */
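/* For illustration (hypothetical names): inlining "int f (int x)" at the
call "f (a_5)" creates a caller-local VAR_DECL, say x.N, as the equivalent
of the PARM_DECL x. When the body is in SSA form and no setup is needed,
the parameter's default definition is simply mapped to a_5; otherwise an
init statement such as "x.N = a_5;" (or its SSA variant) is emitted at the
end of BB. */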
2527 static gimple
2528 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2529 basic_block bb, tree *vars)
2530 {
2531 gimple init_stmt = NULL;
2532 tree var;
2533 tree rhs = value;
2534 tree def = (gimple_in_ssa_p (cfun)
2535 ? gimple_default_def (id->src_cfun, p) : NULL);
2536
2537 if (value
2538 && value != error_mark_node
2539 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2540 {
2541 if (fold_convertible_p (TREE_TYPE (p), value))
2542 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2543 else
2544 /* ??? For valid (GIMPLE) programs we should not end up here.
2545 Still if something has gone wrong and we end up with truly
2546 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2547 to not leak invalid GIMPLE to the following passes. */
2548 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2549 }
2550
2551 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2552 here since the type of this decl must be visible to the calling
2553 function. */
2554 var = copy_decl_to_var (p, id);
2555
2556 /* We're actually using the newly-created var. */
2557 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2558 add_referenced_var (var);
2559
2560 /* Declare this new variable. */
2561 DECL_CHAIN (var) = *vars;
2562 *vars = var;
2563
2564 /* Make gimplifier happy about this variable. */
2565 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2566
2567 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2568 we would not need to create a new variable here at all, if it
2569 weren't for debug info. Still, we can just use the argument
2570 value. */
2571 if (TREE_READONLY (p)
2572 && !TREE_ADDRESSABLE (p)
2573 && value && !TREE_SIDE_EFFECTS (value)
2574 && !def)
2575 {
2576 /* We may produce non-gimple trees by adding NOPs or introduce
2577 invalid sharing when the operand is not really constant.
2578 It is not a big deal to prohibit constant propagation here as
2579 we will constant propagate in the DOM1 pass anyway. */
2580 if (is_gimple_min_invariant (value)
2581 && useless_type_conversion_p (TREE_TYPE (p),
2582 TREE_TYPE (value))
2583 /* We have to be very careful about ADDR_EXPR. Make sure
2584 the base variable isn't a local variable of the inlined
2585 function, e.g., when doing recursive inlining, direct or
2586 mutually-recursive or whatever, which is why we don't
2587 just test whether fn == current_function_decl. */
2588 && ! self_inlining_addr_expr (value, fn))
2589 {
2590 insert_decl_map (id, p, value);
2591 insert_debug_decl_map (id, p, var);
2592 return insert_init_debug_bind (id, bb, var, value, NULL);
2593 }
2594 }
2595
2596 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2597 that way, when the PARM_DECL is encountered, it will be
2598 automatically replaced by the VAR_DECL. */
2599 insert_decl_map (id, p, var);
2600
2601 /* Even if P was TREE_READONLY, the new VAR should not be.
2602 In the original code, we would have constructed a
2603 temporary, and then the function body would have never
2604 changed the value of P. However, now, we will be
2605 constructing VAR directly. The constructor body may
2606 change its value multiple times as it is being
2607 constructed. Therefore, it must not be TREE_READONLY;
2608 the back-end assumes that a TREE_READONLY variable is
2609 assigned to only once. */
2610 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2611 TREE_READONLY (var) = 0;
2612
2613 /* If there is no setup required and we are in SSA, take the easy route
2614 replacing all SSA names representing the function parameter by the
2615 SSA name passed to the function.
2616
2617 We need to construct a map for the variable anyway as it might be used
2618 in different SSA names when the parameter is set in the function.
2619
2620 Do the replacement at -O0 for const arguments replaced by a constant.
2621 This is important for builtin_constant_p and other constructs requiring
2622 a constant argument to be visible in the inlined function body. */
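/* Illustrative example (hypothetical code): for

static inline int is_two (int v)
{ return __builtin_constant_p (v) && v == 2; }

called as "is_two (2)", mapping the parameter's default definition to the
constant 2 even at -O0 lets __builtin_constant_p fold to 1 in the inlined
body. */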
2623 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2624 && (optimize
2625 || (TREE_READONLY (p)
2626 && is_gimple_min_invariant (rhs)))
2627 && (TREE_CODE (rhs) == SSA_NAME
2628 || is_gimple_min_invariant (rhs))
2629 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2630 {
2631 insert_decl_map (id, def, rhs);
2632 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2633 }
2634
2635 /* If the value of the argument is never used, don't bother initializing
2636 it. */
2637 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2638 {
2639 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2640 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2641 }
2642
2643 /* Initialize this VAR_DECL from the equivalent argument. Convert
2644 the argument to the proper type in case it was promoted. */
2645 if (value)
2646 {
2647 if (rhs == error_mark_node)
2648 {
2649 insert_decl_map (id, p, var);
2650 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2651 }
2652
2653 STRIP_USELESS_TYPE_CONVERSION (rhs);
2654
2655 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2656 keep our trees in gimple form. */
2657 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2658 {
2659 def = remap_ssa_name (def, id);
2660 init_stmt = gimple_build_assign (def, rhs);
2661 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2662 set_default_def (var, NULL);
2663 }
2664 else
2665 init_stmt = gimple_build_assign (var, rhs);
2666
2667 if (bb && init_stmt)
2668 insert_init_stmt (id, bb, init_stmt);
2669 }
2670 return init_stmt;
2671 }
2672
2673 /* Generate code to initialize the parameters of the function at the
2674 top of the stack in ID from the GIMPLE_CALL STMT. */
2675
2676 static void
2677 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2678 tree fn, basic_block bb)
2679 {
2680 tree parms;
2681 size_t i;
2682 tree p;
2683 tree vars = NULL_TREE;
2684 tree static_chain = gimple_call_chain (stmt);
2685
2686 /* Figure out what the parameters are. */
2687 parms = DECL_ARGUMENTS (fn);
2688
2689 /* Loop through the parameter declarations, replacing each with an
2690 equivalent VAR_DECL, appropriately initialized. */
2691 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2692 {
2693 tree val;
2694 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2695 setup_one_parameter (id, p, val, fn, bb, &vars);
2696 }
2697 /* After remapping parameters remap their types. This has to be done
2698 in a second loop over all parameters to appropriately remap
2699 variable sized arrays when the size is specified in a
2700 parameter following the array. */
2701 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2702 {
2703 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2704 if (varp
2705 && TREE_CODE (*varp) == VAR_DECL)
2706 {
2707 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2708 ? gimple_default_def (id->src_cfun, p) : NULL);
2709 tree var = *varp;
2710 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2711 /* Also remap the default definition if it was remapped
2712 to the default definition of the parameter replacement
2713 by the parameter setup. */
2714 if (def)
2715 {
2716 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2717 if (defp
2718 && TREE_CODE (*defp) == SSA_NAME
2719 && SSA_NAME_VAR (*defp) == var)
2720 TREE_TYPE (*defp) = TREE_TYPE (var);
2721 }
2722 }
2723 }
2724
2725 /* Initialize the static chain. */
2726 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2727 gcc_assert (fn != current_function_decl);
2728 if (p)
2729 {
2730 /* No static chain? Seems like a bug in tree-nested.c. */
2731 gcc_assert (static_chain);
2732
2733 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2734 }
2735
2736 declare_inline_vars (id->block, vars);
2737 }
2738
2739
2740 /* Declare a return variable to replace the RESULT_DECL for the
2741 function we are calling. An appropriate DECL_STMT is returned.
2742 The USE_STMT is filled to contain a use of the declaration to
2743 indicate the return value of the function.
2744
2745 RETURN_SLOT, if non-null, is the place where the result is stored. It
2746 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2747 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2748
2749 The return value is a (possibly null) value that holds the result
2750 as seen by the caller. */
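/* Illustrative example (hypothetical names): for "res = bar ();" where bar
returns int, a caller-local variable, say retval.N, is created and
registered as the equivalent of bar's RESULT_DECL, so the copied body
stores its result there; the returned "use" is retval.N, converted back to
the caller's type if the declared return type was promoted. When a return
slot is given, the slot itself (or its dereferenced address for
DECL_BY_REFERENCE results) is used instead and no use is returned. */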
2751
2752 static tree
2753 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2754 basic_block entry_bb)
2755 {
2756 tree callee = id->src_fn;
2757 tree result = DECL_RESULT (callee);
2758 tree callee_type = TREE_TYPE (result);
2759 tree caller_type;
2760 tree var, use;
2761
2762 /* Handle type-mismatches in the function declaration return type
2763 vs. the call expression. */
2764 if (modify_dest)
2765 caller_type = TREE_TYPE (modify_dest);
2766 else
2767 caller_type = TREE_TYPE (TREE_TYPE (callee));
2768
2769 /* We don't need to do anything for functions that don't return
2770 anything. */
2771 if (!result || VOID_TYPE_P (callee_type))
2772 return NULL_TREE;
2773
2774 /* If there was a return slot, then the return value is the
2775 dereferenced address of that object. */
2776 if (return_slot)
2777 {
2778 /* The front end shouldn't have used both return_slot and
2779 a modify expression. */
2780 gcc_assert (!modify_dest);
2781 if (DECL_BY_REFERENCE (result))
2782 {
2783 tree return_slot_addr = build_fold_addr_expr (return_slot);
2784 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2785
2786 /* We are going to construct *&return_slot and we can't do that
2787 for variables believed to be not addressable.
2788
2789 FIXME: This check can possibly trigger, because values returned
2790 via return slot optimization are not believed to have their
2791 address taken by alias analysis. */
2792 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2793 var = return_slot_addr;
2794 }
2795 else
2796 {
2797 var = return_slot;
2798 gcc_assert (TREE_CODE (var) != SSA_NAME);
2799 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2800 }
2801 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2802 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2803 && !DECL_GIMPLE_REG_P (result)
2804 && DECL_P (var))
2805 DECL_GIMPLE_REG_P (var) = 0;
2806 use = NULL;
2807 goto done;
2808 }
2809
2810 /* All types requiring non-trivial constructors should have been handled. */
2811 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2812
2813 /* Attempt to avoid creating a new temporary variable. */
2814 if (modify_dest
2815 && TREE_CODE (modify_dest) != SSA_NAME)
2816 {
2817 bool use_it = false;
2818
2819 /* We can't use MODIFY_DEST if there's type promotion involved. */
2820 if (!useless_type_conversion_p (callee_type, caller_type))
2821 use_it = false;
2822
2823 /* ??? If we're assigning to a variable sized type, then we must
2824 reuse the destination variable, because we've no good way to
2825 create variable sized temporaries at this point. */
2826 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2827 use_it = true;
2828
2829 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2830 reuse it as the result of the call directly. Don't do this if
2831 it would promote MODIFY_DEST to addressable. */
2832 else if (TREE_ADDRESSABLE (result))
2833 use_it = false;
2834 else
2835 {
2836 tree base_m = get_base_address (modify_dest);
2837
2838 /* If the base isn't a decl, then it's a pointer, and we don't
2839 know where that's going to go. */
2840 if (!DECL_P (base_m))
2841 use_it = false;
2842 else if (is_global_var (base_m))
2843 use_it = false;
2844 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2845 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2846 && !DECL_GIMPLE_REG_P (result)
2847 && DECL_GIMPLE_REG_P (base_m))
2848 use_it = false;
2849 else if (!TREE_ADDRESSABLE (base_m))
2850 use_it = true;
2851 }
2852
2853 if (use_it)
2854 {
2855 var = modify_dest;
2856 use = NULL;
2857 goto done;
2858 }
2859 }
2860
2861 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2862
2863 var = copy_result_decl_to_var (result, id);
2864 if (gimple_in_ssa_p (cfun))
2865 add_referenced_var (var);
2866
2867 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2868
2869 /* Do not have the rest of GCC warn about this variable as it should
2870 not be visible to the user. */
2871 TREE_NO_WARNING (var) = 1;
2872
2873 declare_inline_vars (id->block, var);
2874
2875 /* Build the use expr. If the return type of the function was
2876 promoted, convert it back to the expected type. */
2877 use = var;
2878 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2879 use = fold_convert (caller_type, var);
2880
2881 STRIP_USELESS_TYPE_CONVERSION (use);
2882
2883 if (DECL_BY_REFERENCE (result))
2884 {
2885 TREE_ADDRESSABLE (var) = 1;
2886 var = build_fold_addr_expr (var);
2887 }
2888
2889 done:
2890 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2891 way, when the RESULT_DECL is encountered, it will be
2892 automatically replaced by the VAR_DECL.
2893
2894 When returning by reference, ensure that RESULT_DECL remaps to
2895 gimple_val. */
2896 if (DECL_BY_REFERENCE (result)
2897 && !is_gimple_val (var))
2898 {
2899 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2900 if (gimple_in_ssa_p (id->src_cfun))
2901 add_referenced_var (temp);
2902 insert_decl_map (id, result, temp);
2903 /* When RESULT_DECL is in SSA form, we need to use its default_def
2904 SSA_NAME. */
2905 if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2906 temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
2907 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2908 }
2909 else
2910 insert_decl_map (id, result, var);
2911
2912 /* Remember this so we can ignore it in remap_decls. */
2913 id->retvar = var;
2914
2915 return use;
2916 }
2917
2918 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2919 to a local label. */
2920
2921 static tree
2922 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2923 {
2924 tree node = *nodep;
2925 tree fn = (tree) fnp;
2926
2927 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2928 return node;
2929
2930 if (TYPE_P (node))
2931 *walk_subtrees = 0;
2932
2933 return NULL_TREE;
2934 }
2935
2936 /* Determine if the function can be copied. If so, return NULL. If
2937 not, return a string describing the reason for failure. */
2938
2939 static const char *
2940 copy_forbidden (struct function *fun, tree fndecl)
2941 {
2942 const char *reason = fun->cannot_be_copied_reason;
2943 tree decl;
2944 unsigned ix;
2945
2946 /* Only examine the function once. */
2947 if (fun->cannot_be_copied_set)
2948 return reason;
2949
2950 /* We cannot copy a function that receives a non-local goto
2951 because we cannot remap the destination label used in the
2952 function that is performing the non-local goto. */
2953 /* ??? Actually, this should be possible, if we work at it.
2954 No doubt there's just a handful of places that simply
2955 assume it doesn't happen and don't substitute properly. */
2956 if (fun->has_nonlocal_label)
2957 {
2958 reason = G_("function %q+F can never be copied "
2959 "because it receives a non-local goto");
2960 goto fail;
2961 }
2962
2963 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2964 if (TREE_CODE (decl) == VAR_DECL
2965 && TREE_STATIC (decl)
2966 && !DECL_EXTERNAL (decl)
2967 && DECL_INITIAL (decl)
2968 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2969 has_label_address_in_static_1,
2970 fndecl))
2971 {
2972 reason = G_("function %q+F can never be copied because it saves "
2973 "address of local label in a static variable");
2974 goto fail;
2975 }
2976
2977 fail:
2978 fun->cannot_be_copied_reason = reason;
2979 fun->cannot_be_copied_set = true;
2980 return reason;
2981 }
2982
2983
2984 static const char *inline_forbidden_reason;
2985
2986 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2987 iff a function cannot be inlined. Also sets the reason why. */
2988
2989 static tree
2990 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2991 struct walk_stmt_info *wip)
2992 {
2993 tree fn = (tree) wip->info;
2994 tree t;
2995 gimple stmt = gsi_stmt (*gsi);
2996
2997 switch (gimple_code (stmt))
2998 {
2999 case GIMPLE_CALL:
3000 /* Refuse to inline an alloca call unless the user explicitly forced it,
3001 as this may change the program's memory overhead drastically when the
3002 function using alloca is called in a loop. In the GCC present in
3003 SPEC2000, inlining into schedule_block caused it to require 2GB of
3004 RAM instead of 256MB. Don't do so for alloca calls emitted for
3005 VLA objects, as those can't cause unbounded growth (they're always
3006 wrapped inside stack_save/stack_restore regions). */
3007 if (gimple_alloca_call_p (stmt)
3008 && !gimple_call_alloca_for_var_p (stmt)
3009 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3010 {
3011 inline_forbidden_reason
3012 = G_("function %q+F can never be inlined because it uses "
3013 "alloca (override using the always_inline attribute)");
3014 *handled_ops_p = true;
3015 return fn;
3016 }
3017
3018 t = gimple_call_fndecl (stmt);
3019 if (t == NULL_TREE)
3020 break;
3021
3022 /* We cannot inline functions that call setjmp. */
3023 if (setjmp_call_p (t))
3024 {
3025 inline_forbidden_reason
3026 = G_("function %q+F can never be inlined because it uses setjmp");
3027 *handled_ops_p = true;
3028 return t;
3029 }
3030
3031 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3032 switch (DECL_FUNCTION_CODE (t))
3033 {
3034 /* We cannot inline functions that take a variable number of
3035 arguments. */
3036 case BUILT_IN_VA_START:
3037 case BUILT_IN_NEXT_ARG:
3038 case BUILT_IN_VA_END:
3039 inline_forbidden_reason
3040 = G_("function %q+F can never be inlined because it "
3041 "uses variable argument lists");
3042 *handled_ops_p = true;
3043 return t;
3044
3045 case BUILT_IN_LONGJMP:
3046 /* We can't inline functions that call __builtin_longjmp at
3047 all. The non-local goto machinery really requires the
3048 destination be in a different function. If we allow the
3049 function calling __builtin_longjmp to be inlined into the
3050 function calling __builtin_setjmp, Things will Go Awry. */
3051 inline_forbidden_reason
3052 = G_("function %q+F can never be inlined because "
3053 "it uses setjmp-longjmp exception handling");
3054 *handled_ops_p = true;
3055 return t;
3056
3057 case BUILT_IN_NONLOCAL_GOTO:
3058 /* Similarly. */
3059 inline_forbidden_reason
3060 = G_("function %q+F can never be inlined because "
3061 "it uses non-local goto");
3062 *handled_ops_p = true;
3063 return t;
3064
3065 case BUILT_IN_RETURN:
3066 case BUILT_IN_APPLY_ARGS:
3067 /* If a __builtin_apply_args caller would be inlined,
3068 it would be saving arguments of the function it has
3069 been inlined into. Similarly, __builtin_return would
3070 return from the function the call has been inlined into. */
3071 inline_forbidden_reason
3072 = G_("function %q+F can never be inlined because "
3073 "it uses __builtin_return or __builtin_apply_args");
3074 *handled_ops_p = true;
3075 return t;
3076
3077 default:
3078 break;
3079 }
3080 break;
3081
3082 case GIMPLE_GOTO:
3083 t = gimple_goto_dest (stmt);
3084
3085 /* We will not inline a function which uses computed goto. The
3086 addresses of its local labels, which may be tucked into
3087 global storage, are of course not constant across
3088 instantiations, which causes unexpected behavior. */
3089 if (TREE_CODE (t) != LABEL_DECL)
3090 {
3091 inline_forbidden_reason
3092 = G_("function %q+F can never be inlined "
3093 "because it contains a computed goto");
3094 *handled_ops_p = true;
3095 return t;
3096 }
3097 break;
3098
3099 default:
3100 break;
3101 }
3102
3103 *handled_ops_p = false;
3104 return NULL_TREE;
3105 }
3106
3107 /* Return true if FNDECL is a function that cannot be inlined into
3108 another one. */
3109
3110 static bool
3111 inline_forbidden_p (tree fndecl)
3112 {
3113 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3114 struct walk_stmt_info wi;
3115 struct pointer_set_t *visited_nodes;
3116 basic_block bb;
3117 bool forbidden_p = false;
3118
3119 /* First check for shared reasons not to copy the code. */
3120 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3121 if (inline_forbidden_reason != NULL)
3122 return true;
3123
3124 /* Next, walk the statements of the function looking for
3125 constructs we can't handle, or that are non-optimal for inlining. */
3126 visited_nodes = pointer_set_create ();
3127 memset (&wi, 0, sizeof (wi));
3128 wi.info = (void *) fndecl;
3129 wi.pset = visited_nodes;
3130
3131 FOR_EACH_BB_FN (bb, fun)
3132 {
3133 gimple ret;
3134 gimple_seq seq = bb_seq (bb);
3135 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3136 forbidden_p = (ret != NULL);
3137 if (forbidden_p)
3138 break;
3139 }
3140
3141 pointer_set_destroy (visited_nodes);
3142 return forbidden_p;
3143 }
3144
3145 /* Returns nonzero if FN is a function that does not have any
3146 fundamental inline blocking properties. */
3147
3148 bool
3149 tree_inlinable_function_p (tree fn)
3150 {
3151 bool inlinable = true;
3152 bool do_warning;
3153 tree always_inline;
3154
3155 /* If we've already decided this function shouldn't be inlined,
3156 there's no need to check again. */
3157 if (DECL_UNINLINABLE (fn))
3158 return false;
3159
3160 /* We only warn for functions declared `inline' by the user. */
3161 do_warning = (warn_inline
3162 && DECL_DECLARED_INLINE_P (fn)
3163 && !DECL_NO_INLINE_WARNING_P (fn)
3164 && !DECL_IN_SYSTEM_HEADER (fn));
3165
3166 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3167
3168 if (flag_no_inline
3169 && always_inline == NULL)
3170 {
3171 if (do_warning)
3172 warning (OPT_Winline, "function %q+F can never be inlined because it "
3173 "is suppressed using -fno-inline", fn);
3174 inlinable = false;
3175 }
3176
3177 else if (!function_attribute_inlinable_p (fn))
3178 {
3179 if (do_warning)
3180 warning (OPT_Winline, "function %q+F can never be inlined because it "
3181 "uses attributes conflicting with inlining", fn);
3182 inlinable = false;
3183 }
3184
3185 else if (inline_forbidden_p (fn))
3186 {
3187 /* See if we should warn about uninlinable functions. Previously,
3188 some of these warnings would be issued while trying to expand
3189 the function inline, but that would cause multiple warnings
3190 about functions that would for example call alloca. But since
3191 this is a property of the function, just one warning is enough.
3192 As a bonus we can now give more details about the reason why a
3193 function is not inlinable. */
3194 if (always_inline)
3195 sorry (inline_forbidden_reason, fn);
3196 else if (do_warning)
3197 warning (OPT_Winline, inline_forbidden_reason, fn);
3198
3199 inlinable = false;
3200 }
3201
3202 /* Squirrel away the result so that we don't have to check again. */
3203 DECL_UNINLINABLE (fn) = !inlinable;
3204
3205 return inlinable;
3206 }
3207
3208 /* Estimate the cost of a memory move. Use machine-dependent
3209 word size and take a possible memcpy call into account. */
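/* Worked example with made-up target values: with MOVE_MAX_PIECES == 8 and
MOVE_RATIO == 4, a 20-byte structure costs (20 + 8 - 1) / 8 == 3, while a
64-byte structure exceeds 8 * 4 == 32 bytes and is costed as a memcpy
call, i.e. 4. */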
3210
3211 int
3212 estimate_move_cost (tree type)
3213 {
3214 HOST_WIDE_INT size;
3215
3216 gcc_assert (!VOID_TYPE_P (type));
3217
3218 if (TREE_CODE (type) == VECTOR_TYPE)
3219 {
3220 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3221 enum machine_mode simd
3222 = targetm.vectorize.preferred_simd_mode (inner);
3223 int simd_mode_size = GET_MODE_SIZE (simd);
3224 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3225 / simd_mode_size);
3226 }
3227
3228 size = int_size_in_bytes (type);
3229
3230 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3231 /* Cost of a memcpy call, 3 arguments and the call. */
3232 return 4;
3233 else
3234 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3235 }
3236
3237 /* Returns the cost of operation CODE, according to WEIGHTS. */
3238
3239 static int
3240 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3241 tree op1 ATTRIBUTE_UNUSED, tree op2)
3242 {
3243 switch (code)
3244 {
3245 /* These are "free" conversions, or their presumed cost
3246 is folded into other operations. */
3247 case RANGE_EXPR:
3248 CASE_CONVERT:
3249 case COMPLEX_EXPR:
3250 case PAREN_EXPR:
3251 case VIEW_CONVERT_EXPR:
3252 return 0;
3253
3254 /* Assign cost of 1 to usual operations.
3255 ??? We may consider mapping RTL costs to this. */
3256 case COND_EXPR:
3257 case VEC_COND_EXPR:
3258
3259 case PLUS_EXPR:
3260 case POINTER_PLUS_EXPR:
3261 case MINUS_EXPR:
3262 case MULT_EXPR:
3263 case FMA_EXPR:
3264
3265 case ADDR_SPACE_CONVERT_EXPR:
3266 case FIXED_CONVERT_EXPR:
3267 case FIX_TRUNC_EXPR:
3268
3269 case NEGATE_EXPR:
3270 case FLOAT_EXPR:
3271 case MIN_EXPR:
3272 case MAX_EXPR:
3273 case ABS_EXPR:
3274
3275 case LSHIFT_EXPR:
3276 case RSHIFT_EXPR:
3277 case LROTATE_EXPR:
3278 case RROTATE_EXPR:
3279 case VEC_LSHIFT_EXPR:
3280 case VEC_RSHIFT_EXPR:
3281
3282 case BIT_IOR_EXPR:
3283 case BIT_XOR_EXPR:
3284 case BIT_AND_EXPR:
3285 case BIT_NOT_EXPR:
3286
3287 case TRUTH_ANDIF_EXPR:
3288 case TRUTH_ORIF_EXPR:
3289 case TRUTH_AND_EXPR:
3290 case TRUTH_OR_EXPR:
3291 case TRUTH_XOR_EXPR:
3292 case TRUTH_NOT_EXPR:
3293
3294 case LT_EXPR:
3295 case LE_EXPR:
3296 case GT_EXPR:
3297 case GE_EXPR:
3298 case EQ_EXPR:
3299 case NE_EXPR:
3300 case ORDERED_EXPR:
3301 case UNORDERED_EXPR:
3302
3303 case UNLT_EXPR:
3304 case UNLE_EXPR:
3305 case UNGT_EXPR:
3306 case UNGE_EXPR:
3307 case UNEQ_EXPR:
3308 case LTGT_EXPR:
3309
3310 case CONJ_EXPR:
3311
3312 case PREDECREMENT_EXPR:
3313 case PREINCREMENT_EXPR:
3314 case POSTDECREMENT_EXPR:
3315 case POSTINCREMENT_EXPR:
3316
3317 case REALIGN_LOAD_EXPR:
3318
3319 case REDUC_MAX_EXPR:
3320 case REDUC_MIN_EXPR:
3321 case REDUC_PLUS_EXPR:
3322 case WIDEN_SUM_EXPR:
3323 case WIDEN_MULT_EXPR:
3324 case DOT_PROD_EXPR:
3325 case WIDEN_MULT_PLUS_EXPR:
3326 case WIDEN_MULT_MINUS_EXPR:
3327
3328 case VEC_WIDEN_MULT_HI_EXPR:
3329 case VEC_WIDEN_MULT_LO_EXPR:
3330 case VEC_UNPACK_HI_EXPR:
3331 case VEC_UNPACK_LO_EXPR:
3332 case VEC_UNPACK_FLOAT_HI_EXPR:
3333 case VEC_UNPACK_FLOAT_LO_EXPR:
3334 case VEC_PACK_TRUNC_EXPR:
3335 case VEC_PACK_SAT_EXPR:
3336 case VEC_PACK_FIX_TRUNC_EXPR:
3337 case VEC_EXTRACT_EVEN_EXPR:
3338 case VEC_EXTRACT_ODD_EXPR:
3339 case VEC_INTERLEAVE_HIGH_EXPR:
3340 case VEC_INTERLEAVE_LOW_EXPR:
3341
3342 return 1;
3343
3344 /* A few special cases of expensive operations. This is useful
3345 to avoid inlining functions having too many of these. */
3346 case TRUNC_DIV_EXPR:
3347 case CEIL_DIV_EXPR:
3348 case FLOOR_DIV_EXPR:
3349 case ROUND_DIV_EXPR:
3350 case EXACT_DIV_EXPR:
3351 case TRUNC_MOD_EXPR:
3352 case CEIL_MOD_EXPR:
3353 case FLOOR_MOD_EXPR:
3354 case ROUND_MOD_EXPR:
3355 case RDIV_EXPR:
3356 if (TREE_CODE (op2) != INTEGER_CST)
3357 return weights->div_mod_cost;
3358 return 1;
3359
3360 default:
3361 /* We expect a copy assignment with no operator. */
3362 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3363 return 0;
3364 }
3365 }
3366
3367
3368 /* Estimate number of instructions that will be created by expanding
3369 the statements in the statement sequence STMTS.
3370 WEIGHTS contains weights attributed to various constructs. */
3371
3372 static
3373 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3374 {
3375 int cost;
3376 gimple_stmt_iterator gsi;
3377
3378 cost = 0;
3379 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3380 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3381
3382 return cost;
3383 }
3384
3385
3386 /* Estimate number of instructions that will be created by expanding STMT.
3387 WEIGHTS contains weights attributed to various constructs. */
3388
3389 int
3390 estimate_num_insns (gimple stmt, eni_weights *weights)
3391 {
3392 unsigned cost, i;
3393 enum gimple_code code = gimple_code (stmt);
3394 tree lhs;
3395 tree rhs;
3396
3397 switch (code)
3398 {
3399 case GIMPLE_ASSIGN:
3400 /* Try to estimate the cost of assignments. We have two cases to
3401 deal with:
3402 1) Simple assignments to registers;
3403 2) Stores to things that must live in memory. This includes
3404 "normal" stores to scalars, but also assignments of large
3405 structures, or constructors of big arrays;
3406
3407 Let us look at the first two cases, assuming we have "a = b + C":
3408 <GIMPLE_ASSIGN <var_decl "a">
3409 <plus_expr <var_decl "b"> <constant C>>
3410 If "a" is a GIMPLE register, the assignment to it is free on almost
3411 any target, because "a" usually ends up in a real register. Hence
3412 the only cost of this expression comes from the PLUS_EXPR, and we
3413 can ignore the GIMPLE_ASSIGN.
3414 If "a" is not a GIMPLE register, the assignment to "a" will most
3415 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3416 of moving something into "a", which we compute using the function
3417 estimate_move_cost. */
3418 lhs = gimple_assign_lhs (stmt);
3419 rhs = gimple_assign_rhs1 (stmt);
3420
3421 if (is_gimple_reg (lhs))
3422 cost = 0;
3423 else
3424 cost = estimate_move_cost (TREE_TYPE (lhs));
3425
3426 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3427 cost += estimate_move_cost (TREE_TYPE (rhs));
3428
3429 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3430 gimple_assign_rhs1 (stmt),
3431 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3432 == GIMPLE_BINARY_RHS
3433 ? gimple_assign_rhs2 (stmt) : NULL);
3434 break;
3435
3436 case GIMPLE_COND:
3437 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3438 gimple_op (stmt, 0),
3439 gimple_op (stmt, 1));
3440 break;
3441
3442 case GIMPLE_SWITCH:
3443 /* Take into account the cost of the switch + guess 2 conditional jumps for
3444 each case label.
3445
3446 TODO: once the switch expansion logic is sufficiently separated, we can
3447 do a better job of estimating the cost of the switch. */
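/* For example, a switch with 8 labels is costed as floor_log2 (8) * 2 == 6
when estimating time, and as 8 * 2 == 16 when estimating size. */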
3448 if (weights->time_based)
3449 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3450 else
3451 cost = gimple_switch_num_labels (stmt) * 2;
3452 break;
3453
3454 case GIMPLE_CALL:
3455 {
3456 tree decl = gimple_call_fndecl (stmt);
3457 struct cgraph_node *node;
3458
3459 /* Do not special-case builtins where we see the body.
3460 This just confuses the inliner. */
3461 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3462 ;
3463 /* For builtins that are likely expanded to nothing or
3464 inlined, do not account for operand costs. */
3465 else if (is_simple_builtin (decl))
3466 return 0;
3467 else if (is_inexpensive_builtin (decl))
3468 return weights->target_builtin_call_cost;
3469 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3470 {
3471 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3472 specialize the cheap expansion we do here.
3473 ??? This asks for a more general solution. */
3474 switch (DECL_FUNCTION_CODE (decl))
3475 {
3476 case BUILT_IN_POW:
3477 case BUILT_IN_POWF:
3478 case BUILT_IN_POWL:
3479 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3480 && REAL_VALUES_EQUAL
3481 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3482 return estimate_operator_cost (MULT_EXPR, weights,
3483 gimple_call_arg (stmt, 0),
3484 gimple_call_arg (stmt, 0));
3485 break;
3486
3487 default:
3488 break;
3489 }
3490 }
3491
3492 cost = weights->call_cost;
3493 if (gimple_call_lhs (stmt))
3494 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3495 for (i = 0; i < gimple_call_num_args (stmt); i++)
3496 {
3497 tree arg = gimple_call_arg (stmt, i);
3498 cost += estimate_move_cost (TREE_TYPE (arg));
3499 }
3500 break;
3501 }
3502
3503 case GIMPLE_RETURN:
3504 return weights->return_cost;
3505
3506 case GIMPLE_GOTO:
3507 case GIMPLE_LABEL:
3508 case GIMPLE_NOP:
3509 case GIMPLE_PHI:
3510 case GIMPLE_PREDICT:
3511 case GIMPLE_DEBUG:
3512 return 0;
3513
3514 case GIMPLE_ASM:
3515 return asm_str_count (gimple_asm_string (stmt));
3516
3517 case GIMPLE_RESX:
3518 /* This is either going to be an external function call with one
3519 argument, or two register copy statements plus a goto. */
3520 return 2;
3521
3522 case GIMPLE_EH_DISPATCH:
3523 /* ??? This is going to turn into a switch statement. Ideally
3524 we'd have a look at the eh region and estimate the number of
3525 edges involved. */
3526 return 10;
3527
3528 case GIMPLE_BIND:
3529 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3530
3531 case GIMPLE_EH_FILTER:
3532 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3533
3534 case GIMPLE_CATCH:
3535 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3536
3537 case GIMPLE_TRY:
3538 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3539 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3540
3541 /* OpenMP directives are generally very expensive. */
3542
3543 case GIMPLE_OMP_RETURN:
3544 case GIMPLE_OMP_SECTIONS_SWITCH:
3545 case GIMPLE_OMP_ATOMIC_STORE:
3546 case GIMPLE_OMP_CONTINUE:
3547 /* ...except these, which are cheap. */
3548 return 0;
3549
3550 case GIMPLE_OMP_ATOMIC_LOAD:
3551 return weights->omp_cost;
3552
3553 case GIMPLE_OMP_FOR:
3554 return (weights->omp_cost
3555 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3556 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3557
3558 case GIMPLE_OMP_PARALLEL:
3559 case GIMPLE_OMP_TASK:
3560 case GIMPLE_OMP_CRITICAL:
3561 case GIMPLE_OMP_MASTER:
3562 case GIMPLE_OMP_ORDERED:
3563 case GIMPLE_OMP_SECTION:
3564 case GIMPLE_OMP_SECTIONS:
3565 case GIMPLE_OMP_SINGLE:
3566 return (weights->omp_cost
3567 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3568
3569 default:
3570 gcc_unreachable ();
3571 }
3572
3573 return cost;
3574 }
3575
3576 /* Estimate number of instructions that will be created by expanding
3577 function FNDECL. WEIGHTS contains weights attributed to various
3578 constructs. */
3579
3580 int
3581 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3582 {
3583 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3584 gimple_stmt_iterator bsi;
3585 basic_block bb;
3586 int n = 0;
3587
3588 gcc_assert (my_function && my_function->cfg);
3589 FOR_EACH_BB_FN (bb, my_function)
3590 {
3591 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3592 n += estimate_num_insns (gsi_stmt (bsi), weights);
3593 }
3594
3595 return n;
3596 }
3597
3598
3599 /* Initializes weights used by estimate_num_insns. */
3600
3601 void
3602 init_inline_once (void)
3603 {
3604 eni_size_weights.call_cost = 1;
3605 eni_size_weights.target_builtin_call_cost = 1;
3606 eni_size_weights.div_mod_cost = 1;
3607 eni_size_weights.omp_cost = 40;
3608 eni_size_weights.time_based = false;
3609 eni_size_weights.return_cost = 1;
3610
3611 /* Estimating time for call is difficult, since we have no idea what the
3612 called function does. In the current uses of eni_time_weights,
3613 underestimating the cost does less harm than overestimating it, so
3614 we choose a rather small value here. */
3615 eni_time_weights.call_cost = 10;
3616 eni_time_weights.target_builtin_call_cost = 1;
3617 eni_time_weights.div_mod_cost = 10;
3618 eni_time_weights.omp_cost = 40;
3619 eni_time_weights.time_based = true;
3620 eni_time_weights.return_cost = 2;
3621 }
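/* A typical use of the two weight sets initialized above (an illustrative
   sketch only; FNDECL, SIZE and TIME are placeholders, the real callers
   live in the inliner heuristics):

     init_inline_once ();
     size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     time = estimate_num_insns_fn (fndecl, &eni_time_weights);  */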
3622
3623 /* Estimate the number of instructions in a gimple_seq. */
3624
3625 int
3626 count_insns_seq (gimple_seq seq, eni_weights *weights)
3627 {
3628 gimple_stmt_iterator gsi;
3629 int n = 0;
3630 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3631 n += estimate_num_insns (gsi_stmt (gsi), weights);
3632
3633 return n;
3634 }
3635
3636
3637 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3638
3639 static void
3640 prepend_lexical_block (tree current_block, tree new_block)
3641 {
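  /* E.g., if CURRENT_BLOCK's subblocks were B1 -> B2, they become
     NEW_BLOCK -> B1 -> B2, and NEW_BLOCK's supercontext is CURRENT_BLOCK.  */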
3642 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3643 BLOCK_SUBBLOCKS (current_block) = new_block;
3644 BLOCK_SUPERCONTEXT (new_block) = current_block;
3645 }
3646
3647 /* Add local variables from CALLEE to CALLER. */
3648
3649 static inline void
3650 add_local_variables (struct function *callee, struct function *caller,
3651 copy_body_data *id, bool check_var_ann)
3652 {
3653 tree var;
3654 unsigned ix;
3655
3656 FOR_EACH_LOCAL_DECL (callee, ix, var)
3657 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3658 {
3659 if (!check_var_ann
3660 || (var_ann (var) && add_referenced_var (var)))
3661 add_local_decl (caller, var);
3662 }
3663 else if (!can_be_nonlocal (var, id))
3664 {
3665 tree new_var = remap_decl (var, id);
3666
3667 /* Remap debug-expressions. */
3668 if (TREE_CODE (new_var) == VAR_DECL
3669 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3670 && new_var != var)
3671 {
3672 tree tem = DECL_DEBUG_EXPR (var);
3673 bool old_regimplify = id->regimplify;
3674 id->remapping_type_depth++;
3675 walk_tree (&tem, copy_tree_body_r, id, NULL);
3676 id->remapping_type_depth--;
3677 id->regimplify = old_regimplify;
3678 SET_DECL_DEBUG_EXPR (new_var, tem);
3679 }
3680 add_local_decl (caller, new_var);
3681 }
3682 }
3683
3684 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3685
3686 static bool
3687 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3688 {
3689 tree use_retvar;
3690 tree fn;
3691 struct pointer_map_t *st, *dst;
3692 tree return_slot;
3693 tree modify_dest;
3694 location_t saved_location;
3695 struct cgraph_edge *cg_edge;
3696 cgraph_inline_failed_t reason;
3697 basic_block return_block;
3698 edge e;
3699 gimple_stmt_iterator gsi, stmt_gsi;
3700 bool successfully_inlined = FALSE;
3701 bool purge_dead_abnormal_edges;
3702
3703 /* Set input_location here so we get the right instantiation context
3704 if we call instantiate_decl from inlinable_function_p. */
3705 saved_location = input_location;
3706 if (gimple_has_location (stmt))
3707 input_location = gimple_location (stmt);
3708
3709 /* From here on, we're only interested in CALL_EXPRs. */
3710 if (gimple_code (stmt) != GIMPLE_CALL)
3711 goto egress;
3712
3713 cg_edge = cgraph_edge (id->dst_node, stmt);
3714 gcc_checking_assert (cg_edge);
3715 /* First, see if we can figure out what function is being called.
3716 If we cannot, then there is no hope of inlining the function. */
3717 if (cg_edge->indirect_unknown_callee)
3718 goto egress;
3719 fn = cg_edge->callee->decl;
3720 gcc_checking_assert (fn);
3721
3722 /* If FN is a declaration of a function in a nested scope that was
3723 globally declared inline, we don't set its DECL_INITIAL.
3724 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3725 C++ front-end uses it for cdtors to refer to their internal
3726 declarations, which are not real functions. Fortunately those
3727 don't have trees to be saved, so we can tell by checking their
3728 gimple_body. */
3729 if (!DECL_INITIAL (fn)
3730 && DECL_ABSTRACT_ORIGIN (fn)
3731 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3732 fn = DECL_ABSTRACT_ORIGIN (fn);
3733
3734 /* Don't try to inline functions that are not well-suited to inlining. */
3735 if (!cgraph_inline_p (cg_edge, &reason))
3736 {
3737 /* If this call was originally indirect, we do not want to emit any
3738 inlining related warnings or sorry messages because there are no
3739 guarantees regarding those. */
3740 if (cg_edge->indirect_inlining_edge)
3741 goto egress;
3742
3743 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3744 /* Avoid warnings during early inline pass. */
3745 && cgraph_global_info_ready)
3746 {
3747 sorry ("inlining failed in call to %q+F: %s", fn,
3748 _(cgraph_inline_failed_string (reason)));
3749 sorry ("called from here");
3750 }
3751 else if (warn_inline
3752 && DECL_DECLARED_INLINE_P (fn)
3753 && !DECL_NO_INLINE_WARNING_P (fn)
3754 && !DECL_IN_SYSTEM_HEADER (fn)
3755 && reason != CIF_UNSPECIFIED
3756 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3757 /* Do not warn about recursive calls that were not inlined. */
3758 && !cgraph_edge_recursive_p (cg_edge)
3759 /* Avoid warnings during early inline pass. */
3760 && cgraph_global_info_ready)
3761 {
3762 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3763 fn, _(cgraph_inline_failed_string (reason)));
3764 warning (OPT_Winline, "called from here");
3765 }
3766 goto egress;
3767 }
3768 fn = cg_edge->callee->decl;
3769
3770 #ifdef ENABLE_CHECKING
3771 if (cg_edge->callee->decl != id->dst_node->decl)
3772 verify_cgraph_node (cg_edge->callee);
3773 #endif
3774
3775 /* We will be inlining this callee. */
3776 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3777
3778 /* Update the caller's EH personality. */
3779 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3780 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3781 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3782
3783 /* Split the block holding the GIMPLE_CALL. */
3784 e = split_block (bb, stmt);
3785 bb = e->src;
3786 return_block = e->dest;
3787 remove_edge (e);
3788
3789 /* split_block splits after the statement; work around this by
3790 moving the call into the second block manually. Not pretty,
3791 but seems easier than doing the CFG manipulation by hand
3792 when the GIMPLE_CALL is in the last statement of BB. */
3793 stmt_gsi = gsi_last_bb (bb);
3794 gsi_remove (&stmt_gsi, false);
3795
3796 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3797 been the source of abnormal edges. In this case, schedule
3798 the removal of dead abnormal edges. */
3799 gsi = gsi_start_bb (return_block);
3800 if (gsi_end_p (gsi))
3801 {
3802 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3803 purge_dead_abnormal_edges = true;
3804 }
3805 else
3806 {
3807 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3808 purge_dead_abnormal_edges = false;
3809 }
3810
3811 stmt_gsi = gsi_start_bb (return_block);
3812
3813 /* Build a block containing code to initialize the arguments, the
3814 actual inline expansion of the body, and a label for the return
3815 statements within the function to jump to. The type of the
3816 statement expression is the return type of the function call. */
3817 id->block = make_node (BLOCK);
3818 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3819 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3820 prepend_lexical_block (gimple_block (stmt), id->block);
3821
3822 /* Local declarations will be replaced by their equivalents in this
3823 map. */
3824 st = id->decl_map;
3825 id->decl_map = pointer_map_create ();
3826 dst = id->debug_map;
3827 id->debug_map = NULL;
3828
3829 /* Record the function we are about to inline. */
3830 id->src_fn = fn;
3831 id->src_node = cg_edge->callee;
3832 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3833 id->gimple_call = stmt;
3834
3835 gcc_assert (!id->src_cfun->after_inlining);
3836
3837 id->entry_bb = bb;
3838 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3839 {
3840 gimple_stmt_iterator si = gsi_last_bb (bb);
3841 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3842 NOT_TAKEN),
3843 GSI_NEW_STMT);
3844 }
3845 initialize_inlined_parameters (id, stmt, fn, bb);
3846
3847 if (DECL_INITIAL (fn))
3848 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3849
3850 /* Return statements in the function body will be replaced by jumps
3851 to the RET_LABEL. */
3852 gcc_assert (DECL_INITIAL (fn));
3853 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3854
3855 /* Find the LHS to which the result of this call is assigned. */
3856 return_slot = NULL;
3857 if (gimple_call_lhs (stmt))
3858 {
3859 modify_dest = gimple_call_lhs (stmt);
3860
3861 /* The function which we are inlining might not return a value,
3862 in which case we should issue a warning that the function
3863 does not return a value. In that case the optimizers will
3864 see that the variable to which the value is assigned was not
3865 initialized. We do not want to issue a warning about that
3866 uninitialized variable. */
3867 if (DECL_P (modify_dest))
3868 TREE_NO_WARNING (modify_dest) = 1;
3869
3870 if (gimple_call_return_slot_opt_p (stmt))
3871 {
3872 return_slot = modify_dest;
3873 modify_dest = NULL;
3874 }
3875 }
3876 else
3877 modify_dest = NULL;
3878
3879 /* If we are inlining a call to the C++ operator new, we don't want
3880 to use type based alias analysis on the return value. Otherwise
3881 we may get confused if the compiler sees that the inlined new
3882 function returns a pointer which was just deleted. See bug
3883 33407. */
3884 if (DECL_IS_OPERATOR_NEW (fn))
3885 {
3886 return_slot = NULL;
3887 modify_dest = NULL;
3888 }
3889
3890 /* Declare the return variable for the function. */
3891 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
3892
3893 /* Add local vars in this inlined callee to caller. */
3894 add_local_variables (id->src_cfun, cfun, id, true);
3895
3896 if (dump_file && (dump_flags & TDF_DETAILS))
3897 {
3898 fprintf (dump_file, "Inlining ");
3899 print_generic_expr (dump_file, id->src_fn, 0);
3900 fprintf (dump_file, " to ");
3901 print_generic_expr (dump_file, id->dst_fn, 0);
3902 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3903 }
3904
3905 /* This is it. Duplicate the callee body. Assume callee is
3906 pre-gimplified. Note that we must not alter the caller
3907 function in any way before this point, as this CALL_EXPR may be
3908 a self-referential call; if we're calling ourselves, we need to
3909 duplicate our body before altering anything. */
3910 copy_body (id, bb->count,
3911 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3912 bb, return_block, NULL, NULL);
3913
3914 /* Reset the escaped solution. */
3915 if (cfun->gimple_df)
3916 pt_solution_reset (&cfun->gimple_df->escaped);
3917
3918 /* Clean up. */
3919 if (id->debug_map)
3920 {
3921 pointer_map_destroy (id->debug_map);
3922 id->debug_map = dst;
3923 }
3924 pointer_map_destroy (id->decl_map);
3925 id->decl_map = st;
3926
3927 /* Unlink the call's virtual operands before replacing it. */
3928 unlink_stmt_vdef (stmt);
3929
3930 /* If the inlined function returns a result that we care about,
3931 substitute the GIMPLE_CALL with an assignment of the return
3932 variable to the LHS of the call. That is, if STMT was
3933 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3934 if (use_retvar && gimple_call_lhs (stmt))
3935 {
3936 gimple old_stmt = stmt;
3937 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3938 gsi_replace (&stmt_gsi, stmt, false);
3939 if (gimple_in_ssa_p (cfun))
3940 mark_symbols_for_renaming (stmt);
3941 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3942 }
3943 else
3944 {
3945 /* Handle the case of inlining a function with no return
3946 statement, which causes the return value to become undefined. */
3947 if (gimple_call_lhs (stmt)
3948 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3949 {
3950 tree name = gimple_call_lhs (stmt);
3951 tree var = SSA_NAME_VAR (name);
3952 tree def = gimple_default_def (cfun, var);
3953
3954 if (def)
3955 {
3956 /* If the variable is used undefined, make this name
3957 undefined via a move. */
3958 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3959 gsi_replace (&stmt_gsi, stmt, true);
3960 }
3961 else
3962 {
3963 /* Otherwise make this variable undefined. */
3964 gsi_remove (&stmt_gsi, true);
3965 set_default_def (var, name);
3966 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3967 }
3968 }
3969 else
3970 gsi_remove (&stmt_gsi, true);
3971 }
3972
3973 if (purge_dead_abnormal_edges)
3974 {
3975 gimple_purge_dead_eh_edges (return_block);
3976 gimple_purge_dead_abnormal_call_edges (return_block);
3977 }
3978
3979 /* If the value of the new expression is ignored, that's OK. We
3980 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3981 the equivalent inlined version either. */
3982 if (is_gimple_assign (stmt))
3983 {
3984 gcc_assert (gimple_assign_single_p (stmt)
3985 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3986 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3987 }
3988
3989 /* Output the inlining info for this abstract function, since it has been
3990 inlined. If we don't do this now, we can lose the information about the
3991 variables in the function when the blocks get blown away as soon as we
3992 remove the cgraph node. */
3993 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3994
3995 /* Update callgraph if needed. */
3996 cgraph_remove_node (cg_edge->callee);
3997
3998 id->block = NULL_TREE;
3999 successfully_inlined = TRUE;
4000
4001 egress:
4002 input_location = saved_location;
4003 return successfully_inlined;
4004 }
4005
4006 /* Expand call statements reachable from STMT_P.
4007 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4008 in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). We can
4009 unfortunately not use that function here because we need a pointer
4010 to the CALL_EXPR, not the tree itself. */
4011
4012 static bool
4013 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4014 {
4015 gimple_stmt_iterator gsi;
4016
4017 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4018 {
4019 gimple stmt = gsi_stmt (gsi);
4020
4021 if (is_gimple_call (stmt)
4022 && expand_call_inline (bb, stmt, id))
4023 return true;
4024 }
4025
4026 return false;
4027 }
4028
4029
4030 /* Walk all basic blocks created after FIRST and try to fold every statement
4031 in the STATEMENTS pointer set. */
4032
4033 static void
4034 fold_marked_statements (int first, struct pointer_set_t *statements)
4035 {
4036 for (; first < n_basic_blocks; first++)
4037 if (BASIC_BLOCK (first))
4038 {
4039 gimple_stmt_iterator gsi;
4040
4041 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4042 !gsi_end_p (gsi);
4043 gsi_next (&gsi))
4044 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4045 {
4046 gimple old_stmt = gsi_stmt (gsi);
4047 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4048
4049 if (old_decl && DECL_BUILT_IN (old_decl))
4050 {
4051 /* Folding builtins can create multiple instructions;
4052 we need to look at all of them. */
4053 gimple_stmt_iterator i2 = gsi;
4054 gsi_prev (&i2);
4055 if (fold_stmt (&gsi))
4056 {
4057 gimple new_stmt;
4058 if (gsi_end_p (i2))
4059 i2 = gsi_start_bb (BASIC_BLOCK (first));
4060 else
4061 gsi_next (&i2);
4062 while (1)
4063 {
4064 new_stmt = gsi_stmt (i2);
4065 update_stmt (new_stmt);
4066 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4067 new_stmt);
4068
4069 if (new_stmt == gsi_stmt (gsi))
4070 {
4071 /* It is okay to check only the very last
4072 of these statements. If it is a throwing
4073 statement, nothing will change. If it
4074 isn't, this can remove EH edges. That is
4075 only incorrect if some intermediate stmts
4076 throw while the last one does not; then
4077 we would have to split the block, which we
4078 can't do here and we'd lose anyway. And as
4079 builtins probably never throw, this is all
4080 moot anyway. */
4081 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4082 new_stmt))
4083 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4084 break;
4085 }
4086 gsi_next (&i2);
4087 }
4088 }
4089 }
4090 else if (fold_stmt (&gsi))
4091 {
4092 /* Re-read the statement from GSI as fold_stmt() may
4093 have changed it. */
4094 gimple new_stmt = gsi_stmt (gsi);
4095 update_stmt (new_stmt);
4096
4097 if (is_gimple_call (old_stmt)
4098 || is_gimple_call (new_stmt))
4099 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4100 new_stmt);
4101
4102 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4103 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4104 }
4105 }
4106 }
4107 }
4108
4109 /* Return true if BB has at least one abnormal outgoing edge. */
4110
4111 static inline bool
4112 has_abnormal_outgoing_edge_p (basic_block bb)
4113 {
4114 edge e;
4115 edge_iterator ei;
4116
4117 FOR_EACH_EDGE (e, ei, bb->succs)
4118 if (e->flags & EDGE_ABNORMAL)
4119 return true;
4120
4121 return false;
4122 }
4123
4124 /* Expand calls to inline functions in the body of FN. */
4125
4126 unsigned int
4127 optimize_inline_calls (tree fn)
4128 {
4129 copy_body_data id;
4130 basic_block bb;
4131 int last = n_basic_blocks;
4132 struct gimplify_ctx gctx;
4133 bool inlined_p = false;
4134
4135 /* There is no point in performing inlining if errors have already
4136 occurred -- and we might crash if we try to inline invalid
4137 code. */
4138 if (seen_error ())
4139 return 0;
4140
4141 /* Clear out ID. */
4142 memset (&id, 0, sizeof (id));
4143
4144 id.src_node = id.dst_node = cgraph_get_node (fn);
4145 gcc_assert (id.dst_node->analyzed);
4146 id.dst_fn = fn;
4147 /* Or any functions that aren't finished yet. */
4148 if (current_function_decl)
4149 id.dst_fn = current_function_decl;
4150
4151 id.copy_decl = copy_decl_maybe_to_var;
4152 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4153 id.transform_new_cfg = false;
4154 id.transform_return_to_modify = true;
4155 id.transform_lang_insert_block = NULL;
4156 id.statements_to_fold = pointer_set_create ();
4157
4158 push_gimplify_context (&gctx);
4159
4160 /* We make no attempts to keep dominance info up-to-date. */
4161 free_dominance_info (CDI_DOMINATORS);
4162 free_dominance_info (CDI_POST_DOMINATORS);
4163
4164 /* Register specific gimple functions. */
4165 gimple_register_cfg_hooks ();
4166
4167 /* Reach the trees by walking over the CFG, and note the
4168 enclosing basic-blocks in the call edges. */
4169 /* We walk the blocks going forward, because inlined function bodies
4170 will split id->current_basic_block, and the new blocks will
4171 follow it; we'll trudge through them, processing their CALL_EXPRs
4172 along the way. */
4173 FOR_EACH_BB (bb)
4174 inlined_p |= gimple_expand_calls_inline (bb, &id);
4175
4176 pop_gimplify_context (NULL);
4177
4178 #ifdef ENABLE_CHECKING
4179 {
4180 struct cgraph_edge *e;
4181
4182 verify_cgraph_node (id.dst_node);
4183
4184 /* Double check that we inlined everything we are supposed to inline. */
4185 for (e = id.dst_node->callees; e; e = e->next_callee)
4186 gcc_assert (e->inline_failed);
4187 }
4188 #endif
4189
4190 /* Fold queued statements. */
4191 fold_marked_statements (last, id.statements_to_fold);
4192 pointer_set_destroy (id.statements_to_fold);
4193
4194 gcc_assert (!id.debug_stmts);
4195
4196 /* If we didn't inline into the function there is nothing to do. */
4197 if (!inlined_p)
4198 return 0;
4199
4200 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4201 number_blocks (fn);
4202
4203 delete_unreachable_blocks_update_callgraph (&id);
4204 #ifdef ENABLE_CHECKING
4205 verify_cgraph_node (id.dst_node);
4206 #endif
4207
4208 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4209 not possible yet - the IPA passes might make various functions not
4210 throw and they don't care to proactively update local EH info. This is
4211 done later in the fixup_cfg pass, which also executes the verification. */
4212 return (TODO_update_ssa
4213 | TODO_cleanup_cfg
4214 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4215 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4216 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4217 }
4218
4219 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4220
4221 tree
4222 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4223 {
4224 enum tree_code code = TREE_CODE (*tp);
4225 enum tree_code_class cl = TREE_CODE_CLASS (code);
4226
4227 /* We make copies of most nodes. */
4228 if (IS_EXPR_CODE_CLASS (cl)
4229 || code == TREE_LIST
4230 || code == TREE_VEC
4231 || code == TYPE_DECL
4232 || code == OMP_CLAUSE)
4233 {
4234 /* Because the chain gets clobbered when we make a copy, we save it
4235 here. */
4236 tree chain = NULL_TREE, new_tree;
4237
4238 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4239 chain = TREE_CHAIN (*tp);
4240
4241 /* Copy the node. */
4242 new_tree = copy_node (*tp);
4243
4244 /* Propagate mudflap marked-ness. */
4245 if (flag_mudflap && mf_marked_p (*tp))
4246 mf_mark (new_tree);
4247
4248 *tp = new_tree;
4249
4250 /* Now, restore the chain, if appropriate. That will cause
4251 walk_tree to walk into the chain as well. */
4252 if (code == PARM_DECL
4253 || code == TREE_LIST
4254 || code == OMP_CLAUSE)
4255 TREE_CHAIN (*tp) = chain;
4256
4257 /* For now, we don't update BLOCKs when we make copies. So, we
4258 have to nullify all BIND_EXPRs. */
4259 if (TREE_CODE (*tp) == BIND_EXPR)
4260 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4261 }
4262 else if (code == CONSTRUCTOR)
4263 {
4264 /* CONSTRUCTOR nodes need special handling because
4265 we need to duplicate the vector of elements. */
4266 tree new_tree;
4267
4268 new_tree = copy_node (*tp);
4269
4270 /* Propagate mudflap marked-ness. */
4271 if (flag_mudflap && mf_marked_p (*tp))
4272 mf_mark (new_tree);
4273
4274 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4275 CONSTRUCTOR_ELTS (*tp));
4276 *tp = new_tree;
4277 }
4278 else if (code == STATEMENT_LIST)
4279 /* We used to just abort on STATEMENT_LIST, but we can run into them
4280 with statement-expressions (c++/40975). */
4281 copy_statement_list (tp);
4282 else if (TREE_CODE_CLASS (code) == tcc_type)
4283 *walk_subtrees = 0;
4284 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4285 *walk_subtrees = 0;
4286 else if (TREE_CODE_CLASS (code) == tcc_constant)
4287 *walk_subtrees = 0;
4288 return NULL_TREE;
4289 }
4290
4291 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4292 information indicating to what new SAVE_EXPR this one should be mapped,
4293 use that one. Otherwise, create a new node and enter it in ST. FN is
4294 the function into which the copy will be placed. */
4295
4296 static void
4297 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4298 {
4299 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4300 tree *n;
4301 tree t;
4302
4303 /* See if we already encountered this SAVE_EXPR. */
4304 n = (tree *) pointer_map_contains (st, *tp);
4305
4306 /* If we didn't already remap this SAVE_EXPR, do so now. */
4307 if (!n)
4308 {
4309 t = copy_node (*tp);
4310
4311 /* Remember this SAVE_EXPR. */
4312 *pointer_map_insert (st, *tp) = t;
4313 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4314 *pointer_map_insert (st, t) = t;
4315 }
4316 else
4317 {
4318 /* We've already walked into this SAVE_EXPR; don't do it again. */
4319 *walk_subtrees = 0;
4320 t = *n;
4321 }
4322
4323 /* Replace this SAVE_EXPR with the copy. */
4324 *tp = t;
4325 }
4326
4327 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
4328 copies the declaration and enters it in the splay_tree in DATA (which is
4329 really a `copy_body_data *'). */
4330
4331 static tree
4332 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4333 void *data)
4334 {
4335 copy_body_data *id = (copy_body_data *) data;
4336
4337 /* Don't walk into types. */
4338 if (TYPE_P (*tp))
4339 *walk_subtrees = 0;
4340
4341 else if (TREE_CODE (*tp) == LABEL_EXPR)
4342 {
4343 tree decl = TREE_OPERAND (*tp, 0);
4344
4345 /* Copy the decl and remember the copy. */
4346 insert_decl_map (id, decl, id->copy_decl (decl, id));
4347 }
4348
4349 return NULL_TREE;
4350 }
4351
4352 /* Perform any modifications to EXPR required when it is unsaved. Does
4353 not recurse into EXPR's subtrees. */
4354
4355 static void
4356 unsave_expr_1 (tree expr)
4357 {
4358 switch (TREE_CODE (expr))
4359 {
4360 case TARGET_EXPR:
4361 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4362 It's OK for this to happen if it was part of a subtree that
4363 isn't immediately expanded, such as operand 2 of another
4364 TARGET_EXPR. */
4365 if (TREE_OPERAND (expr, 1))
4366 break;
4367
4368 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4369 TREE_OPERAND (expr, 3) = NULL_TREE;
4370 break;
4371
4372 default:
4373 break;
4374 }
4375 }
4376
4377 /* Called via walk_tree when an expression is unsaved. Using the
4378 decl map pointed to by ST (really a `struct pointer_map_t *'),
4379 remaps all local declarations to appropriate replacements. */
4380
4381 static tree
4382 unsave_r (tree *tp, int *walk_subtrees, void *data)
4383 {
4384 copy_body_data *id = (copy_body_data *) data;
4385 struct pointer_map_t *st = id->decl_map;
4386 tree *n;
4387
4388 /* Only a local declaration (variable or label). */
4389 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4390 || TREE_CODE (*tp) == LABEL_DECL)
4391 {
4392 /* Lookup the declaration. */
4393 n = (tree *) pointer_map_contains (st, *tp);
4394
4395 /* If it's there, remap it. */
4396 if (n)
4397 *tp = *n;
4398 }
4399
4400 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4401 gcc_unreachable ();
4402 else if (TREE_CODE (*tp) == BIND_EXPR)
4403 copy_bind_expr (tp, walk_subtrees, id);
4404 else if (TREE_CODE (*tp) == SAVE_EXPR
4405 || TREE_CODE (*tp) == TARGET_EXPR)
4406 remap_save_expr (tp, st, walk_subtrees);
4407 else
4408 {
4409 copy_tree_r (tp, walk_subtrees, NULL);
4410
4411 /* Do whatever unsaving is required. */
4412 unsave_expr_1 (*tp);
4413 }
4414
4415 /* Keep iterating. */
4416 return NULL_TREE;
4417 }
4418
4419 /* Copies everything in EXPR and replaces variables, labels
4420 and SAVE_EXPRs local to EXPR. */
4421
4422 tree
4423 unsave_expr_now (tree expr)
4424 {
4425 copy_body_data id;
4426
4427 /* There's nothing to do for NULL_TREE. */
4428 if (expr == 0)
4429 return expr;
4430
4431 /* Set up ID. */
4432 memset (&id, 0, sizeof (id));
4433 id.src_fn = current_function_decl;
4434 id.dst_fn = current_function_decl;
4435 id.decl_map = pointer_map_create ();
4436 id.debug_map = NULL;
4437
4438 id.copy_decl = copy_decl_no_change;
4439 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4440 id.transform_new_cfg = false;
4441 id.transform_return_to_modify = false;
4442 id.transform_lang_insert_block = NULL;
4443
4444 /* Walk the tree once to find local labels. */
4445 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4446
4447 /* Walk the tree again, copying, remapping, and unsaving. */
4448 walk_tree (&expr, unsave_r, &id, NULL);
4449
4450 /* Clean up. */
4451 pointer_map_destroy (id.decl_map);
4452 if (id.debug_map)
4453 pointer_map_destroy (id.debug_map);
4454
4455 return expr;
4456 }
4457
4458 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4459 label, copies the declaration and enters it in the splay_tree in DATA (which
4460 is really a 'copy_body_data *'). */
4461
4462 static tree
4463 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4464 bool *handled_ops_p ATTRIBUTE_UNUSED,
4465 struct walk_stmt_info *wi)
4466 {
4467 copy_body_data *id = (copy_body_data *) wi->info;
4468 gimple stmt = gsi_stmt (*gsip);
4469
4470 if (gimple_code (stmt) == GIMPLE_LABEL)
4471 {
4472 tree decl = gimple_label_label (stmt);
4473
4474 /* Copy the decl and remember the copy. */
4475 insert_decl_map (id, decl, id->copy_decl (decl, id));
4476 }
4477
4478 return NULL_TREE;
4479 }
4480
4481
4482 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4483 Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4484 remaps all local declarations to appropriate replacements in gimple
4485 operands. */
4486
4487 static tree
4488 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4489 {
4490 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4491 copy_body_data *id = (copy_body_data *) wi->info;
4492 struct pointer_map_t *st = id->decl_map;
4493 tree *n;
4494 tree expr = *tp;
4495
4496 /* Only a local declaration (variable or label). */
4497 if ((TREE_CODE (expr) == VAR_DECL
4498 && !TREE_STATIC (expr))
4499 || TREE_CODE (expr) == LABEL_DECL)
4500 {
4501 /* Lookup the declaration. */
4502 n = (tree *) pointer_map_contains (st, expr);
4503
4504 /* If it's there, remap it. */
4505 if (n)
4506 *tp = *n;
4507 *walk_subtrees = 0;
4508 }
4509 else if (TREE_CODE (expr) == STATEMENT_LIST
4510 || TREE_CODE (expr) == BIND_EXPR
4511 || TREE_CODE (expr) == SAVE_EXPR)
4512 gcc_unreachable ();
4513 else if (TREE_CODE (expr) == TARGET_EXPR)
4514 {
4515 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4516 It's OK for this to happen if it was part of a subtree that
4517 isn't immediately expanded, such as operand 2 of another
4518 TARGET_EXPR. */
4519 if (!TREE_OPERAND (expr, 1))
4520 {
4521 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4522 TREE_OPERAND (expr, 3) = NULL_TREE;
4523 }
4524 }
4525
4526 /* Keep iterating. */
4527 return NULL_TREE;
4528 }
4529
4530
4531 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4532 Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4533 remaps all local declarations to appropriate replacements in gimple
4534 statements. */
4535
4536 static tree
4537 replace_locals_stmt (gimple_stmt_iterator *gsip,
4538 bool *handled_ops_p ATTRIBUTE_UNUSED,
4539 struct walk_stmt_info *wi)
4540 {
4541 copy_body_data *id = (copy_body_data *) wi->info;
4542 gimple stmt = gsi_stmt (*gsip);
4543
4544 if (gimple_code (stmt) == GIMPLE_BIND)
4545 {
4546 tree block = gimple_bind_block (stmt);
4547
4548 if (block)
4549 {
4550 remap_block (&block, id);
4551 gimple_bind_set_block (stmt, block);
4552 }
4553
4554 /* This will remap a lot of the same decls again, but this should be
4555 harmless. */
4556 if (gimple_bind_vars (stmt))
4557 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4558 }
4559
4560 /* Keep iterating. */
4561 return NULL_TREE;
4562 }
4563
4564
4565 /* Copies everything in SEQ and replaces variables and labels local to
4566 current_function_decl. */
4567
4568 gimple_seq
4569 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4570 {
4571 copy_body_data id;
4572 struct walk_stmt_info wi;
4573 struct pointer_set_t *visited;
4574 gimple_seq copy;
4575
4576 /* There's nothing to do for an empty sequence. */
4577 if (seq == NULL)
4578 return seq;
4579
4580 /* Set up ID. */
4581 memset (&id, 0, sizeof (id));
4582 id.src_fn = current_function_decl;
4583 id.dst_fn = current_function_decl;
4584 id.decl_map = pointer_map_create ();
4585 id.debug_map = NULL;
4586
4587 id.copy_decl = copy_decl_no_change;
4588 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4589 id.transform_new_cfg = false;
4590 id.transform_return_to_modify = false;
4591 id.transform_lang_insert_block = NULL;
4592
4593 /* Walk the tree once to find local labels. */
4594 memset (&wi, 0, sizeof (wi));
4595 visited = pointer_set_create ();
4596 wi.info = &id;
4597 wi.pset = visited;
4598 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4599 pointer_set_destroy (visited);
4600
4601 copy = gimple_seq_copy (seq);
4602
4603 /* Walk the copy, remapping decls. */
4604 memset (&wi, 0, sizeof (wi));
4605 wi.info = &id;
4606 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4607
4608 /* Clean up. */
4609 pointer_map_destroy (id.decl_map);
4610 if (id.debug_map)
4611 pointer_map_destroy (id.debug_map);
4612
4613 return copy;
4614 }
4615
4616
4617 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4618
4619 static tree
4620 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4621 {
4622 if (*tp == data)
4623 return (tree) data;
4624 else
4625 return NULL;
4626 }
4627
4628 DEBUG_FUNCTION bool
4629 debug_find_tree (tree top, tree search)
4630 {
4631 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4632 }
4633
4634
4635 /* Declare the variables created by the inliner. Add all the variables in
4636 VARS to BIND_EXPR. */
4637
4638 static void
4639 declare_inline_vars (tree block, tree vars)
4640 {
4641 tree t;
4642 for (t = vars; t; t = DECL_CHAIN (t))
4643 {
4644 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4645 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4646 add_local_decl (cfun, t);
4647 }
4648
4649 if (block)
4650 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4651 }
4652
4653 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
4654 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
4655 VAR_DECL translation. */
4656
4657 static tree
4658 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4659 {
4660 /* Don't generate debug information for the copy if we wouldn't have
4661 generated it for the original either. */
4662 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4663 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4664
4665 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4666 declaration inspired this copy. */
4667 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4668
4669 /* The new variable/label has no RTL, yet. */
4670 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4671 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4672 SET_DECL_RTL (copy, 0);
4673
4674 /* These args would always appear unused, if not for this. */
4675 TREE_USED (copy) = 1;
4676
4677 /* Set the context for the new declaration. */
4678 if (!DECL_CONTEXT (decl))
4679 /* Globals stay global. */
4680 ;
4681 else if (DECL_CONTEXT (decl) != id->src_fn)
4682 /* Things that weren't in the scope of the function we're inlining
4683 from aren't in the scope we're inlining to, either. */
4684 ;
4685 else if (TREE_STATIC (decl))
4686 /* Function-scoped static variables should stay in the original
4687 function. */
4688 ;
4689 else
4690 /* Ordinary automatic local variables are now in the scope of the
4691 new function. */
4692 DECL_CONTEXT (copy) = id->dst_fn;
4693
4694 if (TREE_CODE (decl) == VAR_DECL
4695 /* C++ clones functions during parsing, before
4696 referenced_vars. */
4697 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
4698 && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
4699 DECL_UID (decl)))
4700 add_referenced_var (copy);
4701
4702 return copy;
4703 }
4704
4705 static tree
4706 copy_decl_to_var (tree decl, copy_body_data *id)
4707 {
4708 tree copy, type;
4709
4710 gcc_assert (TREE_CODE (decl) == PARM_DECL
4711 || TREE_CODE (decl) == RESULT_DECL);
4712
4713 type = TREE_TYPE (decl);
4714
4715 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4716 VAR_DECL, DECL_NAME (decl), type);
4717 if (DECL_PT_UID_SET_P (decl))
4718 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4719 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4720 TREE_READONLY (copy) = TREE_READONLY (decl);
4721 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4722 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4723
4724 return copy_decl_for_dup_finish (id, decl, copy);
4725 }
4726
4727 /* Like copy_decl_to_var, but create a return slot object instead of a
4728 pointer variable for return by invisible reference. */
4729
4730 static tree
4731 copy_result_decl_to_var (tree decl, copy_body_data *id)
4732 {
4733 tree copy, type;
4734
4735 gcc_assert (TREE_CODE (decl) == PARM_DECL
4736 || TREE_CODE (decl) == RESULT_DECL);
4737
4738 type = TREE_TYPE (decl);
4739 if (DECL_BY_REFERENCE (decl))
4740 type = TREE_TYPE (type);
4741
4742 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4743 VAR_DECL, DECL_NAME (decl), type);
4744 if (DECL_PT_UID_SET_P (decl))
4745 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4746 TREE_READONLY (copy) = TREE_READONLY (decl);
4747 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4748 if (!DECL_BY_REFERENCE (decl))
4749 {
4750 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4751 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4752 }
4753
4754 return copy_decl_for_dup_finish (id, decl, copy);
4755 }
4756
4757 tree
4758 copy_decl_no_change (tree decl, copy_body_data *id)
4759 {
4760 tree copy;
4761
4762 copy = copy_node (decl);
4763
4764 /* The COPY is not abstract; it will be generated in DST_FN. */
4765 DECL_ABSTRACT (copy) = 0;
4766 lang_hooks.dup_lang_specific_decl (copy);
4767
4768 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4769 been taken; it's for internal bookkeeping in expand_goto_internal. */
4770 if (TREE_CODE (copy) == LABEL_DECL)
4771 {
4772 TREE_ADDRESSABLE (copy) = 0;
4773 LABEL_DECL_UID (copy) = -1;
4774 }
4775
4776 return copy_decl_for_dup_finish (id, decl, copy);
4777 }
4778
4779 static tree
4780 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4781 {
4782 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4783 return copy_decl_to_var (decl, id);
4784 else
4785 return copy_decl_no_change (decl, id);
4786 }
4787
4788 /* Return a copy of the function's argument tree. */
4789 static tree
4790 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4791 bitmap args_to_skip, tree *vars)
4792 {
4793 tree arg, *parg;
4794 tree new_parm = NULL;
4795 int i = 0;
4796
4797 parg = &new_parm;
4798
4799 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4800 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4801 {
4802 tree new_tree = remap_decl (arg, id);
4803 lang_hooks.dup_lang_specific_decl (new_tree);
4804 *parg = new_tree;
4805 parg = &DECL_CHAIN (new_tree);
4806 }
4807 else if (!pointer_map_contains (id->decl_map, arg))
4808 {
4809 /* Make an equivalent VAR_DECL. If the argument was used
4810 as a temporary variable later in the function, the uses will be
4811 replaced by the local variable. */
4812 tree var = copy_decl_to_var (arg, id);
4813 add_referenced_var (var);
4814 insert_decl_map (id, arg, var);
4815 /* Declare this new variable. */
4816 DECL_CHAIN (var) = *vars;
4817 *vars = var;
4818 }
4819 return new_parm;
4820 }
4821
4822 /* Return a copy of the function's static chain. */
4823 static tree
4824 copy_static_chain (tree static_chain, copy_body_data * id)
4825 {
4826 tree *chain_copy, *pvar;
4827
4828 chain_copy = &static_chain;
4829 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4830 {
4831 tree new_tree = remap_decl (*pvar, id);
4832 lang_hooks.dup_lang_specific_decl (new_tree);
4833 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4834 *pvar = new_tree;
4835 }
4836 return static_chain;
4837 }
4838
4839 /* Return true if the function is allowed to be versioned.
4840 This is a guard for the versioning functionality. */
4841
4842 bool
4843 tree_versionable_function_p (tree fndecl)
4844 {
4845 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4846 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4847 }
4848
4849 /* Delete all unreachable basic blocks and update callgraph.
4850 Doing so is somewhat nontrivial because we need to update all clones and
4851 remove inline functions that become unreachable. */
4852
4853 static bool
4854 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4855 {
4856 bool changed = false;
4857 basic_block b, next_bb;
4858
4859 find_unreachable_blocks ();
4860
4861 /* Delete all unreachable basic blocks. */
4862
4863 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4864 {
4865 next_bb = b->next_bb;
4866
4867 if (!(b->flags & BB_REACHABLE))
4868 {
4869 gimple_stmt_iterator bsi;
4870
4871 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4872 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4873 {
4874 struct cgraph_edge *e;
4875 struct cgraph_node *node;
4876
4877 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4878 {
4879 if (!e->inline_failed)
4880 cgraph_remove_node_and_inline_clones (e->callee);
4881 else
4882 cgraph_remove_edge (e);
4883 }
4884 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4885 && id->dst_node->clones)
4886 for (node = id->dst_node->clones; node != id->dst_node;)
4887 {
4888 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4889 {
4890 if (!e->inline_failed)
4891 cgraph_remove_node_and_inline_clones (e->callee);
4892 else
4893 cgraph_remove_edge (e);
4894 }
4895
4896 if (node->clones)
4897 node = node->clones;
4898 else if (node->next_sibling_clone)
4899 node = node->next_sibling_clone;
4900 else
4901 {
4902 while (node != id->dst_node && !node->next_sibling_clone)
4903 node = node->clone_of;
4904 if (node != id->dst_node)
4905 node = node->next_sibling_clone;
4906 }
4907 }
4908 }
4909 delete_basic_block (b);
4910 changed = true;
4911 }
4912 }
4913
4914 return changed;
4915 }
4916
4917 /* Update clone info after duplication. */
4918
4919 static void
4920 update_clone_info (copy_body_data * id)
4921 {
4922 struct cgraph_node *node;
4923 if (!id->dst_node->clones)
4924 return;
4925 for (node = id->dst_node->clones; node != id->dst_node;)
4926 {
4927 /* First update replace maps to match the new body. */
4928 if (node->clone.tree_map)
4929 {
4930 unsigned int i;
4931 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4932 {
4933 struct ipa_replace_map *replace_info;
4934 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4935 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4936 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4937 }
4938 }
4939 if (node->clones)
4940 node = node->clones;
4941 else if (node->next_sibling_clone)
4942 node = node->next_sibling_clone;
4943 else
4944 {
4945 while (node != id->dst_node && !node->next_sibling_clone)
4946 node = node->clone_of;
4947 if (node != id->dst_node)
4948 node = node->next_sibling_clone;
4949 }
4950 }
4951 }
4952
4953 /* Create a copy of a function's tree.
4954 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4955 of the original function and the new copied function
4956 respectively. In case we want to replace a DECL
4957 tree with another tree while duplicating the function's
4958 body, TREE_MAP represents the mapping between these
4959 trees. If UPDATE_CLONES is set, the call_stmt fields
4960 of edges of clones of the function will be updated.
4961
4962 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
4963 from the new version.
4964 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4965 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4966 */
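/* An illustrative invocation (a sketch only; OLD_DECL and NEW_DECL are
   placeholders, real callers live in the IPA cloning code): producing a
   plain copy with no replacements or parameter changes could look like

     tree_function_versioning (old_decl, new_decl, NULL, false,
                               NULL, NULL, NULL);  */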
4967 void
4968 tree_function_versioning (tree old_decl, tree new_decl,
4969 VEC(ipa_replace_map_p,gc)* tree_map,
4970 bool update_clones, bitmap args_to_skip,
4971 bitmap blocks_to_copy, basic_block new_entry)
4972 {
4973 struct cgraph_node *old_version_node;
4974 struct cgraph_node *new_version_node;
4975 copy_body_data id;
4976 tree p;
4977 unsigned i;
4978 struct ipa_replace_map *replace_info;
4979 basic_block old_entry_block, bb;
4980 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4981
4982 tree old_current_function_decl = current_function_decl;
4983 tree vars = NULL_TREE;
4984
4985 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4986 && TREE_CODE (new_decl) == FUNCTION_DECL);
4987 DECL_POSSIBLY_INLINED (old_decl) = 1;
4988
4989 old_version_node = cgraph_get_node (old_decl);
4990 gcc_checking_assert (old_version_node);
4991 new_version_node = cgraph_get_node (new_decl);
4992 gcc_checking_assert (new_version_node);
4993
4994 /* Output the inlining info for this abstract function, since it has been
4995 inlined. If we don't do this now, we can lose the information about the
4996 variables in the function when the blocks get blown away as soon as we
4997 remove the cgraph node. */
4998 (*debug_hooks->outlining_inline_function) (old_decl);
4999
5000 DECL_ARTIFICIAL (new_decl) = 1;
5001 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5002 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5003
5004 /* Prepare the data structures for the tree copy. */
5005 memset (&id, 0, sizeof (id));
5006
5007 /* Generate a new name for the new version. */
5008 id.statements_to_fold = pointer_set_create ();
5009
5010 id.decl_map = pointer_map_create ();
5011 id.debug_map = NULL;
5012 id.src_fn = old_decl;
5013 id.dst_fn = new_decl;
5014 id.src_node = old_version_node;
5015 id.dst_node = new_version_node;
5016 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5017 if (id.src_node->ipa_transforms_to_apply)
5018 {
5019 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5020 unsigned int i;
5021
5022 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5023 id.src_node->ipa_transforms_to_apply);
5024 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5025 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5026 VEC_index (ipa_opt_pass,
5027 old_transforms_to_apply,
5028 i));
5029 }
5030
5031 id.copy_decl = copy_decl_no_change;
5032 id.transform_call_graph_edges
5033 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5034 id.transform_new_cfg = true;
5035 id.transform_return_to_modify = false;
5036 id.transform_lang_insert_block = NULL;
5037
5038 current_function_decl = new_decl;
5039 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5040 (DECL_STRUCT_FUNCTION (old_decl));
5041 initialize_cfun (new_decl, old_decl,
5042 old_entry_block->count);
5043 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5044 = id.src_cfun->gimple_df->ipa_pta;
5045 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5046
5047 /* Copy the function's static chain. */
5048 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5049 if (p)
5050 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5051 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5052 &id);
5053
5054 /* If there's a tree_map, prepare for substitution. */
5055 if (tree_map)
5056 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5057 {
5058 gimple init;
5059 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5060 if (replace_info->replace_p)
5061 {
5062 tree op = replace_info->new_tree;
5063 if (!replace_info->old_tree)
5064 {
5065 int i = replace_info->parm_num;
5066 tree parm;
5067 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5068 i --;
5069 replace_info->old_tree = parm;
5070 }
5071
5072
5073 STRIP_NOPS (op);
5074
5075 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5076 op = TREE_OPERAND (op, 0);
5077
5078 if (TREE_CODE (op) == ADDR_EXPR)
5079 {
5080 op = TREE_OPERAND (op, 0);
5081 while (handled_component_p (op))
5082 op = TREE_OPERAND (op, 0);
5083 if (TREE_CODE (op) == VAR_DECL)
5084 add_referenced_var (op);
5085 }
5086 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5087 init = setup_one_parameter (&id, replace_info->old_tree,
5088 replace_info->new_tree, id.src_fn,
5089 NULL,
5090 &vars);
5091 if (init)
5092 VEC_safe_push (gimple, heap, init_stmts, init);
5093 }
5094 }
5095 /* Copy the function's arguments. */
5096 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5097 DECL_ARGUMENTS (new_decl) =
5098 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5099 args_to_skip, &vars);
5100
5101 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5102 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5103
5104 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5105
5106 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5107 /* Add local vars. */
5108 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
5109
5110 if (DECL_RESULT (old_decl) != NULL_TREE)
5111 {
5112 tree old_name;
5113 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5114 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5115 if (gimple_in_ssa_p (id.src_cfun)
5116 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5117 && (old_name
5118 = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5119 {
5120 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5121 insert_decl_map (&id, old_name, new_name);
5122 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5123 set_default_def (DECL_RESULT (new_decl), new_name);
5124 }
5125 }
5126
5127 /* Copy the function's body. */
5128 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5129 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5130
5131 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5132 number_blocks (new_decl);
5133
5134 /* We want to create the BB unconditionally, so that the addition of
5135 debug stmts doesn't affect BB count, which may in the end cause
5136 codegen differences. */
5137 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5138 while (VEC_length (gimple, init_stmts))
5139 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5140 update_clone_info (&id);
5141
5142 /* Remap the nonlocal_goto_save_area, if any. */
5143 if (cfun->nonlocal_goto_save_area)
5144 {
5145 struct walk_stmt_info wi;
5146
5147 memset (&wi, 0, sizeof (wi));
5148 wi.info = &id;
5149 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5150 }
5151
5152 /* Clean up. */
5153 pointer_map_destroy (id.decl_map);
5154 if (id.debug_map)
5155 pointer_map_destroy (id.debug_map);
5156 free_dominance_info (CDI_DOMINATORS);
5157 free_dominance_info (CDI_POST_DOMINATORS);
5158
5159 fold_marked_statements (0, id.statements_to_fold);
5160 pointer_set_destroy (id.statements_to_fold);
5161 fold_cond_expr_cond ();
5162 delete_unreachable_blocks_update_callgraph (&id);
5163 if (id.dst_node->analyzed)
5164 cgraph_rebuild_references ();
5165 update_ssa (TODO_update_ssa);
5166
5167 /* After partial cloning we need to rescale frequencies, so they are
5168 within proper range in the cloned function. */
5169 if (new_entry)
5170 {
5171 struct cgraph_edge *e;
5172 rebuild_frequencies ();
5173
5174 new_version_node->count = ENTRY_BLOCK_PTR->count;
5175 for (e = new_version_node->callees; e; e = e->next_callee)
5176 {
5177 basic_block bb = gimple_bb (e->call_stmt);
5178 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5179 bb);
5180 e->count = bb->count;
5181 }
5182 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5183 {
5184 basic_block bb = gimple_bb (e->call_stmt);
5185 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5186 bb);
5187 e->count = bb->count;
5188 }
5189 }
5190
5191 free_dominance_info (CDI_DOMINATORS);
5192 free_dominance_info (CDI_POST_DOMINATORS);
5193
5194 gcc_assert (!id.debug_stmts);
5195 VEC_free (gimple, heap, init_stmts);
5196 pop_cfun ();
5197 current_function_decl = old_current_function_decl;
5198 gcc_assert (!current_function_decl
5199 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5200 return;
5201 }
5202
5203 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5204 the callee and return the inlined body on success. */
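/* For instance (illustrative only): for a "const" function with a saved
   GENERIC body, the copied body ends in a MODIFY_EXPR of the result, and
   we hand back its right-hand side for use in place of the CALL_EXPR.  */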
5205
5206 tree
5207 maybe_inline_call_in_expr (tree exp)
5208 {
5209 tree fn = get_callee_fndecl (exp);
5210
5211 /* We can only try to inline "const" functions. */
5212 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5213 {
5214 struct pointer_map_t *decl_map = pointer_map_create ();
5215 call_expr_arg_iterator iter;
5216 copy_body_data id;
5217 tree param, arg, t;
5218
5219 /* Remap the parameters. */
5220 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5221 param;
5222 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5223 *pointer_map_insert (decl_map, param) = arg;
5224
5225 memset (&id, 0, sizeof (id));
5226 id.src_fn = fn;
5227 id.dst_fn = current_function_decl;
5228 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5229 id.decl_map = decl_map;
5230
5231 id.copy_decl = copy_decl_no_change;
5232 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5233 id.transform_new_cfg = false;
5234 id.transform_return_to_modify = true;
5235 id.transform_lang_insert_block = NULL;
5236
5237 /* Make sure not to unshare trees behind the front-end's back
5238 since front-end specific mechanisms may rely on sharing. */
5239 id.regimplify = false;
5240 id.do_not_unshare = true;
5241
5242 /* We're not inside any EH region. */
5243 id.eh_lp_nr = 0;
5244
5245 t = copy_tree_body (&id);
5246 pointer_map_destroy (decl_map);
5247
5248 /* We can only return something suitable for use in a GENERIC
5249 expression tree. */
5250 if (TREE_CODE (t) == MODIFY_EXPR)
5251 return TREE_OPERAND (t, 1);
5252 }
5253
5254 return NULL_TREE;
5255 }
5256
5257 /* Duplicate a type, fields and all. */
5258
5259 tree
5260 build_duplicate_type (tree type)
5261 {
5262 struct copy_body_data id;
5263
5264 memset (&id, 0, sizeof (id));
5265 id.src_fn = current_function_decl;
5266 id.dst_fn = current_function_decl;
5267 id.src_cfun = cfun;
5268 id.decl_map = pointer_map_create ();
5269 id.debug_map = NULL;
5270 id.copy_decl = copy_decl_no_change;
5271
5272 type = remap_type_1 (type, &id);
5273
5274 pointer_map_destroy (id.decl_map);
5275 if (id.debug_map)
5276 pointer_map_destroy (id.debug_map);
5277
5278 TYPE_CANONICAL (type) = type;
5279
5280 return type;
5281 }