re PR ipa/60243 (IPA is slow on large cgraph tree)
gcc/tree-inline.c
/* Tree inlining.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"

#include "rtl.h"  /* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
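
/* For illustration (a hypothetical example, not code from this file):
   inlining

     int add (int a, int b) { return a + b; }
     ...
     r = add (x, 1);

   duplicates the body of add into the caller, remaps the PARM_DECLs
   a and b to new local VAR_DECLs initialized from x and 1, and turns
   the RETURN_EXPR into an assignment to a returned-value variable
   that then feeds r, roughly:

     a.0 = x;
     b.1 = 1;
     retval.2 = a.0 + b.1;
     r = retval.2;  */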

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
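
/* A minimal usage sketch (hypothetical, for illustration only): a
   copy_decl callback would typically record each freshly copied decl
   so that later lookups find the copy instead of duplicating it again:

     tree copy = copy_node (old_decl);
     insert_decl_map (id, old_decl, copy);
     ...
     tree *slot = (tree *) pointer_map_contains (id->decl_map, old_decl);
     gcc_assert (slot && *slot == copy);

   The identity entry inserted above makes a second remap of COPY a
   no-op.  */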

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = (tree *) pointer_map_contains (id->decl_map, val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the lifetime
             of an SSA name across an abnormal edge, and it also increases
             register pressure.

             We simply initialize all uninitialized vars to 0, except for
             the case where we are inlining into the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
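
/* As a hypothetical illustration of the zero-initialization above:
   inlining a callee whose uninitialized local u_3(D) occurs in an
   abnormal PHI would otherwise extend that name's lifetime across
   the abnormal edge.  Instead, the copy gets an explicit definition
   in the entry block of the inlined body, roughly

     u_7 = 0;

   so the new name is no longer a default definition.  */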

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* For fields, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
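
/* A hypothetical example of when remapping triggers: in

     void f (int n) { int a[n]; ... }

   the type int[0:n-1] is variably modified, since its domain refers
   to the PARM_DECL n.  Copying the body must therefore build a fresh
   array type whose bounds refer to the remapped n; an ordinary type
   like int is just identity-mapped by remap_type above.  */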

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
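
/* For instance (a hypothetical case): a function-local

     static int counter;

   is not an auto variable of the source function, so it must remain
   shared by every inlined copy of the body rather than being
   duplicated; it therefore goes into BLOCK_NONLOCAL_VARs.  */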

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein, and hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = (tree *) pointer_map_contains (id->decl_map,
                                             TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
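
/* A hypothetical example of the MEM_REF re-canonicalization above:
   when inlining g (&a.b) into a caller, a dereference in the callee
   such as

     MEM[(int *)p_1]

   has p_1 replaced by &a.b, and the fold_build2 call re-canonicalizes
   the result to a direct reference, roughly

     MEM[(int *)&a + offset-of-b]

   instead of leaving a MEM_REF of an ADDR_EXPR in the IL.  */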


/* Called from copy_tree_body and friends via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
              *tp = gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  if (TREE_CODE (ptr) == ADDR_EXPR)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (ptr, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, ptr);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
                         have remapped a parameter as the property might be
                         valid only for the parameter itself.  */
                      if (TREE_THIS_NOTRAP (old)
                          && (!is_parm (TREE_OPERAND (old, 0))
                              || (!id->transform_parameter && is_parm (ptr))))
                        TREE_THIS_NOTRAP (*tp) = 1;
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If the EXPR has a block defined, map it to the newly constructed
         block.  When inlining, we want EXPRs without a block to appear in
         the block of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
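
/* The *& elimination above, illustrated on a hypothetical inline
   substitution: for

     void set (int *p) { *p = 1; }
     ...
     set (&x);

   the parameter p maps to &x, so the callee's *p first becomes *&x
   and is then folded back to plain x, rather than leaving an
   INDIRECT_REF of an ADDR_EXPR in the copied body.  */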

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
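
/* For example (hypothetical region numbers): if the callee's EH
   region 1 was duplicated as region 5 of the caller, an inlined

     __builtin_eh_pointer (1)

   must be rewritten to __builtin_eh_pointer (5);
   remap_eh_region_tree_nr performs exactly that rewrite on the
   INTEGER_CST argument.  */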

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->do_not_unshare
                                      ? id->retvar : unshare_expr (id->retvar),
                                      retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
                                       gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_taskgroup (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_TARGET:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_target
                   (s1, gimple_omp_target_kind (stmt),
                    gimple_omp_target_clauses (stmt));
          break;

        case GIMPLE_OMP_TEAMS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_teams
                   (s1, gimple_omp_teams_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        case GIMPLE_TRANSACTION:
          s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
          copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
          gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
         in a block that we aren't copying during tree_function_versioning,
         just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
              if (gimple_bb (def_stmt)
                  && !bitmap_bit_p (id->blocks_to_copy,
                                    gimple_bb (def_stmt)->index))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          copy = gimple_build_debug_source_bind
                   (gimple_debug_source_bind_get_var (stmt),
                    gimple_debug_source_bind_get_value (stmt), stmt);
          id->debug_stmts.safe_push (copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
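
/* To illustrate the GIMPLE_RETURN rewrite above (hypothetical GIMPLE):
   when inlining a callee ending in

     return x_1;

   with id->transform_return_to_modify set and id->retvar being the
   caller-side return variable retval.0, the statement is remapped to

     retval.0 = x_1;

   and the branch back to the caller is handled later through the CFG.  */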


/* Copy a basic block, scaling its profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;
  basic_block prev;

  /* Search for the previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block () will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = apply_scale (bb->count, count_scale);

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, so we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              vec<tree> argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray.create (n);
              argarray.safe_grow_cleared (n);

              /* Copy all the arguments before '...'  */
              memcpy (argarray.address (),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (argarray.address () + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              argarray.release ();

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }
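
          /* A hypothetical example of the two builtins handled above:
             inlining

               int f (int x, ...) { return g (x, __builtin_va_arg_pack ()); }

             into the call f (y, 1, 2) replaces __builtin_va_arg_pack ()
             with the anonymous arguments, producing g (y, 1, 2), while a
             use of __builtin_va_arg_pack_len () in the body would be
             replaced by the constant 2.  */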
1728
1729 /* Statements produced by inlining can be unfolded, especially
1730 when we constant propagated some operands. We can't fold
1731 them right now for two reasons:
1732 1) folding require SSA_NAME_DEF_STMTs to be correct
1733 2) we can't change function calls to builtins.
1734 So we just mark statement for later folding. We mark
1735 all new statements, instead just statements that has changed
1736 by some nontrivial substitution so even statements made
1737 foldable indirectly are updated. If this turns out to be
1738 expensive, copy_body can be told to watch for nontrivial
1739 changes. */
1740 if (id->statements_to_fold)
1741 pointer_set_insert (id->statements_to_fold, stmt);
1742
1743 /* We're duplicating a CALL_EXPR. Find any corresponding
1744 callgraph edges and update or duplicate them. */
1745 if (is_gimple_call (stmt))
1746 {
1747 struct cgraph_edge *edge;
1748
1749 switch (id->transform_call_graph_edges)
1750 {
1751 case CB_CGE_DUPLICATE:
1752 edge = cgraph_edge (id->src_node, orig_stmt);
1753 if (edge)
1754 {
1755 int edge_freq = edge->frequency;
1756 int new_freq;
1757 struct cgraph_edge *old_edge = edge;
1758 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1759 gimple_uid (stmt),
1760 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1761 true);
1762 /* We could also just rescale the frequency, but
1763 doing so would introduce roundoff errors and make
1764 the verifier unhappy. */
1765 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1766 copy_basic_block);
1767
1768 /* Speculative calls consist of two edges - direct and indirect.
1769 Duplicate the whole thing and distribute frequencies accordingly. */
1770 if (edge->speculative)
1771 {
1772 struct cgraph_edge *direct, *indirect;
1773 struct ipa_ref *ref;
1774
1775 gcc_assert (!edge->indirect_unknown_callee);
1776 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1777 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1778 gimple_uid (stmt),
1779 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1780 true);
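/* Split NEW_FREQ between the direct and the indirect clone in
   proportion to the original frequencies, capping each share at
   CGRAPH_FREQ_MAX.  */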
1781 if (old_edge->frequency + indirect->frequency)
1782 {
1783 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1784 (old_edge->frequency + indirect->frequency)),
1785 CGRAPH_FREQ_MAX);
1786 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1787 (old_edge->frequency + indirect->frequency)),
1788 CGRAPH_FREQ_MAX);
1789 }
1790 ipa_clone_ref (ref, id->dst_node, stmt);
1791 }
1792 else
1793 {
1794 edge->frequency = new_freq;
1795 if (dump_file
1796 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1797 && (edge_freq > edge->frequency + 10
1798 || edge_freq < edge->frequency - 10))
1799 {
1800 fprintf (dump_file, "Edge frequency estimated by "
1801 "cgraph %i diverge from inliner's estimate %i\n",
1802 edge_freq,
1803 edge->frequency);
1804 fprintf (dump_file,
1805 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1806 bb->index,
1807 bb->frequency,
1808 copy_basic_block->frequency);
1809 }
1810 }
1811 }
1812 break;
1813
1814 case CB_CGE_MOVE_CLONES:
1815 cgraph_set_call_stmt_including_clones (id->dst_node,
1816 orig_stmt, stmt);
1817 edge = cgraph_edge (id->dst_node, stmt);
1818 break;
1819
1820 case CB_CGE_MOVE:
1821 edge = cgraph_edge (id->dst_node, orig_stmt);
1822 if (edge)
1823 cgraph_set_call_stmt (edge, stmt);
1824 break;
1825
1826 default:
1827 gcc_unreachable ();
1828 }
1829
1830 /* Constant propagation of arguments done during inlining
1831 may create a new direct call. Produce an edge for it. */
1832 if ((!edge
1833 || (edge->indirect_inlining_edge
1834 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1835 && id->dst_node->definition
1836 && (fn = gimple_call_fndecl (stmt)) != NULL)
1837 {
1838 struct cgraph_node *dest = cgraph_get_node (fn);
1839
1840 /* We have a missing edge in the callgraph. This can happen
1841 when previous inlining turned an indirect call into a
1842 direct call by constant propagating arguments, or when we are
1843 producing a dead clone (for further cloning). In all
1844 other cases we hit a bug (incorrect node sharing is the
1845 most common reason for missing edges). */
1846 gcc_assert (!dest->definition
1847 || dest->address_taken
1848 || !id->src_node->definition
1849 || !id->dst_node->definition);
1850 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1851 cgraph_create_edge_including_clones
1852 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1853 compute_call_stmt_bb_frequency (id->dst_node->decl,
1854 copy_basic_block),
1855 CIF_ORIGINALLY_INDIRECT_CALL);
1856 else
1857 cgraph_create_edge (id->dst_node, dest, stmt,
1858 bb->count,
1859 compute_call_stmt_bb_frequency
1860 (id->dst_node->decl,
1861 copy_basic_block))->inline_failed
1862 = CIF_ORIGINALLY_INDIRECT_CALL;
1863 if (dump_file)
1864 {
1865 fprintf (dump_file, "Created new direct edge to %s\n",
1866 dest->name ());
1867 }
1868 }
1869
1870 notice_special_calls (stmt);
1871 }
1872
1873 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1874 id->eh_map, id->eh_lp_nr);
1875
1876 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1877 {
1878 ssa_op_iter i;
1879 tree def;
1880
1881 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1882 if (TREE_CODE (def) == SSA_NAME)
1883 SSA_NAME_DEF_STMT (def) = stmt;
1884 }
1885
1886 gsi_next (&copy_gsi);
1887 }
1888 while (!gsi_end_p (copy_gsi));
1889
1890 copy_gsi = gsi_last_bb (copy_basic_block);
1891 }
1892
1893 return copy_basic_block;
1894 }
1895
1896 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1897 SSA form is quite easy, since the dominator relationship for the old
1898 basic blocks does not change.
1899
1900 There is however an exception: inlining might change the dominator
1901 relation across EH edges from basic blocks within the inlined function
1902 to landing pads in the function we inline into.
1903
1904 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1905 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1906 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1907 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1908 set, and this means that there will be no overlapping live ranges
1909 for the underlying symbol.
1910
1911 This might change in the future if we allow redirecting of EH edges
1912 and we might then want to change the way we build the CFG pre-inlining
1913 to include all the possible edges. */
1914 static void
1915 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1916 bool can_throw, bool nonlocal_goto)
1917 {
1918 edge e;
1919 edge_iterator ei;
1920
1921 FOR_EACH_EDGE (e, ei, bb->succs)
1922 if (!e->dest->aux
1923 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1924 {
1925 gimple phi;
1926 gimple_stmt_iterator si;
1927
1928 if (!nonlocal_goto)
1929 gcc_assert (e->flags & EDGE_EH);
1930
1931 if (!can_throw)
1932 gcc_assert (!(e->flags & EDGE_EH));
1933
1934 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1935 {
1936 edge re;
1937
1938 phi = gsi_stmt (si);
1939
1940 /* For abnormal goto/call edges the receiver can be the
1941 ENTRY_BLOCK. Do not assert this cannot happen. */
1942
1943 gcc_assert ((e->flags & EDGE_EH)
1944 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1945
1946 re = find_edge (ret_bb, e->dest);
1947 gcc_checking_assert (re);
1948 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1949 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1950
1951 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1952 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1953 }
1954 }
1955 }
1956
1957
1958 /* Copy edges from BB into its copy constructed earlier, scale the profile
1959 accordingly. EH edges are taken care of later. Assume the aux
1960 pointers point to the copies of each BB. Return true if any
1961 debug stmts are left after a statement that must end the basic block. */
1962
1963 static bool
1964 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1965 basic_block abnormal_goto_dest)
1966 {
1967 basic_block new_bb = (basic_block) bb->aux;
1968 edge_iterator ei;
1969 edge old_edge;
1970 gimple_stmt_iterator si;
1971 int flags;
1972 bool need_debug_cleanup = false;
1973
1974 /* Use the indices from the original blocks to create edges for the
1975 new ones. */
1976 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1977 if (!(old_edge->flags & EDGE_EH))
1978 {
1979 edge new_edge;
1980
1981 flags = old_edge->flags;
1982
1983 /* Return edges do get a FALLTHRU flag when they get inlined. */
1984 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1985 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
1986 flags |= EDGE_FALLTHRU;
1987 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1988 new_edge->count = apply_scale (old_edge->count, count_scale);
1989 new_edge->probability = old_edge->probability;
1990 }
1991
1992 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1993 return false;
1994
1995 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1996 {
1997 gimple copy_stmt;
1998 bool can_throw, nonlocal_goto;
1999
2000 copy_stmt = gsi_stmt (si);
2001 if (!is_gimple_debug (copy_stmt))
2002 update_stmt (copy_stmt);
2003
2004 /* Do this before the possible split_block. */
2005 gsi_next (&si);
2006
2007 /* If this tree could throw an exception, there are two
2008 cases where we need to add abnormal edge(s): the
2009 tree wasn't in a region and there is a "current
2010 region" in the caller; or the original tree had
2011 EH edges. In both cases split the block after the tree,
2012 and add abnormal edge(s) as needed; we need both
2013 those from the callee and the caller.
2014 We check whether the copy can throw, because the const
2015 propagation can change an INDIRECT_REF which throws
2016 into a COMPONENT_REF which doesn't. If the copy
2017 can throw, the original could also throw. */
2018 can_throw = stmt_can_throw_internal (copy_stmt);
2019 nonlocal_goto
2020 = (stmt_can_make_abnormal_goto (copy_stmt)
2021 && !computed_goto_p (copy_stmt));
2022
2023 if (can_throw || nonlocal_goto)
2024 {
2025 if (!gsi_end_p (si))
2026 {
2027 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2028 gsi_next (&si);
2029 if (gsi_end_p (si))
2030 need_debug_cleanup = true;
2031 }
2032 if (!gsi_end_p (si))
2033 /* Note that bb's predecessor edges aren't necessarily
2034 right at this point; split_block doesn't care. */
2035 {
2036 edge e = split_block (new_bb, copy_stmt);
2037
2038 new_bb = e->dest;
2039 new_bb->aux = e->src->aux;
2040 si = gsi_start_bb (new_bb);
2041 }
2042 }
2043
2044 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2045 make_eh_dispatch_edges (copy_stmt);
2046 else if (can_throw)
2047 make_eh_edges (copy_stmt);
2048
2049 /* If the call we inline cannot make an abnormal goto, do not add
2050 additional abnormal edges but only retain those already present
2051 in the original function body. */
2052 if (abnormal_goto_dest == NULL)
2053 nonlocal_goto = false;
2054 if (nonlocal_goto)
2055 {
2056 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2057
2058 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2059 nonlocal_goto = false;
2060 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2061 in OpenMP regions which aren't allowed to be left abnormally.
2062 So, there is no need to add an abnormal edge in that case. */
2063 else if (is_gimple_call (copy_stmt)
2064 && gimple_call_internal_p (copy_stmt)
2065 && (gimple_call_internal_fn (copy_stmt)
2066 == IFN_ABNORMAL_DISPATCHER)
2067 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2068 nonlocal_goto = false;
2069 else
2070 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2071 }
2072
2073 if ((can_throw || nonlocal_goto)
2074 && gimple_in_ssa_p (cfun))
2075 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2076 can_throw, nonlocal_goto);
2077 }
2078 return need_debug_cleanup;
2079 }
2080
2081 /* Copy the PHIs. All blocks and edges are copied, some blocks
2082 were possibly split and new outgoing EH edges inserted.
2083 BB points to the block of the original function and AUX pointers
2084 link the original and newly copied blocks. */
2085
2086 static void
2087 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2088 {
2089 basic_block const new_bb = (basic_block) bb->aux;
2090 edge_iterator ei;
2091 gimple phi;
2092 gimple_stmt_iterator si;
2093 edge new_edge;
2094 bool inserted = false;
2095
2096 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2097 {
2098 tree res, new_res;
2099 gimple new_phi;
2100
2101 phi = gsi_stmt (si);
2102 res = PHI_RESULT (phi);
2103 new_res = res;
2104 if (!virtual_operand_p (res))
2105 {
2106 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2107 new_phi = create_phi_node (new_res, new_bb);
2108 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2109 {
2110 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2111 tree arg;
2112 tree new_arg;
2113 edge_iterator ei2;
2114 location_t locus;
2115
2116 /* When doing partial cloning, we allow PHIs on the entry block
2117 as long as all the arguments are the same. Find any input
2118 edge to see which argument to copy. */
2119 if (!old_edge)
2120 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2121 if (!old_edge->src->aux)
2122 break;
2123
2124 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2125 new_arg = arg;
2126 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2127 gcc_assert (new_arg);
2128 /* With the return slot optimization we can end up with
2129 non-gimple (foo *)&this->m; fix that here. */
2130 if (TREE_CODE (new_arg) != SSA_NAME
2131 && TREE_CODE (new_arg) != FUNCTION_DECL
2132 && !is_gimple_val (new_arg))
2133 {
2134 gimple_seq stmts = NULL;
2135 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2136 gsi_insert_seq_on_edge (new_edge, stmts);
2137 inserted = true;
2138 }
2139 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2140 if (LOCATION_BLOCK (locus))
2141 {
2142 tree *n;
2143 n = (tree *) pointer_map_contains (id->decl_map,
2144 LOCATION_BLOCK (locus));
2145 gcc_assert (n);
2146 if (*n)
2147 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2148 else
2149 locus = LOCATION_LOCUS (locus);
2150 }
2151 else
2152 locus = LOCATION_LOCUS (locus);
2153
2154 add_phi_arg (new_phi, new_arg, new_edge, locus);
2155 }
2156 }
2157 }
2158
2159 /* Commit the delayed edge insertions. */
2160 if (inserted)
2161 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2162 gsi_commit_one_edge_insert (new_edge, NULL);
2163 }
2164
2165
2166 /* Wrapper for remap_decl so it can be used as a callback. */
2167
2168 static tree
2169 remap_decl_1 (tree decl, void *data)
2170 {
2171 return remap_decl (decl, (copy_body_data *) data);
2172 }
2173
2174 /* Build the struct function and associated datastructures for the new
2175 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2176 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2177
2178 static void
2179 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2180 {
2181 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2182 gcov_type count_scale;
2183
2184 if (!DECL_ARGUMENTS (new_fndecl))
2185 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2186 if (!DECL_RESULT (new_fndecl))
2187 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2188
2189 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2190 count_scale
2191 = GCOV_COMPUTE_SCALE (count,
2192 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2193 else
2194 count_scale = REG_BR_PROB_BASE;
2195
2196 /* Register specific tree functions. */
2197 gimple_register_cfg_hooks ();
2198
2199 /* Get clean struct function. */
2200 push_struct_function (new_fndecl);
2201
2202 /* We will rebuild these, so just sanity check that they are empty. */
2203 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2204 gcc_assert (cfun->local_decls == NULL);
2205 gcc_assert (cfun->cfg == NULL);
2206 gcc_assert (cfun->decl == new_fndecl);
2207
2208 /* Copy items we preserve during cloning. */
2209 cfun->static_chain_decl = src_cfun->static_chain_decl;
2210 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2211 cfun->function_end_locus = src_cfun->function_end_locus;
2212 cfun->curr_properties = src_cfun->curr_properties;
2213 cfun->last_verified = src_cfun->last_verified;
2214 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2215 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2216 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2217 cfun->stdarg = src_cfun->stdarg;
2218 cfun->after_inlining = src_cfun->after_inlining;
2219 cfun->can_throw_non_call_exceptions
2220 = src_cfun->can_throw_non_call_exceptions;
2221 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2222 cfun->returns_struct = src_cfun->returns_struct;
2223 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2224
2225 init_empty_tree_cfg ();
2226
2227 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2228 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2229 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2230 REG_BR_PROB_BASE);
2231 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2232 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2233 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2234 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2235 REG_BR_PROB_BASE);
2236 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2237 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2238 if (src_cfun->eh)
2239 init_eh_for_function ();
2240
2241 if (src_cfun->gimple_df)
2242 {
2243 init_tree_ssa (cfun);
2244 cfun->gimple_df->in_ssa_p = true;
2245 init_ssa_operands (cfun);
2246 }
2247 }
2248
2249 /* Helper function for copy_cfg_body. Move debug stmts from the end
2250 of NEW_BB to the beginning of successor basic blocks when needed. If the
2251 successor has multiple predecessors, reset the values of the debug
2252 stmts, otherwise keep them. */
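/* This is needed because copy_edges_for_bb does not split a block
   when only debug stmts follow a statement that must end the block;
   those trailing debug stmts are moved here instead.  */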
2253
2254 static void
2255 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2256 {
2257 edge e;
2258 edge_iterator ei;
2259 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2260
2261 if (gsi_end_p (si)
2262 || gsi_one_before_end_p (si)
2263 || !(stmt_can_throw_internal (gsi_stmt (si))
2264 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2265 return;
2266
2267 FOR_EACH_EDGE (e, ei, new_bb->succs)
2268 {
2269 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2270 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2271 while (is_gimple_debug (gsi_stmt (ssi)))
2272 {
2273 gimple stmt = gsi_stmt (ssi), new_stmt;
2274 tree var;
2275 tree value;
2276
2277 /* For the last edge move the debug stmts instead of copying
2278 them. */
2279 if (ei_one_before_end_p (ei))
2280 {
2281 si = ssi;
2282 gsi_prev (&ssi);
2283 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2284 gimple_debug_bind_reset_value (stmt);
2285 gsi_remove (&si, false);
2286 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2287 continue;
2288 }
2289
2290 if (gimple_debug_bind_p (stmt))
2291 {
2292 var = gimple_debug_bind_get_var (stmt);
2293 if (single_pred_p (e->dest))
2294 {
2295 value = gimple_debug_bind_get_value (stmt);
2296 value = unshare_expr (value);
2297 }
2298 else
2299 value = NULL_TREE;
2300 new_stmt = gimple_build_debug_bind (var, value, stmt);
2301 }
2302 else if (gimple_debug_source_bind_p (stmt))
2303 {
2304 var = gimple_debug_source_bind_get_var (stmt);
2305 value = gimple_debug_source_bind_get_value (stmt);
2306 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2307 }
2308 else
2309 gcc_unreachable ();
2310 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2311 id->debug_stmts.safe_push (new_stmt);
2312 gsi_prev (&ssi);
2313 }
2314 }
2315 }
2316
2317 /* Make a copy of the sub-loops of SRC_PARENT and place them
2318 as children of DEST_PARENT. */
2319
2320 static void
2321 copy_loops (copy_body_data *id,
2322 struct loop *dest_parent, struct loop *src_parent)
2323 {
2324 struct loop *src_loop = src_parent->inner;
2325 while (src_loop)
2326 {
2327 if (!id->blocks_to_copy
2328 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2329 {
2330 struct loop *dest_loop = alloc_loop ();
2331
2332 /* Assign the new loop its header and latch and associate
2333 those with the new loop. */
2334 if (src_loop->header != NULL)
2335 {
2336 dest_loop->header = (basic_block)src_loop->header->aux;
2337 dest_loop->header->loop_father = dest_loop;
2338 }
2339 if (src_loop->latch != NULL)
2340 {
2341 dest_loop->latch = (basic_block)src_loop->latch->aux;
2342 dest_loop->latch->loop_father = dest_loop;
2343 }
2344
2345 /* Copy loop meta-data. */
2346 copy_loop_info (src_loop, dest_loop);
2347
2348 /* Finally place it into the loop array and the loop tree. */
2349 place_new_loop (cfun, dest_loop);
2350 flow_loop_tree_node_add (dest_parent, dest_loop);
2351
2352 if (src_loop->simduid)
2353 {
2354 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2355 cfun->has_simduid_loops = true;
2356 }
2357 if (src_loop->force_vect)
2358 {
2359 dest_loop->force_vect = true;
2360 cfun->has_force_vect_loops = true;
2361 }
2362
2363 /* Recurse. */
2364 copy_loops (id, dest_loop, src_loop);
2365 }
2366 src_loop = src_loop->next;
2367 }
2368 }
2369
2370 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2371
2372 void
2373 redirect_all_calls (copy_body_data *id, basic_block bb)
2374 {
2375 gimple_stmt_iterator si;
2376 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2377 {
2378 if (is_gimple_call (gsi_stmt (si)))
2379 {
2380 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2381 if (edge)
2382 cgraph_redirect_edge_call_stmt_to_callee (edge);
2383 }
2384 }
2385 }
2386
2387 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2388 with each bb's frequency. Used when NODE has a 0-weight entry
2389 but we are about to inline it into a non-zero count call bb.
2390 See the comments for handle_missing_profiles() in predict.c for
2391 when this can happen for COMDATs. */
2392
2393 void
2394 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2395 {
2396 basic_block bb;
2397 edge_iterator ei;
2398 edge e;
2399 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2400
2401 FOR_ALL_BB_FN (bb, fn)
2402 {
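/* Effectively bb->count = COUNT * bb->frequency / BB_FREQ_MAX,
   computed in the REG_BR_PROB_BASE fixed point used throughout
   the profile code.  */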
2403 bb->count = apply_scale (count,
2404 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2405 FOR_EACH_EDGE (e, ei, bb->succs)
2406 e->count = apply_probability (e->src->count, e->probability);
2407 }
2408 }
2409
2410 /* Make a copy of the body of FN so that it can be inserted inline in
2411 another function. Walks FN via CFG, returns new fndecl. */
2412
2413 static tree
2414 copy_cfg_body (copy_body_data *id, gcov_type count, int frequency_scale,
2415 basic_block entry_block_map, basic_block exit_block_map,
2416 basic_block new_entry)
2417 {
2418 tree callee_fndecl = id->src_fn;
2419 /* Original cfun for the callee, doesn't change. */
2420 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2421 struct function *cfun_to_copy;
2422 basic_block bb;
2423 tree new_fndecl = NULL;
2424 bool need_debug_cleanup = false;
2425 gcov_type count_scale;
2426 int last;
2427 int incoming_frequency = 0;
2428 gcov_type incoming_count = 0;
2429
2430 /* This can happen for COMDAT routines that end up with 0 counts
2431 despite being called (see the comments for handle_missing_profiles()
2432 in predict.c as to why). Apply counts to the blocks in the callee
2433 before inlining, using the guessed edge frequencies, so that we don't
2434 end up with a 0-count inline body which can confuse downstream
2435 optimizations such as function splitting. */
2436 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2437 {
2438 /* Apply the larger of the call bb count and the total incoming
2439 call edge count to the callee. */
2440 gcov_type in_count = 0;
2441 struct cgraph_edge *in_edge;
2442 for (in_edge = id->src_node->callers; in_edge;
2443 in_edge = in_edge->next_caller)
2444 in_count += in_edge->count;
2445 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2446 }
2447
2448 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2449 count_scale
2450 = GCOV_COMPUTE_SCALE (count,
2451 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2452 else
2453 count_scale = REG_BR_PROB_BASE;
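/* COUNT_SCALE is a REG_BR_PROB_BASE fixed-point ratio; every count
   copied from the callee is scaled by COUNT divided by the callee's
   entry count, weighting the copied body by this particular call.  */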
2454
2455 /* Register specific tree functions. */
2456 gimple_register_cfg_hooks ();
2457
2458 /* If we are inlining just a region of the function, make sure to connect
2459 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2460 be part of a loop, we must compute the frequency and probability of
2461 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2462 probabilities of edges incoming from the nonduplicated region. */
2463 if (new_entry)
2464 {
2465 edge e;
2466 edge_iterator ei;
2467
2468 FOR_EACH_EDGE (e, ei, new_entry->preds)
2469 if (!e->src->aux)
2470 {
2471 incoming_frequency += EDGE_FREQUENCY (e);
2472 incoming_count += e->count;
2473 }
2474 incoming_count = apply_scale (incoming_count, count_scale);
2475 incoming_frequency
2476 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2477 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2478 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2479 }
2480
2481 /* Must have a CFG at this point. */
2482 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2483 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2484
2485 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2486
2487 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2488 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2489 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2490 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2491
2492 /* Duplicate any exception-handling regions. */
2493 if (cfun->eh)
2494 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2495 remap_decl_1, id);
2496
2497 /* Use aux pointers to map the original blocks to their copies. */
2498 FOR_EACH_BB_FN (bb, cfun_to_copy)
2499 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2500 {
2501 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2502 bb->aux = new_bb;
2503 new_bb->aux = bb;
2504 new_bb->loop_father = entry_block_map->loop_father;
2505 }
2506
2507 last = last_basic_block_for_fn (cfun);
2508
2509 /* Now that we've duplicated the blocks, duplicate their edges. */
2510 basic_block abnormal_goto_dest = NULL;
2511 if (id->gimple_call
2512 && stmt_can_make_abnormal_goto (id->gimple_call))
2513 {
2514 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2515
2516 bb = gimple_bb (id->gimple_call);
2517 gsi_next (&gsi);
2518 if (gsi_end_p (gsi))
2519 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2520 }
2521 FOR_ALL_BB_FN (bb, cfun_to_copy)
2522 if (!id->blocks_to_copy
2523 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2524 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2525 abnormal_goto_dest);
2526
2527 if (new_entry)
2528 {
2529 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2530 e->probability = REG_BR_PROB_BASE;
2531 e->count = incoming_count;
2532 }
2533
2534 /* Duplicate the loop tree, if available and wanted. */
2535 if (loops_for_fn (src_cfun) != NULL
2536 && current_loops != NULL)
2537 {
2538 copy_loops (id, entry_block_map->loop_father,
2539 get_loop (src_cfun, 0));
2540 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2541 loops_state_set (LOOPS_NEED_FIXUP);
2542 }
2543
2544 /* If the loop tree in the source function needed fixup, mark the
2545 destination loop tree for fixup, too. */
2546 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2547 loops_state_set (LOOPS_NEED_FIXUP);
2548
2549 if (gimple_in_ssa_p (cfun))
2550 FOR_ALL_BB_FN (bb, cfun_to_copy)
2551 if (!id->blocks_to_copy
2552 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2553 copy_phis_for_bb (bb, id);
2554
2555 FOR_ALL_BB_FN (bb, cfun_to_copy)
2556 if (bb->aux)
2557 {
2558 if (need_debug_cleanup
2559 && bb->index != ENTRY_BLOCK
2560 && bb->index != EXIT_BLOCK)
2561 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2562 /* Update call edge destinations. This cannot be done before loop
2563 info is updated, because we may split basic blocks. */
2564 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2565 redirect_all_calls (id, (basic_block)bb->aux);
2566 ((basic_block)bb->aux)->aux = NULL;
2567 bb->aux = NULL;
2568 }
2569
2570 /* Zero out AUX fields of the blocks newly created during EH edge
2571 insertion. */
2572 for (; last < last_basic_block_for_fn (cfun); last++)
2573 {
2574 if (need_debug_cleanup)
2575 maybe_move_debug_stmts_to_successors (id,
2576 BASIC_BLOCK_FOR_FN (cfun, last));
2577 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2578 /* Update call edge destinations. This cannot be done before loop
2579 info is updated, because we may split basic blocks. */
2580 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2581 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2582 }
2583 entry_block_map->aux = NULL;
2584 exit_block_map->aux = NULL;
2585
2586 if (id->eh_map)
2587 {
2588 pointer_map_destroy (id->eh_map);
2589 id->eh_map = NULL;
2590 }
2591
2592 return new_fndecl;
2593 }
2594
2595 /* Copy the debug STMT using ID. We deal with these statements in a
2596 special way: if any variable in their VALUE expression wasn't
2597 remapped yet, we won't remap it, because that would get decl uids
2598 out of sync, causing codegen differences between -g and -g0. If
2599 this arises, we drop the VALUE expression altogether. */
2600
2601 static void
2602 copy_debug_stmt (gimple stmt, copy_body_data *id)
2603 {
2604 tree t, *n;
2605 struct walk_stmt_info wi;
2606
2607 if (gimple_block (stmt))
2608 {
2609 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2610 gimple_set_block (stmt, n ? *n : id->block);
2611 }
2612
2613 /* Remap all the operands in COPY. */
2614 memset (&wi, 0, sizeof (wi));
2615 wi.info = id;
2616
2617 processing_debug_stmt = 1;
2618
2619 if (gimple_debug_source_bind_p (stmt))
2620 t = gimple_debug_source_bind_get_var (stmt);
2621 else
2622 t = gimple_debug_bind_get_var (stmt);
2623
2624 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2625 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2626 {
2627 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2628 t = *n;
2629 }
2630 else if (TREE_CODE (t) == VAR_DECL
2631 && !is_global_var (t)
2632 && !pointer_map_contains (id->decl_map, t))
2633 /* T is a non-localized variable. */;
2634 else
2635 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2636
2637 if (gimple_debug_bind_p (stmt))
2638 {
2639 gimple_debug_bind_set_var (stmt, t);
2640
2641 if (gimple_debug_bind_has_value_p (stmt))
2642 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2643 remap_gimple_op_r, &wi, NULL);
2644
2645 /* Punt if any decl couldn't be remapped. */
2646 if (processing_debug_stmt < 0)
2647 gimple_debug_bind_reset_value (stmt);
2648 }
2649 else if (gimple_debug_source_bind_p (stmt))
2650 {
2651 gimple_debug_source_bind_set_var (stmt, t);
2652 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2653 remap_gimple_op_r, &wi, NULL);
2654 /* When inlining and the source bind refers to one of the optimized
2655 away parameters, change the source bind into a normal debug bind
2656 referring to the corresponding DEBUG_EXPR_DECL that should have
2657 been bound before the call stmt. */
2658 t = gimple_debug_source_bind_get_value (stmt);
2659 if (t != NULL_TREE
2660 && TREE_CODE (t) == PARM_DECL
2661 && id->gimple_call)
2662 {
2663 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2664 unsigned int i;
2665 if (debug_args != NULL)
2666 {
2667 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2668 if ((**debug_args)[i] == DECL_ORIGIN (t)
2669 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2670 {
2671 t = (**debug_args)[i + 1];
2672 stmt->subcode = GIMPLE_DEBUG_BIND;
2673 gimple_debug_bind_set_value (stmt, t);
2674 break;
2675 }
2676 }
2677 }
2678 }
2679
2680 processing_debug_stmt = 0;
2681
2682 update_stmt (stmt);
2683 }
2684
2685 /* Process deferred debug stmts. In order to give values better odds
2686 of being successfully remapped, we delay the processing of debug
2687 stmts until all other stmts that might require remapping are
2688 processed. */
2689
2690 static void
2691 copy_debug_stmts (copy_body_data *id)
2692 {
2693 size_t i;
2694 gimple stmt;
2695
2696 if (!id->debug_stmts.exists ())
2697 return;
2698
2699 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2700 copy_debug_stmt (stmt, id);
2701
2702 id->debug_stmts.release ();
2703 }
2704
2705 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2706 another function. */
2707
2708 static tree
2709 copy_tree_body (copy_body_data *id)
2710 {
2711 tree fndecl = id->src_fn;
2712 tree body = DECL_SAVED_TREE (fndecl);
2713
2714 walk_tree (&body, copy_tree_body_r, id, NULL);
2715
2716 return body;
2717 }
2718
2719 /* Make a copy of the body of FN so that it can be inserted inline in
2720 another function. */
2721
2722 static tree
2723 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2724 basic_block entry_block_map, basic_block exit_block_map,
2725 basic_block new_entry)
2726 {
2727 tree fndecl = id->src_fn;
2728 tree body;
2729
2730 /* If this body has a CFG, walk CFG and copy. */
2731 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2732 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2733 new_entry);
2734 copy_debug_stmts (id);
2735
2736 return body;
2737 }
2738
2739 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2740 defined in function FN, or of a data member thereof. */
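/* E.g. when recursively inlining FN into itself, an argument such as
   &local, where LOCAL is an automatic variable of FN, must not be
   propagated as an invariant: the clone gets its own copy of LOCAL.
   (Illustrative.)  */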
2741
2742 static bool
2743 self_inlining_addr_expr (tree value, tree fn)
2744 {
2745 tree var;
2746
2747 if (TREE_CODE (value) != ADDR_EXPR)
2748 return false;
2749
2750 var = get_base_address (TREE_OPERAND (value, 0));
2751
2752 return var && auto_var_in_fn_p (var, fn);
2753 }
2754
2755 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2756 lexical block and line number information from BASE_STMT, if given,
2757 or from the last stmt of the block otherwise. */
2758
2759 static gimple
2760 insert_init_debug_bind (copy_body_data *id,
2761 basic_block bb, tree var, tree value,
2762 gimple base_stmt)
2763 {
2764 gimple note;
2765 gimple_stmt_iterator gsi;
2766 tree tracked_var;
2767
2768 if (!gimple_in_ssa_p (id->src_cfun))
2769 return NULL;
2770
2771 if (!MAY_HAVE_DEBUG_STMTS)
2772 return NULL;
2773
2774 tracked_var = target_for_debug_bind (var);
2775 if (!tracked_var)
2776 return NULL;
2777
2778 if (bb)
2779 {
2780 gsi = gsi_last_bb (bb);
2781 if (!base_stmt && !gsi_end_p (gsi))
2782 base_stmt = gsi_stmt (gsi);
2783 }
2784
2785 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2786
2787 if (bb)
2788 {
2789 if (!gsi_end_p (gsi))
2790 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2791 else
2792 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2793 }
2794
2795 return note;
2796 }
2797
2798 static void
2799 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2800 {
2801 /* If VAR represents a zero-sized variable, it's possible that the
2802 assignment statement may result in no gimple statements. */
2803 if (init_stmt)
2804 {
2805 gimple_stmt_iterator si = gsi_last_bb (bb);
2806
2807 /* We can end up with init statements that store to a non-register
2808 from a rhs with a conversion. Handle that here by forcing the
2809 rhs into a temporary. gimple_regimplify_operands is not
2810 prepared to do this for us. */
2811 if (!is_gimple_debug (init_stmt)
2812 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2813 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2814 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2815 {
2816 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2817 gimple_expr_type (init_stmt),
2818 gimple_assign_rhs1 (init_stmt));
2819 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2820 GSI_NEW_STMT);
2821 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2822 gimple_assign_set_rhs1 (init_stmt, rhs);
2823 }
2824 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2825 gimple_regimplify_operands (init_stmt, &si);
2826
2827 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2828 {
2829 tree def = gimple_assign_lhs (init_stmt);
2830 insert_init_debug_bind (id, bb, def, def, init_stmt);
2831 }
2832 }
2833 }
2834
2835 /* Initialize parameter P with VALUE. If needed, produce the init statement
2836 at the end of BB. When BB is NULL, we return the init statement to be
2837 output later. */
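/* A sketch of the common case, assuming a parameter int P passed the
   actual argument X + 1: we create a local variable copy of P
   (copy_decl_to_var), map P to it, and append the init statement

     P.copy = X + 1;

   at the end of BB.  "P.copy" is a made-up name for illustration.  */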
2838 static gimple
2839 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2840 basic_block bb, tree *vars)
2841 {
2842 gimple init_stmt = NULL;
2843 tree var;
2844 tree rhs = value;
2845 tree def = (gimple_in_ssa_p (cfun)
2846 ? ssa_default_def (id->src_cfun, p) : NULL);
2847
2848 if (value
2849 && value != error_mark_node
2850 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2851 {
2852 /* If we can match up types by promotion/demotion do so. */
2853 if (fold_convertible_p (TREE_TYPE (p), value))
2854 rhs = fold_convert (TREE_TYPE (p), value);
2855 else
2856 {
2857 /* ??? For valid programs we should not end up here.
2858 Still if we end up with truly mismatched types here, fall back
2859 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2860 GIMPLE to the following passes. */
2861 if (!is_gimple_reg_type (TREE_TYPE (value))
2862 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2863 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2864 else
2865 rhs = build_zero_cst (TREE_TYPE (p));
2866 }
2867 }
2868
2869 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2870 here since the type of this decl must be visible to the calling
2871 function. */
2872 var = copy_decl_to_var (p, id);
2873
2874 /* Declare this new variable. */
2875 DECL_CHAIN (var) = *vars;
2876 *vars = var;
2877
2878 /* Make gimplifier happy about this variable. */
2879 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2880
2881 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2882 we would not need to create a new variable here at all, if it
2883 weren't for debug info. Still, we can just use the argument
2884 value. */
2885 if (TREE_READONLY (p)
2886 && !TREE_ADDRESSABLE (p)
2887 && value && !TREE_SIDE_EFFECTS (value)
2888 && !def)
2889 {
2890 /* We may produce non-gimple trees by adding NOPs or introduce
2891 invalid sharing when the operand is not really constant.
2892 It is not a big deal to prohibit constant propagation here, as
2893 we will constant propagate in the DOM1 pass anyway. */
2894 if (is_gimple_min_invariant (value)
2895 && useless_type_conversion_p (TREE_TYPE (p),
2896 TREE_TYPE (value))
2897 /* We have to be very careful about ADDR_EXPR. Make sure
2898 the base variable isn't a local variable of the inlined
2899 function, e.g., when doing recursive inlining, direct or
2900 mutually-recursive or whatever, which is why we don't
2901 just test whether fn == current_function_decl. */
2902 && ! self_inlining_addr_expr (value, fn))
2903 {
2904 insert_decl_map (id, p, value);
2905 insert_debug_decl_map (id, p, var);
2906 return insert_init_debug_bind (id, bb, var, value, NULL);
2907 }
2908 }
2909
2910 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2911 that way, when the PARM_DECL is encountered, it will be
2912 automatically replaced by the VAR_DECL. */
2913 insert_decl_map (id, p, var);
2914
2915 /* Even if P was TREE_READONLY, the new VAR should not be.
2916 In the original code, we would have constructed a
2917 temporary, and then the function body would have never
2918 changed the value of P. However, now, we will be
2919 constructing VAR directly. The constructor body may
2920 change its value multiple times as it is being
2921 constructed. Therefore, it must not be TREE_READONLY;
2922 the back-end assumes that a TREE_READONLY variable is
2923 assigned to only once. */
2924 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2925 TREE_READONLY (var) = 0;
2926
2927 /* If there is no setup required and we are in SSA, take the easy route
2928 replacing all SSA names representing the function parameter by the
2929 SSA name passed to the function.
2930
2931 We need to construct a map for the variable anyway, as it might be
2932 used in different SSA names when the parameter is set in the function.
2933
2934 Do the replacement at -O0 for const arguments replaced by constants.
2935 This is important for builtin_constant_p and other constructs requiring
2936 a constant argument to be visible in the inlined function body. */
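/* E.g. for

     static inline int f (const int c)
     { return __builtin_constant_p (c); }

   inlining f (3) must map C's default definition directly to 3 even
   at -O0 so that the builtin can fold to 1 in the inlined body.
   (Illustrative sketch.)  */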
2937 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2938 && (optimize
2939 || (TREE_READONLY (p)
2940 && is_gimple_min_invariant (rhs)))
2941 && (TREE_CODE (rhs) == SSA_NAME
2942 || is_gimple_min_invariant (rhs))
2943 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2944 {
2945 insert_decl_map (id, def, rhs);
2946 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2947 }
2948
2949 /* If the value of the argument is never used, don't bother initializing
2950 it. */
2951 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2952 {
2953 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2954 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2955 }
2956
2957 /* Initialize this VAR_DECL from the equivalent argument. Convert
2958 the argument to the proper type in case it was promoted. */
2959 if (value)
2960 {
2961 if (rhs == error_mark_node)
2962 {
2963 insert_decl_map (id, p, var);
2964 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2965 }
2966
2967 STRIP_USELESS_TYPE_CONVERSION (rhs);
2968
2969 /* If we are in SSA form properly remap the default definition
2970 or assign to a dummy SSA name if the parameter is unused and
2971 we are not optimizing. */
2972 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2973 {
2974 if (def)
2975 {
2976 def = remap_ssa_name (def, id);
2977 init_stmt = gimple_build_assign (def, rhs);
2978 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2979 set_ssa_default_def (cfun, var, NULL);
2980 }
2981 else if (!optimize)
2982 {
2983 def = make_ssa_name (var, NULL);
2984 init_stmt = gimple_build_assign (def, rhs);
2985 }
2986 }
2987 else
2988 init_stmt = gimple_build_assign (var, rhs);
2989
2990 if (bb && init_stmt)
2991 insert_init_stmt (id, bb, init_stmt);
2992 }
2993 return init_stmt;
2994 }
2995
2996 /* Generate code to initialize the parameters of the function at the
2997 top of the stack in ID from the GIMPLE_CALL STMT. */
2998
2999 static void
3000 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3001 tree fn, basic_block bb)
3002 {
3003 tree parms;
3004 size_t i;
3005 tree p;
3006 tree vars = NULL_TREE;
3007 tree static_chain = gimple_call_chain (stmt);
3008
3009 /* Figure out what the parameters are. */
3010 parms = DECL_ARGUMENTS (fn);
3011
3012 /* Loop through the parameter declarations, replacing each with an
3013 equivalent VAR_DECL, appropriately initialized. */
3014 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3015 {
3016 tree val;
3017 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3018 setup_one_parameter (id, p, val, fn, bb, &vars);
3019 }
3020 /* After remapping the parameters, remap their types. This has to be done
3021 in a second loop over all parameters to appropriately remap
3022 variable sized arrays when the size is specified in a
3023 parameter following the array. */
3024 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3025 {
3026 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
3027 if (varp
3028 && TREE_CODE (*varp) == VAR_DECL)
3029 {
3030 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3031 ? ssa_default_def (id->src_cfun, p) : NULL);
3032 tree var = *varp;
3033 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3034 /* Also remap the default definition if it was remapped
3035 to the default definition of the parameter replacement
3036 by the parameter setup. */
3037 if (def)
3038 {
3039 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3040 if (defp
3041 && TREE_CODE (*defp) == SSA_NAME
3042 && SSA_NAME_VAR (*defp) == var)
3043 TREE_TYPE (*defp) = TREE_TYPE (var);
3044 }
3045 }
3046 }
3047
3048 /* Initialize the static chain. */
3049 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3050 gcc_assert (fn != current_function_decl);
3051 if (p)
3052 {
3053 /* No static chain? Seems like a bug in tree-nested.c. */
3054 gcc_assert (static_chain);
3055
3056 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3057 }
3058
3059 declare_inline_vars (id->block, vars);
3060 }
3061
3062
3063 /* Declare a return variable to replace the RESULT_DECL for the
3064 function we are calling. An appropriate DECL_STMT is returned.
3065 The USE_STMT is filled to contain a use of the declaration to
3066 indicate the return value of the function.
3067
3068 RETURN_SLOT, if non-null, is the place where the result is to be
3069 stored. It is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST,
3070 if non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.
3071
3072 The return value is a (possibly null) value that holds the result
3073 as seen by the caller. */
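/* A sketch of the usual flow: for a call

     x = f ();

   MODIFY_DEST is x.  When x can be written directly (no type
   promotion, not addressable, not global), it is reused as the
   return variable; otherwise a temporary is declared and a use of
   it is returned so the caller can emit the final assignment.
   (Illustrative; names are made up.)  */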
3074
3075 static tree
3076 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3077 basic_block entry_bb)
3078 {
3079 tree callee = id->src_fn;
3080 tree result = DECL_RESULT (callee);
3081 tree callee_type = TREE_TYPE (result);
3082 tree caller_type;
3083 tree var, use;
3084
3085 /* Handle type-mismatches in the function declaration return type
3086 vs. the call expression. */
3087 if (modify_dest)
3088 caller_type = TREE_TYPE (modify_dest);
3089 else
3090 caller_type = TREE_TYPE (TREE_TYPE (callee));
3091
3092 /* We don't need to do anything for functions that don't return anything. */
3093 if (VOID_TYPE_P (callee_type))
3094 return NULL_TREE;
3095
3096 /* If there was a return slot, then the return value is the
3097 dereferenced address of that object. */
3098 if (return_slot)
3099 {
3100 /* The front end shouldn't have used both return_slot and
3101 a modify expression. */
3102 gcc_assert (!modify_dest);
3103 if (DECL_BY_REFERENCE (result))
3104 {
3105 tree return_slot_addr = build_fold_addr_expr (return_slot);
3106 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3107
3108 /* We are going to construct *&return_slot and we can't do that
3109 for variables believed not to be addressable.
3110
3111 FIXME: This check can possibly match, because values returned
3112 via the return slot optimization are not believed to have their
3113 address taken by alias analysis. */
3114 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3115 var = return_slot_addr;
3116 }
3117 else
3118 {
3119 var = return_slot;
3120 gcc_assert (TREE_CODE (var) != SSA_NAME);
3121 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3122 }
3123 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3124 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3125 && !DECL_GIMPLE_REG_P (result)
3126 && DECL_P (var))
3127 DECL_GIMPLE_REG_P (var) = 0;
3128 use = NULL;
3129 goto done;
3130 }
3131
3132 /* All types requiring non-trivial constructors should have been handled. */
3133 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3134
3135 /* Attempt to avoid creating a new temporary variable. */
3136 if (modify_dest
3137 && TREE_CODE (modify_dest) != SSA_NAME)
3138 {
3139 bool use_it = false;
3140
3141 /* We can't use MODIFY_DEST if there's type promotion involved. */
3142 if (!useless_type_conversion_p (callee_type, caller_type))
3143 use_it = false;
3144
3145 /* ??? If we're assigning to a variable sized type, then we must
3146 reuse the destination variable, because we've no good way to
3147 create variable sized temporaries at this point. */
3148 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3149 use_it = true;
3150
3151 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3152 reuse it as the result of the call directly. Don't do this if
3153 it would promote MODIFY_DEST to addressable. */
3154 else if (TREE_ADDRESSABLE (result))
3155 use_it = false;
3156 else
3157 {
3158 tree base_m = get_base_address (modify_dest);
3159
3160 /* If the base isn't a decl, then it's a pointer, and we don't
3161 know where that's going to go. */
3162 if (!DECL_P (base_m))
3163 use_it = false;
3164 else if (is_global_var (base_m))
3165 use_it = false;
3166 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3167 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3168 && !DECL_GIMPLE_REG_P (result)
3169 && DECL_GIMPLE_REG_P (base_m))
3170 use_it = false;
3171 else if (!TREE_ADDRESSABLE (base_m))
3172 use_it = true;
3173 }
3174
3175 if (use_it)
3176 {
3177 var = modify_dest;
3178 use = NULL;
3179 goto done;
3180 }
3181 }
3182
3183 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3184
3185 var = copy_result_decl_to_var (result, id);
3186 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3187
3188 /* Do not have the rest of GCC warn about this variable as it should
3189 not be visible to the user. */
3190 TREE_NO_WARNING (var) = 1;
3191
3192 declare_inline_vars (id->block, var);
3193
3194 /* Build the use expr. If the return type of the function was
3195 promoted, convert it back to the expected type. */
3196 use = var;
3197 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3198 {
3199 /* If we can match up types by promotion/demotion do so. */
3200 if (fold_convertible_p (caller_type, var))
3201 use = fold_convert (caller_type, var);
3202 else
3203 {
3204 /* ??? For valid programs we should not end up here.
3205 Still if we end up with truly mismatched types here, fall back
3206 to using a MEM_REF to not leak invalid GIMPLE to the following
3207 passes. */
3208 /* Prevent var from being written into SSA form. */
3209 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3210 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3211 DECL_GIMPLE_REG_P (var) = false;
3212 else if (is_gimple_reg_type (TREE_TYPE (var)))
3213 TREE_ADDRESSABLE (var) = true;
3214 use = fold_build2 (MEM_REF, caller_type,
3215 build_fold_addr_expr (var),
3216 build_int_cst (ptr_type_node, 0));
3217 }
3218 }
3219
3220 STRIP_USELESS_TYPE_CONVERSION (use);
3221
3222 if (DECL_BY_REFERENCE (result))
3223 {
3224 TREE_ADDRESSABLE (var) = 1;
3225 var = build_fold_addr_expr (var);
3226 }
3227
3228 done:
3229 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3230 way, when the RESULT_DECL is encountered, it will be
3231 automatically replaced by the VAR_DECL.
3232
3233 When returning by reference, ensure that RESULT_DECL remaps to
3234 gimple_val. */
3235 if (DECL_BY_REFERENCE (result)
3236 && !is_gimple_val (var))
3237 {
3238 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3239 insert_decl_map (id, result, temp);
3240 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3241 its default-def SSA_NAME. */
3242 if (gimple_in_ssa_p (id->src_cfun)
3243 && is_gimple_reg (result))
3244 {
3245 temp = make_ssa_name (temp, NULL);
3246 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3247 }
3248 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3249 }
3250 else
3251 insert_decl_map (id, result, var);
3252
3253 /* Remember this so we can ignore it in remap_decls. */
3254 id->retvar = var;
3255
3256 return use;
3257 }
3258
3259 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3260 to a local label. */
3261
3262 static tree
3263 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3264 {
3265 tree node = *nodep;
3266 tree fn = (tree) fnp;
3267
3268 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3269 return node;
3270
3271 if (TYPE_P (node))
3272 *walk_subtrees = 0;
3273
3274 return NULL_TREE;
3275 }
3276
3277 /* Determine if the function can be copied. If so return NULL. If
3278 not return a string describing the reason for failure. */
3279
3280 static const char *
3281 copy_forbidden (struct function *fun, tree fndecl)
3282 {
3283 const char *reason = fun->cannot_be_copied_reason;
3284 tree decl;
3285 unsigned ix;
3286
3287 /* Only examine the function once. */
3288 if (fun->cannot_be_copied_set)
3289 return reason;
3290
3291 /* We cannot copy a function that receives a non-local goto
3292 because we cannot remap the destination label used in the
3293 function that is performing the non-local goto. */
3294 /* ??? Actually, this should be possible, if we work at it.
3295 No doubt there's just a handful of places that simply
3296 assume it doesn't happen and don't substitute properly. */
3297 if (fun->has_nonlocal_label)
3298 {
3299 reason = G_("function %q+F can never be copied "
3300 "because it receives a non-local goto");
3301 goto fail;
3302 }
3303
3304 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3305 if (TREE_CODE (decl) == VAR_DECL
3306 && TREE_STATIC (decl)
3307 && !DECL_EXTERNAL (decl)
3308 && DECL_INITIAL (decl)
3309 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3310 has_label_address_in_static_1,
3311 fndecl))
3312 {
3313 reason = G_("function %q+F can never be copied because it saves "
3314 "address of local label in a static variable");
3315 goto fail;
3316 }
3317
3318 fail:
3319 fun->cannot_be_copied_reason = reason;
3320 fun->cannot_be_copied_set = true;
3321 return reason;
3322 }
3323
3324
3325 static const char *inline_forbidden_reason;
3326
3327 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3328 iff a function cannot be inlined. Also sets the reason why. */
3329
3330 static tree
3331 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3332 struct walk_stmt_info *wip)
3333 {
3334 tree fn = (tree) wip->info;
3335 tree t;
3336 gimple stmt = gsi_stmt (*gsi);
3337
3338 switch (gimple_code (stmt))
3339 {
3340 case GIMPLE_CALL:
3341 /* Refuse to inline an alloca call unless the user explicitly forced so, as
3342 this may change the program's memory overhead drastically when the
3343 function using alloca is called in a loop. In the GCC present in
3344 SPEC2000, inlining into schedule_block caused it to require 2GB of
3345 RAM instead of 256MB. Don't do so for alloca calls emitted for
3346 VLA objects, as those can't cause unbounded growth (they're always
3347 wrapped inside stack_save/stack_restore regions). */
3348 if (gimple_alloca_call_p (stmt)
3349 && !gimple_call_alloca_for_var_p (stmt)
3350 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3351 {
3352 inline_forbidden_reason
3353 = G_("function %q+F can never be inlined because it uses "
3354 "alloca (override using the always_inline attribute)");
3355 *handled_ops_p = true;
3356 return fn;
3357 }
3358
3359 t = gimple_call_fndecl (stmt);
3360 if (t == NULL_TREE)
3361 break;
3362
3363 /* We cannot inline functions that call setjmp. */
3364 if (setjmp_call_p (t))
3365 {
3366 inline_forbidden_reason
3367 = G_("function %q+F can never be inlined because it uses setjmp");
3368 *handled_ops_p = true;
3369 return t;
3370 }
3371
3372 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3373 switch (DECL_FUNCTION_CODE (t))
3374 {
3375 /* We cannot inline functions that take a variable number of
3376 arguments. */
3377 case BUILT_IN_VA_START:
3378 case BUILT_IN_NEXT_ARG:
3379 case BUILT_IN_VA_END:
3380 inline_forbidden_reason
3381 = G_("function %q+F can never be inlined because it "
3382 "uses variable argument lists");
3383 *handled_ops_p = true;
3384 return t;
3385
3386 case BUILT_IN_LONGJMP:
3387 /* We can't inline functions that call __builtin_longjmp at
3388 all. The non-local goto machinery really requires the
3389 destination be in a different function. If we allow the
3390 function calling __builtin_longjmp to be inlined into the
3391 function calling __builtin_setjmp, Things will Go Awry. */
3392 inline_forbidden_reason
3393 = G_("function %q+F can never be inlined because "
3394 "it uses setjmp-longjmp exception handling");
3395 *handled_ops_p = true;
3396 return t;
3397
3398 case BUILT_IN_NONLOCAL_GOTO:
3399 /* Similarly. */
3400 inline_forbidden_reason
3401 = G_("function %q+F can never be inlined because "
3402 "it uses non-local goto");
3403 *handled_ops_p = true;
3404 return t;
3405
3406 case BUILT_IN_RETURN:
3407 case BUILT_IN_APPLY_ARGS:
3408 /* If a __builtin_apply_args caller were inlined,
3409 it would be saving arguments of the function it has
3410 been inlined into. Similarly, __builtin_return would
3411 return from the function the callee has been inlined into. */
3412 inline_forbidden_reason
3413 = G_("function %q+F can never be inlined because "
3414 "it uses __builtin_return or __builtin_apply_args");
3415 *handled_ops_p = true;
3416 return t;
3417
3418 default:
3419 break;
3420 }
3421 break;
3422
3423 case GIMPLE_GOTO:
3424 t = gimple_goto_dest (stmt);
3425
3426 /* We will not inline a function which uses a computed goto. The
3427 addresses of its local labels, which may be tucked into
3428 global storage, are of course not constant across
3429 instantiations, which causes unexpected behavior. */
3430 if (TREE_CODE (t) != LABEL_DECL)
3431 {
3432 inline_forbidden_reason
3433 = G_("function %q+F can never be inlined "
3434 "because it contains a computed goto");
3435 *handled_ops_p = true;
3436 return t;
3437 }
3438 break;
3439
3440 default:
3441 break;
3442 }
3443
3444 *handled_ops_p = false;
3445 return NULL_TREE;
3446 }
3447
3448 /* Return true if FNDECL is a function that cannot be inlined into
3449 another one. */
3450
3451 static bool
3452 inline_forbidden_p (tree fndecl)
3453 {
3454 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3455 struct walk_stmt_info wi;
3456 struct pointer_set_t *visited_nodes;
3457 basic_block bb;
3458 bool forbidden_p = false;
3459
3460 /* First check for shared reasons not to copy the code. */
3461 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3462 if (inline_forbidden_reason != NULL)
3463 return true;
3464
3465 /* Next, walk the statements of the function looking for
3466 constructs we can't handle, or that are non-optimal for inlining. */
3467 visited_nodes = pointer_set_create ();
3468 memset (&wi, 0, sizeof (wi));
3469 wi.info = (void *) fndecl;
3470 wi.pset = visited_nodes;
3471
3472 FOR_EACH_BB_FN (bb, fun)
3473 {
3474 gimple ret;
3475 gimple_seq seq = bb_seq (bb);
3476 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3477 forbidden_p = (ret != NULL);
3478 if (forbidden_p)
3479 break;
3480 }
3481
3482 pointer_set_destroy (visited_nodes);
3483 return forbidden_p;
3484 }
3485 \f
3486 /* Return false if the function FNDECL cannot be inlined on account of its
3487 attributes, true otherwise. */
3488 static bool
3489 function_attribute_inlinable_p (const_tree fndecl)
3490 {
3491 if (targetm.attribute_table)
3492 {
3493 const_tree a;
3494
3495 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3496 {
3497 const_tree name = TREE_PURPOSE (a);
3498 int i;
3499
3500 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3501 if (is_attribute_p (targetm.attribute_table[i].name, name))
3502 return targetm.function_attribute_inlinable_p (fndecl);
3503 }
3504 }
3505
3506 return true;
3507 }
3508
3509 /* Returns nonzero if FN is a function that does not have any
3510 fundamental inline blocking properties. */
3511
3512 bool
3513 tree_inlinable_function_p (tree fn)
3514 {
3515 bool inlinable = true;
3516 bool do_warning;
3517 tree always_inline;
3518
3519 /* If we've already decided this function shouldn't be inlined,
3520 there's no need to check again. */
3521 if (DECL_UNINLINABLE (fn))
3522 return false;
3523
3524 /* We only warn for functions declared `inline' by the user. */
3525 do_warning = (warn_inline
3526 && DECL_DECLARED_INLINE_P (fn)
3527 && !DECL_NO_INLINE_WARNING_P (fn)
3528 && !DECL_IN_SYSTEM_HEADER (fn));
3529
3530 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3531
3532 if (flag_no_inline
3533 && always_inline == NULL)
3534 {
3535 if (do_warning)
3536 warning (OPT_Winline, "function %q+F can never be inlined because it "
3537 "is suppressed using -fno-inline", fn);
3538 inlinable = false;
3539 }
3540
3541 else if (!function_attribute_inlinable_p (fn))
3542 {
3543 if (do_warning)
3544 warning (OPT_Winline, "function %q+F can never be inlined because it "
3545 "uses attributes conflicting with inlining", fn);
3546 inlinable = false;
3547 }
3548
3549 else if (inline_forbidden_p (fn))
3550 {
3551 /* See if we should warn about uninlinable functions. Previously,
3552 some of these warnings would be issued while trying to expand
3553 the function inline, but that would cause multiple warnings
3554 about functions that would for example call alloca. But since
3555 this is a property of the function, just one warning is enough.
3556 As a bonus we can now give more details about the reason why a
3557 function is not inlinable. */
3558 if (always_inline)
3559 error (inline_forbidden_reason, fn);
3560 else if (do_warning)
3561 warning (OPT_Winline, inline_forbidden_reason, fn);
3562
3563 inlinable = false;
3564 }
3565
3566 /* Squirrel away the result so that we don't have to check again. */
3567 DECL_UNINLINABLE (fn) = !inlinable;
3568
3569 return inlinable;
3570 }
3571
3572 /* Estimate the cost of a memory move. Use machine-dependent
3573 word size and take possible memcpy call into account. */
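/* As an illustration (assuming, hypothetically, MOVE_MAX_PIECES == 8
and a MOVE_RATIO of 4): a 16-byte structure copy is costed at
(16 + 8 - 1) / 8 == 2 units, while a 64-byte copy exceeds the
8 * 4 == 32 byte threshold and is costed as a memcpy call,
i.e. 4 units (3 arguments plus the call itself). */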
3574
3575 int
3576 estimate_move_cost (tree type)
3577 {
3578 HOST_WIDE_INT size;
3579
3580 gcc_assert (!VOID_TYPE_P (type));
3581
3582 if (TREE_CODE (type) == VECTOR_TYPE)
3583 {
3584 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3585 enum machine_mode simd
3586 = targetm.vectorize.preferred_simd_mode (inner);
3587 int simd_mode_size = GET_MODE_SIZE (simd);
3588 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3589 / simd_mode_size);
3590 }
3591
3592 size = int_size_in_bytes (type);
3593
3594 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3595 /* Cost of a memcpy call, 3 arguments and the call. */
3596 return 4;
3597 else
3598 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3599 }
3600
3601 /* Returns the cost of operation CODE, according to WEIGHTS. */
3602
3603 static int
3604 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3605 tree op1 ATTRIBUTE_UNUSED, tree op2)
3606 {
3607 switch (code)
3608 {
3609 /* These are "free" conversions, or their presumed cost
3610 is folded into other operations. */
3611 case RANGE_EXPR:
3612 CASE_CONVERT:
3613 case COMPLEX_EXPR:
3614 case PAREN_EXPR:
3615 case VIEW_CONVERT_EXPR:
3616 return 0;
3617
3618 /* Assign cost of 1 to usual operations.
3619 ??? We may consider mapping RTL costs to this. */
3620 case COND_EXPR:
3621 case VEC_COND_EXPR:
3622 case VEC_PERM_EXPR:
3623
3624 case PLUS_EXPR:
3625 case POINTER_PLUS_EXPR:
3626 case MINUS_EXPR:
3627 case MULT_EXPR:
3628 case MULT_HIGHPART_EXPR:
3629 case FMA_EXPR:
3630
3631 case ADDR_SPACE_CONVERT_EXPR:
3632 case FIXED_CONVERT_EXPR:
3633 case FIX_TRUNC_EXPR:
3634
3635 case NEGATE_EXPR:
3636 case FLOAT_EXPR:
3637 case MIN_EXPR:
3638 case MAX_EXPR:
3639 case ABS_EXPR:
3640
3641 case LSHIFT_EXPR:
3642 case RSHIFT_EXPR:
3643 case LROTATE_EXPR:
3644 case RROTATE_EXPR:
3645 case VEC_LSHIFT_EXPR:
3646 case VEC_RSHIFT_EXPR:
3647
3648 case BIT_IOR_EXPR:
3649 case BIT_XOR_EXPR:
3650 case BIT_AND_EXPR:
3651 case BIT_NOT_EXPR:
3652
3653 case TRUTH_ANDIF_EXPR:
3654 case TRUTH_ORIF_EXPR:
3655 case TRUTH_AND_EXPR:
3656 case TRUTH_OR_EXPR:
3657 case TRUTH_XOR_EXPR:
3658 case TRUTH_NOT_EXPR:
3659
3660 case LT_EXPR:
3661 case LE_EXPR:
3662 case GT_EXPR:
3663 case GE_EXPR:
3664 case EQ_EXPR:
3665 case NE_EXPR:
3666 case ORDERED_EXPR:
3667 case UNORDERED_EXPR:
3668
3669 case UNLT_EXPR:
3670 case UNLE_EXPR:
3671 case UNGT_EXPR:
3672 case UNGE_EXPR:
3673 case UNEQ_EXPR:
3674 case LTGT_EXPR:
3675
3676 case CONJ_EXPR:
3677
3678 case PREDECREMENT_EXPR:
3679 case PREINCREMENT_EXPR:
3680 case POSTDECREMENT_EXPR:
3681 case POSTINCREMENT_EXPR:
3682
3683 case REALIGN_LOAD_EXPR:
3684
3685 case REDUC_MAX_EXPR:
3686 case REDUC_MIN_EXPR:
3687 case REDUC_PLUS_EXPR:
3688 case WIDEN_SUM_EXPR:
3689 case WIDEN_MULT_EXPR:
3690 case DOT_PROD_EXPR:
3691 case WIDEN_MULT_PLUS_EXPR:
3692 case WIDEN_MULT_MINUS_EXPR:
3693 case WIDEN_LSHIFT_EXPR:
3694
3695 case VEC_WIDEN_MULT_HI_EXPR:
3696 case VEC_WIDEN_MULT_LO_EXPR:
3697 case VEC_WIDEN_MULT_EVEN_EXPR:
3698 case VEC_WIDEN_MULT_ODD_EXPR:
3699 case VEC_UNPACK_HI_EXPR:
3700 case VEC_UNPACK_LO_EXPR:
3701 case VEC_UNPACK_FLOAT_HI_EXPR:
3702 case VEC_UNPACK_FLOAT_LO_EXPR:
3703 case VEC_PACK_TRUNC_EXPR:
3704 case VEC_PACK_SAT_EXPR:
3705 case VEC_PACK_FIX_TRUNC_EXPR:
3706 case VEC_WIDEN_LSHIFT_HI_EXPR:
3707 case VEC_WIDEN_LSHIFT_LO_EXPR:
3708
3709 return 1;
3710
3711 /* A few special cases of expensive operations. This is useful
3712 to avoid inlining functions having too many of these. */
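/* For instance, "x / y" with a non-constant divisor is costed at
weights->div_mod_cost (10 in the time weights set up by
init_inline_once below), whereas "x / 8" is costed at 1, since
division by a constant is normally expanded into cheaper
multiply/shift sequences. */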
3713 case TRUNC_DIV_EXPR:
3714 case CEIL_DIV_EXPR:
3715 case FLOOR_DIV_EXPR:
3716 case ROUND_DIV_EXPR:
3717 case EXACT_DIV_EXPR:
3718 case TRUNC_MOD_EXPR:
3719 case CEIL_MOD_EXPR:
3720 case FLOOR_MOD_EXPR:
3721 case ROUND_MOD_EXPR:
3722 case RDIV_EXPR:
3723 if (TREE_CODE (op2) != INTEGER_CST)
3724 return weights->div_mod_cost;
3725 return 1;
3726
3727 default:
3728 /* We expect a copy assignment with no operator. */
3729 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3730 return 0;
3731 }
3732 }
3733
3734
3735 /* Estimate number of instructions that will be created by expanding
3736 the statements in the statement sequence STMTS.
3737 WEIGHTS contains weights attributed to various constructs. */
3738
3739 static int
3740 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3741 {
3742 int cost;
3743 gimple_stmt_iterator gsi;
3744
3745 cost = 0;
3746 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3747 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3748
3749 return cost;
3750 }
3751
3752
3753 /* Estimate number of instructions that will be created by expanding STMT.
3754 WEIGHTS contains weights attributed to various constructs. */
3755
3756 int
3757 estimate_num_insns (gimple stmt, eni_weights *weights)
3758 {
3759 unsigned cost, i;
3760 enum gimple_code code = gimple_code (stmt);
3761 tree lhs;
3762 tree rhs;
3763
3764 switch (code)
3765 {
3766 case GIMPLE_ASSIGN:
3767 /* Try to estimate the cost of assignments. We have two cases to
3768 deal with:
3769 1) Simple assignments to registers;
3770 2) Stores to things that must live in memory. This includes
3771 "normal" stores to scalars, but also assignments of large
3772 structures, or constructors of big arrays;
3773
3774 Let us look at these two cases, assuming we have "a = b + C":
3775 <GIMPLE_ASSIGN <var_decl "a">
3776 <plus_expr <var_decl "b"> <constant C>>>
3777 If "a" is a GIMPLE register, the assignment to it is free on almost
3778 any target, because "a" usually ends up in a real register. Hence
3779 the only cost of this expression comes from the PLUS_EXPR, and we
3780 can ignore the GIMPLE_ASSIGN.
3781 If "a" is not a GIMPLE register, the assignment to "a" will most
3782 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3783 of moving something into "a", which we compute using the function
3784 estimate_move_cost. */
3785 if (gimple_clobber_p (stmt))
3786 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3787
3788 lhs = gimple_assign_lhs (stmt);
3789 rhs = gimple_assign_rhs1 (stmt);
3790
3791 cost = 0;
3792
3793 /* Account for the cost of moving to / from memory. */
3794 if (gimple_store_p (stmt))
3795 cost += estimate_move_cost (TREE_TYPE (lhs));
3796 if (gimple_assign_load_p (stmt))
3797 cost += estimate_move_cost (TREE_TYPE (rhs));
3798
3799 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3800 gimple_assign_rhs1 (stmt),
3801 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3802 == GIMPLE_BINARY_RHS
3803 ? gimple_assign_rhs2 (stmt) : NULL);
3804 break;
3805
3806 case GIMPLE_COND:
3807 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3808 gimple_op (stmt, 0),
3809 gimple_op (stmt, 1));
3810 break;
3811
3812 case GIMPLE_SWITCH:
3813 /* Take into account cost of the switch + guess 2 conditional jumps for
3814 each case label.
3815
3816 TODO: once the switch expansion logic is sufficiently separated, we can
3817 do a better job of estimating the cost of the switch. */
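/* E.g. a switch with 16 case labels is estimated at
floor_log2 (16) * 2 == 8 when estimating time (a balanced decision
tree) and at 16 * 2 == 32 when estimating size. */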
3818 if (weights->time_based)
3819 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3820 else
3821 cost = gimple_switch_num_labels (stmt) * 2;
3822 break;
3823
3824 case GIMPLE_CALL:
3825 {
3826 tree decl;
3827
3828 if (gimple_call_internal_p (stmt))
3829 return 0;
3830 else if ((decl = gimple_call_fndecl (stmt))
3831 && DECL_BUILT_IN (decl))
3832 {
3833 /* Do not special case builtins where we see the body.
3834 This just confuses the inliner. */
3835 struct cgraph_node *node;
3836 if (!(node = cgraph_get_node (decl))
3837 || node->definition)
3838 ;
3839 /* For builtins that are likely expanded to nothing or
3840 inlined, do not account operand costs. */
3841 else if (is_simple_builtin (decl))
3842 return 0;
3843 else if (is_inexpensive_builtin (decl))
3844 return weights->target_builtin_call_cost;
3845 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3846 {
3847 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3848 specialize the cheap expansion we do here.
3849 ??? This asks for a more general solution. */
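/* Thus "pow (x, 2.0)" is costed like "x * x", i.e. as a single
multiplication rather than as a full call. */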
3850 switch (DECL_FUNCTION_CODE (decl))
3851 {
3852 case BUILT_IN_POW:
3853 case BUILT_IN_POWF:
3854 case BUILT_IN_POWL:
3855 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3856 && REAL_VALUES_EQUAL
3857 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3858 return estimate_operator_cost
3859 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
3860 gimple_call_arg (stmt, 0));
3861 break;
3862
3863 default:
3864 break;
3865 }
3866 }
3867 }
3868
3869 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3870 if (gimple_call_lhs (stmt))
3871 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3872 for (i = 0; i < gimple_call_num_args (stmt); i++)
3873 {
3874 tree arg = gimple_call_arg (stmt, i);
3875 cost += estimate_move_cost (TREE_TYPE (arg));
3876 }
3877 break;
3878 }
3879
3880 case GIMPLE_RETURN:
3881 return weights->return_cost;
3882
3883 case GIMPLE_GOTO:
3884 case GIMPLE_LABEL:
3885 case GIMPLE_NOP:
3886 case GIMPLE_PHI:
3887 case GIMPLE_PREDICT:
3888 case GIMPLE_DEBUG:
3889 return 0;
3890
3891 case GIMPLE_ASM:
3892 {
3893 int count = asm_str_count (gimple_asm_string (stmt));
3894 /* 1000 means infinity. This avoids overflows later
3895 with very long asm statements. */
3896 if (count > 1000)
3897 count = 1000;
3898 return count;
3899 }
3900
3901 case GIMPLE_RESX:
3902 /* This is either going to be an external function call with one
3903 argument, or two register copy statements plus a goto. */
3904 return 2;
3905
3906 case GIMPLE_EH_DISPATCH:
3907 /* ??? This is going to turn into a switch statement. Ideally
3908 we'd have a look at the eh region and estimate the number of
3909 edges involved. */
3910 return 10;
3911
3912 case GIMPLE_BIND:
3913 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3914
3915 case GIMPLE_EH_FILTER:
3916 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3917
3918 case GIMPLE_CATCH:
3919 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3920
3921 case GIMPLE_TRY:
3922 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3923 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3924
3925 /* OpenMP directives are generally very expensive. */
3926
3927 case GIMPLE_OMP_RETURN:
3928 case GIMPLE_OMP_SECTIONS_SWITCH:
3929 case GIMPLE_OMP_ATOMIC_STORE:
3930 case GIMPLE_OMP_CONTINUE:
3931 /* ...except these, which are cheap. */
3932 return 0;
3933
3934 case GIMPLE_OMP_ATOMIC_LOAD:
3935 return weights->omp_cost;
3936
3937 case GIMPLE_OMP_FOR:
3938 return (weights->omp_cost
3939 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3940 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3941
3942 case GIMPLE_OMP_PARALLEL:
3943 case GIMPLE_OMP_TASK:
3944 case GIMPLE_OMP_CRITICAL:
3945 case GIMPLE_OMP_MASTER:
3946 case GIMPLE_OMP_TASKGROUP:
3947 case GIMPLE_OMP_ORDERED:
3948 case GIMPLE_OMP_SECTION:
3949 case GIMPLE_OMP_SECTIONS:
3950 case GIMPLE_OMP_SINGLE:
3951 case GIMPLE_OMP_TARGET:
3952 case GIMPLE_OMP_TEAMS:
3953 return (weights->omp_cost
3954 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3955
3956 case GIMPLE_TRANSACTION:
3957 return (weights->tm_cost
3958 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3959 weights));
3960
3961 default:
3962 gcc_unreachable ();
3963 }
3964
3965 return cost;
3966 }
3967
3968 /* Estimate number of instructions that will be created by expanding
3969 function FNDECL. WEIGHTS contains weights attributed to various
3970 constructs. */
3971
3972 int
3973 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3974 {
3975 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3976 gimple_stmt_iterator bsi;
3977 basic_block bb;
3978 int n = 0;
3979
3980 gcc_assert (my_function && my_function->cfg);
3981 FOR_EACH_BB_FN (bb, my_function)
3982 {
3983 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3984 n += estimate_num_insns (gsi_stmt (bsi), weights);
3985 }
3986
3987 return n;
3988 }
3989
3990
3991 /* Initializes weights used by estimate_num_insns. */
3992
3993 void
3994 init_inline_once (void)
3995 {
3996 eni_size_weights.call_cost = 1;
3997 eni_size_weights.indirect_call_cost = 3;
3998 eni_size_weights.target_builtin_call_cost = 1;
3999 eni_size_weights.div_mod_cost = 1;
4000 eni_size_weights.omp_cost = 40;
4001 eni_size_weights.tm_cost = 10;
4002 eni_size_weights.time_based = false;
4003 eni_size_weights.return_cost = 1;
4004
4005 /* Estimating time for call is difficult, since we have no idea what the
4006 called function does. In the current uses of eni_time_weights,
4007 underestimating the cost does less harm than overestimating it, so
4008 we choose a rather small value here. */
4009 eni_time_weights.call_cost = 10;
4010 eni_time_weights.indirect_call_cost = 15;
4011 eni_time_weights.target_builtin_call_cost = 1;
4012 eni_time_weights.div_mod_cost = 10;
4013 eni_time_weights.omp_cost = 40;
4014 eni_time_weights.tm_cost = 40;
4015 eni_time_weights.time_based = true;
4016 eni_time_weights.return_cost = 2;
4017 }
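
/* A rough illustration of the defaults above (assuming int-sized
arguments and MOVE_MAX_PIECES >= 4): a call "foo (a, b)" is
estimated at 1 + 1 + 1 == 3 units for size but 10 + 1 + 1 == 12
units for time, so call overhead weighs far more heavily in speed
estimates than in size estimates. */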
4018
4019 /* Estimate the number of instructions in a gimple_seq. */
4020
4021 int
4022 count_insns_seq (gimple_seq seq, eni_weights *weights)
4023 {
4024 gimple_stmt_iterator gsi;
4025 int n = 0;
4026 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4027 n += estimate_num_insns (gsi_stmt (gsi), weights);
4028
4029 return n;
4030 }
4031
4032
4033 /* Install the new lexical block NEW_BLOCK underneath CURRENT_BLOCK. */
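/* For example, if CURRENT_BLOCK already has subblocks S1 -> S2,
prepending NEW_BLOCK yields subblocks NEW_BLOCK -> S1 -> S2, with
NEW_BLOCK's supercontext pointing back at CURRENT_BLOCK. */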
4034
4035 static void
4036 prepend_lexical_block (tree current_block, tree new_block)
4037 {
4038 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4039 BLOCK_SUBBLOCKS (current_block) = new_block;
4040 BLOCK_SUPERCONTEXT (new_block) = current_block;
4041 }
4042
4043 /* Add local variables from CALLEE to CALLER. */
4044
4045 static inline void
4046 add_local_variables (struct function *callee, struct function *caller,
4047 copy_body_data *id)
4048 {
4049 tree var;
4050 unsigned ix;
4051
4052 FOR_EACH_LOCAL_DECL (callee, ix, var)
4053 if (!can_be_nonlocal (var, id))
4054 {
4055 tree new_var = remap_decl (var, id);
4056
4057 /* Remap debug-expressions. */
4058 if (TREE_CODE (new_var) == VAR_DECL
4059 && DECL_HAS_DEBUG_EXPR_P (var)
4060 && new_var != var)
4061 {
4062 tree tem = DECL_DEBUG_EXPR (var);
4063 bool old_regimplify = id->regimplify;
4064 id->remapping_type_depth++;
4065 walk_tree (&tem, copy_tree_body_r, id, NULL);
4066 id->remapping_type_depth--;
4067 id->regimplify = old_regimplify;
4068 SET_DECL_DEBUG_EXPR (new_var, tem);
4069 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4070 }
4071 add_local_decl (caller, new_var);
4072 }
4073 }
4074
4075 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4076
4077 static bool
4078 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4079 {
4080 tree use_retvar;
4081 tree fn;
4082 struct pointer_map_t *st, *dst;
4083 tree return_slot;
4084 tree modify_dest;
4085 location_t saved_location;
4086 struct cgraph_edge *cg_edge;
4087 cgraph_inline_failed_t reason;
4088 basic_block return_block;
4089 edge e;
4090 gimple_stmt_iterator gsi, stmt_gsi;
4091 bool successfully_inlined = FALSE;
4092 bool purge_dead_abnormal_edges;
4093
4094 /* Set input_location here so we get the right instantiation context
4095 if we call instantiate_decl from inlinable_function_p. */
4096 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4097 saved_location = input_location;
4098 input_location = gimple_location (stmt);
4099
4100 /* From here on, we're only interested in CALL_EXPRs. */
4101 if (gimple_code (stmt) != GIMPLE_CALL)
4102 goto egress;
4103
4104 cg_edge = cgraph_edge (id->dst_node, stmt);
4105 gcc_checking_assert (cg_edge);
4106 /* First, see if we can figure out what function is being called.
4107 If we cannot, then there is no hope of inlining the function. */
4108 if (cg_edge->indirect_unknown_callee)
4109 goto egress;
4110 fn = cg_edge->callee->decl;
4111 gcc_checking_assert (fn);
4112
4113 /* If FN is a declaration of a function in a nested scope that was
4114 globally declared inline, we don't set its DECL_INITIAL.
4115 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4116 C++ front-end uses it for cdtors to refer to their internal
4117 declarations, that are not real functions. Fortunately those
4118 don't have trees to be saved, so we can tell by checking their
4119 gimple_body. */
4120 if (!DECL_INITIAL (fn)
4121 && DECL_ABSTRACT_ORIGIN (fn)
4122 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4123 fn = DECL_ABSTRACT_ORIGIN (fn);
4124
4125 /* Don't try to inline functions that are not well-suited to inlining. */
4126 if (cg_edge->inline_failed)
4127 {
4128 reason = cg_edge->inline_failed;
4129 /* If this call was originally indirect, we do not want to emit any
4130 inlining related warnings or sorry messages because there are no
4131 guarantees regarding those. */
4132 if (cg_edge->indirect_inlining_edge)
4133 goto egress;
4134
4135 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4136 /* For extern inline functions that get redefined we have always
4137 silently ignored the always_inline flag. Better behavior would
4138 be to keep both bodies and use the extern inline body
4139 for inlining, but we can't do that because front ends overwrite
4140 the body. */
4141 && !cg_edge->callee->local.redefined_extern_inline
4142 /* During early inline pass, report only when optimization is
4143 not turned on. */
4144 && (cgraph_global_info_ready
4145 || !optimize
4146 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4147 /* PR 20090218-1_0.c. Body can be provided by another module. */
4148 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4149 {
4150 error ("inlining failed in call to always_inline %q+F: %s", fn,
4151 cgraph_inline_failed_string (reason));
4152 error ("called from here");
4153 }
4154 else if (warn_inline
4155 && DECL_DECLARED_INLINE_P (fn)
4156 && !DECL_NO_INLINE_WARNING_P (fn)
4157 && !DECL_IN_SYSTEM_HEADER (fn)
4158 && reason != CIF_UNSPECIFIED
4159 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4160 /* Do not warn about not inlined recursive calls. */
4161 && !cgraph_edge_recursive_p (cg_edge)
4162 /* Avoid warnings during early inline pass. */
4163 && cgraph_global_info_ready)
4164 {
4165 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4166 fn, _(cgraph_inline_failed_string (reason)));
4167 warning (OPT_Winline, "called from here");
4168 }
4169 goto egress;
4170 }
4171 fn = cg_edge->callee->decl;
4172 cgraph_get_body (cg_edge->callee);
4173
4174 #ifdef ENABLE_CHECKING
4175 if (cg_edge->callee->decl != id->dst_node->decl)
4176 verify_cgraph_node (cg_edge->callee);
4177 #endif
4178
4179 /* We will be inlining this callee. */
4180 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4181
4182 /* Update the caller's EH personality. */
4183 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4184 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4185 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4186
4187 /* Split the block holding the GIMPLE_CALL. */
4188 e = split_block (bb, stmt);
4189 bb = e->src;
4190 return_block = e->dest;
4191 remove_edge (e);
4192
4193 /* split_block splits after the statement; work around this by
4194 moving the call into the second block manually. Not pretty,
4195 but seems easier than doing the CFG manipulation by hand
4196 when the GIMPLE_CALL is in the last statement of BB. */
4197 stmt_gsi = gsi_last_bb (bb);
4198 gsi_remove (&stmt_gsi, false);
4199
4200 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4201 been the source of abnormal edges. In this case, schedule
4202 the removal of dead abnormal edges. */
4203 gsi = gsi_start_bb (return_block);
4204 if (gsi_end_p (gsi))
4205 {
4206 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4207 purge_dead_abnormal_edges = true;
4208 }
4209 else
4210 {
4211 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4212 purge_dead_abnormal_edges = false;
4213 }
4214
4215 stmt_gsi = gsi_start_bb (return_block);
4216
4217 /* Build a block containing code to initialize the arguments, the
4218 actual inline expansion of the body, and a label for the return
4219 statements within the function to jump to. The type of the
4220 statement expression is the return type of the function call.
4221 ??? If the call does not have an associated block then we will
4222 remap all callee blocks to NULL, effectively dropping most of
4223 its debug information. This should only happen for calls to
4224 artificial decls inserted by the compiler itself. We need to
4225 either link the inlined blocks into the caller block tree or
4226 not refer to them in any way to not break GC for locations. */
4227 if (gimple_block (stmt))
4228 {
4229 id->block = make_node (BLOCK);
4230 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4231 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4232 prepend_lexical_block (gimple_block (stmt), id->block);
4233 }
4234
4235 /* Local declarations will be replaced by their equivalents in this
4236 map. */
4237 st = id->decl_map;
4238 id->decl_map = pointer_map_create ();
4239 dst = id->debug_map;
4240 id->debug_map = NULL;
4241
4242 /* Record the function we are about to inline. */
4243 id->src_fn = fn;
4244 id->src_node = cg_edge->callee;
4245 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4246 id->gimple_call = stmt;
4247
4248 gcc_assert (!id->src_cfun->after_inlining);
4249
4250 id->entry_bb = bb;
4251 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4252 {
4253 gimple_stmt_iterator si = gsi_last_bb (bb);
4254 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4255 NOT_TAKEN),
4256 GSI_NEW_STMT);
4257 }
4258 initialize_inlined_parameters (id, stmt, fn, bb);
4259
4260 if (DECL_INITIAL (fn))
4261 {
4262 if (gimple_block (stmt))
4263 {
4264 tree *var;
4265
4266 prepend_lexical_block (id->block,
4267 remap_blocks (DECL_INITIAL (fn), id));
4268 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4269 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4270 == NULL_TREE));
4271 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4272 otherwise, for DWARF, DW_TAG_formal_parameter will not be a child of
4273 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4274 under it. The parameters can then be evaluated in the debugger,
4275 but don't show up in backtraces. */
4276 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4277 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4278 {
4279 tree v = *var;
4280 *var = TREE_CHAIN (v);
4281 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4282 BLOCK_VARS (id->block) = v;
4283 }
4284 else
4285 var = &TREE_CHAIN (*var);
4286 }
4287 else
4288 remap_blocks_to_null (DECL_INITIAL (fn), id);
4289 }
4290
4291 /* Return statements in the function body will be replaced by jumps
4292 to the RET_LABEL. */
4293 gcc_assert (DECL_INITIAL (fn));
4294 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4295
4296 /* Find the LHS to which the result of this call is assigned. */
4297 return_slot = NULL;
4298 if (gimple_call_lhs (stmt))
4299 {
4300 modify_dest = gimple_call_lhs (stmt);
4301
4302 /* The function which we are inlining might not return a value,
4303 in which case we should issue a warning that the function
4304 does not return a value. In that case the optimizers will
4305 see that the variable to which the value is assigned was not
4306 initialized. We do not want to issue a warning about that
4307 uninitialized variable. */
4308 if (DECL_P (modify_dest))
4309 TREE_NO_WARNING (modify_dest) = 1;
4310
4311 if (gimple_call_return_slot_opt_p (stmt))
4312 {
4313 return_slot = modify_dest;
4314 modify_dest = NULL;
4315 }
4316 }
4317 else
4318 modify_dest = NULL;
4319
4320 /* If we are inlining a call to the C++ operator new, we don't want
4321 to use type based alias analysis on the return value. Otherwise
4322 we may get confused if the compiler sees that the inlined new
4323 function returns a pointer which was just deleted. See bug
4324 33407. */
4325 if (DECL_IS_OPERATOR_NEW (fn))
4326 {
4327 return_slot = NULL;
4328 modify_dest = NULL;
4329 }
4330
4331 /* Declare the return variable for the function. */
4332 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4333
4334 /* Add local vars in this inlined callee to caller. */
4335 add_local_variables (id->src_cfun, cfun, id);
4336
4337 if (dump_file && (dump_flags & TDF_DETAILS))
4338 {
4339 fprintf (dump_file, "Inlining ");
4340 print_generic_expr (dump_file, id->src_fn, 0);
4341 fprintf (dump_file, " to ");
4342 print_generic_expr (dump_file, id->dst_fn, 0);
4343 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4344 }
4345
4346 /* This is it. Duplicate the callee body. Assume callee is
4347 pre-gimplified. Note that we must not alter the caller
4348 function in any way before this point, as this CALL_EXPR may be
4349 a self-referential call; if we're calling ourselves, we need to
4350 duplicate our body before altering anything. */
4351 copy_body (id, bb->count,
4352 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4353 bb, return_block, NULL);
4354
4355 /* Reset the escaped solution. */
4356 if (cfun->gimple_df)
4357 pt_solution_reset (&cfun->gimple_df->escaped);
4358
4359 /* Clean up. */
4360 if (id->debug_map)
4361 {
4362 pointer_map_destroy (id->debug_map);
4363 id->debug_map = dst;
4364 }
4365 pointer_map_destroy (id->decl_map);
4366 id->decl_map = st;
4367
4368 /* Unlink the call's virtual operands before replacing it. */
4369 unlink_stmt_vdef (stmt);
4370 if (gimple_vdef (stmt)
4371 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4372 release_ssa_name (gimple_vdef (stmt));
4373
4374 /* If the inlined function returns a result that we care about,
4375 substitute the GIMPLE_CALL with an assignment of the return
4376 variable to the LHS of the call. That is, if STMT was
4377 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4378 if (use_retvar && gimple_call_lhs (stmt))
4379 {
4380 gimple old_stmt = stmt;
4381 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4382 gsi_replace (&stmt_gsi, stmt, false);
4383 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4384 }
4385 else
4386 {
4387 /* Handle the case of inlining a function with no return
4388 statement, which causes the return value to become undefined. */
4389 if (gimple_call_lhs (stmt)
4390 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4391 {
4392 tree name = gimple_call_lhs (stmt);
4393 tree var = SSA_NAME_VAR (name);
4394 tree def = ssa_default_def (cfun, var);
4395
4396 if (def)
4397 {
4398 /* If the variable is used undefined, make this name
4399 undefined via a move. */
4400 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4401 gsi_replace (&stmt_gsi, stmt, true);
4402 }
4403 else
4404 {
4405 /* Otherwise make this variable undefined. */
4406 gsi_remove (&stmt_gsi, true);
4407 set_ssa_default_def (cfun, var, name);
4408 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4409 }
4410 }
4411 else
4412 gsi_remove (&stmt_gsi, true);
4413 }
4414
4415 if (purge_dead_abnormal_edges)
4416 {
4417 gimple_purge_dead_eh_edges (return_block);
4418 gimple_purge_dead_abnormal_call_edges (return_block);
4419 }
4420
4421 /* If the value of the new expression is ignored, that's OK. We
4422 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4423 the equivalent inlined version either. */
4424 if (is_gimple_assign (stmt))
4425 {
4426 gcc_assert (gimple_assign_single_p (stmt)
4427 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4428 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4429 }
4430
4431 /* Output the inlining info for this abstract function, since it has been
4432 inlined. If we don't do this now, we can lose the information about the
4433 variables in the function when the blocks get blown away as soon as we
4434 remove the cgraph node. */
4435 if (gimple_block (stmt))
4436 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4437
4438 /* Update callgraph if needed. */
4439 cgraph_remove_node (cg_edge->callee);
4440
4441 id->block = NULL_TREE;
4442 successfully_inlined = TRUE;
4443
4444 egress:
4445 input_location = saved_location;
4446 return successfully_inlined;
4447 }
4448
4449 /* Expand calls to inline functions in the statements of basic
4450 block BB. Return true if at least one call was expanded
4451 inline. */
4452
4453 static bool
4454 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4455 {
4456 gimple_stmt_iterator gsi;
4457
4458 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4459 {
4460 gimple stmt = gsi_stmt (gsi);
4461
4462 if (is_gimple_call (stmt)
4463 && !gimple_call_internal_p (stmt)
4464 && expand_call_inline (bb, stmt, id))
4465 return true;
4466 }
4467
4468 return false;
4469 }
4470
4471
4472 /* Walk all basic blocks created after FIRST and try to fold every statement
4473 in the STATEMENTS pointer set. */
4474
4475 static void
4476 fold_marked_statements (int first, struct pointer_set_t *statements)
4477 {
4478 for (; first < n_basic_blocks_for_fn (cfun); first++)
4479 if (BASIC_BLOCK_FOR_FN (cfun, first))
4480 {
4481 gimple_stmt_iterator gsi;
4482
4483 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4484 !gsi_end_p (gsi);
4485 gsi_next (&gsi))
4486 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4487 {
4488 gimple old_stmt = gsi_stmt (gsi);
4489 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4490
4491 if (old_decl && DECL_BUILT_IN (old_decl))
4492 {
4493 /* Folding builtins can create multiple instructions;
4494 we need to look at all of them. */
4495 gimple_stmt_iterator i2 = gsi;
4496 gsi_prev (&i2);
4497 if (fold_stmt (&gsi))
4498 {
4499 gimple new_stmt;
4500 /* If a builtin at the end of a bb folded into nothing,
4501 the following loop won't work. */
4502 if (gsi_end_p (gsi))
4503 {
4504 cgraph_update_edges_for_call_stmt (old_stmt,
4505 old_decl, NULL);
4506 break;
4507 }
4508 if (gsi_end_p (i2))
4509 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4510 else
4511 gsi_next (&i2);
4512 while (1)
4513 {
4514 new_stmt = gsi_stmt (i2);
4515 update_stmt (new_stmt);
4516 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4517 new_stmt);
4518
4519 if (new_stmt == gsi_stmt (gsi))
4520 {
4521 /* It is okay to check only for the very last
4522 of these statements. If it is a throwing
4523 statement nothing will change. If it isn't,
4524 this can remove EH edges. The only case where
4525 that would be wrong is if some intermediate
4526 statement throws but the last one does not.
4527 That would mean we'd have to split the block,
4528 which we can't do here and we'd lose anyway.
4529 And as builtins probably never throw, this all
4530 is moot anyway. */
4531 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4532 new_stmt))
4533 gimple_purge_dead_eh_edges (
4534 BASIC_BLOCK_FOR_FN (cfun, first));
4535 break;
4536 }
4537 gsi_next (&i2);
4538 }
4539 }
4540 }
4541 else if (fold_stmt (&gsi))
4542 {
4543 /* Re-read the statement from GSI as fold_stmt() may
4544 have changed it. */
4545 gimple new_stmt = gsi_stmt (gsi);
4546 update_stmt (new_stmt);
4547
4548 if (is_gimple_call (old_stmt)
4549 || is_gimple_call (new_stmt))
4550 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4551 new_stmt);
4552
4553 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4554 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4555 first));
4556 }
4557 }
4558 }
4559 }
4560
4561 /* Expand calls to inline functions in the body of FN. */
4562
4563 unsigned int
4564 optimize_inline_calls (tree fn)
4565 {
4566 copy_body_data id;
4567 basic_block bb;
4568 int last = n_basic_blocks_for_fn (cfun);
4569 bool inlined_p = false;
4570
4571 /* Clear out ID. */
4572 memset (&id, 0, sizeof (id));
4573
4574 id.src_node = id.dst_node = cgraph_get_node (fn);
4575 gcc_assert (id.dst_node->definition);
4576 id.dst_fn = fn;
4577 /* Or any functions that aren't finished yet. */
4578 if (current_function_decl)
4579 id.dst_fn = current_function_decl;
4580
4581 id.copy_decl = copy_decl_maybe_to_var;
4582 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4583 id.transform_new_cfg = false;
4584 id.transform_return_to_modify = true;
4585 id.transform_parameter = true;
4586 id.transform_lang_insert_block = NULL;
4587 id.statements_to_fold = pointer_set_create ();
4588
4589 push_gimplify_context ();
4590
4591 /* We make no attempts to keep dominance info up-to-date. */
4592 free_dominance_info (CDI_DOMINATORS);
4593 free_dominance_info (CDI_POST_DOMINATORS);
4594
4595 /* Register specific gimple functions. */
4596 gimple_register_cfg_hooks ();
4597
4598 /* Reach the trees by walking over the CFG, and note the
4599 enclosing basic-blocks in the call edges. */
4600 /* We walk the blocks going forward, because inlined function bodies
4601 will split id->current_basic_block, and the new blocks will
4602 follow it; we'll trudge through them, processing their CALL_EXPRs
4603 along the way. */
4604 FOR_EACH_BB_FN (bb, cfun)
4605 inlined_p |= gimple_expand_calls_inline (bb, &id);
4606
4607 pop_gimplify_context (NULL);
4608
4609 #ifdef ENABLE_CHECKING
4610 {
4611 struct cgraph_edge *e;
4612
4613 verify_cgraph_node (id.dst_node);
4614
4615 /* Double check that we inlined everything we are supposed to inline. */
4616 for (e = id.dst_node->callees; e; e = e->next_callee)
4617 gcc_assert (e->inline_failed);
4618 }
4619 #endif
4620
4621 /* Fold queued statements. */
4622 fold_marked_statements (last, id.statements_to_fold);
4623 pointer_set_destroy (id.statements_to_fold);
4624
4625 gcc_assert (!id.debug_stmts.exists ());
4626
4627 /* If we didn't inline into the function there is nothing to do. */
4628 if (!inlined_p)
4629 return 0;
4630
4631 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4632 number_blocks (fn);
4633
4634 delete_unreachable_blocks_update_callgraph (&id);
4635 #ifdef ENABLE_CHECKING
4636 verify_cgraph_node (id.dst_node);
4637 #endif
4638
4639 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4640 not possible yet - the IPA passes might make various functions not
4641 throw and they don't care to proactively update local EH info. This is
4642 done later in the fixup_cfg pass, which also executes the verification. */
4643 return (TODO_update_ssa
4644 | TODO_cleanup_cfg
4645 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4646 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4647 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4648 ? TODO_rebuild_frequencies : 0));
4649 }
4650
4651 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4652
4653 tree
4654 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4655 {
4656 enum tree_code code = TREE_CODE (*tp);
4657 enum tree_code_class cl = TREE_CODE_CLASS (code);
4658
4659 /* We make copies of most nodes. */
4660 if (IS_EXPR_CODE_CLASS (cl)
4661 || code == TREE_LIST
4662 || code == TREE_VEC
4663 || code == TYPE_DECL
4664 || code == OMP_CLAUSE)
4665 {
4666 /* Because the chain gets clobbered when we make a copy, we save it
4667 here. */
4668 tree chain = NULL_TREE, new_tree;
4669
4670 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4671 chain = TREE_CHAIN (*tp);
4672
4673 /* Copy the node. */
4674 new_tree = copy_node (*tp);
4675
4676 *tp = new_tree;
4677
4678 /* Now, restore the chain, if appropriate. That will cause
4679 walk_tree to walk into the chain as well. */
4680 if (code == PARM_DECL
4681 || code == TREE_LIST
4682 || code == OMP_CLAUSE)
4683 TREE_CHAIN (*tp) = chain;
4684
4685 /* For now, we don't update BLOCKs when we make copies. So, we
4686 have to nullify all BIND_EXPRs. */
4687 if (TREE_CODE (*tp) == BIND_EXPR)
4688 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4689 }
4690 else if (code == CONSTRUCTOR)
4691 {
4692 /* CONSTRUCTOR nodes need special handling because
4693 we need to duplicate the vector of elements. */
4694 tree new_tree;
4695
4696 new_tree = copy_node (*tp);
4697 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4698 *tp = new_tree;
4699 }
4700 else if (code == STATEMENT_LIST)
4701 /* We used to just abort on STATEMENT_LIST, but we can run into them
4702 with statement-expressions (c++/40975). */
4703 copy_statement_list (tp);
4704 else if (TREE_CODE_CLASS (code) == tcc_type)
4705 *walk_subtrees = 0;
4706 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4707 *walk_subtrees = 0;
4708 else if (TREE_CODE_CLASS (code) == tcc_constant)
4709 *walk_subtrees = 0;
4710 return NULL_TREE;
4711 }
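
/* A typical use is a deep copy of an expression tree, sketched as:

tree copy = expr;
walk_tree (&copy, copy_tree_r, NULL, NULL);

Note that types, most declarations and constants are deliberately
shared rather than copied, as the callback clears *WALK_SUBTREES
for them. */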
4712
4713 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4714 information indicating to what new SAVE_EXPR this one should be mapped,
4715 use that one; in that case WALK_SUBTREES is also cleared, as the copy
4716 has already been walked. Otherwise, create a new node and enter it in ST. */
4717
4718 static void
4719 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4720 {
4721 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4722 tree *n;
4723 tree t;
4724
4725 /* See if we already encountered this SAVE_EXPR. */
4726 n = (tree *) pointer_map_contains (st, *tp);
4727
4728 /* If we didn't already remap this SAVE_EXPR, do so now. */
4729 if (!n)
4730 {
4731 t = copy_node (*tp);
4732
4733 /* Remember this SAVE_EXPR. */
4734 *pointer_map_insert (st, *tp) = t;
4735 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4736 *pointer_map_insert (st, t) = t;
4737 }
4738 else
4739 {
4740 /* We've already walked into this SAVE_EXPR; don't do it again. */
4741 *walk_subtrees = 0;
4742 t = *n;
4743 }
4744
4745 /* Replace this SAVE_EXPR with the copy. */
4746 *tp = t;
4747 }
4748
4749 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4750 label, copies the declaration and enters it in the decl map of the
4751 copy_body_data passed in WI->info. */
4752
4753 static tree
4754 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4755 bool *handled_ops_p ATTRIBUTE_UNUSED,
4756 struct walk_stmt_info *wi)
4757 {
4758 copy_body_data *id = (copy_body_data *) wi->info;
4759 gimple stmt = gsi_stmt (*gsip);
4760
4761 if (gimple_code (stmt) == GIMPLE_LABEL)
4762 {
4763 tree decl = gimple_label_label (stmt);
4764
4765 /* Copy the decl and remember the copy. */
4766 insert_decl_map (id, decl, id->copy_decl (decl, id));
4767 }
4768
4769 return NULL_TREE;
4770 }
4771
4772
4773 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4774 Using the decl map in the copy_body_data passed in WI->info,
4775 remaps all local declarations to appropriate replacements in gimple
4776 operands. */
4777
4778 static tree
4779 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4780 {
4781 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4782 copy_body_data *id = (copy_body_data *) wi->info;
4783 struct pointer_map_t *st = id->decl_map;
4784 tree *n;
4785 tree expr = *tp;
4786
4787 /* Only a local declaration (variable or label). */
4788 if ((TREE_CODE (expr) == VAR_DECL
4789 && !TREE_STATIC (expr))
4790 || TREE_CODE (expr) == LABEL_DECL)
4791 {
4792 /* Lookup the declaration. */
4793 n = (tree *) pointer_map_contains (st, expr);
4794
4795 /* If it's there, remap it. */
4796 if (n)
4797 *tp = *n;
4798 *walk_subtrees = 0;
4799 }
4800 else if (TREE_CODE (expr) == STATEMENT_LIST
4801 || TREE_CODE (expr) == BIND_EXPR
4802 || TREE_CODE (expr) == SAVE_EXPR)
4803 gcc_unreachable ();
4804 else if (TREE_CODE (expr) == TARGET_EXPR)
4805 {
4806 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4807 It's OK for this to happen if it was part of a subtree that
4808 isn't immediately expanded, such as operand 2 of another
4809 TARGET_EXPR. */
4810 if (!TREE_OPERAND (expr, 1))
4811 {
4812 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4813 TREE_OPERAND (expr, 3) = NULL_TREE;
4814 }
4815 }
4816
4817 /* Keep iterating. */
4818 return NULL_TREE;
4819 }
4820
4821
4822 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4823 Using the decl map in the copy_body_data passed in WI->info,
4824 remaps all local declarations to appropriate replacements in gimple
4825 statements. */
4826
4827 static tree
4828 replace_locals_stmt (gimple_stmt_iterator *gsip,
4829 bool *handled_ops_p ATTRIBUTE_UNUSED,
4830 struct walk_stmt_info *wi)
4831 {
4832 copy_body_data *id = (copy_body_data *) wi->info;
4833 gimple stmt = gsi_stmt (*gsip);
4834
4835 if (gimple_code (stmt) == GIMPLE_BIND)
4836 {
4837 tree block = gimple_bind_block (stmt);
4838
4839 if (block)
4840 {
4841 remap_block (&block, id);
4842 gimple_bind_set_block (stmt, block);
4843 }
4844
4845 /* This will remap a lot of the same decls again, but this should be
4846 harmless. */
4847 if (gimple_bind_vars (stmt))
4848 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4849 NULL, id));
4850 }
4851
4852 /* Keep iterating. */
4853 return NULL_TREE;
4854 }
4855
4856
4857 /* Copies everything in SEQ and replaces variables and labels local to
4858 current_function_decl. */
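
/* The copy is made in two passes: mark_local_labels_stmt first seeds
the decl map with copies of all local labels (so that gotos to
labels defined later in SEQ remap correctly), and only then is the
copied sequence rewritten by replace_locals_stmt and
replace_locals_op. */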
4859
4860 gimple_seq
4861 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4862 {
4863 copy_body_data id;
4864 struct walk_stmt_info wi;
4865 struct pointer_set_t *visited;
4866 gimple_seq copy;
4867
4868 /* There's nothing to do for an empty sequence. */
4869 if (seq == NULL)
4870 return seq;
4871
4872 /* Set up ID. */
4873 memset (&id, 0, sizeof (id));
4874 id.src_fn = current_function_decl;
4875 id.dst_fn = current_function_decl;
4876 id.decl_map = pointer_map_create ();
4877 id.debug_map = NULL;
4878
4879 id.copy_decl = copy_decl_no_change;
4880 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4881 id.transform_new_cfg = false;
4882 id.transform_return_to_modify = false;
4883 id.transform_parameter = false;
4884 id.transform_lang_insert_block = NULL;
4885
4886 /* Walk the tree once to find local labels. */
4887 memset (&wi, 0, sizeof (wi));
4888 visited = pointer_set_create ();
4889 wi.info = &id;
4890 wi.pset = visited;
4891 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4892 pointer_set_destroy (visited);
4893
4894 copy = gimple_seq_copy (seq);
4895
4896 /* Walk the copy, remapping decls. */
4897 memset (&wi, 0, sizeof (wi));
4898 wi.info = &id;
4899 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4900
4901 /* Clean up. */
4902 pointer_map_destroy (id.decl_map);
4903 if (id.debug_map)
4904 pointer_map_destroy (id.debug_map);
4905
4906 return copy;
4907 }
4908
4909
4910 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4911
4912 static tree
4913 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4914 {
4915 if (*tp == data)
4916 return (tree) data;
4917 else
4918 return NULL;
4919 }
4920
4921 DEBUG_FUNCTION bool
4922 debug_find_tree (tree top, tree search)
4923 {
4924 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4925 }
4926
4927
4928 /* Declare the variables created by the inliner. Add all the variables in
4929 VARS to BLOCK. */
4930
4931 static void
4932 declare_inline_vars (tree block, tree vars)
4933 {
4934 tree t;
4935 for (t = vars; t; t = DECL_CHAIN (t))
4936 {
4937 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4938 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4939 add_local_decl (cfun, t);
4940 }
4941
4942 if (block)
4943 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4944 }
4945
4946 /* Finish the copy COPY of DECL: the DECL originally lived in ID->src_fn
4947 and the copy is destined for ID->dst_fn. Shared tail for the
4948 copy_decl_* routines below. */
4949
4950 static tree
4951 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4952 {
4953 /* Don't generate debug information for the copy if we wouldn't have
4954 generated it for the original either. */
4955 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4956 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4957
4958 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4959 declaration inspired this copy. */
4960 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4961
4962 /* The new variable/label has no RTL, yet. */
4963 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4964 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4965 SET_DECL_RTL (copy, 0);
4966
4967 /* These args would always appear unused, if not for this. */
4968 TREE_USED (copy) = 1;
4969
4970 /* Set the context for the new declaration. */
4971 if (!DECL_CONTEXT (decl))
4972 /* Globals stay global. */
4973 ;
4974 else if (DECL_CONTEXT (decl) != id->src_fn)
4975 /* Things that weren't in the scope of the function we're inlining
4976 from aren't in the scope we're inlining to, either. */
4977 ;
4978 else if (TREE_STATIC (decl))
4979 /* Function-scoped static variables should stay in the original
4980 function. */
4981 ;
4982 else
4983 /* Ordinary automatic local variables are now in the scope of the
4984 new function. */
4985 DECL_CONTEXT (copy) = id->dst_fn;
4986
4987 return copy;
4988 }
4989
4990 static tree
4991 copy_decl_to_var (tree decl, copy_body_data *id)
4992 {
4993 tree copy, type;
4994
4995 gcc_assert (TREE_CODE (decl) == PARM_DECL
4996 || TREE_CODE (decl) == RESULT_DECL);
4997
4998 type = TREE_TYPE (decl);
4999
5000 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5001 VAR_DECL, DECL_NAME (decl), type);
5002 if (DECL_PT_UID_SET_P (decl))
5003 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5004 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5005 TREE_READONLY (copy) = TREE_READONLY (decl);
5006 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5007 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5008
5009 return copy_decl_for_dup_finish (id, decl, copy);
5010 }
5011
5012 /* Like copy_decl_to_var, but create a return slot object instead of a
5013 pointer variable for return by invisible reference. */
5014
5015 static tree
5016 copy_result_decl_to_var (tree decl, copy_body_data *id)
5017 {
5018 tree copy, type;
5019
5020 gcc_assert (TREE_CODE (decl) == PARM_DECL
5021 || TREE_CODE (decl) == RESULT_DECL);
5022
5023 type = TREE_TYPE (decl);
5024 if (DECL_BY_REFERENCE (decl))
5025 type = TREE_TYPE (type);
5026
5027 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5028 VAR_DECL, DECL_NAME (decl), type);
5029 if (DECL_PT_UID_SET_P (decl))
5030 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5031 TREE_READONLY (copy) = TREE_READONLY (decl);
5032 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5033 if (!DECL_BY_REFERENCE (decl))
5034 {
5035 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5036 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5037 }
5038
5039 return copy_decl_for_dup_finish (id, decl, copy);
5040 }
5041
5042 tree
5043 copy_decl_no_change (tree decl, copy_body_data *id)
5044 {
5045 tree copy;
5046
5047 copy = copy_node (decl);
5048
5049 /* The COPY is not abstract; it will be generated in DST_FN. */
5050 DECL_ABSTRACT (copy) = 0;
5051 lang_hooks.dup_lang_specific_decl (copy);
5052
5053 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5054 been taken; it's for internal bookkeeping in expand_goto_internal. */
5055 if (TREE_CODE (copy) == LABEL_DECL)
5056 {
5057 TREE_ADDRESSABLE (copy) = 0;
5058 LABEL_DECL_UID (copy) = -1;
5059 }
5060
5061 return copy_decl_for_dup_finish (id, decl, copy);
5062 }
5063
5064 static tree
5065 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5066 {
5067 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5068 return copy_decl_to_var (decl, id);
5069 else
5070 return copy_decl_no_change (decl, id);
5071 }
5072
5073 /* Return a copy of the function's argument tree. */
5074 static tree
5075 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5076 bitmap args_to_skip, tree *vars)
5077 {
5078 tree arg, *parg;
5079 tree new_parm = NULL;
5080 int i = 0;
5081
5082 parg = &new_parm;
5083
5084 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5085 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5086 {
5087 tree new_tree = remap_decl (arg, id);
5088 if (TREE_CODE (new_tree) != PARM_DECL)
5089 new_tree = id->copy_decl (arg, id);
5090 lang_hooks.dup_lang_specific_decl (new_tree);
5091 *parg = new_tree;
5092 parg = &DECL_CHAIN (new_tree);
5093 }
5094 else if (!pointer_map_contains (id->decl_map, arg))
5095 {
5096 /* Make an equivalent VAR_DECL. If the argument was used
5097 as a temporary variable later in the function, the uses will be
5098 replaced by the local variable. */
5099 tree var = copy_decl_to_var (arg, id);
5100 insert_decl_map (id, arg, var);
5101 /* Declare this new variable. */
5102 DECL_CHAIN (var) = *vars;
5103 *vars = var;
5104 }
5105 return new_parm;
5106 }
5107
5108 /* Return a copy of the function's static chain. */
5109 static tree
5110 copy_static_chain (tree static_chain, copy_body_data * id)
5111 {
5112 tree *chain_copy, *pvar;
5113
5114 chain_copy = &static_chain;
5115 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5116 {
5117 tree new_tree = remap_decl (*pvar, id);
5118 lang_hooks.dup_lang_specific_decl (new_tree);
5119 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5120 *pvar = new_tree;
5121 }
5122 return static_chain;
5123 }
5124
5125 /* Return true if the function is allowed to be versioned.
5126 This is a guard for the versioning functionality. */
5127
5128 bool
5129 tree_versionable_function_p (tree fndecl)
5130 {
5131 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5132 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5133 }
5134
5135 /* Delete all unreachable basic blocks and update callgraph.
5136 Doing so is somewhat nontrivial because we need to update all clones and
5137 remove inline functions that become unreachable. */
5138
5139 static bool
5140 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5141 {
5142 bool changed = false;
5143 basic_block b, next_bb;
5144
5145 find_unreachable_blocks ();
5146
5147 /* Delete all unreachable basic blocks. */
5148
5149 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5150 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5151 {
5152 next_bb = b->next_bb;
5153
5154 if (!(b->flags & BB_REACHABLE))
5155 {
5156 gimple_stmt_iterator bsi;
5157
5158 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5159 {
5160 struct cgraph_edge *e;
5161 struct cgraph_node *node;
5162
5163 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5164
5165 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5166 &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5167 {
5168 if (!e->inline_failed)
5169 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5170 else
5171 cgraph_remove_edge (e);
5172 }
5173 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5174 && id->dst_node->clones)
5175 for (node = id->dst_node->clones; node != id->dst_node;)
5176 {
5177 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5178 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5179 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5180 {
5181 if (!e->inline_failed)
5182 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5183 else
5184 cgraph_remove_edge (e);
5185 }
5186
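/* Advance to the next clone in a preorder walk of the clone tree
rooted at id->dst_node. */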
5187 if (node->clones)
5188 node = node->clones;
5189 else if (node->next_sibling_clone)
5190 node = node->next_sibling_clone;
5191 else
5192 {
5193 while (node != id->dst_node && !node->next_sibling_clone)
5194 node = node->clone_of;
5195 if (node != id->dst_node)
5196 node = node->next_sibling_clone;
5197 }
5198 }
5199 }
5200 delete_basic_block (b);
5201 changed = true;
5202 }
5203 }
5204
5205 return changed;
5206 }
5207
5208 /* Update clone info after duplication. */
5209
5210 static void
5211 update_clone_info (copy_body_data * id)
5212 {
5213 struct cgraph_node *node;
5214 if (!id->dst_node->clones)
5215 return;
5216 for (node = id->dst_node->clones; node != id->dst_node;)
5217 {
5218 /* First update replace maps to match the new body. */
5219 if (node->clone.tree_map)
5220 {
5221 unsigned int i;
5222 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5223 {
5224 struct ipa_replace_map *replace_info;
5225 replace_info = (*node->clone.tree_map)[i];
5226 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5227 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5228 }
5229 }
5230 if (node->clones)
5231 node = node->clones;
5232 else if (node->next_sibling_clone)
5233 node = node->next_sibling_clone;
5234 else
5235 {
5236 while (node != id->dst_node && !node->next_sibling_clone)
5237 node = node->clone_of;
5238 if (node != id->dst_node)
5239 node = node->next_sibling_clone;
5240 }
5241 }
5242 }
5243
5244 /* Create a copy of a function's tree.
5245 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5246 of the original function and the new copied function
5247 respectively. In case we want to replace a DECL
5248 tree with another tree while duplicating the function's
5249 body, TREE_MAP represents the mapping between these
5250 trees. If UPDATE_CLONES is set, the call_stmt fields
5251 of edges of clones of the function will be updated.
5252
5253 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5254 from the new version.
5255 If SKIP_RETURN is true, the new version will return void.
5256 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5257 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5258 */
void
tree_function_versioning (tree old_decl, tree new_decl,
			  vec<ipa_replace_map_p, va_gc> *tree_map,
			  bool update_clones, bitmap args_to_skip,
			  bool skip_return, bitmap blocks_to_copy,
			  basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple, 10> init_stmts;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_get_node (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_get_node (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
	{
	  new_debug_args = decl_debug_args_insert (new_decl);
	  *new_debug_args = vec_safe_copy (*old_debug_args);
	}
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Collect statements that will need folding once the body is copied.  */
  id.statements_to_fold = pointer_set_create ();

  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;
  if (id.src_node->ipa_transforms_to_apply.exists ())
    {
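      /* The new version runs the source node's pending IPA transforms
	 first, followed by whatever was already queued on the
	 destination node.  */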
      vec<ipa_opt_pass> old_transforms_to_apply
	= id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply
	= id.src_node->ipa_transforms_to_apply.copy ();
      for (i = 0; i < old_transforms_to_apply.length (); i++)
	id.dst_node->ipa_transforms_to_apply.safe_push
	  (old_transforms_to_apply[i]);
      old_transforms_to_apply.release ();
    }

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   old_entry_block->count);
  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
			   &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    if (!replace_info->old_tree)
	      {
		int i = replace_info->parm_num;
		tree parm;
		tree req_type;

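		/* No explicit OLD_TREE was given: locate the PARM_NUM-th
		   parameter of OLD_DECL and use that.  */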
		for (parm = DECL_ARGUMENTS (old_decl); i;
		     parm = DECL_CHAIN (parm))
		  i--;
		replace_info->old_tree = parm;
		req_type = TREE_TYPE (parm);
		if (!useless_type_conversion_p
		      (req_type, TREE_TYPE (replace_info->new_tree)))
		  {
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type)
			     == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			if (dump_file)
			  {
			    fprintf (dump_file, " const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree, 0);
			    fprintf (dump_file,
				     " can't be converted to param ");
			    print_generic_expr (dump_file, parm, 0);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree, id.src_fn,
					    NULL,
					    &vars);
		if (init)
		  init_stmts.safe_push (init);
	      }
	  }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun,
					  DECL_RESULT (old_decl))))
	{
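	  /* Give the copied result a fresh default SSA definition and
	     map the source function's default definition to it.  */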
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
	     ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	  e->count = bb->count;
	}
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}

/* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */
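/* For instance (a hypothetical illustration, not code from this file):
   for a GENERIC call 'square (3)' to a const function whose saved body
   is 'return x * x;', the parameter X is remapped to the argument 3 and
   the result is the copied body's expression '3 * 3'.  */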

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	*pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

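  /* The duplicate is a type in its own right; make it its own canonical
     type rather than sharing the original's.  */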
  TYPE_CANONICAL (type) = type;

  return type;
}