1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "intl.h"
39 #include "pointer-set.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "tree-cfg.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-into-ssa.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "function.h"
62 #include "tree-pretty-print.h"
63 #include "except.h"
64 #include "debug.h"
65 #include "ipa-prop.h"
66 #include "value-prof.h"
67 #include "tree-pass.h"
68 #include "target.h"
69 #include "cfgloop.h"
70
71 #include "rtl.h" /* FIXME: For asm_str_count. */
72
73 /* I'm not really happy about this, but we need to handle gimple and
74 non-gimple trees. */
75
76 /* Inlining, Cloning, Versioning, Parallelization
77
78 Inlining: a function body is duplicated, but the PARM_DECLs are
79 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
80 MODIFY_EXPRs that store to a dedicated returned-value variable.
81 The duplicated eh_region info of the copy will later be appended
82 to the info for the caller; the eh_region info in copied throwing
83 statements and RESX statements are adjusted accordingly.
84
85 Cloning: (only in C++) We have one body for a con/de/structor, and
86 multiple function decls, each with a unique parameter list.
87 Duplicate the body, using the given splay tree; some parameters
88 will become constants (like 0 or 1).
89
90 Versioning: a function body is duplicated and the result is a new
91 function rather than into blocks of an existing function as with
92 inlining. Some parameters will become constants.
93
94 Parallelization: a region of a function is duplicated resulting in
95 a new function. Variables may be replaced with complex expressions
96 to enable shared variable semantics.
97
98 All of these will simultaneously look up any callgraph edges. If
99 we're going to inline the duplicated function body, and the given
100 function has some cloned callgraph nodes (one for each place this
101 function will be inlined), those callgraph edges will be duplicated.
102 If we're cloning the body, those callgraph edges will be
103 updated to point into the new body. (Note that the original
104 callgraph node and edge list will not be altered.)
105
106 See the CALL_EXPR handling case in copy_tree_body_r (). */
107
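/* A small illustrative example (not literal GIMPLE): inlining
"int sq (int i) { return i * i; }" at a call "y = sq (x);" conceptually
produces "{ int i.0 = x; retval.1 = i.0 * i.0; } y = retval.1;", i.e. the
PARM_DECL becomes a local VAR_DECL and the non-void RETURN_EXPR becomes an
assignment to the dedicated return variable. */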
108 /* To Do:
109
110 o In order to make inlining-on-trees work, we pessimized
111 function-local static constants. In particular, they are now
112 always output, even when not addressed. Fix this by treating
113 function-local static constants just like global static
114 constants; the back-end already knows not to output them if they
115 are not needed.
116
117 o Provide heuristics to clamp inlining of recursive template
118 calls? */
119
120
121 /* Weights that estimate_num_insns uses to estimate the size of the
122 produced code. */
123
124 eni_weights eni_size_weights;
125
126 /* Weights that estimate_num_insns uses to estimate the time necessary
127 to execute the produced code. */
128
129 eni_weights eni_time_weights;
130
131 /* Prototypes. */
132
133 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
134 static void remap_block (tree *, copy_body_data *);
135 static void copy_bind_expr (tree *, int *, copy_body_data *);
136 static void declare_inline_vars (tree, tree);
137 static void remap_save_expr (tree *, void *, int *);
138 static void prepend_lexical_block (tree current_block, tree new_block);
139 static tree copy_decl_to_var (tree, copy_body_data *);
140 static tree copy_result_decl_to_var (tree, copy_body_data *);
141 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
142 static gimple remap_gimple_stmt (gimple, copy_body_data *);
143 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
144
145 /* Insert a tree->tree mapping for ID. Although the name suggests
146 that the trees should be variables, it is used for more than that. */
147
148 void
149 insert_decl_map (copy_body_data *id, tree key, tree value)
150 {
151 *pointer_map_insert (id->decl_map, key) = value;
152
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
155 if (key != value)
156 *pointer_map_insert (id->decl_map, value) = value;
157 }
158
159 /* Insert a tree->tree mapping for ID. This is only used for
160 variables. */
161
162 static void
163 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
164 {
165 if (!gimple_in_ssa_p (id->src_cfun))
166 return;
167
168 if (!MAY_HAVE_DEBUG_STMTS)
169 return;
170
171 if (!target_for_debug_bind (key))
172 return;
173
174 gcc_assert (TREE_CODE (key) == PARM_DECL);
175 gcc_assert (TREE_CODE (value) == VAR_DECL);
176
177 if (!id->debug_map)
178 id->debug_map = pointer_map_create ();
179
180 *pointer_map_insert (id->debug_map, key) = value;
181 }
182
183 /* If nonzero, we're remapping the contents of inlined debug
184 statements. If negative, an error has occurred, such as a
185 reference to a variable that isn't available in the inlined
186 context. */
187 static int processing_debug_stmt = 0;
188
189 /* Construct new SSA name for old NAME. ID is the inline context. */
190
191 static tree
192 remap_ssa_name (tree name, copy_body_data *id)
193 {
194 tree new_tree, var;
195 tree *n;
196
197 gcc_assert (TREE_CODE (name) == SSA_NAME);
198
199 n = (tree *) pointer_map_contains (id->decl_map, name);
200 if (n)
201 return unshare_expr (*n);
202
203 if (processing_debug_stmt)
204 {
205 if (SSA_NAME_IS_DEFAULT_DEF (name)
206 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
207 && id->entry_bb == NULL
208 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
209 {
210 tree vexpr = make_node (DEBUG_EXPR_DECL);
211 gimple def_temp;
212 gimple_stmt_iterator gsi;
213 tree val = SSA_NAME_VAR (name);
214
215 n = (tree *) pointer_map_contains (id->decl_map, val);
216 if (n != NULL)
217 val = *n;
218 if (TREE_CODE (val) != PARM_DECL)
219 {
220 processing_debug_stmt = -1;
221 return name;
222 }
223 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
224 DECL_ARTIFICIAL (vexpr) = 1;
225 TREE_TYPE (vexpr) = TREE_TYPE (name);
226 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
227 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
228 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
229 return vexpr;
230 }
231
232 processing_debug_stmt = -1;
233 return name;
234 }
235
236 /* Remap anonymous SSA names or SSA names of anonymous decls. */
237 var = SSA_NAME_VAR (name);
238 if (!var
239 || (!SSA_NAME_IS_DEFAULT_DEF (name)
240 && TREE_CODE (var) == VAR_DECL
241 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
242 && DECL_ARTIFICIAL (var)
243 && DECL_IGNORED_P (var)
244 && !DECL_NAME (var)))
245 {
246 struct ptr_info_def *pi;
247 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
248 if (!var && SSA_NAME_IDENTIFIER (name))
249 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
250 insert_decl_map (id, name, new_tree);
251 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
252 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
253 /* At least IPA points-to info can be directly transferred. */
254 if (id->src_cfun->gimple_df
255 && id->src_cfun->gimple_df->ipa_pta
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
258 {
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
261 }
262 return new_tree;
263 }
264
265 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
266 in copy_bb. */
267 new_tree = remap_decl (var, id);
268
269 /* We might've substituted a constant or another SSA_NAME for
270 the variable.
271
272 Replace the SSA name representing RESULT_DECL by the variable during
273 inlining: this saves us from the need to introduce a PHI node in case
274 the return value is only partly initialized. */
275 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
276 && (!SSA_NAME_VAR (name)
277 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
278 || !id->transform_return_to_modify))
279 {
280 struct ptr_info_def *pi;
281 new_tree = make_ssa_name (new_tree, NULL);
282 insert_decl_map (id, name, new_tree);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id->src_cfun->gimple_df
287 && id->src_cfun->gimple_df->ipa_pta
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296 /* By inlining a function having an uninitialized variable, we might
297 extend its lifetime (the variable might get reused). This causes an
298 ICE in case we end up extending the lifetime of an SSA name across an
299 abnormal edge, and it also increases register pressure.
300
301 We simply initialize all uninitialized vars by 0, except when
302 we are inlining into the very first BB. We could avoid this for all
303 BBs that are not inside strongly connected regions of the CFG, but
304 this is expensive to test. */
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = (tree *) pointer_map_contains (id->decl_map, decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* If we didn't already have an equivalent for this declaration,
353 create one now. */
354 if (!n)
355 {
356 /* Make a copy of the variable or label. */
357 tree t = id->copy_decl (decl, id);
358
359 /* Remember it, so that if we encounter this local entity again
360 we can reuse this copy. Do this early because remap_type may
361 need this decl for TYPE_STUB_DECL. */
362 insert_decl_map (id, decl, t);
363
364 if (!DECL_P (t))
365 return t;
366
367 /* Remap types, if necessary. */
368 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
369 if (TREE_CODE (t) == TYPE_DECL)
370 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
371
372 /* Remap sizes as necessary. */
373 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
374 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
375
376 /* If fields, do likewise for offset and qualifier. */
377 if (TREE_CODE (t) == FIELD_DECL)
378 {
379 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
380 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
381 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
382 }
383
384 return t;
385 }
386
387 if (id->do_not_unshare)
388 return *n;
389 else
390 return unshare_expr (*n);
391 }
392
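/* Helper for remap_type. Build a remapped copy of TYPE for the destination
function and register it in ID's decl_map. */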
393 static tree
394 remap_type_1 (tree type, copy_body_data *id)
395 {
396 tree new_tree, t;
397
398 /* We do need a copy. Build and register it now. If this is a pointer or
399 reference type, remap the designated type and make a new pointer or
400 reference type. */
401 if (TREE_CODE (type) == POINTER_TYPE)
402 {
403 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
404 TYPE_MODE (type),
405 TYPE_REF_CAN_ALIAS_ALL (type));
406 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
407 new_tree = build_type_attribute_qual_variant (new_tree,
408 TYPE_ATTRIBUTES (type),
409 TYPE_QUALS (type));
410 insert_decl_map (id, type, new_tree);
411 return new_tree;
412 }
413 else if (TREE_CODE (type) == REFERENCE_TYPE)
414 {
415 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
416 TYPE_MODE (type),
417 TYPE_REF_CAN_ALIAS_ALL (type));
418 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
419 new_tree = build_type_attribute_qual_variant (new_tree,
420 TYPE_ATTRIBUTES (type),
421 TYPE_QUALS (type));
422 insert_decl_map (id, type, new_tree);
423 return new_tree;
424 }
425 else
426 new_tree = copy_node (type);
427
428 insert_decl_map (id, type, new_tree);
429
430 /* This is a new type, not a copy of an old type. Need to reassociate
431 variants. We can handle everything except the main variant lazily. */
432 t = TYPE_MAIN_VARIANT (type);
433 if (type != t)
434 {
435 t = remap_type (t, id);
436 TYPE_MAIN_VARIANT (new_tree) = t;
437 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
438 TYPE_NEXT_VARIANT (t) = new_tree;
439 }
440 else
441 {
442 TYPE_MAIN_VARIANT (new_tree) = new_tree;
443 TYPE_NEXT_VARIANT (new_tree) = NULL;
444 }
445
446 if (TYPE_STUB_DECL (type))
447 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
448
449 /* Lazily create pointer and reference types. */
450 TYPE_POINTER_TO (new_tree) = NULL;
451 TYPE_REFERENCE_TO (new_tree) = NULL;
452
453 switch (TREE_CODE (new_tree))
454 {
455 case INTEGER_TYPE:
456 case REAL_TYPE:
457 case FIXED_POINT_TYPE:
458 case ENUMERAL_TYPE:
459 case BOOLEAN_TYPE:
460 t = TYPE_MIN_VALUE (new_tree);
461 if (t && TREE_CODE (t) != INTEGER_CST)
462 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
463
464 t = TYPE_MAX_VALUE (new_tree);
465 if (t && TREE_CODE (t) != INTEGER_CST)
466 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
467 return new_tree;
468
469 case FUNCTION_TYPE:
470 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
471 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
472 return new_tree;
473
474 case ARRAY_TYPE:
475 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
476 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
477 break;
478
479 case RECORD_TYPE:
480 case UNION_TYPE:
481 case QUAL_UNION_TYPE:
482 {
483 tree f, nf = NULL;
484
485 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
486 {
487 t = remap_decl (f, id);
488 DECL_CONTEXT (t) = new_tree;
489 DECL_CHAIN (t) = nf;
490 nf = t;
491 }
492 TYPE_FIELDS (new_tree) = nreverse (nf);
493 }
494 break;
495
496 case OFFSET_TYPE:
497 default:
498 /* Shouldn't have been thought variable sized. */
499 gcc_unreachable ();
500 }
501
502 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
503 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
504
505 return new_tree;
506 }
507
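/* Remap TYPE through ID's decl_map. A type that is not variably modified
maps to itself; otherwise a fresh copy is built by remap_type_1. */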
508 tree
509 remap_type (tree type, copy_body_data *id)
510 {
511 tree *node;
512 tree tmp;
513
514 if (type == NULL)
515 return type;
516
517 /* See if we have remapped this type. */
518 node = (tree *) pointer_map_contains (id->decl_map, type);
519 if (node)
520 return *node;
521
522 /* The type only needs remapping if it's variably modified. */
523 if (! variably_modified_type_p (type, id->src_fn))
524 {
525 insert_decl_map (id, type, type);
526 return type;
527 }
528
529 id->remapping_type_depth++;
530 tmp = remap_type_1 (type, id);
531 id->remapping_type_depth--;
532
533 return tmp;
534 }
535
536 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS. */
537
538 static bool
539 can_be_nonlocal (tree decl, copy_body_data *id)
540 {
541 /* We cannot duplicate function decls. */
542 if (TREE_CODE (decl) == FUNCTION_DECL)
543 return true;
544
545 /* Local static vars must be non-local or we get multiple declaration
546 problems. */
547 if (TREE_CODE (decl) == VAR_DECL
548 && !auto_var_in_fn_p (decl, id->src_fn))
549 return true;
550
551 return false;
552 }
553
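/* Remap the chain of declarations DECLS using ID. Declarations that stay
nonlocal (see can_be_nonlocal) or that map to nothing may be recorded in
*NONLOCALIZED_LIST instead. Return the new chain. */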
554 static tree
555 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
556 copy_body_data *id)
557 {
558 tree old_var;
559 tree new_decls = NULL_TREE;
560
561 /* Remap its variables. */
562 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
563 {
564 tree new_var;
565
566 if (can_be_nonlocal (old_var, id))
567 {
568 /* We need to add this variable to the local decls as otherwise
569 nothing else will do so. */
570 if (TREE_CODE (old_var) == VAR_DECL
571 && ! DECL_EXTERNAL (old_var))
572 add_local_decl (cfun, old_var);
573 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
574 && !DECL_IGNORED_P (old_var)
575 && nonlocalized_list)
576 vec_safe_push (*nonlocalized_list, old_var);
577 continue;
578 }
579
580 /* Remap the variable. */
581 new_var = remap_decl (old_var, id);
582
583 /* If we didn't remap this variable, we can't mess with its
584 TREE_CHAIN. If we remapped this variable to the return slot, it's
585 already declared somewhere else, so don't declare it here. */
586
587 if (new_var == id->retvar)
588 ;
589 else if (!new_var)
590 {
591 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
592 && !DECL_IGNORED_P (old_var)
593 && nonlocalized_list)
594 vec_safe_push (*nonlocalized_list, old_var);
595 }
596 else
597 {
598 gcc_assert (DECL_P (new_var));
599 DECL_CHAIN (new_var) = new_decls;
600 new_decls = new_var;
601
602 /* Also copy value-expressions. */
603 if (TREE_CODE (new_var) == VAR_DECL
604 && DECL_HAS_VALUE_EXPR_P (new_var))
605 {
606 tree tem = DECL_VALUE_EXPR (new_var);
607 bool old_regimplify = id->regimplify;
608 id->remapping_type_depth++;
609 walk_tree (&tem, copy_tree_body_r, id, NULL);
610 id->remapping_type_depth--;
611 id->regimplify = old_regimplify;
612 SET_DECL_VALUE_EXPR (new_var, tem);
613 }
614 }
615 }
616
617 return nreverse (new_decls);
618 }
619
620 /* Copy the BLOCK to contain remapped versions of the variables
621 therein. And hook the new block into the block-tree. */
622
623 static void
624 remap_block (tree *block, copy_body_data *id)
625 {
626 tree old_block;
627 tree new_block;
628
629 /* Make the new block. */
630 old_block = *block;
631 new_block = make_node (BLOCK);
632 TREE_USED (new_block) = TREE_USED (old_block);
633 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
634 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
635 BLOCK_NONLOCALIZED_VARS (new_block)
636 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
637 *block = new_block;
638
639 /* Remap its variables. */
640 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
641 &BLOCK_NONLOCALIZED_VARS (new_block),
642 id);
643
644 if (id->transform_lang_insert_block)
645 id->transform_lang_insert_block (new_block);
646
647 /* Remember the remapped block. */
648 insert_decl_map (id, old_block, new_block);
649 }
650
651 /* Copy the whole block tree and root it in id->block. */
652 static tree
653 remap_blocks (tree block, copy_body_data *id)
654 {
655 tree t;
656 tree new_tree = block;
657
658 if (!block)
659 return NULL;
660
661 remap_block (&new_tree, id);
662 gcc_assert (new_tree != block);
663 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
664 prepend_lexical_block (new_tree, remap_blocks (t, id));
665 /* Blocks are in arbitrary order, but make things slightly prettier and do
666 not swap order when producing a copy. */
667 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
668 return new_tree;
669 }
670
671 /* Remap the block tree rooted at BLOCK to nothing. */
672 static void
673 remap_blocks_to_null (tree block, copy_body_data *id)
674 {
675 tree t;
676 insert_decl_map (id, block, NULL_TREE);
677 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
678 remap_blocks_to_null (t, id);
679 }
680
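/* Replace the STATEMENT_LIST at *TP with a copy of its statements, copying
nested STATEMENT_LISTs as well. */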
681 static void
682 copy_statement_list (tree *tp)
683 {
684 tree_stmt_iterator oi, ni;
685 tree new_tree;
686
687 new_tree = alloc_stmt_list ();
688 ni = tsi_start (new_tree);
689 oi = tsi_start (*tp);
690 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
691 *tp = new_tree;
692
693 for (; !tsi_end_p (oi); tsi_next (&oi))
694 {
695 tree stmt = tsi_stmt (oi);
696 if (TREE_CODE (stmt) == STATEMENT_LIST)
697 /* This copy is not redundant; tsi_link_after will smash this
698 STATEMENT_LIST into the end of the one we're building, and we
699 don't want to do that with the original. */
700 copy_statement_list (&stmt);
701 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
702 }
703 }
704
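/* Copy the BIND_EXPR at *TP as part of copy_tree_body_r, remapping its
block and its local variables using ID. */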
705 static void
706 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
707 {
708 tree block = BIND_EXPR_BLOCK (*tp);
709 /* Copy (and replace) the statement. */
710 copy_tree_r (tp, walk_subtrees, NULL);
711 if (block)
712 {
713 remap_block (&block, id);
714 BIND_EXPR_BLOCK (*tp) = block;
715 }
716
717 if (BIND_EXPR_VARS (*tp))
718 /* This will remap a lot of the same decls again, but this should be
719 harmless. */
720 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
721 }
722
723
724 /* Create a new gimple_seq by remapping all the statements in BODY
725 using the inlining information in ID. */
726
727 static gimple_seq
728 remap_gimple_seq (gimple_seq body, copy_body_data *id)
729 {
730 gimple_stmt_iterator si;
731 gimple_seq new_body = NULL;
732
733 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
734 {
735 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
736 gimple_seq_add_stmt (&new_body, new_stmt);
737 }
738
739 return new_body;
740 }
741
742
743 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
744 block using the mapping information in ID. */
745
746 static gimple
747 copy_gimple_bind (gimple stmt, copy_body_data *id)
748 {
749 gimple new_bind;
750 tree new_block, new_vars;
751 gimple_seq body, new_body;
752
753 /* Copy the statement. Note that we purposely don't use copy_stmt
754 here because we need to remap statements as we copy. */
755 body = gimple_bind_body (stmt);
756 new_body = remap_gimple_seq (body, id);
757
758 new_block = gimple_bind_block (stmt);
759 if (new_block)
760 remap_block (&new_block, id);
761
762 /* This will remap a lot of the same decls again, but this should be
763 harmless. */
764 new_vars = gimple_bind_vars (stmt);
765 if (new_vars)
766 new_vars = remap_decls (new_vars, NULL, id);
767
768 new_bind = gimple_build_bind (new_vars, new_body, new_block);
769
770 return new_bind;
771 }
772
773 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
774
775 static bool
776 is_parm (tree decl)
777 {
778 if (TREE_CODE (decl) == SSA_NAME)
779 {
780 decl = SSA_NAME_VAR (decl);
781 if (!decl)
782 return false;
783 }
784
785 return (TREE_CODE (decl) == PARM_DECL);
786 }
787
788 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
789 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
790 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
791 recursing into the child nodes of *TP. */
792
793 static tree
794 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
795 {
796 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
797 copy_body_data *id = (copy_body_data *) wi_p->info;
798 tree fn = id->src_fn;
799
800 if (TREE_CODE (*tp) == SSA_NAME)
801 {
802 *tp = remap_ssa_name (*tp, id);
803 *walk_subtrees = 0;
804 return NULL;
805 }
806 else if (auto_var_in_fn_p (*tp, fn))
807 {
808 /* Local variables and labels need to be replaced by equivalent
809 variables. We don't want to copy static variables; there's
810 only one of those, no matter how many times we inline the
811 containing function. Similarly for globals from an outer
812 function. */
813 tree new_decl;
814
815 /* Remap the declaration. */
816 new_decl = remap_decl (*tp, id);
817 gcc_assert (new_decl);
818 /* Replace this variable with the copy. */
819 STRIP_TYPE_NOPS (new_decl);
820 /* ??? The C++ frontend uses void * pointer zero to initialize
821 any other type. This confuses the middle-end type verification.
822 As cloned bodies do not go through gimplification again the fixup
823 there doesn't trigger. */
824 if (TREE_CODE (new_decl) == INTEGER_CST
825 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
826 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
827 *tp = new_decl;
828 *walk_subtrees = 0;
829 }
830 else if (TREE_CODE (*tp) == STATEMENT_LIST)
831 gcc_unreachable ();
832 else if (TREE_CODE (*tp) == SAVE_EXPR)
833 gcc_unreachable ();
834 else if (TREE_CODE (*tp) == LABEL_DECL
835 && (!DECL_CONTEXT (*tp)
836 || decl_function_context (*tp) == id->src_fn))
837 /* These may need to be remapped for EH handling. */
838 *tp = remap_decl (*tp, id);
839 else if (TREE_CODE (*tp) == FIELD_DECL)
840 {
841 /* If the enclosing record type is variably_modified_type_p, the field
842 has already been remapped. Otherwise, it need not be. */
843 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
844 if (n)
845 *tp = *n;
846 *walk_subtrees = 0;
847 }
848 else if (TYPE_P (*tp))
849 /* Types may need remapping as well. */
850 *tp = remap_type (*tp, id);
851 else if (CONSTANT_CLASS_P (*tp))
852 {
853 /* If this is a constant, we have to copy the node iff the type
854 will be remapped. copy_tree_r will not copy a constant. */
855 tree new_type = remap_type (TREE_TYPE (*tp), id);
856
857 if (new_type == TREE_TYPE (*tp))
858 *walk_subtrees = 0;
859
860 else if (TREE_CODE (*tp) == INTEGER_CST)
861 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
862 TREE_INT_CST_HIGH (*tp));
863 else
864 {
865 *tp = copy_node (*tp);
866 TREE_TYPE (*tp) = new_type;
867 }
868 }
869 else
870 {
871 /* Otherwise, just copy the node. Note that copy_tree_r already
872 knows not to copy VAR_DECLs, etc., so this is safe. */
873
874 if (TREE_CODE (*tp) == MEM_REF)
875 {
876 /* We need to re-canonicalize MEM_REFs from inline substitutions
877 that can happen when a pointer argument is an ADDR_EXPR.
878 Recurse here manually to allow that. */
879 tree ptr = TREE_OPERAND (*tp, 0);
880 tree type = remap_type (TREE_TYPE (*tp), id);
881 tree old = *tp;
882 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
883 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
884 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
885 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
886 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
887 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
888 remapped a parameter as the property might be valid only
889 for the parameter itself. */
890 if (TREE_THIS_NOTRAP (old)
891 && (!is_parm (TREE_OPERAND (old, 0))
892 || (!id->transform_parameter && is_parm (ptr))))
893 TREE_THIS_NOTRAP (*tp) = 1;
894 *walk_subtrees = 0;
895 return NULL;
896 }
897
898 /* Here is the "usual case". Copy this tree node, and then
899 tweak some special cases. */
900 copy_tree_r (tp, walk_subtrees, NULL);
901
902 if (TREE_CODE (*tp) != OMP_CLAUSE)
903 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
904
905 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
906 {
907 /* The copied TARGET_EXPR has never been expanded, even if the
908 original node was expanded already. */
909 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
910 TREE_OPERAND (*tp, 3) = NULL_TREE;
911 }
912 else if (TREE_CODE (*tp) == ADDR_EXPR)
913 {
914 /* Variable substitution need not be simple. In particular,
915 the MEM_REF substitution above. Make sure that
916 TREE_CONSTANT and friends are up-to-date. */
917 int invariant = is_gimple_min_invariant (*tp);
918 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
919 recompute_tree_invariant_for_addr_expr (*tp);
920
921 /* If this used to be invariant, but is not any longer,
922 then regimplification is probably needed. */
923 if (invariant && !is_gimple_min_invariant (*tp))
924 id->regimplify = true;
925
926 *walk_subtrees = 0;
927 }
928 }
929
930 /* Update the TREE_BLOCK for the cloned expr. */
931 if (EXPR_P (*tp))
932 {
933 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
934 tree old_block = TREE_BLOCK (*tp);
935 if (old_block)
936 {
937 tree *n;
938 n = (tree *) pointer_map_contains (id->decl_map,
939 TREE_BLOCK (*tp));
940 if (n)
941 new_block = *n;
942 }
943 TREE_SET_BLOCK (*tp, new_block);
944 }
945
946 /* Keep iterating. */
947 return NULL_TREE;
948 }
949
950
951 /* Called via walk_tree when copying a function body. DATA is really a
952 `copy_body_data *'. */
953
954 tree
955 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
956 {
957 copy_body_data *id = (copy_body_data *) data;
958 tree fn = id->src_fn;
959 tree new_block;
960
961 /* Begin by recognizing trees that we'll completely rewrite for the
962 inlining context. Our output for these trees is completely
963 different from our input (e.g. RETURN_EXPR is deleted, and morphs
964 into an edge). Further down, we'll handle trees that get
965 duplicated and/or tweaked. */
966
967 /* When requested, RETURN_EXPRs should be transformed to just the
968 contained MODIFY_EXPR. The branch semantics of the return will
969 be handled elsewhere by manipulating the CFG rather than a statement. */
970 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
971 {
972 tree assignment = TREE_OPERAND (*tp, 0);
973
974 /* If we're returning something, just turn that into an
975 assignment into the equivalent of the original RESULT_DECL.
976 If the "assignment" is just the result decl, the result
977 decl has already been set (e.g. a recent "foo (&result_decl,
978 ...)"); just toss the entire RETURN_EXPR. */
979 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
980 {
981 /* Replace the RETURN_EXPR with (a copy of) the
982 MODIFY_EXPR hanging underneath. */
983 *tp = copy_node (assignment);
984 }
985 else /* Else the RETURN_EXPR returns no value. */
986 {
987 *tp = NULL;
988 return (tree) (void *)1;
989 }
990 }
991 else if (TREE_CODE (*tp) == SSA_NAME)
992 {
993 *tp = remap_ssa_name (*tp, id);
994 *walk_subtrees = 0;
995 return NULL;
996 }
997
998 /* Local variables and labels need to be replaced by equivalent
999 variables. We don't want to copy static variables; there's only
1000 one of those, no matter how many times we inline the containing
1001 function. Similarly for globals from an outer function. */
1002 else if (auto_var_in_fn_p (*tp, fn))
1003 {
1004 tree new_decl;
1005
1006 /* Remap the declaration. */
1007 new_decl = remap_decl (*tp, id);
1008 gcc_assert (new_decl);
1009 /* Replace this variable with the copy. */
1010 STRIP_TYPE_NOPS (new_decl);
1011 *tp = new_decl;
1012 *walk_subtrees = 0;
1013 }
1014 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1015 copy_statement_list (tp);
1016 else if (TREE_CODE (*tp) == SAVE_EXPR
1017 || TREE_CODE (*tp) == TARGET_EXPR)
1018 remap_save_expr (tp, id->decl_map, walk_subtrees);
1019 else if (TREE_CODE (*tp) == LABEL_DECL
1020 && (! DECL_CONTEXT (*tp)
1021 || decl_function_context (*tp) == id->src_fn))
1022 /* These may need to be remapped for EH handling. */
1023 *tp = remap_decl (*tp, id);
1024 else if (TREE_CODE (*tp) == BIND_EXPR)
1025 copy_bind_expr (tp, walk_subtrees, id);
1026 /* Types may need remapping as well. */
1027 else if (TYPE_P (*tp))
1028 *tp = remap_type (*tp, id);
1029
1030 /* If this is a constant, we have to copy the node iff the type will be
1031 remapped. copy_tree_r will not copy a constant. */
1032 else if (CONSTANT_CLASS_P (*tp))
1033 {
1034 tree new_type = remap_type (TREE_TYPE (*tp), id);
1035
1036 if (new_type == TREE_TYPE (*tp))
1037 *walk_subtrees = 0;
1038
1039 else if (TREE_CODE (*tp) == INTEGER_CST)
1040 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1041 TREE_INT_CST_HIGH (*tp));
1042 else
1043 {
1044 *tp = copy_node (*tp);
1045 TREE_TYPE (*tp) = new_type;
1046 }
1047 }
1048
1049 /* Otherwise, just copy the node. Note that copy_tree_r already
1050 knows not to copy VAR_DECLs, etc., so this is safe. */
1051 else
1052 {
1053 /* Here we handle trees that are not completely rewritten.
1054 First we detect some inlining-induced bogosities for
1055 discarding. */
1056 if (TREE_CODE (*tp) == MODIFY_EXPR
1057 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1058 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1059 {
1060 /* Some assignments VAR = VAR; don't generate any rtl code
1061 and thus don't count as variable modification. Avoid
1062 keeping bogosities like 0 = 0. */
1063 tree decl = TREE_OPERAND (*tp, 0), value;
1064 tree *n;
1065
1066 n = (tree *) pointer_map_contains (id->decl_map, decl);
1067 if (n)
1068 {
1069 value = *n;
1070 STRIP_TYPE_NOPS (value);
1071 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1072 {
1073 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1074 return copy_tree_body_r (tp, walk_subtrees, data);
1075 }
1076 }
1077 }
1078 else if (TREE_CODE (*tp) == INDIRECT_REF)
1079 {
1080 /* Get rid of *& from inline substitutions that can happen when a
1081 pointer argument is an ADDR_EXPR. */
1082 tree decl = TREE_OPERAND (*tp, 0);
1083 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1084 if (n)
1085 {
1086 /* If we happen to get an ADDR_EXPR in n->value, strip
1087 it manually here as we'll eventually get ADDR_EXPRs
1088 which lie about their types pointed to. In this case
1089 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1090 but we absolutely rely on that. As fold_indirect_ref
1091 does other useful transformations, try that first, though. */
1092 tree type = TREE_TYPE (*tp);
1093 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1094 tree old = *tp;
1095 *tp = gimple_fold_indirect_ref (ptr);
1096 if (! *tp)
1097 {
1098 if (TREE_CODE (ptr) == ADDR_EXPR)
1099 {
1100 *tp
1101 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1102 /* ??? We should either assert here or build
1103 a VIEW_CONVERT_EXPR instead of blindly leaking
1104 incompatible types to our IL. */
1105 if (! *tp)
1106 *tp = TREE_OPERAND (ptr, 0);
1107 }
1108 else
1109 {
1110 *tp = build1 (INDIRECT_REF, type, ptr);
1111 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1112 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1113 TREE_READONLY (*tp) = TREE_READONLY (old);
1114 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1115 have remapped a parameter as the property might be
1116 valid only for the parameter itself. */
1117 if (TREE_THIS_NOTRAP (old)
1118 && (!is_parm (TREE_OPERAND (old, 0))
1119 || (!id->transform_parameter && is_parm (ptr))))
1120 TREE_THIS_NOTRAP (*tp) = 1;
1121 }
1122 }
1123 *walk_subtrees = 0;
1124 return NULL;
1125 }
1126 }
1127 else if (TREE_CODE (*tp) == MEM_REF)
1128 {
1129 /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 that can happen when a pointer argument is an ADDR_EXPR.
1131 Recurse here manually to allow that. */
1132 tree ptr = TREE_OPERAND (*tp, 0);
1133 tree type = remap_type (TREE_TYPE (*tp), id);
1134 tree old = *tp;
1135 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1136 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1141 remapped a parameter as the property might be valid only
1142 for the parameter itself. */
1143 if (TREE_THIS_NOTRAP (old)
1144 && (!is_parm (TREE_OPERAND (old, 0))
1145 || (!id->transform_parameter && is_parm (ptr))))
1146 TREE_THIS_NOTRAP (*tp) = 1;
1147 *walk_subtrees = 0;
1148 return NULL;
1149 }
1150
1151 /* Here is the "usual case". Copy this tree node, and then
1152 tweak some special cases. */
1153 copy_tree_r (tp, walk_subtrees, NULL);
1154
1155 /* If EXPR has a block defined, map it to the newly constructed block.
1156 When inlining we want EXPRs without a block to appear in the block
1157 of the function call if we are not remapping a type. */
1158 if (EXPR_P (*tp))
1159 {
1160 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1161 if (TREE_BLOCK (*tp))
1162 {
1163 tree *n;
1164 n = (tree *) pointer_map_contains (id->decl_map,
1165 TREE_BLOCK (*tp));
1166 if (n)
1167 new_block = *n;
1168 }
1169 TREE_SET_BLOCK (*tp, new_block);
1170 }
1171
1172 if (TREE_CODE (*tp) != OMP_CLAUSE)
1173 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1174
1175 /* The copied TARGET_EXPR has never been expanded, even if the
1176 original node was expanded already. */
1177 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1178 {
1179 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1180 TREE_OPERAND (*tp, 3) = NULL_TREE;
1181 }
1182
1183 /* Variable substitution need not be simple. In particular, the
1184 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1185 and friends are up-to-date. */
1186 else if (TREE_CODE (*tp) == ADDR_EXPR)
1187 {
1188 int invariant = is_gimple_min_invariant (*tp);
1189 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1190
1191 /* Handle the case where we substituted an INDIRECT_REF
1192 into the operand of the ADDR_EXPR. */
1193 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1194 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1195 else
1196 recompute_tree_invariant_for_addr_expr (*tp);
1197
1198 /* If this used to be invariant, but is not any longer,
1199 then regimplification is probably needed. */
1200 if (invariant && !is_gimple_min_invariant (*tp))
1201 id->regimplify = true;
1202
1203 *walk_subtrees = 0;
1204 }
1205 }
1206
1207 /* Keep iterating. */
1208 return NULL_TREE;
1209 }
1210
1211 /* Helper for remap_gimple_stmt. Given an EH region number for the
1212 source function, map that to the duplicate EH region number in
1213 the destination function. */
1214
1215 static int
1216 remap_eh_region_nr (int old_nr, copy_body_data *id)
1217 {
1218 eh_region old_r, new_r;
1219 void **slot;
1220
1221 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1222 slot = pointer_map_contains (id->eh_map, old_r);
1223 new_r = (eh_region) *slot;
1224
1225 return new_r->index;
1226 }
1227
1228 /* Similar, but operate on INTEGER_CSTs. */
1229
1230 static tree
1231 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1232 {
1233 int old_nr, new_nr;
1234
1235 old_nr = tree_to_shwi (old_t_nr);
1236 new_nr = remap_eh_region_nr (old_nr, id);
1237
1238 return build_int_cst (integer_type_node, new_nr);
1239 }
1240
1241 /* Helper for copy_bb. Remap statement STMT using the inlining
1242 information in ID. Return the new statement copy. */
1243
1244 static gimple
1245 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1246 {
1247 gimple copy = NULL;
1248 struct walk_stmt_info wi;
1249 bool skip_first = false;
1250
1251 /* Begin by recognizing trees that we'll completely rewrite for the
1252 inlining context. Our output for these trees is completely
1253 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1254 into an edge). Further down, we'll handle trees that get
1255 duplicated and/or tweaked. */
1256
1257 /* When requested, GIMPLE_RETURNs should be transformed to just the
1258 contained GIMPLE_ASSIGN. The branch semantics of the return will
1259 be handled elsewhere by manipulating the CFG rather than the
1260 statement. */
1261 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1262 {
1263 tree retval = gimple_return_retval (stmt);
1264
1265 /* If we're returning something, just turn that into an
1266 assignment into the equivalent of the original RESULT_DECL.
1267 If RETVAL is just the result decl, the result decl has
1268 already been set (e.g. a recent "foo (&result_decl, ...)");
1269 just toss the entire GIMPLE_RETURN. */
1270 if (retval
1271 && (TREE_CODE (retval) != RESULT_DECL
1272 && (TREE_CODE (retval) != SSA_NAME
1273 || ! SSA_NAME_VAR (retval)
1274 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1275 {
1276 copy = gimple_build_assign (id->do_not_unshare
1277 ? id->retvar : unshare_expr (id->retvar),
1278 retval);
1279 /* id->retvar is already substituted. Skip it on later remapping. */
1280 skip_first = true;
1281 }
1282 else
1283 return gimple_build_nop ();
1284 }
1285 else if (gimple_has_substatements (stmt))
1286 {
1287 gimple_seq s1, s2;
1288
1289 /* When cloning bodies from the C++ front end, we will be handed bodies
1290 in High GIMPLE form. Handle here all the High GIMPLE statements that
1291 have embedded statements. */
1292 switch (gimple_code (stmt))
1293 {
1294 case GIMPLE_BIND:
1295 copy = copy_gimple_bind (stmt, id);
1296 break;
1297
1298 case GIMPLE_CATCH:
1299 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1300 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1301 break;
1302
1303 case GIMPLE_EH_FILTER:
1304 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1305 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1306 break;
1307
1308 case GIMPLE_TRY:
1309 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1310 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1311 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1312 break;
1313
1314 case GIMPLE_WITH_CLEANUP_EXPR:
1315 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1316 copy = gimple_build_wce (s1);
1317 break;
1318
1319 case GIMPLE_OMP_PARALLEL:
1320 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1321 copy = gimple_build_omp_parallel
1322 (s1,
1323 gimple_omp_parallel_clauses (stmt),
1324 gimple_omp_parallel_child_fn (stmt),
1325 gimple_omp_parallel_data_arg (stmt));
1326 break;
1327
1328 case GIMPLE_OMP_TASK:
1329 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1330 copy = gimple_build_omp_task
1331 (s1,
1332 gimple_omp_task_clauses (stmt),
1333 gimple_omp_task_child_fn (stmt),
1334 gimple_omp_task_data_arg (stmt),
1335 gimple_omp_task_copy_fn (stmt),
1336 gimple_omp_task_arg_size (stmt),
1337 gimple_omp_task_arg_align (stmt));
1338 break;
1339
1340 case GIMPLE_OMP_FOR:
1341 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1342 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1343 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1344 gimple_omp_for_clauses (stmt),
1345 gimple_omp_for_collapse (stmt), s2);
1346 {
1347 size_t i;
1348 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1349 {
1350 gimple_omp_for_set_index (copy, i,
1351 gimple_omp_for_index (stmt, i));
1352 gimple_omp_for_set_initial (copy, i,
1353 gimple_omp_for_initial (stmt, i));
1354 gimple_omp_for_set_final (copy, i,
1355 gimple_omp_for_final (stmt, i));
1356 gimple_omp_for_set_incr (copy, i,
1357 gimple_omp_for_incr (stmt, i));
1358 gimple_omp_for_set_cond (copy, i,
1359 gimple_omp_for_cond (stmt, i));
1360 }
1361 }
1362 break;
1363
1364 case GIMPLE_OMP_MASTER:
1365 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1366 copy = gimple_build_omp_master (s1);
1367 break;
1368
1369 case GIMPLE_OMP_TASKGROUP:
1370 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1371 copy = gimple_build_omp_taskgroup (s1);
1372 break;
1373
1374 case GIMPLE_OMP_ORDERED:
1375 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1376 copy = gimple_build_omp_ordered (s1);
1377 break;
1378
1379 case GIMPLE_OMP_SECTION:
1380 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1381 copy = gimple_build_omp_section (s1);
1382 break;
1383
1384 case GIMPLE_OMP_SECTIONS:
1385 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1386 copy = gimple_build_omp_sections
1387 (s1, gimple_omp_sections_clauses (stmt));
1388 break;
1389
1390 case GIMPLE_OMP_SINGLE:
1391 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1392 copy = gimple_build_omp_single
1393 (s1, gimple_omp_single_clauses (stmt));
1394 break;
1395
1396 case GIMPLE_OMP_TARGET:
1397 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1398 copy = gimple_build_omp_target
1399 (s1, gimple_omp_target_kind (stmt),
1400 gimple_omp_target_clauses (stmt));
1401 break;
1402
1403 case GIMPLE_OMP_TEAMS:
1404 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1405 copy = gimple_build_omp_teams
1406 (s1, gimple_omp_teams_clauses (stmt));
1407 break;
1408
1409 case GIMPLE_OMP_CRITICAL:
1410 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1411 copy
1412 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1413 break;
1414
1415 case GIMPLE_TRANSACTION:
1416 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1417 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1418 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1419 break;
1420
1421 default:
1422 gcc_unreachable ();
1423 }
1424 }
1425 else
1426 {
1427 if (gimple_assign_copy_p (stmt)
1428 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1429 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1430 {
1431 /* Here we handle statements that are not completely rewritten.
1432 First we detect some inlining-induced bogosities for
1433 discarding. */
1434
1435 /* Some assignments VAR = VAR; don't generate any rtl code
1436 and thus don't count as variable modification. Avoid
1437 keeping bogosities like 0 = 0. */
1438 tree decl = gimple_assign_lhs (stmt), value;
1439 tree *n;
1440
1441 n = (tree *) pointer_map_contains (id->decl_map, decl);
1442 if (n)
1443 {
1444 value = *n;
1445 STRIP_TYPE_NOPS (value);
1446 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1447 return gimple_build_nop ();
1448 }
1449 }
1450
1451 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1452 in a block that we aren't copying during tree_function_versioning,
1453 just drop the clobber stmt. */
1454 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1455 {
1456 tree lhs = gimple_assign_lhs (stmt);
1457 if (TREE_CODE (lhs) == MEM_REF
1458 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1459 {
1460 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1461 if (gimple_bb (def_stmt)
1462 && !bitmap_bit_p (id->blocks_to_copy,
1463 gimple_bb (def_stmt)->index))
1464 return gimple_build_nop ();
1465 }
1466 }
1467
1468 if (gimple_debug_bind_p (stmt))
1469 {
1470 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1471 gimple_debug_bind_get_value (stmt),
1472 stmt);
1473 id->debug_stmts.safe_push (copy);
1474 return copy;
1475 }
1476 if (gimple_debug_source_bind_p (stmt))
1477 {
1478 copy = gimple_build_debug_source_bind
1479 (gimple_debug_source_bind_get_var (stmt),
1480 gimple_debug_source_bind_get_value (stmt), stmt);
1481 id->debug_stmts.safe_push (copy);
1482 return copy;
1483 }
1484
1485 /* Create a new deep copy of the statement. */
1486 copy = gimple_copy (stmt);
1487
1488 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1489 RESX and EH_DISPATCH. */
1490 if (id->eh_map)
1491 switch (gimple_code (copy))
1492 {
1493 case GIMPLE_CALL:
1494 {
1495 tree r, fndecl = gimple_call_fndecl (copy);
1496 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1497 switch (DECL_FUNCTION_CODE (fndecl))
1498 {
1499 case BUILT_IN_EH_COPY_VALUES:
1500 r = gimple_call_arg (copy, 1);
1501 r = remap_eh_region_tree_nr (r, id);
1502 gimple_call_set_arg (copy, 1, r);
1503 /* FALLTHRU */
1504
1505 case BUILT_IN_EH_POINTER:
1506 case BUILT_IN_EH_FILTER:
1507 r = gimple_call_arg (copy, 0);
1508 r = remap_eh_region_tree_nr (r, id);
1509 gimple_call_set_arg (copy, 0, r);
1510 break;
1511
1512 default:
1513 break;
1514 }
1515
1516 /* Reset alias info if we didn't apply measures to
1517 keep it valid over inlining by setting DECL_PT_UID. */
1518 if (!id->src_cfun->gimple_df
1519 || !id->src_cfun->gimple_df->ipa_pta)
1520 gimple_call_reset_alias_info (copy);
1521 }
1522 break;
1523
1524 case GIMPLE_RESX:
1525 {
1526 int r = gimple_resx_region (copy);
1527 r = remap_eh_region_nr (r, id);
1528 gimple_resx_set_region (copy, r);
1529 }
1530 break;
1531
1532 case GIMPLE_EH_DISPATCH:
1533 {
1534 int r = gimple_eh_dispatch_region (copy);
1535 r = remap_eh_region_nr (r, id);
1536 gimple_eh_dispatch_set_region (copy, r);
1537 }
1538 break;
1539
1540 default:
1541 break;
1542 }
1543 }
1544
1545 /* If STMT has a block defined, map it to the newly constructed
1546 block. */
1547 if (gimple_block (copy))
1548 {
1549 tree *n;
1550 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1551 gcc_assert (n);
1552 gimple_set_block (copy, *n);
1553 }
1554
1555 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1556 return copy;
1557
1558 /* Remap all the operands in COPY. */
1559 memset (&wi, 0, sizeof (wi));
1560 wi.info = id;
1561 if (skip_first)
1562 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1563 else
1564 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1565
1566 /* Clear the copied virtual operands. We are not remapping them here
1567 but are going to recreate them from scratch. */
1568 if (gimple_has_mem_ops (copy))
1569 {
1570 gimple_set_vdef (copy, NULL_TREE);
1571 gimple_set_vuse (copy, NULL_TREE);
1572 }
1573
1574 return copy;
1575 }
1576
1577
1578 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1579 later. */
1580
1581 static basic_block
1582 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1583 gcov_type count_scale)
1584 {
1585 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1586 basic_block copy_basic_block;
1587 tree decl;
1588 gcov_type freq;
1589 basic_block prev;
1590
1591 /* Search for previous copied basic block. */
1592 prev = bb->prev_bb;
1593 while (!prev->aux)
1594 prev = prev->prev_bb;
1595
1596 /* create_basic_block() will append every new block to
1597 basic_block_info automatically. */
1598 copy_basic_block = create_basic_block (NULL, (void *) 0,
1599 (basic_block) prev->aux);
1600 copy_basic_block->count = apply_scale (bb->count, count_scale);
1601
1602 /* We are going to rebuild frequencies from scratch. These values
1603 are of only minor importance for driving canonicalize_loop_headers. */
1604 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1605
1606 /* We recompute frequencies after inlining, so this is quite safe. */
1607 if (freq > BB_FREQ_MAX)
1608 freq = BB_FREQ_MAX;
1609 copy_basic_block->frequency = freq;
1610
1611 copy_gsi = gsi_start_bb (copy_basic_block);
1612
1613 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1614 {
1615 gimple stmt = gsi_stmt (gsi);
1616 gimple orig_stmt = stmt;
1617
1618 id->regimplify = false;
1619 stmt = remap_gimple_stmt (stmt, id);
1620 if (gimple_nop_p (stmt))
1621 continue;
1622
1623 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1624 seq_gsi = copy_gsi;
1625
1626 /* With return slot optimization we can end up with
1627 non-gimple (foo *)&this->m; fix that here. */
1628 if (is_gimple_assign (stmt)
1629 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1630 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1631 {
1632 tree new_rhs;
1633 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1634 gimple_assign_rhs1 (stmt),
1635 true, NULL, false,
1636 GSI_CONTINUE_LINKING);
1637 gimple_assign_set_rhs1 (stmt, new_rhs);
1638 id->regimplify = false;
1639 }
1640
1641 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1642
1643 if (id->regimplify)
1644 gimple_regimplify_operands (stmt, &seq_gsi);
1645
1646 /* If copy_basic_block has been empty at the start of this iteration,
1647 call gsi_start_bb again to get at the newly added statements. */
1648 if (gsi_end_p (copy_gsi))
1649 copy_gsi = gsi_start_bb (copy_basic_block);
1650 else
1651 gsi_next (&copy_gsi);
1652
1653 /* Process the new statement. The call to gimple_regimplify_operands
1654 possibly turned the statement into multiple statements; we
1655 need to process all of them. */
1656 do
1657 {
1658 tree fn;
1659
1660 stmt = gsi_stmt (copy_gsi);
1661 if (is_gimple_call (stmt)
1662 && gimple_call_va_arg_pack_p (stmt)
1663 && id->gimple_call)
1664 {
1665 /* __builtin_va_arg_pack () should be replaced by
1666 all arguments corresponding to ... in the caller. */
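/* For example (purely illustrative): if the inlined function is declared
"int f (int a, ...)" and the call being inlined is "f (1, 2, 3)", then a
call "g (__builtin_va_arg_pack ())" inside f becomes "g (2, 3)". */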
1667 tree p;
1668 gimple new_call;
1669 vec<tree> argarray;
1670 size_t nargs = gimple_call_num_args (id->gimple_call);
1671 size_t n;
1672
1673 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1674 nargs--;
1675
1676 /* Create the new array of arguments. */
1677 n = nargs + gimple_call_num_args (stmt);
1678 argarray.create (n);
1679 argarray.safe_grow_cleared (n);
1680
1681 /* Copy all the arguments before '...' */
1682 memcpy (argarray.address (),
1683 gimple_call_arg_ptr (stmt, 0),
1684 gimple_call_num_args (stmt) * sizeof (tree));
1685
1686 /* Append the arguments passed in '...' */
1687 memcpy (argarray.address () + gimple_call_num_args (stmt),
1688 gimple_call_arg_ptr (id->gimple_call, 0)
1689 + (gimple_call_num_args (id->gimple_call) - nargs),
1690 nargs * sizeof (tree));
1691
1692 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1693 argarray);
1694
1695 argarray.release ();
1696
1697 /* Copy all GIMPLE_CALL flags, location and block, except
1698 GF_CALL_VA_ARG_PACK. */
1699 gimple_call_copy_flags (new_call, stmt);
1700 gimple_call_set_va_arg_pack (new_call, false);
1701 gimple_set_location (new_call, gimple_location (stmt));
1702 gimple_set_block (new_call, gimple_block (stmt));
1703 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1704
1705 gsi_replace (&copy_gsi, new_call, false);
1706 stmt = new_call;
1707 }
1708 else if (is_gimple_call (stmt)
1709 && id->gimple_call
1710 && (decl = gimple_call_fndecl (stmt))
1711 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1712 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1713 {
1714 /* __builtin_va_arg_pack_len () should be replaced by
1715 the number of anonymous arguments. */
1716 size_t nargs = gimple_call_num_args (id->gimple_call);
1717 tree count, p;
1718 gimple new_stmt;
1719
1720 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1721 nargs--;
1722
1723 count = build_int_cst (integer_type_node, nargs);
1724 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1725 gsi_replace (&copy_gsi, new_stmt, false);
1726 stmt = new_stmt;
1727 }
1728
1729 /* Statements produced by inlining can be unfolded, especially
1730 when we have constant propagated some operands. We can't fold
1731 them right now for two reasons:
1732 1) folding requires SSA_NAME_DEF_STMTs to be correct
1733 2) we can't change function calls to builtins.
1734 So we just mark the statement for later folding. We mark
1735 all new statements, instead of just the statements that have changed
1736 by some nontrivial substitution, so even statements made
1737 foldable indirectly are updated. If this turns out to be
1738 expensive, copy_body can be told to watch for nontrivial
1739 changes. */
1740 if (id->statements_to_fold)
1741 pointer_set_insert (id->statements_to_fold, stmt);
1742
1743 /* We're duplicating a CALL_EXPR. Find any corresponding
1744 callgraph edges and update or duplicate them. */
1745 if (is_gimple_call (stmt))
1746 {
1747 struct cgraph_edge *edge;
1748 int flags;
1749
1750 switch (id->transform_call_graph_edges)
1751 {
1752 case CB_CGE_DUPLICATE:
1753 edge = cgraph_edge (id->src_node, orig_stmt);
1754 if (edge)
1755 {
1756 int edge_freq = edge->frequency;
1757 int new_freq;
1758 struct cgraph_edge *old_edge = edge;
1759 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1760 gimple_uid (stmt),
1761 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1762 true);
1763 /* We could also just rescale the frequency, but
1764 doing so would introduce roundoff errors and make
1765 the verifier unhappy. */
1766 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1767 copy_basic_block);
1768
1769 /* Speculative calls consist of two edges - direct and indirect.
1770 Duplicate the whole thing and distribute frequencies accordingly. */
1771 if (edge->speculative)
1772 {
1773 struct cgraph_edge *direct, *indirect;
1774 struct ipa_ref *ref;
1775
1776 gcc_assert (!edge->indirect_unknown_callee);
1777 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1778 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1779 gimple_uid (stmt),
1780 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1781 true);
1782 if (old_edge->frequency + indirect->frequency)
1783 {
1784 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1785 (old_edge->frequency + indirect->frequency)),
1786 CGRAPH_FREQ_MAX);
1787 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1788 (old_edge->frequency + indirect->frequency)),
1789 CGRAPH_FREQ_MAX);
1790 }
1791 ipa_clone_ref (ref, id->dst_node, stmt);
1792 }
1793 else
1794 {
1795 edge->frequency = new_freq;
1796 if (dump_file
1797 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1798 && (edge_freq > edge->frequency + 10
1799 || edge_freq < edge->frequency - 10))
1800 {
1801 fprintf (dump_file, "Edge frequency estimated by "
1802 "cgraph %i diverge from inliner's estimate %i\n",
1803 edge_freq,
1804 edge->frequency);
1805 fprintf (dump_file,
1806 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1807 bb->index,
1808 bb->frequency,
1809 copy_basic_block->frequency);
1810 }
1811 }
1812 }
1813 break;
1814
1815 case CB_CGE_MOVE_CLONES:
1816 cgraph_set_call_stmt_including_clones (id->dst_node,
1817 orig_stmt, stmt);
1818 edge = cgraph_edge (id->dst_node, stmt);
1819 break;
1820
1821 case CB_CGE_MOVE:
1822 edge = cgraph_edge (id->dst_node, orig_stmt);
1823 if (edge)
1824 cgraph_set_call_stmt (edge, stmt);
1825 break;
1826
1827 default:
1828 gcc_unreachable ();
1829 }
1830
1831 /* Constant propagation on arguments done during inlining
1832 may create a new direct call. Produce an edge for it. */
1833 if ((!edge
1834 || (edge->indirect_inlining_edge
1835 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1836 && id->dst_node->definition
1837 && (fn = gimple_call_fndecl (stmt)) != NULL)
1838 {
1839 struct cgraph_node *dest = cgraph_get_node (fn);
1840
1841 /* We have a missing edge in the callgraph. This can happen
1842 when previous inlining turned an indirect call into a
1843 direct call by constant propagating arguments or when we are
1844 producing a dead clone (for further cloning). In all
1845 other cases we hit a bug (incorrect node sharing is the
1846 most common reason for missing edges). */
1847 gcc_assert (!dest->definition
1848 || dest->address_taken
1849 || !id->src_node->definition
1850 || !id->dst_node->definition);
1851 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1852 cgraph_create_edge_including_clones
1853 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1854 compute_call_stmt_bb_frequency (id->dst_node->decl,
1855 copy_basic_block),
1856 CIF_ORIGINALLY_INDIRECT_CALL);
1857 else
1858 cgraph_create_edge (id->dst_node, dest, stmt,
1859 bb->count,
1860 compute_call_stmt_bb_frequency
1861 (id->dst_node->decl,
1862 copy_basic_block))->inline_failed
1863 = CIF_ORIGINALLY_INDIRECT_CALL;
1864 if (dump_file)
1865 {
1866 fprintf (dump_file, "Created new direct edge to %s\n",
1867 dest->name ());
1868 }
1869 }
1870
1871 flags = gimple_call_flags (stmt);
1872 if (flags & ECF_MAY_BE_ALLOCA)
1873 cfun->calls_alloca = true;
1874 if (flags & ECF_RETURNS_TWICE)
1875 cfun->calls_setjmp = true;
1876 }
1877
1878 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1879 id->eh_map, id->eh_lp_nr);
1880
1881 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1882 {
1883 ssa_op_iter i;
1884 tree def;
1885
1886 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1887 if (TREE_CODE (def) == SSA_NAME)
1888 SSA_NAME_DEF_STMT (def) = stmt;
1889 }
1890
1891 gsi_next (&copy_gsi);
1892 }
1893 while (!gsi_end_p (copy_gsi));
1894
1895 copy_gsi = gsi_last_bb (copy_basic_block);
1896 }
1897
1898 return copy_basic_block;
1899 }
1900
1901 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1902 form is quite easy, since the dominator relationship for old basic blocks
1903 does not change.
1904
1905 There is, however, an exception where inlining might change the dominator
1906 relation across EH edges from basic blocks within the inlined function
1907 to landing pads in the function we inline into.
1908
1909 The function fills in PHI_RESULTs of such PHI nodes if they refer
1910 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1911 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1912 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1913 set, and this means that there will be no overlapping live ranges
1914 for the underlying symbol.
1915
1916 This might change in the future if we allow redirecting of EH edges, and
1917 then we might want to change the way we build the CFG pre-inlining to
1918 include all the possible edges. */
1919 static void
1920 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1921 bool can_throw, bool nonlocal_goto)
1922 {
1923 edge e;
1924 edge_iterator ei;
1925
1926 FOR_EACH_EDGE (e, ei, bb->succs)
1927 if (!e->dest->aux
1928 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1929 {
1930 gimple phi;
1931 gimple_stmt_iterator si;
1932
1933 if (!nonlocal_goto)
1934 gcc_assert (e->flags & EDGE_EH);
1935
1936 if (!can_throw)
1937 gcc_assert (!(e->flags & EDGE_EH));
1938
1939 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1940 {
1941 edge re;
1942
1943 phi = gsi_stmt (si);
1944
1945 /* For abnormal goto/call edges the receiver can be the
1946 ENTRY_BLOCK. Do not assert this cannot happen. */
1947
1948 gcc_assert ((e->flags & EDGE_EH)
1949 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1950
1951 re = find_edge (ret_bb, e->dest);
1952 gcc_checking_assert (re);
1953 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1954 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1955
1956 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1957 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1958 }
1959 }
1960 }
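/* In other words: for each such edge E leaving the copied block, the PHI
   arguments in the (non-copied) destination are filled in by reusing the
   arguments already present on the edge RE from RET_BB to that same
   destination, so no full SSA update is needed for these PHIs.  */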
1961
1962
1963 /* Copy edges from BB into its copy constructed earlier, scale profile
1964 accordingly. Edges will be taken care of later. Assume aux
1965 pointers point to the copies of each BB. Return true if any
1966 debug stmts are left after a statement that must end the basic block. */
1967
1968 static bool
1969 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1970 basic_block abnormal_goto_dest)
1971 {
1972 basic_block new_bb = (basic_block) bb->aux;
1973 edge_iterator ei;
1974 edge old_edge;
1975 gimple_stmt_iterator si;
1976 int flags;
1977 bool need_debug_cleanup = false;
1978
1979 /* Use the indices from the original blocks to create edges for the
1980 new ones. */
1981 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1982 if (!(old_edge->flags & EDGE_EH))
1983 {
1984 edge new_edge;
1985
1986 flags = old_edge->flags;
1987
1988 /* Return edges do get a FALLTHRU flag when they get inlined. */
1989 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1990 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
1991 flags |= EDGE_FALLTHRU;
1992 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1993 new_edge->count = apply_scale (old_edge->count, count_scale);
1994 new_edge->probability = old_edge->probability;
1995 }
1996
1997 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1998 return false;
1999
2000 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2001 {
2002 gimple copy_stmt;
2003 bool can_throw, nonlocal_goto;
2004
2005 copy_stmt = gsi_stmt (si);
2006 if (!is_gimple_debug (copy_stmt))
2007 update_stmt (copy_stmt);
2008
2009 /* Do this before the possible split_block. */
2010 gsi_next (&si);
2011
2012 /* If this tree could throw an exception, there are two
2013 cases where we need to add abnormal edge(s): the
2014 tree wasn't in a region and there is a "current
2015 region" in the caller; or the original tree had
2016 EH edges. In both cases split the block after the tree,
2017 and add abnormal edge(s) as needed; we need both
2018 those from the callee and the caller.
2019 We check whether the copy can throw, because the const
2020 propagation can change an INDIRECT_REF which throws
2021 into a COMPONENT_REF which doesn't. If the copy
2022 can throw, the original could also throw. */
2023 can_throw = stmt_can_throw_internal (copy_stmt);
2024 nonlocal_goto
2025 = (stmt_can_make_abnormal_goto (copy_stmt)
2026 && !computed_goto_p (copy_stmt));
2027
2028 if (can_throw || nonlocal_goto)
2029 {
2030 if (!gsi_end_p (si))
2031 {
2032 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2033 gsi_next (&si);
2034 if (gsi_end_p (si))
2035 need_debug_cleanup = true;
2036 }
2037 if (!gsi_end_p (si))
2038 /* Note that bb's predecessor edges aren't necessarily
2039 right at this point; split_block doesn't care. */
2040 {
2041 edge e = split_block (new_bb, copy_stmt);
2042
2043 new_bb = e->dest;
2044 new_bb->aux = e->src->aux;
2045 si = gsi_start_bb (new_bb);
2046 }
2047 }
2048
2049 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2050 make_eh_dispatch_edges (copy_stmt);
2051 else if (can_throw)
2052 make_eh_edges (copy_stmt);
2053
2054 /* If the call we inline cannot make an abnormal goto, do not add
2055 additional abnormal edges but only retain those already present
2056 in the original function body. */
2057 if (abnormal_goto_dest == NULL)
2058 nonlocal_goto = false;
2059 if (nonlocal_goto)
2060 {
2061 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2062
2063 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2064 nonlocal_goto = false;
2065 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2066 in OpenMP regions which aren't allowed to be left abnormally.
2067 So, no need to add abnormal edge in that case. */
2068 else if (is_gimple_call (copy_stmt)
2069 && gimple_call_internal_p (copy_stmt)
2070 && (gimple_call_internal_fn (copy_stmt)
2071 == IFN_ABNORMAL_DISPATCHER)
2072 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2073 nonlocal_goto = false;
2074 else
2075 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2076 }
2077
2078 if ((can_throw || nonlocal_goto)
2079 && gimple_in_ssa_p (cfun))
2080 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2081 can_throw, nonlocal_goto);
2082 }
2083 return need_debug_cleanup;
2084 }
2085
2086 /* Copy the PHIs. All blocks and edges are copied, some blocks
2087 were possibly split and new outgoing EH edges inserted.
2088 BB points to the block of the original function and AUX pointers link
2089 the original and newly copied blocks. */
2090
2091 static void
2092 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2093 {
2094 basic_block const new_bb = (basic_block) bb->aux;
2095 edge_iterator ei;
2096 gimple phi;
2097 gimple_stmt_iterator si;
2098 edge new_edge;
2099 bool inserted = false;
2100
2101 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2102 {
2103 tree res, new_res;
2104 gimple new_phi;
2105
2106 phi = gsi_stmt (si);
2107 res = PHI_RESULT (phi);
2108 new_res = res;
2109 if (!virtual_operand_p (res))
2110 {
2111 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2112 new_phi = create_phi_node (new_res, new_bb);
2113 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2114 {
2115 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2116 tree arg;
2117 tree new_arg;
2118 edge_iterator ei2;
2119 location_t locus;
2120
2121 /* When doing partial cloning, we allow PHIs on the entry block
2122 as long as all the arguments are the same. Find any input
2123 edge to see the argument to copy. */
2124 if (!old_edge)
2125 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2126 if (!old_edge->src->aux)
2127 break;
2128
2129 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2130 new_arg = arg;
2131 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2132 gcc_assert (new_arg);
2133 /* With return slot optimization we can end up with
2134 non-gimple (foo *)&this->m, fix that here. */
2135 if (TREE_CODE (new_arg) != SSA_NAME
2136 && TREE_CODE (new_arg) != FUNCTION_DECL
2137 && !is_gimple_val (new_arg))
2138 {
2139 gimple_seq stmts = NULL;
2140 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2141 gsi_insert_seq_on_edge (new_edge, stmts);
2142 inserted = true;
2143 }
2144 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2145 if (LOCATION_BLOCK (locus))
2146 {
2147 tree *n;
2148 n = (tree *) pointer_map_contains (id->decl_map,
2149 LOCATION_BLOCK (locus));
2150 gcc_assert (n);
2151 if (*n)
2152 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2153 else
2154 locus = LOCATION_LOCUS (locus);
2155 }
2156 else
2157 locus = LOCATION_LOCUS (locus);
2158
2159 add_phi_arg (new_phi, new_arg, new_edge, locus);
2160 }
2161 }
2162 }
2163
2164 /* Commit the delayed edge insertions. */
2165 if (inserted)
2166 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2167 gsi_commit_one_edge_insert (new_edge, NULL);
2168 }
2169
2170
2171 /* Wrapper for remap_decl so it can be used as a callback. */
2172
2173 static tree
2174 remap_decl_1 (tree decl, void *data)
2175 {
2176 return remap_decl (decl, (copy_body_data *) data);
2177 }
2178
2179 /* Build struct function and associated datastructures for the new clone
2180 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2181 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2182
2183 static void
2184 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2185 {
2186 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2187 gcov_type count_scale;
2188
2189 if (!DECL_ARGUMENTS (new_fndecl))
2190 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2191 if (!DECL_RESULT (new_fndecl))
2192 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2193
2194 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2195 count_scale
2196 = GCOV_COMPUTE_SCALE (count,
2197 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2198 else
2199 count_scale = REG_BR_PROB_BASE;
2200
2201 /* Register specific tree functions. */
2202 gimple_register_cfg_hooks ();
2203
2204 /* Get clean struct function. */
2205 push_struct_function (new_fndecl);
2206
2207 /* We will rebuild these, so just sanity check that they are empty. */
2208 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2209 gcc_assert (cfun->local_decls == NULL);
2210 gcc_assert (cfun->cfg == NULL);
2211 gcc_assert (cfun->decl == new_fndecl);
2212
2213 /* Copy items we preserve during cloning. */
2214 cfun->static_chain_decl = src_cfun->static_chain_decl;
2215 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2216 cfun->function_end_locus = src_cfun->function_end_locus;
2217 cfun->curr_properties = src_cfun->curr_properties;
2218 cfun->last_verified = src_cfun->last_verified;
2219 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2220 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2221 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2222 cfun->stdarg = src_cfun->stdarg;
2223 cfun->after_inlining = src_cfun->after_inlining;
2224 cfun->can_throw_non_call_exceptions
2225 = src_cfun->can_throw_non_call_exceptions;
2226 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2227 cfun->returns_struct = src_cfun->returns_struct;
2228 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2229
2230 init_empty_tree_cfg ();
2231
2232 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2233 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2234 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2235 REG_BR_PROB_BASE);
2236 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2237 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2238 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2239 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2240 REG_BR_PROB_BASE);
2241 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2242 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2243 if (src_cfun->eh)
2244 init_eh_for_function ();
2245
2246 if (src_cfun->gimple_df)
2247 {
2248 init_tree_ssa (cfun);
2249 cfun->gimple_df->in_ssa_p = true;
2250 init_ssa_operands (cfun);
2251 }
2252 }
2253
2254 /* Helper function for copy_cfg_body. Move debug stmts from the end
2255 of NEW_BB to the beginning of successor basic blocks when needed. If the
2256 successor has multiple predecessors, reset the debug stmt values,
2257 otherwise keep them. */
2258
2259 static void
2260 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2261 {
2262 edge e;
2263 edge_iterator ei;
2264 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2265
2266 if (gsi_end_p (si)
2267 || gsi_one_before_end_p (si)
2268 || !(stmt_can_throw_internal (gsi_stmt (si))
2269 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2270 return;
2271
2272 FOR_EACH_EDGE (e, ei, new_bb->succs)
2273 {
2274 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2275 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2276 while (is_gimple_debug (gsi_stmt (ssi)))
2277 {
2278 gimple stmt = gsi_stmt (ssi), new_stmt;
2279 tree var;
2280 tree value;
2281
2282 /* For the last edge move the debug stmts instead of copying
2283 them. */
2284 if (ei_one_before_end_p (ei))
2285 {
2286 si = ssi;
2287 gsi_prev (&ssi);
2288 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2289 gimple_debug_bind_reset_value (stmt);
2290 gsi_remove (&si, false);
2291 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2292 continue;
2293 }
2294
2295 if (gimple_debug_bind_p (stmt))
2296 {
2297 var = gimple_debug_bind_get_var (stmt);
2298 if (single_pred_p (e->dest))
2299 {
2300 value = gimple_debug_bind_get_value (stmt);
2301 value = unshare_expr (value);
2302 }
2303 else
2304 value = NULL_TREE;
2305 new_stmt = gimple_build_debug_bind (var, value, stmt);
2306 }
2307 else if (gimple_debug_source_bind_p (stmt))
2308 {
2309 var = gimple_debug_source_bind_get_var (stmt);
2310 value = gimple_debug_source_bind_get_value (stmt);
2311 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2312 }
2313 else
2314 gcc_unreachable ();
2315 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2316 id->debug_stmts.safe_push (new_stmt);
2317 gsi_prev (&ssi);
2318 }
2319 }
2320 }
2321
2322 /* Make a copy of the sub-loops of SRC_PARENT and place them
2323 as siblings of DEST_PARENT. */
2324
2325 static void
2326 copy_loops (copy_body_data *id,
2327 struct loop *dest_parent, struct loop *src_parent)
2328 {
2329 struct loop *src_loop = src_parent->inner;
2330 while (src_loop)
2331 {
2332 if (!id->blocks_to_copy
2333 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2334 {
2335 struct loop *dest_loop = alloc_loop ();
2336
2337 /* Assign the new loop its header and latch and associate
2338 those with the new loop. */
2339 if (src_loop->header != NULL)
2340 {
2341 dest_loop->header = (basic_block)src_loop->header->aux;
2342 dest_loop->header->loop_father = dest_loop;
2343 }
2344 if (src_loop->latch != NULL)
2345 {
2346 dest_loop->latch = (basic_block)src_loop->latch->aux;
2347 dest_loop->latch->loop_father = dest_loop;
2348 }
2349
2350 /* Copy loop meta-data. */
2351 copy_loop_info (src_loop, dest_loop);
2352
2353 /* Finally place it into the loop array and the loop tree. */
2354 place_new_loop (cfun, dest_loop);
2355 flow_loop_tree_node_add (dest_parent, dest_loop);
2356
2357 if (src_loop->simduid)
2358 {
2359 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2360 cfun->has_simduid_loops = true;
2361 }
2362 if (src_loop->force_vect)
2363 {
2364 dest_loop->force_vect = true;
2365 cfun->has_force_vect_loops = true;
2366 }
2367
2368 /* Recurse. */
2369 copy_loops (id, dest_loop, src_loop);
2370 }
2371 src_loop = src_loop->next;
2372 }
2373 }
2374
2375 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2376
2377 void
2378 redirect_all_calls (copy_body_data * id, basic_block bb)
2379 {
2380 gimple_stmt_iterator si;
2381 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2382 {
2383 if (is_gimple_call (gsi_stmt (si)))
2384 {
2385 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2386 if (edge)
2387 cgraph_redirect_edge_call_stmt_to_callee (edge);
2388 }
2389 }
2390 }
2391
2392 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2393 with each bb's frequency. Used when NODE has a 0-weight entry
2394 but we are about to inline it into a non-zero count call bb.
2395 See the comments for handle_missing_profiles() in predict.c for
2396 when this can happen for COMDATs. */
2397
2398 void
2399 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2400 {
2401 basic_block bb;
2402 edge_iterator ei;
2403 edge e;
2404 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2405
2406 FOR_ALL_BB_FN(bb, fn)
2407 {
2408 bb->count = apply_scale (count,
2409 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2410 FOR_EACH_EDGE (e, ei, bb->succs)
2411 e->count = apply_probability (e->src->count, e->probability);
2412 }
2413 }
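/* Worked example of the scaling above (assuming the usual REG_BR_PROB_BASE
   and BB_FREQ_MAX of 10000): with COUNT == 1000 and bb->frequency == 2500,
   GCOV_COMPUTE_SCALE (2500, 10000) yields 2500 and
   apply_scale (1000, 2500) yields 1000 * 2500 / 10000 == 250, i.e. each
   block receives the fraction of COUNT given by its estimated frequency.  */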
2414
2415 /* Make a copy of the body of FN so that it can be inserted inline in
2416 another function. Walks FN via CFG, returns new fndecl. */
2417
2418 static tree
2419 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2420 basic_block entry_block_map, basic_block exit_block_map,
2421 basic_block new_entry)
2422 {
2423 tree callee_fndecl = id->src_fn;
2424 /* Original cfun for the callee, doesn't change. */
2425 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2426 struct function *cfun_to_copy;
2427 basic_block bb;
2428 tree new_fndecl = NULL;
2429 bool need_debug_cleanup = false;
2430 gcov_type count_scale;
2431 int last;
2432 int incoming_frequency = 0;
2433 gcov_type incoming_count = 0;
2434
2435 /* This can happen for COMDAT routines that end up with 0 counts
2436 despite being called (see the comments for handle_missing_profiles()
2437 in predict.c as to why). Apply counts to the blocks in the callee
2438 before inlining, using the guessed edge frequencies, so that we don't
2439 end up with a 0-count inline body which can confuse downstream
2440 optimizations such as function splitting. */
2441 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2442 {
2443 /* Apply the larger of the call bb count and the total incoming
2444 call edge count to the callee. */
2445 gcov_type in_count = 0;
2446 struct cgraph_edge *in_edge;
2447 for (in_edge = id->src_node->callers; in_edge;
2448 in_edge = in_edge->next_caller)
2449 in_count += in_edge->count;
2450 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2451 }
2452
2453 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2454 count_scale
2455 = GCOV_COMPUTE_SCALE (count,
2456 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2457 else
2458 count_scale = REG_BR_PROB_BASE;
2459
2460 /* Register specific tree functions. */
2461 gimple_register_cfg_hooks ();
2462
2463 /* If we are inlining just a region of the function, make sure to connect
2464 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2465 part of a loop, we must compute the frequency and probability of
2466 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2467 probabilities of edges incoming from the nonduplicated region. */
2468 if (new_entry)
2469 {
2470 edge e;
2471 edge_iterator ei;
2472
2473 FOR_EACH_EDGE (e, ei, new_entry->preds)
2474 if (!e->src->aux)
2475 {
2476 incoming_frequency += EDGE_FREQUENCY (e);
2477 incoming_count += e->count;
2478 }
2479 incoming_count = apply_scale (incoming_count, count_scale);
2480 incoming_frequency
2481 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2482 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2483 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2484 }
2485
2486 /* Must have a CFG here at this point. */
2487 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2488 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2489
2490 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2491
2492 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2493 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2494 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2495 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2496
2497 /* Duplicate any exception-handling regions. */
2498 if (cfun->eh)
2499 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2500 remap_decl_1, id);
2501
2502 /* Use aux pointers to map the original blocks to their copies. */
2503 FOR_EACH_BB_FN (bb, cfun_to_copy)
2504 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2505 {
2506 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2507 bb->aux = new_bb;
2508 new_bb->aux = bb;
2509 new_bb->loop_father = entry_block_map->loop_father;
2510 }
2511
2512 last = last_basic_block_for_fn (cfun);
2513
2514 /* Now that we've duplicated the blocks, duplicate their edges. */
2515 basic_block abnormal_goto_dest = NULL;
2516 if (id->gimple_call
2517 && stmt_can_make_abnormal_goto (id->gimple_call))
2518 {
2519 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2520
2521 bb = gimple_bb (id->gimple_call);
2522 gsi_next (&gsi);
2523 if (gsi_end_p (gsi))
2524 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2525 }
2526 FOR_ALL_BB_FN (bb, cfun_to_copy)
2527 if (!id->blocks_to_copy
2528 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2529 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2530 abnormal_goto_dest);
2531
2532 if (new_entry)
2533 {
2534 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2535 e->probability = REG_BR_PROB_BASE;
2536 e->count = incoming_count;
2537 }
2538
2539 /* Duplicate the loop tree, if available and wanted. */
2540 if (loops_for_fn (src_cfun) != NULL
2541 && current_loops != NULL)
2542 {
2543 copy_loops (id, entry_block_map->loop_father,
2544 get_loop (src_cfun, 0));
2545 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2546 loops_state_set (LOOPS_NEED_FIXUP);
2547 }
2548
2549 /* If the loop tree in the source function needed fixup, mark the
2550 destination loop tree for fixup, too. */
2551 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2552 loops_state_set (LOOPS_NEED_FIXUP);
2553
2554 if (gimple_in_ssa_p (cfun))
2555 FOR_ALL_BB_FN (bb, cfun_to_copy)
2556 if (!id->blocks_to_copy
2557 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2558 copy_phis_for_bb (bb, id);
2559
2560 FOR_ALL_BB_FN (bb, cfun_to_copy)
2561 if (bb->aux)
2562 {
2563 if (need_debug_cleanup
2564 && bb->index != ENTRY_BLOCK
2565 && bb->index != EXIT_BLOCK)
2566 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2567 /* Update call edge destinations. This cannot be done before loop
2568 info is updated, because we may split basic blocks. */
2569 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2570 redirect_all_calls (id, (basic_block)bb->aux);
2571 ((basic_block)bb->aux)->aux = NULL;
2572 bb->aux = NULL;
2573 }
2574
2575 /* Zero out AUX fields of blocks newly created during EH edge
2576 insertion. */
2577 for (; last < last_basic_block_for_fn (cfun); last++)
2578 {
2579 if (need_debug_cleanup)
2580 maybe_move_debug_stmts_to_successors (id,
2581 BASIC_BLOCK_FOR_FN (cfun, last));
2582 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2583 /* Update call edge destinations. This cannot be done before loop
2584 info is updated, because we may split basic blocks. */
2585 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2586 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2587 }
2588 entry_block_map->aux = NULL;
2589 exit_block_map->aux = NULL;
2590
2591 if (id->eh_map)
2592 {
2593 pointer_map_destroy (id->eh_map);
2594 id->eh_map = NULL;
2595 }
2596
2597 return new_fndecl;
2598 }
2599
2600 /* Copy the debug STMT using ID. We deal with these statements in a
2601 special way: if any variable in their VALUE expression wasn't
2602 remapped yet, we won't remap it, because that would get decl uids
2603 out of sync, causing codegen differences between -g and -g0. If
2604 this arises, we drop the VALUE expression altogether. */
2605
2606 static void
2607 copy_debug_stmt (gimple stmt, copy_body_data *id)
2608 {
2609 tree t, *n;
2610 struct walk_stmt_info wi;
2611
2612 if (gimple_block (stmt))
2613 {
2614 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2615 gimple_set_block (stmt, n ? *n : id->block);
2616 }
2617
2618 /* Remap all the operands in COPY. */
2619 memset (&wi, 0, sizeof (wi));
2620 wi.info = id;
2621
2622 processing_debug_stmt = 1;
2623
2624 if (gimple_debug_source_bind_p (stmt))
2625 t = gimple_debug_source_bind_get_var (stmt);
2626 else
2627 t = gimple_debug_bind_get_var (stmt);
2628
2629 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2630 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2631 {
2632 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2633 t = *n;
2634 }
2635 else if (TREE_CODE (t) == VAR_DECL
2636 && !is_global_var (t)
2637 && !pointer_map_contains (id->decl_map, t))
2638 /* T is a non-localized variable. */;
2639 else
2640 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2641
2642 if (gimple_debug_bind_p (stmt))
2643 {
2644 gimple_debug_bind_set_var (stmt, t);
2645
2646 if (gimple_debug_bind_has_value_p (stmt))
2647 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2648 remap_gimple_op_r, &wi, NULL);
2649
2650 /* Punt if any decl couldn't be remapped. */
2651 if (processing_debug_stmt < 0)
2652 gimple_debug_bind_reset_value (stmt);
2653 }
2654 else if (gimple_debug_source_bind_p (stmt))
2655 {
2656 gimple_debug_source_bind_set_var (stmt, t);
2657 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2658 remap_gimple_op_r, &wi, NULL);
2659 /* When inlining and source bind refers to one of the optimized
2660 away parameters, change the source bind into normal debug bind
2661 referring to the corresponding DEBUG_EXPR_DECL that should have
2662 been bound before the call stmt. */
2663 t = gimple_debug_source_bind_get_value (stmt);
2664 if (t != NULL_TREE
2665 && TREE_CODE (t) == PARM_DECL
2666 && id->gimple_call)
2667 {
2668 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2669 unsigned int i;
2670 if (debug_args != NULL)
2671 {
2672 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2673 if ((**debug_args)[i] == DECL_ORIGIN (t)
2674 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2675 {
2676 t = (**debug_args)[i + 1];
2677 stmt->subcode = GIMPLE_DEBUG_BIND;
2678 gimple_debug_bind_set_value (stmt, t);
2679 break;
2680 }
2681 }
2682 }
2683 }
2684
2685 processing_debug_stmt = 0;
2686
2687 update_stmt (stmt);
2688 }
2689
2690 /* Process deferred debug stmts. In order to give values better odds
2691 of being successfully remapped, we delay the processing of debug
2692 stmts until all other stmts that might require remapping are
2693 processed. */
2694
2695 static void
2696 copy_debug_stmts (copy_body_data *id)
2697 {
2698 size_t i;
2699 gimple stmt;
2700
2701 if (!id->debug_stmts.exists ())
2702 return;
2703
2704 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2705 copy_debug_stmt (stmt, id);
2706
2707 id->debug_stmts.release ();
2708 }
2709
2710 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2711 another function. */
2712
2713 static tree
2714 copy_tree_body (copy_body_data *id)
2715 {
2716 tree fndecl = id->src_fn;
2717 tree body = DECL_SAVED_TREE (fndecl);
2718
2719 walk_tree (&body, copy_tree_body_r, id, NULL);
2720
2721 return body;
2722 }
2723
2724 /* Make a copy of the body of FN so that it can be inserted inline in
2725 another function. */
2726
2727 static tree
2728 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2729 basic_block entry_block_map, basic_block exit_block_map,
2730 basic_block new_entry)
2731 {
2732 tree fndecl = id->src_fn;
2733 tree body;
2734
2735 /* If this body has a CFG, walk CFG and copy. */
2736 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2737 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2738 new_entry);
2739 copy_debug_stmts (id);
2740
2741 return body;
2742 }
2743
2744 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2745 defined in function FN, or of a data member thereof. */
2746
2747 static bool
2748 self_inlining_addr_expr (tree value, tree fn)
2749 {
2750 tree var;
2751
2752 if (TREE_CODE (value) != ADDR_EXPR)
2753 return false;
2754
2755 var = get_base_address (TREE_OPERAND (value, 0));
2756
2757 return var && auto_var_in_fn_p (var, fn);
2758 }
2759
2760 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2761 lexical block and line number information from base_stmt, if given,
2762 or from the last stmt of the block otherwise. */
2763
2764 static gimple
2765 insert_init_debug_bind (copy_body_data *id,
2766 basic_block bb, tree var, tree value,
2767 gimple base_stmt)
2768 {
2769 gimple note;
2770 gimple_stmt_iterator gsi;
2771 tree tracked_var;
2772
2773 if (!gimple_in_ssa_p (id->src_cfun))
2774 return NULL;
2775
2776 if (!MAY_HAVE_DEBUG_STMTS)
2777 return NULL;
2778
2779 tracked_var = target_for_debug_bind (var);
2780 if (!tracked_var)
2781 return NULL;
2782
2783 if (bb)
2784 {
2785 gsi = gsi_last_bb (bb);
2786 if (!base_stmt && !gsi_end_p (gsi))
2787 base_stmt = gsi_stmt (gsi);
2788 }
2789
2790 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2791
2792 if (bb)
2793 {
2794 if (!gsi_end_p (gsi))
2795 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2796 else
2797 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2798 }
2799
2800 return note;
2801 }
2802
2803 static void
2804 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2805 {
2806 /* If VAR represents a zero-sized variable, it's possible that the
2807 assignment statement may result in no gimple statements. */
2808 if (init_stmt)
2809 {
2810 gimple_stmt_iterator si = gsi_last_bb (bb);
2811
2812 /* We can end up with init statements that store to a non-register
2813 from a rhs with a conversion. Handle that here by forcing the
2814 rhs into a temporary. gimple_regimplify_operands is not
2815 prepared to do this for us. */
2816 if (!is_gimple_debug (init_stmt)
2817 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2818 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2819 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2820 {
2821 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2822 gimple_expr_type (init_stmt),
2823 gimple_assign_rhs1 (init_stmt));
2824 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2825 GSI_NEW_STMT);
2826 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2827 gimple_assign_set_rhs1 (init_stmt, rhs);
2828 }
2829 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2830 gimple_regimplify_operands (init_stmt, &si);
2831
2832 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2833 {
2834 tree def = gimple_assign_lhs (init_stmt);
2835 insert_init_debug_bind (id, bb, def, def, init_stmt);
2836 }
2837 }
2838 }
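/* For example (hypothetical names): if the replacement variable x is
   addressable (hence not a gimple register) but has a register type, an
   init statement like  x = (float) i_2  is rewritten above into
   _4 = (float) i_2;  x = _4;  because gimple_regimplify_operands is not
   prepared to split the conversion out of such a store by itself.  */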
2839
2840 /* Initialize parameter P with VALUE. If needed, produce the init statement
2841 at the end of BB. When BB is NULL, we return the init statement to be
2842 output later. */
2843 static gimple
2844 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2845 basic_block bb, tree *vars)
2846 {
2847 gimple init_stmt = NULL;
2848 tree var;
2849 tree rhs = value;
2850 tree def = (gimple_in_ssa_p (cfun)
2851 ? ssa_default_def (id->src_cfun, p) : NULL);
2852
2853 if (value
2854 && value != error_mark_node
2855 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2856 {
2857 /* If we can match up types by promotion/demotion do so. */
2858 if (fold_convertible_p (TREE_TYPE (p), value))
2859 rhs = fold_convert (TREE_TYPE (p), value);
2860 else
2861 {
2862 /* ??? For valid programs we should not end up here.
2863 Still if we end up with truly mismatched types here, fall back
2864 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2865 GIMPLE to the following passes. */
2866 if (!is_gimple_reg_type (TREE_TYPE (value))
2867 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2868 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2869 else
2870 rhs = build_zero_cst (TREE_TYPE (p));
2871 }
2872 }
2873
2874 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2875 here since the type of this decl must be visible to the calling
2876 function. */
2877 var = copy_decl_to_var (p, id);
2878
2879 /* Declare this new variable. */
2880 DECL_CHAIN (var) = *vars;
2881 *vars = var;
2882
2883 /* Make gimplifier happy about this variable. */
2884 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2885
2886 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2887 we would not need to create a new variable here at all, if it
2888 weren't for debug info. Still, we can just use the argument
2889 value. */
2890 if (TREE_READONLY (p)
2891 && !TREE_ADDRESSABLE (p)
2892 && value && !TREE_SIDE_EFFECTS (value)
2893 && !def)
2894 {
2895 /* We may produce non-gimple trees by adding NOPs or introduce
2896 invalid sharing when the operand is not really constant.
2897 It is not a big deal to prohibit constant propagation here as
2898 we will constant propagate in the DOM1 pass anyway. */
2899 if (is_gimple_min_invariant (value)
2900 && useless_type_conversion_p (TREE_TYPE (p),
2901 TREE_TYPE (value))
2902 /* We have to be very careful about ADDR_EXPR. Make sure
2903 the base variable isn't a local variable of the inlined
2904 function, e.g., when doing recursive inlining, direct or
2905 mutually-recursive or whatever, which is why we don't
2906 just test whether fn == current_function_decl. */
2907 && ! self_inlining_addr_expr (value, fn))
2908 {
2909 insert_decl_map (id, p, value);
2910 insert_debug_decl_map (id, p, var);
2911 return insert_init_debug_bind (id, bb, var, value, NULL);
2912 }
2913 }
2914
2915 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2916 that way, when the PARM_DECL is encountered, it will be
2917 automatically replaced by the VAR_DECL. */
2918 insert_decl_map (id, p, var);
2919
2920 /* Even if P was TREE_READONLY, the new VAR should not be.
2921 In the original code, we would have constructed a
2922 temporary, and then the function body would have never
2923 changed the value of P. However, now, we will be
2924 constructing VAR directly. The constructor body may
2925 change its value multiple times as it is being
2926 constructed. Therefore, it must not be TREE_READONLY;
2927 the back-end assumes that a TREE_READONLY variable is
2928 assigned to only once. */
2929 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2930 TREE_READONLY (var) = 0;
2931
2932 /* If there is no setup required and we are in SSA, take the easy route
2933 replacing all SSA names representing the function parameter by the
2934 SSA name passed to the function.
2935
2936 We need to construct a map for the variable anyway, as it might be used
2937 in different SSA names when the parameter is set in the function.
2938
2939 Do the replacement at -O0 for const arguments replaced by a constant.
2940 This is important for builtin_constant_p and other constructs requiring
2941 a constant argument to be visible in the inlined function body. */
2942 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2943 && (optimize
2944 || (TREE_READONLY (p)
2945 && is_gimple_min_invariant (rhs)))
2946 && (TREE_CODE (rhs) == SSA_NAME
2947 || is_gimple_min_invariant (rhs))
2948 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2949 {
2950 insert_decl_map (id, def, rhs);
2951 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2952 }
2953
2954 /* If the value of the argument is never used, don't care about
2955 initializing it. */
2956 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2957 {
2958 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2959 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2960 }
2961
2962 /* Initialize this VAR_DECL from the equivalent argument. Convert
2963 the argument to the proper type in case it was promoted. */
2964 if (value)
2965 {
2966 if (rhs == error_mark_node)
2967 {
2968 insert_decl_map (id, p, var);
2969 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2970 }
2971
2972 STRIP_USELESS_TYPE_CONVERSION (rhs);
2973
2974 /* If we are in SSA form properly remap the default definition
2975 or assign to a dummy SSA name if the parameter is unused and
2976 we are not optimizing. */
2977 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2978 {
2979 if (def)
2980 {
2981 def = remap_ssa_name (def, id);
2982 init_stmt = gimple_build_assign (def, rhs);
2983 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2984 set_ssa_default_def (cfun, var, NULL);
2985 }
2986 else if (!optimize)
2987 {
2988 def = make_ssa_name (var, NULL);
2989 init_stmt = gimple_build_assign (def, rhs);
2990 }
2991 }
2992 else
2993 init_stmt = gimple_build_assign (var, rhs);
2994
2995 if (bb && init_stmt)
2996 insert_init_stmt (id, bb, init_stmt);
2997 }
2998 return init_stmt;
2999 }
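/* For example (hypothetical names): when inlining  sq (i_5)  with the
   callee  int sq (int x) { return x * x; }  in SSA form and optimization
   enabled, the default definition of x is simply mapped to i_5 and no
   statement is emitted (the "easy route" above).  At -O0, or when the
   argument is not an SSA name or invariant, a local copy of the PARM_DECL
   is created instead and an init statement such as  x_8 = i_5;  is added
   at the end of BB.  */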
3000
3001 /* Generate code to initialize the parameters of the function at the
3002 top of the stack in ID from the GIMPLE_CALL STMT. */
3003
3004 static void
3005 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3006 tree fn, basic_block bb)
3007 {
3008 tree parms;
3009 size_t i;
3010 tree p;
3011 tree vars = NULL_TREE;
3012 tree static_chain = gimple_call_chain (stmt);
3013
3014 /* Figure out what the parameters are. */
3015 parms = DECL_ARGUMENTS (fn);
3016
3017 /* Loop through the parameter declarations, replacing each with an
3018 equivalent VAR_DECL, appropriately initialized. */
3019 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3020 {
3021 tree val;
3022 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3023 setup_one_parameter (id, p, val, fn, bb, &vars);
3024 }
3025 /* After remapping parameters remap their types. This has to be done
3026 in a second loop over all parameters to appropriately remap
3027 variable sized arrays when the size is specified in a
3028 parameter following the array. */
3029 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3030 {
3031 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
3032 if (varp
3033 && TREE_CODE (*varp) == VAR_DECL)
3034 {
3035 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3036 ? ssa_default_def (id->src_cfun, p) : NULL);
3037 tree var = *varp;
3038 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3039 /* Also remap the default definition if it was remapped
3040 to the default definition of the parameter replacement
3041 by the parameter setup. */
3042 if (def)
3043 {
3044 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3045 if (defp
3046 && TREE_CODE (*defp) == SSA_NAME
3047 && SSA_NAME_VAR (*defp) == var)
3048 TREE_TYPE (*defp) = TREE_TYPE (var);
3049 }
3050 }
3051 }
3052
3053 /* Initialize the static chain. */
3054 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3055 gcc_assert (fn != current_function_decl);
3056 if (p)
3057 {
3058 /* No static chain? Seems like a bug in tree-nested.c. */
3059 gcc_assert (static_chain);
3060
3061 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3062 }
3063
3064 declare_inline_vars (id->block, vars);
3065 }
3066
3067
3068 /* Declare a return variable to replace the RESULT_DECL for the
3069 function we are calling. An appropriate DECL_STMT is returned.
3070 The USE_STMT is filled to contain a use of the declaration to
3071 indicate the return value of the function.
3072
3073 RETURN_SLOT, if non-null, is the place where to store the result. It
3074 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3075 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3076
3077 The return value is a (possibly null) value that holds the result
3078 as seen by the caller. */
3079
3080 static tree
3081 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3082 basic_block entry_bb)
3083 {
3084 tree callee = id->src_fn;
3085 tree result = DECL_RESULT (callee);
3086 tree callee_type = TREE_TYPE (result);
3087 tree caller_type;
3088 tree var, use;
3089
3090 /* Handle type-mismatches in the function declaration return type
3091 vs. the call expression. */
3092 if (modify_dest)
3093 caller_type = TREE_TYPE (modify_dest);
3094 else
3095 caller_type = TREE_TYPE (TREE_TYPE (callee));
3096
3097 /* We don't need to do anything for functions that don't return anything. */
3098 if (VOID_TYPE_P (callee_type))
3099 return NULL_TREE;
3100
3101 /* If there was a return slot, then the return value is the
3102 dereferenced address of that object. */
3103 if (return_slot)
3104 {
3105 /* The front end shouldn't have used both return_slot and
3106 a modify expression. */
3107 gcc_assert (!modify_dest);
3108 if (DECL_BY_REFERENCE (result))
3109 {
3110 tree return_slot_addr = build_fold_addr_expr (return_slot);
3111 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3112
3113 /* We are going to construct *&return_slot and we can't do that
3114 for variables believed not to be addressable.
3115
3116 FIXME: This check possibly can match, because values returned
3117 via return slot optimization are not believed to have address
3118 taken by alias analysis. */
3119 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3120 var = return_slot_addr;
3121 }
3122 else
3123 {
3124 var = return_slot;
3125 gcc_assert (TREE_CODE (var) != SSA_NAME);
3126 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3127 }
3128 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3129 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3130 && !DECL_GIMPLE_REG_P (result)
3131 && DECL_P (var))
3132 DECL_GIMPLE_REG_P (var) = 0;
3133 use = NULL;
3134 goto done;
3135 }
3136
3137 /* All types requiring non-trivial constructors should have been handled. */
3138 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3139
3140 /* Attempt to avoid creating a new temporary variable. */
3141 if (modify_dest
3142 && TREE_CODE (modify_dest) != SSA_NAME)
3143 {
3144 bool use_it = false;
3145
3146 /* We can't use MODIFY_DEST if there's type promotion involved. */
3147 if (!useless_type_conversion_p (callee_type, caller_type))
3148 use_it = false;
3149
3150 /* ??? If we're assigning to a variable sized type, then we must
3151 reuse the destination variable, because we've no good way to
3152 create variable sized temporaries at this point. */
3153 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3154 use_it = true;
3155
3156 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3157 reuse it as the result of the call directly. Don't do this if
3158 it would promote MODIFY_DEST to addressable. */
3159 else if (TREE_ADDRESSABLE (result))
3160 use_it = false;
3161 else
3162 {
3163 tree base_m = get_base_address (modify_dest);
3164
3165 /* If the base isn't a decl, then it's a pointer, and we don't
3166 know where that's going to go. */
3167 if (!DECL_P (base_m))
3168 use_it = false;
3169 else if (is_global_var (base_m))
3170 use_it = false;
3171 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3172 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3173 && !DECL_GIMPLE_REG_P (result)
3174 && DECL_GIMPLE_REG_P (base_m))
3175 use_it = false;
3176 else if (!TREE_ADDRESSABLE (base_m))
3177 use_it = true;
3178 }
3179
3180 if (use_it)
3181 {
3182 var = modify_dest;
3183 use = NULL;
3184 goto done;
3185 }
3186 }
3187
3188 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3189
3190 var = copy_result_decl_to_var (result, id);
3191 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3192
3193 /* Do not have the rest of GCC warn about this variable as it should
3194 not be visible to the user. */
3195 TREE_NO_WARNING (var) = 1;
3196
3197 declare_inline_vars (id->block, var);
3198
3199 /* Build the use expr. If the return type of the function was
3200 promoted, convert it back to the expected type. */
3201 use = var;
3202 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3203 {
3204 /* If we can match up types by promotion/demotion do so. */
3205 if (fold_convertible_p (caller_type, var))
3206 use = fold_convert (caller_type, var);
3207 else
3208 {
3209 /* ??? For valid programs we should not end up here.
3210 Still if we end up with truly mismatched types here, fall back
3211 to using a MEM_REF to not leak invalid GIMPLE to the following
3212 passes. */
3213 /* Prevent var from being written into SSA form. */
3214 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3215 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3216 DECL_GIMPLE_REG_P (var) = false;
3217 else if (is_gimple_reg_type (TREE_TYPE (var)))
3218 TREE_ADDRESSABLE (var) = true;
3219 use = fold_build2 (MEM_REF, caller_type,
3220 build_fold_addr_expr (var),
3221 build_int_cst (ptr_type_node, 0));
3222 }
3223 }
3224
3225 STRIP_USELESS_TYPE_CONVERSION (use);
3226
3227 if (DECL_BY_REFERENCE (result))
3228 {
3229 TREE_ADDRESSABLE (var) = 1;
3230 var = build_fold_addr_expr (var);
3231 }
3232
3233 done:
3234 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3235 way, when the RESULT_DECL is encountered, it will be
3236 automatically replaced by the VAR_DECL.
3237
3238 When returning by reference, ensure that RESULT_DECL remaps to
3239 gimple_val. */
3240 if (DECL_BY_REFERENCE (result)
3241 && !is_gimple_val (var))
3242 {
3243 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3244 insert_decl_map (id, result, temp);
3245 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3246 its default_def SSA_NAME. */
3247 if (gimple_in_ssa_p (id->src_cfun)
3248 && is_gimple_reg (result))
3249 {
3250 temp = make_ssa_name (temp, NULL);
3251 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3252 }
3253 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3254 }
3255 else
3256 insert_decl_map (id, result, var);
3257
3258 /* Remember this so we can ignore it in remap_decls. */
3259 id->retvar = var;
3260
3261 return use;
3262 }
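/* For example (hypothetical names): when inlining  s = foo ();  where foo
   returns a struct, MODIFY_DEST is  s;  if it passes the checks above it
   is reused directly, so assignments to the RESULT_DECL in the callee
   store straight into  s.  Otherwise a temporary such as  retval.9  is
   declared here, RESULT_DECL is mapped to it, and the returned USE value
   lets the caller emit  s = retval.9;  after the inlined body.  */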
3263
3264 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3265 to a local label. */
3266
3267 static tree
3268 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3269 {
3270 tree node = *nodep;
3271 tree fn = (tree) fnp;
3272
3273 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3274 return node;
3275
3276 if (TYPE_P (node))
3277 *walk_subtrees = 0;
3278
3279 return NULL_TREE;
3280 }
3281
3282 /* Determine if the function can be copied. If so return NULL. If
3283 not return a string describing the reason for failure. */
3284
3285 static const char *
3286 copy_forbidden (struct function *fun, tree fndecl)
3287 {
3288 const char *reason = fun->cannot_be_copied_reason;
3289 tree decl;
3290 unsigned ix;
3291
3292 /* Only examine the function once. */
3293 if (fun->cannot_be_copied_set)
3294 return reason;
3295
3296 /* We cannot copy a function that receives a non-local goto
3297 because we cannot remap the destination label used in the
3298 function that is performing the non-local goto. */
3299 /* ??? Actually, this should be possible, if we work at it.
3300 No doubt there's just a handful of places that simply
3301 assume it doesn't happen and don't substitute properly. */
3302 if (fun->has_nonlocal_label)
3303 {
3304 reason = G_("function %q+F can never be copied "
3305 "because it receives a non-local goto");
3306 goto fail;
3307 }
3308
3309 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3310 if (TREE_CODE (decl) == VAR_DECL
3311 && TREE_STATIC (decl)
3312 && !DECL_EXTERNAL (decl)
3313 && DECL_INITIAL (decl)
3314 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3315 has_label_address_in_static_1,
3316 fndecl))
3317 {
3318 reason = G_("function %q+F can never be copied because it saves "
3319 "address of local label in a static variable");
3320 goto fail;
3321 }
3322
3323 fail:
3324 fun->cannot_be_copied_reason = reason;
3325 fun->cannot_be_copied_set = true;
3326 return reason;
3327 }
3328
3329
3330 static const char *inline_forbidden_reason;
3331
3332 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3333 iff a function cannot be inlined. Also sets the reason why. */
3334
3335 static tree
3336 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3337 struct walk_stmt_info *wip)
3338 {
3339 tree fn = (tree) wip->info;
3340 tree t;
3341 gimple stmt = gsi_stmt (*gsi);
3342
3343 switch (gimple_code (stmt))
3344 {
3345 case GIMPLE_CALL:
3346 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3347 this may change the program's memory overhead drastically when the
3348 function using alloca is called in a loop. In the GCC present in
3349 SPEC2000, inlining into schedule_block caused it to require 2GB of
3350 RAM instead of 256MB. Don't do so for alloca calls emitted for
3351 VLA objects as those can't cause unbounded growth (they're always
3352 wrapped inside stack_save/stack_restore regions). */
3353 if (gimple_alloca_call_p (stmt)
3354 && !gimple_call_alloca_for_var_p (stmt)
3355 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3356 {
3357 inline_forbidden_reason
3358 = G_("function %q+F can never be inlined because it uses "
3359 "alloca (override using the always_inline attribute)");
3360 *handled_ops_p = true;
3361 return fn;
3362 }
3363
3364 t = gimple_call_fndecl (stmt);
3365 if (t == NULL_TREE)
3366 break;
3367
3368 /* We cannot inline functions that call setjmp. */
3369 if (setjmp_call_p (t))
3370 {
3371 inline_forbidden_reason
3372 = G_("function %q+F can never be inlined because it uses setjmp");
3373 *handled_ops_p = true;
3374 return t;
3375 }
3376
3377 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3378 switch (DECL_FUNCTION_CODE (t))
3379 {
3380 /* We cannot inline functions that take a variable number of
3381 arguments. */
3382 case BUILT_IN_VA_START:
3383 case BUILT_IN_NEXT_ARG:
3384 case BUILT_IN_VA_END:
3385 inline_forbidden_reason
3386 = G_("function %q+F can never be inlined because it "
3387 "uses variable argument lists");
3388 *handled_ops_p = true;
3389 return t;
3390
3391 case BUILT_IN_LONGJMP:
3392 /* We can't inline functions that call __builtin_longjmp at
3393 all. The non-local goto machinery really requires the
3394 destination be in a different function. If we allow the
3395 function calling __builtin_longjmp to be inlined into the
3396 function calling __builtin_setjmp, Things will Go Awry. */
3397 inline_forbidden_reason
3398 = G_("function %q+F can never be inlined because "
3399 "it uses setjmp-longjmp exception handling");
3400 *handled_ops_p = true;
3401 return t;
3402
3403 case BUILT_IN_NONLOCAL_GOTO:
3404 /* Similarly. */
3405 inline_forbidden_reason
3406 = G_("function %q+F can never be inlined because "
3407 "it uses non-local goto");
3408 *handled_ops_p = true;
3409 return t;
3410
3411 case BUILT_IN_RETURN:
3412 case BUILT_IN_APPLY_ARGS:
3413 /* If a __builtin_apply_args caller would be inlined,
3414 it would be saving arguments of the function it has
3415 been inlined into. Similarly __builtin_return would
3416 return from the function the inline has been inlined into. */
3417 inline_forbidden_reason
3418 = G_("function %q+F can never be inlined because "
3419 "it uses __builtin_return or __builtin_apply_args");
3420 *handled_ops_p = true;
3421 return t;
3422
3423 default:
3424 break;
3425 }
3426 break;
3427
3428 case GIMPLE_GOTO:
3429 t = gimple_goto_dest (stmt);
3430
3431 /* We will not inline a function which uses computed goto. The
3432 addresses of its local labels, which may be tucked into
3433 global storage, are of course not constant across
3434 instantiations, which causes unexpected behavior. */
3435 if (TREE_CODE (t) != LABEL_DECL)
3436 {
3437 inline_forbidden_reason
3438 = G_("function %q+F can never be inlined "
3439 "because it contains a computed goto");
3440 *handled_ops_p = true;
3441 return t;
3442 }
3443 break;
3444
3445 default:
3446 break;
3447 }
3448
3449 *handled_ops_p = false;
3450 return NULL_TREE;
3451 }
3452
3453 /* Return true if FNDECL is a function that cannot be inlined into
3454 another one. */
3455
3456 static bool
3457 inline_forbidden_p (tree fndecl)
3458 {
3459 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3460 struct walk_stmt_info wi;
3461 struct pointer_set_t *visited_nodes;
3462 basic_block bb;
3463 bool forbidden_p = false;
3464
3465 /* First check for shared reasons not to copy the code. */
3466 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3467 if (inline_forbidden_reason != NULL)
3468 return true;
3469
3470 /* Next, walk the statements of the function looking for
3471 constructs we can't handle, or that are non-optimal for inlining. */
3472 visited_nodes = pointer_set_create ();
3473 memset (&wi, 0, sizeof (wi));
3474 wi.info = (void *) fndecl;
3475 wi.pset = visited_nodes;
3476
3477 FOR_EACH_BB_FN (bb, fun)
3478 {
3479 gimple ret;
3480 gimple_seq seq = bb_seq (bb);
3481 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3482 forbidden_p = (ret != NULL);
3483 if (forbidden_p)
3484 break;
3485 }
3486
3487 pointer_set_destroy (visited_nodes);
3488 return forbidden_p;
3489 }
3490 \f
3491 /* Return false if the function FNDECL cannot be inlined on account of its
3492 attributes, true otherwise. */
3493 static bool
3494 function_attribute_inlinable_p (const_tree fndecl)
3495 {
3496 if (targetm.attribute_table)
3497 {
3498 const_tree a;
3499
3500 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3501 {
3502 const_tree name = TREE_PURPOSE (a);
3503 int i;
3504
3505 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3506 if (is_attribute_p (targetm.attribute_table[i].name, name))
3507 return targetm.function_attribute_inlinable_p (fndecl);
3508 }
3509 }
3510
3511 return true;
3512 }
3513
3514 /* Returns nonzero if FN is a function that does not have any
3515 fundamental inline blocking properties. */
3516
3517 bool
3518 tree_inlinable_function_p (tree fn)
3519 {
3520 bool inlinable = true;
3521 bool do_warning;
3522 tree always_inline;
3523
3524 /* If we've already decided this function shouldn't be inlined,
3525 there's no need to check again. */
3526 if (DECL_UNINLINABLE (fn))
3527 return false;
3528
3529 /* We only warn for functions declared `inline' by the user. */
3530 do_warning = (warn_inline
3531 && DECL_DECLARED_INLINE_P (fn)
3532 && !DECL_NO_INLINE_WARNING_P (fn)
3533 && !DECL_IN_SYSTEM_HEADER (fn));
3534
3535 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3536
3537 if (flag_no_inline
3538 && always_inline == NULL)
3539 {
3540 if (do_warning)
3541 warning (OPT_Winline, "function %q+F can never be inlined because it "
3542 "is suppressed using -fno-inline", fn);
3543 inlinable = false;
3544 }
3545
3546 else if (!function_attribute_inlinable_p (fn))
3547 {
3548 if (do_warning)
3549 warning (OPT_Winline, "function %q+F can never be inlined because it "
3550 "uses attributes conflicting with inlining", fn);
3551 inlinable = false;
3552 }
3553
3554 else if (inline_forbidden_p (fn))
3555 {
3556 /* See if we should warn about uninlinable functions. Previously,
3557 some of these warnings would be issued while trying to expand
3558 the function inline, but that would cause multiple warnings
3559 about functions that would for example call alloca. But since
3560 this is a property of the function, just one warning is enough.
3561 As a bonus we can now give more details about the reason why a
3562 function is not inlinable. */
3563 if (always_inline)
3564 error (inline_forbidden_reason, fn);
3565 else if (do_warning)
3566 warning (OPT_Winline, inline_forbidden_reason, fn);
3567
3568 inlinable = false;
3569 }
3570
3571 /* Squirrel away the result so that we don't have to check again. */
3572 DECL_UNINLINABLE (fn) = !inlinable;
3573
3574 return inlinable;
3575 }
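
/* As a concrete (hypothetical) example of the diagnostics above: a function
   such as

       void *buf[5];
       static inline __attribute__ ((always_inline))
       void f (void) { __builtin_longjmp (buf, 1); }

   triggers a hard error through the error () call, because the forbidden
   construct cannot be overridden for an always_inline function, whereas the
   same body declared plain "inline" and compiled with -Winline only produces
   a warning and is simply not inlined.  */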
3576
3577 /* Estimate the cost of a memory move. Use the machine-dependent
3578 word size and take a possible memcpy call into account. */
3579
3580 int
3581 estimate_move_cost (tree type)
3582 {
3583 HOST_WIDE_INT size;
3584
3585 gcc_assert (!VOID_TYPE_P (type));
3586
3587 if (TREE_CODE (type) == VECTOR_TYPE)
3588 {
3589 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3590 enum machine_mode simd
3591 = targetm.vectorize.preferred_simd_mode (inner);
3592 int simd_mode_size = GET_MODE_SIZE (simd);
3593 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3594 / simd_mode_size);
3595 }
3596
3597 size = int_size_in_bytes (type);
3598
3599 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3600 /* Cost of a memcpy call, 3 arguments and the call. */
3601 return 4;
3602 else
3603 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3604 }
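
/* A worked example (the actual numbers are target-dependent): on a target
   where MOVE_MAX_PIECES is 8 and MOVE_RATIO (!optimize_size) is 4, a 24-byte
   structure costs (24 + 8 - 1) / 8 = 3, while a 64-byte structure exceeds the
   8 * 4 = 32 byte limit and is costed like a memcpy call, i.e. 4.  */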
3605
3606 /* Returns the cost of operation CODE, according to WEIGHTS. */
3607
3608 static int
3609 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3610 tree op1 ATTRIBUTE_UNUSED, tree op2)
3611 {
3612 switch (code)
3613 {
3614 /* These are "free" conversions, or their presumed cost
3615 is folded into other operations. */
3616 case RANGE_EXPR:
3617 CASE_CONVERT:
3618 case COMPLEX_EXPR:
3619 case PAREN_EXPR:
3620 case VIEW_CONVERT_EXPR:
3621 return 0;
3622
3623 /* Assign cost of 1 to usual operations.
3624 ??? We may consider mapping RTL costs to this. */
3625 case COND_EXPR:
3626 case VEC_COND_EXPR:
3627 case VEC_PERM_EXPR:
3628
3629 case PLUS_EXPR:
3630 case POINTER_PLUS_EXPR:
3631 case MINUS_EXPR:
3632 case MULT_EXPR:
3633 case MULT_HIGHPART_EXPR:
3634 case FMA_EXPR:
3635
3636 case ADDR_SPACE_CONVERT_EXPR:
3637 case FIXED_CONVERT_EXPR:
3638 case FIX_TRUNC_EXPR:
3639
3640 case NEGATE_EXPR:
3641 case FLOAT_EXPR:
3642 case MIN_EXPR:
3643 case MAX_EXPR:
3644 case ABS_EXPR:
3645
3646 case LSHIFT_EXPR:
3647 case RSHIFT_EXPR:
3648 case LROTATE_EXPR:
3649 case RROTATE_EXPR:
3650 case VEC_LSHIFT_EXPR:
3651 case VEC_RSHIFT_EXPR:
3652
3653 case BIT_IOR_EXPR:
3654 case BIT_XOR_EXPR:
3655 case BIT_AND_EXPR:
3656 case BIT_NOT_EXPR:
3657
3658 case TRUTH_ANDIF_EXPR:
3659 case TRUTH_ORIF_EXPR:
3660 case TRUTH_AND_EXPR:
3661 case TRUTH_OR_EXPR:
3662 case TRUTH_XOR_EXPR:
3663 case TRUTH_NOT_EXPR:
3664
3665 case LT_EXPR:
3666 case LE_EXPR:
3667 case GT_EXPR:
3668 case GE_EXPR:
3669 case EQ_EXPR:
3670 case NE_EXPR:
3671 case ORDERED_EXPR:
3672 case UNORDERED_EXPR:
3673
3674 case UNLT_EXPR:
3675 case UNLE_EXPR:
3676 case UNGT_EXPR:
3677 case UNGE_EXPR:
3678 case UNEQ_EXPR:
3679 case LTGT_EXPR:
3680
3681 case CONJ_EXPR:
3682
3683 case PREDECREMENT_EXPR:
3684 case PREINCREMENT_EXPR:
3685 case POSTDECREMENT_EXPR:
3686 case POSTINCREMENT_EXPR:
3687
3688 case REALIGN_LOAD_EXPR:
3689
3690 case REDUC_MAX_EXPR:
3691 case REDUC_MIN_EXPR:
3692 case REDUC_PLUS_EXPR:
3693 case WIDEN_SUM_EXPR:
3694 case WIDEN_MULT_EXPR:
3695 case DOT_PROD_EXPR:
3696 case WIDEN_MULT_PLUS_EXPR:
3697 case WIDEN_MULT_MINUS_EXPR:
3698 case WIDEN_LSHIFT_EXPR:
3699
3700 case VEC_WIDEN_MULT_HI_EXPR:
3701 case VEC_WIDEN_MULT_LO_EXPR:
3702 case VEC_WIDEN_MULT_EVEN_EXPR:
3703 case VEC_WIDEN_MULT_ODD_EXPR:
3704 case VEC_UNPACK_HI_EXPR:
3705 case VEC_UNPACK_LO_EXPR:
3706 case VEC_UNPACK_FLOAT_HI_EXPR:
3707 case VEC_UNPACK_FLOAT_LO_EXPR:
3708 case VEC_PACK_TRUNC_EXPR:
3709 case VEC_PACK_SAT_EXPR:
3710 case VEC_PACK_FIX_TRUNC_EXPR:
3711 case VEC_WIDEN_LSHIFT_HI_EXPR:
3712 case VEC_WIDEN_LSHIFT_LO_EXPR:
3713
3714 return 1;
3715
3716 /* A few special cases of expensive operations. This is useful
3717 to avoid inlining functions that contain too many of these. */
3718 case TRUNC_DIV_EXPR:
3719 case CEIL_DIV_EXPR:
3720 case FLOOR_DIV_EXPR:
3721 case ROUND_DIV_EXPR:
3722 case EXACT_DIV_EXPR:
3723 case TRUNC_MOD_EXPR:
3724 case CEIL_MOD_EXPR:
3725 case FLOOR_MOD_EXPR:
3726 case ROUND_MOD_EXPR:
3727 case RDIV_EXPR:
3728 if (TREE_CODE (op2) != INTEGER_CST)
3729 return weights->div_mod_cost;
3730 return 1;
3731
3732 default:
3733 /* We expect a copy assignment with no operator. */
3734 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3735 return 0;
3736 }
3737 }
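
/* For instance, a division "x / y" whose divisor is not a compile-time
   constant is charged weights->div_mod_cost (10 in the time-based weights
   initialized in init_inline_once below, 1 in the size-based weights),
   whereas "x / 8" is charged 1 like any other simple operator.  */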
3738
3739
3740 /* Estimate number of instructions that will be created by expanding
3741 the statements in the statement sequence STMTS.
3742 WEIGHTS contains weights attributed to various constructs. */
3743
3744 static
3745 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3746 {
3747 int cost;
3748 gimple_stmt_iterator gsi;
3749
3750 cost = 0;
3751 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3752 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3753
3754 return cost;
3755 }
3756
3757
3758 /* Estimate number of instructions that will be created by expanding STMT.
3759 WEIGHTS contains weights attributed to various constructs. */
3760
3761 int
3762 estimate_num_insns (gimple stmt, eni_weights *weights)
3763 {
3764 unsigned cost, i;
3765 enum gimple_code code = gimple_code (stmt);
3766 tree lhs;
3767 tree rhs;
3768
3769 switch (code)
3770 {
3771 case GIMPLE_ASSIGN:
3772 /* Try to estimate the cost of assignments. We have two cases to
3773 deal with:
3774 1) Simple assignments to registers;
3775 2) Stores to things that must live in memory. This includes
3776 "normal" stores to scalars, but also assignments of large
3777 structures, or constructors of big arrays;
3778
3779 Let us look at these two cases, assuming we have "a = b + C":
3780 <GIMPLE_ASSIGN <var_decl "a">
3781 <plus_expr <var_decl "b"> <constant C>>
3782 If "a" is a GIMPLE register, the assignment to it is free on almost
3783 any target, because "a" usually ends up in a real register. Hence
3784 the only cost of this expression comes from the PLUS_EXPR, and we
3785 can ignore the GIMPLE_ASSIGN.
3786 If "a" is not a GIMPLE register, the assignment to "a" will most
3787 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3788 of moving something into "a", which we compute using the function
3789 estimate_move_cost. */
3790 if (gimple_clobber_p (stmt))
3791 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3792
3793 lhs = gimple_assign_lhs (stmt);
3794 rhs = gimple_assign_rhs1 (stmt);
3795
3796 cost = 0;
3797
3798 /* Account for the cost of moving to / from memory. */
3799 if (gimple_store_p (stmt))
3800 cost += estimate_move_cost (TREE_TYPE (lhs));
3801 if (gimple_assign_load_p (stmt))
3802 cost += estimate_move_cost (TREE_TYPE (rhs));
3803
3804 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3805 gimple_assign_rhs1 (stmt),
3806 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3807 == GIMPLE_BINARY_RHS
3808 ? gimple_assign_rhs2 (stmt) : NULL);
3809 break;
3810
3811 case GIMPLE_COND:
3812 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3813 gimple_op (stmt, 0),
3814 gimple_op (stmt, 1));
3815 break;
3816
3817 case GIMPLE_SWITCH:
3818 /* Take into account cost of the switch + guess 2 conditional jumps for
3819 each case label.
3820
3821 TODO: once the switch expansion logic is sufficiently separated, we can
3822 do a better job of estimating the cost of the switch. */
3823 if (weights->time_based)
3824 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3825 else
3826 cost = gimple_switch_num_labels (stmt) * 2;
3827 break;
3828
3829 case GIMPLE_CALL:
3830 {
3831 tree decl;
3832 struct cgraph_node *node = NULL;
3833
3834 /* Do not special-case builtins where we see the body.
3835 This just confuses the inliner. */
3836 if (gimple_call_internal_p (stmt))
3837 return 0;
3838 else if (!(decl = gimple_call_fndecl (stmt))
3839 || !(node = cgraph_get_node (decl))
3840 || node->definition)
3841 ;
3842 /* For builtins that are likely expanded to nothing or
3843 inlined, do not account for operand costs. */
3844 else if (is_simple_builtin (decl))
3845 return 0;
3846 else if (is_inexpensive_builtin (decl))
3847 return weights->target_builtin_call_cost;
3848 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3849 {
3850 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3851 specialize the cheap expansion we do here.
3852 ??? This asks for a more general solution. */
3853 switch (DECL_FUNCTION_CODE (decl))
3854 {
3855 case BUILT_IN_POW:
3856 case BUILT_IN_POWF:
3857 case BUILT_IN_POWL:
3858 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3859 && REAL_VALUES_EQUAL
3860 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3861 return estimate_operator_cost (MULT_EXPR, weights,
3862 gimple_call_arg (stmt, 0),
3863 gimple_call_arg (stmt, 0));
3864 break;
3865
3866 default:
3867 break;
3868 }
3869 }
3870
3871 cost = node ? weights->call_cost : weights->indirect_call_cost;
3872 if (gimple_call_lhs (stmt))
3873 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3874 for (i = 0; i < gimple_call_num_args (stmt); i++)
3875 {
3876 tree arg = gimple_call_arg (stmt, i);
3877 cost += estimate_move_cost (TREE_TYPE (arg));
3878 }
3879 break;
3880 }
3881
3882 case GIMPLE_RETURN:
3883 return weights->return_cost;
3884
3885 case GIMPLE_GOTO:
3886 case GIMPLE_LABEL:
3887 case GIMPLE_NOP:
3888 case GIMPLE_PHI:
3889 case GIMPLE_PREDICT:
3890 case GIMPLE_DEBUG:
3891 return 0;
3892
3893 case GIMPLE_ASM:
3894 {
3895 int count = asm_str_count (gimple_asm_string (stmt));
3896 /* 1000 means infinity. This avoids overflows later
3897 with very long asm statements. */
3898 if (count > 1000)
3899 count = 1000;
3900 return count;
3901 }
3902
3903 case GIMPLE_RESX:
3904 /* This is either going to be an external function call with one
3905 argument, or two register copy statements plus a goto. */
3906 return 2;
3907
3908 case GIMPLE_EH_DISPATCH:
3909 /* ??? This is going to turn into a switch statement. Ideally
3910 we'd have a look at the eh region and estimate the number of
3911 edges involved. */
3912 return 10;
3913
3914 case GIMPLE_BIND:
3915 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3916
3917 case GIMPLE_EH_FILTER:
3918 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3919
3920 case GIMPLE_CATCH:
3921 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3922
3923 case GIMPLE_TRY:
3924 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3925 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3926
3927 /* OpenMP directives are generally very expensive. */
3928
3929 case GIMPLE_OMP_RETURN:
3930 case GIMPLE_OMP_SECTIONS_SWITCH:
3931 case GIMPLE_OMP_ATOMIC_STORE:
3932 case GIMPLE_OMP_CONTINUE:
3933 /* ...except these, which are cheap. */
3934 return 0;
3935
3936 case GIMPLE_OMP_ATOMIC_LOAD:
3937 return weights->omp_cost;
3938
3939 case GIMPLE_OMP_FOR:
3940 return (weights->omp_cost
3941 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3942 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3943
3944 case GIMPLE_OMP_PARALLEL:
3945 case GIMPLE_OMP_TASK:
3946 case GIMPLE_OMP_CRITICAL:
3947 case GIMPLE_OMP_MASTER:
3948 case GIMPLE_OMP_TASKGROUP:
3949 case GIMPLE_OMP_ORDERED:
3950 case GIMPLE_OMP_SECTION:
3951 case GIMPLE_OMP_SECTIONS:
3952 case GIMPLE_OMP_SINGLE:
3953 case GIMPLE_OMP_TARGET:
3954 case GIMPLE_OMP_TEAMS:
3955 return (weights->omp_cost
3956 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3957
3958 case GIMPLE_TRANSACTION:
3959 return (weights->tm_cost
3960 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3961 weights));
3962
3963 default:
3964 gcc_unreachable ();
3965 }
3966
3967 return cost;
3968 }
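
/* Putting the pieces together, a short sequence such as

       tmp_1 = b_2 + c_3;
       a = tmp_1;
       return a;

   where "a" lives in memory is estimated as 1 for the PLUS_EXPR, plus
   estimate_move_cost (TREE_TYPE (a)) for the store to "a", plus
   weights->return_cost; the register-to-register parts of the assignments
   contribute nothing.  */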
3969
3970 /* Estimate number of instructions that will be created by expanding
3971 function FNDECL. WEIGHTS contains weights attributed to various
3972 constructs. */
3973
3974 int
3975 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3976 {
3977 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3978 gimple_stmt_iterator bsi;
3979 basic_block bb;
3980 int n = 0;
3981
3982 gcc_assert (my_function && my_function->cfg);
3983 FOR_EACH_BB_FN (bb, my_function)
3984 {
3985 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3986 n += estimate_num_insns (gsi_stmt (bsi), weights);
3987 }
3988
3989 return n;
3990 }
3991
3992
3993 /* Initializes weights used by estimate_num_insns. */
3994
3995 void
3996 init_inline_once (void)
3997 {
3998 eni_size_weights.call_cost = 1;
3999 eni_size_weights.indirect_call_cost = 3;
4000 eni_size_weights.target_builtin_call_cost = 1;
4001 eni_size_weights.div_mod_cost = 1;
4002 eni_size_weights.omp_cost = 40;
4003 eni_size_weights.tm_cost = 10;
4004 eni_size_weights.time_based = false;
4005 eni_size_weights.return_cost = 1;
4006
4007 /* Estimating time for call is difficult, since we have no idea what the
4008 called function does. In the current uses of eni_time_weights,
4009 underestimating the cost does less harm than overestimating it, so
4010 we choose a rather small value here. */
4011 eni_time_weights.call_cost = 10;
4012 eni_time_weights.indirect_call_cost = 15;
4013 eni_time_weights.target_builtin_call_cost = 1;
4014 eni_time_weights.div_mod_cost = 10;
4015 eni_time_weights.omp_cost = 40;
4016 eni_time_weights.tm_cost = 40;
4017 eni_time_weights.time_based = true;
4018 eni_time_weights.return_cost = 2;
4019 }
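
/* With these defaults the time-based estimate of, say, a direct call with no
   arguments that returns an int is weights->call_cost plus the move cost of
   the result, i.e. 10 + 1 on a typical target, while the size-based estimate
   of the same call is 1 + 1.  */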
4020
4021 /* Estimate the number of instructions in a gimple_seq. */
4022
4023 int
4024 count_insns_seq (gimple_seq seq, eni_weights *weights)
4025 {
4026 gimple_stmt_iterator gsi;
4027 int n = 0;
4028 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4029 n += estimate_num_insns (gsi_stmt (gsi), weights);
4030
4031 return n;
4032 }
4033
4034
4035 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4036
4037 static void
4038 prepend_lexical_block (tree current_block, tree new_block)
4039 {
4040 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4041 BLOCK_SUBBLOCKS (current_block) = new_block;
4042 BLOCK_SUPERCONTEXT (new_block) = current_block;
4043 }
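
/* For example, if CURRENT_BLOCK already has subblocks B1 -> B2, then after
   prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) the subblock chain is
   NEW_BLOCK -> B1 -> B2 and NEW_BLOCK's supercontext is CURRENT_BLOCK.  */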
4044
4045 /* Add local variables from CALLEE to CALLER. */
4046
4047 static inline void
4048 add_local_variables (struct function *callee, struct function *caller,
4049 copy_body_data *id)
4050 {
4051 tree var;
4052 unsigned ix;
4053
4054 FOR_EACH_LOCAL_DECL (callee, ix, var)
4055 if (!can_be_nonlocal (var, id))
4056 {
4057 tree new_var = remap_decl (var, id);
4058
4059 /* Remap debug-expressions. */
4060 if (TREE_CODE (new_var) == VAR_DECL
4061 && DECL_HAS_DEBUG_EXPR_P (var)
4062 && new_var != var)
4063 {
4064 tree tem = DECL_DEBUG_EXPR (var);
4065 bool old_regimplify = id->regimplify;
4066 id->remapping_type_depth++;
4067 walk_tree (&tem, copy_tree_body_r, id, NULL);
4068 id->remapping_type_depth--;
4069 id->regimplify = old_regimplify;
4070 SET_DECL_DEBUG_EXPR (new_var, tem);
4071 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4072 }
4073 add_local_decl (caller, new_var);
4074 }
4075 }
4076
4077 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4078
4079 static bool
4080 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4081 {
4082 tree use_retvar;
4083 tree fn;
4084 struct pointer_map_t *st, *dst;
4085 tree return_slot;
4086 tree modify_dest;
4087 location_t saved_location;
4088 struct cgraph_edge *cg_edge;
4089 cgraph_inline_failed_t reason;
4090 basic_block return_block;
4091 edge e;
4092 gimple_stmt_iterator gsi, stmt_gsi;
4093 bool successfully_inlined = FALSE;
4094 bool purge_dead_abnormal_edges;
4095
4096 /* Set input_location here so we get the right instantiation context
4097 if we call instantiate_decl from inlinable_function_p. */
4098 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4099 saved_location = input_location;
4100 input_location = gimple_location (stmt);
4101
4102 /* From here on, we're only interested in CALL_EXPRs. */
4103 if (gimple_code (stmt) != GIMPLE_CALL)
4104 goto egress;
4105
4106 cg_edge = cgraph_edge (id->dst_node, stmt);
4107 gcc_checking_assert (cg_edge);
4108 /* First, see if we can figure out what function is being called.
4109 If we cannot, then there is no hope of inlining the function. */
4110 if (cg_edge->indirect_unknown_callee)
4111 goto egress;
4112 fn = cg_edge->callee->decl;
4113 gcc_checking_assert (fn);
4114
4115 /* If FN is a declaration of a function in a nested scope that was
4116 globally declared inline, we don't set its DECL_INITIAL.
4117 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4118 C++ front-end uses it for cdtors to refer to their internal
4119 declarations, that are not real functions. Fortunately those
4120 don't have trees to be saved, so we can tell by checking their
4121 gimple_body. */
4122 if (!DECL_INITIAL (fn)
4123 && DECL_ABSTRACT_ORIGIN (fn)
4124 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4125 fn = DECL_ABSTRACT_ORIGIN (fn);
4126
4127 /* Don't try to inline functions that are not well-suited to inlining. */
4128 if (cg_edge->inline_failed)
4129 {
4130 reason = cg_edge->inline_failed;
4131 /* If this call was originally indirect, we do not want to emit any
4132 inlining related warnings or sorry messages because there are no
4133 guarantees regarding those. */
4134 if (cg_edge->indirect_inlining_edge)
4135 goto egress;
4136
4137 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4138 /* For extern inline functions that get redefined we have always
4139 silently ignored the always_inline flag. Better behaviour would
4140 be to be able to keep both bodies and use the extern inline body
4141 for inlining, but we can't do that because frontends overwrite
4142 the body. */
4143 && !cg_edge->callee->local.redefined_extern_inline
4144 /* During early inline pass, report only when optimization is
4145 not turned on. */
4146 && (cgraph_global_info_ready
4147 || !optimize
4148 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4149 /* PR 20090218-1_0.c. Body can be provided by another module. */
4150 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4151 {
4152 error ("inlining failed in call to always_inline %q+F: %s", fn,
4153 cgraph_inline_failed_string (reason));
4154 error ("called from here");
4155 }
4156 else if (warn_inline
4157 && DECL_DECLARED_INLINE_P (fn)
4158 && !DECL_NO_INLINE_WARNING_P (fn)
4159 && !DECL_IN_SYSTEM_HEADER (fn)
4160 && reason != CIF_UNSPECIFIED
4161 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4162 /* Do not warn about not inlined recursive calls. */
4163 && !cgraph_edge_recursive_p (cg_edge)
4164 /* Avoid warnings during early inline pass. */
4165 && cgraph_global_info_ready)
4166 {
4167 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4168 fn, _(cgraph_inline_failed_string (reason)));
4169 warning (OPT_Winline, "called from here");
4170 }
4171 goto egress;
4172 }
4173 fn = cg_edge->callee->decl;
4174 cgraph_get_body (cg_edge->callee);
4175
4176 #ifdef ENABLE_CHECKING
4177 if (cg_edge->callee->decl != id->dst_node->decl)
4178 verify_cgraph_node (cg_edge->callee);
4179 #endif
4180
4181 /* We will be inlining this callee. */
4182 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4183
4184 /* Update the caller's EH personality. */
4185 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4186 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4187 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4188
4189 /* Split the block holding the GIMPLE_CALL. */
4190 e = split_block (bb, stmt);
4191 bb = e->src;
4192 return_block = e->dest;
4193 remove_edge (e);
4194
4195 /* split_block splits after the statement; work around this by
4196 moving the call into the second block manually. Not pretty,
4197 but seems easier than doing the CFG manipulation by hand
4198 when the GIMPLE_CALL is in the last statement of BB. */
4199 stmt_gsi = gsi_last_bb (bb);
4200 gsi_remove (&stmt_gsi, false);
4201
4202 /* If the GIMPLE_CALL was the last statement of BB, it may have
4203 been the source of abnormal edges. In this case, schedule
4204 the removal of dead abnormal edges. */
4205 gsi = gsi_start_bb (return_block);
4206 if (gsi_end_p (gsi))
4207 {
4208 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4209 purge_dead_abnormal_edges = true;
4210 }
4211 else
4212 {
4213 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4214 purge_dead_abnormal_edges = false;
4215 }
4216
4217 stmt_gsi = gsi_start_bb (return_block);
4218
4219 /* Build a block containing code to initialize the arguments, the
4220 actual inline expansion of the body, and a label for the return
4221 statements within the function to jump to. The type of the
4222 statement expression is the return type of the function call.
4223 ??? If the call does not have an associated block then we will
4224 remap all callee blocks to NULL, effectively dropping most of
4225 its debug information. This should only happen for calls to
4226 artificial decls inserted by the compiler itself. We need to
4227 either link the inlined blocks into the caller block tree or
4228 not refer to them in any way to not break GC for locations. */
4229 if (gimple_block (stmt))
4230 {
4231 id->block = make_node (BLOCK);
4232 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4233 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4234 prepend_lexical_block (gimple_block (stmt), id->block);
4235 }
4236
4237 /* Local declarations will be replaced by their equivalents in this
4238 map. */
4239 st = id->decl_map;
4240 id->decl_map = pointer_map_create ();
4241 dst = id->debug_map;
4242 id->debug_map = NULL;
4243
4244 /* Record the function we are about to inline. */
4245 id->src_fn = fn;
4246 id->src_node = cg_edge->callee;
4247 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4248 id->gimple_call = stmt;
4249
4250 gcc_assert (!id->src_cfun->after_inlining);
4251
4252 id->entry_bb = bb;
4253 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4254 {
4255 gimple_stmt_iterator si = gsi_last_bb (bb);
4256 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4257 NOT_TAKEN),
4258 GSI_NEW_STMT);
4259 }
4260 initialize_inlined_parameters (id, stmt, fn, bb);
4261
4262 if (DECL_INITIAL (fn))
4263 {
4264 if (gimple_block (stmt))
4265 {
4266 tree *var;
4267
4268 prepend_lexical_block (id->block,
4269 remap_blocks (DECL_INITIAL (fn), id));
4270 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4271 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4272 == NULL_TREE));
4273 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4274 otherwise for DWARF the DW_TAG_formal_parameter DIEs will not be children of
4275 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4276 under it. The parameters can then be evaluated in the debugger,
4277 but won't show up in backtraces. */
4278 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4279 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4280 {
4281 tree v = *var;
4282 *var = TREE_CHAIN (v);
4283 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4284 BLOCK_VARS (id->block) = v;
4285 }
4286 else
4287 var = &TREE_CHAIN (*var);
4288 }
4289 else
4290 remap_blocks_to_null (DECL_INITIAL (fn), id);
4291 }
4292
4293 /* Return statements in the function body will be replaced by jumps
4294 to the RET_LABEL. */
4295 gcc_assert (DECL_INITIAL (fn));
4296 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4297
4298 /* Find the LHS to which the result of this call is assigned. */
4299 return_slot = NULL;
4300 if (gimple_call_lhs (stmt))
4301 {
4302 modify_dest = gimple_call_lhs (stmt);
4303
4304 /* The function which we are inlining might not return a value,
4305 in which case we should issue a warning that the function
4306 does not return a value. In that case the optimizers will
4307 see that the variable to which the value is assigned was not
4308 initialized. We do not want to issue a warning about that
4309 uninitialized variable. */
4310 if (DECL_P (modify_dest))
4311 TREE_NO_WARNING (modify_dest) = 1;
4312
4313 if (gimple_call_return_slot_opt_p (stmt))
4314 {
4315 return_slot = modify_dest;
4316 modify_dest = NULL;
4317 }
4318 }
4319 else
4320 modify_dest = NULL;
4321
4322 /* If we are inlining a call to the C++ operator new, we don't want
4323 to use type based alias analysis on the return value. Otherwise
4324 we may get confused if the compiler sees that the inlined new
4325 function returns a pointer which was just deleted. See bug
4326 33407. */
4327 if (DECL_IS_OPERATOR_NEW (fn))
4328 {
4329 return_slot = NULL;
4330 modify_dest = NULL;
4331 }
4332
4333 /* Declare the return variable for the function. */
4334 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4335
4336 /* Add local vars in this inlined callee to caller. */
4337 add_local_variables (id->src_cfun, cfun, id);
4338
4339 if (dump_file && (dump_flags & TDF_DETAILS))
4340 {
4341 fprintf (dump_file, "Inlining ");
4342 print_generic_expr (dump_file, id->src_fn, 0);
4343 fprintf (dump_file, " to ");
4344 print_generic_expr (dump_file, id->dst_fn, 0);
4345 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4346 }
4347
4348 /* This is it. Duplicate the callee body. Assume callee is
4349 pre-gimplified. Note that we must not alter the caller
4350 function in any way before this point, as this CALL_EXPR may be
4351 a self-referential call; if we're calling ourselves, we need to
4352 duplicate our body before altering anything. */
4353 copy_body (id, bb->count,
4354 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4355 bb, return_block, NULL);
4356
4357 /* Reset the escaped solution. */
4358 if (cfun->gimple_df)
4359 pt_solution_reset (&cfun->gimple_df->escaped);
4360
4361 /* Clean up. */
4362 if (id->debug_map)
4363 {
4364 pointer_map_destroy (id->debug_map);
4365 id->debug_map = dst;
4366 }
4367 pointer_map_destroy (id->decl_map);
4368 id->decl_map = st;
4369
4370 /* Unlink the call's virtual operands before replacing it. */
4371 unlink_stmt_vdef (stmt);
4372
4373 /* If the inlined function returns a result that we care about,
4374 substitute the GIMPLE_CALL with an assignment of the return
4375 variable to the LHS of the call. That is, if STMT was
4376 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4377 if (use_retvar && gimple_call_lhs (stmt))
4378 {
4379 gimple old_stmt = stmt;
4380 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4381 gsi_replace (&stmt_gsi, stmt, false);
4382 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4383 }
4384 else
4385 {
4386 /* Handle the case of inlining a function with no return
4387 statement, which causes the return value to become undefined. */
4388 if (gimple_call_lhs (stmt)
4389 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4390 {
4391 tree name = gimple_call_lhs (stmt);
4392 tree var = SSA_NAME_VAR (name);
4393 tree def = ssa_default_def (cfun, var);
4394
4395 if (def)
4396 {
4397 /* If the variable is used undefined, make this name
4398 undefined via a move. */
4399 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4400 gsi_replace (&stmt_gsi, stmt, true);
4401 }
4402 else
4403 {
4404 /* Otherwise make this variable undefined. */
4405 gsi_remove (&stmt_gsi, true);
4406 set_ssa_default_def (cfun, var, name);
4407 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4408 }
4409 }
4410 else
4411 gsi_remove (&stmt_gsi, true);
4412 }
4413
4414 if (purge_dead_abnormal_edges)
4415 {
4416 gimple_purge_dead_eh_edges (return_block);
4417 gimple_purge_dead_abnormal_call_edges (return_block);
4418 }
4419
4420 /* If the value of the new expression is ignored, that's OK. We
4421 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4422 the equivalent inlined version either. */
4423 if (is_gimple_assign (stmt))
4424 {
4425 gcc_assert (gimple_assign_single_p (stmt)
4426 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4427 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4428 }
4429
4430 /* Output the inlining info for this abstract function, since it has been
4431 inlined. If we don't do this now, we can lose the information about the
4432 variables in the function when the blocks get blown away as soon as we
4433 remove the cgraph node. */
4434 if (gimple_block (stmt))
4435 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4436
4437 /* Update callgraph if needed. */
4438 cgraph_remove_node (cg_edge->callee);
4439
4440 id->block = NULL_TREE;
4441 successfully_inlined = TRUE;
4442
4443 egress:
4444 input_location = saved_location;
4445 return successfully_inlined;
4446 }
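
/* Schematically (the statement and variable names here are illustrative, and
   the details vary with EH, abnormal edges and debug info), a block ending in

       a_1 = foo (x_2);

   is split at the call; a copy of foo's body is placed between the two halves
   with its PARM_DECL initialized from x_2 and its return statements redirected
   into a return variable, and the original call statement is finally replaced
   by

       a_1 = retval_3;

   in the return block, where retval_3 is the variable declared by
   declare_return_variable above.  */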
4447
4448 /* Expand call statements reachable from STMT_P.
4449 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4450 in a MODIFY_EXPR. */
4451
4452 static bool
4453 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4454 {
4455 gimple_stmt_iterator gsi;
4456
4457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4458 {
4459 gimple stmt = gsi_stmt (gsi);
4460
4461 if (is_gimple_call (stmt)
4462 && !gimple_call_internal_p (stmt)
4463 && expand_call_inline (bb, stmt, id))
4464 return true;
4465 }
4466
4467 return false;
4468 }
4469
4470
4471 /* Walk all basic blocks created after FIRST and try to fold every statement
4472 in the STATEMENTS pointer set. */
4473
4474 static void
4475 fold_marked_statements (int first, struct pointer_set_t *statements)
4476 {
4477 for (; first < n_basic_blocks_for_fn (cfun); first++)
4478 if (BASIC_BLOCK_FOR_FN (cfun, first))
4479 {
4480 gimple_stmt_iterator gsi;
4481
4482 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4483 !gsi_end_p (gsi);
4484 gsi_next (&gsi))
4485 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4486 {
4487 gimple old_stmt = gsi_stmt (gsi);
4488 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4489
4490 if (old_decl && DECL_BUILT_IN (old_decl))
4491 {
4492 /* Folding builtins can create multiple instructions,
4493 we need to look at all of them. */
4494 gimple_stmt_iterator i2 = gsi;
4495 gsi_prev (&i2);
4496 if (fold_stmt (&gsi))
4497 {
4498 gimple new_stmt;
4499 /* If a builtin at the end of a bb folded into nothing,
4500 the following loop won't work. */
4501 if (gsi_end_p (gsi))
4502 {
4503 cgraph_update_edges_for_call_stmt (old_stmt,
4504 old_decl, NULL);
4505 break;
4506 }
4507 if (gsi_end_p (i2))
4508 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4509 else
4510 gsi_next (&i2);
4511 while (1)
4512 {
4513 new_stmt = gsi_stmt (i2);
4514 update_stmt (new_stmt);
4515 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4516 new_stmt);
4517
4518 if (new_stmt == gsi_stmt (gsi))
4519 {
4520 /* It is okay to check only for the very last
4521 of these statements. If it is a throwing
4522 statement nothing will change. If it isn't
4523 this can remove EH edges. That would only be
4524 wrong if some intermediate stmts could throw
4525 while the last one cannot; in that case we'd
4526 have to split the block, which we can't do here
4527 and we'd lose anyway. And as builtins probably
4528 never throw, this is all
4529 moot anyway. */
4530 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4531 new_stmt))
4532 gimple_purge_dead_eh_edges (
4533 BASIC_BLOCK_FOR_FN (cfun, first));
4534 break;
4535 }
4536 gsi_next (&i2);
4537 }
4538 }
4539 }
4540 else if (fold_stmt (&gsi))
4541 {
4542 /* Re-read the statement from GSI as fold_stmt() may
4543 have changed it. */
4544 gimple new_stmt = gsi_stmt (gsi);
4545 update_stmt (new_stmt);
4546
4547 if (is_gimple_call (old_stmt)
4548 || is_gimple_call (new_stmt))
4549 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4550 new_stmt);
4551
4552 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4553 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4554 first));
4555 }
4556 }
4557 }
4558 }
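
/* A typical case (illustrative): once inlining has exposed a constant
   argument, a marked statement such as

       n_1 = __builtin_strlen ("abc");

   folds to "n_1 = 3"; the code above then walks whatever statements the
   folding produced, updates them, and fixes up call-graph and EH edges for
   the replaced call.  */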
4559
4560 /* Expand calls to inline functions in the body of FN. */
4561
4562 unsigned int
4563 optimize_inline_calls (tree fn)
4564 {
4565 copy_body_data id;
4566 basic_block bb;
4567 int last = n_basic_blocks_for_fn (cfun);
4568 bool inlined_p = false;
4569
4570 /* Clear out ID. */
4571 memset (&id, 0, sizeof (id));
4572
4573 id.src_node = id.dst_node = cgraph_get_node (fn);
4574 gcc_assert (id.dst_node->definition);
4575 id.dst_fn = fn;
4576 /* Or any functions that aren't finished yet. */
4577 if (current_function_decl)
4578 id.dst_fn = current_function_decl;
4579
4580 id.copy_decl = copy_decl_maybe_to_var;
4581 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4582 id.transform_new_cfg = false;
4583 id.transform_return_to_modify = true;
4584 id.transform_parameter = true;
4585 id.transform_lang_insert_block = NULL;
4586 id.statements_to_fold = pointer_set_create ();
4587
4588 push_gimplify_context ();
4589
4590 /* We make no attempts to keep dominance info up-to-date. */
4591 free_dominance_info (CDI_DOMINATORS);
4592 free_dominance_info (CDI_POST_DOMINATORS);
4593
4594 /* Register specific gimple functions. */
4595 gimple_register_cfg_hooks ();
4596
4597 /* Reach the trees by walking over the CFG, and note the
4598 enclosing basic-blocks in the call edges. */
4599 /* We walk the blocks going forward, because inlined function bodies
4600 will split id->current_basic_block, and the new blocks will
4601 follow it; we'll trudge through them, processing their CALL_EXPRs
4602 along the way. */
4603 FOR_EACH_BB_FN (bb, cfun)
4604 inlined_p |= gimple_expand_calls_inline (bb, &id);
4605
4606 pop_gimplify_context (NULL);
4607
4608 #ifdef ENABLE_CHECKING
4609 {
4610 struct cgraph_edge *e;
4611
4612 verify_cgraph_node (id.dst_node);
4613
4614 /* Double check that we inlined everything we are supposed to inline. */
4615 for (e = id.dst_node->callees; e; e = e->next_callee)
4616 gcc_assert (e->inline_failed);
4617 }
4618 #endif
4619
4620 /* Fold queued statements. */
4621 fold_marked_statements (last, id.statements_to_fold);
4622 pointer_set_destroy (id.statements_to_fold);
4623
4624 gcc_assert (!id.debug_stmts.exists ());
4625
4626 /* If we didn't inline into the function there is nothing to do. */
4627 if (!inlined_p)
4628 return 0;
4629
4630 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4631 number_blocks (fn);
4632
4633 delete_unreachable_blocks_update_callgraph (&id);
4634 #ifdef ENABLE_CHECKING
4635 verify_cgraph_node (id.dst_node);
4636 #endif
4637
4638 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4639 not possible yet - the IPA passes might make various functions not
4640 throw and they don't care to proactively update local EH info. This is
4641 done later in the fixup_cfg pass, which also executes the verification. */
4642 return (TODO_update_ssa
4643 | TODO_cleanup_cfg
4644 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4645 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4646 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4647 ? TODO_rebuild_frequencies : 0));
4648 }
4649
4650 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4651
4652 tree
4653 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4654 {
4655 enum tree_code code = TREE_CODE (*tp);
4656 enum tree_code_class cl = TREE_CODE_CLASS (code);
4657
4658 /* We make copies of most nodes. */
4659 if (IS_EXPR_CODE_CLASS (cl)
4660 || code == TREE_LIST
4661 || code == TREE_VEC
4662 || code == TYPE_DECL
4663 || code == OMP_CLAUSE)
4664 {
4665 /* Because the chain gets clobbered when we make a copy, we save it
4666 here. */
4667 tree chain = NULL_TREE, new_tree;
4668
4669 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4670 chain = TREE_CHAIN (*tp);
4671
4672 /* Copy the node. */
4673 new_tree = copy_node (*tp);
4674
4675 *tp = new_tree;
4676
4677 /* Now, restore the chain, if appropriate. That will cause
4678 walk_tree to walk into the chain as well. */
4679 if (code == PARM_DECL
4680 || code == TREE_LIST
4681 || code == OMP_CLAUSE)
4682 TREE_CHAIN (*tp) = chain;
4683
4684 /* For now, we don't update BLOCKs when we make copies. So, we
4685 have to nullify all BIND_EXPRs. */
4686 if (TREE_CODE (*tp) == BIND_EXPR)
4687 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4688 }
4689 else if (code == CONSTRUCTOR)
4690 {
4691 /* CONSTRUCTOR nodes need special handling because
4692 we need to duplicate the vector of elements. */
4693 tree new_tree;
4694
4695 new_tree = copy_node (*tp);
4696 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4697 *tp = new_tree;
4698 }
4699 else if (code == STATEMENT_LIST)
4700 /* We used to just abort on STATEMENT_LIST, but we can run into them
4701 with statement-expressions (c++/40975). */
4702 copy_statement_list (tp);
4703 else if (TREE_CODE_CLASS (code) == tcc_type)
4704 *walk_subtrees = 0;
4705 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4706 *walk_subtrees = 0;
4707 else if (TREE_CODE_CLASS (code) == tcc_constant)
4708 *walk_subtrees = 0;
4709 return NULL_TREE;
4710 }
4711
4712 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4713 information indicating to what new SAVE_EXPR this one should be mapped,
4714 use that one. Otherwise, create a new node and enter it in ST. FN is
4715 the function into which the copy will be placed. */
4716
4717 static void
4718 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4719 {
4720 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4721 tree *n;
4722 tree t;
4723
4724 /* See if we already encountered this SAVE_EXPR. */
4725 n = (tree *) pointer_map_contains (st, *tp);
4726
4727 /* If we didn't already remap this SAVE_EXPR, do so now. */
4728 if (!n)
4729 {
4730 t = copy_node (*tp);
4731
4732 /* Remember this SAVE_EXPR. */
4733 *pointer_map_insert (st, *tp) = t;
4734 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4735 *pointer_map_insert (st, t) = t;
4736 }
4737 else
4738 {
4739 /* We've already walked into this SAVE_EXPR; don't do it again. */
4740 *walk_subtrees = 0;
4741 t = *n;
4742 }
4743
4744 /* Replace this SAVE_EXPR with the copy. */
4745 *tp = t;
4746 }
4747
4748 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4749 label, copies the declaration and enters it in the splay_tree in DATA (which
4750 is really a 'copy_body_data *'). */
4751
4752 static tree
4753 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4754 bool *handled_ops_p ATTRIBUTE_UNUSED,
4755 struct walk_stmt_info *wi)
4756 {
4757 copy_body_data *id = (copy_body_data *) wi->info;
4758 gimple stmt = gsi_stmt (*gsip);
4759
4760 if (gimple_code (stmt) == GIMPLE_LABEL)
4761 {
4762 tree decl = gimple_label_label (stmt);
4763
4764 /* Copy the decl and remember the copy. */
4765 insert_decl_map (id, decl, id->copy_decl (decl, id));
4766 }
4767
4768 return NULL_TREE;
4769 }
4770
4771
4772 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4773 Using the decl map in the copy_body_data passed through DATA,
4774 remaps all local declarations to appropriate replacements in gimple
4775 operands. */
4776
4777 static tree
4778 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4779 {
4780 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4781 copy_body_data *id = (copy_body_data *) wi->info;
4782 struct pointer_map_t *st = id->decl_map;
4783 tree *n;
4784 tree expr = *tp;
4785
4786 /* Only a local declaration (variable or label). */
4787 if ((TREE_CODE (expr) == VAR_DECL
4788 && !TREE_STATIC (expr))
4789 || TREE_CODE (expr) == LABEL_DECL)
4790 {
4791 /* Lookup the declaration. */
4792 n = (tree *) pointer_map_contains (st, expr);
4793
4794 /* If it's there, remap it. */
4795 if (n)
4796 *tp = *n;
4797 *walk_subtrees = 0;
4798 }
4799 else if (TREE_CODE (expr) == STATEMENT_LIST
4800 || TREE_CODE (expr) == BIND_EXPR
4801 || TREE_CODE (expr) == SAVE_EXPR)
4802 gcc_unreachable ();
4803 else if (TREE_CODE (expr) == TARGET_EXPR)
4804 {
4805 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4806 It's OK for this to happen if it was part of a subtree that
4807 isn't immediately expanded, such as operand 2 of another
4808 TARGET_EXPR. */
4809 if (!TREE_OPERAND (expr, 1))
4810 {
4811 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4812 TREE_OPERAND (expr, 3) = NULL_TREE;
4813 }
4814 }
4815
4816 /* Keep iterating. */
4817 return NULL_TREE;
4818 }
4819
4820
4821 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4822 Using the decl map in the copy_body_data passed through WI,
4823 remaps all local declarations to appropriate replacements in gimple
4824 statements. */
4825
4826 static tree
4827 replace_locals_stmt (gimple_stmt_iterator *gsip,
4828 bool *handled_ops_p ATTRIBUTE_UNUSED,
4829 struct walk_stmt_info *wi)
4830 {
4831 copy_body_data *id = (copy_body_data *) wi->info;
4832 gimple stmt = gsi_stmt (*gsip);
4833
4834 if (gimple_code (stmt) == GIMPLE_BIND)
4835 {
4836 tree block = gimple_bind_block (stmt);
4837
4838 if (block)
4839 {
4840 remap_block (&block, id);
4841 gimple_bind_set_block (stmt, block);
4842 }
4843
4844 /* This will remap a lot of the same decls again, but this should be
4845 harmless. */
4846 if (gimple_bind_vars (stmt))
4847 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4848 NULL, id));
4849 }
4850
4851 /* Keep iterating. */
4852 return NULL_TREE;
4853 }
4854
4855
4856 /* Copies everything in SEQ and replaces variables and labels local to
4857 current_function_decl. */
4858
4859 gimple_seq
4860 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4861 {
4862 copy_body_data id;
4863 struct walk_stmt_info wi;
4864 struct pointer_set_t *visited;
4865 gimple_seq copy;
4866
4867 /* There's nothing to do for NULL_TREE. */
4868 if (seq == NULL)
4869 return seq;
4870
4871 /* Set up ID. */
4872 memset (&id, 0, sizeof (id));
4873 id.src_fn = current_function_decl;
4874 id.dst_fn = current_function_decl;
4875 id.decl_map = pointer_map_create ();
4876 id.debug_map = NULL;
4877
4878 id.copy_decl = copy_decl_no_change;
4879 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4880 id.transform_new_cfg = false;
4881 id.transform_return_to_modify = false;
4882 id.transform_parameter = false;
4883 id.transform_lang_insert_block = NULL;
4884
4885 /* Walk the tree once to find local labels. */
4886 memset (&wi, 0, sizeof (wi));
4887 visited = pointer_set_create ();
4888 wi.info = &id;
4889 wi.pset = visited;
4890 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4891 pointer_set_destroy (visited);
4892
4893 copy = gimple_seq_copy (seq);
4894
4895 /* Walk the copy, remapping decls. */
4896 memset (&wi, 0, sizeof (wi));
4897 wi.info = &id;
4898 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4899
4900 /* Clean up. */
4901 pointer_map_destroy (id.decl_map);
4902 if (id.debug_map)
4903 pointer_map_destroy (id.debug_map);
4904
4905 return copy;
4906 }
4907
4908
4909 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4910
4911 static tree
4912 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4913 {
4914 if (*tp == data)
4915 return (tree) data;
4916 else
4917 return NULL;
4918 }
4919
4920 DEBUG_FUNCTION bool
4921 debug_find_tree (tree top, tree search)
4922 {
4923 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4924 }
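
/* Intended to be invoked by hand, e.g. from gdb while debugging the compiler
   (TOP and SEARCH being whatever trees are under investigation):

       (gdb) call debug_find_tree (top, search)
*/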
4925
4926
4927 /* Declare the variables created by the inliner. Add all the variables in
4928 VARS to BLOCK and register them with the current function. */
4929
4930 static void
4931 declare_inline_vars (tree block, tree vars)
4932 {
4933 tree t;
4934 for (t = vars; t; t = DECL_CHAIN (t))
4935 {
4936 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4937 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4938 add_local_decl (cfun, t);
4939 }
4940
4941 if (block)
4942 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4943 }
4944
4945 /* Finish setting up the copy COPY of DECL: propagate the flags and
4946 debug information common to all the copy_decl_* routines. DECL was
4947 originally in ID->src_fn and the copy will end up in ID->dst_fn. */
4948
4949 static tree
4950 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4951 {
4952 /* Don't generate debug information for the copy if we wouldn't have
4953 generated it for the original. */
4954 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4955 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4956
4957 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4958 declaration inspired this copy. */
4959 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4960
4961 /* The new variable/label has no RTL, yet. */
4962 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4963 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4964 SET_DECL_RTL (copy, 0);
4965
4966 /* These args would always appear unused, if not for this. */
4967 TREE_USED (copy) = 1;
4968
4969 /* Set the context for the new declaration. */
4970 if (!DECL_CONTEXT (decl))
4971 /* Globals stay global. */
4972 ;
4973 else if (DECL_CONTEXT (decl) != id->src_fn)
4974 /* Things that weren't in the scope of the function we're inlining
4975 from aren't in the scope we're inlining to, either. */
4976 ;
4977 else if (TREE_STATIC (decl))
4978 /* Function-scoped static variables should stay in the original
4979 function. */
4980 ;
4981 else
4982 /* Ordinary automatic local variables are now in the scope of the
4983 new function. */
4984 DECL_CONTEXT (copy) = id->dst_fn;
4985
4986 return copy;
4987 }
4988
4989 static tree
4990 copy_decl_to_var (tree decl, copy_body_data *id)
4991 {
4992 tree copy, type;
4993
4994 gcc_assert (TREE_CODE (decl) == PARM_DECL
4995 || TREE_CODE (decl) == RESULT_DECL);
4996
4997 type = TREE_TYPE (decl);
4998
4999 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5000 VAR_DECL, DECL_NAME (decl), type);
5001 if (DECL_PT_UID_SET_P (decl))
5002 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5003 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5004 TREE_READONLY (copy) = TREE_READONLY (decl);
5005 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5006 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5007
5008 return copy_decl_for_dup_finish (id, decl, copy);
5009 }
5010
5011 /* Like copy_decl_to_var, but create a return slot object instead of a
5012 pointer variable for return by invisible reference. */
5013
5014 static tree
5015 copy_result_decl_to_var (tree decl, copy_body_data *id)
5016 {
5017 tree copy, type;
5018
5019 gcc_assert (TREE_CODE (decl) == PARM_DECL
5020 || TREE_CODE (decl) == RESULT_DECL);
5021
5022 type = TREE_TYPE (decl);
5023 if (DECL_BY_REFERENCE (decl))
5024 type = TREE_TYPE (type);
5025
5026 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5027 VAR_DECL, DECL_NAME (decl), type);
5028 if (DECL_PT_UID_SET_P (decl))
5029 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5030 TREE_READONLY (copy) = TREE_READONLY (decl);
5031 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5032 if (!DECL_BY_REFERENCE (decl))
5033 {
5034 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5035 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5036 }
5037
5038 return copy_decl_for_dup_finish (id, decl, copy);
5039 }
5040
5041 tree
5042 copy_decl_no_change (tree decl, copy_body_data *id)
5043 {
5044 tree copy;
5045
5046 copy = copy_node (decl);
5047
5048 /* The COPY is not abstract; it will be generated in DST_FN. */
5049 DECL_ABSTRACT (copy) = 0;
5050 lang_hooks.dup_lang_specific_decl (copy);
5051
5052 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5053 been taken; it's for internal bookkeeping in expand_goto_internal. */
5054 if (TREE_CODE (copy) == LABEL_DECL)
5055 {
5056 TREE_ADDRESSABLE (copy) = 0;
5057 LABEL_DECL_UID (copy) = -1;
5058 }
5059
5060 return copy_decl_for_dup_finish (id, decl, copy);
5061 }
5062
5063 static tree
5064 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5065 {
5066 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5067 return copy_decl_to_var (decl, id);
5068 else
5069 return copy_decl_no_change (decl, id);
5070 }
5071
5072 /* Return a copy of the function's argument tree. */
5073 static tree
5074 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5075 bitmap args_to_skip, tree *vars)
5076 {
5077 tree arg, *parg;
5078 tree new_parm = NULL;
5079 int i = 0;
5080
5081 parg = &new_parm;
5082
5083 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5084 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5085 {
5086 tree new_tree = remap_decl (arg, id);
5087 if (TREE_CODE (new_tree) != PARM_DECL)
5088 new_tree = id->copy_decl (arg, id);
5089 lang_hooks.dup_lang_specific_decl (new_tree);
5090 *parg = new_tree;
5091 parg = &DECL_CHAIN (new_tree);
5092 }
5093 else if (!pointer_map_contains (id->decl_map, arg))
5094 {
5095 /* Make an equivalent VAR_DECL. If the argument was used
5096 as a temporary variable later in the function, the uses will be
5097 replaced by the local variable. */
5098 tree var = copy_decl_to_var (arg, id);
5099 insert_decl_map (id, arg, var);
5100 /* Declare this new variable. */
5101 DECL_CHAIN (var) = *vars;
5102 *vars = var;
5103 }
5104 return new_parm;
5105 }
5106
5107 /* Return a copy of the function's static chain. */
5108 static tree
5109 copy_static_chain (tree static_chain, copy_body_data * id)
5110 {
5111 tree *chain_copy, *pvar;
5112
5113 chain_copy = &static_chain;
5114 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5115 {
5116 tree new_tree = remap_decl (*pvar, id);
5117 lang_hooks.dup_lang_specific_decl (new_tree);
5118 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5119 *pvar = new_tree;
5120 }
5121 return static_chain;
5122 }
5123
5124 /* Return true if the function is allowed to be versioned.
5125 This is a guard for the versioning functionality. */
5126
5127 bool
5128 tree_versionable_function_p (tree fndecl)
5129 {
5130 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5131 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5132 }
5133
5134 /* Delete all unreachable basic blocks and update callgraph.
5135 Doing so is somewhat nontrivial because we need to update all clones and
5136 remove inline functions that become unreachable. */
5137
5138 static bool
5139 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5140 {
5141 bool changed = false;
5142 basic_block b, next_bb;
5143
5144 find_unreachable_blocks ();
5145
5146 /* Delete all unreachable basic blocks. */
5147
5148 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5149 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5150 {
5151 next_bb = b->next_bb;
5152
5153 if (!(b->flags & BB_REACHABLE))
5154 {
5155 gimple_stmt_iterator bsi;
5156
5157 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5158 {
5159 struct cgraph_edge *e;
5160 struct cgraph_node *node;
5161
5162 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5163
5164 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5165 &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5166 {
5167 if (!e->inline_failed)
5168 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5169 else
5170 cgraph_remove_edge (e);
5171 }
5172 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5173 && id->dst_node->clones)
5174 for (node = id->dst_node->clones; node != id->dst_node;)
5175 {
5176 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5177 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5178 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5179 {
5180 if (!e->inline_failed)
5181 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5182 else
5183 cgraph_remove_edge (e);
5184 }
5185
5186 if (node->clones)
5187 node = node->clones;
5188 else if (node->next_sibling_clone)
5189 node = node->next_sibling_clone;
5190 else
5191 {
5192 while (node != id->dst_node && !node->next_sibling_clone)
5193 node = node->clone_of;
5194 if (node != id->dst_node)
5195 node = node->next_sibling_clone;
5196 }
5197 }
5198 }
5199 delete_basic_block (b);
5200 changed = true;
5201 }
5202 }
5203
5204 return changed;
5205 }
5206
5207 /* Update clone info after duplication. */
5208
5209 static void
5210 update_clone_info (copy_body_data * id)
5211 {
5212 struct cgraph_node *node;
5213 if (!id->dst_node->clones)
5214 return;
5215 for (node = id->dst_node->clones; node != id->dst_node;)
5216 {
5217 /* First update replace maps to match the new body. */
5218 if (node->clone.tree_map)
5219 {
5220 unsigned int i;
5221 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5222 {
5223 struct ipa_replace_map *replace_info;
5224 replace_info = (*node->clone.tree_map)[i];
5225 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5226 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5227 }
5228 }
5229 if (node->clones)
5230 node = node->clones;
5231 else if (node->next_sibling_clone)
5232 node = node->next_sibling_clone;
5233 else
5234 {
5235 while (node != id->dst_node && !node->next_sibling_clone)
5236 node = node->clone_of;
5237 if (node != id->dst_node)
5238 node = node->next_sibling_clone;
5239 }
5240 }
5241 }
5242
5243 /* Create a copy of a function's tree.
5244 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5245 of the original function and the new copied function
5246 respectively. In case we want to replace a DECL
5247 tree with another tree while duplicating the function's
5248 body, TREE_MAP represents the mapping between these
5249 trees. If UPDATE_CLONES is set, the call_stmt fields
5250 of edges of clones of the function will be updated.
5251
5252 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5253 from new version.
5254 If SKIP_RETURN is true, the new version will return void.
5255 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5256 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5257 */
5258 void
5259 tree_function_versioning (tree old_decl, tree new_decl,
5260 vec<ipa_replace_map_p, va_gc> *tree_map,
5261 bool update_clones, bitmap args_to_skip,
5262 bool skip_return, bitmap blocks_to_copy,
5263 basic_block new_entry)
5264 {
5265 struct cgraph_node *old_version_node;
5266 struct cgraph_node *new_version_node;
5267 copy_body_data id;
5268 tree p;
5269 unsigned i;
5270 struct ipa_replace_map *replace_info;
5271 basic_block old_entry_block, bb;
5272 auto_vec<gimple, 10> init_stmts;
5273 tree vars = NULL_TREE;
5274
5275 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5276 && TREE_CODE (new_decl) == FUNCTION_DECL);
5277 DECL_POSSIBLY_INLINED (old_decl) = 1;
5278
5279 old_version_node = cgraph_get_node (old_decl);
5280 gcc_checking_assert (old_version_node);
5281 new_version_node = cgraph_get_node (new_decl);
5282 gcc_checking_assert (new_version_node);
5283
5284 /* Copy over debug args. */
5285 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5286 {
5287 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5288 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5289 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5290 old_debug_args = decl_debug_args_lookup (old_decl);
5291 if (old_debug_args)
5292 {
5293 new_debug_args = decl_debug_args_insert (new_decl);
5294 *new_debug_args = vec_safe_copy (*old_debug_args);
5295 }
5296 }
5297
5298 /* Output the inlining info for this abstract function, since it has been
5299 inlined. If we don't do this now, we can lose the information about the
5300 variables in the function when the blocks get blown away as soon as we
5301 remove the cgraph node. */
5302 (*debug_hooks->outlining_inline_function) (old_decl);
5303
5304 DECL_ARTIFICIAL (new_decl) = 1;
5305 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5306 if (DECL_ORIGIN (old_decl) == old_decl)
5307 old_version_node->used_as_abstract_origin = true;
5308 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5309
5310 /* Prepare the data structures for the tree copy. */
5311 memset (&id, 0, sizeof (id));
5312
5313 /* Collect statements that will need folding after the copy. */
5314 id.statements_to_fold = pointer_set_create ();
5315
5316 id.decl_map = pointer_map_create ();
5317 id.debug_map = NULL;
5318 id.src_fn = old_decl;
5319 id.dst_fn = new_decl;
5320 id.src_node = old_version_node;
5321 id.dst_node = new_version_node;
5322 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5323 id.blocks_to_copy = blocks_to_copy;
5324 if (id.src_node->ipa_transforms_to_apply.exists ())
5325 {
5326 vec<ipa_opt_pass> old_transforms_to_apply
5327 = id.dst_node->ipa_transforms_to_apply;
5328 unsigned int i;
5329
5330 id.dst_node->ipa_transforms_to_apply
5331 = id.src_node->ipa_transforms_to_apply.copy ();
5332 for (i = 0; i < old_transforms_to_apply.length (); i++)
5333 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5334 old_transforms_to_apply.release ();
5335 }
5336
5337 id.copy_decl = copy_decl_no_change;
5338 id.transform_call_graph_edges
5339 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5340 id.transform_new_cfg = true;
5341 id.transform_return_to_modify = false;
5342 id.transform_parameter = false;
5343 id.transform_lang_insert_block = NULL;
5344
5345 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5346 (DECL_STRUCT_FUNCTION (old_decl));
5347 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5348 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5349 initialize_cfun (new_decl, old_decl,
5350 old_entry_block->count);
5351 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5352 = id.src_cfun->gimple_df->ipa_pta;
5353
5354 /* Copy the function's static chain. */
5355 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5356 if (p)
5357 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5358 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5359 &id);
5360
5361 /* If there's a tree_map, prepare for substitution. */
5362 if (tree_map)
5363 for (i = 0; i < tree_map->length (); i++)
5364 {
5365 gimple init;
5366 replace_info = (*tree_map)[i];
5367 if (replace_info->replace_p)
5368 {
5369 if (!replace_info->old_tree)
5370 {
5371 int i = replace_info->parm_num;
5372 tree parm;
5373 tree req_type;
5374
5375 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5376 i--;
5377 replace_info->old_tree = parm;
5378 req_type = TREE_TYPE (parm);
5379 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5380 {
5381 if (fold_convertible_p (req_type, replace_info->new_tree))
5382 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5383 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5384 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5385 else
5386 {
5387 if (dump_file)
5388 {
5389 fprintf (dump_file, " const ");
5390 print_generic_expr (dump_file, replace_info->new_tree, 0);
5391 fprintf (dump_file, " can't be converted to param ");
5392 print_generic_expr (dump_file, parm, 0);
5393 fprintf (dump_file, "\n");
5394 }
5395 replace_info->old_tree = NULL;
5396 }
5397 }
5398 }
5399 else
5400 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5401 if (replace_info->old_tree)
5402 {
5403 init = setup_one_parameter (&id, replace_info->old_tree,
5404 replace_info->new_tree, id.src_fn,
5405 NULL,
5406 &vars);
5407 if (init)
5408 init_stmts.safe_push (init);
5409 }
5410 }
5411 }
5412 /* Copy the function's arguments. */
5413 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5414 DECL_ARGUMENTS (new_decl) =
5415 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5416 args_to_skip, &vars);
5417
5418 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5419 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5420
5421 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5422
5423 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5424 /* Add local vars. */
5425 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5426
5427 if (DECL_RESULT (old_decl) == NULL_TREE)
5428 ;
5429 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5430 {
5431 DECL_RESULT (new_decl)
5432 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5433 RESULT_DECL, NULL_TREE, void_type_node);
5434 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5435 cfun->returns_struct = 0;
5436 cfun->returns_pcc_struct = 0;
5437 }
5438 else
5439 {
5440 tree old_name;
5441 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5442 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5443 if (gimple_in_ssa_p (id.src_cfun)
5444 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5445 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5446 {
5447 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5448 insert_decl_map (&id, old_name, new_name);
5449 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5450 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5451 }
5452 }
5453
5454 /* Set up the destination function's loop tree. */
5455 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5456 {
5457 cfun->curr_properties &= ~PROP_loops;
5458 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5459 cfun->curr_properties |= PROP_loops;
5460 }
5461
5462 /* Copy the function's body. */
5463 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5464 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5465 new_entry);
5466
5467 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5468 number_blocks (new_decl);
5469
5470 /* We want to create the BB unconditionally, so that the addition of
5471 debug stmts doesn't affect the BB count, which could otherwise cause
5472 codegen differences. */
5473 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5474 while (init_stmts.length ())
5475 insert_init_stmt (&id, bb, init_stmts.pop ());
5476 update_clone_info (&id);
5477
5478 /* Remap the nonlocal_goto_save_area, if any. */
5479 if (cfun->nonlocal_goto_save_area)
5480 {
5481 struct walk_stmt_info wi;
5482
5483 memset (&wi, 0, sizeof (wi));
5484 wi.info = &id;
5485 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5486 }
5487
5488 /* Clean up. */
5489 pointer_map_destroy (id.decl_map);
5490 if (id.debug_map)
5491 pointer_map_destroy (id.debug_map);
5492 free_dominance_info (CDI_DOMINATORS);
5493 free_dominance_info (CDI_POST_DOMINATORS);
5494
5495 fold_marked_statements (0, id.statements_to_fold);
5496 pointer_set_destroy (id.statements_to_fold);
5497 fold_cond_expr_cond ();
5498 delete_unreachable_blocks_update_callgraph (&id);
5499 if (id.dst_node->definition)
5500 cgraph_rebuild_references ();
5501 update_ssa (TODO_update_ssa);
5502
5503 /* After partial cloning we need to rescale frequencies so that they
5504 are within the proper range in the cloned function. */
5505 if (new_entry)
5506 {
5507 struct cgraph_edge *e;
5508 rebuild_frequencies ();
5509
5510 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5511 for (e = new_version_node->callees; e; e = e->next_callee)
5512 {
5513 basic_block bb = gimple_bb (e->call_stmt);
5514 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5515 bb);
5516 e->count = bb->count;
5517 }
5518 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5519 {
5520 basic_block bb = gimple_bb (e->call_stmt);
5521 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5522 bb);
5523 e->count = bb->count;
5524 }
5525 }
5526
5527 free_dominance_info (CDI_DOMINATORS);
5528 free_dominance_info (CDI_POST_DOMINATORS);
5529
5530 gcc_assert (!id.debug_stmts.exists ());
5531 pop_cfun ();
5532 return;
5533 }
5534
5535 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5536 the callee and return the inlined body on success. */
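
/* Illustrative (hypothetical) use from a front end that has just built a
   call to a "const" function whose body is still available:

     tree call = build_call_expr (fndecl, 1, arg);
     tree folded = maybe_inline_call_in_expr (call);
     if (folded != NULL_TREE)
       call = folded;    and use the inlined body instead of the call  */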
5537
5538 tree
5539 maybe_inline_call_in_expr (tree exp)
5540 {
5541 tree fn = get_callee_fndecl (exp);
5542
5543 /* We can only try to inline "const" functions. */
5544 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5545 {
5546 struct pointer_map_t *decl_map = pointer_map_create ();
5547 call_expr_arg_iterator iter;
5548 copy_body_data id;
5549 tree param, arg, t;
5550
5551 /* Remap the parameters. */
5552 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5553 param;
5554 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5555 *pointer_map_insert (decl_map, param) = arg;
5556
5557 memset (&id, 0, sizeof (id));
5558 id.src_fn = fn;
5559 id.dst_fn = current_function_decl;
5560 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5561 id.decl_map = decl_map;
5562
5563 id.copy_decl = copy_decl_no_change;
5564 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5565 id.transform_new_cfg = false;
5566 id.transform_return_to_modify = true;
5567 id.transform_parameter = true;
5568 id.transform_lang_insert_block = NULL;
5569
5570 /* Make sure not to unshare trees behind the front-end's back
5571 since front-end specific mechanisms may rely on sharing. */
5572 id.regimplify = false;
5573 id.do_not_unshare = true;
5574
5575 /* We're not inside any EH region. */
5576 id.eh_lp_nr = 0;
5577
5578 t = copy_tree_body (&id);
5579 pointer_map_destroy (decl_map);
5580
5581 /* We can only return something suitable for use in a GENERIC
5582 expression tree. */
5583 if (TREE_CODE (t) == MODIFY_EXPR)
5584 return TREE_OPERAND (t, 1);
5585 }
5586
5587 return NULL_TREE;
5588 }
5589
5590 /* Duplicate a type, fields and all. */
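
/* For instance (hypothetical use), a front end could obtain an independent
   copy of a RECORD_TYPE whose fields can then be modified without
   affecting the original:

     tree dup = build_duplicate_type (orig_record_type);  */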
5591
5592 tree
5593 build_duplicate_type (tree type)
5594 {
5595 struct copy_body_data id;
5596
5597 memset (&id, 0, sizeof (id));
5598 id.src_fn = current_function_decl;
5599 id.dst_fn = current_function_decl;
5600 id.src_cfun = cfun;
5601 id.decl_map = pointer_map_create ();
5602 id.debug_map = NULL;
5603 id.copy_decl = copy_decl_no_change;
5604
5605 type = remap_type_1 (type, &id);
5606
5607 pointer_map_destroy (id.decl_map);
5608 if (id.debug_map)
5609 pointer_map_destroy (id.debug_map);
5610
5611 TYPE_CANONICAL (type) = type;
5612
5613 return type;
5614 }