1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "intl.h"
39 #include "pointer-set.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "tree-cfg.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-into-ssa.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "function.h"
62 #include "tree-pretty-print.h"
63 #include "except.h"
64 #include "debug.h"
65 #include "ipa-prop.h"
66 #include "value-prof.h"
67 #include "tree-pass.h"
68 #include "target.h"
69 #include "cfgloop.h"
70 #include "builtins.h"
71
72 #include "rtl.h" /* FIXME: For asm_str_count. */
73
74 /* I'm not real happy about this, but we need to handle gimple and
75 non-gimple trees. */
76
77 /* Inlining, Cloning, Versioning, Parallelization
78
79 Inlining: a function body is duplicated, but the PARM_DECLs are
80 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
81 MODIFY_EXPRs that store to a dedicated returned-value variable.
82 The duplicated eh_region info of the copy will later be appended
83 to the info for the caller; the eh_region info in copied throwing
84 statements and RESX statements is adjusted accordingly.
85
86 Cloning: (only in C++) We have one body for a con/de/structor, and
87 multiple function decls, each with a unique parameter list.
88 Duplicate the body, using the given splay tree; some parameters
89 will become constants (like 0 or 1).
90
91 Versioning: a function body is duplicated, and the result is a new
92 function, rather than being copied into the blocks of an existing
93 function as with inlining.  Some parameters will become constants.
94
95 Parallelization: a region of a function is duplicated resulting in
96 a new function. Variables may be replaced with complex expressions
97 to enable shared variable semantics.
98
99 All of these will simultaneously look up any callgraph edges.  If
100 we're going to inline the duplicated function body, and the given
101 function has some cloned callgraph nodes (one for each place this
102 function will be inlined), those callgraph edges will be duplicated.
103 If we're cloning the body, those callgraph edges will be
104 updated to point into the new body. (Note that the original
105 callgraph node and edge list will not be altered.)
106
107 See the CALL_EXPR handling case in copy_tree_body_r (). */
108
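/* Illustrative sketch (added for exposition; the temporaries named below
   are hypothetical, not names this file creates): at the source level,
   the inlining transformation described above roughly turns

     int callee (int x) { return x + 1; }
     int caller (void)  { return callee (41); }

   into something like

     int caller (void)
     {
       int x_copy = 41;            <- the PARM_DECL remapped to a VAR_DECL
       int retval = x_copy + 1;    <- the RETURN_EXPR became a MODIFY_EXPR
       return retval;                 storing to the returned-value variable
     }

   after which the callee's eh_region info is appended to the caller's as
   described above.  */
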
109 /* To Do:
110
111 o In order to make inlining-on-trees work, we pessimized
112 function-local static constants. In particular, they are now
113 always output, even when not addressed. Fix this by treating
114 function-local static constants just like global static
115 constants; the back-end already knows not to output them if they
116 are not needed.
117
118 o Provide heuristics to clamp inlining of recursive template
119 calls? */
120
121
122 /* Weights that estimate_num_insns uses to estimate the size of the
123 produced code. */
124
125 eni_weights eni_size_weights;
126
127 /* Weights that estimate_num_insns uses to estimate the time necessary
128 to execute the produced code. */
129
130 eni_weights eni_time_weights;
131
132 /* Prototypes. */
133
134 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
135 static void remap_block (tree *, copy_body_data *);
136 static void copy_bind_expr (tree *, int *, copy_body_data *);
137 static void declare_inline_vars (tree, tree);
138 static void remap_save_expr (tree *, void *, int *);
139 static void prepend_lexical_block (tree current_block, tree new_block);
140 static tree copy_decl_to_var (tree, copy_body_data *);
141 static tree copy_result_decl_to_var (tree, copy_body_data *);
142 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
143 static gimple remap_gimple_stmt (gimple, copy_body_data *);
144 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
145
146 /* Insert a tree->tree mapping for ID.  Although the name suggests
147 that the trees should be variables, it is used for more than that.  */
148
149 void
150 insert_decl_map (copy_body_data *id, tree key, tree value)
151 {
152 *pointer_map_insert (id->decl_map, key) = value;
153
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 *pointer_map_insert (id->decl_map, value) = value;
158 }
159
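/* A minimal usage sketch (illustrative only; "parm" and "local_copy" are
   hypothetical placeholders, not variables used in this file):

     insert_decl_map (id, parm, local_copy);
     ...
     tree *slot = (tree *) pointer_map_contains (id->decl_map, parm);
     if (slot)
       gcc_assert (*slot == local_copy);

   Because of the identity entry added above, a later lookup of local_copy
   itself also yields local_copy, so the new node is not duplicated a
   second time.  */
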
160 /* Insert a tree->tree mapping for ID. This is only used for
161 variables. */
162
163 static void
164 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
165 {
166 if (!gimple_in_ssa_p (id->src_cfun))
167 return;
168
169 if (!MAY_HAVE_DEBUG_STMTS)
170 return;
171
172 if (!target_for_debug_bind (key))
173 return;
174
175 gcc_assert (TREE_CODE (key) == PARM_DECL);
176 gcc_assert (TREE_CODE (value) == VAR_DECL);
177
178 if (!id->debug_map)
179 id->debug_map = pointer_map_create ();
180
181 *pointer_map_insert (id->debug_map, key) = value;
182 }
183
184 /* If nonzero, we're remapping the contents of inlined debug
185 statements. If negative, an error has occurred, such as a
186 reference to a variable that isn't available in the inlined
187 context. */
188 static int processing_debug_stmt = 0;
189
190 /* Construct new SSA name for old NAME. ID is the inline context. */
191
192 static tree
193 remap_ssa_name (tree name, copy_body_data *id)
194 {
195 tree new_tree, var;
196 tree *n;
197
198 gcc_assert (TREE_CODE (name) == SSA_NAME);
199
200 n = (tree *) pointer_map_contains (id->decl_map, name);
201 if (n)
202 return unshare_expr (*n);
203
204 if (processing_debug_stmt)
205 {
206 if (SSA_NAME_IS_DEFAULT_DEF (name)
207 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
208 && id->entry_bb == NULL
209 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
210 {
211 tree vexpr = make_node (DEBUG_EXPR_DECL);
212 gimple def_temp;
213 gimple_stmt_iterator gsi;
214 tree val = SSA_NAME_VAR (name);
215
216 n = (tree *) pointer_map_contains (id->decl_map, val);
217 if (n != NULL)
218 val = *n;
219 if (TREE_CODE (val) != PARM_DECL)
220 {
221 processing_debug_stmt = -1;
222 return name;
223 }
224 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
225 DECL_ARTIFICIAL (vexpr) = 1;
226 TREE_TYPE (vexpr) = TREE_TYPE (name);
227 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
228 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
229 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
230 return vexpr;
231 }
232
233 processing_debug_stmt = -1;
234 return name;
235 }
236
237 /* Remap anonymous SSA names or SSA names of anonymous decls. */
238 var = SSA_NAME_VAR (name);
239 if (!var
240 || (!SSA_NAME_IS_DEFAULT_DEF (name)
241 && TREE_CODE (var) == VAR_DECL
242 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
243 && DECL_ARTIFICIAL (var)
244 && DECL_IGNORED_P (var)
245 && !DECL_NAME (var)))
246 {
247 struct ptr_info_def *pi;
248 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
249 if (!var && SSA_NAME_IDENTIFIER (name))
250 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
251 insert_decl_map (id, name, new_tree);
252 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
253 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
254 /* At least IPA points-to info can be directly transferred. */
255 if (id->src_cfun->gimple_df
256 && id->src_cfun->gimple_df->ipa_pta
257 && (pi = SSA_NAME_PTR_INFO (name))
258 && !pi->pt.anything)
259 {
260 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
261 new_pi->pt = pi->pt;
262 }
263 return new_tree;
264 }
265
266 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
267 in copy_bb. */
268 new_tree = remap_decl (var, id);
269
270 /* We might've substituted a constant or another SSA_NAME for
271 the variable.
272
273 Replace the SSA name representing the RESULT_DECL by the variable
274 during inlining: this saves us from needing to introduce a PHI node
275 in the case the return value is only partly initialized.  */
276 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
277 && (!SSA_NAME_VAR (name)
278 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
279 || !id->transform_return_to_modify))
280 {
281 struct ptr_info_def *pi;
282 new_tree = make_ssa_name (new_tree, NULL);
283 insert_decl_map (id, name, new_tree);
284 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286 /* At least IPA points-to info can be directly transferred. */
287 if (id->src_cfun->gimple_df
288 && id->src_cfun->gimple_df->ipa_pta
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 {
297 /* By inlining a function that has an uninitialized variable, we might
298 extend its lifetime (the variable might get reused).  This causes an
299 ICE in the case we end up extending the lifetime of an SSA name across
300 an abnormal edge, and it also increases register pressure.
301
302 We simply initialize all uninitialized vars to 0, except for the
303 case where we are inlining into the very first BB.  We could avoid
304 this for all BBs that are not inside strongly connected regions of
305 the CFG, but that is expensive to test.  */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 {
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 }
322 else
323 {
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 }
327 }
328 }
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
332 }
333
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
335
336 tree
337 remap_decl (tree decl, copy_body_data *id)
338 {
339 tree *n;
340
341 /* We only remap local variables in the current function. */
342
343 /* See if we have remapped this declaration. */
344
345 n = (tree *) pointer_map_contains (id->decl_map, decl);
346
347 if (!n && processing_debug_stmt)
348 {
349 processing_debug_stmt = -1;
350 return decl;
351 }
352
353 /* If we didn't already have an equivalent for this declaration,
354 create one now. */
355 if (!n)
356 {
357 /* Make a copy of the variable or label. */
358 tree t = id->copy_decl (decl, id);
359
360 /* Remember it, so that if we encounter this local entity again
361 we can reuse this copy. Do this early because remap_type may
362 need this decl for TYPE_STUB_DECL. */
363 insert_decl_map (id, decl, t);
364
365 if (!DECL_P (t))
366 return t;
367
368 /* Remap types, if necessary. */
369 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
370 if (TREE_CODE (t) == TYPE_DECL)
371 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
372
373 /* Remap sizes as necessary. */
374 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
375 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
376
377 /* If fields, do likewise for offset and qualifier. */
378 if (TREE_CODE (t) == FIELD_DECL)
379 {
380 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
381 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
382 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
383 }
384
385 return t;
386 }
387
388 if (id->do_not_unshare)
389 return *n;
390 else
391 return unshare_expr (*n);
392 }
393
394 static tree
395 remap_type_1 (tree type, copy_body_data *id)
396 {
397 tree new_tree, t;
398
399 /* We do need a copy.  Build and register it now.  If this is a pointer or
400 reference type, remap the designated type and make a new pointer or
401 reference type. */
402 if (TREE_CODE (type) == POINTER_TYPE)
403 {
404 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
405 TYPE_MODE (type),
406 TYPE_REF_CAN_ALIAS_ALL (type));
407 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
408 new_tree = build_type_attribute_qual_variant (new_tree,
409 TYPE_ATTRIBUTES (type),
410 TYPE_QUALS (type));
411 insert_decl_map (id, type, new_tree);
412 return new_tree;
413 }
414 else if (TREE_CODE (type) == REFERENCE_TYPE)
415 {
416 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
417 TYPE_MODE (type),
418 TYPE_REF_CAN_ALIAS_ALL (type));
419 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
420 new_tree = build_type_attribute_qual_variant (new_tree,
421 TYPE_ATTRIBUTES (type),
422 TYPE_QUALS (type));
423 insert_decl_map (id, type, new_tree);
424 return new_tree;
425 }
426 else
427 new_tree = copy_node (type);
428
429 insert_decl_map (id, type, new_tree);
430
431 /* This is a new type, not a copy of an old type. Need to reassociate
432 variants. We can handle everything except the main variant lazily. */
433 t = TYPE_MAIN_VARIANT (type);
434 if (type != t)
435 {
436 t = remap_type (t, id);
437 TYPE_MAIN_VARIANT (new_tree) = t;
438 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
439 TYPE_NEXT_VARIANT (t) = new_tree;
440 }
441 else
442 {
443 TYPE_MAIN_VARIANT (new_tree) = new_tree;
444 TYPE_NEXT_VARIANT (new_tree) = NULL;
445 }
446
447 if (TYPE_STUB_DECL (type))
448 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
449
450 /* Lazily create pointer and reference types. */
451 TYPE_POINTER_TO (new_tree) = NULL;
452 TYPE_REFERENCE_TO (new_tree) = NULL;
453
454 switch (TREE_CODE (new_tree))
455 {
456 case INTEGER_TYPE:
457 case REAL_TYPE:
458 case FIXED_POINT_TYPE:
459 case ENUMERAL_TYPE:
460 case BOOLEAN_TYPE:
461 t = TYPE_MIN_VALUE (new_tree);
462 if (t && TREE_CODE (t) != INTEGER_CST)
463 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
464
465 t = TYPE_MAX_VALUE (new_tree);
466 if (t && TREE_CODE (t) != INTEGER_CST)
467 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
468 return new_tree;
469
470 case FUNCTION_TYPE:
471 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
472 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
473 return new_tree;
474
475 case ARRAY_TYPE:
476 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
477 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
478 break;
479
480 case RECORD_TYPE:
481 case UNION_TYPE:
482 case QUAL_UNION_TYPE:
483 {
484 tree f, nf = NULL;
485
486 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
487 {
488 t = remap_decl (f, id);
489 DECL_CONTEXT (t) = new_tree;
490 DECL_CHAIN (t) = nf;
491 nf = t;
492 }
493 TYPE_FIELDS (new_tree) = nreverse (nf);
494 }
495 break;
496
497 case OFFSET_TYPE:
498 default:
499 /* Shouldn't have been thought variable sized. */
500 gcc_unreachable ();
501 }
502
503 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
504 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
505
506 return new_tree;
507 }
508
509 tree
510 remap_type (tree type, copy_body_data *id)
511 {
512 tree *node;
513 tree tmp;
514
515 if (type == NULL)
516 return type;
517
518 /* See if we have remapped this type. */
519 node = (tree *) pointer_map_contains (id->decl_map, type);
520 if (node)
521 return *node;
522
523 /* The type only needs remapping if it's variably modified. */
524 if (! variably_modified_type_p (type, id->src_fn))
525 {
526 insert_decl_map (id, type, type);
527 return type;
528 }
529
530 id->remapping_type_depth++;
531 tmp = remap_type_1 (type, id);
532 id->remapping_type_depth--;
533
534 return tmp;
535 }
536
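/* For example (illustrative only; "callee" and "buf" are hypothetical):
   in a callee such as

     void callee (int n) { int buf[n]; ... }

   the type of "buf" depends on the PARM_DECL "n", so
   variably_modified_type_p returns true for it and remap_type_1 builds a
   fresh ARRAY_TYPE whose size expressions are remapped along with "n".
   A fixed-size type such as int[16] is not variably modified and is
   simply mapped to itself above.  */
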
537 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
538
539 static bool
540 can_be_nonlocal (tree decl, copy_body_data *id)
541 {
542 /* We cannot duplicate function decls.  */
543 if (TREE_CODE (decl) == FUNCTION_DECL)
544 return true;
545
546 /* Local static vars must be non-local or we get multiple declaration
547 problems. */
548 if (TREE_CODE (decl) == VAR_DECL
549 && !auto_var_in_fn_p (decl, id->src_fn))
550 return true;
551
552 return false;
553 }
554
555 static tree
556 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
557 copy_body_data *id)
558 {
559 tree old_var;
560 tree new_decls = NULL_TREE;
561
562 /* Remap its variables. */
563 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
564 {
565 tree new_var;
566
567 if (can_be_nonlocal (old_var, id))
568 {
569 /* We need to add this variable to the local decls as otherwise
570 nothing else will do so. */
571 if (TREE_CODE (old_var) == VAR_DECL
572 && ! DECL_EXTERNAL (old_var))
573 add_local_decl (cfun, old_var);
574 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
575 && !DECL_IGNORED_P (old_var)
576 && nonlocalized_list)
577 vec_safe_push (*nonlocalized_list, old_var);
578 continue;
579 }
580
581 /* Remap the variable. */
582 new_var = remap_decl (old_var, id);
583
584 /* If we didn't remap this variable, we can't mess with its
585 TREE_CHAIN. If we remapped this variable to the return slot, it's
586 already declared somewhere else, so don't declare it here. */
587
588 if (new_var == id->retvar)
589 ;
590 else if (!new_var)
591 {
592 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
593 && !DECL_IGNORED_P (old_var)
594 && nonlocalized_list)
595 vec_safe_push (*nonlocalized_list, old_var);
596 }
597 else
598 {
599 gcc_assert (DECL_P (new_var));
600 DECL_CHAIN (new_var) = new_decls;
601 new_decls = new_var;
602
603 /* Also copy value-expressions. */
604 if (TREE_CODE (new_var) == VAR_DECL
605 && DECL_HAS_VALUE_EXPR_P (new_var))
606 {
607 tree tem = DECL_VALUE_EXPR (new_var);
608 bool old_regimplify = id->regimplify;
609 id->remapping_type_depth++;
610 walk_tree (&tem, copy_tree_body_r, id, NULL);
611 id->remapping_type_depth--;
612 id->regimplify = old_regimplify;
613 SET_DECL_VALUE_EXPR (new_var, tem);
614 }
615 }
616 }
617
618 return nreverse (new_decls);
619 }
620
621 /* Copy the BLOCK to contain remapped versions of the variables
622 therein. And hook the new block into the block-tree. */
623
624 static void
625 remap_block (tree *block, copy_body_data *id)
626 {
627 tree old_block;
628 tree new_block;
629
630 /* Make the new block. */
631 old_block = *block;
632 new_block = make_node (BLOCK);
633 TREE_USED (new_block) = TREE_USED (old_block);
634 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
635 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
636 BLOCK_NONLOCALIZED_VARS (new_block)
637 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
638 *block = new_block;
639
640 /* Remap its variables. */
641 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
642 &BLOCK_NONLOCALIZED_VARS (new_block),
643 id);
644
645 if (id->transform_lang_insert_block)
646 id->transform_lang_insert_block (new_block);
647
648 /* Remember the remapped block. */
649 insert_decl_map (id, old_block, new_block);
650 }
651
652 /* Copy the whole block tree and root it in id->block. */
653 static tree
654 remap_blocks (tree block, copy_body_data *id)
655 {
656 tree t;
657 tree new_tree = block;
658
659 if (!block)
660 return NULL;
661
662 remap_block (&new_tree, id);
663 gcc_assert (new_tree != block);
664 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
665 prepend_lexical_block (new_tree, remap_blocks (t, id));
666 /* Blocks are in arbitrary order, but make things slightly prettier and do
667 not swap order when producing a copy. */
668 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
669 return new_tree;
670 }
671
672 /* Remap the block tree rooted at BLOCK to nothing. */
673 static void
674 remap_blocks_to_null (tree block, copy_body_data *id)
675 {
676 tree t;
677 insert_decl_map (id, block, NULL_TREE);
678 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
679 remap_blocks_to_null (t, id);
680 }
681
682 static void
683 copy_statement_list (tree *tp)
684 {
685 tree_stmt_iterator oi, ni;
686 tree new_tree;
687
688 new_tree = alloc_stmt_list ();
689 ni = tsi_start (new_tree);
690 oi = tsi_start (*tp);
691 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
692 *tp = new_tree;
693
694 for (; !tsi_end_p (oi); tsi_next (&oi))
695 {
696 tree stmt = tsi_stmt (oi);
697 if (TREE_CODE (stmt) == STATEMENT_LIST)
698 /* This copy is not redundant; tsi_link_after will smash this
699 STATEMENT_LIST into the end of the one we're building, and we
700 don't want to do that with the original. */
701 copy_statement_list (&stmt);
702 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
703 }
704 }
705
706 static void
707 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
708 {
709 tree block = BIND_EXPR_BLOCK (*tp);
710 /* Copy (and replace) the statement. */
711 copy_tree_r (tp, walk_subtrees, NULL);
712 if (block)
713 {
714 remap_block (&block, id);
715 BIND_EXPR_BLOCK (*tp) = block;
716 }
717
718 if (BIND_EXPR_VARS (*tp))
719 /* This will remap a lot of the same decls again, but this should be
720 harmless. */
721 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
722 }
723
724
725 /* Create a new gimple_seq by remapping all the statements in BODY
726 using the inlining information in ID. */
727
728 static gimple_seq
729 remap_gimple_seq (gimple_seq body, copy_body_data *id)
730 {
731 gimple_stmt_iterator si;
732 gimple_seq new_body = NULL;
733
734 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
735 {
736 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
737 gimple_seq_add_stmt (&new_body, new_stmt);
738 }
739
740 return new_body;
741 }
742
743
744 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
745 block using the mapping information in ID. */
746
747 static gimple
748 copy_gimple_bind (gimple stmt, copy_body_data *id)
749 {
750 gimple new_bind;
751 tree new_block, new_vars;
752 gimple_seq body, new_body;
753
754 /* Copy the statement. Note that we purposely don't use copy_stmt
755 here because we need to remap statements as we copy. */
756 body = gimple_bind_body (stmt);
757 new_body = remap_gimple_seq (body, id);
758
759 new_block = gimple_bind_block (stmt);
760 if (new_block)
761 remap_block (&new_block, id);
762
763 /* This will remap a lot of the same decls again, but this should be
764 harmless. */
765 new_vars = gimple_bind_vars (stmt);
766 if (new_vars)
767 new_vars = remap_decls (new_vars, NULL, id);
768
769 new_bind = gimple_build_bind (new_vars, new_body, new_block);
770
771 return new_bind;
772 }
773
774 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
775
776 static bool
777 is_parm (tree decl)
778 {
779 if (TREE_CODE (decl) == SSA_NAME)
780 {
781 decl = SSA_NAME_VAR (decl);
782 if (!decl)
783 return false;
784 }
785
786 return (TREE_CODE (decl) == PARM_DECL);
787 }
788
789 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
790 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
791 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
792 recursing into the child nodes of *TP.  */
793
794 static tree
795 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
796 {
797 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
798 copy_body_data *id = (copy_body_data *) wi_p->info;
799 tree fn = id->src_fn;
800
801 if (TREE_CODE (*tp) == SSA_NAME)
802 {
803 *tp = remap_ssa_name (*tp, id);
804 *walk_subtrees = 0;
805 return NULL;
806 }
807 else if (auto_var_in_fn_p (*tp, fn))
808 {
809 /* Local variables and labels need to be replaced by equivalent
810 variables. We don't want to copy static variables; there's
811 only one of those, no matter how many times we inline the
812 containing function. Similarly for globals from an outer
813 function. */
814 tree new_decl;
815
816 /* Remap the declaration. */
817 new_decl = remap_decl (*tp, id);
818 gcc_assert (new_decl);
819 /* Replace this variable with the copy. */
820 STRIP_TYPE_NOPS (new_decl);
821 /* ??? The C++ frontend uses void * pointer zero to initialize
822 any other type. This confuses the middle-end type verification.
823 As cloned bodies do not go through gimplification again the fixup
824 there doesn't trigger. */
825 if (TREE_CODE (new_decl) == INTEGER_CST
826 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
827 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
828 *tp = new_decl;
829 *walk_subtrees = 0;
830 }
831 else if (TREE_CODE (*tp) == STATEMENT_LIST)
832 gcc_unreachable ();
833 else if (TREE_CODE (*tp) == SAVE_EXPR)
834 gcc_unreachable ();
835 else if (TREE_CODE (*tp) == LABEL_DECL
836 && (!DECL_CONTEXT (*tp)
837 || decl_function_context (*tp) == id->src_fn))
838 /* These may need to be remapped for EH handling. */
839 *tp = remap_decl (*tp, id);
840 else if (TREE_CODE (*tp) == FIELD_DECL)
841 {
842 /* If the enclosing record type is variably_modified_type_p, the field
843 has already been remapped. Otherwise, it need not be. */
844 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
845 if (n)
846 *tp = *n;
847 *walk_subtrees = 0;
848 }
849 else if (TYPE_P (*tp))
850 /* Types may need remapping as well. */
851 *tp = remap_type (*tp, id);
852 else if (CONSTANT_CLASS_P (*tp))
853 {
854 /* If this is a constant, we have to copy the node iff the type
855 will be remapped. copy_tree_r will not copy a constant. */
856 tree new_type = remap_type (TREE_TYPE (*tp), id);
857
858 if (new_type == TREE_TYPE (*tp))
859 *walk_subtrees = 0;
860
861 else if (TREE_CODE (*tp) == INTEGER_CST)
862 *tp = wide_int_to_tree (new_type, *tp);
863 else
864 {
865 *tp = copy_node (*tp);
866 TREE_TYPE (*tp) = new_type;
867 }
868 }
869 else
870 {
871 /* Otherwise, just copy the node. Note that copy_tree_r already
872 knows not to copy VAR_DECLs, etc., so this is safe. */
873
874 if (TREE_CODE (*tp) == MEM_REF)
875 {
876 /* We need to re-canonicalize MEM_REFs from inline substitutions
877 that can happen when a pointer argument is an ADDR_EXPR.
878 Recurse here manually to allow that. */
879 tree ptr = TREE_OPERAND (*tp, 0);
880 tree type = remap_type (TREE_TYPE (*tp), id);
881 tree old = *tp;
882 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
883 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
884 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
885 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
886 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
887 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
888 remapped a parameter as the property might be valid only
889 for the parameter itself. */
890 if (TREE_THIS_NOTRAP (old)
891 && (!is_parm (TREE_OPERAND (old, 0))
892 || (!id->transform_parameter && is_parm (ptr))))
893 TREE_THIS_NOTRAP (*tp) = 1;
894 *walk_subtrees = 0;
895 return NULL;
896 }
897
898 /* Here is the "usual case". Copy this tree node, and then
899 tweak some special cases. */
900 copy_tree_r (tp, walk_subtrees, NULL);
901
902 if (TREE_CODE (*tp) != OMP_CLAUSE)
903 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
904
905 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
906 {
907 /* The copied TARGET_EXPR has never been expanded, even if the
908 original node was expanded already. */
909 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
910 TREE_OPERAND (*tp, 3) = NULL_TREE;
911 }
912 else if (TREE_CODE (*tp) == ADDR_EXPR)
913 {
914 /* Variable substitution need not be simple. In particular,
915 the MEM_REF substitution above. Make sure that
916 TREE_CONSTANT and friends are up-to-date. */
917 int invariant = is_gimple_min_invariant (*tp);
918 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
919 recompute_tree_invariant_for_addr_expr (*tp);
920
921 /* If this used to be invariant, but is not any longer,
922 then regimplification is probably needed. */
923 if (invariant && !is_gimple_min_invariant (*tp))
924 id->regimplify = true;
925
926 *walk_subtrees = 0;
927 }
928 }
929
930 /* Update the TREE_BLOCK for the cloned expr. */
931 if (EXPR_P (*tp))
932 {
933 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
934 tree old_block = TREE_BLOCK (*tp);
935 if (old_block)
936 {
937 tree *n;
938 n = (tree *) pointer_map_contains (id->decl_map,
939 TREE_BLOCK (*tp));
940 if (n)
941 new_block = *n;
942 }
943 TREE_SET_BLOCK (*tp, new_block);
944 }
945
946 /* Keep iterating. */
947 return NULL_TREE;
948 }
949
950
951 /* Called from copy_body_id via walk_tree. DATA is really a
952 `copy_body_data *'. */
953
954 tree
955 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
956 {
957 copy_body_data *id = (copy_body_data *) data;
958 tree fn = id->src_fn;
959 tree new_block;
960
961 /* Begin by recognizing trees that we'll completely rewrite for the
962 inlining context. Our output for these trees is completely
963 different from our input (e.g. RETURN_EXPR is deleted, and morphs
964 into an edge). Further down, we'll handle trees that get
965 duplicated and/or tweaked. */
966
967 /* When requested, RETURN_EXPRs should be transformed to just the
968 contained MODIFY_EXPR. The branch semantics of the return will
969 be handled elsewhere by manipulating the CFG rather than a statement. */
970 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
971 {
972 tree assignment = TREE_OPERAND (*tp, 0);
973
974 /* If we're returning something, just turn that into an
975 assignment into the equivalent of the original RESULT_DECL.
976 If the "assignment" is just the result decl, the result
977 decl has already been set (e.g. a recent "foo (&result_decl,
978 ...)"); just toss the entire RETURN_EXPR. */
979 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
980 {
981 /* Replace the RETURN_EXPR with (a copy of) the
982 MODIFY_EXPR hanging underneath. */
983 *tp = copy_node (assignment);
984 }
985 else /* Else the RETURN_EXPR returns no value. */
986 {
987 *tp = NULL;
988 return (tree) (void *)1;
989 }
990 }
991 else if (TREE_CODE (*tp) == SSA_NAME)
992 {
993 *tp = remap_ssa_name (*tp, id);
994 *walk_subtrees = 0;
995 return NULL;
996 }
997
998 /* Local variables and labels need to be replaced by equivalent
999 variables. We don't want to copy static variables; there's only
1000 one of those, no matter how many times we inline the containing
1001 function. Similarly for globals from an outer function. */
1002 else if (auto_var_in_fn_p (*tp, fn))
1003 {
1004 tree new_decl;
1005
1006 /* Remap the declaration. */
1007 new_decl = remap_decl (*tp, id);
1008 gcc_assert (new_decl);
1009 /* Replace this variable with the copy. */
1010 STRIP_TYPE_NOPS (new_decl);
1011 *tp = new_decl;
1012 *walk_subtrees = 0;
1013 }
1014 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1015 copy_statement_list (tp);
1016 else if (TREE_CODE (*tp) == SAVE_EXPR
1017 || TREE_CODE (*tp) == TARGET_EXPR)
1018 remap_save_expr (tp, id->decl_map, walk_subtrees);
1019 else if (TREE_CODE (*tp) == LABEL_DECL
1020 && (! DECL_CONTEXT (*tp)
1021 || decl_function_context (*tp) == id->src_fn))
1022 /* These may need to be remapped for EH handling. */
1023 *tp = remap_decl (*tp, id);
1024 else if (TREE_CODE (*tp) == BIND_EXPR)
1025 copy_bind_expr (tp, walk_subtrees, id);
1026 /* Types may need remapping as well. */
1027 else if (TYPE_P (*tp))
1028 *tp = remap_type (*tp, id);
1029
1030 /* If this is a constant, we have to copy the node iff the type will be
1031 remapped. copy_tree_r will not copy a constant. */
1032 else if (CONSTANT_CLASS_P (*tp))
1033 {
1034 tree new_type = remap_type (TREE_TYPE (*tp), id);
1035
1036 if (new_type == TREE_TYPE (*tp))
1037 *walk_subtrees = 0;
1038
1039 else if (TREE_CODE (*tp) == INTEGER_CST)
1040 *tp = wide_int_to_tree (new_type, *tp);
1041 else
1042 {
1043 *tp = copy_node (*tp);
1044 TREE_TYPE (*tp) = new_type;
1045 }
1046 }
1047
1048 /* Otherwise, just copy the node. Note that copy_tree_r already
1049 knows not to copy VAR_DECLs, etc., so this is safe. */
1050 else
1051 {
1052 /* Here we handle trees that are not completely rewritten.
1053 First we detect some inlining-induced bogosities for
1054 discarding. */
1055 if (TREE_CODE (*tp) == MODIFY_EXPR
1056 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1057 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1058 {
1059 /* Some assignments VAR = VAR; don't generate any rtl code
1060 and thus don't count as variable modification. Avoid
1061 keeping bogosities like 0 = 0. */
1062 tree decl = TREE_OPERAND (*tp, 0), value;
1063 tree *n;
1064
1065 n = (tree *) pointer_map_contains (id->decl_map, decl);
1066 if (n)
1067 {
1068 value = *n;
1069 STRIP_TYPE_NOPS (value);
1070 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1071 {
1072 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1073 return copy_tree_body_r (tp, walk_subtrees, data);
1074 }
1075 }
1076 }
1077 else if (TREE_CODE (*tp) == INDIRECT_REF)
1078 {
1079 /* Get rid of *& from inline substitutions that can happen when a
1080 pointer argument is an ADDR_EXPR. */
1081 tree decl = TREE_OPERAND (*tp, 0);
1082 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1083 if (n)
1084 {
1085 /* If we happen to get an ADDR_EXPR in n->value, strip
1086 it manually here as we'll eventually get ADDR_EXPRs
1087 which lie about their types pointed to. In this case
1088 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1089 but we absolutely rely on that. As fold_indirect_ref
1090 does other useful transformations, try that first, though. */
1091 tree type = TREE_TYPE (*tp);
1092 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1093 tree old = *tp;
1094 *tp = gimple_fold_indirect_ref (ptr);
1095 if (! *tp)
1096 {
1097 if (TREE_CODE (ptr) == ADDR_EXPR)
1098 {
1099 *tp
1100 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1101 /* ??? We should either assert here or build
1102 a VIEW_CONVERT_EXPR instead of blindly leaking
1103 incompatible types to our IL. */
1104 if (! *tp)
1105 *tp = TREE_OPERAND (ptr, 0);
1106 }
1107 else
1108 {
1109 *tp = build1 (INDIRECT_REF, type, ptr);
1110 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1111 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1112 TREE_READONLY (*tp) = TREE_READONLY (old);
1113 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1114 have remapped a parameter as the property might be
1115 valid only for the parameter itself. */
1116 if (TREE_THIS_NOTRAP (old)
1117 && (!is_parm (TREE_OPERAND (old, 0))
1118 || (!id->transform_parameter && is_parm (ptr))))
1119 TREE_THIS_NOTRAP (*tp) = 1;
1120 }
1121 }
1122 *walk_subtrees = 0;
1123 return NULL;
1124 }
1125 }
1126 else if (TREE_CODE (*tp) == MEM_REF)
1127 {
1128 /* We need to re-canonicalize MEM_REFs from inline substitutions
1129 that can happen when a pointer argument is an ADDR_EXPR.
1130 Recurse here manually to allow that. */
1131 tree ptr = TREE_OPERAND (*tp, 0);
1132 tree type = remap_type (TREE_TYPE (*tp), id);
1133 tree old = *tp;
1134 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1135 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1136 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1137 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1138 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1139 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1140 remapped a parameter as the property might be valid only
1141 for the parameter itself. */
1142 if (TREE_THIS_NOTRAP (old)
1143 && (!is_parm (TREE_OPERAND (old, 0))
1144 || (!id->transform_parameter && is_parm (ptr))))
1145 TREE_THIS_NOTRAP (*tp) = 1;
1146 *walk_subtrees = 0;
1147 return NULL;
1148 }
1149
1150 /* Here is the "usual case". Copy this tree node, and then
1151 tweak some special cases. */
1152 copy_tree_r (tp, walk_subtrees, NULL);
1153
1154 /* If EXPR has a block defined, map it to the newly constructed block.
1155 When inlining, we want EXPRs without a block to appear in the block
1156 of the function call if we are not remapping a type.  */
1157 if (EXPR_P (*tp))
1158 {
1159 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1160 if (TREE_BLOCK (*tp))
1161 {
1162 tree *n;
1163 n = (tree *) pointer_map_contains (id->decl_map,
1164 TREE_BLOCK (*tp));
1165 if (n)
1166 new_block = *n;
1167 }
1168 TREE_SET_BLOCK (*tp, new_block);
1169 }
1170
1171 if (TREE_CODE (*tp) != OMP_CLAUSE)
1172 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1173
1174 /* The copied TARGET_EXPR has never been expanded, even if the
1175 original node was expanded already. */
1176 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1177 {
1178 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1179 TREE_OPERAND (*tp, 3) = NULL_TREE;
1180 }
1181
1182 /* Variable substitution need not be simple. In particular, the
1183 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1184 and friends are up-to-date. */
1185 else if (TREE_CODE (*tp) == ADDR_EXPR)
1186 {
1187 int invariant = is_gimple_min_invariant (*tp);
1188 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1189
1190 /* Handle the case where we substituted an INDIRECT_REF
1191 into the operand of the ADDR_EXPR. */
1192 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1193 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1194 else
1195 recompute_tree_invariant_for_addr_expr (*tp);
1196
1197 /* If this used to be invariant, but is not any longer,
1198 then regimplification is probably needed. */
1199 if (invariant && !is_gimple_min_invariant (*tp))
1200 id->regimplify = true;
1201
1202 *walk_subtrees = 0;
1203 }
1204 }
1205
1206 /* Keep iterating. */
1207 return NULL_TREE;
1208 }
1209
1210 /* Helper for remap_gimple_stmt. Given an EH region number for the
1211 source function, map that to the duplicate EH region number in
1212 the destination function. */
1213
1214 static int
1215 remap_eh_region_nr (int old_nr, copy_body_data *id)
1216 {
1217 eh_region old_r, new_r;
1218 void **slot;
1219
1220 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1221 slot = pointer_map_contains (id->eh_map, old_r);
1222 new_r = (eh_region) *slot;
1223
1224 return new_r->index;
1225 }
1226
1227 /* Similar, but operate on INTEGER_CSTs. */
1228
1229 static tree
1230 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1231 {
1232 int old_nr, new_nr;
1233
1234 old_nr = tree_to_shwi (old_t_nr);
1235 new_nr = remap_eh_region_nr (old_nr, id);
1236
1237 return build_int_cst (integer_type_node, new_nr);
1238 }
1239
1240 /* Helper for copy_bb. Remap statement STMT using the inlining
1241 information in ID. Return the new statement copy. */
1242
1243 static gimple
1244 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1245 {
1246 gimple copy = NULL;
1247 struct walk_stmt_info wi;
1248 bool skip_first = false;
1249
1250 /* Begin by recognizing trees that we'll completely rewrite for the
1251 inlining context. Our output for these trees is completely
1252 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1253 into an edge). Further down, we'll handle trees that get
1254 duplicated and/or tweaked. */
1255
1256 /* When requested, GIMPLE_RETURNs should be transformed to just the
1257 contained GIMPLE_ASSIGN. The branch semantics of the return will
1258 be handled elsewhere by manipulating the CFG rather than the
1259 statement. */
1260 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1261 {
1262 tree retval = gimple_return_retval (stmt);
1263
1264 /* If we're returning something, just turn that into an
1265 assignment into the equivalent of the original RESULT_DECL.
1266 If RETVAL is just the result decl, the result decl has
1267 already been set (e.g. a recent "foo (&result_decl, ...)");
1268 just toss the entire GIMPLE_RETURN. */
1269 if (retval
1270 && (TREE_CODE (retval) != RESULT_DECL
1271 && (TREE_CODE (retval) != SSA_NAME
1272 || ! SSA_NAME_VAR (retval)
1273 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1274 {
1275 copy = gimple_build_assign (id->do_not_unshare
1276 ? id->retvar : unshare_expr (id->retvar),
1277 retval);
1278 /* id->retvar is already substituted. Skip it on later remapping. */
1279 skip_first = true;
1280 }
1281 else
1282 return gimple_build_nop ();
1283 }
1284 else if (gimple_has_substatements (stmt))
1285 {
1286 gimple_seq s1, s2;
1287
1288 /* When cloning bodies from the C++ front end, we will be handed bodies
1289 in High GIMPLE form. Handle here all the High GIMPLE statements that
1290 have embedded statements. */
1291 switch (gimple_code (stmt))
1292 {
1293 case GIMPLE_BIND:
1294 copy = copy_gimple_bind (stmt, id);
1295 break;
1296
1297 case GIMPLE_CATCH:
1298 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1299 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1300 break;
1301
1302 case GIMPLE_EH_FILTER:
1303 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1304 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1305 break;
1306
1307 case GIMPLE_TRY:
1308 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1309 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1310 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1311 break;
1312
1313 case GIMPLE_WITH_CLEANUP_EXPR:
1314 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1315 copy = gimple_build_wce (s1);
1316 break;
1317
1318 case GIMPLE_OMP_PARALLEL:
1319 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1320 copy = gimple_build_omp_parallel
1321 (s1,
1322 gimple_omp_parallel_clauses (stmt),
1323 gimple_omp_parallel_child_fn (stmt),
1324 gimple_omp_parallel_data_arg (stmt));
1325 break;
1326
1327 case GIMPLE_OMP_TASK:
1328 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1329 copy = gimple_build_omp_task
1330 (s1,
1331 gimple_omp_task_clauses (stmt),
1332 gimple_omp_task_child_fn (stmt),
1333 gimple_omp_task_data_arg (stmt),
1334 gimple_omp_task_copy_fn (stmt),
1335 gimple_omp_task_arg_size (stmt),
1336 gimple_omp_task_arg_align (stmt));
1337 break;
1338
1339 case GIMPLE_OMP_FOR:
1340 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1341 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1342 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1343 gimple_omp_for_clauses (stmt),
1344 gimple_omp_for_collapse (stmt), s2);
1345 {
1346 size_t i;
1347 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1348 {
1349 gimple_omp_for_set_index (copy, i,
1350 gimple_omp_for_index (stmt, i));
1351 gimple_omp_for_set_initial (copy, i,
1352 gimple_omp_for_initial (stmt, i));
1353 gimple_omp_for_set_final (copy, i,
1354 gimple_omp_for_final (stmt, i));
1355 gimple_omp_for_set_incr (copy, i,
1356 gimple_omp_for_incr (stmt, i));
1357 gimple_omp_for_set_cond (copy, i,
1358 gimple_omp_for_cond (stmt, i));
1359 }
1360 }
1361 break;
1362
1363 case GIMPLE_OMP_MASTER:
1364 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1365 copy = gimple_build_omp_master (s1);
1366 break;
1367
1368 case GIMPLE_OMP_TASKGROUP:
1369 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1370 copy = gimple_build_omp_taskgroup (s1);
1371 break;
1372
1373 case GIMPLE_OMP_ORDERED:
1374 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1375 copy = gimple_build_omp_ordered (s1);
1376 break;
1377
1378 case GIMPLE_OMP_SECTION:
1379 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1380 copy = gimple_build_omp_section (s1);
1381 break;
1382
1383 case GIMPLE_OMP_SECTIONS:
1384 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1385 copy = gimple_build_omp_sections
1386 (s1, gimple_omp_sections_clauses (stmt));
1387 break;
1388
1389 case GIMPLE_OMP_SINGLE:
1390 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1391 copy = gimple_build_omp_single
1392 (s1, gimple_omp_single_clauses (stmt));
1393 break;
1394
1395 case GIMPLE_OMP_TARGET:
1396 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1397 copy = gimple_build_omp_target
1398 (s1, gimple_omp_target_kind (stmt),
1399 gimple_omp_target_clauses (stmt));
1400 break;
1401
1402 case GIMPLE_OMP_TEAMS:
1403 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1404 copy = gimple_build_omp_teams
1405 (s1, gimple_omp_teams_clauses (stmt));
1406 break;
1407
1408 case GIMPLE_OMP_CRITICAL:
1409 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1410 copy
1411 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1412 break;
1413
1414 case GIMPLE_TRANSACTION:
1415 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1416 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1417 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1418 break;
1419
1420 default:
1421 gcc_unreachable ();
1422 }
1423 }
1424 else
1425 {
1426 if (gimple_assign_copy_p (stmt)
1427 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1428 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1429 {
1430 /* Here we handle statements that are not completely rewritten.
1431 First we detect some inlining-induced bogosities for
1432 discarding. */
1433
1434 /* Some assignments VAR = VAR; don't generate any rtl code
1435 and thus don't count as variable modification. Avoid
1436 keeping bogosities like 0 = 0. */
1437 tree decl = gimple_assign_lhs (stmt), value;
1438 tree *n;
1439
1440 n = (tree *) pointer_map_contains (id->decl_map, decl);
1441 if (n)
1442 {
1443 value = *n;
1444 STRIP_TYPE_NOPS (value);
1445 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1446 return gimple_build_nop ();
1447 }
1448 }
1449
1450 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1451 in a block that we aren't copying during tree_function_versioning,
1452 just drop the clobber stmt. */
1453 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1454 {
1455 tree lhs = gimple_assign_lhs (stmt);
1456 if (TREE_CODE (lhs) == MEM_REF
1457 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1458 {
1459 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1460 if (gimple_bb (def_stmt)
1461 && !bitmap_bit_p (id->blocks_to_copy,
1462 gimple_bb (def_stmt)->index))
1463 return gimple_build_nop ();
1464 }
1465 }
1466
1467 if (gimple_debug_bind_p (stmt))
1468 {
1469 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1470 gimple_debug_bind_get_value (stmt),
1471 stmt);
1472 id->debug_stmts.safe_push (copy);
1473 return copy;
1474 }
1475 if (gimple_debug_source_bind_p (stmt))
1476 {
1477 copy = gimple_build_debug_source_bind
1478 (gimple_debug_source_bind_get_var (stmt),
1479 gimple_debug_source_bind_get_value (stmt), stmt);
1480 id->debug_stmts.safe_push (copy);
1481 return copy;
1482 }
1483
1484 /* Create a new deep copy of the statement. */
1485 copy = gimple_copy (stmt);
1486
1487 /* Clear flags that need revisiting. */
1488 if (is_gimple_call (copy)
1489 && gimple_call_tail_p (copy))
1490 gimple_call_set_tail (copy, false);
1491
1492 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1493 RESX and EH_DISPATCH. */
1494 if (id->eh_map)
1495 switch (gimple_code (copy))
1496 {
1497 case GIMPLE_CALL:
1498 {
1499 tree r, fndecl = gimple_call_fndecl (copy);
1500 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1501 switch (DECL_FUNCTION_CODE (fndecl))
1502 {
1503 case BUILT_IN_EH_COPY_VALUES:
1504 r = gimple_call_arg (copy, 1);
1505 r = remap_eh_region_tree_nr (r, id);
1506 gimple_call_set_arg (copy, 1, r);
1507 /* FALLTHRU */
1508
1509 case BUILT_IN_EH_POINTER:
1510 case BUILT_IN_EH_FILTER:
1511 r = gimple_call_arg (copy, 0);
1512 r = remap_eh_region_tree_nr (r, id);
1513 gimple_call_set_arg (copy, 0, r);
1514 break;
1515
1516 default:
1517 break;
1518 }
1519
1520 /* Reset alias info if we didn't apply measures to
1521 keep it valid over inlining by setting DECL_PT_UID. */
1522 if (!id->src_cfun->gimple_df
1523 || !id->src_cfun->gimple_df->ipa_pta)
1524 gimple_call_reset_alias_info (copy);
1525 }
1526 break;
1527
1528 case GIMPLE_RESX:
1529 {
1530 int r = gimple_resx_region (copy);
1531 r = remap_eh_region_nr (r, id);
1532 gimple_resx_set_region (copy, r);
1533 }
1534 break;
1535
1536 case GIMPLE_EH_DISPATCH:
1537 {
1538 int r = gimple_eh_dispatch_region (copy);
1539 r = remap_eh_region_nr (r, id);
1540 gimple_eh_dispatch_set_region (copy, r);
1541 }
1542 break;
1543
1544 default:
1545 break;
1546 }
1547 }
1548
1549 /* If STMT has a block defined, map it to the newly constructed
1550 block. */
1551 if (gimple_block (copy))
1552 {
1553 tree *n;
1554 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1555 gcc_assert (n);
1556 gimple_set_block (copy, *n);
1557 }
1558
1559 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1560 return copy;
1561
1562 /* Remap all the operands in COPY. */
1563 memset (&wi, 0, sizeof (wi));
1564 wi.info = id;
1565 if (skip_first)
1566 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1567 else
1568 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1569
1570 /* Clear the copied virtual operands. We are not remapping them here
1571 but are going to recreate them from scratch. */
1572 if (gimple_has_mem_ops (copy))
1573 {
1574 gimple_set_vdef (copy, NULL_TREE);
1575 gimple_set_vuse (copy, NULL_TREE);
1576 }
1577
1578 return copy;
1579 }
1580
1581
1582 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1583 later.  */
1584
1585 static basic_block
1586 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1587 gcov_type count_scale)
1588 {
1589 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1590 basic_block copy_basic_block;
1591 tree decl;
1592 gcov_type freq;
1593 basic_block prev;
1594
1595 /* Search for previous copied basic block. */
1596 prev = bb->prev_bb;
1597 while (!prev->aux)
1598 prev = prev->prev_bb;
1599
1600 /* create_basic_block() will append every new block to
1601 basic_block_info automatically. */
1602 copy_basic_block = create_basic_block (NULL, (void *) 0,
1603 (basic_block) prev->aux);
1604 copy_basic_block->count = apply_scale (bb->count, count_scale);
1605
1606 /* We are going to rebuild frequencies from scratch. These values
1607 matter only a little for driving canonicalize_loop_headers.  */
1608 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1609
1610 /* We recompute frequencies after inlining, so this is quite safe. */
1611 if (freq > BB_FREQ_MAX)
1612 freq = BB_FREQ_MAX;
1613 copy_basic_block->frequency = freq;
1614
1615 copy_gsi = gsi_start_bb (copy_basic_block);
1616
1617 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1618 {
1619 gimple stmt = gsi_stmt (gsi);
1620 gimple orig_stmt = stmt;
1621
1622 id->regimplify = false;
1623 stmt = remap_gimple_stmt (stmt, id);
1624 if (gimple_nop_p (stmt))
1625 continue;
1626
1627 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1628 seq_gsi = copy_gsi;
1629
1630 /* With return slot optimization we can end up with
1631 non-gimple (foo *)&this->m, fix that here. */
1632 if (is_gimple_assign (stmt)
1633 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1634 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1635 {
1636 tree new_rhs;
1637 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1638 gimple_assign_rhs1 (stmt),
1639 true, NULL, false,
1640 GSI_CONTINUE_LINKING);
1641 gimple_assign_set_rhs1 (stmt, new_rhs);
1642 id->regimplify = false;
1643 }
1644
1645 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1646
1647 if (id->regimplify)
1648 gimple_regimplify_operands (stmt, &seq_gsi);
1649
1650 /* If copy_basic_block has been empty at the start of this iteration,
1651 call gsi_start_bb again to get at the newly added statements. */
1652 if (gsi_end_p (copy_gsi))
1653 copy_gsi = gsi_start_bb (copy_basic_block);
1654 else
1655 gsi_next (&copy_gsi);
1656
1657 /* Process the new statement. The call to gimple_regimplify_operands
1658 possibly turned the statement into multiple statements; we
1659 need to process all of them.  */
1660 do
1661 {
1662 tree fn;
1663
1664 stmt = gsi_stmt (copy_gsi);
1665 if (is_gimple_call (stmt)
1666 && gimple_call_va_arg_pack_p (stmt)
1667 && id->gimple_call)
1668 {
1669 /* __builtin_va_arg_pack () should be replaced by
1670 all arguments corresponding to ... in the caller. */
1671 tree p;
1672 gimple new_call;
1673 vec<tree> argarray;
1674 size_t nargs = gimple_call_num_args (id->gimple_call);
1675 size_t n;
1676
1677 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1678 nargs--;
1679
1680 /* Create the new array of arguments. */
1681 n = nargs + gimple_call_num_args (stmt);
1682 argarray.create (n);
1683 argarray.safe_grow_cleared (n);
1684
1685 /* Copy all the arguments before '...' */
1686 memcpy (argarray.address (),
1687 gimple_call_arg_ptr (stmt, 0),
1688 gimple_call_num_args (stmt) * sizeof (tree));
1689
1690 /* Append the arguments passed in '...' */
1691 memcpy (argarray.address () + gimple_call_num_args (stmt),
1692 gimple_call_arg_ptr (id->gimple_call, 0)
1693 + (gimple_call_num_args (id->gimple_call) - nargs),
1694 nargs * sizeof (tree));
1695
1696 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1697 argarray);
1698
1699 argarray.release ();
1700
1701 /* Copy all GIMPLE_CALL flags, location and block, except
1702 GF_CALL_VA_ARG_PACK. */
1703 gimple_call_copy_flags (new_call, stmt);
1704 gimple_call_set_va_arg_pack (new_call, false);
1705 gimple_set_location (new_call, gimple_location (stmt));
1706 gimple_set_block (new_call, gimple_block (stmt));
1707 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1708
1709 gsi_replace (&copy_gsi, new_call, false);
1710 stmt = new_call;
1711 }
1712 else if (is_gimple_call (stmt)
1713 && id->gimple_call
1714 && (decl = gimple_call_fndecl (stmt))
1715 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1716 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1717 {
1718 /* __builtin_va_arg_pack_len () should be replaced by
1719 the number of anonymous arguments. */
1720 size_t nargs = gimple_call_num_args (id->gimple_call);
1721 tree count, p;
1722 gimple new_stmt;
1723
1724 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1725 nargs--;
1726
1727 count = build_int_cst (integer_type_node, nargs);
1728 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1729 gsi_replace (&copy_gsi, new_stmt, false);
1730 stmt = new_stmt;
1731 }
1732
1733 /* Statements produced by inlining can be unfolded, especially
1734 when we have constant propagated some operands.  We can't fold
1735 them right now for two reasons:
1736 1) folding requires SSA_NAME_DEF_STMTs to be correct
1737 2) we can't change function calls to builtins.
1738 So we just mark statements for later folding.  We mark
1739 all new statements, instead of just the statements that have
1740 changed by some nontrivial substitution, so that even statements
1741 made foldable indirectly are updated.  If this turns out to be
1742 expensive, copy_body can be told to watch for nontrivial
1743 changes.  */
1744 if (id->statements_to_fold)
1745 pointer_set_insert (id->statements_to_fold, stmt);
1746
1747 /* We're duplicating a CALL_EXPR. Find any corresponding
1748 callgraph edges and update or duplicate them. */
1749 if (is_gimple_call (stmt))
1750 {
1751 struct cgraph_edge *edge;
1752
1753 switch (id->transform_call_graph_edges)
1754 {
1755 case CB_CGE_DUPLICATE:
1756 edge = cgraph_edge (id->src_node, orig_stmt);
1757 if (edge)
1758 {
1759 int edge_freq = edge->frequency;
1760 int new_freq;
1761 struct cgraph_edge *old_edge = edge;
1762 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1763 gimple_uid (stmt),
1764 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1765 true);
1766 /* We could also just rescale the frequency, but
1767 doing so would introduce roundoff errors and make
1768 the verifier unhappy. */
1769 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1770 copy_basic_block);
1771
1772 /* Speculative calls consist of two edges - direct and indirect.
1773 Duplicate the whole thing and distribute frequencies accordingly. */
1774 if (edge->speculative)
1775 {
1776 struct cgraph_edge *direct, *indirect;
1777 struct ipa_ref *ref;
1778
1779 gcc_assert (!edge->indirect_unknown_callee);
1780 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1781 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1782 gimple_uid (stmt),
1783 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1784 true);
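/* Distribute NEW_FREQ between the direct and the indirect edge in
   proportion to their original frequencies, capping both at
   CGRAPH_FREQ_MAX.  E.g. with original frequencies 30 and 10 the
   direct edge receives 3/4 of NEW_FREQ and the indirect edge 1/4.  */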
1785 if (old_edge->frequency + indirect->frequency)
1786 {
1787 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1788 (old_edge->frequency + indirect->frequency)),
1789 CGRAPH_FREQ_MAX);
1790 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1791 (old_edge->frequency + indirect->frequency)),
1792 CGRAPH_FREQ_MAX);
1793 }
1794 id->dst_node->clone_reference (ref, stmt);
1795 }
1796 else
1797 {
1798 edge->frequency = new_freq;
1799 if (dump_file
1800 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1801 && (edge_freq > edge->frequency + 10
1802 || edge_freq < edge->frequency - 10))
1803 {
1804 fprintf (dump_file, "Edge frequency estimated by "
1805 "cgraph %i diverge from inliner's estimate %i\n",
1806 edge_freq,
1807 edge->frequency);
1808 fprintf (dump_file,
1809 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1810 bb->index,
1811 bb->frequency,
1812 copy_basic_block->frequency);
1813 }
1814 }
1815 }
1816 break;
1817
1818 case CB_CGE_MOVE_CLONES:
1819 cgraph_set_call_stmt_including_clones (id->dst_node,
1820 orig_stmt, stmt);
1821 edge = cgraph_edge (id->dst_node, stmt);
1822 break;
1823
1824 case CB_CGE_MOVE:
1825 edge = cgraph_edge (id->dst_node, orig_stmt);
1826 if (edge)
1827 cgraph_set_call_stmt (edge, stmt);
1828 break;
1829
1830 default:
1831 gcc_unreachable ();
1832 }
1833
1834 /* Constant propagation on arguments done during inlining
1835 may create a new direct call. Produce an edge for it. */
1836 if ((!edge
1837 || (edge->indirect_inlining_edge
1838 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1839 && id->dst_node->definition
1840 && (fn = gimple_call_fndecl (stmt)) != NULL)
1841 {
1842 struct cgraph_node *dest = cgraph_get_node (fn);
1843
1844 /* We have a missing edge in the callgraph. This can happen
1845 when previous inlining turned an indirect call into a
1846 direct call by constant propagating arguments, or when we are
1847 producing a dead clone (for further cloning). In all
1848 other cases we hit a bug (incorrect node sharing is the
1849 most common reason for missing edges). */
1850 gcc_assert (!dest->definition
1851 || dest->address_taken
1852 || !id->src_node->definition
1853 || !id->dst_node->definition);
1854 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1855 cgraph_create_edge_including_clones
1856 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1857 compute_call_stmt_bb_frequency (id->dst_node->decl,
1858 copy_basic_block),
1859 CIF_ORIGINALLY_INDIRECT_CALL);
1860 else
1861 cgraph_create_edge (id->dst_node, dest, stmt,
1862 bb->count,
1863 compute_call_stmt_bb_frequency
1864 (id->dst_node->decl,
1865 copy_basic_block))->inline_failed
1866 = CIF_ORIGINALLY_INDIRECT_CALL;
1867 if (dump_file)
1868 {
1869 fprintf (dump_file, "Created new direct edge to %s\n",
1870 dest->name ());
1871 }
1872 }
1873
1874 notice_special_calls (stmt);
1875 }
1876
1877 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1878 id->eh_map, id->eh_lp_nr);
1879
1880 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1881 {
1882 ssa_op_iter i;
1883 tree def;
1884
1885 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1886 if (TREE_CODE (def) == SSA_NAME)
1887 SSA_NAME_DEF_STMT (def) = stmt;
1888 }
1889
1890 gsi_next (&copy_gsi);
1891 }
1892 while (!gsi_end_p (copy_gsi));
1893
1894 copy_gsi = gsi_last_bb (copy_basic_block);
1895 }
1896
1897 return copy_basic_block;
1898 }
1899
1900 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1901 SSA form is quite easy, since the dominator relationship for the old basic
1902 blocks does not change.
1903
1904 There is however an exception where inlining might change the dominator
1905 relation across EH edges from basic blocks within the inlined function
1906 to landing pads in the function we inline into.
1907
1908 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1909 to gimple regs. Otherwise, the function marks the PHI_RESULTs of such
1910 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1911 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1912 set, and this means that there will be no overlapping live ranges
1913 for the underlying symbol.
1914
1915 This might change in the future if we allow redirecting of EH edges and
1916 we might then want to change the way we build the CFG pre-inlining to
1917 include all the possible edges. */
1918 static void
1919 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1920 bool can_throw, bool nonlocal_goto)
1921 {
1922 edge e;
1923 edge_iterator ei;
1924
1925 FOR_EACH_EDGE (e, ei, bb->succs)
1926 if (!e->dest->aux
1927 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1928 {
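/* E->dest is not one of the newly copied blocks: it either has no AUX
   mapping at all or its AUX refers to the entry block. Such an edge
   leaves the duplicated region and enters the function we inline into.  */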
1929 gimple phi;
1930 gimple_stmt_iterator si;
1931
1932 if (!nonlocal_goto)
1933 gcc_assert (e->flags & EDGE_EH);
1934
1935 if (!can_throw)
1936 gcc_assert (!(e->flags & EDGE_EH));
1937
1938 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1939 {
1940 edge re;
1941
1942 phi = gsi_stmt (si);
1943
1944 /* For abnormal goto/call edges the receiver can be the
1945 ENTRY_BLOCK. Do not assert this cannot happen. */
1946
1947 gcc_assert ((e->flags & EDGE_EH)
1948 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1949
1950 re = find_edge (ret_bb, e->dest);
1951 gcc_checking_assert (re);
1952 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1953 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1954
1955 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1956 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1957 }
1958 }
1959 }
1960
1961
1962 /* Copy edges from BB into its copy constructed earlier, scale the profile
1963 accordingly. Edges will be taken care of later. Assume the aux
1964 pointers point to the copies of each BB. Return true if any
1965 debug stmts are left after a statement that must end the basic block. */
1966
1967 static bool
1968 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1969 basic_block abnormal_goto_dest)
1970 {
1971 basic_block new_bb = (basic_block) bb->aux;
1972 edge_iterator ei;
1973 edge old_edge;
1974 gimple_stmt_iterator si;
1975 int flags;
1976 bool need_debug_cleanup = false;
1977
1978 /* Use the indices from the original blocks to create edges for the
1979 new ones. */
1980 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1981 if (!(old_edge->flags & EDGE_EH))
1982 {
1983 edge new_edge;
1984
1985 flags = old_edge->flags;
1986
1987 /* Return edges do get a FALLTHRU flag when they get inlined. */
1988 if (old_edge->dest->index == EXIT_BLOCK
1989 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
1990 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
1991 flags |= EDGE_FALLTHRU;
1992 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1993 new_edge->count = apply_scale (old_edge->count, count_scale);
1994 new_edge->probability = old_edge->probability;
1995 }
1996
1997 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1998 return false;
1999
2000 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2001 {
2002 gimple copy_stmt;
2003 bool can_throw, nonlocal_goto;
2004
2005 copy_stmt = gsi_stmt (si);
2006 if (!is_gimple_debug (copy_stmt))
2007 update_stmt (copy_stmt);
2008
2009 /* Do this before the possible split_block. */
2010 gsi_next (&si);
2011
2012 /* If this tree could throw an exception, there are two
2013 cases where we need to add abnormal edge(s): the
2014 tree wasn't in a region and there is a "current
2015 region" in the caller; or the original tree had
2016 EH edges. In both cases split the block after the tree,
2017 and add abnormal edge(s) as needed; we need both
2018 those from the callee and the caller.
2019 We check whether the copy can throw, because the const
2020 propagation can change an INDIRECT_REF which throws
2021 into a COMPONENT_REF which doesn't. If the copy
2022 can throw, the original could also throw. */
2023 can_throw = stmt_can_throw_internal (copy_stmt);
2024 nonlocal_goto
2025 = (stmt_can_make_abnormal_goto (copy_stmt)
2026 && !computed_goto_p (copy_stmt));
2027
2028 if (can_throw || nonlocal_goto)
2029 {
2030 if (!gsi_end_p (si))
2031 {
2032 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2033 gsi_next (&si);
2034 if (gsi_end_p (si))
2035 need_debug_cleanup = true;
2036 }
2037 if (!gsi_end_p (si))
2038 /* Note that bb's predecessor edges aren't necessarily
2039 right at this point; split_block doesn't care. */
2040 {
2041 edge e = split_block (new_bb, copy_stmt);
2042
2043 new_bb = e->dest;
2044 new_bb->aux = e->src->aux;
2045 si = gsi_start_bb (new_bb);
2046 }
2047 }
2048
2049 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2050 make_eh_dispatch_edges (copy_stmt);
2051 else if (can_throw)
2052 make_eh_edges (copy_stmt);
2053
2054 /* If the call we inline cannot make an abnormal goto, do not add
2055 additional abnormal edges but only retain those already present
2056 in the original function body. */
2057 if (abnormal_goto_dest == NULL)
2058 nonlocal_goto = false;
2059 if (nonlocal_goto)
2060 {
2061 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2062
2063 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2064 nonlocal_goto = false;
2065 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2066 in OpenMP regions which aren't allowed to be left abnormally.
2067 So, no need to add abnormal edge in that case. */
2068 else if (is_gimple_call (copy_stmt)
2069 && gimple_call_internal_p (copy_stmt)
2070 && (gimple_call_internal_fn (copy_stmt)
2071 == IFN_ABNORMAL_DISPATCHER)
2072 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2073 nonlocal_goto = false;
2074 else
2075 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2076 }
2077
2078 if ((can_throw || nonlocal_goto)
2079 && gimple_in_ssa_p (cfun))
2080 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2081 can_throw, nonlocal_goto);
2082 }
2083 return need_debug_cleanup;
2084 }
2085
2086 /* Copy the PHIs. All blocks and edges are copied, some blocks
2087 were possibly split and new outgoing EH edges inserted.
2088 BB points to the block of the original function and AUX pointers
2089 link the original and newly copied blocks. */
2090
2091 static void
2092 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2093 {
2094 basic_block const new_bb = (basic_block) bb->aux;
2095 edge_iterator ei;
2096 gimple phi;
2097 gimple_stmt_iterator si;
2098 edge new_edge;
2099 bool inserted = false;
2100
2101 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2102 {
2103 tree res, new_res;
2104 gimple new_phi;
2105
2106 phi = gsi_stmt (si);
2107 res = PHI_RESULT (phi);
2108 new_res = res;
2109 if (!virtual_operand_p (res))
2110 {
2111 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2112 new_phi = create_phi_node (new_res, new_bb);
2113 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2114 {
2115 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2116 tree arg;
2117 tree new_arg;
2118 edge_iterator ei2;
2119 location_t locus;
2120
2121 /* When doing partial cloning, we allow PHIs on the entry block
2122 as long as all the arguments are the same. Find any input
2123 edge to see which argument to copy. */
2124 if (!old_edge)
2125 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2126 if (!old_edge->src->aux)
2127 break;
2128
2129 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2130 new_arg = arg;
2131 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2132 gcc_assert (new_arg);
2133 /* With return slot optimization we can end up with
2134 non-gimple (foo *)&this->m, fix that here. */
2135 if (TREE_CODE (new_arg) != SSA_NAME
2136 && TREE_CODE (new_arg) != FUNCTION_DECL
2137 && !is_gimple_val (new_arg))
2138 {
2139 gimple_seq stmts = NULL;
2140 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2141 gsi_insert_seq_on_edge (new_edge, stmts);
2142 inserted = true;
2143 }
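/* Remap the lexical block of the argument's location to its copy; if
   the block was not remapped, fall back to the bare location.  */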
2144 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2145 if (LOCATION_BLOCK (locus))
2146 {
2147 tree *n;
2148 n = (tree *) pointer_map_contains (id->decl_map,
2149 LOCATION_BLOCK (locus));
2150 gcc_assert (n);
2151 if (*n)
2152 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2153 else
2154 locus = LOCATION_LOCUS (locus);
2155 }
2156 else
2157 locus = LOCATION_LOCUS (locus);
2158
2159 add_phi_arg (new_phi, new_arg, new_edge, locus);
2160 }
2161 }
2162 }
2163
2164 /* Commit the delayed edge insertions. */
2165 if (inserted)
2166 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2167 gsi_commit_one_edge_insert (new_edge, NULL);
2168 }
2169
2170
2171 /* Wrapper for remap_decl so it can be used as a callback. */
2172
2173 static tree
2174 remap_decl_1 (tree decl, void *data)
2175 {
2176 return remap_decl (decl, (copy_body_data *) data);
2177 }
2178
2179 /* Build the struct function and associated datastructures for the new clone
2180 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2181 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2182
2183 static void
2184 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2185 {
2186 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2187 gcov_type count_scale;
2188
2189 if (!DECL_ARGUMENTS (new_fndecl))
2190 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2191 if (!DECL_RESULT (new_fndecl))
2192 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2193
2194 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2195 count_scale
2196 = GCOV_COMPUTE_SCALE (count,
2197 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2198 else
2199 count_scale = REG_BR_PROB_BASE;
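/* COUNT_SCALE is the ratio of COUNT to the source function's entry
   count, expressed relative to REG_BR_PROB_BASE; it is used below to
   scale the entry and exit block counts of the clone.  */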
2200
2201 /* Register specific tree functions. */
2202 gimple_register_cfg_hooks ();
2203
2204 /* Get clean struct function. */
2205 push_struct_function (new_fndecl);
2206
2207 /* We will rebuild these, so just sanity check that they are empty. */
2208 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2209 gcc_assert (cfun->local_decls == NULL);
2210 gcc_assert (cfun->cfg == NULL);
2211 gcc_assert (cfun->decl == new_fndecl);
2212
2213 /* Copy items we preserve during cloning. */
2214 cfun->static_chain_decl = src_cfun->static_chain_decl;
2215 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2216 cfun->function_end_locus = src_cfun->function_end_locus;
2217 cfun->curr_properties = src_cfun->curr_properties;
2218 cfun->last_verified = src_cfun->last_verified;
2219 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2220 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2221 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2222 cfun->stdarg = src_cfun->stdarg;
2223 cfun->after_inlining = src_cfun->after_inlining;
2224 cfun->can_throw_non_call_exceptions
2225 = src_cfun->can_throw_non_call_exceptions;
2226 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2227 cfun->returns_struct = src_cfun->returns_struct;
2228 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2229
2230 init_empty_tree_cfg ();
2231
2232 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2233 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2234 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2235 REG_BR_PROB_BASE);
2236 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2237 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2238 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2239 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2240 REG_BR_PROB_BASE);
2241 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2242 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2243 if (src_cfun->eh)
2244 init_eh_for_function ();
2245
2246 if (src_cfun->gimple_df)
2247 {
2248 init_tree_ssa (cfun);
2249 cfun->gimple_df->in_ssa_p = true;
2250 init_ssa_operands (cfun);
2251 }
2252 }
2253
2254 /* Helper function for copy_cfg_body. Move debug stmts from the end
2255 of NEW_BB to the beginning of successor basic blocks when needed. If the
2256 successor has multiple predecessors, reset the values of the moved
2257 debug stmts, otherwise keep them. */
2258
2259 static void
2260 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2261 {
2262 edge e;
2263 edge_iterator ei;
2264 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2265
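/* Nothing needs to be moved unless the last non-debug statement of the
   block can throw or make an abnormal goto and at least one debug stmt
   follows it.  */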
2266 if (gsi_end_p (si)
2267 || gsi_one_before_end_p (si)
2268 || !(stmt_can_throw_internal (gsi_stmt (si))
2269 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2270 return;
2271
2272 FOR_EACH_EDGE (e, ei, new_bb->succs)
2273 {
2274 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2275 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2276 while (is_gimple_debug (gsi_stmt (ssi)))
2277 {
2278 gimple stmt = gsi_stmt (ssi), new_stmt;
2279 tree var;
2280 tree value;
2281
2282 /* For the last edge move the debug stmts instead of copying
2283 them. */
2284 if (ei_one_before_end_p (ei))
2285 {
2286 si = ssi;
2287 gsi_prev (&ssi);
2288 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2289 gimple_debug_bind_reset_value (stmt);
2290 gsi_remove (&si, false);
2291 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2292 continue;
2293 }
2294
2295 if (gimple_debug_bind_p (stmt))
2296 {
2297 var = gimple_debug_bind_get_var (stmt);
2298 if (single_pred_p (e->dest))
2299 {
2300 value = gimple_debug_bind_get_value (stmt);
2301 value = unshare_expr (value);
2302 }
2303 else
2304 value = NULL_TREE;
2305 new_stmt = gimple_build_debug_bind (var, value, stmt);
2306 }
2307 else if (gimple_debug_source_bind_p (stmt))
2308 {
2309 var = gimple_debug_source_bind_get_var (stmt);
2310 value = gimple_debug_source_bind_get_value (stmt);
2311 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2312 }
2313 else
2314 gcc_unreachable ();
2315 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2316 id->debug_stmts.safe_push (new_stmt);
2317 gsi_prev (&ssi);
2318 }
2319 }
2320 }
2321
2322 /* Make a copy of the sub-loops of SRC_PARENT and place them
2323 as sub-loops of DEST_PARENT. */
2324
2325 static void
2326 copy_loops (copy_body_data *id,
2327 struct loop *dest_parent, struct loop *src_parent)
2328 {
2329 struct loop *src_loop = src_parent->inner;
2330 while (src_loop)
2331 {
2332 if (!id->blocks_to_copy
2333 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2334 {
2335 struct loop *dest_loop = alloc_loop ();
2336
2337 /* Assign the new loop its header and latch and associate
2338 those with the new loop. */
2339 if (src_loop->header != NULL)
2340 {
2341 dest_loop->header = (basic_block)src_loop->header->aux;
2342 dest_loop->header->loop_father = dest_loop;
2343 }
2344 if (src_loop->latch != NULL)
2345 {
2346 dest_loop->latch = (basic_block)src_loop->latch->aux;
2347 dest_loop->latch->loop_father = dest_loop;
2348 }
2349
2350 /* Copy loop meta-data. */
2351 copy_loop_info (src_loop, dest_loop);
2352
2353 /* Finally place it into the loop array and the loop tree. */
2354 place_new_loop (cfun, dest_loop);
2355 flow_loop_tree_node_add (dest_parent, dest_loop);
2356
2357 dest_loop->safelen = src_loop->safelen;
2358 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2359 if (src_loop->force_vectorize)
2360 {
2361 dest_loop->force_vectorize = true;
2362 cfun->has_force_vectorize_loops = true;
2363 }
2364 if (src_loop->simduid)
2365 {
2366 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2367 cfun->has_simduid_loops = true;
2368 }
2369
2370 /* Recurse. */
2371 copy_loops (id, dest_loop, src_loop);
2372 }
2373 src_loop = src_loop->next;
2374 }
2375 }
2376
2377 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2378
2379 void
2380 redirect_all_calls (copy_body_data * id, basic_block bb)
2381 {
2382 gimple_stmt_iterator si;
2383 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2384 {
2385 if (is_gimple_call (gsi_stmt (si)))
2386 {
2387 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2388 if (edge)
2389 cgraph_redirect_edge_call_stmt_to_callee (edge);
2390 }
2391 }
2392 }
2393
2394 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2395 with each bb's frequency. Used when NODE has a 0-weight entry
2396 but we are about to inline it into a non-zero count call bb.
2397 See the comments for handle_missing_profiles() in predict.c for
2398 when this can happen for COMDATs. */
2399
2400 void
2401 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2402 {
2403 basic_block bb;
2404 edge_iterator ei;
2405 edge e;
2406 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2407
2408 FOR_ALL_BB_FN (bb, fn)
2409 {
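/* Scale COUNT by the block's guessed relative frequency; e.g. a block
   with frequency BB_FREQ_MAX / 2 receives a count of roughly COUNT / 2.  */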
2410 bb->count = apply_scale (count,
2411 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2412 FOR_EACH_EDGE (e, ei, bb->succs)
2413 e->count = apply_probability (e->src->count, e->probability);
2414 }
2415 }
2416
2417 /* Make a copy of the body of FN so that it can be inserted inline in
2418 another function. Walks FN via CFG, returns new fndecl. */
2419
2420 static tree
2421 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2422 basic_block entry_block_map, basic_block exit_block_map,
2423 basic_block new_entry)
2424 {
2425 tree callee_fndecl = id->src_fn;
2426 /* Original cfun for the callee, doesn't change. */
2427 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2428 struct function *cfun_to_copy;
2429 basic_block bb;
2430 tree new_fndecl = NULL;
2431 bool need_debug_cleanup = false;
2432 gcov_type count_scale;
2433 int last;
2434 int incoming_frequency = 0;
2435 gcov_type incoming_count = 0;
2436
2437 /* This can happen for COMDAT routines that end up with 0 counts
2438 despite being called (see the comments for handle_missing_profiles()
2439 in predict.c as to why). Apply counts to the blocks in the callee
2440 before inlining, using the guessed edge frequencies, so that we don't
2441 end up with a 0-count inline body which can confuse downstream
2442 optimizations such as function splitting. */
2443 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2444 {
2445 /* Apply the larger of the call bb count and the total incoming
2446 call edge count to the callee. */
2447 gcov_type in_count = 0;
2448 struct cgraph_edge *in_edge;
2449 for (in_edge = id->src_node->callers; in_edge;
2450 in_edge = in_edge->next_caller)
2451 in_count += in_edge->count;
2452 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2453 }
2454
2455 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2456 count_scale
2457 = GCOV_COMPUTE_SCALE (count,
2458 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2459 else
2460 count_scale = REG_BR_PROB_BASE;
2461
2462 /* Register specific tree functions. */
2463 gimple_register_cfg_hooks ();
2464
2465 /* If we are inlining just a region of the function, make sure to connect
2466 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2467 part of a loop, we must compute the frequency and probability of
2468 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2469 probabilities of edges incoming from the nonduplicated region. */
2470 if (new_entry)
2471 {
2472 edge e;
2473 edge_iterator ei;
2474
2475 FOR_EACH_EDGE (e, ei, new_entry->preds)
2476 if (!e->src->aux)
2477 {
2478 incoming_frequency += EDGE_FREQUENCY (e);
2479 incoming_count += e->count;
2480 }
2481 incoming_count = apply_scale (incoming_count, count_scale);
2482 incoming_frequency
2483 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2484 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2485 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2486 }
2487
2488 /* Must have a CFG here at this point. */
2489 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2490 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2491
2492 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2493
2494 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2495 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2496 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2497 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2498
2499 /* Duplicate any exception-handling regions. */
2500 if (cfun->eh)
2501 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2502 remap_decl_1, id);
2503
2504 /* Use aux pointers to map the original blocks to their copies. */
2505 FOR_EACH_BB_FN (bb, cfun_to_copy)
2506 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2507 {
2508 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2509 bb->aux = new_bb;
2510 new_bb->aux = bb;
2511 new_bb->loop_father = entry_block_map->loop_father;
2512 }
2513
2514 last = last_basic_block_for_fn (cfun);
2515
2516 /* Now that we've duplicated the blocks, duplicate their edges. */
2517 basic_block abnormal_goto_dest = NULL;
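/* If the call being inlined can make an abnormal goto and is the last
   statement of its block, blocks of the copied body that can make
   abnormal gotos must be wired to the caller's abnormal dispatcher.  */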
2518 if (id->gimple_call
2519 && stmt_can_make_abnormal_goto (id->gimple_call))
2520 {
2521 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2522
2523 bb = gimple_bb (id->gimple_call);
2524 gsi_next (&gsi);
2525 if (gsi_end_p (gsi))
2526 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2527 }
2528 FOR_ALL_BB_FN (bb, cfun_to_copy)
2529 if (!id->blocks_to_copy
2530 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2531 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2532 abnormal_goto_dest);
2533
2534 if (new_entry)
2535 {
2536 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2537 e->probability = REG_BR_PROB_BASE;
2538 e->count = incoming_count;
2539 }
2540
2541 /* Duplicate the loop tree, if available and wanted. */
2542 if (loops_for_fn (src_cfun) != NULL
2543 && current_loops != NULL)
2544 {
2545 copy_loops (id, entry_block_map->loop_father,
2546 get_loop (src_cfun, 0));
2547 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2548 loops_state_set (LOOPS_NEED_FIXUP);
2549 }
2550
2551 /* If the loop tree in the source function needed fixup, mark the
2552 destination loop tree for fixup, too. */
2553 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2554 loops_state_set (LOOPS_NEED_FIXUP);
2555
2556 if (gimple_in_ssa_p (cfun))
2557 FOR_ALL_BB_FN (bb, cfun_to_copy)
2558 if (!id->blocks_to_copy
2559 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2560 copy_phis_for_bb (bb, id);
2561
2562 FOR_ALL_BB_FN (bb, cfun_to_copy)
2563 if (bb->aux)
2564 {
2565 if (need_debug_cleanup
2566 && bb->index != ENTRY_BLOCK
2567 && bb->index != EXIT_BLOCK)
2568 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2569 /* Update call edge destinations. This cannot be done before loop
2570 info is updated, because we may split basic blocks. */
2571 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2572 redirect_all_calls (id, (basic_block)bb->aux);
2573 ((basic_block)bb->aux)->aux = NULL;
2574 bb->aux = NULL;
2575 }
2576
2577 /* Zero out the AUX fields of blocks newly created during EH edge
2578 insertion. */
2579 for (; last < last_basic_block_for_fn (cfun); last++)
2580 {
2581 if (need_debug_cleanup)
2582 maybe_move_debug_stmts_to_successors (id,
2583 BASIC_BLOCK_FOR_FN (cfun, last));
2584 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2585 /* Update call edge destinations. This cannot be done before loop
2586 info is updated, because we may split basic blocks. */
2587 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2588 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2589 }
2590 entry_block_map->aux = NULL;
2591 exit_block_map->aux = NULL;
2592
2593 if (id->eh_map)
2594 {
2595 pointer_map_destroy (id->eh_map);
2596 id->eh_map = NULL;
2597 }
2598
2599 return new_fndecl;
2600 }
2601
2602 /* Copy the debug STMT using ID. We deal with these statements in a
2603 special way: if any variable in their VALUE expression wasn't
2604 remapped yet, we won't remap it, because that would get decl uids
2605 out of sync, causing codegen differences between -g and -g0. If
2606 this arises, we drop the VALUE expression altogether. */
2607
2608 static void
2609 copy_debug_stmt (gimple stmt, copy_body_data *id)
2610 {
2611 tree t, *n;
2612 struct walk_stmt_info wi;
2613
2614 if (gimple_block (stmt))
2615 {
2616 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2617 gimple_set_block (stmt, n ? *n : id->block);
2618 }
2619
2620 /* Remap all the operands in COPY. */
2621 memset (&wi, 0, sizeof (wi));
2622 wi.info = id;
2623
2624 processing_debug_stmt = 1;
2625
2626 if (gimple_debug_source_bind_p (stmt))
2627 t = gimple_debug_source_bind_get_var (stmt);
2628 else
2629 t = gimple_debug_bind_get_var (stmt);
2630
2631 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2632 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2633 {
2634 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2635 t = *n;
2636 }
2637 else if (TREE_CODE (t) == VAR_DECL
2638 && !is_global_var (t)
2639 && !pointer_map_contains (id->decl_map, t))
2640 /* T is a non-localized variable. */;
2641 else
2642 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2643
2644 if (gimple_debug_bind_p (stmt))
2645 {
2646 gimple_debug_bind_set_var (stmt, t);
2647
2648 if (gimple_debug_bind_has_value_p (stmt))
2649 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2650 remap_gimple_op_r, &wi, NULL);
2651
2652 /* Punt if any decl couldn't be remapped. */
2653 if (processing_debug_stmt < 0)
2654 gimple_debug_bind_reset_value (stmt);
2655 }
2656 else if (gimple_debug_source_bind_p (stmt))
2657 {
2658 gimple_debug_source_bind_set_var (stmt, t);
2659 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2660 remap_gimple_op_r, &wi, NULL);
2661 /* When inlining and the source bind refers to one of the optimized
2662 away parameters, change the source bind into a normal debug bind
2663 referring to the corresponding DEBUG_EXPR_DECL that should have
2664 been bound before the call stmt. */
2665 t = gimple_debug_source_bind_get_value (stmt);
2666 if (t != NULL_TREE
2667 && TREE_CODE (t) == PARM_DECL
2668 && id->gimple_call)
2669 {
2670 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2671 unsigned int i;
2672 if (debug_args != NULL)
2673 {
2674 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2675 if ((**debug_args)[i] == DECL_ORIGIN (t)
2676 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2677 {
2678 t = (**debug_args)[i + 1];
2679 stmt->subcode = GIMPLE_DEBUG_BIND;
2680 gimple_debug_bind_set_value (stmt, t);
2681 break;
2682 }
2683 }
2684 }
2685 }
2686
2687 processing_debug_stmt = 0;
2688
2689 update_stmt (stmt);
2690 }
2691
2692 /* Process deferred debug stmts. In order to give values better odds
2693 of being successfully remapped, we delay the processing of debug
2694 stmts until all other stmts that might require remapping are
2695 processed. */
2696
2697 static void
2698 copy_debug_stmts (copy_body_data *id)
2699 {
2700 size_t i;
2701 gimple stmt;
2702
2703 if (!id->debug_stmts.exists ())
2704 return;
2705
2706 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2707 copy_debug_stmt (stmt, id);
2708
2709 id->debug_stmts.release ();
2710 }
2711
2712 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2713 another function. */
2714
2715 static tree
2716 copy_tree_body (copy_body_data *id)
2717 {
2718 tree fndecl = id->src_fn;
2719 tree body = DECL_SAVED_TREE (fndecl);
2720
2721 walk_tree (&body, copy_tree_body_r, id, NULL);
2722
2723 return body;
2724 }
2725
2726 /* Make a copy of the body of FN so that it can be inserted inline in
2727 another function. */
2728
2729 static tree
2730 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2731 basic_block entry_block_map, basic_block exit_block_map,
2732 basic_block new_entry)
2733 {
2734 tree fndecl = id->src_fn;
2735 tree body;
2736
2737 /* If this body has a CFG, walk CFG and copy. */
2738 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2739 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2740 new_entry);
2741 copy_debug_stmts (id);
2742
2743 return body;
2744 }
2745
2746 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2747 defined in function FN, or of a data member thereof. */
2748
2749 static bool
2750 self_inlining_addr_expr (tree value, tree fn)
2751 {
2752 tree var;
2753
2754 if (TREE_CODE (value) != ADDR_EXPR)
2755 return false;
2756
2757 var = get_base_address (TREE_OPERAND (value, 0));
2758
2759 return var && auto_var_in_fn_p (var, fn);
2760 }
2761
2762 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2763 lexical block and line number information from BASE_STMT, if given,
2764 or from the last stmt of the block otherwise. */
2765
2766 static gimple
2767 insert_init_debug_bind (copy_body_data *id,
2768 basic_block bb, tree var, tree value,
2769 gimple base_stmt)
2770 {
2771 gimple note;
2772 gimple_stmt_iterator gsi;
2773 tree tracked_var;
2774
2775 if (!gimple_in_ssa_p (id->src_cfun))
2776 return NULL;
2777
2778 if (!MAY_HAVE_DEBUG_STMTS)
2779 return NULL;
2780
2781 tracked_var = target_for_debug_bind (var);
2782 if (!tracked_var)
2783 return NULL;
2784
2785 if (bb)
2786 {
2787 gsi = gsi_last_bb (bb);
2788 if (!base_stmt && !gsi_end_p (gsi))
2789 base_stmt = gsi_stmt (gsi);
2790 }
2791
2792 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2793
2794 if (bb)
2795 {
2796 if (!gsi_end_p (gsi))
2797 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2798 else
2799 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2800 }
2801
2802 return note;
2803 }
2804
2805 static void
2806 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2807 {
2808 /* If VAR represents a zero-sized variable, it's possible that the
2809 assignment statement may result in no gimple statements. */
2810 if (init_stmt)
2811 {
2812 gimple_stmt_iterator si = gsi_last_bb (bb);
2813
2814 /* We can end up with init statements that store to a non-register
2815 from a rhs with a conversion. Handle that here by forcing the
2816 rhs into a temporary. gimple_regimplify_operands is not
2817 prepared to do this for us. */
2818 if (!is_gimple_debug (init_stmt)
2819 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2820 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2821 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2822 {
2823 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2824 gimple_expr_type (init_stmt),
2825 gimple_assign_rhs1 (init_stmt));
2826 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2827 GSI_NEW_STMT);
2828 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2829 gimple_assign_set_rhs1 (init_stmt, rhs);
2830 }
2831 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2832 gimple_regimplify_operands (init_stmt, &si);
2833
2834 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2835 {
2836 tree def = gimple_assign_lhs (init_stmt);
2837 insert_init_debug_bind (id, bb, def, def, init_stmt);
2838 }
2839 }
2840 }
2841
2842 /* Initialize parameter P with VALUE. If needed, produce an init statement
2843 at the end of BB. When BB is NULL, we return the init statement to be
2844 output later. */
2845 static gimple
2846 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2847 basic_block bb, tree *vars)
2848 {
2849 gimple init_stmt = NULL;
2850 tree var;
2851 tree rhs = value;
2852 tree def = (gimple_in_ssa_p (cfun)
2853 ? ssa_default_def (id->src_cfun, p) : NULL);
2854
2855 if (value
2856 && value != error_mark_node
2857 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2858 {
2859 /* If we can match up types by promotion/demotion do so. */
2860 if (fold_convertible_p (TREE_TYPE (p), value))
2861 rhs = fold_convert (TREE_TYPE (p), value);
2862 else
2863 {
2864 /* ??? For valid programs we should not end up here.
2865 Still if we end up with truly mismatched types here, fall back
2866 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2867 GIMPLE to the following passes. */
2868 if (!is_gimple_reg_type (TREE_TYPE (value))
2869 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2870 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2871 else
2872 rhs = build_zero_cst (TREE_TYPE (p));
2873 }
2874 }
2875
2876 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2877 here since the type of this decl must be visible to the calling
2878 function. */
2879 var = copy_decl_to_var (p, id);
2880
2881 /* Declare this new variable. */
2882 DECL_CHAIN (var) = *vars;
2883 *vars = var;
2884
2885 /* Make gimplifier happy about this variable. */
2886 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2887
2888 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2889 we would not need to create a new variable here at all, if it
2890 weren't for debug info. Still, we can just use the argument
2891 value. */
2892 if (TREE_READONLY (p)
2893 && !TREE_ADDRESSABLE (p)
2894 && value && !TREE_SIDE_EFFECTS (value)
2895 && !def)
2896 {
2897 /* We may produce non-gimple trees by adding NOPs or introduce
2898 invalid sharing when the operand is not really constant.
2899 It is not a big deal to prohibit constant propagation here as
2900 we will constant propagate in the DOM1 pass anyway. */
2901 if (is_gimple_min_invariant (value)
2902 && useless_type_conversion_p (TREE_TYPE (p),
2903 TREE_TYPE (value))
2904 /* We have to be very careful about ADDR_EXPR. Make sure
2905 the base variable isn't a local variable of the inlined
2906 function, e.g., when doing recursive inlining, direct or
2907 mutually-recursive or whatever, which is why we don't
2908 just test whether fn == current_function_decl. */
2909 && ! self_inlining_addr_expr (value, fn))
2910 {
2911 insert_decl_map (id, p, value);
2912 insert_debug_decl_map (id, p, var);
2913 return insert_init_debug_bind (id, bb, var, value, NULL);
2914 }
2915 }
2916
2917 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2918 that way, when the PARM_DECL is encountered, it will be
2919 automatically replaced by the VAR_DECL. */
2920 insert_decl_map (id, p, var);
2921
2922 /* Even if P was TREE_READONLY, the new VAR should not be.
2923 In the original code, we would have constructed a
2924 temporary, and then the function body would have never
2925 changed the value of P. However, now, we will be
2926 constructing VAR directly. The constructor body may
2927 change its value multiple times as it is being
2928 constructed. Therefore, it must not be TREE_READONLY;
2929 the back-end assumes that a TREE_READONLY variable is
2930 assigned to only once. */
2931 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2932 TREE_READONLY (var) = 0;
2933
2934 /* If there is no setup required and we are in SSA, take the easy route
2935 replacing all SSA names representing the function parameter by the
2936 SSA name passed to the function.
2937
2938 We need to construct a map for the variable anyway as it might be used
2939 in different SSA names when the parameter is set in the function.
2940
2941 Do the replacement at -O0 for const arguments replaced by a constant.
2942 This is important for builtin_constant_p and other constructs requiring
2943 a constant argument to be visible in the inlined function body. */
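/* A sketch of the -O0 case (is_const is a hypothetical example):

     static inline __attribute__ ((always_inline)) int
     is_const (const int x) { return __builtin_constant_p (x); }

   For the call is_const (5) the default definition of X is mapped
   directly to the constant 5, so __builtin_constant_p can still fold
   to 1 in the inlined body even without optimization.  */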
2944 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2945 && (optimize
2946 || (TREE_READONLY (p)
2947 && is_gimple_min_invariant (rhs)))
2948 && (TREE_CODE (rhs) == SSA_NAME
2949 || is_gimple_min_invariant (rhs))
2950 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2951 {
2952 insert_decl_map (id, def, rhs);
2953 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2954 }
2955
2956 /* If the value of the argument is never used, don't bother
2957 initializing it. */
2958 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2959 {
2960 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2961 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2962 }
2963
2964 /* Initialize this VAR_DECL from the equivalent argument. Convert
2965 the argument to the proper type in case it was promoted. */
2966 if (value)
2967 {
2968 if (rhs == error_mark_node)
2969 {
2970 insert_decl_map (id, p, var);
2971 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2972 }
2973
2974 STRIP_USELESS_TYPE_CONVERSION (rhs);
2975
2976 /* If we are in SSA form, properly remap the default definition
2977 or assign to a dummy SSA name if the parameter is unused and
2978 we are not optimizing. */
2979 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2980 {
2981 if (def)
2982 {
2983 def = remap_ssa_name (def, id);
2984 init_stmt = gimple_build_assign (def, rhs);
2985 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2986 set_ssa_default_def (cfun, var, NULL);
2987 }
2988 else if (!optimize)
2989 {
2990 def = make_ssa_name (var, NULL);
2991 init_stmt = gimple_build_assign (def, rhs);
2992 }
2993 }
2994 else
2995 init_stmt = gimple_build_assign (var, rhs);
2996
2997 if (bb && init_stmt)
2998 insert_init_stmt (id, bb, init_stmt);
2999 }
3000 return init_stmt;
3001 }
3002
3003 /* Generate code to initialize the parameters of the function at the
3004 top of the stack in ID from the GIMPLE_CALL STMT. */
3005
3006 static void
3007 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3008 tree fn, basic_block bb)
3009 {
3010 tree parms;
3011 size_t i;
3012 tree p;
3013 tree vars = NULL_TREE;
3014 tree static_chain = gimple_call_chain (stmt);
3015
3016 /* Figure out what the parameters are. */
3017 parms = DECL_ARGUMENTS (fn);
3018
3019 /* Loop through the parameter declarations, replacing each with an
3020 equivalent VAR_DECL, appropriately initialized. */
3021 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3022 {
3023 tree val;
3024 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3025 setup_one_parameter (id, p, val, fn, bb, &vars);
3026 }
3027 /* After remapping parameters remap their types. This has to be done
3028 in a second loop over all parameters to appropriately remap
3029 variable sized arrays when the size is specified in a
3030 parameter following the array. */
3031 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3032 {
3033 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
3034 if (varp
3035 && TREE_CODE (*varp) == VAR_DECL)
3036 {
3037 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3038 ? ssa_default_def (id->src_cfun, p) : NULL);
3039 tree var = *varp;
3040 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3041 /* Also remap the default definition if it was remapped
3042 to the default definition of the parameter replacement
3043 by the parameter setup. */
3044 if (def)
3045 {
3046 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3047 if (defp
3048 && TREE_CODE (*defp) == SSA_NAME
3049 && SSA_NAME_VAR (*defp) == var)
3050 TREE_TYPE (*defp) = TREE_TYPE (var);
3051 }
3052 }
3053 }
3054
3055 /* Initialize the static chain. */
3056 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3057 gcc_assert (fn != current_function_decl);
3058 if (p)
3059 {
3060 /* No static chain? Seems like a bug in tree-nested.c. */
3061 gcc_assert (static_chain);
3062
3063 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3064 }
3065
3066 declare_inline_vars (id->block, vars);
3067 }
3068
3069
3070 /* Declare a return variable to replace the RESULT_DECL for the
3071 function we are calling. An appropriate DECL_STMT is returned.
3072 The USE_STMT is filled to contain a use of the declaration to
3073 indicate the return value of the function.
3074
3075 RETURN_SLOT, if non-null, is the place where to store the result. It
3076 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3077 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3078
3079 The return value is a (possibly null) value that holds the result
3080 as seen by the caller. */
3081
3082 static tree
3083 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3084 basic_block entry_bb)
3085 {
3086 tree callee = id->src_fn;
3087 tree result = DECL_RESULT (callee);
3088 tree callee_type = TREE_TYPE (result);
3089 tree caller_type;
3090 tree var, use;
3091
3092 /* Handle type-mismatches in the function declaration return type
3093 vs. the call expression. */
3094 if (modify_dest)
3095 caller_type = TREE_TYPE (modify_dest);
3096 else
3097 caller_type = TREE_TYPE (TREE_TYPE (callee));
3098
3099 /* We don't need to do anything for functions that don't return anything. */
3100 if (VOID_TYPE_P (callee_type))
3101 return NULL_TREE;
3102
3103 /* If there was a return slot, then the return value is the
3104 dereferenced address of that object. */
3105 if (return_slot)
3106 {
3107 /* The front end shouldn't have used both return_slot and
3108 a modify expression. */
3109 gcc_assert (!modify_dest);
3110 if (DECL_BY_REFERENCE (result))
3111 {
3112 tree return_slot_addr = build_fold_addr_expr (return_slot);
3113 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3114
3115 /* We are going to construct *&return_slot and we can't do that
3116 for variables believed to be not addressable.
3117
3118 FIXME: This check can possibly trigger, because values returned
3119 via return slot optimization are not believed to have their
3120 address taken by alias analysis. */
3121 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3122 var = return_slot_addr;
3123 }
3124 else
3125 {
3126 var = return_slot;
3127 gcc_assert (TREE_CODE (var) != SSA_NAME);
3128 if (TREE_ADDRESSABLE (result))
3129 mark_addressable (var);
3130 }
3131 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3132 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3133 && !DECL_GIMPLE_REG_P (result)
3134 && DECL_P (var))
3135 DECL_GIMPLE_REG_P (var) = 0;
3136 use = NULL;
3137 goto done;
3138 }
3139
3140 /* All types requiring non-trivial constructors should have been handled. */
3141 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3142
3143 /* Attempt to avoid creating a new temporary variable. */
3144 if (modify_dest
3145 && TREE_CODE (modify_dest) != SSA_NAME)
3146 {
3147 bool use_it = false;
3148
3149 /* We can't use MODIFY_DEST if there's type promotion involved. */
3150 if (!useless_type_conversion_p (callee_type, caller_type))
3151 use_it = false;
3152
3153 /* ??? If we're assigning to a variable sized type, then we must
3154 reuse the destination variable, because we've no good way to
3155 create variable sized temporaries at this point. */
3156 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3157 use_it = true;
3158
3159 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3160 reuse it as the result of the call directly. Don't do this if
3161 it would promote MODIFY_DEST to addressable. */
3162 else if (TREE_ADDRESSABLE (result))
3163 use_it = false;
3164 else
3165 {
3166 tree base_m = get_base_address (modify_dest);
3167
3168 /* If the base isn't a decl, then it's a pointer, and we don't
3169 know where that's going to go. */
3170 if (!DECL_P (base_m))
3171 use_it = false;
3172 else if (is_global_var (base_m))
3173 use_it = false;
3174 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3175 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3176 && !DECL_GIMPLE_REG_P (result)
3177 && DECL_GIMPLE_REG_P (base_m))
3178 use_it = false;
3179 else if (!TREE_ADDRESSABLE (base_m))
3180 use_it = true;
3181 }
3182
3183 if (use_it)
3184 {
3185 var = modify_dest;
3186 use = NULL;
3187 goto done;
3188 }
3189 }
3190
3191 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3192
3193 var = copy_result_decl_to_var (result, id);
3194 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3195
3196 /* Do not have the rest of GCC warn about this variable as it should
3197 not be visible to the user. */
3198 TREE_NO_WARNING (var) = 1;
3199
3200 declare_inline_vars (id->block, var);
3201
3202 /* Build the use expr. If the return type of the function was
3203 promoted, convert it back to the expected type. */
3204 use = var;
3205 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3206 {
3207 /* If we can match up types by promotion/demotion do so. */
3208 if (fold_convertible_p (caller_type, var))
3209 use = fold_convert (caller_type, var);
3210 else
3211 {
3212 /* ??? For valid programs we should not end up here.
3213 Still if we end up with truly mismatched types here, fall back
3214 to using a MEM_REF to not leak invalid GIMPLE to the following
3215 passes. */
3216 /* Prevent var from being written into SSA form. */
3217 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3218 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3219 DECL_GIMPLE_REG_P (var) = false;
3220 else if (is_gimple_reg_type (TREE_TYPE (var)))
3221 TREE_ADDRESSABLE (var) = true;
3222 use = fold_build2 (MEM_REF, caller_type,
3223 build_fold_addr_expr (var),
3224 build_int_cst (ptr_type_node, 0));
3225 }
3226 }
3227
3228 STRIP_USELESS_TYPE_CONVERSION (use);
3229
3230 if (DECL_BY_REFERENCE (result))
3231 {
3232 TREE_ADDRESSABLE (var) = 1;
3233 var = build_fold_addr_expr (var);
3234 }
3235
3236 done:
3237 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3238 way, when the RESULT_DECL is encountered, it will be
3239 automatically replaced by the VAR_DECL.
3240
3241 When returning by reference, ensure that RESULT_DECL remaps to
3242 gimple_val. */
3243 if (DECL_BY_REFERENCE (result)
3244 && !is_gimple_val (var))
3245 {
3246 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3247 insert_decl_map (id, result, temp);
3248 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3249 its default_def SSA_NAME. */
3250 if (gimple_in_ssa_p (id->src_cfun)
3251 && is_gimple_reg (result))
3252 {
3253 temp = make_ssa_name (temp, NULL);
3254 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3255 }
3256 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3257 }
3258 else
3259 insert_decl_map (id, result, var);
3260
3261 /* Remember this so we can ignore it in remap_decls. */
3262 id->retvar = var;
3263
3264 return use;
3265 }
3266
3267 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3268 to a local label. */
3269
3270 static tree
3271 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3272 {
3273 tree node = *nodep;
3274 tree fn = (tree) fnp;
3275
3276 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3277 return node;
3278
3279 if (TYPE_P (node))
3280 *walk_subtrees = 0;
3281
3282 return NULL_TREE;
3283 }
3284
3285 /* Determine if the function can be copied. If so, return NULL. If
3286 not, return a string describing the reason for failure. */
3287
3288 static const char *
3289 copy_forbidden (struct function *fun, tree fndecl)
3290 {
3291 const char *reason = fun->cannot_be_copied_reason;
3292 tree decl;
3293 unsigned ix;
3294
3295 /* Only examine the function once. */
3296 if (fun->cannot_be_copied_set)
3297 return reason;
3298
3299 /* We cannot copy a function that receives a non-local goto
3300 because we cannot remap the destination label used in the
3301 function that is performing the non-local goto. */
3302 /* ??? Actually, this should be possible, if we work at it.
3303 No doubt there's just a handful of places that simply
3304 assume it doesn't happen and don't substitute properly. */
3305 if (fun->has_nonlocal_label)
3306 {
3307 reason = G_("function %q+F can never be copied "
3308 "because it receives a non-local goto");
3309 goto fail;
3310 }
3311
3312 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3313 if (TREE_CODE (decl) == VAR_DECL
3314 && TREE_STATIC (decl)
3315 && !DECL_EXTERNAL (decl)
3316 && DECL_INITIAL (decl)
3317 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3318 has_label_address_in_static_1,
3319 fndecl))
3320 {
3321 reason = G_("function %q+F can never be copied because it saves "
3322 "address of local label in a static variable");
3323 goto fail;
3324 }
3325
3326 fail:
3327 fun->cannot_be_copied_reason = reason;
3328 fun->cannot_be_copied_set = true;
3329 return reason;
3330 }
3331
3332
3333 static const char *inline_forbidden_reason;
3334
3335 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3336 iff a function cannot be inlined. Also sets the reason why. */
3337
3338 static tree
3339 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3340 struct walk_stmt_info *wip)
3341 {
3342 tree fn = (tree) wip->info;
3343 tree t;
3344 gimple stmt = gsi_stmt (*gsi);
3345
3346 switch (gimple_code (stmt))
3347 {
3348 case GIMPLE_CALL:
3349 /* Refuse to inline an alloca call unless the user explicitly forced it,
3350 as this may change the program's memory overhead drastically when the
3351 function using alloca is called in a loop. In the GCC present in
3352 SPEC2000, inlining into schedule_block caused it to require 2GB of
3353 RAM instead of 256MB. Don't do so for alloca calls emitted for
3354 VLA objects as those can't cause unbounded growth (they're always
3355 wrapped inside stack_save/stack_restore regions). */
3356 if (gimple_alloca_call_p (stmt)
3357 && !gimple_call_alloca_for_var_p (stmt)
3358 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3359 {
3360 inline_forbidden_reason
3361 = G_("function %q+F can never be inlined because it uses "
3362 "alloca (override using the always_inline attribute)");
3363 *handled_ops_p = true;
3364 return fn;
3365 }
3366
3367 t = gimple_call_fndecl (stmt);
3368 if (t == NULL_TREE)
3369 break;
3370
3371 /* We cannot inline functions that call setjmp. */
3372 if (setjmp_call_p (t))
3373 {
3374 inline_forbidden_reason
3375 = G_("function %q+F can never be inlined because it uses setjmp");
3376 *handled_ops_p = true;
3377 return t;
3378 }
3379
3380 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3381 switch (DECL_FUNCTION_CODE (t))
3382 {
3383 /* We cannot inline functions that take a variable number of
3384 arguments. */
3385 case BUILT_IN_VA_START:
3386 case BUILT_IN_NEXT_ARG:
3387 case BUILT_IN_VA_END:
3388 inline_forbidden_reason
3389 = G_("function %q+F can never be inlined because it "
3390 "uses variable argument lists");
3391 *handled_ops_p = true;
3392 return t;
3393
3394 case BUILT_IN_LONGJMP:
3395 /* We can't inline functions that call __builtin_longjmp at
3396 all. The non-local goto machinery really requires the
3397 destination be in a different function. If we allow the
3398 function calling __builtin_longjmp to be inlined into the
3399 function calling __builtin_setjmp, Things will Go Awry. */
3400 inline_forbidden_reason
3401 = G_("function %q+F can never be inlined because "
3402 "it uses setjmp-longjmp exception handling");
3403 *handled_ops_p = true;
3404 return t;
3405
3406 case BUILT_IN_NONLOCAL_GOTO:
3407 /* Similarly. */
3408 inline_forbidden_reason
3409 = G_("function %q+F can never be inlined because "
3410 "it uses non-local goto");
3411 *handled_ops_p = true;
3412 return t;
3413
3414 case BUILT_IN_RETURN:
3415 case BUILT_IN_APPLY_ARGS:
3416 /* If a __builtin_apply_args caller would be inlined,
3417 it would be saving arguments of the function it has
3418 been inlined into. Similarly __builtin_return would
3419 return from the function the inline has been inlined into. */
3420 inline_forbidden_reason
3421 = G_("function %q+F can never be inlined because "
3422 "it uses __builtin_return or __builtin_apply_args");
3423 *handled_ops_p = true;
3424 return t;
3425
3426 default:
3427 break;
3428 }
3429 break;
3430
3431 case GIMPLE_GOTO:
3432 t = gimple_goto_dest (stmt);
3433
3434 /* We will not inline a function which uses computed goto. The
3435 addresses of its local labels, which may be tucked into
3436 global storage, are of course not constant across
3437 instantiations, which causes unexpected behavior. */
3438 if (TREE_CODE (t) != LABEL_DECL)
3439 {
3440 inline_forbidden_reason
3441 = G_("function %q+F can never be inlined "
3442 "because it contains a computed goto");
3443 *handled_ops_p = true;
3444 return t;
3445 }
3446 break;
3447
3448 default:
3449 break;
3450 }
3451
3452 *handled_ops_p = false;
3453 return NULL_TREE;
3454 }
3455
3456 /* Return true if FNDECL is a function that cannot be inlined into
3457 another one. */
3458
3459 static bool
3460 inline_forbidden_p (tree fndecl)
3461 {
3462 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3463 struct walk_stmt_info wi;
3464 struct pointer_set_t *visited_nodes;
3465 basic_block bb;
3466 bool forbidden_p = false;
3467
3468 /* First check for shared reasons not to copy the code. */
3469 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3470 if (inline_forbidden_reason != NULL)
3471 return true;
3472
3473 /* Next, walk the statements of the function looking for
3474 constructs we can't handle, or that are non-optimal for inlining. */
3475 visited_nodes = pointer_set_create ();
3476 memset (&wi, 0, sizeof (wi));
3477 wi.info = (void *) fndecl;
3478 wi.pset = visited_nodes;
3479
3480 FOR_EACH_BB_FN (bb, fun)
3481 {
3482 gimple ret;
3483 gimple_seq seq = bb_seq (bb);
3484 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3485 forbidden_p = (ret != NULL);
3486 if (forbidden_p)
3487 break;
3488 }
3489
3490 pointer_set_destroy (visited_nodes);
3491 return forbidden_p;
3492 }
3493 \f
3494 /* Return false if the function FNDECL cannot be inlined on account of its
3495 attributes, true otherwise. */
3496 static bool
3497 function_attribute_inlinable_p (const_tree fndecl)
3498 {
3499 if (targetm.attribute_table)
3500 {
3501 const_tree a;
3502
3503 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3504 {
3505 const_tree name = TREE_PURPOSE (a);
3506 int i;
3507
3508 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3509 if (is_attribute_p (targetm.attribute_table[i].name, name))
3510 return targetm.function_attribute_inlinable_p (fndecl);
3511 }
3512 }
3513
3514 return true;
3515 }
3516
3517 /* Returns nonzero if FN is a function that does not have any
3518 fundamental inline blocking properties. */
3519
3520 bool
3521 tree_inlinable_function_p (tree fn)
3522 {
3523 bool inlinable = true;
3524 bool do_warning;
3525 tree always_inline;
3526
3527 /* If we've already decided this function shouldn't be inlined,
3528 there's no need to check again. */
3529 if (DECL_UNINLINABLE (fn))
3530 return false;
3531
3532 /* We only warn for functions declared `inline' by the user. */
3533 do_warning = (warn_inline
3534 && DECL_DECLARED_INLINE_P (fn)
3535 && !DECL_NO_INLINE_WARNING_P (fn)
3536 && !DECL_IN_SYSTEM_HEADER (fn));
3537
3538 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3539
3540 if (flag_no_inline
3541 && always_inline == NULL)
3542 {
3543 if (do_warning)
3544 warning (OPT_Winline, "function %q+F can never be inlined because it "
3545 "is suppressed using -fno-inline", fn);
3546 inlinable = false;
3547 }
3548
3549 else if (!function_attribute_inlinable_p (fn))
3550 {
3551 if (do_warning)
3552 warning (OPT_Winline, "function %q+F can never be inlined because it "
3553 "uses attributes conflicting with inlining", fn);
3554 inlinable = false;
3555 }
3556
3557 else if (inline_forbidden_p (fn))
3558 {
3559 /* See if we should warn about uninlinable functions. Previously,
3560 some of these warnings would be issued while trying to expand
3561 the function inline, but that would cause multiple warnings
3562 about functions that would for example call alloca. But since
3563 this is a property of the function, just one warning is enough.
3564 As a bonus we can now give more details about the reason why a
3565 function is not inlinable. */
3566 if (always_inline)
3567 error (inline_forbidden_reason, fn);
3568 else if (do_warning)
3569 warning (OPT_Winline, inline_forbidden_reason, fn);
3570
3571 inlinable = false;
3572 }
3573
3574 /* Squirrel away the result so that we don't have to check again. */
3575 DECL_UNINLINABLE (fn) = !inlinable;
3576
3577 return inlinable;
3578 }
3579
3580 /* Estimate the cost of a memory move. Use machine dependent
3581 word size and take possible memcpy call into account. */
3582
3583 int
3584 estimate_move_cost (tree type)
3585 {
3586 HOST_WIDE_INT size;
3587
3588 gcc_assert (!VOID_TYPE_P (type));
3589
3590 if (TREE_CODE (type) == VECTOR_TYPE)
3591 {
3592 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3593 enum machine_mode simd
3594 = targetm.vectorize.preferred_simd_mode (inner);
3595 int simd_mode_size = GET_MODE_SIZE (simd);
3596 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3597 / simd_mode_size);
3598 }
3599
3600 size = int_size_in_bytes (type);
3601
3602 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3603 /* Cost of a memcpy call, 3 arguments and the call. */
3604 return 4;
3605 else
3606 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3607 }
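/* A worked example of the non-vector path above, on a hypothetical target
with MOVE_MAX_PIECES == 8 and MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size)
== 32: an 8-byte scalar costs (8 + 7) / 8 = 1, a 24-byte struct costs
(24 + 7) / 8 = 3, and a 100-byte struct exceeds the limit and is costed
as a memcpy call, i.e. 4. */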
3608
3609 /* Returns the cost of operation CODE, according to WEIGHTS. */
3610
3611 static int
3612 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3613 tree op1 ATTRIBUTE_UNUSED, tree op2)
3614 {
3615 switch (code)
3616 {
3617 /* These are "free" conversions, or their presumed cost
3618 is folded into other operations. */
3619 case RANGE_EXPR:
3620 CASE_CONVERT:
3621 case COMPLEX_EXPR:
3622 case PAREN_EXPR:
3623 case VIEW_CONVERT_EXPR:
3624 return 0;
3625
3626 /* Assign cost of 1 to usual operations.
3627 ??? We may consider mapping RTL costs to this. */
3628 case COND_EXPR:
3629 case VEC_COND_EXPR:
3630 case VEC_PERM_EXPR:
3631
3632 case PLUS_EXPR:
3633 case POINTER_PLUS_EXPR:
3634 case MINUS_EXPR:
3635 case MULT_EXPR:
3636 case MULT_HIGHPART_EXPR:
3637 case FMA_EXPR:
3638
3639 case ADDR_SPACE_CONVERT_EXPR:
3640 case FIXED_CONVERT_EXPR:
3641 case FIX_TRUNC_EXPR:
3642
3643 case NEGATE_EXPR:
3644 case FLOAT_EXPR:
3645 case MIN_EXPR:
3646 case MAX_EXPR:
3647 case ABS_EXPR:
3648
3649 case LSHIFT_EXPR:
3650 case RSHIFT_EXPR:
3651 case LROTATE_EXPR:
3652 case RROTATE_EXPR:
3653 case VEC_LSHIFT_EXPR:
3654 case VEC_RSHIFT_EXPR:
3655
3656 case BIT_IOR_EXPR:
3657 case BIT_XOR_EXPR:
3658 case BIT_AND_EXPR:
3659 case BIT_NOT_EXPR:
3660
3661 case TRUTH_ANDIF_EXPR:
3662 case TRUTH_ORIF_EXPR:
3663 case TRUTH_AND_EXPR:
3664 case TRUTH_OR_EXPR:
3665 case TRUTH_XOR_EXPR:
3666 case TRUTH_NOT_EXPR:
3667
3668 case LT_EXPR:
3669 case LE_EXPR:
3670 case GT_EXPR:
3671 case GE_EXPR:
3672 case EQ_EXPR:
3673 case NE_EXPR:
3674 case ORDERED_EXPR:
3675 case UNORDERED_EXPR:
3676
3677 case UNLT_EXPR:
3678 case UNLE_EXPR:
3679 case UNGT_EXPR:
3680 case UNGE_EXPR:
3681 case UNEQ_EXPR:
3682 case LTGT_EXPR:
3683
3684 case CONJ_EXPR:
3685
3686 case PREDECREMENT_EXPR:
3687 case PREINCREMENT_EXPR:
3688 case POSTDECREMENT_EXPR:
3689 case POSTINCREMENT_EXPR:
3690
3691 case REALIGN_LOAD_EXPR:
3692
3693 case REDUC_MAX_EXPR:
3694 case REDUC_MIN_EXPR:
3695 case REDUC_PLUS_EXPR:
3696 case WIDEN_SUM_EXPR:
3697 case WIDEN_MULT_EXPR:
3698 case DOT_PROD_EXPR:
3699 case SAD_EXPR:
3700 case WIDEN_MULT_PLUS_EXPR:
3701 case WIDEN_MULT_MINUS_EXPR:
3702 case WIDEN_LSHIFT_EXPR:
3703
3704 case VEC_WIDEN_MULT_HI_EXPR:
3705 case VEC_WIDEN_MULT_LO_EXPR:
3706 case VEC_WIDEN_MULT_EVEN_EXPR:
3707 case VEC_WIDEN_MULT_ODD_EXPR:
3708 case VEC_UNPACK_HI_EXPR:
3709 case VEC_UNPACK_LO_EXPR:
3710 case VEC_UNPACK_FLOAT_HI_EXPR:
3711 case VEC_UNPACK_FLOAT_LO_EXPR:
3712 case VEC_PACK_TRUNC_EXPR:
3713 case VEC_PACK_SAT_EXPR:
3714 case VEC_PACK_FIX_TRUNC_EXPR:
3715 case VEC_WIDEN_LSHIFT_HI_EXPR:
3716 case VEC_WIDEN_LSHIFT_LO_EXPR:
3717
3718 return 1;
3719
3720 /* A few special cases of expensive operations. This is useful
3721 to avoid inlining functions that contain too many of these. */
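/* For example, 'x / y' with a non-constant divisor is charged
WEIGHTS->div_mod_cost (1 in the size weights, 10 in the time weights set
up by init_inline_once below), whereas 'x / 16' is charged 1, since
division by a constant is usually strength-reduced. */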
3722 case TRUNC_DIV_EXPR:
3723 case CEIL_DIV_EXPR:
3724 case FLOOR_DIV_EXPR:
3725 case ROUND_DIV_EXPR:
3726 case EXACT_DIV_EXPR:
3727 case TRUNC_MOD_EXPR:
3728 case CEIL_MOD_EXPR:
3729 case FLOOR_MOD_EXPR:
3730 case ROUND_MOD_EXPR:
3731 case RDIV_EXPR:
3732 if (TREE_CODE (op2) != INTEGER_CST)
3733 return weights->div_mod_cost;
3734 return 1;
3735
3736 default:
3737 /* We expect a copy assignment with no operator. */
3738 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3739 return 0;
3740 }
3741 }
3742
3743
3744 /* Estimate number of instructions that will be created by expanding
3745 the statements in the statement sequence STMTS.
3746 WEIGHTS contains weights attributed to various constructs. */
3747
3748 static int
3749 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3750 {
3751 int cost;
3752 gimple_stmt_iterator gsi;
3753
3754 cost = 0;
3755 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3756 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3757
3758 return cost;
3759 }
3760
3761
3762 /* Estimate number of instructions that will be created by expanding STMT.
3763 WEIGHTS contains weights attributed to various constructs. */
3764
3765 int
3766 estimate_num_insns (gimple stmt, eni_weights *weights)
3767 {
3768 unsigned cost, i;
3769 enum gimple_code code = gimple_code (stmt);
3770 tree lhs;
3771 tree rhs;
3772
3773 switch (code)
3774 {
3775 case GIMPLE_ASSIGN:
3776 /* Try to estimate the cost of assignments. We have two cases to
3777 deal with:
3778 1) Simple assignments to registers;
3779 2) Stores to things that must live in memory. This includes
3780 "normal" stores to scalars, but also assignments of large
3781 structures, or constructors of big arrays;
3782
3783 Let us look at these two cases, assuming we have "a = b + C":
3784 <GIMPLE_ASSIGN <var_decl "a">
3785 <plus_expr <var_decl "b"> <constant C>>>
3786 If "a" is a GIMPLE register, the assignment to it is free on almost
3787 any target, because "a" usually ends up in a real register. Hence
3788 the only cost of this expression comes from the PLUS_EXPR, and we
3789 can ignore the GIMPLE_ASSIGN.
3790 If "a" is not a GIMPLE register, the assignment to "a" will most
3791 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3792 of moving something into "a", which we compute using the function
3793 estimate_move_cost. */
3794 if (gimple_clobber_p (stmt))
3795 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3796
3797 lhs = gimple_assign_lhs (stmt);
3798 rhs = gimple_assign_rhs1 (stmt);
3799
3800 cost = 0;
3801
3802 /* Account for the cost of moving to / from memory. */
3803 if (gimple_store_p (stmt))
3804 cost += estimate_move_cost (TREE_TYPE (lhs));
3805 if (gimple_assign_load_p (stmt))
3806 cost += estimate_move_cost (TREE_TYPE (rhs));
3807
3808 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3809 gimple_assign_rhs1 (stmt),
3810 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3811 == GIMPLE_BINARY_RHS
3812 ? gimple_assign_rhs2 (stmt) : NULL);
3813 break;
3814
3815 case GIMPLE_COND:
3816 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3817 gimple_op (stmt, 0),
3818 gimple_op (stmt, 1));
3819 break;
3820
3821 case GIMPLE_SWITCH:
3822 /* Take into account cost of the switch + guess 2 conditional jumps for
3823 each case label.
3824
3825 TODO: once the switch expansion logic is sufficiently separated, we can
3826 do a better job of estimating the cost of the switch. */
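/* E.g. a switch with 16 labels (default included) is costed
floor_log2 (16) * 2 = 8 when estimating time, roughly a balanced decision
tree, and 16 * 2 = 32 when estimating size. */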
3827 if (weights->time_based)
3828 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3829 else
3830 cost = gimple_switch_num_labels (stmt) * 2;
3831 break;
3832
3833 case GIMPLE_CALL:
3834 {
3835 tree decl;
3836
3837 if (gimple_call_internal_p (stmt))
3838 return 0;
3839 else if ((decl = gimple_call_fndecl (stmt))
3840 && DECL_BUILT_IN (decl))
3841 {
3842 /* Do not special case builtins where we see the body.
3843 This just confuses the inliner. */
3844 struct cgraph_node *node;
3845 if (!(node = cgraph_get_node (decl))
3846 || node->definition)
3847 ;
3848 /* For builtins that are likely expanded to nothing or
3849 inlined, do not account for operand costs. */
3850 else if (is_simple_builtin (decl))
3851 return 0;
3852 else if (is_inexpensive_builtin (decl))
3853 return weights->target_builtin_call_cost;
3854 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3855 {
3856 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3857 specialize the cheap expansion we do here.
3858 ??? This asks for a more general solution. */
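/* E.g. 'y = pow (x, 2.0)' is costed below like the multiply 'y = x * x',
matching the cheap expansion mentioned above. */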
3859 switch (DECL_FUNCTION_CODE (decl))
3860 {
3861 case BUILT_IN_POW:
3862 case BUILT_IN_POWF:
3863 case BUILT_IN_POWL:
3864 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3865 && REAL_VALUES_EQUAL
3866 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3867 return estimate_operator_cost
3868 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
3869 gimple_call_arg (stmt, 0));
3870 break;
3871
3872 default:
3873 break;
3874 }
3875 }
3876 }
3877
3878 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3879 if (gimple_call_lhs (stmt))
3880 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3881 for (i = 0; i < gimple_call_num_args (stmt); i++)
3882 {
3883 tree arg = gimple_call_arg (stmt, i);
3884 cost += estimate_move_cost (TREE_TYPE (arg));
3885 }
3886 break;
3887 }
3888
3889 case GIMPLE_RETURN:
3890 return weights->return_cost;
3891
3892 case GIMPLE_GOTO:
3893 case GIMPLE_LABEL:
3894 case GIMPLE_NOP:
3895 case GIMPLE_PHI:
3896 case GIMPLE_PREDICT:
3897 case GIMPLE_DEBUG:
3898 return 0;
3899
3900 case GIMPLE_ASM:
3901 {
3902 int count = asm_str_count (gimple_asm_string (stmt));
3903 /* 1000 means infinity. This avoids overflows later
3904 with very long asm statements. */
3905 if (count > 1000)
3906 count = 1000;
3907 return count;
3908 }
3909
3910 case GIMPLE_RESX:
3911 /* This is either going to be an external function call with one
3912 argument, or two register copy statements plus a goto. */
3913 return 2;
3914
3915 case GIMPLE_EH_DISPATCH:
3916 /* ??? This is going to turn into a switch statement. Ideally
3917 we'd have a look at the eh region and estimate the number of
3918 edges involved. */
3919 return 10;
3920
3921 case GIMPLE_BIND:
3922 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3923
3924 case GIMPLE_EH_FILTER:
3925 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3926
3927 case GIMPLE_CATCH:
3928 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3929
3930 case GIMPLE_TRY:
3931 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3932 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3933
3934 /* OpenMP directives are generally very expensive. */
3935
3936 case GIMPLE_OMP_RETURN:
3937 case GIMPLE_OMP_SECTIONS_SWITCH:
3938 case GIMPLE_OMP_ATOMIC_STORE:
3939 case GIMPLE_OMP_CONTINUE:
3940 /* ...except these, which are cheap. */
3941 return 0;
3942
3943 case GIMPLE_OMP_ATOMIC_LOAD:
3944 return weights->omp_cost;
3945
3946 case GIMPLE_OMP_FOR:
3947 return (weights->omp_cost
3948 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3949 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3950
3951 case GIMPLE_OMP_PARALLEL:
3952 case GIMPLE_OMP_TASK:
3953 case GIMPLE_OMP_CRITICAL:
3954 case GIMPLE_OMP_MASTER:
3955 case GIMPLE_OMP_TASKGROUP:
3956 case GIMPLE_OMP_ORDERED:
3957 case GIMPLE_OMP_SECTION:
3958 case GIMPLE_OMP_SECTIONS:
3959 case GIMPLE_OMP_SINGLE:
3960 case GIMPLE_OMP_TARGET:
3961 case GIMPLE_OMP_TEAMS:
3962 return (weights->omp_cost
3963 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3964
3965 case GIMPLE_TRANSACTION:
3966 return (weights->tm_cost
3967 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3968 weights));
3969
3970 default:
3971 gcc_unreachable ();
3972 }
3973
3974 return cost;
3975 }
3976
3977 /* Estimate number of instructions that will be created by expanding
3978 function FNDECL. WEIGHTS contains weights attributed to various
3979 constructs. */
3980
3981 int
3982 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3983 {
3984 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3985 gimple_stmt_iterator bsi;
3986 basic_block bb;
3987 int n = 0;
3988
3989 gcc_assert (my_function && my_function->cfg);
3990 FOR_EACH_BB_FN (bb, my_function)
3991 {
3992 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3993 n += estimate_num_insns (gsi_stmt (bsi), weights);
3994 }
3995
3996 return n;
3997 }
3998
3999
4000 /* Initializes weights used by estimate_num_insns. */
4001
4002 void
4003 init_inline_once (void)
4004 {
4005 eni_size_weights.call_cost = 1;
4006 eni_size_weights.indirect_call_cost = 3;
4007 eni_size_weights.target_builtin_call_cost = 1;
4008 eni_size_weights.div_mod_cost = 1;
4009 eni_size_weights.omp_cost = 40;
4010 eni_size_weights.tm_cost = 10;
4011 eni_size_weights.time_based = false;
4012 eni_size_weights.return_cost = 1;
4013
4014 /* Estimating the time for a call is difficult, since we have no idea what the
4015 called function does. In the current uses of eni_time_weights,
4016 underestimating the cost does less harm than overestimating it, so
4017 we choose a rather small value here. */
4018 eni_time_weights.call_cost = 10;
4019 eni_time_weights.indirect_call_cost = 15;
4020 eni_time_weights.target_builtin_call_cost = 1;
4021 eni_time_weights.div_mod_cost = 10;
4022 eni_time_weights.omp_cost = 40;
4023 eni_time_weights.tm_cost = 40;
4024 eni_time_weights.time_based = true;
4025 eni_time_weights.return_cost = 2;
4026 }
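/* Illustrative use of the two weight sets initialized above (a sketch;
the inline heuristics elsewhere are the real consumers):

int size = estimate_num_insns (stmt, &eni_size_weights);
int time = estimate_num_insns (stmt, &eni_time_weights);
*/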
4027
4028 /* Estimate the number of instructions in a gimple_seq. */
4029
4030 int
4031 count_insns_seq (gimple_seq seq, eni_weights *weights)
4032 {
4033 gimple_stmt_iterator gsi;
4034 int n = 0;
4035 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4036 n += estimate_num_insns (gsi_stmt (gsi), weights);
4037
4038 return n;
4039 }
4040
4041
4042 /* Install NEW_BLOCK as a new lexical TREE_BLOCK underneath CURRENT_BLOCK. */
4043
4044 static void
4045 prepend_lexical_block (tree current_block, tree new_block)
4046 {
4047 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4048 BLOCK_SUBBLOCKS (current_block) = new_block;
4049 BLOCK_SUPERCONTEXT (new_block) = current_block;
4050 }
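/* For instance, if CURRENT_BLOCK already had subblocks B1 -> B2, then
after prepend_lexical_block (CURRENT_BLOCK, N) the subblock chain is
N -> B1 -> B2 and BLOCK_SUPERCONTEXT (N) == CURRENT_BLOCK. */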
4051
4052 /* Add local variables from CALLEE to CALLER. */
4053
4054 static inline void
4055 add_local_variables (struct function *callee, struct function *caller,
4056 copy_body_data *id)
4057 {
4058 tree var;
4059 unsigned ix;
4060
4061 FOR_EACH_LOCAL_DECL (callee, ix, var)
4062 if (!can_be_nonlocal (var, id))
4063 {
4064 tree new_var = remap_decl (var, id);
4065
4066 /* Remap debug-expressions. */
4067 if (TREE_CODE (new_var) == VAR_DECL
4068 && DECL_HAS_DEBUG_EXPR_P (var)
4069 && new_var != var)
4070 {
4071 tree tem = DECL_DEBUG_EXPR (var);
4072 bool old_regimplify = id->regimplify;
4073 id->remapping_type_depth++;
4074 walk_tree (&tem, copy_tree_body_r, id, NULL);
4075 id->remapping_type_depth--;
4076 id->regimplify = old_regimplify;
4077 SET_DECL_DEBUG_EXPR (new_var, tem);
4078 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4079 }
4080 add_local_decl (caller, new_var);
4081 }
4082 }
4083
4084 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4085
4086 static bool
4087 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4088 {
4089 tree use_retvar;
4090 tree fn;
4091 struct pointer_map_t *st, *dst;
4092 tree return_slot;
4093 tree modify_dest;
4094 location_t saved_location;
4095 struct cgraph_edge *cg_edge;
4096 cgraph_inline_failed_t reason;
4097 basic_block return_block;
4098 edge e;
4099 gimple_stmt_iterator gsi, stmt_gsi;
4100 bool successfully_inlined = FALSE;
4101 bool purge_dead_abnormal_edges;
4102
4103 /* Set input_location here so we get the right instantiation context
4104 if we call instantiate_decl from inlinable_function_p. */
4105 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4106 saved_location = input_location;
4107 input_location = gimple_location (stmt);
4108
4109 /* From here on, we're only interested in CALL_EXPRs. */
4110 if (gimple_code (stmt) != GIMPLE_CALL)
4111 goto egress;
4112
4113 cg_edge = cgraph_edge (id->dst_node, stmt);
4114 gcc_checking_assert (cg_edge);
4115 /* First, see if we can figure out what function is being called.
4116 If we cannot, then there is no hope of inlining the function. */
4117 if (cg_edge->indirect_unknown_callee)
4118 goto egress;
4119 fn = cg_edge->callee->decl;
4120 gcc_checking_assert (fn);
4121
4122 /* If FN is a declaration of a function in a nested scope that was
4123 globally declared inline, we don't set its DECL_INITIAL.
4124 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4125 C++ front-end uses it for cdtors to refer to their internal
4126 declarations, which are not real functions. Fortunately those
4127 don't have trees to be saved, so we can tell by checking their
4128 gimple_body. */
4129 if (!DECL_INITIAL (fn)
4130 && DECL_ABSTRACT_ORIGIN (fn)
4131 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4132 fn = DECL_ABSTRACT_ORIGIN (fn);
4133
4134 /* Don't try to inline functions that are not well-suited to inlining. */
4135 if (cg_edge->inline_failed)
4136 {
4137 reason = cg_edge->inline_failed;
4138 /* If this call was originally indirect, we do not want to emit any
4139 inlining related warnings or sorry messages because there are no
4140 guarantees regarding those. */
4141 if (cg_edge->indirect_inlining_edge)
4142 goto egress;
4143
4144 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4145 /* For extern inline functions that get redefined we have always
4146 silently ignored the always_inline flag. Better behaviour would
4147 be to keep both bodies and use the extern inline body
4148 for inlining, but we can't do that because frontends overwrite
4149 the body. */
4150 && !cg_edge->callee->local.redefined_extern_inline
4151 /* During early inline pass, report only when optimization is
4152 not turned on. */
4153 && (cgraph_global_info_ready
4154 || !optimize
4155 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4156 /* PR 20090218-1_0.c. Body can be provided by another module. */
4157 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4158 {
4159 error ("inlining failed in call to always_inline %q+F: %s", fn,
4160 cgraph_inline_failed_string (reason));
4161 error ("called from here");
4162 }
4163 else if (warn_inline
4164 && DECL_DECLARED_INLINE_P (fn)
4165 && !DECL_NO_INLINE_WARNING_P (fn)
4166 && !DECL_IN_SYSTEM_HEADER (fn)
4167 && reason != CIF_UNSPECIFIED
4168 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4169 /* Do not warn about not inlined recursive calls. */
4170 && !cgraph_edge_recursive_p (cg_edge)
4171 /* Avoid warnings during early inline pass. */
4172 && cgraph_global_info_ready)
4173 {
4174 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4175 fn, _(cgraph_inline_failed_string (reason)));
4176 warning (OPT_Winline, "called from here");
4177 }
4178 goto egress;
4179 }
4180 fn = cg_edge->callee->decl;
4181 cgraph_get_body (cg_edge->callee);
4182
4183 #ifdef ENABLE_CHECKING
4184 if (cg_edge->callee->decl != id->dst_node->decl)
4185 verify_cgraph_node (cg_edge->callee);
4186 #endif
4187
4188 /* We will be inlining this callee. */
4189 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4190
4191 /* Update the caller's EH personality. */
4192 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4193 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4194 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4195
4196 /* Split the block holding the GIMPLE_CALL. */
4197 e = split_block (bb, stmt);
4198 bb = e->src;
4199 return_block = e->dest;
4200 remove_edge (e);
4201
4202 /* split_block splits after the statement; work around this by
4203 moving the call into the second block manually. Not pretty,
4204 but seems easier than doing the CFG manipulation by hand
4205 when the GIMPLE_CALL is in the last statement of BB. */
4206 stmt_gsi = gsi_last_bb (bb);
4207 gsi_remove (&stmt_gsi, false);
4208
4209 /* If the GIMPLE_CALL was the last statement of BB, it may have
4210 been the source of abnormal edges. In this case, schedule
4211 the removal of dead abnormal edges. */
4212 gsi = gsi_start_bb (return_block);
4213 if (gsi_end_p (gsi))
4214 {
4215 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4216 purge_dead_abnormal_edges = true;
4217 }
4218 else
4219 {
4220 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4221 purge_dead_abnormal_edges = false;
4222 }
4223
4224 stmt_gsi = gsi_start_bb (return_block);
4225
4226 /* Build a block containing code to initialize the arguments, the
4227 actual inline expansion of the body, and a label for the return
4228 statements within the function to jump to. The type of the
4229 statement expression is the return type of the function call.
4230 ??? If the call does not have an associated block then we will
4231 remap all callee blocks to NULL, effectively dropping most of
4232 its debug information. This should only happen for calls to
4233 artificial decls inserted by the compiler itself. We need to
4234 either link the inlined blocks into the caller block tree or
4235 not refer to them in any way to not break GC for locations. */
4236 if (gimple_block (stmt))
4237 {
4238 id->block = make_node (BLOCK);
4239 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4240 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4241 prepend_lexical_block (gimple_block (stmt), id->block);
4242 }
4243
4244 /* Local declarations will be replaced by their equivalents in this
4245 map. */
4246 st = id->decl_map;
4247 id->decl_map = pointer_map_create ();
4248 dst = id->debug_map;
4249 id->debug_map = NULL;
4250
4251 /* Record the function we are about to inline. */
4252 id->src_fn = fn;
4253 id->src_node = cg_edge->callee;
4254 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4255 id->gimple_call = stmt;
4256
4257 gcc_assert (!id->src_cfun->after_inlining);
4258
4259 id->entry_bb = bb;
4260 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4261 {
4262 gimple_stmt_iterator si = gsi_last_bb (bb);
4263 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4264 NOT_TAKEN),
4265 GSI_NEW_STMT);
4266 }
4267 initialize_inlined_parameters (id, stmt, fn, bb);
4268
4269 if (DECL_INITIAL (fn))
4270 {
4271 if (gimple_block (stmt))
4272 {
4273 tree *var;
4274
4275 prepend_lexical_block (id->block,
4276 remap_blocks (DECL_INITIAL (fn), id));
4277 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4278 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4279 == NULL_TREE));
4280 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4281 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4282 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4283 under it. The parameters can be then evaluated in the debugger,
4284 but don't show in backtraces. */
4285 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4286 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4287 {
4288 tree v = *var;
4289 *var = TREE_CHAIN (v);
4290 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4291 BLOCK_VARS (id->block) = v;
4292 }
4293 else
4294 var = &TREE_CHAIN (*var);
4295 }
4296 else
4297 remap_blocks_to_null (DECL_INITIAL (fn), id);
4298 }
4299
4300 /* Return statements in the function body will be replaced by jumps
4301 to the RET_LABEL. */
4302 gcc_assert (DECL_INITIAL (fn));
4303 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4304
4305 /* Find the LHS to which the result of this call is assigned. */
4306 return_slot = NULL;
4307 if (gimple_call_lhs (stmt))
4308 {
4309 modify_dest = gimple_call_lhs (stmt);
4310
4311 /* The function which we are inlining might not return a value,
4312 in which case we should issue a warning that the function
4313 does not return a value. In that case the optimizers will
4314 see that the variable to which the value is assigned was not
4315 initialized. We do not want to issue a warning about that
4316 uninitialized variable. */
4317 if (DECL_P (modify_dest))
4318 TREE_NO_WARNING (modify_dest) = 1;
4319
4320 if (gimple_call_return_slot_opt_p (stmt))
4321 {
4322 return_slot = modify_dest;
4323 modify_dest = NULL;
4324 }
4325 }
4326 else
4327 modify_dest = NULL;
4328
4329 /* If we are inlining a call to the C++ operator new, we don't want
4330 to use type based alias analysis on the return value. Otherwise
4331 we may get confused if the compiler sees that the inlined new
4332 function returns a pointer which was just deleted. See bug
4333 33407. */
4334 if (DECL_IS_OPERATOR_NEW (fn))
4335 {
4336 return_slot = NULL;
4337 modify_dest = NULL;
4338 }
4339
4340 /* Declare the return variable for the function. */
4341 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4342
4343 /* Add local vars in this inlined callee to caller. */
4344 add_local_variables (id->src_cfun, cfun, id);
4345
4346 if (dump_file && (dump_flags & TDF_DETAILS))
4347 {
4348 fprintf (dump_file, "Inlining ");
4349 print_generic_expr (dump_file, id->src_fn, 0);
4350 fprintf (dump_file, " to ");
4351 print_generic_expr (dump_file, id->dst_fn, 0);
4352 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4353 }
4354
4355 /* This is it. Duplicate the callee body. Assume callee is
4356 pre-gimplified. Note that we must not alter the caller
4357 function in any way before this point, as this CALL_EXPR may be
4358 a self-referential call; if we're calling ourselves, we need to
4359 duplicate our body before altering anything. */
4360 copy_body (id, cg_edge->callee->count,
4361 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4362 bb, return_block, NULL);
4363
4364 /* Reset the escaped solution. */
4365 if (cfun->gimple_df)
4366 pt_solution_reset (&cfun->gimple_df->escaped);
4367
4368 /* Clean up. */
4369 if (id->debug_map)
4370 {
4371 pointer_map_destroy (id->debug_map);
4372 id->debug_map = dst;
4373 }
4374 pointer_map_destroy (id->decl_map);
4375 id->decl_map = st;
4376
4377 /* Unlink the call's virtual operands before replacing it. */
4378 unlink_stmt_vdef (stmt);
4379 if (gimple_vdef (stmt)
4380 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4381 release_ssa_name (gimple_vdef (stmt));
4382
4383 /* If the inlined function returns a result that we care about,
4384 substitute the GIMPLE_CALL with an assignment of the return
4385 variable to the LHS of the call. That is, if STMT was
4386 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4387 if (use_retvar && gimple_call_lhs (stmt))
4388 {
4389 gimple old_stmt = stmt;
4390 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4391 gsi_replace (&stmt_gsi, stmt, false);
4392 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4393 }
4394 else
4395 {
4396 /* Handle the case of inlining a function with no return
4397 statement, which causes the return value to become undefined. */
4398 if (gimple_call_lhs (stmt)
4399 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4400 {
4401 tree name = gimple_call_lhs (stmt);
4402 tree var = SSA_NAME_VAR (name);
4403 tree def = ssa_default_def (cfun, var);
4404
4405 if (def)
4406 {
4407 /* If the variable is used undefined, make this name
4408 undefined via a move. */
4409 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4410 gsi_replace (&stmt_gsi, stmt, true);
4411 }
4412 else
4413 {
4414 /* Otherwise make this variable undefined. */
4415 gsi_remove (&stmt_gsi, true);
4416 set_ssa_default_def (cfun, var, name);
4417 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4418 }
4419 }
4420 else
4421 gsi_remove (&stmt_gsi, true);
4422 }
4423
4424 if (purge_dead_abnormal_edges)
4425 {
4426 gimple_purge_dead_eh_edges (return_block);
4427 gimple_purge_dead_abnormal_call_edges (return_block);
4428 }
4429
4430 /* If the value of the new expression is ignored, that's OK. We
4431 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4432 the equivalent inlined version either. */
4433 if (is_gimple_assign (stmt))
4434 {
4435 gcc_assert (gimple_assign_single_p (stmt)
4436 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4437 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4438 }
4439
4440 /* Output the inlining info for this abstract function, since it has been
4441 inlined. If we don't do this now, we can lose the information about the
4442 variables in the function when the blocks get blown away as soon as we
4443 remove the cgraph node. */
4444 if (gimple_block (stmt))
4445 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4446
4447 /* Update callgraph if needed. */
4448 cgraph_remove_node (cg_edge->callee);
4449
4450 id->block = NULL_TREE;
4451 successfully_inlined = TRUE;
4452
4453 egress:
4454 input_location = saved_location;
4455 return successfully_inlined;
4456 }
4457
4458 /* Expand call statements found in basic block BB.
4459 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4460 in a MODIFY_EXPR. */
4461
4462 static bool
4463 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4464 {
4465 gimple_stmt_iterator gsi;
4466
4467 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4468 {
4469 gimple stmt = gsi_stmt (gsi);
4470
4471 if (is_gimple_call (stmt)
4472 && !gimple_call_internal_p (stmt)
4473 && expand_call_inline (bb, stmt, id))
4474 return true;
4475 }
4476
4477 return false;
4478 }
4479
4480
4481 /* Walk all basic blocks created after FIRST and try to fold every statement
4482 in the STATEMENTS pointer set. */
4483
4484 static void
4485 fold_marked_statements (int first, struct pointer_set_t *statements)
4486 {
4487 for (; first < n_basic_blocks_for_fn (cfun); first++)
4488 if (BASIC_BLOCK_FOR_FN (cfun, first))
4489 {
4490 gimple_stmt_iterator gsi;
4491
4492 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4493 !gsi_end_p (gsi);
4494 gsi_next (&gsi))
4495 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4496 {
4497 gimple old_stmt = gsi_stmt (gsi);
4498 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4499
4500 if (old_decl && DECL_BUILT_IN (old_decl))
4501 {
4502 /* Folding builtins can create multiple instructions,
4503 we need to look at all of them. */
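/* E.g. folding 'p = __builtin_memcpy (d, s, 8)' may leave both an
aggregate copy and the assignment 'p = d', while other calls (say
__builtin_strlen of a string literal) may fold to a single constant
assignment or vanish entirely; the code below walks whatever came out. */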
4504 gimple_stmt_iterator i2 = gsi;
4505 gsi_prev (&i2);
4506 if (fold_stmt (&gsi))
4507 {
4508 gimple new_stmt;
4509 /* If a builtin at the end of a bb folded into nothing,
4510 the following loop won't work. */
4511 if (gsi_end_p (gsi))
4512 {
4513 cgraph_update_edges_for_call_stmt (old_stmt,
4514 old_decl, NULL);
4515 break;
4516 }
4517 if (gsi_end_p (i2))
4518 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4519 else
4520 gsi_next (&i2);
4521 while (1)
4522 {
4523 new_stmt = gsi_stmt (i2);
4524 update_stmt (new_stmt);
4525 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4526 new_stmt);
4527
4528 if (new_stmt == gsi_stmt (gsi))
4529 {
4530 /* It is okay to check only the very last
4531 of these statements. If it is a throwing
4532 statement nothing will change. If it isn't,
4533 this can remove EH edges. The only problematic
4534 case would be if some intermediate statements
4535 throw but the last one doesn't; that would mean
4536 we'd have to split the block, which we can't
4537 do here and we'd lose anyway. And as builtins
4538 probably never throw, this all
4539 is moot anyway. */
4540 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4541 new_stmt))
4542 gimple_purge_dead_eh_edges (
4543 BASIC_BLOCK_FOR_FN (cfun, first));
4544 break;
4545 }
4546 gsi_next (&i2);
4547 }
4548 }
4549 }
4550 else if (fold_stmt (&gsi))
4551 {
4552 /* Re-read the statement from GSI as fold_stmt() may
4553 have changed it. */
4554 gimple new_stmt = gsi_stmt (gsi);
4555 update_stmt (new_stmt);
4556
4557 if (is_gimple_call (old_stmt)
4558 || is_gimple_call (new_stmt))
4559 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4560 new_stmt);
4561
4562 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4563 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4564 first));
4565 }
4566 }
4567 }
4568 }
4569
4570 /* Expand calls to inline functions in the body of FN. */
4571
4572 unsigned int
4573 optimize_inline_calls (tree fn)
4574 {
4575 copy_body_data id;
4576 basic_block bb;
4577 int last = n_basic_blocks_for_fn (cfun);
4578 bool inlined_p = false;
4579
4580 /* Clear out ID. */
4581 memset (&id, 0, sizeof (id));
4582
4583 id.src_node = id.dst_node = cgraph_get_node (fn);
4584 gcc_assert (id.dst_node->definition);
4585 id.dst_fn = fn;
4586 /* Or any functions that aren't finished yet. */
4587 if (current_function_decl)
4588 id.dst_fn = current_function_decl;
4589
4590 id.copy_decl = copy_decl_maybe_to_var;
4591 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4592 id.transform_new_cfg = false;
4593 id.transform_return_to_modify = true;
4594 id.transform_parameter = true;
4595 id.transform_lang_insert_block = NULL;
4596 id.statements_to_fold = pointer_set_create ();
4597
4598 push_gimplify_context ();
4599
4600 /* We make no attempts to keep dominance info up-to-date. */
4601 free_dominance_info (CDI_DOMINATORS);
4602 free_dominance_info (CDI_POST_DOMINATORS);
4603
4604 /* Register specific gimple functions. */
4605 gimple_register_cfg_hooks ();
4606
4607 /* Reach the trees by walking over the CFG, and note the
4608 enclosing basic-blocks in the call edges. */
4609 /* We walk the blocks going forward, because inlined function bodies
4610 will split id->current_basic_block, and the new blocks will
4611 follow it; we'll trudge through them, processing their CALL_EXPRs
4612 along the way. */
4613 FOR_EACH_BB_FN (bb, cfun)
4614 inlined_p |= gimple_expand_calls_inline (bb, &id);
4615
4616 pop_gimplify_context (NULL);
4617
4618 #ifdef ENABLE_CHECKING
4619 {
4620 struct cgraph_edge *e;
4621
4622 verify_cgraph_node (id.dst_node);
4623
4624 /* Double check that we inlined everything we are supposed to inline. */
4625 for (e = id.dst_node->callees; e; e = e->next_callee)
4626 gcc_assert (e->inline_failed);
4627 }
4628 #endif
4629
4630 /* Fold queued statements. */
4631 fold_marked_statements (last, id.statements_to_fold);
4632 pointer_set_destroy (id.statements_to_fold);
4633
4634 gcc_assert (!id.debug_stmts.exists ());
4635
4636 /* If we didn't inline into the function there is nothing to do. */
4637 if (!inlined_p)
4638 return 0;
4639
4640 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4641 number_blocks (fn);
4642
4643 delete_unreachable_blocks_update_callgraph (&id);
4644 #ifdef ENABLE_CHECKING
4645 verify_cgraph_node (id.dst_node);
4646 #endif
4647
4648 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4649 not possible yet - the IPA passes might make various functions not
4650 throw and they don't care to proactively update local EH info. This is
4651 done later in the fixup_cfg pass that also executes the verification. */
4652 return (TODO_update_ssa
4653 | TODO_cleanup_cfg
4654 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4655 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4656 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4657 ? TODO_rebuild_frequencies : 0));
4658 }
4659
4660 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4661
4662 tree
4663 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4664 {
4665 enum tree_code code = TREE_CODE (*tp);
4666 enum tree_code_class cl = TREE_CODE_CLASS (code);
4667
4668 /* We make copies of most nodes. */
4669 if (IS_EXPR_CODE_CLASS (cl)
4670 || code == TREE_LIST
4671 || code == TREE_VEC
4672 || code == TYPE_DECL
4673 || code == OMP_CLAUSE)
4674 {
4675 /* Because the chain gets clobbered when we make a copy, we save it
4676 here. */
4677 tree chain = NULL_TREE, new_tree;
4678
4679 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4680 chain = TREE_CHAIN (*tp);
4681
4682 /* Copy the node. */
4683 new_tree = copy_node (*tp);
4684
4685 *tp = new_tree;
4686
4687 /* Now, restore the chain, if appropriate. That will cause
4688 walk_tree to walk into the chain as well. */
4689 if (code == PARM_DECL
4690 || code == TREE_LIST
4691 || code == OMP_CLAUSE)
4692 TREE_CHAIN (*tp) = chain;
4693
4694 /* For now, we don't update BLOCKs when we make copies. So, we
4695 have to nullify all BIND_EXPRs. */
4696 if (TREE_CODE (*tp) == BIND_EXPR)
4697 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4698 }
4699 else if (code == CONSTRUCTOR)
4700 {
4701 /* CONSTRUCTOR nodes need special handling because
4702 we need to duplicate the vector of elements. */
4703 tree new_tree;
4704
4705 new_tree = copy_node (*tp);
4706 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4707 *tp = new_tree;
4708 }
4709 else if (code == STATEMENT_LIST)
4710 /* We used to just abort on STATEMENT_LIST, but we can run into them
4711 with statement-expressions (c++/40975). */
4712 copy_statement_list (tp);
4713 else if (TREE_CODE_CLASS (code) == tcc_type)
4714 *walk_subtrees = 0;
4715 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4716 *walk_subtrees = 0;
4717 else if (TREE_CODE_CLASS (code) == tcc_constant)
4718 *walk_subtrees = 0;
4719 return NULL_TREE;
4720 }
4721
4722 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4723 information indicating to what new SAVE_EXPR this one should be mapped,
4724 use that one. Otherwise, create a new node and enter it in ST. FN is
4725 the function into which the copy will be placed. */
4726
4727 static void
4728 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4729 {
4730 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4731 tree *n;
4732 tree t;
4733
4734 /* See if we already encountered this SAVE_EXPR. */
4735 n = (tree *) pointer_map_contains (st, *tp);
4736
4737 /* If we didn't already remap this SAVE_EXPR, do so now. */
4738 if (!n)
4739 {
4740 t = copy_node (*tp);
4741
4742 /* Remember this SAVE_EXPR. */
4743 *pointer_map_insert (st, *tp) = t;
4744 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4745 *pointer_map_insert (st, t) = t;
4746 }
4747 else
4748 {
4749 /* We've already walked into this SAVE_EXPR; don't do it again. */
4750 *walk_subtrees = 0;
4751 t = *n;
4752 }
4753
4754 /* Replace this SAVE_EXPR with the copy. */
4755 *tp = t;
4756 }
4757
4758 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4759 label, copies the declaration and enters it in the decl map of the
4760 copy_body_data pointed to by WI->info. */
4761
4762 static tree
4763 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4764 bool *handled_ops_p ATTRIBUTE_UNUSED,
4765 struct walk_stmt_info *wi)
4766 {
4767 copy_body_data *id = (copy_body_data *) wi->info;
4768 gimple stmt = gsi_stmt (*gsip);
4769
4770 if (gimple_code (stmt) == GIMPLE_LABEL)
4771 {
4772 tree decl = gimple_label_label (stmt);
4773
4774 /* Copy the decl and remember the copy. */
4775 insert_decl_map (id, decl, id->copy_decl (decl, id));
4776 }
4777
4778 return NULL_TREE;
4779 }
4780
4781
4782 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4783 Using the decl map of the copy_body_data carried in DATA's walk_stmt_info,
4784 remaps all local declarations to appropriate replacements in gimple
4785 operands. */
4786
4787 static tree
4788 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4789 {
4790 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4791 copy_body_data *id = (copy_body_data *) wi->info;
4792 struct pointer_map_t *st = id->decl_map;
4793 tree *n;
4794 tree expr = *tp;
4795
4796 /* Only a local declaration (variable or label). */
4797 if ((TREE_CODE (expr) == VAR_DECL
4798 && !TREE_STATIC (expr))
4799 || TREE_CODE (expr) == LABEL_DECL)
4800 {
4801 /* Lookup the declaration. */
4802 n = (tree *) pointer_map_contains (st, expr);
4803
4804 /* If it's there, remap it. */
4805 if (n)
4806 *tp = *n;
4807 *walk_subtrees = 0;
4808 }
4809 else if (TREE_CODE (expr) == STATEMENT_LIST
4810 || TREE_CODE (expr) == BIND_EXPR
4811 || TREE_CODE (expr) == SAVE_EXPR)
4812 gcc_unreachable ();
4813 else if (TREE_CODE (expr) == TARGET_EXPR)
4814 {
4815 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4816 It's OK for this to happen if it was part of a subtree that
4817 isn't immediately expanded, such as operand 2 of another
4818 TARGET_EXPR. */
4819 if (!TREE_OPERAND (expr, 1))
4820 {
4821 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4822 TREE_OPERAND (expr, 3) = NULL_TREE;
4823 }
4824 }
4825
4826 /* Keep iterating. */
4827 return NULL_TREE;
4828 }
4829
4830
4831 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4832 Using the decl map of the copy_body_data pointed to by WI->info,
4833 remaps all local declarations to appropriate replacements in gimple
4834 statements. */
4835
4836 static tree
4837 replace_locals_stmt (gimple_stmt_iterator *gsip,
4838 bool *handled_ops_p ATTRIBUTE_UNUSED,
4839 struct walk_stmt_info *wi)
4840 {
4841 copy_body_data *id = (copy_body_data *) wi->info;
4842 gimple stmt = gsi_stmt (*gsip);
4843
4844 if (gimple_code (stmt) == GIMPLE_BIND)
4845 {
4846 tree block = gimple_bind_block (stmt);
4847
4848 if (block)
4849 {
4850 remap_block (&block, id);
4851 gimple_bind_set_block (stmt, block);
4852 }
4853
4854 /* This will remap a lot of the same decls again, but this should be
4855 harmless. */
4856 if (gimple_bind_vars (stmt))
4857 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4858 NULL, id));
4859 }
4860
4861 /* Keep iterating. */
4862 return NULL_TREE;
4863 }
4864
4865
4866 /* Copies everything in SEQ and replaces variables and labels local to
4867 current_function_decl. */
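/* A minimal illustrative use (hypothetical caller):

gimple_seq dup = copy_gimple_seq_and_replace_locals (gimple_bind_body (bind));

DUP then shares no local VAR_DECLs or LABEL_DECLs with the original
sequence, so it can safely be inserted elsewhere in the same function. */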
4868
4869 gimple_seq
4870 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4871 {
4872 copy_body_data id;
4873 struct walk_stmt_info wi;
4874 struct pointer_set_t *visited;
4875 gimple_seq copy;
4876
4877 /* There's nothing to do for NULL_TREE. */
4878 if (seq == NULL)
4879 return seq;
4880
4881 /* Set up ID. */
4882 memset (&id, 0, sizeof (id));
4883 id.src_fn = current_function_decl;
4884 id.dst_fn = current_function_decl;
4885 id.decl_map = pointer_map_create ();
4886 id.debug_map = NULL;
4887
4888 id.copy_decl = copy_decl_no_change;
4889 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4890 id.transform_new_cfg = false;
4891 id.transform_return_to_modify = false;
4892 id.transform_parameter = false;
4893 id.transform_lang_insert_block = NULL;
4894
4895 /* Walk the tree once to find local labels. */
4896 memset (&wi, 0, sizeof (wi));
4897 visited = pointer_set_create ();
4898 wi.info = &id;
4899 wi.pset = visited;
4900 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4901 pointer_set_destroy (visited);
4902
4903 copy = gimple_seq_copy (seq);
4904
4905 /* Walk the copy, remapping decls. */
4906 memset (&wi, 0, sizeof (wi));
4907 wi.info = &id;
4908 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4909
4910 /* Clean up. */
4911 pointer_map_destroy (id.decl_map);
4912 if (id.debug_map)
4913 pointer_map_destroy (id.debug_map);
4914
4915 return copy;
4916 }
4917
4918
4919 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4920
4921 static tree
4922 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4923 {
4924 if (*tp == data)
4925 return (tree) data;
4926 else
4927 return NULL;
4928 }
4929
4930 DEBUG_FUNCTION bool
4931 debug_find_tree (tree top, tree search)
4932 {
4933 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4934 }
4935
4936
4937 /* Declare the variables created by the inliner. Add all the variables in
4938 VARS to BLOCK. */
4939
4940 static void
4941 declare_inline_vars (tree block, tree vars)
4942 {
4943 tree t;
4944 for (t = vars; t; t = DECL_CHAIN (t))
4945 {
4946 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4947 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4948 add_local_decl (cfun, t);
4949 }
4950
4951 if (block)
4952 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4953 }
4954
4955 /* Finish up the copy COPY of declaration DECL. DECL originally lived in
4956 ID->src_fn; set up COPY's abstract origin, debug bits and, for ordinary
4957 automatic locals, its context in ID->dst_fn. */
4958
4959 static tree
4960 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4961 {
4962 /* Don't generate debug information for the copy if we wouldn't have
4963 generated it for the original either. */
4964 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4965 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4966
4967 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4968 declaration inspired this copy. */
4969 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4970
4971 /* The new variable/label has no RTL, yet. */
4972 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4973 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4974 SET_DECL_RTL (copy, 0);
4975
4976 /* These args would always appear unused, if not for this. */
4977 TREE_USED (copy) = 1;
4978
4979 /* Set the context for the new declaration. */
4980 if (!DECL_CONTEXT (decl))
4981 /* Globals stay global. */
4982 ;
4983 else if (DECL_CONTEXT (decl) != id->src_fn)
4984 /* Things that weren't in the scope of the function we're inlining
4985 from aren't in the scope we're inlining to, either. */
4986 ;
4987 else if (TREE_STATIC (decl))
4988 /* Function-scoped static variables should stay in the original
4989 function. */
4990 ;
4991 else
4992 /* Ordinary automatic local variables are now in the scope of the
4993 new function. */
4994 DECL_CONTEXT (copy) = id->dst_fn;
4995
4996 return copy;
4997 }
4998
4999 static tree
5000 copy_decl_to_var (tree decl, copy_body_data *id)
5001 {
5002 tree copy, type;
5003
5004 gcc_assert (TREE_CODE (decl) == PARM_DECL
5005 || TREE_CODE (decl) == RESULT_DECL);
5006
5007 type = TREE_TYPE (decl);
5008
5009 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5010 VAR_DECL, DECL_NAME (decl), type);
5011 if (DECL_PT_UID_SET_P (decl))
5012 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5013 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5014 TREE_READONLY (copy) = TREE_READONLY (decl);
5015 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5016 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5017
5018 return copy_decl_for_dup_finish (id, decl, copy);
5019 }
5020
5021 /* Like copy_decl_to_var, but create a return slot object instead of a
5022 pointer variable for return by invisible reference. */
5023
5024 static tree
5025 copy_result_decl_to_var (tree decl, copy_body_data *id)
5026 {
5027 tree copy, type;
5028
5029 gcc_assert (TREE_CODE (decl) == PARM_DECL
5030 || TREE_CODE (decl) == RESULT_DECL);
5031
5032 type = TREE_TYPE (decl);
5033 if (DECL_BY_REFERENCE (decl))
5034 type = TREE_TYPE (type);
5035
5036 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5037 VAR_DECL, DECL_NAME (decl), type);
5038 if (DECL_PT_UID_SET_P (decl))
5039 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5040 TREE_READONLY (copy) = TREE_READONLY (decl);
5041 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5042 if (!DECL_BY_REFERENCE (decl))
5043 {
5044 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5045 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5046 }
5047
5048 return copy_decl_for_dup_finish (id, decl, copy);
5049 }
5050
5051 tree
5052 copy_decl_no_change (tree decl, copy_body_data *id)
5053 {
5054 tree copy;
5055
5056 copy = copy_node (decl);
5057
5058 /* The COPY is not abstract; it will be generated in DST_FN. */
5059 DECL_ABSTRACT (copy) = 0;
5060 lang_hooks.dup_lang_specific_decl (copy);
5061
5062 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5063 been taken; it's for internal bookkeeping in expand_goto_internal. */
5064 if (TREE_CODE (copy) == LABEL_DECL)
5065 {
5066 TREE_ADDRESSABLE (copy) = 0;
5067 LABEL_DECL_UID (copy) = -1;
5068 }
5069
5070 return copy_decl_for_dup_finish (id, decl, copy);
5071 }
5072
5073 static tree
5074 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5075 {
5076 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5077 return copy_decl_to_var (decl, id);
5078 else
5079 return copy_decl_no_change (decl, id);
5080 }
5081
5082 /* Return a copy of the function's argument tree. */
5083 static tree
5084 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5085 bitmap args_to_skip, tree *vars)
5086 {
5087 tree arg, *parg;
5088 tree new_parm = NULL;
5089 int i = 0;
5090
5091 parg = &new_parm;
5092
5093 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5094 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5095 {
5096 tree new_tree = remap_decl (arg, id);
5097 if (TREE_CODE (new_tree) != PARM_DECL)
5098 new_tree = id->copy_decl (arg, id);
5099 lang_hooks.dup_lang_specific_decl (new_tree);
5100 *parg = new_tree;
5101 parg = &DECL_CHAIN (new_tree);
5102 }
5103 else if (!pointer_map_contains (id->decl_map, arg))
5104 {
5105 /* Make an equivalent VAR_DECL. If the argument is used
5106 as a temporary variable later in the function, its uses will be
5107 replaced by the local variable. */
5108 tree var = copy_decl_to_var (arg, id);
5109 insert_decl_map (id, arg, var);
5110 /* Declare this new variable. */
5111 DECL_CHAIN (var) = *vars;
5112 *vars = var;
5113 }
5114 return new_parm;
5115 }
5116
5117 /* Return a copy of the function's static chain. */
5118 static tree
5119 copy_static_chain (tree static_chain, copy_body_data * id)
5120 {
5121 tree *chain_copy, *pvar;
5122
5123 chain_copy = &static_chain;
5124 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5125 {
5126 tree new_tree = remap_decl (*pvar, id);
5127 lang_hooks.dup_lang_specific_decl (new_tree);
5128 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5129 *pvar = new_tree;
5130 }
5131 return static_chain;
5132 }
5133
5134 /* Return true if the function is allowed to be versioned.
5135 This is a guard for the versioning functionality. */
5136
5137 bool
5138 tree_versionable_function_p (tree fndecl)
5139 {
5140 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5141 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5142 }
5143
5144 /* Delete all unreachable basic blocks and update callgraph.
5145 Doing so is somewhat nontrivial because we need to update all clones and
5146 remove inline functions that become unreachable. */
5147
5148 static bool
5149 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5150 {
5151 bool changed = false;
5152 basic_block b, next_bb;
5153
5154 find_unreachable_blocks ();
5155
5156 /* Delete all unreachable basic blocks. */
5157
5158 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5159 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5160 {
5161 next_bb = b->next_bb;
5162
5163 if (!(b->flags & BB_REACHABLE))
5164 {
5165 gimple_stmt_iterator bsi;
5166
5167 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5168 {
5169 struct cgraph_edge *e;
5170 struct cgraph_node *node;
5171
5172 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5173
5174 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5175 && (e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5176 {
5177 if (!e->inline_failed)
5178 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5179 else
5180 cgraph_remove_edge (e);
5181 }
5182 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5183 && id->dst_node->clones)
5184 for (node = id->dst_node->clones; node != id->dst_node;)
5185 {
5186 node->remove_stmt_references (gsi_stmt (bsi));
5187 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5188 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5189 {
5190 if (!e->inline_failed)
5191 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5192 else
5193 cgraph_remove_edge (e);
5194 }
5195
5196 if (node->clones)
5197 node = node->clones;
5198 else if (node->next_sibling_clone)
5199 node = node->next_sibling_clone;
5200 else
5201 {
5202 while (node != id->dst_node && !node->next_sibling_clone)
5203 node = node->clone_of;
5204 if (node != id->dst_node)
5205 node = node->next_sibling_clone;
5206 }
5207 }
5208 }
5209 delete_basic_block (b);
5210 changed = true;
5211 }
5212 }
5213
5214 return changed;
5215 }
5216
5217 /* Update clone info after duplication. */
5218
5219 static void
5220 update_clone_info (copy_body_data * id)
5221 {
5222 struct cgraph_node *node;
5223 if (!id->dst_node->clones)
5224 return;
5225 for (node = id->dst_node->clones; node != id->dst_node;)
5226 {
5227 /* First update replace maps to match the new body. */
5228 if (node->clone.tree_map)
5229 {
5230 unsigned int i;
5231 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5232 {
5233 struct ipa_replace_map *replace_info;
5234 replace_info = (*node->clone.tree_map)[i];
5235 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5236 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5237 }
5238 }
5239 if (node->clones)
5240 node = node->clones;
5241 else if (node->next_sibling_clone)
5242 node = node->next_sibling_clone;
5243 else
5244 {
5245 while (node != id->dst_node && !node->next_sibling_clone)
5246 node = node->clone_of;
5247 if (node != id->dst_node)
5248 node = node->next_sibling_clone;
5249 }
5250 }
5251 }
5252
5253 /* Create a copy of a function's tree.
5254 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5255 of the original function and the new copied function
5256 respectively. In case we want to replace a DECL
5257 tree with another tree while duplicating the function's
5258 body, TREE_MAP represents the mapping between these
5259 trees. If UPDATE_CLONES is set, the call_stmt fields
5260 of edges of clones of the function will be updated.
5261
5262 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5263 from the new version.
5264 If SKIP_RETURN is true, the new version will return void.
5265 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5266 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5267 */
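/* A minimal illustrative call (hypothetical; in reality the cgraph cloning
code prepares NEW_DECL before getting here), producing a version that
drops parameter 0 and returns void:

bitmap skip = BITMAP_ALLOC (NULL);
bitmap_set_bit (skip, 0);
tree_function_versioning (old_decl, new_decl, NULL, false,
skip, true, NULL, NULL);
*/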
5268 void
5269 tree_function_versioning (tree old_decl, tree new_decl,
5270 vec<ipa_replace_map_p, va_gc> *tree_map,
5271 bool update_clones, bitmap args_to_skip,
5272 bool skip_return, bitmap blocks_to_copy,
5273 basic_block new_entry)
5274 {
5275 struct cgraph_node *old_version_node;
5276 struct cgraph_node *new_version_node;
5277 copy_body_data id;
5278 tree p;
5279 unsigned i;
5280 struct ipa_replace_map *replace_info;
5281 basic_block old_entry_block, bb;
5282 auto_vec<gimple, 10> init_stmts;
5283 tree vars = NULL_TREE;
5284
5285 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5286 && TREE_CODE (new_decl) == FUNCTION_DECL);
5287 DECL_POSSIBLY_INLINED (old_decl) = 1;
5288
5289 old_version_node = cgraph_get_node (old_decl);
5290 gcc_checking_assert (old_version_node);
5291 new_version_node = cgraph_get_node (new_decl);
5292 gcc_checking_assert (new_version_node);
5293
5294 /* Copy over debug args. */
5295 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5296 {
5297 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5298 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5299 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5300 old_debug_args = decl_debug_args_lookup (old_decl);
5301 if (old_debug_args)
5302 {
5303 new_debug_args = decl_debug_args_insert (new_decl);
5304 *new_debug_args = vec_safe_copy (*old_debug_args);
5305 }
5306 }
5307
5308 /* Output the inlining info for this abstract function, since it has been
5309 inlined. If we don't do this now, we can lose the information about the
5310 variables in the function when the blocks get blown away as soon as we
5311 remove the cgraph node. */
5312 (*debug_hooks->outlining_inline_function) (old_decl);
5313
5314 DECL_ARTIFICIAL (new_decl) = 1;
5315 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5316 if (DECL_ORIGIN (old_decl) == old_decl)
5317 old_version_node->used_as_abstract_origin = true;
5318 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5319
5320 /* Prepare the data structures for the tree copy. */
5321 memset (&id, 0, sizeof (id));
5322
5323   /* Collect statements that should be folded once the new body exists.  */
5324 id.statements_to_fold = pointer_set_create ();
5325
5326 id.decl_map = pointer_map_create ();
5327 id.debug_map = NULL;
5328 id.src_fn = old_decl;
5329 id.dst_fn = new_decl;
5330 id.src_node = old_version_node;
5331 id.dst_node = new_version_node;
5332 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5333 id.blocks_to_copy = blocks_to_copy;
5334
5335 id.copy_decl = copy_decl_no_change;
5336 id.transform_call_graph_edges
5337 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5338 id.transform_new_cfg = true;
5339 id.transform_return_to_modify = false;
5340 id.transform_parameter = false;
5341 id.transform_lang_insert_block = NULL;
5342
5343 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5344 (DECL_STRUCT_FUNCTION (old_decl));
5345 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5346 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5347 initialize_cfun (new_decl, old_decl,
5348 old_entry_block->count);
5349 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5350 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5351 = id.src_cfun->gimple_df->ipa_pta;
5352
5353 /* Copy the function's static chain. */
5354 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5355 if (p)
5356 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5357 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5358 &id);
5359
5360 /* If there's a tree_map, prepare for substitution. */
5361 if (tree_map)
5362 for (i = 0; i < tree_map->length (); i++)
5363 {
5364 gimple init;
5365 replace_info = (*tree_map)[i];
5366 if (replace_info->replace_p)
5367 {
5368 if (!replace_info->old_tree)
5369 {
5370 int i = replace_info->parm_num;
5371 tree parm;
5372 tree req_type;
5373
5374 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5375 	      i--;
5376 replace_info->old_tree = parm;
5377 req_type = TREE_TYPE (parm);
5378 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5379 {
5380 if (fold_convertible_p (req_type, replace_info->new_tree))
5381 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5382 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5383 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5384 else
5385 {
5386 if (dump_file)
5387 {
5388 fprintf (dump_file, " const ");
5389 print_generic_expr (dump_file, replace_info->new_tree, 0);
5390 fprintf (dump_file, " can't be converted to param ");
5391 print_generic_expr (dump_file, parm, 0);
5392 fprintf (dump_file, "\n");
5393 }
5394 replace_info->old_tree = NULL;
5395 }
5396 }
5397 }
5398 else
5399 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5400 if (replace_info->old_tree)
5401 {
5402 init = setup_one_parameter (&id, replace_info->old_tree,
5403 replace_info->new_tree, id.src_fn,
5404 NULL,
5405 &vars);
5406 if (init)
5407 init_stmts.safe_push (init);
5408 }
5409 }
5410 }
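  /* A worked example of the conversion logic above (hypothetical values):
     if the selected PARM has type long and REPLACE_INFO->new_tree is the
     int constant 5, fold_convertible_p holds and the replacement becomes
     (long) 5 via a NOP_EXPR; if the two types are merely the same size but
     not value-convertible, the VIEW_CONVERT_EXPR branch reinterprets the
     bits instead; otherwise the substitution is dropped with a note in the
     dump file.  */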
5411 /* Copy the function's arguments. */
5412 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5413 DECL_ARGUMENTS (new_decl) =
5414 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5415 args_to_skip, &vars);
5416
5417 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5418 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5419
5420 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5421
5422 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5423 /* Add local vars. */
5424 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5425
5426 if (DECL_RESULT (old_decl) == NULL_TREE)
5427 ;
5428 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5429 {
5430 DECL_RESULT (new_decl)
5431 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5432 RESULT_DECL, NULL_TREE, void_type_node);
5433 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5434 cfun->returns_struct = 0;
5435 cfun->returns_pcc_struct = 0;
5436 }
5437 else
5438 {
5439 tree old_name;
5440 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5441 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5442 if (gimple_in_ssa_p (id.src_cfun)
5443 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5444 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5445 {
5446 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5447 insert_decl_map (&id, old_name, new_name);
5448 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5449 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5450 }
5451 }
5452
5453   /* Set up the destination function's loop tree.  */
5454 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5455 {
5456 cfun->curr_properties &= ~PROP_loops;
5457 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5458 cfun->curr_properties |= PROP_loops;
5459 }
5460
5461   /* Copy the function's body.  */
5462 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5463 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5464 new_entry);
5465
5466 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5467 number_blocks (new_decl);
5468
5469 /* We want to create the BB unconditionally, so that the addition of
5470 debug stmts doesn't affect BB count, which may in the end cause
5471 codegen differences. */
5472 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5473 while (init_stmts.length ())
5474 insert_init_stmt (&id, bb, init_stmts.pop ());
5475 update_clone_info (&id);
5476
5477 /* Remap the nonlocal_goto_save_area, if any. */
5478 if (cfun->nonlocal_goto_save_area)
5479 {
5480 struct walk_stmt_info wi;
5481
5482 memset (&wi, 0, sizeof (wi));
5483 wi.info = &id;
5484 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5485 }
5486
5487 /* Clean up. */
5488 pointer_map_destroy (id.decl_map);
5489 if (id.debug_map)
5490 pointer_map_destroy (id.debug_map);
5491 free_dominance_info (CDI_DOMINATORS);
5492 free_dominance_info (CDI_POST_DOMINATORS);
5493
5494 fold_marked_statements (0, id.statements_to_fold);
5495 pointer_set_destroy (id.statements_to_fold);
5496 fold_cond_expr_cond ();
5497 delete_unreachable_blocks_update_callgraph (&id);
5498 if (id.dst_node->definition)
5499 cgraph_rebuild_references ();
5500 update_ssa (TODO_update_ssa);
5501
5502   /* After partial cloning we need to rescale frequencies, so that they
5503      fall within the proper range in the cloned function.  */
5504 if (new_entry)
5505 {
5506 struct cgraph_edge *e;
5507 rebuild_frequencies ();
5508
5509 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5510 for (e = new_version_node->callees; e; e = e->next_callee)
5511 {
5512 basic_block bb = gimple_bb (e->call_stmt);
5513 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5514 bb);
5515 e->count = bb->count;
5516 }
5517 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5518 {
5519 basic_block bb = gimple_bb (e->call_stmt);
5520 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5521 bb);
5522 e->count = bb->count;
5523 }
5524 }
5525
5526 free_dominance_info (CDI_DOMINATORS);
5527 free_dominance_info (CDI_POST_DOMINATORS);
5528
5529 gcc_assert (!id.debug_stmts.exists ());
5530 pop_cfun ();
5531 return;
5532 }
5533
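/* A minimal usage sketch for tree_function_versioning (hypothetical caller;
   real callers first create NEW_DECL and its cgraph node, which the asserts
   at the top of the function require):

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 0);     request removal of the first parameter
     tree_function_versioning (old_decl, new_decl,
                               NULL,   no replace map
                               false,  do not update existing clones
                               skip,
                               true,   skip_return: the copy returns void
                               NULL,   copy every basic block
                               NULL);  keep the original entry block
     BITMAP_FREE (skip);  */
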
5534 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5535    the callee and return the inlined body on success.  */
5536
5537 tree
5538 maybe_inline_call_in_expr (tree exp)
5539 {
5540 tree fn = get_callee_fndecl (exp);
5541
5542 /* We can only try to inline "const" functions. */
5543 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5544 {
5545 struct pointer_map_t *decl_map = pointer_map_create ();
5546 call_expr_arg_iterator iter;
5547 copy_body_data id;
5548 tree param, arg, t;
5549
5550 /* Remap the parameters. */
5551 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5552 param;
5553 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5554 *pointer_map_insert (decl_map, param) = arg;
5555
5556 memset (&id, 0, sizeof (id));
5557 id.src_fn = fn;
5558 id.dst_fn = current_function_decl;
5559 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5560 id.decl_map = decl_map;
5561
5562 id.copy_decl = copy_decl_no_change;
5563 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5564 id.transform_new_cfg = false;
5565 id.transform_return_to_modify = true;
5566 id.transform_parameter = true;
5567 id.transform_lang_insert_block = NULL;
5568
5569 /* Make sure not to unshare trees behind the front-end's back
5570 since front-end specific mechanisms may rely on sharing. */
5571 id.regimplify = false;
5572 id.do_not_unshare = true;
5573
5574 /* We're not inside any EH region. */
5575 id.eh_lp_nr = 0;
5576
5577 t = copy_tree_body (&id);
5578 pointer_map_destroy (decl_map);
5579
5580 /* We can only return something suitable for use in a GENERIC
5581 expression tree. */
5582 if (TREE_CODE (t) == MODIFY_EXPR)
5583 return TREE_OPERAND (t, 1);
5584 }
5585
5586 return NULL_TREE;
5587 }
5588
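/* Usage sketch for maybe_inline_call_in_expr (hypothetical): a front end
   holding a GENERIC CALL_EXPR to a "const" function whose body is still
   available can try to fold the call away:

     tree folded = maybe_inline_call_in_expr (call);
     if (folded != NULL_TREE)
       call = folded;

   On failure NULL_TREE is returned and the original call is left intact.  */
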
5589 /* Duplicate a type, fields and all. */
5590
5591 tree
5592 build_duplicate_type (tree type)
5593 {
5594 struct copy_body_data id;
5595
5596 memset (&id, 0, sizeof (id));
5597 id.src_fn = current_function_decl;
5598 id.dst_fn = current_function_decl;
5599 id.src_cfun = cfun;
5600 id.decl_map = pointer_map_create ();
5601 id.debug_map = NULL;
5602 id.copy_decl = copy_decl_no_change;
5603
5604 type = remap_type_1 (type, &id);
5605
5606 pointer_map_destroy (id.decl_map);
5607 if (id.debug_map)
5608 pointer_map_destroy (id.debug_map);
5609
5610 TYPE_CANONICAL (type) = type;
5611
5612 return type;
5613 }
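
/* Usage sketch for build_duplicate_type (hypothetical):

     tree dup = build_duplicate_type (TREE_TYPE (some_decl));

   DUP is a structural copy of the type, created through copy_decl_no_change,
   and is made its own TYPE_CANONICAL, so canonical-type comparisons treat it
   as distinct from the original.  */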