1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "intl.h"
39 #include "pointer-set.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "tree-cfg.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-into-ssa.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "function.h"
62 #include "tree-pretty-print.h"
63 #include "except.h"
64 #include "debug.h"
65 #include "ipa-prop.h"
66 #include "value-prof.h"
67 #include "tree-pass.h"
68 #include "target.h"
69 #include "cfgloop.h"
70 #include "builtins.h"
71
72 #include "rtl.h" /* FIXME: For asm_str_count. */
73
74 /* I'm not real happy about this, but we need to handle gimple and
75 non-gimple trees. */
76
77 /* Inlining, Cloning, Versioning, Parallelization
78
79 Inlining: a function body is duplicated, but the PARM_DECLs are
80 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
81 MODIFY_EXPRs that store to a dedicated returned-value variable.
82 The duplicated eh_region info of the copy will later be appended
83 to the info for the caller; the eh_region info in copied throwing
84 statements and RESX statements are adjusted accordingly.
85
86 Cloning: (only in C++) We have one body for a con/de/structor, and
87 multiple function decls, each with a unique parameter list.
88 Duplicate the body, using the given splay tree; some parameters
89 will become constants (like 0 or 1).
90
91 Versioning: a function body is duplicated and the result is a new
92 function, rather than being copied into blocks of an existing function
93 as with inlining. Some parameters will become constants.
94
95 Parallelization: a region of a function is duplicated resulting in
96 a new function. Variables may be replaced with complex expressions
97 to enable shared variable semantics.
98
99 All of these will simultaneously look up any callgraph edges. If
100 we're going to inline the duplicated function body, and the given
101 function has some cloned callgraph nodes (one for each place this
102 function will be inlined), those callgraph edges will be duplicated.
103 If we're cloning the body, those callgraph edges will be
104 updated to point into the new body. (Note that the original
105 callgraph node and edge list will not be altered.)
106
107 See the CALL_EXPR handling case in copy_tree_body_r (). */
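
/* As an illustration only (example names invented, not taken from this
   file): inlining

     int inc (int x) { return x + 1; }

   into a caller statement "r = inc (a);" conceptually duplicates the body
   with the PARM_DECL x remapped to a new local variable and the RETURN_EXPR
   turned into an assignment to a returned-value variable:

     x.0 = a;
     retval.1 = x.0 + 1;
     r = retval.1;

   The branch semantics of the return are reconstructed by manipulating the
   CFG (see remap_gimple_stmt below). */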
108
109 /* To Do:
110
111 o In order to make inlining-on-trees work, we pessimized
112 function-local static constants. In particular, they are now
113 always output, even when not addressed. Fix this by treating
114 function-local static constants just like global static
115 constants; the back-end already knows not to output them if they
116 are not needed.
117
118 o Provide heuristics to clamp inlining of recursive template
119 calls? */
120
121
122 /* Weights that estimate_num_insns uses to estimate the size of the
123 produced code. */
124
125 eni_weights eni_size_weights;
126
127 /* Weights that estimate_num_insns uses to estimate the time necessary
128 to execute the produced code. */
129
130 eni_weights eni_time_weights;
131
132 /* Prototypes. */
133
134 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
135 static void remap_block (tree *, copy_body_data *);
136 static void copy_bind_expr (tree *, int *, copy_body_data *);
137 static void declare_inline_vars (tree, tree);
138 static void remap_save_expr (tree *, void *, int *);
139 static void prepend_lexical_block (tree current_block, tree new_block);
140 static tree copy_decl_to_var (tree, copy_body_data *);
141 static tree copy_result_decl_to_var (tree, copy_body_data *);
142 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
143 static gimple remap_gimple_stmt (gimple, copy_body_data *);
144 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
145
146 /* Insert a tree->tree mapping for ID. Although the name suggests
147 that the trees should be variables, it is used for more than that. */
148
149 void
150 insert_decl_map (copy_body_data *id, tree key, tree value)
151 {
152 *pointer_map_insert (id->decl_map, key) = value;
153
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 *pointer_map_insert (id->decl_map, value) = value;
158 }
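
/* For example (illustrative): remapping a PARM_DECL P to its replacement
   VAR_DECL V records both P -> V and the identity mapping V -> V in
   id->decl_map, so that a later walk over the copied body that reaches V
   does not try to duplicate it a second time. */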
159
160 /* Insert a tree->tree mapping for ID. This is only used for
161 variables. */
162
163 static void
164 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
165 {
166 if (!gimple_in_ssa_p (id->src_cfun))
167 return;
168
169 if (!MAY_HAVE_DEBUG_STMTS)
170 return;
171
172 if (!target_for_debug_bind (key))
173 return;
174
175 gcc_assert (TREE_CODE (key) == PARM_DECL);
176 gcc_assert (TREE_CODE (value) == VAR_DECL);
177
178 if (!id->debug_map)
179 id->debug_map = pointer_map_create ();
180
181 *pointer_map_insert (id->debug_map, key) = value;
182 }
183
184 /* If nonzero, we're remapping the contents of inlined debug
185 statements. If negative, an error has occurred, such as a
186 reference to a variable that isn't available in the inlined
187 context. */
188 static int processing_debug_stmt = 0;
189
190 /* Construct new SSA name for old NAME. ID is the inline context. */
191
192 static tree
193 remap_ssa_name (tree name, copy_body_data *id)
194 {
195 tree new_tree, var;
196 tree *n;
197
198 gcc_assert (TREE_CODE (name) == SSA_NAME);
199
200 n = (tree *) pointer_map_contains (id->decl_map, name);
201 if (n)
202 return unshare_expr (*n);
203
204 if (processing_debug_stmt)
205 {
206 if (SSA_NAME_IS_DEFAULT_DEF (name)
207 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
208 && id->entry_bb == NULL
209 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
210 {
211 tree vexpr = make_node (DEBUG_EXPR_DECL);
212 gimple def_temp;
213 gimple_stmt_iterator gsi;
214 tree val = SSA_NAME_VAR (name);
215
216 n = (tree *) pointer_map_contains (id->decl_map, val);
217 if (n != NULL)
218 val = *n;
219 if (TREE_CODE (val) != PARM_DECL)
220 {
221 processing_debug_stmt = -1;
222 return name;
223 }
224 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
225 DECL_ARTIFICIAL (vexpr) = 1;
226 TREE_TYPE (vexpr) = TREE_TYPE (name);
227 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
228 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
229 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
230 return vexpr;
231 }
232
233 processing_debug_stmt = -1;
234 return name;
235 }
236
237 /* Remap anonymous SSA names or SSA names of anonymous decls. */
238 var = SSA_NAME_VAR (name);
239 if (!var
240 || (!SSA_NAME_IS_DEFAULT_DEF (name)
241 && TREE_CODE (var) == VAR_DECL
242 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
243 && DECL_ARTIFICIAL (var)
244 && DECL_IGNORED_P (var)
245 && !DECL_NAME (var)))
246 {
247 struct ptr_info_def *pi;
248 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
249 if (!var && SSA_NAME_IDENTIFIER (name))
250 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
251 insert_decl_map (id, name, new_tree);
252 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
253 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
254 /* At least IPA points-to info can be directly transferred. */
255 if (id->src_cfun->gimple_df
256 && id->src_cfun->gimple_df->ipa_pta
257 && (pi = SSA_NAME_PTR_INFO (name))
258 && !pi->pt.anything)
259 {
260 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
261 new_pi->pt = pi->pt;
262 }
263 return new_tree;
264 }
265
266 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
267 in copy_bb. */
268 new_tree = remap_decl (var, id);
269
270 /* We might've substituted a constant or another SSA_NAME for
271 the variable.
272
273 Replace the SSA name representing the RESULT_DECL by the variable during
274 inlining: this saves us from the need to introduce a PHI node in case
275 the return value is only partly initialized. */
276 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
277 && (!SSA_NAME_VAR (name)
278 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
279 || !id->transform_return_to_modify))
280 {
281 struct ptr_info_def *pi;
282 new_tree = make_ssa_name (new_tree, NULL);
283 insert_decl_map (id, name, new_tree);
284 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286 /* At least IPA points-to info can be directly transferred. */
287 if (id->src_cfun->gimple_df
288 && id->src_cfun->gimple_df->ipa_pta
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 {
297 /* By inlining a function having an uninitialized variable, we might
298 extend its lifetime (the variable might get reused). This causes an
299 ICE in case we end up extending the lifetime of an SSA name across
300 an abnormal edge, and it also increases register pressure.
301
302 We simply initialize all uninitialized vars by 0, except
303 in the case where we are inlining into the very first BB. We can avoid
304 this for all BBs that are not inside strongly connected
305 regions of the CFG, but this is expensive to test. */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 {
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 }
322 else
323 {
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 }
327 }
328 }
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
332 }
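
/* Example (SSA names invented for this sketch): an uninitialized default
   definition i_2(D) occurring in an abnormal PHI of the source body is
   remapped to a fresh name, say i_7, and an explicit assignment

     i_7 = 0;

   is appended to id->entry_bb, so the copy is no longer a default
   definition and the lifetime problems described above are avoided. */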
333
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
335
336 tree
337 remap_decl (tree decl, copy_body_data *id)
338 {
339 tree *n;
340
341 /* We only remap local variables in the current function. */
342
343 /* See if we have remapped this declaration. */
344
345 n = (tree *) pointer_map_contains (id->decl_map, decl);
346
347 if (!n && processing_debug_stmt)
348 {
349 processing_debug_stmt = -1;
350 return decl;
351 }
352
353 /* If we didn't already have an equivalent for this declaration,
354 create one now. */
355 if (!n)
356 {
357 /* Make a copy of the variable or label. */
358 tree t = id->copy_decl (decl, id);
359
360 /* Remember it, so that if we encounter this local entity again
361 we can reuse this copy. Do this early because remap_type may
362 need this decl for TYPE_STUB_DECL. */
363 insert_decl_map (id, decl, t);
364
365 if (!DECL_P (t))
366 return t;
367
368 /* Remap types, if necessary. */
369 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
370 if (TREE_CODE (t) == TYPE_DECL)
371 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
372
373 /* Remap sizes as necessary. */
374 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
375 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
376
377 /* If fields, do likewise for offset and qualifier. */
378 if (TREE_CODE (t) == FIELD_DECL)
379 {
380 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
381 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
382 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
383 }
384
385 return t;
386 }
387
388 if (id->do_not_unshare)
389 return *n;
390 else
391 return unshare_expr (*n);
392 }
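
/* For example: the first time a local declaration such as "int tmp" from
   the source function is seen, id->copy_decl creates the copy, the pair is
   recorded in id->decl_map (early, because remap_type may need it for
   TYPE_STUB_DECL), its type and size trees are remapped, and the copy is
   returned. Subsequent lookups return the recorded copy, unshared unless
   id->do_not_unshare is set. */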
393
394 static tree
395 remap_type_1 (tree type, copy_body_data *id)
396 {
397 tree new_tree, t;
398
399 /* We do need a copy. Build and register it now. If this is a pointer or
400 reference type, remap the designated type and make a new pointer or
401 reference type. */
402 if (TREE_CODE (type) == POINTER_TYPE)
403 {
404 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
405 TYPE_MODE (type),
406 TYPE_REF_CAN_ALIAS_ALL (type));
407 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
408 new_tree = build_type_attribute_qual_variant (new_tree,
409 TYPE_ATTRIBUTES (type),
410 TYPE_QUALS (type));
411 insert_decl_map (id, type, new_tree);
412 return new_tree;
413 }
414 else if (TREE_CODE (type) == REFERENCE_TYPE)
415 {
416 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
417 TYPE_MODE (type),
418 TYPE_REF_CAN_ALIAS_ALL (type));
419 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
420 new_tree = build_type_attribute_qual_variant (new_tree,
421 TYPE_ATTRIBUTES (type),
422 TYPE_QUALS (type));
423 insert_decl_map (id, type, new_tree);
424 return new_tree;
425 }
426 else
427 new_tree = copy_node (type);
428
429 insert_decl_map (id, type, new_tree);
430
431 /* This is a new type, not a copy of an old type. Need to reassociate
432 variants. We can handle everything except the main variant lazily. */
433 t = TYPE_MAIN_VARIANT (type);
434 if (type != t)
435 {
436 t = remap_type (t, id);
437 TYPE_MAIN_VARIANT (new_tree) = t;
438 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
439 TYPE_NEXT_VARIANT (t) = new_tree;
440 }
441 else
442 {
443 TYPE_MAIN_VARIANT (new_tree) = new_tree;
444 TYPE_NEXT_VARIANT (new_tree) = NULL;
445 }
446
447 if (TYPE_STUB_DECL (type))
448 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
449
450 /* Lazily create pointer and reference types. */
451 TYPE_POINTER_TO (new_tree) = NULL;
452 TYPE_REFERENCE_TO (new_tree) = NULL;
453
454 switch (TREE_CODE (new_tree))
455 {
456 case INTEGER_TYPE:
457 case REAL_TYPE:
458 case FIXED_POINT_TYPE:
459 case ENUMERAL_TYPE:
460 case BOOLEAN_TYPE:
461 t = TYPE_MIN_VALUE (new_tree);
462 if (t && TREE_CODE (t) != INTEGER_CST)
463 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
464
465 t = TYPE_MAX_VALUE (new_tree);
466 if (t && TREE_CODE (t) != INTEGER_CST)
467 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
468 return new_tree;
469
470 case FUNCTION_TYPE:
471 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
472 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
473 return new_tree;
474
475 case ARRAY_TYPE:
476 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
477 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
478 break;
479
480 case RECORD_TYPE:
481 case UNION_TYPE:
482 case QUAL_UNION_TYPE:
483 {
484 tree f, nf = NULL;
485
486 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
487 {
488 t = remap_decl (f, id);
489 DECL_CONTEXT (t) = new_tree;
490 DECL_CHAIN (t) = nf;
491 nf = t;
492 }
493 TYPE_FIELDS (new_tree) = nreverse (nf);
494 }
495 break;
496
497 case OFFSET_TYPE:
498 default:
499 /* Shouldn't have been thought variable sized. */
500 gcc_unreachable ();
501 }
502
503 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
504 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
505
506 return new_tree;
507 }
508
509 tree
510 remap_type (tree type, copy_body_data *id)
511 {
512 tree *node;
513 tree tmp;
514
515 if (type == NULL)
516 return type;
517
518 /* See if we have remapped this type. */
519 node = (tree *) pointer_map_contains (id->decl_map, type);
520 if (node)
521 return *node;
522
523 /* The type only needs remapping if it's variably modified. */
524 if (! variably_modified_type_p (type, id->src_fn))
525 {
526 insert_decl_map (id, type, type);
527 return type;
528 }
529
530 id->remapping_type_depth++;
531 tmp = remap_type_1 (type, id);
532 id->remapping_type_depth--;
533
534 return tmp;
535 }
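
/* For example (hypothetical declarations): in "void f (int n) { int a[n]; }"
   the type of a is variably modified, since its domain depends on the
   PARM_DECL n; remap_type_1 therefore builds a fresh ARRAY_TYPE and walks
   its size trees with copy_tree_body_r so they refer to the remapped n.
   A type such as plain "int" is not variably modified and maps to itself. */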
536
537 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
538
539 static bool
540 can_be_nonlocal (tree decl, copy_body_data *id)
541 {
542 /* We cannot duplicate function decls. */
543 if (TREE_CODE (decl) == FUNCTION_DECL)
544 return true;
545
546 /* Local static vars must be non-local or we get multiple declaration
547 problems. */
548 if (TREE_CODE (decl) == VAR_DECL
549 && !auto_var_in_fn_p (decl, id->src_fn))
550 return true;
551
552 return false;
553 }
554
555 static tree
556 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
557 copy_body_data *id)
558 {
559 tree old_var;
560 tree new_decls = NULL_TREE;
561
562 /* Remap its variables. */
563 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
564 {
565 tree new_var;
566
567 if (can_be_nonlocal (old_var, id))
568 {
569 /* We need to add this variable to the local decls as otherwise
570 nothing else will do so. */
571 if (TREE_CODE (old_var) == VAR_DECL
572 && ! DECL_EXTERNAL (old_var))
573 add_local_decl (cfun, old_var);
574 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
575 && !DECL_IGNORED_P (old_var)
576 && nonlocalized_list)
577 vec_safe_push (*nonlocalized_list, old_var);
578 continue;
579 }
580
581 /* Remap the variable. */
582 new_var = remap_decl (old_var, id);
583
584 /* If we didn't remap this variable, we can't mess with its
585 TREE_CHAIN. If we remapped this variable to the return slot, it's
586 already declared somewhere else, so don't declare it here. */
587
588 if (new_var == id->retvar)
589 ;
590 else if (!new_var)
591 {
592 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
593 && !DECL_IGNORED_P (old_var)
594 && nonlocalized_list)
595 vec_safe_push (*nonlocalized_list, old_var);
596 }
597 else
598 {
599 gcc_assert (DECL_P (new_var));
600 DECL_CHAIN (new_var) = new_decls;
601 new_decls = new_var;
602
603 /* Also copy value-expressions. */
604 if (TREE_CODE (new_var) == VAR_DECL
605 && DECL_HAS_VALUE_EXPR_P (new_var))
606 {
607 tree tem = DECL_VALUE_EXPR (new_var);
608 bool old_regimplify = id->regimplify;
609 id->remapping_type_depth++;
610 walk_tree (&tem, copy_tree_body_r, id, NULL);
611 id->remapping_type_depth--;
612 id->regimplify = old_regimplify;
613 SET_DECL_VALUE_EXPR (new_var, tem);
614 }
615 }
616 }
617
618 return nreverse (new_decls);
619 }
620
621 /* Copy the BLOCK to contain remapped versions of the variables
622 therein. And hook the new block into the block-tree. */
623
624 static void
625 remap_block (tree *block, copy_body_data *id)
626 {
627 tree old_block;
628 tree new_block;
629
630 /* Make the new block. */
631 old_block = *block;
632 new_block = make_node (BLOCK);
633 TREE_USED (new_block) = TREE_USED (old_block);
634 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
635 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
636 BLOCK_NONLOCALIZED_VARS (new_block)
637 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
638 *block = new_block;
639
640 /* Remap its variables. */
641 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
642 &BLOCK_NONLOCALIZED_VARS (new_block),
643 id);
644
645 if (id->transform_lang_insert_block)
646 id->transform_lang_insert_block (new_block);
647
648 /* Remember the remapped block. */
649 insert_decl_map (id, old_block, new_block);
650 }
651
652 /* Copy the whole block tree and root it in id->block. */
653 static tree
654 remap_blocks (tree block, copy_body_data *id)
655 {
656 tree t;
657 tree new_tree = block;
658
659 if (!block)
660 return NULL;
661
662 remap_block (&new_tree, id);
663 gcc_assert (new_tree != block);
664 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
665 prepend_lexical_block (new_tree, remap_blocks (t, id));
666 /* Blocks are in arbitrary order, but make things slightly prettier and do
667 not swap order when producing a copy. */
668 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
669 return new_tree;
670 }
671
672 /* Remap the block tree rooted at BLOCK to nothing. */
673 static void
674 remap_blocks_to_null (tree block, copy_body_data *id)
675 {
676 tree t;
677 insert_decl_map (id, block, NULL_TREE);
678 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
679 remap_blocks_to_null (t, id);
680 }
681
682 static void
683 copy_statement_list (tree *tp)
684 {
685 tree_stmt_iterator oi, ni;
686 tree new_tree;
687
688 new_tree = alloc_stmt_list ();
689 ni = tsi_start (new_tree);
690 oi = tsi_start (*tp);
691 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
692 *tp = new_tree;
693
694 for (; !tsi_end_p (oi); tsi_next (&oi))
695 {
696 tree stmt = tsi_stmt (oi);
697 if (TREE_CODE (stmt) == STATEMENT_LIST)
698 /* This copy is not redundant; tsi_link_after will smash this
699 STATEMENT_LIST into the end of the one we're building, and we
700 don't want to do that with the original. */
701 copy_statement_list (&stmt);
702 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
703 }
704 }
705
706 static void
707 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
708 {
709 tree block = BIND_EXPR_BLOCK (*tp);
710 /* Copy (and replace) the statement. */
711 copy_tree_r (tp, walk_subtrees, NULL);
712 if (block)
713 {
714 remap_block (&block, id);
715 BIND_EXPR_BLOCK (*tp) = block;
716 }
717
718 if (BIND_EXPR_VARS (*tp))
719 /* This will remap a lot of the same decls again, but this should be
720 harmless. */
721 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
722 }
723
724
725 /* Create a new gimple_seq by remapping all the statements in BODY
726 using the inlining information in ID. */
727
728 static gimple_seq
729 remap_gimple_seq (gimple_seq body, copy_body_data *id)
730 {
731 gimple_stmt_iterator si;
732 gimple_seq new_body = NULL;
733
734 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
735 {
736 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
737 gimple_seq_add_stmt (&new_body, new_stmt);
738 }
739
740 return new_body;
741 }
742
743
744 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
745 block using the mapping information in ID. */
746
747 static gimple
748 copy_gimple_bind (gimple stmt, copy_body_data *id)
749 {
750 gimple new_bind;
751 tree new_block, new_vars;
752 gimple_seq body, new_body;
753
754 /* Copy the statement. Note that we purposely don't use copy_stmt
755 here because we need to remap statements as we copy. */
756 body = gimple_bind_body (stmt);
757 new_body = remap_gimple_seq (body, id);
758
759 new_block = gimple_bind_block (stmt);
760 if (new_block)
761 remap_block (&new_block, id);
762
763 /* This will remap a lot of the same decls again, but this should be
764 harmless. */
765 new_vars = gimple_bind_vars (stmt);
766 if (new_vars)
767 new_vars = remap_decls (new_vars, NULL, id);
768
769 new_bind = gimple_build_bind (new_vars, new_body, new_block);
770
771 return new_bind;
772 }
773
774 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
775
776 static bool
777 is_parm (tree decl)
778 {
779 if (TREE_CODE (decl) == SSA_NAME)
780 {
781 decl = SSA_NAME_VAR (decl);
782 if (!decl)
783 return false;
784 }
785
786 return (TREE_CODE (decl) == PARM_DECL);
787 }
788
789 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
790 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
791 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
792 recursing into the child nodes of *TP. */
793
794 static tree
795 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
796 {
797 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
798 copy_body_data *id = (copy_body_data *) wi_p->info;
799 tree fn = id->src_fn;
800
801 if (TREE_CODE (*tp) == SSA_NAME)
802 {
803 *tp = remap_ssa_name (*tp, id);
804 *walk_subtrees = 0;
805 return NULL;
806 }
807 else if (auto_var_in_fn_p (*tp, fn))
808 {
809 /* Local variables and labels need to be replaced by equivalent
810 variables. We don't want to copy static variables; there's
811 only one of those, no matter how many times we inline the
812 containing function. Similarly for globals from an outer
813 function. */
814 tree new_decl;
815
816 /* Remap the declaration. */
817 new_decl = remap_decl (*tp, id);
818 gcc_assert (new_decl);
819 /* Replace this variable with the copy. */
820 STRIP_TYPE_NOPS (new_decl);
821 /* ??? The C++ frontend uses void * pointer zero to initialize
822 any other type. This confuses the middle-end type verification.
823 As cloned bodies do not go through gimplification again the fixup
824 there doesn't trigger. */
825 if (TREE_CODE (new_decl) == INTEGER_CST
826 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
827 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
828 *tp = new_decl;
829 *walk_subtrees = 0;
830 }
831 else if (TREE_CODE (*tp) == STATEMENT_LIST)
832 gcc_unreachable ();
833 else if (TREE_CODE (*tp) == SAVE_EXPR)
834 gcc_unreachable ();
835 else if (TREE_CODE (*tp) == LABEL_DECL
836 && (!DECL_CONTEXT (*tp)
837 || decl_function_context (*tp) == id->src_fn))
838 /* These may need to be remapped for EH handling. */
839 *tp = remap_decl (*tp, id);
840 else if (TREE_CODE (*tp) == FIELD_DECL)
841 {
842 /* If the enclosing record type is variably_modified_type_p, the field
843 has already been remapped. Otherwise, it need not be. */
844 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
845 if (n)
846 *tp = *n;
847 *walk_subtrees = 0;
848 }
849 else if (TYPE_P (*tp))
850 /* Types may need remapping as well. */
851 *tp = remap_type (*tp, id);
852 else if (CONSTANT_CLASS_P (*tp))
853 {
854 /* If this is a constant, we have to copy the node iff the type
855 will be remapped. copy_tree_r will not copy a constant. */
856 tree new_type = remap_type (TREE_TYPE (*tp), id);
857
858 if (new_type == TREE_TYPE (*tp))
859 *walk_subtrees = 0;
860
861 else if (TREE_CODE (*tp) == INTEGER_CST)
862 *tp = wide_int_to_tree (new_type, *tp);
863 else
864 {
865 *tp = copy_node (*tp);
866 TREE_TYPE (*tp) = new_type;
867 }
868 }
869 else
870 {
871 /* Otherwise, just copy the node. Note that copy_tree_r already
872 knows not to copy VAR_DECLs, etc., so this is safe. */
873
874 if (TREE_CODE (*tp) == MEM_REF)
875 {
876 /* We need to re-canonicalize MEM_REFs from inline substitutions
877 that can happen when a pointer argument is an ADDR_EXPR.
878 Recurse here manually to allow that. */
879 tree ptr = TREE_OPERAND (*tp, 0);
880 tree type = remap_type (TREE_TYPE (*tp), id);
881 tree old = *tp;
882 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
883 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
884 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
885 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
886 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
887 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
888 remapped a parameter as the property might be valid only
889 for the parameter itself. */
890 if (TREE_THIS_NOTRAP (old)
891 && (!is_parm (TREE_OPERAND (old, 0))
892 || (!id->transform_parameter && is_parm (ptr))))
893 TREE_THIS_NOTRAP (*tp) = 1;
894 *walk_subtrees = 0;
895 return NULL;
896 }
897
898 /* Here is the "usual case". Copy this tree node, and then
899 tweak some special cases. */
900 copy_tree_r (tp, walk_subtrees, NULL);
901
902 if (TREE_CODE (*tp) != OMP_CLAUSE)
903 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
904
905 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
906 {
907 /* The copied TARGET_EXPR has never been expanded, even if the
908 original node was expanded already. */
909 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
910 TREE_OPERAND (*tp, 3) = NULL_TREE;
911 }
912 else if (TREE_CODE (*tp) == ADDR_EXPR)
913 {
914 /* Variable substitution need not be simple. In particular,
915 the MEM_REF substitution above. Make sure that
916 TREE_CONSTANT and friends are up-to-date. */
917 int invariant = is_gimple_min_invariant (*tp);
918 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
919 recompute_tree_invariant_for_addr_expr (*tp);
920
921 /* If this used to be invariant, but is not any longer,
922 then regimplification is probably needed. */
923 if (invariant && !is_gimple_min_invariant (*tp))
924 id->regimplify = true;
925
926 *walk_subtrees = 0;
927 }
928 }
929
930 /* Update the TREE_BLOCK for the cloned expr. */
931 if (EXPR_P (*tp))
932 {
933 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
934 tree old_block = TREE_BLOCK (*tp);
935 if (old_block)
936 {
937 tree *n;
938 n = (tree *) pointer_map_contains (id->decl_map,
939 TREE_BLOCK (*tp));
940 if (n)
941 new_block = *n;
942 }
943 TREE_SET_BLOCK (*tp, new_block);
944 }
945
946 /* Keep iterating. */
947 return NULL_TREE;
948 }
949
950
951 /* Called from copy_body_id via walk_tree. DATA is really a
952 `copy_body_data *'. */
953
954 tree
955 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
956 {
957 copy_body_data *id = (copy_body_data *) data;
958 tree fn = id->src_fn;
959 tree new_block;
960
961 /* Begin by recognizing trees that we'll completely rewrite for the
962 inlining context. Our output for these trees is completely
963 different from our input (e.g. RETURN_EXPR is deleted, and morphs
964 into an edge). Further down, we'll handle trees that get
965 duplicated and/or tweaked. */
966
967 /* When requested, RETURN_EXPRs should be transformed to just the
968 contained MODIFY_EXPR. The branch semantics of the return will
969 be handled elsewhere by manipulating the CFG rather than a statement. */
970 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
971 {
972 tree assignment = TREE_OPERAND (*tp, 0);
973
974 /* If we're returning something, just turn that into an
975 assignment into the equivalent of the original RESULT_DECL.
976 If the "assignment" is just the result decl, the result
977 decl has already been set (e.g. a recent "foo (&result_decl,
978 ...)"); just toss the entire RETURN_EXPR. */
979 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
980 {
981 /* Replace the RETURN_EXPR with (a copy of) the
982 MODIFY_EXPR hanging underneath. */
983 *tp = copy_node (assignment);
984 }
985 else /* Else the RETURN_EXPR returns no value. */
986 {
987 *tp = NULL;
988 return (tree) (void *)1;
989 }
990 }
991 else if (TREE_CODE (*tp) == SSA_NAME)
992 {
993 *tp = remap_ssa_name (*tp, id);
994 *walk_subtrees = 0;
995 return NULL;
996 }
997
998 /* Local variables and labels need to be replaced by equivalent
999 variables. We don't want to copy static variables; there's only
1000 one of those, no matter how many times we inline the containing
1001 function. Similarly for globals from an outer function. */
1002 else if (auto_var_in_fn_p (*tp, fn))
1003 {
1004 tree new_decl;
1005
1006 /* Remap the declaration. */
1007 new_decl = remap_decl (*tp, id);
1008 gcc_assert (new_decl);
1009 /* Replace this variable with the copy. */
1010 STRIP_TYPE_NOPS (new_decl);
1011 *tp = new_decl;
1012 *walk_subtrees = 0;
1013 }
1014 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1015 copy_statement_list (tp);
1016 else if (TREE_CODE (*tp) == SAVE_EXPR
1017 || TREE_CODE (*tp) == TARGET_EXPR)
1018 remap_save_expr (tp, id->decl_map, walk_subtrees);
1019 else if (TREE_CODE (*tp) == LABEL_DECL
1020 && (! DECL_CONTEXT (*tp)
1021 || decl_function_context (*tp) == id->src_fn))
1022 /* These may need to be remapped for EH handling. */
1023 *tp = remap_decl (*tp, id);
1024 else if (TREE_CODE (*tp) == BIND_EXPR)
1025 copy_bind_expr (tp, walk_subtrees, id);
1026 /* Types may need remapping as well. */
1027 else if (TYPE_P (*tp))
1028 *tp = remap_type (*tp, id);
1029
1030 /* If this is a constant, we have to copy the node iff the type will be
1031 remapped. copy_tree_r will not copy a constant. */
1032 else if (CONSTANT_CLASS_P (*tp))
1033 {
1034 tree new_type = remap_type (TREE_TYPE (*tp), id);
1035
1036 if (new_type == TREE_TYPE (*tp))
1037 *walk_subtrees = 0;
1038
1039 else if (TREE_CODE (*tp) == INTEGER_CST)
1040 *tp = wide_int_to_tree (new_type, *tp);
1041 else
1042 {
1043 *tp = copy_node (*tp);
1044 TREE_TYPE (*tp) = new_type;
1045 }
1046 }
1047
1048 /* Otherwise, just copy the node. Note that copy_tree_r already
1049 knows not to copy VAR_DECLs, etc., so this is safe. */
1050 else
1051 {
1052 /* Here we handle trees that are not completely rewritten.
1053 First we detect some inlining-induced bogosities for
1054 discarding. */
1055 if (TREE_CODE (*tp) == MODIFY_EXPR
1056 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1057 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1058 {
1059 /* Some assignments VAR = VAR; don't generate any rtl code
1060 and thus don't count as variable modification. Avoid
1061 keeping bogosities like 0 = 0. */
1062 tree decl = TREE_OPERAND (*tp, 0), value;
1063 tree *n;
1064
1065 n = (tree *) pointer_map_contains (id->decl_map, decl);
1066 if (n)
1067 {
1068 value = *n;
1069 STRIP_TYPE_NOPS (value);
1070 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1071 {
1072 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1073 return copy_tree_body_r (tp, walk_subtrees, data);
1074 }
1075 }
1076 }
1077 else if (TREE_CODE (*tp) == INDIRECT_REF)
1078 {
1079 /* Get rid of *& from inline substitutions that can happen when a
1080 pointer argument is an ADDR_EXPR. */
1081 tree decl = TREE_OPERAND (*tp, 0);
1082 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1083 if (n)
1084 {
1085 /* If we happen to get an ADDR_EXPR in n->value, strip
1086 it manually here as we'll eventually get ADDR_EXPRs
1087 which lie about their types pointed to. In this case
1088 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1089 but we absolutely rely on that. As fold_indirect_ref
1090 does other useful transformations, try that first, though. */
1091 tree type = TREE_TYPE (*tp);
1092 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1093 tree old = *tp;
1094 *tp = gimple_fold_indirect_ref (ptr);
1095 if (! *tp)
1096 {
1097 if (TREE_CODE (ptr) == ADDR_EXPR)
1098 {
1099 *tp
1100 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1101 /* ??? We should either assert here or build
1102 a VIEW_CONVERT_EXPR instead of blindly leaking
1103 incompatible types to our IL. */
1104 if (! *tp)
1105 *tp = TREE_OPERAND (ptr, 0);
1106 }
1107 else
1108 {
1109 *tp = build1 (INDIRECT_REF, type, ptr);
1110 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1111 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1112 TREE_READONLY (*tp) = TREE_READONLY (old);
1113 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1114 have remapped a parameter as the property might be
1115 valid only for the parameter itself. */
1116 if (TREE_THIS_NOTRAP (old)
1117 && (!is_parm (TREE_OPERAND (old, 0))
1118 || (!id->transform_parameter && is_parm (ptr))))
1119 TREE_THIS_NOTRAP (*tp) = 1;
1120 }
1121 }
1122 *walk_subtrees = 0;
1123 return NULL;
1124 }
1125 }
1126 else if (TREE_CODE (*tp) == MEM_REF)
1127 {
1128 /* We need to re-canonicalize MEM_REFs from inline substitutions
1129 that can happen when a pointer argument is an ADDR_EXPR.
1130 Recurse here manually to allow that. */
1131 tree ptr = TREE_OPERAND (*tp, 0);
1132 tree type = remap_type (TREE_TYPE (*tp), id);
1133 tree old = *tp;
1134 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1135 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1136 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1137 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1138 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1139 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1140 remapped a parameter as the property might be valid only
1141 for the parameter itself. */
1142 if (TREE_THIS_NOTRAP (old)
1143 && (!is_parm (TREE_OPERAND (old, 0))
1144 || (!id->transform_parameter && is_parm (ptr))))
1145 TREE_THIS_NOTRAP (*tp) = 1;
1146 *walk_subtrees = 0;
1147 return NULL;
1148 }
1149
1150 /* Here is the "usual case". Copy this tree node, and then
1151 tweak some special cases. */
1152 copy_tree_r (tp, walk_subtrees, NULL);
1153
1154 /* If EXPR has a block defined, map it to the newly constructed block.
1155 When inlining we want EXPRs without a block to appear in the block
1156 of the function call if we are not remapping a type. */
1157 if (EXPR_P (*tp))
1158 {
1159 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1160 if (TREE_BLOCK (*tp))
1161 {
1162 tree *n;
1163 n = (tree *) pointer_map_contains (id->decl_map,
1164 TREE_BLOCK (*tp));
1165 if (n)
1166 new_block = *n;
1167 }
1168 TREE_SET_BLOCK (*tp, new_block);
1169 }
1170
1171 if (TREE_CODE (*tp) != OMP_CLAUSE)
1172 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1173
1174 /* The copied TARGET_EXPR has never been expanded, even if the
1175 original node was expanded already. */
1176 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1177 {
1178 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1179 TREE_OPERAND (*tp, 3) = NULL_TREE;
1180 }
1181
1182 /* Variable substitution need not be simple. In particular, the
1183 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1184 and friends are up-to-date. */
1185 else if (TREE_CODE (*tp) == ADDR_EXPR)
1186 {
1187 int invariant = is_gimple_min_invariant (*tp);
1188 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1189
1190 /* Handle the case where we substituted an INDIRECT_REF
1191 into the operand of the ADDR_EXPR. */
1192 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1193 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1194 else
1195 recompute_tree_invariant_for_addr_expr (*tp);
1196
1197 /* If this used to be invariant, but is not any longer,
1198 then regimplification is probably needed. */
1199 if (invariant && !is_gimple_min_invariant (*tp))
1200 id->regimplify = true;
1201
1202 *walk_subtrees = 0;
1203 }
1204 }
1205
1206 /* Keep iterating. */
1207 return NULL_TREE;
1208 }
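
/* For example (illustrative): if the parameter p was bound to &x by the
   inliner, an INDIRECT_REF "*p" in the copied body would read "*&x"; the
   INDIRECT_REF case above folds it back to plain "x" via
   gimple_fold_indirect_ref (or fold_indirect_ref_1 for an ADDR_EXPR). */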
1209
1210 /* Helper for remap_gimple_stmt. Given an EH region number for the
1211 source function, map that to the duplicate EH region number in
1212 the destination function. */
1213
1214 static int
1215 remap_eh_region_nr (int old_nr, copy_body_data *id)
1216 {
1217 eh_region old_r, new_r;
1218 void **slot;
1219
1220 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1221 slot = pointer_map_contains (id->eh_map, old_r);
1222 new_r = (eh_region) *slot;
1223
1224 return new_r->index;
1225 }
1226
1227 /* Similar, but operate on INTEGER_CSTs. */
1228
1229 static tree
1230 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1231 {
1232 int old_nr, new_nr;
1233
1234 old_nr = tree_to_shwi (old_t_nr);
1235 new_nr = remap_eh_region_nr (old_nr, id);
1236
1237 return build_int_cst (integer_type_node, new_nr);
1238 }
1239
1240 /* Helper for copy_bb. Remap statement STMT using the inlining
1241 information in ID. Return the new statement copy. */
1242
1243 static gimple
1244 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1245 {
1246 gimple copy = NULL;
1247 struct walk_stmt_info wi;
1248 bool skip_first = false;
1249
1250 /* Begin by recognizing trees that we'll completely rewrite for the
1251 inlining context. Our output for these trees is completely
1252 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1253 into an edge). Further down, we'll handle trees that get
1254 duplicated and/or tweaked. */
1255
1256 /* When requested, GIMPLE_RETURNs should be transformed to just the
1257 contained GIMPLE_ASSIGN. The branch semantics of the return will
1258 be handled elsewhere by manipulating the CFG rather than the
1259 statement. */
1260 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1261 {
1262 tree retval = gimple_return_retval (stmt);
1263
1264 /* If we're returning something, just turn that into an
1265 assignment into the equivalent of the original RESULT_DECL.
1266 If RETVAL is just the result decl, the result decl has
1267 already been set (e.g. a recent "foo (&result_decl, ...)");
1268 just toss the entire GIMPLE_RETURN. */
1269 if (retval
1270 && (TREE_CODE (retval) != RESULT_DECL
1271 && (TREE_CODE (retval) != SSA_NAME
1272 || ! SSA_NAME_VAR (retval)
1273 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1274 {
1275 copy = gimple_build_assign (id->do_not_unshare
1276 ? id->retvar : unshare_expr (id->retvar),
1277 retval);
1278 /* id->retvar is already substituted. Skip it on later remapping. */
1279 skip_first = true;
1280 }
1281 else
1282 return gimple_build_nop ();
1283 }
1284 else if (gimple_has_substatements (stmt))
1285 {
1286 gimple_seq s1, s2;
1287
1288 /* When cloning bodies from the C++ front end, we will be handed bodies
1289 in High GIMPLE form. Handle here all the High GIMPLE statements that
1290 have embedded statements. */
1291 switch (gimple_code (stmt))
1292 {
1293 case GIMPLE_BIND:
1294 copy = copy_gimple_bind (stmt, id);
1295 break;
1296
1297 case GIMPLE_CATCH:
1298 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1299 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1300 break;
1301
1302 case GIMPLE_EH_FILTER:
1303 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1304 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1305 break;
1306
1307 case GIMPLE_TRY:
1308 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1309 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1310 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1311 break;
1312
1313 case GIMPLE_WITH_CLEANUP_EXPR:
1314 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1315 copy = gimple_build_wce (s1);
1316 break;
1317
1318 case GIMPLE_OMP_PARALLEL:
1319 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1320 copy = gimple_build_omp_parallel
1321 (s1,
1322 gimple_omp_parallel_clauses (stmt),
1323 gimple_omp_parallel_child_fn (stmt),
1324 gimple_omp_parallel_data_arg (stmt));
1325 break;
1326
1327 case GIMPLE_OMP_TASK:
1328 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1329 copy = gimple_build_omp_task
1330 (s1,
1331 gimple_omp_task_clauses (stmt),
1332 gimple_omp_task_child_fn (stmt),
1333 gimple_omp_task_data_arg (stmt),
1334 gimple_omp_task_copy_fn (stmt),
1335 gimple_omp_task_arg_size (stmt),
1336 gimple_omp_task_arg_align (stmt));
1337 break;
1338
1339 case GIMPLE_OMP_FOR:
1340 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1341 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1342 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1343 gimple_omp_for_clauses (stmt),
1344 gimple_omp_for_collapse (stmt), s2);
1345 {
1346 size_t i;
1347 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1348 {
1349 gimple_omp_for_set_index (copy, i,
1350 gimple_omp_for_index (stmt, i));
1351 gimple_omp_for_set_initial (copy, i,
1352 gimple_omp_for_initial (stmt, i));
1353 gimple_omp_for_set_final (copy, i,
1354 gimple_omp_for_final (stmt, i));
1355 gimple_omp_for_set_incr (copy, i,
1356 gimple_omp_for_incr (stmt, i));
1357 gimple_omp_for_set_cond (copy, i,
1358 gimple_omp_for_cond (stmt, i));
1359 }
1360 }
1361 break;
1362
1363 case GIMPLE_OMP_MASTER:
1364 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1365 copy = gimple_build_omp_master (s1);
1366 break;
1367
1368 case GIMPLE_OMP_TASKGROUP:
1369 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1370 copy = gimple_build_omp_taskgroup (s1);
1371 break;
1372
1373 case GIMPLE_OMP_ORDERED:
1374 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1375 copy = gimple_build_omp_ordered (s1);
1376 break;
1377
1378 case GIMPLE_OMP_SECTION:
1379 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1380 copy = gimple_build_omp_section (s1);
1381 break;
1382
1383 case GIMPLE_OMP_SECTIONS:
1384 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1385 copy = gimple_build_omp_sections
1386 (s1, gimple_omp_sections_clauses (stmt));
1387 break;
1388
1389 case GIMPLE_OMP_SINGLE:
1390 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1391 copy = gimple_build_omp_single
1392 (s1, gimple_omp_single_clauses (stmt));
1393 break;
1394
1395 case GIMPLE_OMP_TARGET:
1396 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1397 copy = gimple_build_omp_target
1398 (s1, gimple_omp_target_kind (stmt),
1399 gimple_omp_target_clauses (stmt));
1400 break;
1401
1402 case GIMPLE_OMP_TEAMS:
1403 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1404 copy = gimple_build_omp_teams
1405 (s1, gimple_omp_teams_clauses (stmt));
1406 break;
1407
1408 case GIMPLE_OMP_CRITICAL:
1409 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1410 copy
1411 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1412 break;
1413
1414 case GIMPLE_TRANSACTION:
1415 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1416 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1417 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1418 break;
1419
1420 default:
1421 gcc_unreachable ();
1422 }
1423 }
1424 else
1425 {
1426 if (gimple_assign_copy_p (stmt)
1427 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1428 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1429 {
1430 /* Here we handle statements that are not completely rewritten.
1431 First we detect some inlining-induced bogosities for
1432 discarding. */
1433
1434 /* Some assignments VAR = VAR; don't generate any rtl code
1435 and thus don't count as variable modification. Avoid
1436 keeping bogosities like 0 = 0. */
1437 tree decl = gimple_assign_lhs (stmt), value;
1438 tree *n;
1439
1440 n = (tree *) pointer_map_contains (id->decl_map, decl);
1441 if (n)
1442 {
1443 value = *n;
1444 STRIP_TYPE_NOPS (value);
1445 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1446 return gimple_build_nop ();
1447 }
1448 }
1449
1450 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1451 in a block that we aren't copying during tree_function_versioning,
1452 just drop the clobber stmt. */
1453 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1454 {
1455 tree lhs = gimple_assign_lhs (stmt);
1456 if (TREE_CODE (lhs) == MEM_REF
1457 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1458 {
1459 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1460 if (gimple_bb (def_stmt)
1461 && !bitmap_bit_p (id->blocks_to_copy,
1462 gimple_bb (def_stmt)->index))
1463 return gimple_build_nop ();
1464 }
1465 }
1466
1467 if (gimple_debug_bind_p (stmt))
1468 {
1469 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1470 gimple_debug_bind_get_value (stmt),
1471 stmt);
1472 id->debug_stmts.safe_push (copy);
1473 return copy;
1474 }
1475 if (gimple_debug_source_bind_p (stmt))
1476 {
1477 copy = gimple_build_debug_source_bind
1478 (gimple_debug_source_bind_get_var (stmt),
1479 gimple_debug_source_bind_get_value (stmt), stmt);
1480 id->debug_stmts.safe_push (copy);
1481 return copy;
1482 }
1483
1484 /* Create a new deep copy of the statement. */
1485 copy = gimple_copy (stmt);
1486
1487 /* Clear flags that need revisiting. */
1488 if (is_gimple_call (copy)
1489 && gimple_call_tail_p (copy))
1490 gimple_call_set_tail (copy, false);
1491
1492 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1493 RESX and EH_DISPATCH. */
1494 if (id->eh_map)
1495 switch (gimple_code (copy))
1496 {
1497 case GIMPLE_CALL:
1498 {
1499 tree r, fndecl = gimple_call_fndecl (copy);
1500 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1501 switch (DECL_FUNCTION_CODE (fndecl))
1502 {
1503 case BUILT_IN_EH_COPY_VALUES:
1504 r = gimple_call_arg (copy, 1);
1505 r = remap_eh_region_tree_nr (r, id);
1506 gimple_call_set_arg (copy, 1, r);
1507 /* FALLTHRU */
1508
1509 case BUILT_IN_EH_POINTER:
1510 case BUILT_IN_EH_FILTER:
1511 r = gimple_call_arg (copy, 0);
1512 r = remap_eh_region_tree_nr (r, id);
1513 gimple_call_set_arg (copy, 0, r);
1514 break;
1515
1516 default:
1517 break;
1518 }
1519
1520 /* Reset alias info if we didn't apply measures to
1521 keep it valid over inlining by setting DECL_PT_UID. */
1522 if (!id->src_cfun->gimple_df
1523 || !id->src_cfun->gimple_df->ipa_pta)
1524 gimple_call_reset_alias_info (copy);
1525 }
1526 break;
1527
1528 case GIMPLE_RESX:
1529 {
1530 int r = gimple_resx_region (copy);
1531 r = remap_eh_region_nr (r, id);
1532 gimple_resx_set_region (copy, r);
1533 }
1534 break;
1535
1536 case GIMPLE_EH_DISPATCH:
1537 {
1538 int r = gimple_eh_dispatch_region (copy);
1539 r = remap_eh_region_nr (r, id);
1540 gimple_eh_dispatch_set_region (copy, r);
1541 }
1542 break;
1543
1544 default:
1545 break;
1546 }
1547 }
1548
1549 /* If STMT has a block defined, map it to the newly constructed
1550 block. */
1551 if (gimple_block (copy))
1552 {
1553 tree *n;
1554 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1555 gcc_assert (n);
1556 gimple_set_block (copy, *n);
1557 }
1558
1559 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1560 return copy;
1561
1562 /* Remap all the operands in COPY. */
1563 memset (&wi, 0, sizeof (wi));
1564 wi.info = id;
1565 if (skip_first)
1566 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1567 else
1568 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1569
1570 /* Clear the copied virtual operands. We are not remapping them here
1571 but are going to recreate them from scratch. */
1572 if (gimple_has_mem_ops (copy))
1573 {
1574 gimple_set_vdef (copy, NULL_TREE);
1575 gimple_set_vuse (copy, NULL_TREE);
1576 }
1577
1578 return copy;
1579 }
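
/* For example (GIMPLE sketch, names invented): with
   id->transform_return_to_modify set, a source statement "return D.1234;"
   is remapped to the assignment "retvar = D.1234;" where retvar is
   id->retvar, while a plain "return;" becomes a GIMPLE_NOP; the branch
   semantics of the return are restored later by manipulating the CFG. */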
1580
1581
1582 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1583 later. */
1584
1585 static basic_block
1586 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1587 gcov_type count_scale)
1588 {
1589 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1590 basic_block copy_basic_block;
1591 tree decl;
1592 gcov_type freq;
1593 basic_block prev;
1594
1595 /* Search for previous copied basic block. */
1596 prev = bb->prev_bb;
1597 while (!prev->aux)
1598 prev = prev->prev_bb;
1599
1600 /* create_basic_block() will append every new block to
1601 basic_block_info automatically. */
1602 copy_basic_block = create_basic_block (NULL, (void *) 0,
1603 (basic_block) prev->aux);
1604 copy_basic_block->count = apply_scale (bb->count, count_scale);
1605
1606 /* We are going to rebuild frequencies from scratch. These values
1607 have only minor importance and merely serve to drive canonicalize_loop_headers. */
1608 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1609
1610 /* We recompute frequencies after inlining, so this is quite safe. */
1611 if (freq > BB_FREQ_MAX)
1612 freq = BB_FREQ_MAX;
1613 copy_basic_block->frequency = freq;
1614
1615 copy_gsi = gsi_start_bb (copy_basic_block);
1616
1617 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1618 {
1619 gimple stmt = gsi_stmt (gsi);
1620 gimple orig_stmt = stmt;
1621
1622 id->regimplify = false;
1623 stmt = remap_gimple_stmt (stmt, id);
1624 if (gimple_nop_p (stmt))
1625 continue;
1626
1627 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1628 seq_gsi = copy_gsi;
1629
1630 /* With return slot optimization we can end up with
1631 non-gimple (foo *)&this->m, fix that here. */
1632 if (is_gimple_assign (stmt)
1633 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1634 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1635 {
1636 tree new_rhs;
1637 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1638 gimple_assign_rhs1 (stmt),
1639 true, NULL, false,
1640 GSI_CONTINUE_LINKING);
1641 gimple_assign_set_rhs1 (stmt, new_rhs);
1642 id->regimplify = false;
1643 }
1644
1645 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1646
1647 if (id->regimplify)
1648 gimple_regimplify_operands (stmt, &seq_gsi);
1649
1650 /* If copy_basic_block has been empty at the start of this iteration,
1651 call gsi_start_bb again to get at the newly added statements. */
1652 if (gsi_end_p (copy_gsi))
1653 copy_gsi = gsi_start_bb (copy_basic_block);
1654 else
1655 gsi_next (&copy_gsi);
1656
1657 /* Process the new statement. The call to gimple_regimplify_operands
1658 possibly turned the statement into multiple statements; we
1659 need to process all of them. */
1660 do
1661 {
1662 tree fn;
1663
1664 stmt = gsi_stmt (copy_gsi);
1665 if (is_gimple_call (stmt)
1666 && gimple_call_va_arg_pack_p (stmt)
1667 && id->gimple_call)
1668 {
1669 /* __builtin_va_arg_pack () should be replaced by
1670 all arguments corresponding to ... in the caller. */
1671 tree p;
1672 gimple new_call;
1673 vec<tree> argarray;
1674 size_t nargs = gimple_call_num_args (id->gimple_call);
1675 size_t n;
1676
1677 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1678 nargs--;
1679
1680 /* Create the new array of arguments. */
1681 n = nargs + gimple_call_num_args (stmt);
1682 argarray.create (n);
1683 argarray.safe_grow_cleared (n);
1684
1685 /* Copy all the arguments before '...' */
1686 memcpy (argarray.address (),
1687 gimple_call_arg_ptr (stmt, 0),
1688 gimple_call_num_args (stmt) * sizeof (tree));
1689
1690 /* Append the arguments passed in '...' */
1691 memcpy (argarray.address () + gimple_call_num_args (stmt),
1692 gimple_call_arg_ptr (id->gimple_call, 0)
1693 + (gimple_call_num_args (id->gimple_call) - nargs),
1694 nargs * sizeof (tree));
1695
1696 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1697 argarray);
1698
1699 argarray.release ();
1700
1701 /* Copy all GIMPLE_CALL flags, location and block, except
1702 GF_CALL_VA_ARG_PACK. */
1703 gimple_call_copy_flags (new_call, stmt);
1704 gimple_call_set_va_arg_pack (new_call, false);
1705 gimple_set_location (new_call, gimple_location (stmt));
1706 gimple_set_block (new_call, gimple_block (stmt));
1707 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1708
1709 gsi_replace (&copy_gsi, new_call, false);
1710 stmt = new_call;
1711 }
1712 else if (is_gimple_call (stmt)
1713 && id->gimple_call
1714 && (decl = gimple_call_fndecl (stmt))
1715 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1716 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1717 {
1718 /* __builtin_va_arg_pack_len () should be replaced by
1719 the number of anonymous arguments. */
1720 size_t nargs = gimple_call_num_args (id->gimple_call);
1721 tree count, p;
1722 gimple new_stmt;
1723
1724 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1725 nargs--;
1726
1727 count = build_int_cst (integer_type_node, nargs);
1728 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1729 gsi_replace (&copy_gsi, new_stmt, false);
1730 stmt = new_stmt;
1731 }
1732
1733 /* Statements produced by inlining can be unfolded, especially
1734 when we constant propagated some operands. We can't fold
1735 them right now for two reasons:
1736 1) folding requires SSA_NAME_DEF_STMTs to be correct
1737 2) we can't change function calls to builtins.
1738 So we just mark the statement for later folding. We mark
1739 all new statements, instead of just statements that have changed
1740 by some nontrivial substitution, so that even statements made
1741 foldable indirectly are updated. If this turns out to be
1742 expensive, copy_body can be told to watch for nontrivial
1743 changes. */
1744 if (id->statements_to_fold)
1745 pointer_set_insert (id->statements_to_fold, stmt);
1746
1747 /* We're duplicating a CALL_EXPR. Find any corresponding
1748 callgraph edges and update or duplicate them. */
1749 if (is_gimple_call (stmt))
1750 {
1751 struct cgraph_edge *edge;
1752
1753 switch (id->transform_call_graph_edges)
1754 {
1755 case CB_CGE_DUPLICATE:
1756 edge = cgraph_edge (id->src_node, orig_stmt);
1757 if (edge)
1758 {
1759 int edge_freq = edge->frequency;
1760 int new_freq;
1761 struct cgraph_edge *old_edge = edge;
1762 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1763 gimple_uid (stmt),
1764 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1765 true);
1766 /* We could also just rescale the frequency, but
1767 doing so would introduce roundoff errors and make
1768 the verifier unhappy. */
1769 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1770 copy_basic_block);
1771
1772 /* Speculative calls consist of two edges - direct and indirect.
1773 Duplicate the whole thing and distribute frequencies accordingly. */
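/* Roughly, each of the two cloned edges below gets
   new_freq * old_edge_freq / (old_direct_freq + old_indirect_freq),
   capped at CGRAPH_FREQ_MAX, so the direct/indirect split of the
   original speculative call is preserved. */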
1774 if (edge->speculative)
1775 {
1776 struct cgraph_edge *direct, *indirect;
1777 struct ipa_ref *ref;
1778
1779 gcc_assert (!edge->indirect_unknown_callee);
1780 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1781 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1782 gimple_uid (stmt),
1783 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1784 true);
1785 if (old_edge->frequency + indirect->frequency)
1786 {
1787 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1788 (old_edge->frequency + indirect->frequency)),
1789 CGRAPH_FREQ_MAX);
1790 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1791 (old_edge->frequency + indirect->frequency)),
1792 CGRAPH_FREQ_MAX);
1793 }
1794 ipa_clone_ref (ref, id->dst_node, stmt);
1795 }
1796 else
1797 {
1798 edge->frequency = new_freq;
1799 if (dump_file
1800 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1801 && (edge_freq > edge->frequency + 10
1802 || edge_freq < edge->frequency - 10))
1803 {
1804 fprintf (dump_file, "Edge frequency estimated by "
1805 "cgraph %i diverge from inliner's estimate %i\n",
1806 edge_freq,
1807 edge->frequency);
1808 fprintf (dump_file,
1809 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1810 bb->index,
1811 bb->frequency,
1812 copy_basic_block->frequency);
1813 }
1814 }
1815 }
1816 break;
1817
1818 case CB_CGE_MOVE_CLONES:
1819 cgraph_set_call_stmt_including_clones (id->dst_node,
1820 orig_stmt, stmt);
1821 edge = cgraph_edge (id->dst_node, stmt);
1822 break;
1823
1824 case CB_CGE_MOVE:
1825 edge = cgraph_edge (id->dst_node, orig_stmt);
1826 if (edge)
1827 cgraph_set_call_stmt (edge, stmt);
1828 break;
1829
1830 default:
1831 gcc_unreachable ();
1832 }
1833
1834 /* Constant propagation on arguments done during inlining
1835 may create a new direct call. Produce an edge for it. */
1836 if ((!edge
1837 || (edge->indirect_inlining_edge
1838 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1839 && id->dst_node->definition
1840 && (fn = gimple_call_fndecl (stmt)) != NULL)
1841 {
1842 struct cgraph_node *dest = cgraph_get_node (fn);
1843
1844 /* We have a missing edge in the callgraph. This can happen
1845 when previous inlining turned an indirect call into a
1846 direct call by constant propagating arguments or when we are
1847 producing a dead clone (for further cloning). In all
1848 other cases we hit a bug (incorrect node sharing is the
1849 most common reason for missing edges). */
1850 gcc_assert (!dest->definition
1851 || dest->address_taken
1852 || !id->src_node->definition
1853 || !id->dst_node->definition);
1854 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1855 cgraph_create_edge_including_clones
1856 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1857 compute_call_stmt_bb_frequency (id->dst_node->decl,
1858 copy_basic_block),
1859 CIF_ORIGINALLY_INDIRECT_CALL);
1860 else
1861 cgraph_create_edge (id->dst_node, dest, stmt,
1862 bb->count,
1863 compute_call_stmt_bb_frequency
1864 (id->dst_node->decl,
1865 copy_basic_block))->inline_failed
1866 = CIF_ORIGINALLY_INDIRECT_CALL;
1867 if (dump_file)
1868 {
1869 fprintf (dump_file, "Created new direct edge to %s\n",
1870 dest->name ());
1871 }
1872 }
1873
1874 notice_special_calls (stmt);
1875 }
1876
1877 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1878 id->eh_map, id->eh_lp_nr);
1879
1880 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1881 {
1882 ssa_op_iter i;
1883 tree def;
1884
1885 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1886 if (TREE_CODE (def) == SSA_NAME)
1887 SSA_NAME_DEF_STMT (def) = stmt;
1888 }
1889
1890 gsi_next (&copy_gsi);
1891 }
1892 while (!gsi_end_p (copy_gsi));
1893
1894 copy_gsi = gsi_last_bb (copy_basic_block);
1895 }
1896
1897 return copy_basic_block;
1898 }
1899
1900 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1901 form is quite easy, since the dominator relationship for the old basic blocks
1902 does not change.
1903
1904 There is however an exception where inlining might change the dominator
1905 relation across EH edges from basic blocks within the inlined function
1906 to landing pads in the function we inline into.
1907
1908 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1909 to gimple regs. Otherwise, it marks the PHI_RESULT of such
1910 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1911 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1912 set, which means that there will be no overlapping live ranges
1913 for the underlying symbol.
1914
1915 This might change in the future if we allow redirecting of EH edges, and
1916 we might then want to change the way we build the CFG pre-inlining to
1917 include all the possible edges. */
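/* In short: for every successor of BB that was not duplicated (its AUX
   pointer is unset or refers to the entry block), copy into the new
   edge's PHI arguments the values the destination already receives
   along the corresponding edge from RET_BB. */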
1918 static void
1919 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1920 bool can_throw, bool nonlocal_goto)
1921 {
1922 edge e;
1923 edge_iterator ei;
1924
1925 FOR_EACH_EDGE (e, ei, bb->succs)
1926 if (!e->dest->aux
1927 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1928 {
1929 gimple phi;
1930 gimple_stmt_iterator si;
1931
1932 if (!nonlocal_goto)
1933 gcc_assert (e->flags & EDGE_EH);
1934
1935 if (!can_throw)
1936 gcc_assert (!(e->flags & EDGE_EH));
1937
1938 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1939 {
1940 edge re;
1941
1942 phi = gsi_stmt (si);
1943
1944 /* For abnormal goto/call edges the receiver can be the
1945 ENTRY_BLOCK. Do not assert this cannot happen. */
1946
1947 gcc_assert ((e->flags & EDGE_EH)
1948 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1949
1950 re = find_edge (ret_bb, e->dest);
1951 gcc_checking_assert (re);
1952 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1953 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1954
1955 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1956 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1957 }
1958 }
1959 }
1960
1961
1962 /* Copy edges from BB into its copy constructed earlier, scale profile
1963 accordingly. Edges will be taken care of later. Assume aux
1964 pointers point to the copies of each BB. Return true if any
1965 debug stmts are left after a statement that must end the basic block. */
1966
1967 static bool
1968 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1969 basic_block abnormal_goto_dest)
1970 {
1971 basic_block new_bb = (basic_block) bb->aux;
1972 edge_iterator ei;
1973 edge old_edge;
1974 gimple_stmt_iterator si;
1975 int flags;
1976 bool need_debug_cleanup = false;
1977
1978 /* Use the indices from the original blocks to create edges for the
1979 new ones. */
1980 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1981 if (!(old_edge->flags & EDGE_EH))
1982 {
1983 edge new_edge;
1984
1985 flags = old_edge->flags;
1986
1987 /* Return edges do get a FALLTHRU flag when they get inlined. */
1988 if (old_edge->dest->index == EXIT_BLOCK
1989 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
1990 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
1991 flags |= EDGE_FALLTHRU;
1992 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1993 new_edge->count = apply_scale (old_edge->count, count_scale);
1994 new_edge->probability = old_edge->probability;
1995 }
1996
1997 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1998 return false;
1999
2000 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2001 {
2002 gimple copy_stmt;
2003 bool can_throw, nonlocal_goto;
2004
2005 copy_stmt = gsi_stmt (si);
2006 if (!is_gimple_debug (copy_stmt))
2007 update_stmt (copy_stmt);
2008
2009 /* Do this before the possible split_block. */
2010 gsi_next (&si);
2011
2012 /* If this tree could throw an exception, there are two
2013 cases where we need to add abnormal edge(s): the
2014 tree wasn't in a region and there is a "current
2015 region" in the caller; or the original tree had
2016 EH edges. In both cases split the block after the tree,
2017 and add abnormal edge(s) as needed; we need both
2018 those from the callee and the caller.
2019 We check whether the copy can throw, because the const
2020 propagation can change an INDIRECT_REF which throws
2021 into a COMPONENT_REF which doesn't. If the copy
2022 can throw, the original could also throw. */
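/* E.g. when the copied statement is a call that may throw internally,
   the block is split right after it so that the call ends its block,
   and make_eh_edges or make_eh_dispatch_edges below then attach the
   required EH successor edges. */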
2023 can_throw = stmt_can_throw_internal (copy_stmt);
2024 nonlocal_goto
2025 = (stmt_can_make_abnormal_goto (copy_stmt)
2026 && !computed_goto_p (copy_stmt));
2027
2028 if (can_throw || nonlocal_goto)
2029 {
2030 if (!gsi_end_p (si))
2031 {
2032 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2033 gsi_next (&si);
2034 if (gsi_end_p (si))
2035 need_debug_cleanup = true;
2036 }
2037 if (!gsi_end_p (si))
2038 /* Note that bb's predecessor edges aren't necessarily
2039 right at this point; split_block doesn't care. */
2040 {
2041 edge e = split_block (new_bb, copy_stmt);
2042
2043 new_bb = e->dest;
2044 new_bb->aux = e->src->aux;
2045 si = gsi_start_bb (new_bb);
2046 }
2047 }
2048
2049 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2050 make_eh_dispatch_edges (copy_stmt);
2051 else if (can_throw)
2052 make_eh_edges (copy_stmt);
2053
2054 /* If the call we inline cannot make an abnormal goto, do not add
2055 additional abnormal edges but only retain those already present
2056 in the original function body. */
2057 if (abnormal_goto_dest == NULL)
2058 nonlocal_goto = false;
2059 if (nonlocal_goto)
2060 {
2061 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2062
2063 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2064 nonlocal_goto = false;
2065 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2066 in OpenMP regions which aren't allowed to be left abnormally.
2067 So, no need to add abnormal edge in that case. */
2068 else if (is_gimple_call (copy_stmt)
2069 && gimple_call_internal_p (copy_stmt)
2070 && (gimple_call_internal_fn (copy_stmt)
2071 == IFN_ABNORMAL_DISPATCHER)
2072 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2073 nonlocal_goto = false;
2074 else
2075 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2076 }
2077
2078 if ((can_throw || nonlocal_goto)
2079 && gimple_in_ssa_p (cfun))
2080 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2081 can_throw, nonlocal_goto);
2082 }
2083 return need_debug_cleanup;
2084 }
2085
2086 /* Copy the PHIs. All blocks and edges are copied, some blocks
2087 were possibly split and new outgoing EH edges inserted.
2088 BB points to the block of the original function and AUX pointers link
2089 the original and newly copied blocks. */
2090
2091 static void
2092 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2093 {
2094 basic_block const new_bb = (basic_block) bb->aux;
2095 edge_iterator ei;
2096 gimple phi;
2097 gimple_stmt_iterator si;
2098 edge new_edge;
2099 bool inserted = false;
2100
2101 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2102 {
2103 tree res, new_res;
2104 gimple new_phi;
2105
2106 phi = gsi_stmt (si);
2107 res = PHI_RESULT (phi);
2108 new_res = res;
2109 if (!virtual_operand_p (res))
2110 {
2111 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2112 new_phi = create_phi_node (new_res, new_bb);
2113 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2114 {
2115 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2116 tree arg;
2117 tree new_arg;
2118 edge_iterator ei2;
2119 location_t locus;
2120
2121 /* When doing partial cloning, we allow PHIs on the entry block
2122 as long as all the arguments are the same. Find any input
2123 edge to see the argument to copy. */
2124 if (!old_edge)
2125 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2126 if (!old_edge->src->aux)
2127 break;
2128
2129 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2130 new_arg = arg;
2131 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2132 gcc_assert (new_arg);
2133 /* With return slot optimization we can end up with
2134 non-gimple (foo *)&this->m, fix that here. */
2135 if (TREE_CODE (new_arg) != SSA_NAME
2136 && TREE_CODE (new_arg) != FUNCTION_DECL
2137 && !is_gimple_val (new_arg))
2138 {
2139 gimple_seq stmts = NULL;
2140 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2141 gsi_insert_seq_on_edge (new_edge, stmts);
2142 inserted = true;
2143 }
2144 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2145 if (LOCATION_BLOCK (locus))
2146 {
2147 tree *n;
2148 n = (tree *) pointer_map_contains (id->decl_map,
2149 LOCATION_BLOCK (locus));
2150 gcc_assert (n);
2151 if (*n)
2152 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2153 else
2154 locus = LOCATION_LOCUS (locus);
2155 }
2156 else
2157 locus = LOCATION_LOCUS (locus);
2158
2159 add_phi_arg (new_phi, new_arg, new_edge, locus);
2160 }
2161 }
2162 }
2163
2164 /* Commit the delayed edge insertions. */
2165 if (inserted)
2166 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2167 gsi_commit_one_edge_insert (new_edge, NULL);
2168 }
2169
2170
2171 /* Wrapper for remap_decl so it can be used as a callback. */
2172
2173 static tree
2174 remap_decl_1 (tree decl, void *data)
2175 {
2176 return remap_decl (decl, (copy_body_data *) data);
2177 }
2178
2179 /* Build the struct function and associated data structures for the new clone
2180 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2181 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2182
2183 static void
2184 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2185 {
2186 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2187 gcov_type count_scale;
2188
2189 if (!DECL_ARGUMENTS (new_fndecl))
2190 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2191 if (!DECL_RESULT (new_fndecl))
2192 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2193
2194 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2195 count_scale
2196 = GCOV_COMPUTE_SCALE (count,
2197 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2198 else
2199 count_scale = REG_BR_PROB_BASE;
2200
2201 /* Register specific tree functions. */
2202 gimple_register_cfg_hooks ();
2203
2204 /* Get clean struct function. */
2205 push_struct_function (new_fndecl);
2206
2207 /* We will rebuild these, so just sanity check that they are empty. */
2208 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2209 gcc_assert (cfun->local_decls == NULL);
2210 gcc_assert (cfun->cfg == NULL);
2211 gcc_assert (cfun->decl == new_fndecl);
2212
2213 /* Copy items we preserve during cloning. */
2214 cfun->static_chain_decl = src_cfun->static_chain_decl;
2215 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2216 cfun->function_end_locus = src_cfun->function_end_locus;
2217 cfun->curr_properties = src_cfun->curr_properties;
2218 cfun->last_verified = src_cfun->last_verified;
2219 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2220 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2221 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2222 cfun->stdarg = src_cfun->stdarg;
2223 cfun->after_inlining = src_cfun->after_inlining;
2224 cfun->can_throw_non_call_exceptions
2225 = src_cfun->can_throw_non_call_exceptions;
2226 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2227 cfun->returns_struct = src_cfun->returns_struct;
2228 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2229
2230 init_empty_tree_cfg ();
2231
2232 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2233 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2234 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2235 REG_BR_PROB_BASE);
2236 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2237 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2238 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2239 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2240 REG_BR_PROB_BASE);
2241 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2242 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2243 if (src_cfun->eh)
2244 init_eh_for_function ();
2245
2246 if (src_cfun->gimple_df)
2247 {
2248 init_tree_ssa (cfun);
2249 cfun->gimple_df->in_ssa_p = true;
2250 init_ssa_operands (cfun);
2251 }
2252 }
2253
2254 /* Helper function for copy_cfg_body. Move debug stmts from the end
2255 of NEW_BB to the beginning of successor basic blocks when needed. If the
2256 successor has multiple predecessors, reset the bound values; otherwise
2257 keep them. */
2258
2259 static void
2260 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2261 {
2262 edge e;
2263 edge_iterator ei;
2264 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2265
2266 if (gsi_end_p (si)
2267 || gsi_one_before_end_p (si)
2268 || !(stmt_can_throw_internal (gsi_stmt (si))
2269 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2270 return;
2271
2272 FOR_EACH_EDGE (e, ei, new_bb->succs)
2273 {
2274 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2275 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2276 while (is_gimple_debug (gsi_stmt (ssi)))
2277 {
2278 gimple stmt = gsi_stmt (ssi), new_stmt;
2279 tree var;
2280 tree value;
2281
2282 /* For the last edge move the debug stmts instead of copying
2283 them. */
2284 if (ei_one_before_end_p (ei))
2285 {
2286 si = ssi;
2287 gsi_prev (&ssi);
2288 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2289 gimple_debug_bind_reset_value (stmt);
2290 gsi_remove (&si, false);
2291 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2292 continue;
2293 }
2294
2295 if (gimple_debug_bind_p (stmt))
2296 {
2297 var = gimple_debug_bind_get_var (stmt);
2298 if (single_pred_p (e->dest))
2299 {
2300 value = gimple_debug_bind_get_value (stmt);
2301 value = unshare_expr (value);
2302 }
2303 else
2304 value = NULL_TREE;
2305 new_stmt = gimple_build_debug_bind (var, value, stmt);
2306 }
2307 else if (gimple_debug_source_bind_p (stmt))
2308 {
2309 var = gimple_debug_source_bind_get_var (stmt);
2310 value = gimple_debug_source_bind_get_value (stmt);
2311 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2312 }
2313 else
2314 gcc_unreachable ();
2315 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2316 id->debug_stmts.safe_push (new_stmt);
2317 gsi_prev (&ssi);
2318 }
2319 }
2320 }
2321
2322 /* Make a copy of the sub-loops of SRC_PARENT and place them
2323 as children of DEST_PARENT. */
2324
2325 static void
2326 copy_loops (copy_body_data *id,
2327 struct loop *dest_parent, struct loop *src_parent)
2328 {
2329 struct loop *src_loop = src_parent->inner;
2330 while (src_loop)
2331 {
2332 if (!id->blocks_to_copy
2333 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2334 {
2335 struct loop *dest_loop = alloc_loop ();
2336
2337 /* Assign the new loop its header and latch and associate
2338 those with the new loop. */
2339 if (src_loop->header != NULL)
2340 {
2341 dest_loop->header = (basic_block)src_loop->header->aux;
2342 dest_loop->header->loop_father = dest_loop;
2343 }
2344 if (src_loop->latch != NULL)
2345 {
2346 dest_loop->latch = (basic_block)src_loop->latch->aux;
2347 dest_loop->latch->loop_father = dest_loop;
2348 }
2349
2350 /* Copy loop meta-data. */
2351 copy_loop_info (src_loop, dest_loop);
2352
2353 /* Finally place it into the loop array and the loop tree. */
2354 place_new_loop (cfun, dest_loop);
2355 flow_loop_tree_node_add (dest_parent, dest_loop);
2356
2357 dest_loop->safelen = src_loop->safelen;
2358 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2359 if (src_loop->force_vectorize)
2360 {
2361 dest_loop->force_vectorize = true;
2362 cfun->has_force_vectorize_loops = true;
2363 }
2364 if (src_loop->simduid)
2365 {
2366 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2367 cfun->has_simduid_loops = true;
2368 }
2369
2370 /* Recurse. */
2371 copy_loops (id, dest_loop, src_loop);
2372 }
2373 src_loop = src_loop->next;
2374 }
2375 }
2376
2377 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2378
2379 void
2380 redirect_all_calls (copy_body_data * id, basic_block bb)
2381 {
2382 gimple_stmt_iterator si;
2383 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2384 {
2385 if (is_gimple_call (gsi_stmt (si)))
2386 {
2387 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2388 if (edge)
2389 cgraph_redirect_edge_call_stmt_to_callee (edge);
2390 }
2391 }
2392 }
2393
2394 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2395 with each bb's frequency. Used when NODE has a 0-weight entry
2396 but we are about to inline it into a non-zero count call bb.
2397 See the comments for handle_missing_profiles() in predict.c for
2398 when this can happen for COMDATs. */
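/* Sketch of the scaling done below: each basic block receives
   bb->count = COUNT * bb->frequency / BB_FREQ_MAX, and each outgoing
   edge then gets its share of the block count according to its
   probability. */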
2399
2400 void
2401 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2402 {
2403 basic_block bb;
2404 edge_iterator ei;
2405 edge e;
2406 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2407
2408 FOR_ALL_BB_FN(bb, fn)
2409 {
2410 bb->count = apply_scale (count,
2411 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2412 FOR_EACH_EDGE (e, ei, bb->succs)
2413 e->count = apply_probability (e->src->count, e->probability);
2414 }
2415 }
2416
2417 /* Make a copy of the body of FN so that it can be inserted inline in
2418 another function. Walks FN via CFG, returns new fndecl. */
2419
2420 static tree
2421 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2422 basic_block entry_block_map, basic_block exit_block_map,
2423 basic_block new_entry)
2424 {
2425 tree callee_fndecl = id->src_fn;
2426 /* Original cfun for the callee, doesn't change. */
2427 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2428 struct function *cfun_to_copy;
2429 basic_block bb;
2430 tree new_fndecl = NULL;
2431 bool need_debug_cleanup = false;
2432 gcov_type count_scale;
2433 int last;
2434 int incoming_frequency = 0;
2435 gcov_type incoming_count = 0;
2436
2437 /* This can happen for COMDAT routines that end up with 0 counts
2438 despite being called (see the comments for handle_missing_profiles()
2439 in predict.c as to why). Apply counts to the blocks in the callee
2440 before inlining, using the guessed edge frequencies, so that we don't
2441 end up with a 0-count inline body which can confuse downstream
2442 optimizations such as function splitting. */
2443 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2444 {
2445 /* Apply the larger of the call bb count and the total incoming
2446 call edge count to the callee. */
2447 gcov_type in_count = 0;
2448 struct cgraph_edge *in_edge;
2449 for (in_edge = id->src_node->callers; in_edge;
2450 in_edge = in_edge->next_caller)
2451 in_count += in_edge->count;
2452 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2453 }
2454
2455 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2456 count_scale
2457 = GCOV_COMPUTE_SCALE (count,
2458 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2459 else
2460 count_scale = REG_BR_PROB_BASE;
2461
2462 /* Register specific tree functions. */
2463 gimple_register_cfg_hooks ();
2464
2465 /* If we are inlining just a region of the function, make sure to connect
2466 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2467 part of a loop, we must compute the frequency and probability of
2468 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2469 probabilities of edges incoming from the nonduplicated region. */
2470 if (new_entry)
2471 {
2472 edge e;
2473 edge_iterator ei;
2474
2475 FOR_EACH_EDGE (e, ei, new_entry->preds)
2476 if (!e->src->aux)
2477 {
2478 incoming_frequency += EDGE_FREQUENCY (e);
2479 incoming_count += e->count;
2480 }
2481 incoming_count = apply_scale (incoming_count, count_scale);
2482 incoming_frequency
2483 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2484 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2485 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2486 }
2487
2488 /* Must have a CFG here at this point. */
2489 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2490 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2491
2492 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2493
2494 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2495 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2496 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2497 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2498
2499 /* Duplicate any exception-handling regions. */
2500 if (cfun->eh)
2501 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2502 remap_decl_1, id);
2503
2504 /* Use aux pointers to map the original blocks to their copies. */
2505 FOR_EACH_BB_FN (bb, cfun_to_copy)
2506 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2507 {
2508 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2509 bb->aux = new_bb;
2510 new_bb->aux = bb;
2511 new_bb->loop_father = entry_block_map->loop_father;
2512 }
2513
2514 last = last_basic_block_for_fn (cfun);
2515
2516 /* Now that we've duplicated the blocks, duplicate their edges. */
2517 basic_block abnormal_goto_dest = NULL;
2518 if (id->gimple_call
2519 && stmt_can_make_abnormal_goto (id->gimple_call))
2520 {
2521 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2522
2523 bb = gimple_bb (id->gimple_call);
2524 gsi_next (&gsi);
2525 if (gsi_end_p (gsi))
2526 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2527 }
2528 FOR_ALL_BB_FN (bb, cfun_to_copy)
2529 if (!id->blocks_to_copy
2530 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2531 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2532 abnormal_goto_dest);
2533
2534 if (new_entry)
2535 {
2536 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2537 e->probability = REG_BR_PROB_BASE;
2538 e->count = incoming_count;
2539 }
2540
2541 /* Duplicate the loop tree, if available and wanted. */
2542 if (loops_for_fn (src_cfun) != NULL
2543 && current_loops != NULL)
2544 {
2545 copy_loops (id, entry_block_map->loop_father,
2546 get_loop (src_cfun, 0));
2547 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2548 loops_state_set (LOOPS_NEED_FIXUP);
2549 }
2550
2551 /* If the loop tree in the source function needed fixup, mark the
2552 destination loop tree for fixup, too. */
2553 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2554 loops_state_set (LOOPS_NEED_FIXUP);
2555
2556 if (gimple_in_ssa_p (cfun))
2557 FOR_ALL_BB_FN (bb, cfun_to_copy)
2558 if (!id->blocks_to_copy
2559 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2560 copy_phis_for_bb (bb, id);
2561
2562 FOR_ALL_BB_FN (bb, cfun_to_copy)
2563 if (bb->aux)
2564 {
2565 if (need_debug_cleanup
2566 && bb->index != ENTRY_BLOCK
2567 && bb->index != EXIT_BLOCK)
2568 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2569 /* Update call edge destinations. This cannot be done before loop
2570 info is updated, because we may split basic blocks. */
2571 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2572 redirect_all_calls (id, (basic_block)bb->aux);
2573 ((basic_block)bb->aux)->aux = NULL;
2574 bb->aux = NULL;
2575 }
2576
2577 /* Zero out AUX fields of blocks newly created during EH edge
2578 insertion. */
2579 for (; last < last_basic_block_for_fn (cfun); last++)
2580 {
2581 if (need_debug_cleanup)
2582 maybe_move_debug_stmts_to_successors (id,
2583 BASIC_BLOCK_FOR_FN (cfun, last));
2584 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2585 /* Update call edge destinations. This cannot be done before loop
2586 info is updated, because we may split basic blocks. */
2587 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2588 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2589 }
2590 entry_block_map->aux = NULL;
2591 exit_block_map->aux = NULL;
2592
2593 if (id->eh_map)
2594 {
2595 pointer_map_destroy (id->eh_map);
2596 id->eh_map = NULL;
2597 }
2598
2599 return new_fndecl;
2600 }
2601
2602 /* Copy the debug STMT using ID. We deal with these statements in a
2603 special way: if any variable in their VALUE expression wasn't
2604 remapped yet, we won't remap it, because that would get decl uids
2605 out of sync, causing codegen differences between -g and -g0. If
2606 this arises, we drop the VALUE expression altogether. */
2607
2608 static void
2609 copy_debug_stmt (gimple stmt, copy_body_data *id)
2610 {
2611 tree t, *n;
2612 struct walk_stmt_info wi;
2613
2614 if (gimple_block (stmt))
2615 {
2616 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2617 gimple_set_block (stmt, n ? *n : id->block);
2618 }
2619
2620 /* Remap all the operands in COPY. */
2621 memset (&wi, 0, sizeof (wi));
2622 wi.info = id;
2623
2624 processing_debug_stmt = 1;
2625
2626 if (gimple_debug_source_bind_p (stmt))
2627 t = gimple_debug_source_bind_get_var (stmt);
2628 else
2629 t = gimple_debug_bind_get_var (stmt);
2630
2631 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2632 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2633 {
2634 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2635 t = *n;
2636 }
2637 else if (TREE_CODE (t) == VAR_DECL
2638 && !is_global_var (t)
2639 && !pointer_map_contains (id->decl_map, t))
2640 /* T is a non-localized variable. */;
2641 else
2642 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2643
2644 if (gimple_debug_bind_p (stmt))
2645 {
2646 gimple_debug_bind_set_var (stmt, t);
2647
2648 if (gimple_debug_bind_has_value_p (stmt))
2649 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2650 remap_gimple_op_r, &wi, NULL);
2651
2652 /* Punt if any decl couldn't be remapped. */
2653 if (processing_debug_stmt < 0)
2654 gimple_debug_bind_reset_value (stmt);
2655 }
2656 else if (gimple_debug_source_bind_p (stmt))
2657 {
2658 gimple_debug_source_bind_set_var (stmt, t);
2659 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2660 remap_gimple_op_r, &wi, NULL);
2661 /* When inlining and the source bind refers to one of the optimized
2662 away parameters, change the source bind into a normal debug bind
2663 referring to the corresponding DEBUG_EXPR_DECL that should have
2664 been bound before the call stmt. */
2665 t = gimple_debug_source_bind_get_value (stmt);
2666 if (t != NULL_TREE
2667 && TREE_CODE (t) == PARM_DECL
2668 && id->gimple_call)
2669 {
2670 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2671 unsigned int i;
2672 if (debug_args != NULL)
2673 {
2674 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2675 if ((**debug_args)[i] == DECL_ORIGIN (t)
2676 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2677 {
2678 t = (**debug_args)[i + 1];
2679 stmt->subcode = GIMPLE_DEBUG_BIND;
2680 gimple_debug_bind_set_value (stmt, t);
2681 break;
2682 }
2683 }
2684 }
2685 }
2686
2687 processing_debug_stmt = 0;
2688
2689 update_stmt (stmt);
2690 }
2691
2692 /* Process deferred debug stmts. In order to give values better odds
2693 of being successfully remapped, we delay the processing of debug
2694 stmts until all other stmts that might require remapping are
2695 processed. */
2696
2697 static void
2698 copy_debug_stmts (copy_body_data *id)
2699 {
2700 size_t i;
2701 gimple stmt;
2702
2703 if (!id->debug_stmts.exists ())
2704 return;
2705
2706 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2707 copy_debug_stmt (stmt, id);
2708
2709 id->debug_stmts.release ();
2710 }
2711
2712 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2713 another function. */
2714
2715 static tree
2716 copy_tree_body (copy_body_data *id)
2717 {
2718 tree fndecl = id->src_fn;
2719 tree body = DECL_SAVED_TREE (fndecl);
2720
2721 walk_tree (&body, copy_tree_body_r, id, NULL);
2722
2723 return body;
2724 }
2725
2726 /* Make a copy of the body of FN so that it can be inserted inline in
2727 another function. */
2728
2729 static tree
2730 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2731 basic_block entry_block_map, basic_block exit_block_map,
2732 basic_block new_entry)
2733 {
2734 tree fndecl = id->src_fn;
2735 tree body;
2736
2737 /* If this body has a CFG, walk CFG and copy. */
2738 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2739 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2740 new_entry);
2741 copy_debug_stmts (id);
2742
2743 return body;
2744 }
2745
2746 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2747 defined in function FN, or of a data member thereof. */
2748
2749 static bool
2750 self_inlining_addr_expr (tree value, tree fn)
2751 {
2752 tree var;
2753
2754 if (TREE_CODE (value) != ADDR_EXPR)
2755 return false;
2756
2757 var = get_base_address (TREE_OPERAND (value, 0));
2758
2759 return var && auto_var_in_fn_p (var, fn);
2760 }
2761
2762 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2763 lexical block and line number information from base_stmt, if given,
2764 or from the last stmt of the block otherwise. */
2765
2766 static gimple
2767 insert_init_debug_bind (copy_body_data *id,
2768 basic_block bb, tree var, tree value,
2769 gimple base_stmt)
2770 {
2771 gimple note;
2772 gimple_stmt_iterator gsi;
2773 tree tracked_var;
2774
2775 if (!gimple_in_ssa_p (id->src_cfun))
2776 return NULL;
2777
2778 if (!MAY_HAVE_DEBUG_STMTS)
2779 return NULL;
2780
2781 tracked_var = target_for_debug_bind (var);
2782 if (!tracked_var)
2783 return NULL;
2784
2785 if (bb)
2786 {
2787 gsi = gsi_last_bb (bb);
2788 if (!base_stmt && !gsi_end_p (gsi))
2789 base_stmt = gsi_stmt (gsi);
2790 }
2791
2792 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2793
2794 if (bb)
2795 {
2796 if (!gsi_end_p (gsi))
2797 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2798 else
2799 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2800 }
2801
2802 return note;
2803 }
2804
2805 static void
2806 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2807 {
2808 /* If VAR represents a zero-sized variable, it's possible that the
2809 assignment statement may result in no gimple statements. */
2810 if (init_stmt)
2811 {
2812 gimple_stmt_iterator si = gsi_last_bb (bb);
2813
2814 /* We can end up with init statements that store to a non-register
2815 from a rhs with a conversion. Handle that here by forcing the
2816 rhs into a temporary. gimple_regimplify_operands is not
2817 prepared to do this for us. */
2818 if (!is_gimple_debug (init_stmt)
2819 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2820 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2821 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2822 {
2823 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2824 gimple_expr_type (init_stmt),
2825 gimple_assign_rhs1 (init_stmt));
2826 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2827 GSI_NEW_STMT);
2828 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2829 gimple_assign_set_rhs1 (init_stmt, rhs);
2830 }
2831 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2832 gimple_regimplify_operands (init_stmt, &si);
2833
2834 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2835 {
2836 tree def = gimple_assign_lhs (init_stmt);
2837 insert_init_debug_bind (id, bb, def, def, init_stmt);
2838 }
2839 }
2840 }
2841
2842 /* Initialize parameter P with VALUE. If needed, produce an init statement
2843 at the end of BB. When BB is NULL, we return the init statement to be
2844 output later. */
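/* For instance (hypothetical sq used only for illustration), when
   inlining a call sq (5) to int sq (int x), the PARM_DECL x is
   replaced by a new VAR_DECL initialized to 5; in SSA form the
   default definition of x may instead be mapped directly to the
   constant, so no initialization statement is needed at all. */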
2845 static gimple
2846 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2847 basic_block bb, tree *vars)
2848 {
2849 gimple init_stmt = NULL;
2850 tree var;
2851 tree rhs = value;
2852 tree def = (gimple_in_ssa_p (cfun)
2853 ? ssa_default_def (id->src_cfun, p) : NULL);
2854
2855 if (value
2856 && value != error_mark_node
2857 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2858 {
2859 /* If we can match up types by promotion/demotion do so. */
2860 if (fold_convertible_p (TREE_TYPE (p), value))
2861 rhs = fold_convert (TREE_TYPE (p), value);
2862 else
2863 {
2864 /* ??? For valid programs we should not end up here.
2865 Still if we end up with truly mismatched types here, fall back
2866 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2867 GIMPLE to the following passes. */
2868 if (!is_gimple_reg_type (TREE_TYPE (value))
2869 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2870 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2871 else
2872 rhs = build_zero_cst (TREE_TYPE (p));
2873 }
2874 }
2875
2876 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2877 here since the type of this decl must be visible to the calling
2878 function. */
2879 var = copy_decl_to_var (p, id);
2880
2881 /* Declare this new variable. */
2882 DECL_CHAIN (var) = *vars;
2883 *vars = var;
2884
2885 /* Make gimplifier happy about this variable. */
2886 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2887
2888 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2889 we would not need to create a new variable here at all, if it
2890 weren't for debug info. Still, we can just use the argument
2891 value. */
2892 if (TREE_READONLY (p)
2893 && !TREE_ADDRESSABLE (p)
2894 && value && !TREE_SIDE_EFFECTS (value)
2895 && !def)
2896 {
2897 /* We may produce non-gimple trees by adding NOPs or introduce
2898 invalid sharing when the operand is not really constant.
2899 It is not a big deal to prohibit constant propagation here, as
2900 we will constant propagate in the DOM1 pass anyway. */
2901 if (is_gimple_min_invariant (value)
2902 && useless_type_conversion_p (TREE_TYPE (p),
2903 TREE_TYPE (value))
2904 /* We have to be very careful about ADDR_EXPR. Make sure
2905 the base variable isn't a local variable of the inlined
2906 function, e.g., when doing recursive inlining, direct or
2907 mutually-recursive or whatever, which is why we don't
2908 just test whether fn == current_function_decl. */
2909 && ! self_inlining_addr_expr (value, fn))
2910 {
2911 insert_decl_map (id, p, value);
2912 insert_debug_decl_map (id, p, var);
2913 return insert_init_debug_bind (id, bb, var, value, NULL);
2914 }
2915 }
2916
2917 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2918 that way, when the PARM_DECL is encountered, it will be
2919 automatically replaced by the VAR_DECL. */
2920 insert_decl_map (id, p, var);
2921
2922 /* Even if P was TREE_READONLY, the new VAR should not be.
2923 In the original code, we would have constructed a
2924 temporary, and then the function body would have never
2925 changed the value of P. However, now, we will be
2926 constructing VAR directly. The constructor body may
2927 change its value multiple times as it is being
2928 constructed. Therefore, it must not be TREE_READONLY;
2929 the back-end assumes that a TREE_READONLY variable is
2930 assigned to only once. */
2931 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2932 TREE_READONLY (var) = 0;
2933
2934 /* If there is no setup required and we are in SSA, take the easy route
2935 of replacing all SSA names representing the function parameter by the
2936 SSA name passed to the function.
2937
2938 We need to construct a map for the variable anyway, as it might be used
2939 in different SSA names when the parameter is set in the function.
2940
2941 Do the replacement at -O0 for const arguments replaced by a constant.
2942 This is important for builtin_constant_p and other constructs requiring
2943 a constant argument to be visible in the inlined function body. */
2944 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2945 && (optimize
2946 || (TREE_READONLY (p)
2947 && is_gimple_min_invariant (rhs)))
2948 && (TREE_CODE (rhs) == SSA_NAME
2949 || is_gimple_min_invariant (rhs))
2950 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2951 {
2952 insert_decl_map (id, def, rhs);
2953 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2954 }
2955
2956 /* If the value of the argument is never used, don't bother initializing
2957 it. */
2958 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2959 {
2960 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2961 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2962 }
2963
2964 /* Initialize this VAR_DECL from the equivalent argument. Convert
2965 the argument to the proper type in case it was promoted. */
2966 if (value)
2967 {
2968 if (rhs == error_mark_node)
2969 {
2970 insert_decl_map (id, p, var);
2971 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2972 }
2973
2974 STRIP_USELESS_TYPE_CONVERSION (rhs);
2975
2976 /* If we are in SSA form properly remap the default definition
2977 or assign to a dummy SSA name if the parameter is unused and
2978 we are not optimizing. */
2979 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2980 {
2981 if (def)
2982 {
2983 def = remap_ssa_name (def, id);
2984 init_stmt = gimple_build_assign (def, rhs);
2985 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2986 set_ssa_default_def (cfun, var, NULL);
2987 }
2988 else if (!optimize)
2989 {
2990 def = make_ssa_name (var, NULL);
2991 init_stmt = gimple_build_assign (def, rhs);
2992 }
2993 }
2994 else
2995 init_stmt = gimple_build_assign (var, rhs);
2996
2997 if (bb && init_stmt)
2998 insert_init_stmt (id, bb, init_stmt);
2999 }
3000 return init_stmt;
3001 }
3002
3003 /* Generate code to initialize the parameters of the function at the
3004 top of the stack in ID from the GIMPLE_CALL STMT. */
3005
3006 static void
3007 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3008 tree fn, basic_block bb)
3009 {
3010 tree parms;
3011 size_t i;
3012 tree p;
3013 tree vars = NULL_TREE;
3014 tree static_chain = gimple_call_chain (stmt);
3015
3016 /* Figure out what the parameters are. */
3017 parms = DECL_ARGUMENTS (fn);
3018
3019 /* Loop through the parameter declarations, replacing each with an
3020 equivalent VAR_DECL, appropriately initialized. */
3021 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3022 {
3023 tree val;
3024 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3025 setup_one_parameter (id, p, val, fn, bb, &vars);
3026 }
3027 /* After remapping the parameters, remap their types. This has to be done
3028 in a second loop over all parameters to appropriately remap
3029 variable sized arrays when the size is specified in a
3030 parameter following the array. */
3031 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3032 {
3033 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
3034 if (varp
3035 && TREE_CODE (*varp) == VAR_DECL)
3036 {
3037 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3038 ? ssa_default_def (id->src_cfun, p) : NULL);
3039 tree var = *varp;
3040 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3041 /* Also remap the default definition if it was remapped
3042 to the default definition of the parameter replacement
3043 by the parameter setup. */
3044 if (def)
3045 {
3046 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3047 if (defp
3048 && TREE_CODE (*defp) == SSA_NAME
3049 && SSA_NAME_VAR (*defp) == var)
3050 TREE_TYPE (*defp) = TREE_TYPE (var);
3051 }
3052 }
3053 }
3054
3055 /* Initialize the static chain. */
3056 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3057 gcc_assert (fn != current_function_decl);
3058 if (p)
3059 {
3060 /* No static chain? Seems like a bug in tree-nested.c. */
3061 gcc_assert (static_chain);
3062
3063 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3064 }
3065
3066 declare_inline_vars (id->block, vars);
3067 }
3068
3069
3070 /* Declare a return variable to replace the RESULT_DECL for the
3071 function we are calling. An appropriate DECL_STMT is returned.
3072 The USE_STMT is filled to contain a use of the declaration to
3073 indicate the return value of the function.
3074
3075 RETURN_SLOT, if non-null, is the place where to store the result. It
3076 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3077 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3078
3079 The return value is a (possibly null) value that holds the result
3080 as seen by the caller. */
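/* For example, for an inlined call y = f (); MODIFY_DEST is y; when
   no type conversion or addressability issue prevents it, y itself is
   reused as the return variable, so no temporary needs to be
   created. */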
3081
3082 static tree
3083 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3084 basic_block entry_bb)
3085 {
3086 tree callee = id->src_fn;
3087 tree result = DECL_RESULT (callee);
3088 tree callee_type = TREE_TYPE (result);
3089 tree caller_type;
3090 tree var, use;
3091
3092 /* Handle type-mismatches in the function declaration return type
3093 vs. the call expression. */
3094 if (modify_dest)
3095 caller_type = TREE_TYPE (modify_dest);
3096 else
3097 caller_type = TREE_TYPE (TREE_TYPE (callee));
3098
3099 /* We don't need to do anything for functions that don't return anything. */
3100 if (VOID_TYPE_P (callee_type))
3101 return NULL_TREE;
3102
3103 /* If there was a return slot, then the return value is the
3104 dereferenced address of that object. */
3105 if (return_slot)
3106 {
3107 /* The front end shouldn't have used both return_slot and
3108 a modify expression. */
3109 gcc_assert (!modify_dest);
3110 if (DECL_BY_REFERENCE (result))
3111 {
3112 tree return_slot_addr = build_fold_addr_expr (return_slot);
3113 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3114
3115 /* We are going to construct *&return_slot and we can't do that
3116 for variables that are not believed to be addressable.
3117
3118 FIXME: This check can possibly trigger, because values returned
3119 via the return slot optimization are not believed to have their
3120 address taken by alias analysis. */
3121 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3122 var = return_slot_addr;
3123 }
3124 else
3125 {
3126 var = return_slot;
3127 gcc_assert (TREE_CODE (var) != SSA_NAME);
3128 if (TREE_ADDRESSABLE (result))
3129 mark_addressable (var);
3130 }
3131 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3132 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3133 && !DECL_GIMPLE_REG_P (result)
3134 && DECL_P (var))
3135 DECL_GIMPLE_REG_P (var) = 0;
3136 use = NULL;
3137 goto done;
3138 }
3139
3140 /* All types requiring non-trivial constructors should have been handled. */
3141 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3142
3143 /* Attempt to avoid creating a new temporary variable. */
3144 if (modify_dest
3145 && TREE_CODE (modify_dest) != SSA_NAME)
3146 {
3147 bool use_it = false;
3148
3149 /* We can't use MODIFY_DEST if there's type promotion involved. */
3150 if (!useless_type_conversion_p (callee_type, caller_type))
3151 use_it = false;
3152
3153 /* ??? If we're assigning to a variable sized type, then we must
3154 reuse the destination variable, because we've no good way to
3155 create variable sized temporaries at this point. */
3156 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3157 use_it = true;
3158
3159 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3160 reuse it as the result of the call directly. Don't do this if
3161 it would promote MODIFY_DEST to addressable. */
3162 else if (TREE_ADDRESSABLE (result))
3163 use_it = false;
3164 else
3165 {
3166 tree base_m = get_base_address (modify_dest);
3167
3168 /* If the base isn't a decl, then it's a pointer, and we don't
3169 know where that's going to go. */
3170 if (!DECL_P (base_m))
3171 use_it = false;
3172 else if (is_global_var (base_m))
3173 use_it = false;
3174 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3175 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3176 && !DECL_GIMPLE_REG_P (result)
3177 && DECL_GIMPLE_REG_P (base_m))
3178 use_it = false;
3179 else if (!TREE_ADDRESSABLE (base_m))
3180 use_it = true;
3181 }
3182
3183 if (use_it)
3184 {
3185 var = modify_dest;
3186 use = NULL;
3187 goto done;
3188 }
3189 }
3190
3191 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3192
3193 var = copy_result_decl_to_var (result, id);
3194 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3195
3196 /* Do not have the rest of GCC warn about this variable as it should
3197 not be visible to the user. */
3198 TREE_NO_WARNING (var) = 1;
3199
3200 declare_inline_vars (id->block, var);
3201
3202 /* Build the use expr. If the return type of the function was
3203 promoted, convert it back to the expected type. */
3204 use = var;
3205 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3206 {
3207 /* If we can match up types by promotion/demotion do so. */
3208 if (fold_convertible_p (caller_type, var))
3209 use = fold_convert (caller_type, var);
3210 else
3211 {
3212 /* ??? For valid programs we should not end up here.
3213 Still if we end up with truly mismatched types here, fall back
3214 to using a MEM_REF to not leak invalid GIMPLE to the following
3215 passes. */
3216 /* Prevent var from being written into SSA form. */
3217 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3218 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3219 DECL_GIMPLE_REG_P (var) = false;
3220 else if (is_gimple_reg_type (TREE_TYPE (var)))
3221 TREE_ADDRESSABLE (var) = true;
3222 use = fold_build2 (MEM_REF, caller_type,
3223 build_fold_addr_expr (var),
3224 build_int_cst (ptr_type_node, 0));
3225 }
3226 }
3227
3228 STRIP_USELESS_TYPE_CONVERSION (use);
3229
3230 if (DECL_BY_REFERENCE (result))
3231 {
3232 TREE_ADDRESSABLE (var) = 1;
3233 var = build_fold_addr_expr (var);
3234 }
3235
3236 done:
3237 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3238 way, when the RESULT_DECL is encountered, it will be
3239 automatically replaced by the VAR_DECL.
3240
3241 When returning by reference, ensure that RESULT_DECL remaps to
3242 gimple_val. */
3243 if (DECL_BY_REFERENCE (result)
3244 && !is_gimple_val (var))
3245 {
3246 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3247 insert_decl_map (id, result, temp);
3248 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3249 its default_def SSA_NAME. */
3250 if (gimple_in_ssa_p (id->src_cfun)
3251 && is_gimple_reg (result))
3252 {
3253 temp = make_ssa_name (temp, NULL);
3254 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3255 }
3256 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3257 }
3258 else
3259 insert_decl_map (id, result, var);
3260
3261 /* Remember this so we can ignore it in remap_decls. */
3262 id->retvar = var;
3263
3264 return use;
3265 }
3266
3267 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3268 to a local label. */
3269
3270 static tree
3271 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3272 {
3273 tree node = *nodep;
3274 tree fn = (tree) fnp;
3275
3276 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3277 return node;
3278
3279 if (TYPE_P (node))
3280 *walk_subtrees = 0;
3281
3282 return NULL_TREE;
3283 }
3284
3285 /* Determine if the function can be copied. If so return NULL. If
3286 not return a string describing the reason for failure. */
3287
3288 static const char *
3289 copy_forbidden (struct function *fun, tree fndecl)
3290 {
3291 const char *reason = fun->cannot_be_copied_reason;
3292 tree decl;
3293 unsigned ix;
3294
3295 /* Only examine the function once. */
3296 if (fun->cannot_be_copied_set)
3297 return reason;
3298
3299 /* We cannot copy a function that receives a non-local goto
3300 because we cannot remap the destination label used in the
3301 function that is performing the non-local goto. */
3302 /* ??? Actually, this should be possible, if we work at it.
3303 No doubt there's just a handful of places that simply
3304 assume it doesn't happen and don't substitute properly. */
3305 if (fun->has_nonlocal_label)
3306 {
3307 reason = G_("function %q+F can never be copied "
3308 "because it receives a non-local goto");
3309 goto fail;
3310 }
3311
3312 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3313 if (TREE_CODE (decl) == VAR_DECL
3314 && TREE_STATIC (decl)
3315 && !DECL_EXTERNAL (decl)
3316 && DECL_INITIAL (decl)
3317 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3318 has_label_address_in_static_1,
3319 fndecl))
3320 {
3321 reason = G_("function %q+F can never be copied because it saves "
3322 "address of local label in a static variable");
3323 goto fail;
3324 }
3325
3326 fail:
3327 fun->cannot_be_copied_reason = reason;
3328 fun->cannot_be_copied_set = true;
3329 return reason;
3330 }
3331
3332
3333 static const char *inline_forbidden_reason;
3334
3335 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3336 iff a function cannot be inlined. Also sets the reason why. */
3337
3338 static tree
3339 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3340 struct walk_stmt_info *wip)
3341 {
3342 tree fn = (tree) wip->info;
3343 tree t;
3344 gimple stmt = gsi_stmt (*gsi);
3345
3346 switch (gimple_code (stmt))
3347 {
3348 case GIMPLE_CALL:
3349 /* Refuse to inline an alloca call unless the user explicitly forced it,
3350 as this may change the program's memory overhead drastically when the
3351 function using alloca is called in a loop. In the GCC present in
3352 SPEC2000, inlining into schedule_block caused it to require 2GB of
3353 RAM instead of 256MB. Don't do so for alloca calls emitted for
3354 VLA objects, as those can't cause unbounded growth (they're always
3355 wrapped inside stack_save/stack_restore regions). */
3356 if (gimple_alloca_call_p (stmt)
3357 && !gimple_call_alloca_for_var_p (stmt)
3358 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3359 {
3360 inline_forbidden_reason
3361 = G_("function %q+F can never be inlined because it uses "
3362 "alloca (override using the always_inline attribute)");
3363 *handled_ops_p = true;
3364 return fn;
3365 }
3366
3367 t = gimple_call_fndecl (stmt);
3368 if (t == NULL_TREE)
3369 break;
3370
3371 /* We cannot inline functions that call setjmp. */
3372 if (setjmp_call_p (t))
3373 {
3374 inline_forbidden_reason
3375 = G_("function %q+F can never be inlined because it uses setjmp");
3376 *handled_ops_p = true;
3377 return t;
3378 }
3379
3380 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3381 switch (DECL_FUNCTION_CODE (t))
3382 {
3383 /* We cannot inline functions that take a variable number of
3384 arguments. */
3385 case BUILT_IN_VA_START:
3386 case BUILT_IN_NEXT_ARG:
3387 case BUILT_IN_VA_END:
3388 inline_forbidden_reason
3389 = G_("function %q+F can never be inlined because it "
3390 "uses variable argument lists");
3391 *handled_ops_p = true;
3392 return t;
3393
3394 case BUILT_IN_LONGJMP:
3395 /* We can't inline functions that call __builtin_longjmp at
3396 all. The non-local goto machinery really requires the
3397 destination be in a different function. If we allow the
3398 function calling __builtin_longjmp to be inlined into the
3399 function calling __builtin_setjmp, Things will Go Awry. */
3400 inline_forbidden_reason
3401 = G_("function %q+F can never be inlined because "
3402 "it uses setjmp-longjmp exception handling");
3403 *handled_ops_p = true;
3404 return t;
3405
3406 case BUILT_IN_NONLOCAL_GOTO:
3407 /* Similarly. */
3408 inline_forbidden_reason
3409 = G_("function %q+F can never be inlined because "
3410 "it uses non-local goto");
3411 *handled_ops_p = true;
3412 return t;
3413
3414 case BUILT_IN_RETURN:
3415 case BUILT_IN_APPLY_ARGS:
3416 /* If a __builtin_apply_args caller were inlined,
3417 it would be saving arguments of the function it has
3418 been inlined into. Similarly, __builtin_return would
3419 return from the function the call has been inlined into. */
3420 inline_forbidden_reason
3421 = G_("function %q+F can never be inlined because "
3422 "it uses __builtin_return or __builtin_apply_args");
3423 *handled_ops_p = true;
3424 return t;
3425
3426 default:
3427 break;
3428 }
3429 break;
3430
3431 case GIMPLE_GOTO:
3432 t = gimple_goto_dest (stmt);
3433
3434 /* We will not inline a function which uses computed goto. The
3435 addresses of its local labels, which may be tucked into
3436 global storage, are of course not constant across
3437 instantiations, which causes unexpected behavior. */
3438 if (TREE_CODE (t) != LABEL_DECL)
3439 {
3440 inline_forbidden_reason
3441 = G_("function %q+F can never be inlined "
3442 "because it contains a computed goto");
3443 *handled_ops_p = true;
3444 return t;
3445 }
3446 break;
3447
3448 default:
3449 break;
3450 }
3451
3452 *handled_ops_p = false;
3453 return NULL_TREE;
3454 }
3455
3456 /* Return true if FNDECL is a function that cannot be inlined into
3457 another one. */
3458
3459 static bool
3460 inline_forbidden_p (tree fndecl)
3461 {
3462 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3463 struct walk_stmt_info wi;
3464 struct pointer_set_t *visited_nodes;
3465 basic_block bb;
3466 bool forbidden_p = false;
3467
3468 /* First check for shared reasons not to copy the code. */
3469 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3470 if (inline_forbidden_reason != NULL)
3471 return true;
3472
3473 /* Next, walk the statements of the function looking for
3474 constructs we can't handle, or that are non-optimal for inlining. */
3475 visited_nodes = pointer_set_create ();
3476 memset (&wi, 0, sizeof (wi));
3477 wi.info = (void *) fndecl;
3478 wi.pset = visited_nodes;
3479
3480 FOR_EACH_BB_FN (bb, fun)
3481 {
3482 gimple ret;
3483 gimple_seq seq = bb_seq (bb);
3484 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3485 forbidden_p = (ret != NULL);
3486 if (forbidden_p)
3487 break;
3488 }
3489
3490 pointer_set_destroy (visited_nodes);
3491 return forbidden_p;
3492 }
3493 \f
3494 /* Return false if the function FNDECL cannot be inlined on account of its
3495 attributes, true otherwise. */
3496 static bool
3497 function_attribute_inlinable_p (const_tree fndecl)
3498 {
3499 if (targetm.attribute_table)
3500 {
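/* If FNDECL carries any attribute known to the target, defer to the
   target hook to decide whether the function may still be inlined.  */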
3501 const_tree a;
3502
3503 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3504 {
3505 const_tree name = TREE_PURPOSE (a);
3506 int i;
3507
3508 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3509 if (is_attribute_p (targetm.attribute_table[i].name, name))
3510 return targetm.function_attribute_inlinable_p (fndecl);
3511 }
3512 }
3513
3514 return true;
3515 }
3516
3517 /* Returns nonzero if FN is a function that does not have any
3518 fundamental inline blocking properties. */
3519
3520 bool
3521 tree_inlinable_function_p (tree fn)
3522 {
3523 bool inlinable = true;
3524 bool do_warning;
3525 tree always_inline;
3526
3527 /* If we've already decided this function shouldn't be inlined,
3528 there's no need to check again. */
3529 if (DECL_UNINLINABLE (fn))
3530 return false;
3531
3532 /* We only warn for functions declared `inline' by the user. */
3533 do_warning = (warn_inline
3534 && DECL_DECLARED_INLINE_P (fn)
3535 && !DECL_NO_INLINE_WARNING_P (fn)
3536 && !DECL_IN_SYSTEM_HEADER (fn));
3537
3538 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3539
3540 if (flag_no_inline
3541 && always_inline == NULL)
3542 {
3543 if (do_warning)
3544 warning (OPT_Winline, "function %q+F can never be inlined because it "
3545 "is suppressed using -fno-inline", fn);
3546 inlinable = false;
3547 }
3548
3549 else if (!function_attribute_inlinable_p (fn))
3550 {
3551 if (do_warning)
3552 warning (OPT_Winline, "function %q+F can never be inlined because it "
3553 "uses attributes conflicting with inlining", fn);
3554 inlinable = false;
3555 }
3556
3557 else if (inline_forbidden_p (fn))
3558 {
3559 /* See if we should warn about uninlinable functions. Previously,
3560 some of these warnings would be issued while trying to expand
3561 the function inline, but that would cause multiple warnings
3562 about functions that would for example call alloca. But since
3563 this is a property of the function, just one warning is enough.
3564 As a bonus we can now give more details about the reason why a
3565 function is not inlinable. */
3566 if (always_inline)
3567 error (inline_forbidden_reason, fn);
3568 else if (do_warning)
3569 warning (OPT_Winline, inline_forbidden_reason, fn);
3570
3571 inlinable = false;
3572 }
3573
3574 /* Squirrel away the result so that we don't have to check again. */
3575 DECL_UNINLINABLE (fn) = !inlinable;
3576
3577 return inlinable;
3578 }
3579
3580 /* Estimate the cost of a memory move. Use machine-dependent
3581 word size and take a possible memcpy call into account. */
3582
3583 int
3584 estimate_move_cost (tree type)
3585 {
3586 HOST_WIDE_INT size;
3587
3588 gcc_assert (!VOID_TYPE_P (type));
3589
3590 if (TREE_CODE (type) == VECTOR_TYPE)
3591 {
3592 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3593 enum machine_mode simd
3594 = targetm.vectorize.preferred_simd_mode (inner);
3595 int simd_mode_size = GET_MODE_SIZE (simd);
3596 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3597 / simd_mode_size);
3598 }
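/* The computation above means that, on a hypothetical target whose
   preferred SIMD mode is 16 bytes wide, a 32-byte vector type is
   costed as (32 + 16 - 1) / 16 = 2 moves.  */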
3599
3600 size = int_size_in_bytes (type);
3601
3602 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3603 /* Cost of a memcpy call, 3 arguments and the call. */
3604 return 4;
3605 else
3606 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3607 }
3608
3609 /* Returns the cost of operation CODE, according to WEIGHTS. */
3610
3611 static int
3612 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3613 tree op1 ATTRIBUTE_UNUSED, tree op2)
3614 {
3615 switch (code)
3616 {
3617 /* These are "free" conversions, or their presumed cost
3618 is folded into other operations. */
3619 case RANGE_EXPR:
3620 CASE_CONVERT:
3621 case COMPLEX_EXPR:
3622 case PAREN_EXPR:
3623 case VIEW_CONVERT_EXPR:
3624 return 0;
3625
3626 /* Assign cost of 1 to usual operations.
3627 ??? We may consider mapping RTL costs to this. */
3628 case COND_EXPR:
3629 case VEC_COND_EXPR:
3630 case VEC_PERM_EXPR:
3631
3632 case PLUS_EXPR:
3633 case POINTER_PLUS_EXPR:
3634 case MINUS_EXPR:
3635 case MULT_EXPR:
3636 case MULT_HIGHPART_EXPR:
3637 case FMA_EXPR:
3638
3639 case ADDR_SPACE_CONVERT_EXPR:
3640 case FIXED_CONVERT_EXPR:
3641 case FIX_TRUNC_EXPR:
3642
3643 case NEGATE_EXPR:
3644 case FLOAT_EXPR:
3645 case MIN_EXPR:
3646 case MAX_EXPR:
3647 case ABS_EXPR:
3648
3649 case LSHIFT_EXPR:
3650 case RSHIFT_EXPR:
3651 case LROTATE_EXPR:
3652 case RROTATE_EXPR:
3653 case VEC_LSHIFT_EXPR:
3654 case VEC_RSHIFT_EXPR:
3655
3656 case BIT_IOR_EXPR:
3657 case BIT_XOR_EXPR:
3658 case BIT_AND_EXPR:
3659 case BIT_NOT_EXPR:
3660
3661 case TRUTH_ANDIF_EXPR:
3662 case TRUTH_ORIF_EXPR:
3663 case TRUTH_AND_EXPR:
3664 case TRUTH_OR_EXPR:
3665 case TRUTH_XOR_EXPR:
3666 case TRUTH_NOT_EXPR:
3667
3668 case LT_EXPR:
3669 case LE_EXPR:
3670 case GT_EXPR:
3671 case GE_EXPR:
3672 case EQ_EXPR:
3673 case NE_EXPR:
3674 case ORDERED_EXPR:
3675 case UNORDERED_EXPR:
3676
3677 case UNLT_EXPR:
3678 case UNLE_EXPR:
3679 case UNGT_EXPR:
3680 case UNGE_EXPR:
3681 case UNEQ_EXPR:
3682 case LTGT_EXPR:
3683
3684 case CONJ_EXPR:
3685
3686 case PREDECREMENT_EXPR:
3687 case PREINCREMENT_EXPR:
3688 case POSTDECREMENT_EXPR:
3689 case POSTINCREMENT_EXPR:
3690
3691 case REALIGN_LOAD_EXPR:
3692
3693 case REDUC_MAX_EXPR:
3694 case REDUC_MIN_EXPR:
3695 case REDUC_PLUS_EXPR:
3696 case WIDEN_SUM_EXPR:
3697 case WIDEN_MULT_EXPR:
3698 case DOT_PROD_EXPR:
3699 case WIDEN_MULT_PLUS_EXPR:
3700 case WIDEN_MULT_MINUS_EXPR:
3701 case WIDEN_LSHIFT_EXPR:
3702
3703 case VEC_WIDEN_MULT_HI_EXPR:
3704 case VEC_WIDEN_MULT_LO_EXPR:
3705 case VEC_WIDEN_MULT_EVEN_EXPR:
3706 case VEC_WIDEN_MULT_ODD_EXPR:
3707 case VEC_UNPACK_HI_EXPR:
3708 case VEC_UNPACK_LO_EXPR:
3709 case VEC_UNPACK_FLOAT_HI_EXPR:
3710 case VEC_UNPACK_FLOAT_LO_EXPR:
3711 case VEC_PACK_TRUNC_EXPR:
3712 case VEC_PACK_SAT_EXPR:
3713 case VEC_PACK_FIX_TRUNC_EXPR:
3714 case VEC_WIDEN_LSHIFT_HI_EXPR:
3715 case VEC_WIDEN_LSHIFT_LO_EXPR:
3716
3717 return 1;
3718
3719 /* A few special cases of expensive operations. This is useful
3720 to avoid inlining functions having too many of these. */
3721 case TRUNC_DIV_EXPR:
3722 case CEIL_DIV_EXPR:
3723 case FLOOR_DIV_EXPR:
3724 case ROUND_DIV_EXPR:
3725 case EXACT_DIV_EXPR:
3726 case TRUNC_MOD_EXPR:
3727 case CEIL_MOD_EXPR:
3728 case FLOOR_MOD_EXPR:
3729 case ROUND_MOD_EXPR:
3730 case RDIV_EXPR:
3731 if (TREE_CODE (op2) != INTEGER_CST)
3732 return weights->div_mod_cost;
3733 return 1;
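/* For instance, "a / b" with a non-constant divisor is charged
   div_mod_cost, while a division by a constant such as "a / 8" is
   charged like any other cheap operation.  */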
3734
3735 default:
3736 /* We expect a copy assignment with no operator. */
3737 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3738 return 0;
3739 }
3740 }
3741
3742
3743 /* Estimate number of instructions that will be created by expanding
3744 the statements in the statement sequence STMTS.
3745 WEIGHTS contains weights attributed to various constructs. */
3746
3747 static int
3748 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3749 {
3750 int cost;
3751 gimple_stmt_iterator gsi;
3752
3753 cost = 0;
3754 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3755 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3756
3757 return cost;
3758 }
3759
3760
3761 /* Estimate number of instructions that will be created by expanding STMT.
3762 WEIGHTS contains weights attributed to various constructs. */
3763
3764 int
3765 estimate_num_insns (gimple stmt, eni_weights *weights)
3766 {
3767 unsigned cost, i;
3768 enum gimple_code code = gimple_code (stmt);
3769 tree lhs;
3770 tree rhs;
3771
3772 switch (code)
3773 {
3774 case GIMPLE_ASSIGN:
3775 /* Try to estimate the cost of assignments. We have two cases to
3776 deal with:
3777 1) Simple assignments to registers;
3778 2) Stores to things that must live in memory. This includes
3779 "normal" stores to scalars, but also assignments of large
3780 structures, or constructors of big arrays;
3781
3782 Let us look at these two cases, assuming we have "a = b + C":
3783 <GIMPLE_ASSIGN <var_decl "a">
3784 <plus_expr <var_decl "b"> <constant C>>
3785 If "a" is a GIMPLE register, the assignment to it is free on almost
3786 any target, because "a" usually ends up in a real register. Hence
3787 the only cost of this expression comes from the PLUS_EXPR, and we
3788 can ignore the GIMPLE_ASSIGN.
3789 If "a" is not a GIMPLE register, the assignment to "a" will most
3790 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3791 of moving something into "a", which we compute using the function
3792 estimate_move_cost. */
3793 if (gimple_clobber_p (stmt))
3794 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3795
3796 lhs = gimple_assign_lhs (stmt);
3797 rhs = gimple_assign_rhs1 (stmt);
3798
3799 cost = 0;
3800
3801 /* Account for the cost of moving to / from memory. */
3802 if (gimple_store_p (stmt))
3803 cost += estimate_move_cost (TREE_TYPE (lhs));
3804 if (gimple_assign_load_p (stmt))
3805 cost += estimate_move_cost (TREE_TYPE (rhs));
3806
3807 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3808 gimple_assign_rhs1 (stmt),
3809 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3810 == GIMPLE_BINARY_RHS
3811 ? gimple_assign_rhs2 (stmt) : NULL);
3812 break;
3813
3814 case GIMPLE_COND:
3815 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3816 gimple_op (stmt, 0),
3817 gimple_op (stmt, 1));
3818 break;
3819
3820 case GIMPLE_SWITCH:
3821 /* Take into account cost of the switch + guess 2 conditional jumps for
3822 each case label.
3823
3824 TODO: once the switch expansion logic is sufficiently separated, we can
3825 do a better job of estimating the cost of the switch. */
3826 if (weights->time_based)
3827 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3828 else
3829 cost = gimple_switch_num_labels (stmt) * 2;
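/* E.g. (hypothetically) a switch with 8 labels is estimated at
   floor_log2 (8) * 2 = 6 under time-based weights and at
   8 * 2 = 16 under size-based weights.  */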
3830 break;
3831
3832 case GIMPLE_CALL:
3833 {
3834 tree decl;
3835
3836 if (gimple_call_internal_p (stmt))
3837 return 0;
3838 else if ((decl = gimple_call_fndecl (stmt))
3839 && DECL_BUILT_IN (decl))
3840 {
3841 /* Do not special case builtins where we see the body.
3842 This just confuses the inliner. */
3843 struct cgraph_node *node;
3844 if (!(node = cgraph_get_node (decl))
3845 || node->definition)
3846 ;
3847 /* For builtins that are likely expanded to nothing or
3848 inlined, do not account operand costs. */
3849 else if (is_simple_builtin (decl))
3850 return 0;
3851 else if (is_inexpensive_builtin (decl))
3852 return weights->target_builtin_call_cost;
3853 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3854 {
3855 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3856 specialize the cheap expansion we do here.
3857 ??? This asks for a more general solution. */
3858 switch (DECL_FUNCTION_CODE (decl))
3859 {
3860 case BUILT_IN_POW:
3861 case BUILT_IN_POWF:
3862 case BUILT_IN_POWL:
3863 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3864 && REAL_VALUES_EQUAL
3865 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3866 return estimate_operator_cost
3867 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
3868 gimple_call_arg (stmt, 0));
3869 break;
3870
3871 default:
3872 break;
3873 }
3874 }
3875 }
3876
3877 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3878 if (gimple_call_lhs (stmt))
3879 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3880 for (i = 0; i < gimple_call_num_args (stmt); i++)
3881 {
3882 tree arg = gimple_call_arg (stmt, i);
3883 cost += estimate_move_cost (TREE_TYPE (arg));
3884 }
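/* As a rough sketch, a direct call "x = f (a, b)" with word-sized
   scalar operands (each move costed at 1) comes out as
   weights->call_cost + 3: one move for the result plus one per
   argument.  */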
3885 break;
3886 }
3887
3888 case GIMPLE_RETURN:
3889 return weights->return_cost;
3890
3891 case GIMPLE_GOTO:
3892 case GIMPLE_LABEL:
3893 case GIMPLE_NOP:
3894 case GIMPLE_PHI:
3895 case GIMPLE_PREDICT:
3896 case GIMPLE_DEBUG:
3897 return 0;
3898
3899 case GIMPLE_ASM:
3900 {
3901 int count = asm_str_count (gimple_asm_string (stmt));
3902 /* 1000 means infinity. This avoids overflows later
3903 with very long asm statements. */
3904 if (count > 1000)
3905 count = 1000;
3906 return count;
3907 }
3908
3909 case GIMPLE_RESX:
3910 /* This is either going to be an external function call with one
3911 argument, or two register copy statements plus a goto. */
3912 return 2;
3913
3914 case GIMPLE_EH_DISPATCH:
3915 /* ??? This is going to turn into a switch statement. Ideally
3916 we'd have a look at the eh region and estimate the number of
3917 edges involved. */
3918 return 10;
3919
3920 case GIMPLE_BIND:
3921 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3922
3923 case GIMPLE_EH_FILTER:
3924 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3925
3926 case GIMPLE_CATCH:
3927 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3928
3929 case GIMPLE_TRY:
3930 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3931 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3932
3933 /* OpenMP directives are generally very expensive. */
3934
3935 case GIMPLE_OMP_RETURN:
3936 case GIMPLE_OMP_SECTIONS_SWITCH:
3937 case GIMPLE_OMP_ATOMIC_STORE:
3938 case GIMPLE_OMP_CONTINUE:
3939 /* ...except these, which are cheap. */
3940 return 0;
3941
3942 case GIMPLE_OMP_ATOMIC_LOAD:
3943 return weights->omp_cost;
3944
3945 case GIMPLE_OMP_FOR:
3946 return (weights->omp_cost
3947 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3948 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3949
3950 case GIMPLE_OMP_PARALLEL:
3951 case GIMPLE_OMP_TASK:
3952 case GIMPLE_OMP_CRITICAL:
3953 case GIMPLE_OMP_MASTER:
3954 case GIMPLE_OMP_TASKGROUP:
3955 case GIMPLE_OMP_ORDERED:
3956 case GIMPLE_OMP_SECTION:
3957 case GIMPLE_OMP_SECTIONS:
3958 case GIMPLE_OMP_SINGLE:
3959 case GIMPLE_OMP_TARGET:
3960 case GIMPLE_OMP_TEAMS:
3961 return (weights->omp_cost
3962 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3963
3964 case GIMPLE_TRANSACTION:
3965 return (weights->tm_cost
3966 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3967 weights));
3968
3969 default:
3970 gcc_unreachable ();
3971 }
3972
3973 return cost;
3974 }
3975
3976 /* Estimate number of instructions that will be created by expanding
3977 function FNDECL. WEIGHTS contains weights attributed to various
3978 constructs. */
3979
3980 int
3981 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3982 {
3983 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3984 gimple_stmt_iterator bsi;
3985 basic_block bb;
3986 int n = 0;
3987
3988 gcc_assert (my_function && my_function->cfg);
3989 FOR_EACH_BB_FN (bb, my_function)
3990 {
3991 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3992 n += estimate_num_insns (gsi_stmt (bsi), weights);
3993 }
3994
3995 return n;
3996 }
3997
3998
3999 /* Initializes weights used by estimate_num_insns. */
4000
4001 void
4002 init_inline_once (void)
4003 {
4004 eni_size_weights.call_cost = 1;
4005 eni_size_weights.indirect_call_cost = 3;
4006 eni_size_weights.target_builtin_call_cost = 1;
4007 eni_size_weights.div_mod_cost = 1;
4008 eni_size_weights.omp_cost = 40;
4009 eni_size_weights.tm_cost = 10;
4010 eni_size_weights.time_based = false;
4011 eni_size_weights.return_cost = 1;
4012
4013 /* Estimating time for call is difficult, since we have no idea what the
4014 called function does. In the current uses of eni_time_weights,
4015 underestimating the cost does less harm than overestimating it, so
4016 we choose a rather small value here. */
4017 eni_time_weights.call_cost = 10;
4018 eni_time_weights.indirect_call_cost = 15;
4019 eni_time_weights.target_builtin_call_cost = 1;
4020 eni_time_weights.div_mod_cost = 10;
4021 eni_time_weights.omp_cost = 40;
4022 eni_time_weights.tm_cost = 40;
4023 eni_time_weights.time_based = true;
4024 eni_time_weights.return_cost = 2;
4025 }
4026
4027 /* Estimate the number of instructions in a gimple_seq. */
4028
4029 int
4030 count_insns_seq (gimple_seq seq, eni_weights *weights)
4031 {
4032 gimple_stmt_iterator gsi;
4033 int n = 0;
4034 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4035 n += estimate_num_insns (gsi_stmt (gsi), weights);
4036
4037 return n;
4038 }
4039
4040
4041 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
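/* For example, if CURRENT_BLOCK already has sub-blocks B1 -> B2, the
   result is NEW_BLOCK -> B1 -> B2, with NEW_BLOCK's BLOCK_SUPERCONTEXT
   set to CURRENT_BLOCK.  */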
4042
4043 static void
4044 prepend_lexical_block (tree current_block, tree new_block)
4045 {
4046 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4047 BLOCK_SUBBLOCKS (current_block) = new_block;
4048 BLOCK_SUPERCONTEXT (new_block) = current_block;
4049 }
4050
4051 /* Add local variables from CALLEE to CALLER. */
4052
4053 static inline void
4054 add_local_variables (struct function *callee, struct function *caller,
4055 copy_body_data *id)
4056 {
4057 tree var;
4058 unsigned ix;
4059
4060 FOR_EACH_LOCAL_DECL (callee, ix, var)
4061 if (!can_be_nonlocal (var, id))
4062 {
4063 tree new_var = remap_decl (var, id);
4064
4065 /* Remap debug-expressions. */
4066 if (TREE_CODE (new_var) == VAR_DECL
4067 && DECL_HAS_DEBUG_EXPR_P (var)
4068 && new_var != var)
4069 {
4070 tree tem = DECL_DEBUG_EXPR (var);
4071 bool old_regimplify = id->regimplify;
4072 id->remapping_type_depth++;
4073 walk_tree (&tem, copy_tree_body_r, id, NULL);
4074 id->remapping_type_depth--;
4075 id->regimplify = old_regimplify;
4076 SET_DECL_DEBUG_EXPR (new_var, tem);
4077 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4078 }
4079 add_local_decl (caller, new_var);
4080 }
4081 }
4082
4083 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
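/* In outline, the expansion below looks up the callee via the cgraph
   edge and bails out if inlining is forbidden; splits BB after the
   call; remaps the callee's parameters, locals and blocks; copies the
   callee body between the two halves; and finally replaces the
   GIMPLE_CALL with an assignment from the inlined return value (or
   removes it when the result is unused).  */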
4084
4085 static bool
4086 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4087 {
4088 tree use_retvar;
4089 tree fn;
4090 struct pointer_map_t *st, *dst;
4091 tree return_slot;
4092 tree modify_dest;
4093 location_t saved_location;
4094 struct cgraph_edge *cg_edge;
4095 cgraph_inline_failed_t reason;
4096 basic_block return_block;
4097 edge e;
4098 gimple_stmt_iterator gsi, stmt_gsi;
4099 bool successfully_inlined = FALSE;
4100 bool purge_dead_abnormal_edges;
4101
4102 /* Set input_location here so we get the right instantiation context
4103 if we call instantiate_decl from inlinable_function_p. */
4104 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4105 saved_location = input_location;
4106 input_location = gimple_location (stmt);
4107
4108 /* From here on, we're only interested in CALL_EXPRs. */
4109 if (gimple_code (stmt) != GIMPLE_CALL)
4110 goto egress;
4111
4112 cg_edge = cgraph_edge (id->dst_node, stmt);
4113 gcc_checking_assert (cg_edge);
4114 /* First, see if we can figure out what function is being called.
4115 If we cannot, then there is no hope of inlining the function. */
4116 if (cg_edge->indirect_unknown_callee)
4117 goto egress;
4118 fn = cg_edge->callee->decl;
4119 gcc_checking_assert (fn);
4120
4121 /* If FN is a declaration of a function in a nested scope that was
4122 globally declared inline, we don't set its DECL_INITIAL.
4123 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4124 C++ front-end uses it for cdtors to refer to their internal
4125 declarations, which are not real functions. Fortunately those
4126 don't have trees to be saved, so we can tell by checking their
4127 gimple_body. */
4128 if (!DECL_INITIAL (fn)
4129 && DECL_ABSTRACT_ORIGIN (fn)
4130 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4131 fn = DECL_ABSTRACT_ORIGIN (fn);
4132
4133 /* Don't try to inline functions that are not well-suited to inlining. */
4134 if (cg_edge->inline_failed)
4135 {
4136 reason = cg_edge->inline_failed;
4137 /* If this call was originally indirect, we do not want to emit any
4138 inlining related warnings or sorry messages because there are no
4139 guarantees regarding those. */
4140 if (cg_edge->indirect_inlining_edge)
4141 goto egress;
4142
4143 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4144 /* For extern inline functions that get redefined we have always
4145 silently ignored the always_inline flag. Better behaviour would
4146 be to keep both bodies and use the extern inline body
4147 for inlining, but we can't do that because frontends overwrite
4148 the body. */
4149 && !cg_edge->callee->local.redefined_extern_inline
4150 /* During early inline pass, report only when optimization is
4151 not turned on. */
4152 && (cgraph_global_info_ready
4153 || !optimize
4154 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4155 /* PR 20090218-1_0.c. Body can be provided by another module. */
4156 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4157 {
4158 error ("inlining failed in call to always_inline %q+F: %s", fn,
4159 cgraph_inline_failed_string (reason));
4160 error ("called from here");
4161 }
4162 else if (warn_inline
4163 && DECL_DECLARED_INLINE_P (fn)
4164 && !DECL_NO_INLINE_WARNING_P (fn)
4165 && !DECL_IN_SYSTEM_HEADER (fn)
4166 && reason != CIF_UNSPECIFIED
4167 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4168 /* Do not warn about not inlined recursive calls. */
4169 && !cgraph_edge_recursive_p (cg_edge)
4170 /* Avoid warnings during early inline pass. */
4171 && cgraph_global_info_ready)
4172 {
4173 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4174 fn, _(cgraph_inline_failed_string (reason)));
4175 warning (OPT_Winline, "called from here");
4176 }
4177 goto egress;
4178 }
4179 fn = cg_edge->callee->decl;
4180 cgraph_get_body (cg_edge->callee);
4181
4182 #ifdef ENABLE_CHECKING
4183 if (cg_edge->callee->decl != id->dst_node->decl)
4184 verify_cgraph_node (cg_edge->callee);
4185 #endif
4186
4187 /* We will be inlining this callee. */
4188 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4189
4190 /* Update the caller's EH personality. */
4191 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4192 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4193 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4194
4195 /* Split the block holding the GIMPLE_CALL. */
4196 e = split_block (bb, stmt);
4197 bb = e->src;
4198 return_block = e->dest;
4199 remove_edge (e);
4200
4201 /* split_block splits after the statement; work around this by
4202 moving the call into the second block manually. Not pretty,
4203 but seems easier than doing the CFG manipulation by hand
4204 when the GIMPLE_CALL is in the last statement of BB. */
4205 stmt_gsi = gsi_last_bb (bb);
4206 gsi_remove (&stmt_gsi, false);
4207
4208 /* If the GIMPLE_CALL was the last statement of BB, it may have
4209 been the source of abnormal edges. In this case, schedule
4210 the removal of dead abnormal edges. */
4211 gsi = gsi_start_bb (return_block);
4212 if (gsi_end_p (gsi))
4213 {
4214 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4215 purge_dead_abnormal_edges = true;
4216 }
4217 else
4218 {
4219 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4220 purge_dead_abnormal_edges = false;
4221 }
4222
4223 stmt_gsi = gsi_start_bb (return_block);
4224
4225 /* Build a block containing code to initialize the arguments, the
4226 actual inline expansion of the body, and a label for the return
4227 statements within the function to jump to. The type of the
4228 statement expression is the return type of the function call.
4229 ??? If the call does not have an associated block then we will
4230 remap all callee blocks to NULL, effectively dropping most of
4231 its debug information. This should only happen for calls to
4232 artificial decls inserted by the compiler itself. We need to
4233 either link the inlined blocks into the caller block tree or
4234 not refer to them in any way to not break GC for locations. */
4235 if (gimple_block (stmt))
4236 {
4237 id->block = make_node (BLOCK);
4238 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4239 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4240 prepend_lexical_block (gimple_block (stmt), id->block);
4241 }
4242
4243 /* Local declarations will be replaced by their equivalents in this
4244 map. */
4245 st = id->decl_map;
4246 id->decl_map = pointer_map_create ();
4247 dst = id->debug_map;
4248 id->debug_map = NULL;
4249
4250 /* Record the function we are about to inline. */
4251 id->src_fn = fn;
4252 id->src_node = cg_edge->callee;
4253 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4254 id->gimple_call = stmt;
4255
4256 gcc_assert (!id->src_cfun->after_inlining);
4257
4258 id->entry_bb = bb;
4259 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4260 {
4261 gimple_stmt_iterator si = gsi_last_bb (bb);
4262 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4263 NOT_TAKEN),
4264 GSI_NEW_STMT);
4265 }
4266 initialize_inlined_parameters (id, stmt, fn, bb);
4267
4268 if (DECL_INITIAL (fn))
4269 {
4270 if (gimple_block (stmt))
4271 {
4272 tree *var;
4273
4274 prepend_lexical_block (id->block,
4275 remap_blocks (DECL_INITIAL (fn), id));
4276 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4277 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4278 == NULL_TREE));
4279 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4280 otherwise, in DWARF, the DW_TAG_formal_parameter entries will not be
4281 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4282 under it. The parameters can then be evaluated in the debugger,
4283 but don't show up in backtraces. */
4284 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4285 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4286 {
4287 tree v = *var;
4288 *var = TREE_CHAIN (v);
4289 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4290 BLOCK_VARS (id->block) = v;
4291 }
4292 else
4293 var = &TREE_CHAIN (*var);
4294 }
4295 else
4296 remap_blocks_to_null (DECL_INITIAL (fn), id);
4297 }
4298
4299 /* Return statements in the function body will be replaced by jumps
4300 to the RET_LABEL. */
4301 gcc_assert (DECL_INITIAL (fn));
4302 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4303
4304 /* Find the LHS to which the result of this call is assigned. */
4305 return_slot = NULL;
4306 if (gimple_call_lhs (stmt))
4307 {
4308 modify_dest = gimple_call_lhs (stmt);
4309
4310 /* The function which we are inlining might not return a value,
4311 in which case we should issue a warning that the function
4312 does not return a value. In that case the optimizers will
4313 see that the variable to which the value is assigned was not
4314 initialized. We do not want to issue a warning about that
4315 uninitialized variable. */
4316 if (DECL_P (modify_dest))
4317 TREE_NO_WARNING (modify_dest) = 1;
4318
4319 if (gimple_call_return_slot_opt_p (stmt))
4320 {
4321 return_slot = modify_dest;
4322 modify_dest = NULL;
4323 }
4324 }
4325 else
4326 modify_dest = NULL;
4327
4328 /* If we are inlining a call to the C++ operator new, we don't want
4329 to use type based alias analysis on the return value. Otherwise
4330 we may get confused if the compiler sees that the inlined new
4331 function returns a pointer which was just deleted. See bug
4332 33407. */
4333 if (DECL_IS_OPERATOR_NEW (fn))
4334 {
4335 return_slot = NULL;
4336 modify_dest = NULL;
4337 }
4338
4339 /* Declare the return variable for the function. */
4340 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4341
4342 /* Add local vars in this inlined callee to caller. */
4343 add_local_variables (id->src_cfun, cfun, id);
4344
4345 if (dump_file && (dump_flags & TDF_DETAILS))
4346 {
4347 fprintf (dump_file, "Inlining ");
4348 print_generic_expr (dump_file, id->src_fn, 0);
4349 fprintf (dump_file, " to ");
4350 print_generic_expr (dump_file, id->dst_fn, 0);
4351 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4352 }
4353
4354 /* This is it. Duplicate the callee body. Assume callee is
4355 pre-gimplified. Note that we must not alter the caller
4356 function in any way before this point, as this CALL_EXPR may be
4357 a self-referential call; if we're calling ourselves, we need to
4358 duplicate our body before altering anything. */
4359 copy_body (id, cg_edge->callee->count,
4360 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4361 bb, return_block, NULL);
4362
4363 /* Reset the escaped solution. */
4364 if (cfun->gimple_df)
4365 pt_solution_reset (&cfun->gimple_df->escaped);
4366
4367 /* Clean up. */
4368 if (id->debug_map)
4369 {
4370 pointer_map_destroy (id->debug_map);
4371 id->debug_map = dst;
4372 }
4373 pointer_map_destroy (id->decl_map);
4374 id->decl_map = st;
4375
4376 /* Unlink the call's virtual operands before replacing it. */
4377 unlink_stmt_vdef (stmt);
4378 if (gimple_vdef (stmt)
4379 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4380 release_ssa_name (gimple_vdef (stmt));
4381
4382 /* If the inlined function returns a result that we care about,
4383 substitute the GIMPLE_CALL with an assignment of the return
4384 variable to the LHS of the call. That is, if STMT was
4385 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4386 if (use_retvar && gimple_call_lhs (stmt))
4387 {
4388 gimple old_stmt = stmt;
4389 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4390 gsi_replace (&stmt_gsi, stmt, false);
4391 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4392 }
4393 else
4394 {
4395 /* Handle the case of inlining a function with no return
4396 statement, which causes the return value to become undefined. */
4397 if (gimple_call_lhs (stmt)
4398 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4399 {
4400 tree name = gimple_call_lhs (stmt);
4401 tree var = SSA_NAME_VAR (name);
4402 tree def = ssa_default_def (cfun, var);
4403
4404 if (def)
4405 {
4406 /* If the variable is used undefined, make this name
4407 undefined via a move. */
4408 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4409 gsi_replace (&stmt_gsi, stmt, true);
4410 }
4411 else
4412 {
4413 /* Otherwise make this variable undefined. */
4414 gsi_remove (&stmt_gsi, true);
4415 set_ssa_default_def (cfun, var, name);
4416 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4417 }
4418 }
4419 else
4420 gsi_remove (&stmt_gsi, true);
4421 }
4422
4423 if (purge_dead_abnormal_edges)
4424 {
4425 gimple_purge_dead_eh_edges (return_block);
4426 gimple_purge_dead_abnormal_call_edges (return_block);
4427 }
4428
4429 /* If the value of the new expression is ignored, that's OK. We
4430 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4431 the equivalent inlined version either. */
4432 if (is_gimple_assign (stmt))
4433 {
4434 gcc_assert (gimple_assign_single_p (stmt)
4435 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4436 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4437 }
4438
4439 /* Output the inlining info for this abstract function, since it has been
4440 inlined. If we don't do this now, we can lose the information about the
4441 variables in the function when the blocks get blown away as soon as we
4442 remove the cgraph node. */
4443 if (gimple_block (stmt))
4444 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4445
4446 /* Update callgraph if needed. */
4447 cgraph_remove_node (cg_edge->callee);
4448
4449 id->block = NULL_TREE;
4450 successfully_inlined = TRUE;
4451
4452 egress:
4453 input_location = saved_location;
4454 return successfully_inlined;
4455 }
4456
4457 /* Expand call statements reachable from STMT_P.
4458 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4459 in a MODIFY_EXPR. */
4460
4461 static bool
4462 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4463 {
4464 gimple_stmt_iterator gsi;
4465
4466 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4467 {
4468 gimple stmt = gsi_stmt (gsi);
4469
4470 if (is_gimple_call (stmt)
4471 && !gimple_call_internal_p (stmt)
4472 && expand_call_inline (bb, stmt, id))
4473 return true;
4474 }
4475
4476 return false;
4477 }
4478
4479
4480 /* Walk all basic blocks created after FIRST and try to fold every statement
4481 in the STATEMENTS pointer set. */
4482
4483 static void
4484 fold_marked_statements (int first, struct pointer_set_t *statements)
4485 {
4486 for (; first < n_basic_blocks_for_fn (cfun); first++)
4487 if (BASIC_BLOCK_FOR_FN (cfun, first))
4488 {
4489 gimple_stmt_iterator gsi;
4490
4491 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4492 !gsi_end_p (gsi);
4493 gsi_next (&gsi))
4494 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4495 {
4496 gimple old_stmt = gsi_stmt (gsi);
4497 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4498
4499 if (old_decl && DECL_BUILT_IN (old_decl))
4500 {
4501 /* Folding builtins can create multiple instructions,
4502 we need to look at all of them. */
4503 gimple_stmt_iterator i2 = gsi;
4504 gsi_prev (&i2);
4505 if (fold_stmt (&gsi))
4506 {
4507 gimple new_stmt;
4508 /* If a builtin at the end of a bb folded into nothing,
4509 the following loop won't work. */
4510 if (gsi_end_p (gsi))
4511 {
4512 cgraph_update_edges_for_call_stmt (old_stmt,
4513 old_decl, NULL);
4514 break;
4515 }
4516 if (gsi_end_p (i2))
4517 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4518 else
4519 gsi_next (&i2);
4520 while (1)
4521 {
4522 new_stmt = gsi_stmt (i2);
4523 update_stmt (new_stmt);
4524 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4525 new_stmt);
4526
4527 if (new_stmt == gsi_stmt (gsi))
4528 {
4529 /* It is okay to check only for the very last
4530 of these statements. If it is a throwing
4531 statement nothing will change. If it isn't,
4532 this can remove EH edges. The only case this
4533 could go wrong is if some intermediate stmts
4534 throw, but not the last one. That would mean
4535 we'd have to split the block, which we can't
4536 do here and we'd lose anyway. And as builtins
4537 probably never throw, this all
4538 is moot anyway. */
4539 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4540 new_stmt))
4541 gimple_purge_dead_eh_edges (
4542 BASIC_BLOCK_FOR_FN (cfun, first));
4543 break;
4544 }
4545 gsi_next (&i2);
4546 }
4547 }
4548 }
4549 else if (fold_stmt (&gsi))
4550 {
4551 /* Re-read the statement from GSI as fold_stmt() may
4552 have changed it. */
4553 gimple new_stmt = gsi_stmt (gsi);
4554 update_stmt (new_stmt);
4555
4556 if (is_gimple_call (old_stmt)
4557 || is_gimple_call (new_stmt))
4558 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4559 new_stmt);
4560
4561 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4562 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4563 first));
4564 }
4565 }
4566 }
4567 }
4568
4569 /* Expand calls to inline functions in the body of FN. */
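/* Statements that may need folding are recorded in
   ID.statements_to_fold while callee bodies are copied; once every
   call in FN has been expanded, fold_marked_statements (above) folds
   them in the blocks created during inlining.  */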
4570
4571 unsigned int
4572 optimize_inline_calls (tree fn)
4573 {
4574 copy_body_data id;
4575 basic_block bb;
4576 int last = n_basic_blocks_for_fn (cfun);
4577 bool inlined_p = false;
4578
4579 /* Clear out ID. */
4580 memset (&id, 0, sizeof (id));
4581
4582 id.src_node = id.dst_node = cgraph_get_node (fn);
4583 gcc_assert (id.dst_node->definition);
4584 id.dst_fn = fn;
4585 /* Or any functions that aren't finished yet. */
4586 if (current_function_decl)
4587 id.dst_fn = current_function_decl;
4588
4589 id.copy_decl = copy_decl_maybe_to_var;
4590 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4591 id.transform_new_cfg = false;
4592 id.transform_return_to_modify = true;
4593 id.transform_parameter = true;
4594 id.transform_lang_insert_block = NULL;
4595 id.statements_to_fold = pointer_set_create ();
4596
4597 push_gimplify_context ();
4598
4599 /* We make no attempts to keep dominance info up-to-date. */
4600 free_dominance_info (CDI_DOMINATORS);
4601 free_dominance_info (CDI_POST_DOMINATORS);
4602
4603 /* Register specific gimple functions. */
4604 gimple_register_cfg_hooks ();
4605
4606 /* Reach the trees by walking over the CFG, and note the
4607 enclosing basic-blocks in the call edges. */
4608 /* We walk the blocks going forward, because inlined function bodies
4609 will split id->current_basic_block, and the new blocks will
4610 follow it; we'll trudge through them, processing their CALL_EXPRs
4611 along the way. */
4612 FOR_EACH_BB_FN (bb, cfun)
4613 inlined_p |= gimple_expand_calls_inline (bb, &id);
4614
4615 pop_gimplify_context (NULL);
4616
4617 #ifdef ENABLE_CHECKING
4618 {
4619 struct cgraph_edge *e;
4620
4621 verify_cgraph_node (id.dst_node);
4622
4623 /* Double check that we inlined everything we are supposed to inline. */
4624 for (e = id.dst_node->callees; e; e = e->next_callee)
4625 gcc_assert (e->inline_failed);
4626 }
4627 #endif
4628
4629 /* Fold queued statements. */
4630 fold_marked_statements (last, id.statements_to_fold);
4631 pointer_set_destroy (id.statements_to_fold);
4632
4633 gcc_assert (!id.debug_stmts.exists ());
4634
4635 /* If we didn't inline into the function there is nothing to do. */
4636 if (!inlined_p)
4637 return 0;
4638
4639 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4640 number_blocks (fn);
4641
4642 delete_unreachable_blocks_update_callgraph (&id);
4643 #ifdef ENABLE_CHECKING
4644 verify_cgraph_node (id.dst_node);
4645 #endif
4646
4647 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4648 not possible yet - the IPA passes might make various functions not
4649 throw and they don't care to proactively update local EH info. This is
4650 done later in the fixup_cfg pass that also executes the verification. */
4651 return (TODO_update_ssa
4652 | TODO_cleanup_cfg
4653 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4654 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4655 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4656 ? TODO_rebuild_frequencies : 0));
4657 }
4658
4659 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4660
4661 tree
4662 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4663 {
4664 enum tree_code code = TREE_CODE (*tp);
4665 enum tree_code_class cl = TREE_CODE_CLASS (code);
4666
4667 /* We make copies of most nodes. */
4668 if (IS_EXPR_CODE_CLASS (cl)
4669 || code == TREE_LIST
4670 || code == TREE_VEC
4671 || code == TYPE_DECL
4672 || code == OMP_CLAUSE)
4673 {
4674 /* Because the chain gets clobbered when we make a copy, we save it
4675 here. */
4676 tree chain = NULL_TREE, new_tree;
4677
4678 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4679 chain = TREE_CHAIN (*tp);
4680
4681 /* Copy the node. */
4682 new_tree = copy_node (*tp);
4683
4684 *tp = new_tree;
4685
4686 /* Now, restore the chain, if appropriate. That will cause
4687 walk_tree to walk into the chain as well. */
4688 if (code == PARM_DECL
4689 || code == TREE_LIST
4690 || code == OMP_CLAUSE)
4691 TREE_CHAIN (*tp) = chain;
4692
4693 /* For now, we don't update BLOCKs when we make copies. So, we
4694 have to nullify all BIND_EXPRs. */
4695 if (TREE_CODE (*tp) == BIND_EXPR)
4696 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4697 }
4698 else if (code == CONSTRUCTOR)
4699 {
4700 /* CONSTRUCTOR nodes need special handling because
4701 we need to duplicate the vector of elements. */
4702 tree new_tree;
4703
4704 new_tree = copy_node (*tp);
4705 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4706 *tp = new_tree;
4707 }
4708 else if (code == STATEMENT_LIST)
4709 /* We used to just abort on STATEMENT_LIST, but we can run into them
4710 with statement-expressions (c++/40975). */
4711 copy_statement_list (tp);
4712 else if (TREE_CODE_CLASS (code) == tcc_type)
4713 *walk_subtrees = 0;
4714 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4715 *walk_subtrees = 0;
4716 else if (TREE_CODE_CLASS (code) == tcc_constant)
4717 *walk_subtrees = 0;
4718 return NULL_TREE;
4719 }
4720
4721 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4722 information indicating to what new SAVE_EXPR this one should be mapped,
4723 use that one. Otherwise, create a new node and enter it in ST. FN is
4724 the function into which the copy will be placed. */
4725
4726 static void
4727 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4728 {
4729 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4730 tree *n;
4731 tree t;
4732
4733 /* See if we already encountered this SAVE_EXPR. */
4734 n = (tree *) pointer_map_contains (st, *tp);
4735
4736 /* If we didn't already remap this SAVE_EXPR, do so now. */
4737 if (!n)
4738 {
4739 t = copy_node (*tp);
4740
4741 /* Remember this SAVE_EXPR. */
4742 *pointer_map_insert (st, *tp) = t;
4743 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4744 *pointer_map_insert (st, t) = t;
4745 }
4746 else
4747 {
4748 /* We've already walked into this SAVE_EXPR; don't do it again. */
4749 *walk_subtrees = 0;
4750 t = *n;
4751 }
4752
4753 /* Replace this SAVE_EXPR with the copy. */
4754 *tp = t;
4755 }
4756
4757 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4758 label, copies the declaration and enters it in the decl map in DATA (which
4759 is really a 'copy_body_data *'). */
4760
4761 static tree
4762 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4763 bool *handled_ops_p ATTRIBUTE_UNUSED,
4764 struct walk_stmt_info *wi)
4765 {
4766 copy_body_data *id = (copy_body_data *) wi->info;
4767 gimple stmt = gsi_stmt (*gsip);
4768
4769 if (gimple_code (stmt) == GIMPLE_LABEL)
4770 {
4771 tree decl = gimple_label_label (stmt);
4772
4773 /* Copy the decl and remember the copy. */
4774 insert_decl_map (id, decl, id->copy_decl (decl, id));
4775 }
4776
4777 return NULL_TREE;
4778 }
4779
4780
4781 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4782 Using the decl map in the copy_body_data carried in the walk_stmt_info,
4783 remaps all local declarations to appropriate replacements in gimple
4784 operands. */
4785
4786 static tree
4787 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4788 {
4789 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4790 copy_body_data *id = (copy_body_data *) wi->info;
4791 struct pointer_map_t *st = id->decl_map;
4792 tree *n;
4793 tree expr = *tp;
4794
4795 /* Only a local declaration (variable or label). */
4796 if ((TREE_CODE (expr) == VAR_DECL
4797 && !TREE_STATIC (expr))
4798 || TREE_CODE (expr) == LABEL_DECL)
4799 {
4800 /* Lookup the declaration. */
4801 n = (tree *) pointer_map_contains (st, expr);
4802
4803 /* If it's there, remap it. */
4804 if (n)
4805 *tp = *n;
4806 *walk_subtrees = 0;
4807 }
4808 else if (TREE_CODE (expr) == STATEMENT_LIST
4809 || TREE_CODE (expr) == BIND_EXPR
4810 || TREE_CODE (expr) == SAVE_EXPR)
4811 gcc_unreachable ();
4812 else if (TREE_CODE (expr) == TARGET_EXPR)
4813 {
4814 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4815 It's OK for this to happen if it was part of a subtree that
4816 isn't immediately expanded, such as operand 2 of another
4817 TARGET_EXPR. */
4818 if (!TREE_OPERAND (expr, 1))
4819 {
4820 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4821 TREE_OPERAND (expr, 3) = NULL_TREE;
4822 }
4823 }
4824
4825 /* Keep iterating. */
4826 return NULL_TREE;
4827 }
4828
4829
4830 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4831 Using the decl map in the copy_body_data carried in the walk_stmt_info,
4832 remaps all local declarations to appropriate replacements in gimple
4833 statements. */
4834
4835 static tree
4836 replace_locals_stmt (gimple_stmt_iterator *gsip,
4837 bool *handled_ops_p ATTRIBUTE_UNUSED,
4838 struct walk_stmt_info *wi)
4839 {
4840 copy_body_data *id = (copy_body_data *) wi->info;
4841 gimple stmt = gsi_stmt (*gsip);
4842
4843 if (gimple_code (stmt) == GIMPLE_BIND)
4844 {
4845 tree block = gimple_bind_block (stmt);
4846
4847 if (block)
4848 {
4849 remap_block (&block, id);
4850 gimple_bind_set_block (stmt, block);
4851 }
4852
4853 /* This will remap a lot of the same decls again, but this should be
4854 harmless. */
4855 if (gimple_bind_vars (stmt))
4856 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4857 NULL, id));
4858 }
4859
4860 /* Keep iterating. */
4861 return NULL_TREE;
4862 }
4863
4864
4865 /* Copies everything in SEQ and replaces variables and labels local to
4866 current_function_decl. */
4867
4868 gimple_seq
4869 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4870 {
4871 copy_body_data id;
4872 struct walk_stmt_info wi;
4873 struct pointer_set_t *visited;
4874 gimple_seq copy;
4875
4876 /* There's nothing to do for NULL_TREE. */
4877 if (seq == NULL)
4878 return seq;
4879
4880 /* Set up ID. */
4881 memset (&id, 0, sizeof (id));
4882 id.src_fn = current_function_decl;
4883 id.dst_fn = current_function_decl;
4884 id.decl_map = pointer_map_create ();
4885 id.debug_map = NULL;
4886
4887 id.copy_decl = copy_decl_no_change;
4888 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4889 id.transform_new_cfg = false;
4890 id.transform_return_to_modify = false;
4891 id.transform_parameter = false;
4892 id.transform_lang_insert_block = NULL;
4893
4894 /* Walk the tree once to find local labels. */
4895 memset (&wi, 0, sizeof (wi));
4896 visited = pointer_set_create ();
4897 wi.info = &id;
4898 wi.pset = visited;
4899 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4900 pointer_set_destroy (visited);
4901
4902 copy = gimple_seq_copy (seq);
4903
4904 /* Walk the copy, remapping decls. */
4905 memset (&wi, 0, sizeof (wi));
4906 wi.info = &id;
4907 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4908
4909 /* Clean up. */
4910 pointer_map_destroy (id.decl_map);
4911 if (id.debug_map)
4912 pointer_map_destroy (id.debug_map);
4913
4914 return copy;
4915 }
4916
4917
4918 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4919
4920 static tree
4921 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4922 {
4923 if (*tp == data)
4924 return (tree) data;
4925 else
4926 return NULL;
4927 }
4928
4929 DEBUG_FUNCTION bool
4930 debug_find_tree (tree top, tree search)
4931 {
4932 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4933 }
4934
4935
4936 /* Declare the variables created by the inliner. Add all the variables in
4937 VARS to the current function's local decls and to BLOCK. */
4938
4939 static void
4940 declare_inline_vars (tree block, tree vars)
4941 {
4942 tree t;
4943 for (t = vars; t; t = DECL_CHAIN (t))
4944 {
4945 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4946 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4947 add_local_decl (cfun, t);
4948 }
4949
4950 if (block)
4951 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4952 }
4953
4954 /* Finish up COPY, a copy of DECL. The DECL originally lived in ID->src_fn,
4955 but the copy will live in ID->dst_fn. Copy the flags relevant for debug
4956 info and set the context of the copy appropriately. */
4957
4958 static tree
4959 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4960 {
4961 /* Don't generate debug information for the copy if we wouldn't have
4962 generated it for the original either. */
4963 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4964 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4965
4966 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4967 declaration inspired this copy. */
4968 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4969
4970 /* The new variable/label has no RTL, yet. */
4971 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4972 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4973 SET_DECL_RTL (copy, 0);
4974
4975 /* These args would always appear unused, if not for this. */
4976 TREE_USED (copy) = 1;
4977
4978 /* Set the context for the new declaration. */
4979 if (!DECL_CONTEXT (decl))
4980 /* Globals stay global. */
4981 ;
4982 else if (DECL_CONTEXT (decl) != id->src_fn)
4983 /* Things that weren't in the scope of the function we're inlining
4984 from aren't in the scope we're inlining to, either. */
4985 ;
4986 else if (TREE_STATIC (decl))
4987 /* Function-scoped static variables should stay in the original
4988 function. */
4989 ;
4990 else
4991 /* Ordinary automatic local variables are now in the scope of the
4992 new function. */
4993 DECL_CONTEXT (copy) = id->dst_fn;
4994
4995 return copy;
4996 }
4997
4998 static tree
4999 copy_decl_to_var (tree decl, copy_body_data *id)
5000 {
5001 tree copy, type;
5002
5003 gcc_assert (TREE_CODE (decl) == PARM_DECL
5004 || TREE_CODE (decl) == RESULT_DECL);
5005
5006 type = TREE_TYPE (decl);
5007
5008 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5009 VAR_DECL, DECL_NAME (decl), type);
5010 if (DECL_PT_UID_SET_P (decl))
5011 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5012 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5013 TREE_READONLY (copy) = TREE_READONLY (decl);
5014 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5015 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5016
5017 return copy_decl_for_dup_finish (id, decl, copy);
5018 }
5019
5020 /* Like copy_decl_to_var, but create a return slot object instead of a
5021 pointer variable for return by invisible reference. */
5022
5023 static tree
5024 copy_result_decl_to_var (tree decl, copy_body_data *id)
5025 {
5026 tree copy, type;
5027
5028 gcc_assert (TREE_CODE (decl) == PARM_DECL
5029 || TREE_CODE (decl) == RESULT_DECL);
5030
5031 type = TREE_TYPE (decl);
5032 if (DECL_BY_REFERENCE (decl))
5033 type = TREE_TYPE (type);
5034
5035 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5036 VAR_DECL, DECL_NAME (decl), type);
5037 if (DECL_PT_UID_SET_P (decl))
5038 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5039 TREE_READONLY (copy) = TREE_READONLY (decl);
5040 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5041 if (!DECL_BY_REFERENCE (decl))
5042 {
5043 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5044 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5045 }
5046
5047 return copy_decl_for_dup_finish (id, decl, copy);
5048 }
5049
5050 tree
5051 copy_decl_no_change (tree decl, copy_body_data *id)
5052 {
5053 tree copy;
5054
5055 copy = copy_node (decl);
5056
5057 /* The COPY is not abstract; it will be generated in DST_FN. */
5058 DECL_ABSTRACT (copy) = 0;
5059 lang_hooks.dup_lang_specific_decl (copy);
5060
5061 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5062 been taken; it's for internal bookkeeping in expand_goto_internal. */
5063 if (TREE_CODE (copy) == LABEL_DECL)
5064 {
5065 TREE_ADDRESSABLE (copy) = 0;
5066 LABEL_DECL_UID (copy) = -1;
5067 }
5068
5069 return copy_decl_for_dup_finish (id, decl, copy);
5070 }
5071
5072 static tree
5073 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5074 {
5075 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5076 return copy_decl_to_var (decl, id);
5077 else
5078 return copy_decl_no_change (decl, id);
5079 }
5080
5081 /* Return a copy of the function's argument tree. */
5082 static tree
5083 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5084 bitmap args_to_skip, tree *vars)
5085 {
5086 tree arg, *parg;
5087 tree new_parm = NULL;
5088 int i = 0;
5089
5090 parg = &new_parm;
5091
5092 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5093 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5094 {
5095 tree new_tree = remap_decl (arg, id);
5096 if (TREE_CODE (new_tree) != PARM_DECL)
5097 new_tree = id->copy_decl (arg, id);
5098 lang_hooks.dup_lang_specific_decl (new_tree);
5099 *parg = new_tree;
5100 parg = &DECL_CHAIN (new_tree);
5101 }
5102 else if (!pointer_map_contains (id->decl_map, arg))
5103 {
5104 /* Make an equivalent VAR_DECL. If the argument was used
5105 as a temporary variable later in the function, the uses will be
5106 replaced by a local variable. */
5107 tree var = copy_decl_to_var (arg, id);
5108 insert_decl_map (id, arg, var);
5109 /* Declare this new variable. */
5110 DECL_CHAIN (var) = *vars;
5111 *vars = var;
5112 }
5113 return new_parm;
5114 }
5115
5116 /* Return a copy of the function's static chain. */
5117 static tree
5118 copy_static_chain (tree static_chain, copy_body_data * id)
5119 {
5120 tree *chain_copy, *pvar;
5121
5122 chain_copy = &static_chain;
5123 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5124 {
5125 tree new_tree = remap_decl (*pvar, id);
5126 lang_hooks.dup_lang_specific_decl (new_tree);
5127 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5128 *pvar = new_tree;
5129 }
5130 return static_chain;
5131 }
5132
5133 /* Return true if the function is allowed to be versioned.
5134 This is a guard for the versioning functionality. */
5135
5136 bool
5137 tree_versionable_function_p (tree fndecl)
5138 {
5139 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5140 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5141 }
5142
5143 /* Delete all unreachable basic blocks and update callgraph.
5144 Doing so is somewhat nontrivial because we need to update all clones and
5145 remove inline functions that become unreachable. */
5146
5147 static bool
5148 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5149 {
5150 bool changed = false;
5151 basic_block b, next_bb;
5152
5153 find_unreachable_blocks ();
5154
5155 /* Delete all unreachable basic blocks. */
5156
5157 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5158 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5159 {
5160 next_bb = b->next_bb;
5161
5162 if (!(b->flags & BB_REACHABLE))
5163 {
5164 gimple_stmt_iterator bsi;
5165
5166 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5167 {
5168 struct cgraph_edge *e;
5169 struct cgraph_node *node;
5170
5171 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5172
5173 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5174 && (e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5175 {
5176 if (!e->inline_failed)
5177 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5178 else
5179 cgraph_remove_edge (e);
5180 }
5181 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5182 && id->dst_node->clones)
5183 for (node = id->dst_node->clones; node != id->dst_node;)
5184 {
5185 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5186 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5187 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5188 {
5189 if (!e->inline_failed)
5190 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5191 else
5192 cgraph_remove_edge (e);
5193 }
5194
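/* Advance to the next clone in a preorder walk of the clone tree:
   visit this node's clones first, then its next sibling clone,
   otherwise climb back up via clone_of until a sibling is found or
   we are back at dst_node.  */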
5195 if (node->clones)
5196 node = node->clones;
5197 else if (node->next_sibling_clone)
5198 node = node->next_sibling_clone;
5199 else
5200 {
5201 while (node != id->dst_node && !node->next_sibling_clone)
5202 node = node->clone_of;
5203 if (node != id->dst_node)
5204 node = node->next_sibling_clone;
5205 }
5206 }
5207 }
5208 delete_basic_block (b);
5209 changed = true;
5210 }
5211 }
5212
5213 return changed;
5214 }
5215
5216 /* Update clone info after duplication. */
5217
5218 static void
5219 update_clone_info (copy_body_data * id)
5220 {
5221 struct cgraph_node *node;
5222 if (!id->dst_node->clones)
5223 return;
5224 for (node = id->dst_node->clones; node != id->dst_node;)
5225 {
5226 /* First update replace maps to match the new body. */
5227 if (node->clone.tree_map)
5228 {
5229 unsigned int i;
5230 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5231 {
5232 struct ipa_replace_map *replace_info;
5233 replace_info = (*node->clone.tree_map)[i];
5234 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5235 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5236 }
5237 }
5238 if (node->clones)
5239 node = node->clones;
5240 else if (node->next_sibling_clone)
5241 node = node->next_sibling_clone;
5242 else
5243 {
5244 while (node != id->dst_node && !node->next_sibling_clone)
5245 node = node->clone_of;
5246 if (node != id->dst_node)
5247 node = node->next_sibling_clone;
5248 }
5249 }
5250 }
5251
5252 /* Create a copy of a function's tree.
5253 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5254 of the original function and the new copied function
5255 respectively. In case we want to replace a DECL
5256 tree with another tree while duplicating the function's
5257 body, TREE_MAP represents the mapping between these
5258 trees. If UPDATE_CLONES is set, the call_stmt fields
5259 of edges of clones of the function will be updated.
5260
5261 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5262 from the new version.
5263 If SKIP_RETURN is true, the new version will return void.
5264 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5265 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5266 */
5267 void
5268 tree_function_versioning (tree old_decl, tree new_decl,
5269 vec<ipa_replace_map_p, va_gc> *tree_map,
5270 bool update_clones, bitmap args_to_skip,
5271 bool skip_return, bitmap blocks_to_copy,
5272 basic_block new_entry)
5273 {
5274 struct cgraph_node *old_version_node;
5275 struct cgraph_node *new_version_node;
5276 copy_body_data id;
5277 tree p;
5278 unsigned i;
5279 struct ipa_replace_map *replace_info;
5280 basic_block old_entry_block, bb;
5281 auto_vec<gimple, 10> init_stmts;
5282 tree vars = NULL_TREE;
5283
5284 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5285 && TREE_CODE (new_decl) == FUNCTION_DECL);
5286 DECL_POSSIBLY_INLINED (old_decl) = 1;
5287
5288 old_version_node = cgraph_get_node (old_decl);
5289 gcc_checking_assert (old_version_node);
5290 new_version_node = cgraph_get_node (new_decl);
5291 gcc_checking_assert (new_version_node);
5292
5293 /* Copy over debug args. */
5294 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5295 {
5296 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5297 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
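/* NEW_DECL may have inherited the flag from OLD_DECL without having
   debug args of its own; clear it and let decl_debug_args_insert set
   it again when the copied vector is installed. */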
5298 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5299 old_debug_args = decl_debug_args_lookup (old_decl);
5300 if (old_debug_args)
5301 {
5302 new_debug_args = decl_debug_args_insert (new_decl);
5303 *new_debug_args = vec_safe_copy (*old_debug_args);
5304 }
5305 }
5306
5307 /* Output the inlining info for this abstract function, since it has been
5308 inlined. If we don't do this now, we can lose the information about the
5309 variables in the function when the blocks get blown away as soon as we
5310 remove the cgraph node. */
5311 (*debug_hooks->outlining_inline_function) (old_decl);
5312
5313 DECL_ARTIFICIAL (new_decl) = 1;
5314 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5315 if (DECL_ORIGIN (old_decl) == old_decl)
5316 old_version_node->used_as_abstract_origin = true;
5317 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5318
5319 /* Prepare the data structures for the tree copy. */
5320 memset (&id, 0, sizeof (id));
5321
5322 /* Set up the set of statements to be folded once the body is copied. */
5323 id.statements_to_fold = pointer_set_create ();
5324
5325 id.decl_map = pointer_map_create ();
5326 id.debug_map = NULL;
5327 id.src_fn = old_decl;
5328 id.dst_fn = new_decl;
5329 id.src_node = old_version_node;
5330 id.dst_node = new_version_node;
5331 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5332 id.blocks_to_copy = blocks_to_copy;
5333
5334 id.copy_decl = copy_decl_no_change;
5335 id.transform_call_graph_edges
5336 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5337 id.transform_new_cfg = true;
5338 id.transform_return_to_modify = false;
5339 id.transform_parameter = false;
5340 id.transform_lang_insert_block = NULL;
5341
5342 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5343 (DECL_STRUCT_FUNCTION (old_decl));
5344 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5345 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5346 initialize_cfun (new_decl, old_decl,
5347 old_entry_block->count);
5348 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5349 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5350 = id.src_cfun->gimple_df->ipa_pta;
5351
5352 /* Copy the function's static chain. */
5353 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5354 if (p)
5355 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5356 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5357 &id);
5358
5359 /* If there's a tree_map, prepare for substitution. */
5360 if (tree_map)
5361 for (i = 0; i < tree_map->length (); i++)
5362 {
5363 gimple init;
5364 replace_info = (*tree_map)[i];
5365 if (replace_info->replace_p)
5366 {
5367 if (!replace_info->old_tree)
5368 {
5369 int i = replace_info->parm_num;
5370 tree parm;
5371 tree req_type;
5372
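/* PARM_NUM gives the position of the parameter to replace; walk the
   argument chain of OLD_DECL to find the corresponding PARM_DECL. */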
5373 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5374 i--;
5375 replace_info->old_tree = parm;
5376 req_type = TREE_TYPE (parm);
5377 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5378 {
5379 if (fold_convertible_p (req_type, replace_info->new_tree))
5380 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5381 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5382 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5383 else
5384 {
5385 if (dump_file)
5386 {
5387 fprintf (dump_file, " const ");
5388 print_generic_expr (dump_file, replace_info->new_tree, 0);
5389 fprintf (dump_file, " can't be converted to param ");
5390 print_generic_expr (dump_file, parm, 0);
5391 fprintf (dump_file, "\n");
5392 }
5393 replace_info->old_tree = NULL;
5394 }
5395 }
5396 }
5397 else
5398 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5399 if (replace_info->old_tree)
5400 {
5401 init = setup_one_parameter (&id, replace_info->old_tree,
5402 replace_info->new_tree, id.src_fn,
5403 NULL,
5404 &vars);
5405 if (init)
5406 init_stmts.safe_push (init);
5407 }
5408 }
5409 }
5410 /* Copy the function's arguments. */
5411 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5412 DECL_ARGUMENTS (new_decl) =
5413 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5414 args_to_skip, &vars);
5415
5416 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5417 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5418
5419 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5420
5421 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5422 /* Add local vars. */
5423 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5424
5425 if (DECL_RESULT (old_decl) == NULL_TREE)
5426 ;
5427 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5428 {
5429 DECL_RESULT (new_decl)
5430 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5431 RESULT_DECL, NULL_TREE, void_type_node);
5432 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5433 cfun->returns_struct = 0;
5434 cfun->returns_pcc_struct = 0;
5435 }
5436 else
5437 {
5438 tree old_name;
5439 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5440 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
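/* If the result is returned by reference and had an SSA default
   definition in the source function, give the new result a matching
   default definition so remapped uses resolve to it. */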
5441 if (gimple_in_ssa_p (id.src_cfun)
5442 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5443 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5444 {
5445 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5446 insert_decl_map (&id, old_name, new_name);
5447 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5448 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5449 }
5450 }
5451
5452 /* Set up the destination function's loop tree. */
5453 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5454 {
5455 cfun->curr_properties &= ~PROP_loops;
5456 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5457 cfun->curr_properties |= PROP_loops;
5458 }
5459
5460 /* Copy the function's body. */
5461 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5462 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5463 new_entry);
5464
5465 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5466 number_blocks (new_decl);
5467
5468 /* Split the edge out of the entry block unconditionally, so that the
5469 presence or absence of debug stmts doesn't affect the BB count, which
5470 could otherwise cause codegen differences. */
5471 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5472 while (init_stmts.length ())
5473 insert_init_stmt (&id, bb, init_stmts.pop ());
5474 update_clone_info (&id);
5475
5476 /* Remap the nonlocal_goto_save_area, if any. */
5477 if (cfun->nonlocal_goto_save_area)
5478 {
5479 struct walk_stmt_info wi;
5480
5481 memset (&wi, 0, sizeof (wi));
5482 wi.info = &id;
5483 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5484 }
5485
5486 /* Clean up. */
5487 pointer_map_destroy (id.decl_map);
5488 if (id.debug_map)
5489 pointer_map_destroy (id.debug_map);
5490 free_dominance_info (CDI_DOMINATORS);
5491 free_dominance_info (CDI_POST_DOMINATORS);
5492
5493 fold_marked_statements (0, id.statements_to_fold);
5494 pointer_set_destroy (id.statements_to_fold);
5495 fold_cond_expr_cond ();
5496 delete_unreachable_blocks_update_callgraph (&id);
5497 if (id.dst_node->definition)
5498 cgraph_rebuild_references ();
5499 update_ssa (TODO_update_ssa);
5500
5501 /* After partial cloning we need to rescale frequencies, so that they
5502 are within the proper range in the cloned function. */
5503 if (new_entry)
5504 {
5505 struct cgraph_edge *e;
5506 rebuild_frequencies ();
5507
5508 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5509 for (e = new_version_node->callees; e; e = e->next_callee)
5510 {
5511 basic_block bb = gimple_bb (e->call_stmt);
5512 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5513 bb);
5514 e->count = bb->count;
5515 }
5516 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5517 {
5518 basic_block bb = gimple_bb (e->call_stmt);
5519 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5520 bb);
5521 e->count = bb->count;
5522 }
5523 }
5524
5525 free_dominance_info (CDI_DOMINATORS);
5526 free_dominance_info (CDI_POST_DOMINATORS);
5527
5528 gcc_assert (!id.debug_stmts.exists ());
5529 pop_cfun ();
5530 return;
5531 }
5532
5533 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5534 the callee and return the inlined body on success. */
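/* A hedged usage sketch (CALL below is a hypothetical GENERIC CALL_EXPR
   built by a front end):

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       call = folded;   /* Use the inlined body instead of the call. */

   NULL_TREE is returned when the callee cannot be integrated. */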
5535
5536 tree
5537 maybe_inline_call_in_expr (tree exp)
5538 {
5539 tree fn = get_callee_fndecl (exp);
5540
5541 /* We can only try to inline "const" functions. */
5542 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5543 {
5544 struct pointer_map_t *decl_map = pointer_map_create ();
5545 call_expr_arg_iterator iter;
5546 copy_body_data id;
5547 tree param, arg, t;
5548
5549 /* Remap the parameters. */
5550 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5551 param;
5552 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5553 *pointer_map_insert (decl_map, param) = arg;
5554
5555 memset (&id, 0, sizeof (id));
5556 id.src_fn = fn;
5557 id.dst_fn = current_function_decl;
5558 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5559 id.decl_map = decl_map;
5560
5561 id.copy_decl = copy_decl_no_change;
5562 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5563 id.transform_new_cfg = false;
5564 id.transform_return_to_modify = true;
5565 id.transform_parameter = true;
5566 id.transform_lang_insert_block = NULL;
5567
5568 /* Make sure not to unshare trees behind the front-end's back
5569 since front-end specific mechanisms may rely on sharing. */
5570 id.regimplify = false;
5571 id.do_not_unshare = true;
5572
5573 /* We're not inside any EH region. */
5574 id.eh_lp_nr = 0;
5575
5576 t = copy_tree_body (&id);
5577 pointer_map_destroy (decl_map);
5578
5579 /* We can only return something suitable for use in a GENERIC
5580 expression tree. */
5581 if (TREE_CODE (t) == MODIFY_EXPR)
5582 return TREE_OPERAND (t, 1);
5583 }
5584
5585 return NULL_TREE;
5586 }
5587
5588 /* Duplicate a type, fields and all. */
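/* A minimal usage sketch (ORIG_TYPE is a hypothetical existing type):

     tree dup = build_duplicate_type (orig_type);

   The copy is made its own canonical type, so canonical-type comparisons
   treat it as distinct from ORIG_TYPE. */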
5589
5590 tree
5591 build_duplicate_type (tree type)
5592 {
5593 struct copy_body_data id;
5594
5595 memset (&id, 0, sizeof (id));
5596 id.src_fn = current_function_decl;
5597 id.dst_fn = current_function_decl;
5598 id.src_cfun = cfun;
5599 id.decl_map = pointer_map_create ();
5600 id.debug_map = NULL;
5601 id.copy_decl = copy_decl_no_change;
5602
5603 type = remap_type_1 (type, &id);
5604
5605 pointer_map_destroy (id.decl_map);
5606 if (id.debug_map)
5607 pointer_map_destroy (id.debug_map);
5608
5609 TYPE_CANONICAL (type) = type;
5610
5611 return type;
5612 }