1 /* Tree inlining.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "flags.h"
29 #include "params.h"
30 #include "input.h"
31 #include "insn-config.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-iterator.h"
36 #include "intl.h"
37 #include "tree-mudflap.h"
38 #include "gimple.h"
39 #include "gimple-ssa.h"
40 #include "tree-cfg.h"
41 #include "tree-phinodes.h"
42 #include "ssa-iterators.h"
43 #include "tree-ssanames.h"
44 #include "tree-into-ssa.h"
45 #include "tree-dfa.h"
46 #include "tree-ssa.h"
47 #include "function.h"
48 #include "tree-pretty-print.h"
49 #include "except.h"
50 #include "debug.h"
51 #include "pointer-set.h"
52 #include "ipa-prop.h"
53 #include "value-prof.h"
54 #include "tree-pass.h"
55 #include "target.h"
56 #include "cfgloop.h"
57
58 #include "rtl.h" /* FIXME: For asm_str_count. */
59
60 /* I'm not really happy about this, but we need to handle gimple and
61 non-gimple trees. */
62
63 /* Inlining, Cloning, Versioning, Parallelization
64
65 Inlining: a function body is duplicated, but the PARM_DECLs are
66 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
67 MODIFY_EXPRs that store to a dedicated returned-value variable.
68 The duplicated eh_region info of the copy will later be appended
69 to the info for the caller; the eh_region info in copied throwing
70 statements and RESX statements are adjusted accordingly.
71
72 Cloning: (only in C++) We have one body for a con/de/structor, and
73 multiple function decls, each with a unique parameter list.
74 Duplicate the body, using the given splay tree; some parameters
75 will become constants (like 0 or 1).
76
77 Versioning: a function body is duplicated and the result is a new
78 function rather than into blocks of an existing function as with
79 inlining. Some parameters will become constants.
80
81 Parallelization: a region of a function is duplicated resulting in
82 a new function. Variables may be replaced with complex expressions
83 to enable shared variable semantics.
84
85 All of these will simultaneously look up any callgraph edges. If
86 we're going to inline the duplicated function body, and the given
87 function has some cloned callgraph nodes (one for each place this
88 function will be inlined), those callgraph edges will be duplicated.
89 If we're cloning the body, those callgraph edges will be
90 updated to point into the new body. (Note that the original
91 callgraph node and edge list will not be altered.)
92
93 See the CALL_EXPR handling case in copy_tree_body_r (). */
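/* As a rough illustration of the inlining transformation described
   above: inlining

     int sq (int x) { return x * x; }

   into "y = sq (a);" conceptually turns the PARM_DECL x into a local
   VAR_DECL initialized from the argument, and the RETURN_EXPR into an
   assignment to the variable standing in for the RESULT_DECL:

     x.1 = a;
     retval.2 = x.1 * x.1;
     y = retval.2;

   The temporaries x.1 and retval.2 are made-up names used only for
   this sketch; the real copy is driven by remap_decl, remap_ssa_name
   and declare_return_variable below.  */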
94
95 /* To Do:
96
97 o In order to make inlining-on-trees work, we pessimized
98 function-local static constants. In particular, they are now
99 always output, even when not addressed. Fix this by treating
100 function-local static constants just like global static
101 constants; the back-end already knows not to output them if they
102 are not needed.
103
104 o Provide heuristics to clamp inlining of recursive template
105 calls? */
106
107
108 /* Weights that estimate_num_insns uses to estimate the size of the
109 produced code. */
110
111 eni_weights eni_size_weights;
112
113 /* Weights that estimate_num_insns uses to estimate the time necessary
114 to execute the produced code. */
115
116 eni_weights eni_time_weights;
117
118 /* Prototypes. */
119
120 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
121 static void remap_block (tree *, copy_body_data *);
122 static void copy_bind_expr (tree *, int *, copy_body_data *);
123 static void declare_inline_vars (tree, tree);
124 static void remap_save_expr (tree *, void *, int *);
125 static void prepend_lexical_block (tree current_block, tree new_block);
126 static tree copy_decl_to_var (tree, copy_body_data *);
127 static tree copy_result_decl_to_var (tree, copy_body_data *);
128 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
129 static gimple remap_gimple_stmt (gimple, copy_body_data *);
130 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
131
132 /* Insert a tree->tree mapping for ID. Although the name suggests
133 that the trees should be variables, this is used for more than that. */
134
135 void
136 insert_decl_map (copy_body_data *id, tree key, tree value)
137 {
138 *pointer_map_insert (id->decl_map, key) = value;
139
140 /* Always insert an identity map as well. If we see this same new
141 node again, we won't want to duplicate it a second time. */
142 if (key != value)
143 *pointer_map_insert (id->decl_map, value) = value;
144 }
145
146 /* Insert a tree->tree mapping for ID. This is only used for
147 variables. */
148
149 static void
150 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
151 {
152 if (!gimple_in_ssa_p (id->src_cfun))
153 return;
154
155 if (!MAY_HAVE_DEBUG_STMTS)
156 return;
157
158 if (!target_for_debug_bind (key))
159 return;
160
161 gcc_assert (TREE_CODE (key) == PARM_DECL);
162 gcc_assert (TREE_CODE (value) == VAR_DECL);
163
164 if (!id->debug_map)
165 id->debug_map = pointer_map_create ();
166
167 *pointer_map_insert (id->debug_map, key) = value;
168 }
169
170 /* If nonzero, we're remapping the contents of inlined debug
171 statements. If negative, an error has occurred, such as a
172 reference to a variable that isn't available in the inlined
173 context. */
174 static int processing_debug_stmt = 0;
175
176 /* Construct new SSA name for old NAME. ID is the inline context. */
177
178 static tree
179 remap_ssa_name (tree name, copy_body_data *id)
180 {
181 tree new_tree, var;
182 tree *n;
183
184 gcc_assert (TREE_CODE (name) == SSA_NAME);
185
186 n = (tree *) pointer_map_contains (id->decl_map, name);
187 if (n)
188 return unshare_expr (*n);
189
190 if (processing_debug_stmt)
191 {
192 if (SSA_NAME_IS_DEFAULT_DEF (name)
193 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
194 && id->entry_bb == NULL
195 && single_succ_p (ENTRY_BLOCK_PTR))
196 {
197 tree vexpr = make_node (DEBUG_EXPR_DECL);
198 gimple def_temp;
199 gimple_stmt_iterator gsi;
200 tree val = SSA_NAME_VAR (name);
201
202 n = (tree *) pointer_map_contains (id->decl_map, val);
203 if (n != NULL)
204 val = *n;
205 if (TREE_CODE (val) != PARM_DECL)
206 {
207 processing_debug_stmt = -1;
208 return name;
209 }
210 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
211 DECL_ARTIFICIAL (vexpr) = 1;
212 TREE_TYPE (vexpr) = TREE_TYPE (name);
213 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
214 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
215 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
216 return vexpr;
217 }
218
219 processing_debug_stmt = -1;
220 return name;
221 }
222
223 /* Remap anonymous SSA names or SSA names of anonymous decls. */
224 var = SSA_NAME_VAR (name);
225 if (!var
226 || (!SSA_NAME_IS_DEFAULT_DEF (name)
227 && TREE_CODE (var) == VAR_DECL
228 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
229 && DECL_ARTIFICIAL (var)
230 && DECL_IGNORED_P (var)
231 && !DECL_NAME (var)))
232 {
233 struct ptr_info_def *pi;
234 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
235 if (!var && SSA_NAME_IDENTIFIER (name))
236 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
237 insert_decl_map (id, name, new_tree);
238 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
239 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
240 /* At least IPA points-to info can be directly transferred. */
241 if (id->src_cfun->gimple_df
242 && id->src_cfun->gimple_df->ipa_pta
243 && (pi = SSA_NAME_PTR_INFO (name))
244 && !pi->pt.anything)
245 {
246 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
247 new_pi->pt = pi->pt;
248 }
249 return new_tree;
250 }
251
252 /* Do not set DEF_STMT yet, as the statement has not been copied yet.
253 We do that in copy_bb. */
254 new_tree = remap_decl (var, id);
255
256 /* We might have substituted a constant or another SSA_NAME for
257 the variable.
258
259 Replace the SSA name representing the RESULT_DECL by the variable during
260 inlining: this saves us from needing to introduce a PHI node in case
261 the return value is only partly initialized. */
262 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
263 && (!SSA_NAME_VAR (name)
264 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
265 || !id->transform_return_to_modify))
266 {
267 struct ptr_info_def *pi;
268 new_tree = make_ssa_name (new_tree, NULL);
269 insert_decl_map (id, name, new_tree);
270 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
271 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
272 /* At least IPA points-to info can be directly transferred. */
273 if (id->src_cfun->gimple_df
274 && id->src_cfun->gimple_df->ipa_pta
275 && (pi = SSA_NAME_PTR_INFO (name))
276 && !pi->pt.anything)
277 {
278 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
279 new_pi->pt = pi->pt;
280 }
281 if (SSA_NAME_IS_DEFAULT_DEF (name))
282 {
283 /* By inlining a function having an uninitialized variable, we might
284 extend its lifetime (the variable might get reused). This causes an
285 ICE in the case we end up extending the lifetime of an SSA name across
286 an abnormal edge, but it also increases register pressure.
287
288 We simply initialize all uninitialized vars with 0, except
289 when we are inlining into the very first BB. We could avoid
290 this for all BBs that are not inside strongly connected
291 regions of the CFG, but that is expensive to test. */
292 if (id->entry_bb
293 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
294 && (!SSA_NAME_VAR (name)
295 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
296 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
297 || EDGE_COUNT (id->entry_bb->preds) != 1))
298 {
299 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
300 gimple init_stmt;
301 tree zero = build_zero_cst (TREE_TYPE (new_tree));
302
303 init_stmt = gimple_build_assign (new_tree, zero);
304 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
305 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
306 }
307 else
308 {
309 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
310 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
311 }
312 }
313 }
314 else
315 insert_decl_map (id, name, new_tree);
316 return new_tree;
317 }
318
319 /* Remap DECL during the copying of the BLOCK tree for the function. */
320
321 tree
322 remap_decl (tree decl, copy_body_data *id)
323 {
324 tree *n;
325
326 /* We only remap local variables in the current function. */
327
328 /* See if we have remapped this declaration. */
329
330 n = (tree *) pointer_map_contains (id->decl_map, decl);
331
332 if (!n && processing_debug_stmt)
333 {
334 processing_debug_stmt = -1;
335 return decl;
336 }
337
338 /* If we didn't already have an equivalent for this declaration,
339 create one now. */
340 if (!n)
341 {
342 /* Make a copy of the variable or label. */
343 tree t = id->copy_decl (decl, id);
344
345 /* Remember it, so that if we encounter this local entity again
346 we can reuse this copy. Do this early because remap_type may
347 need this decl for TYPE_STUB_DECL. */
348 insert_decl_map (id, decl, t);
349
350 if (!DECL_P (t))
351 return t;
352
353 /* Remap types, if necessary. */
354 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
355 if (TREE_CODE (t) == TYPE_DECL)
356 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
357
358 /* Remap sizes as necessary. */
359 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
360 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
361
362 /* If fields, do likewise for offset and qualifier. */
363 if (TREE_CODE (t) == FIELD_DECL)
364 {
365 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
366 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
367 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
368 }
369
370 return t;
371 }
372
373 if (id->do_not_unshare)
374 return *n;
375 else
376 return unshare_expr (*n);
377 }
378
379 static tree
380 remap_type_1 (tree type, copy_body_data *id)
381 {
382 tree new_tree, t;
383
384 /* We do need a copy. Build and register it now. If this is a pointer or
385 reference type, remap the designated type and make a new pointer or
386 reference type. */
387 if (TREE_CODE (type) == POINTER_TYPE)
388 {
389 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
390 TYPE_MODE (type),
391 TYPE_REF_CAN_ALIAS_ALL (type));
392 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
393 new_tree = build_type_attribute_qual_variant (new_tree,
394 TYPE_ATTRIBUTES (type),
395 TYPE_QUALS (type));
396 insert_decl_map (id, type, new_tree);
397 return new_tree;
398 }
399 else if (TREE_CODE (type) == REFERENCE_TYPE)
400 {
401 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
402 TYPE_MODE (type),
403 TYPE_REF_CAN_ALIAS_ALL (type));
404 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
405 new_tree = build_type_attribute_qual_variant (new_tree,
406 TYPE_ATTRIBUTES (type),
407 TYPE_QUALS (type));
408 insert_decl_map (id, type, new_tree);
409 return new_tree;
410 }
411 else
412 new_tree = copy_node (type);
413
414 insert_decl_map (id, type, new_tree);
415
416 /* This is a new type, not a copy of an old type. Need to reassociate
417 variants. We can handle everything except the main variant lazily. */
418 t = TYPE_MAIN_VARIANT (type);
419 if (type != t)
420 {
421 t = remap_type (t, id);
422 TYPE_MAIN_VARIANT (new_tree) = t;
423 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
424 TYPE_NEXT_VARIANT (t) = new_tree;
425 }
426 else
427 {
428 TYPE_MAIN_VARIANT (new_tree) = new_tree;
429 TYPE_NEXT_VARIANT (new_tree) = NULL;
430 }
431
432 if (TYPE_STUB_DECL (type))
433 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
434
435 /* Lazily create pointer and reference types. */
436 TYPE_POINTER_TO (new_tree) = NULL;
437 TYPE_REFERENCE_TO (new_tree) = NULL;
438
439 switch (TREE_CODE (new_tree))
440 {
441 case INTEGER_TYPE:
442 case REAL_TYPE:
443 case FIXED_POINT_TYPE:
444 case ENUMERAL_TYPE:
445 case BOOLEAN_TYPE:
446 t = TYPE_MIN_VALUE (new_tree);
447 if (t && TREE_CODE (t) != INTEGER_CST)
448 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
449
450 t = TYPE_MAX_VALUE (new_tree);
451 if (t && TREE_CODE (t) != INTEGER_CST)
452 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
453 return new_tree;
454
455 case FUNCTION_TYPE:
456 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
457 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
458 return new_tree;
459
460 case ARRAY_TYPE:
461 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
462 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
463 break;
464
465 case RECORD_TYPE:
466 case UNION_TYPE:
467 case QUAL_UNION_TYPE:
468 {
469 tree f, nf = NULL;
470
471 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
472 {
473 t = remap_decl (f, id);
474 DECL_CONTEXT (t) = new_tree;
475 DECL_CHAIN (t) = nf;
476 nf = t;
477 }
478 TYPE_FIELDS (new_tree) = nreverse (nf);
479 }
480 break;
481
482 case OFFSET_TYPE:
483 default:
484 /* Should not have been considered variably sized. */
485 gcc_unreachable ();
486 }
487
488 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
489 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
490
491 return new_tree;
492 }
493
494 tree
495 remap_type (tree type, copy_body_data *id)
496 {
497 tree *node;
498 tree tmp;
499
500 if (type == NULL)
501 return type;
502
503 /* See if we have remapped this type. */
504 node = (tree *) pointer_map_contains (id->decl_map, type);
505 if (node)
506 return *node;
507
508 /* The type only needs remapping if it's variably modified. */
509 if (! variably_modified_type_p (type, id->src_fn))
510 {
511 insert_decl_map (id, type, type);
512 return type;
513 }
514
515 id->remapping_type_depth++;
516 tmp = remap_type_1 (type, id);
517 id->remapping_type_depth--;
518
519 return tmp;
520 }
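/* For example, a variably modified type such as the type of "a" in

     void f (int n) { int a[n]; use (a); }

   refers to the size "n" and therefore needs a remapped copy referring
   to the copied "n", whereas a fixed-size type like int[10] can be
   shared untouched between the original and the copied body.
   (Illustrative sketch only; "use" is just a placeholder.)  */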
521
522 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
523
524 static bool
525 can_be_nonlocal (tree decl, copy_body_data *id)
526 {
527 /* We cannot duplicate function decls. */
528 if (TREE_CODE (decl) == FUNCTION_DECL)
529 return true;
530
531 /* Local static vars must be non-local or we get multiple declaration
532 problems. */
533 if (TREE_CODE (decl) == VAR_DECL
534 && !auto_var_in_fn_p (decl, id->src_fn))
535 return true;
536
537 return false;
538 }
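/* For instance, in

     int counter (void) { static int n; return ++n; }

   the function-local static "n" must not be duplicated when the body is
   copied; can_be_nonlocal reports it as nonlocal, and remap_decls below
   keeps the original decl (recording it in BLOCK_NONLOCALIZED_VARS when
   debug info asks for it).  Illustrative sketch only.  */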
539
540 static tree
541 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
542 copy_body_data *id)
543 {
544 tree old_var;
545 tree new_decls = NULL_TREE;
546
547 /* Remap its variables. */
548 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
549 {
550 tree new_var;
551
552 if (can_be_nonlocal (old_var, id))
553 {
554 /* We need to add this variable to the local decls as otherwise
555 nothing else will do so. */
556 if (TREE_CODE (old_var) == VAR_DECL
557 && ! DECL_EXTERNAL (old_var))
558 add_local_decl (cfun, old_var);
559 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
560 && !DECL_IGNORED_P (old_var)
561 && nonlocalized_list)
562 vec_safe_push (*nonlocalized_list, old_var);
563 continue;
564 }
565
566 /* Remap the variable. */
567 new_var = remap_decl (old_var, id);
568
569 /* If we didn't remap this variable, we can't mess with its
570 TREE_CHAIN. If we remapped this variable to the return slot, it's
571 already declared somewhere else, so don't declare it here. */
572
573 if (new_var == id->retvar)
574 ;
575 else if (!new_var)
576 {
577 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
578 && !DECL_IGNORED_P (old_var)
579 && nonlocalized_list)
580 vec_safe_push (*nonlocalized_list, old_var);
581 }
582 else
583 {
584 gcc_assert (DECL_P (new_var));
585 DECL_CHAIN (new_var) = new_decls;
586 new_decls = new_var;
587
588 /* Also copy value-expressions. */
589 if (TREE_CODE (new_var) == VAR_DECL
590 && DECL_HAS_VALUE_EXPR_P (new_var))
591 {
592 tree tem = DECL_VALUE_EXPR (new_var);
593 bool old_regimplify = id->regimplify;
594 id->remapping_type_depth++;
595 walk_tree (&tem, copy_tree_body_r, id, NULL);
596 id->remapping_type_depth--;
597 id->regimplify = old_regimplify;
598 SET_DECL_VALUE_EXPR (new_var, tem);
599 }
600 }
601 }
602
603 return nreverse (new_decls);
604 }
605
606 /* Copy the BLOCK to contain remapped versions of the variables
607 therein. And hook the new block into the block-tree. */
608
609 static void
610 remap_block (tree *block, copy_body_data *id)
611 {
612 tree old_block;
613 tree new_block;
614
615 /* Make the new block. */
616 old_block = *block;
617 new_block = make_node (BLOCK);
618 TREE_USED (new_block) = TREE_USED (old_block);
619 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
620 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
621 BLOCK_NONLOCALIZED_VARS (new_block)
622 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
623 *block = new_block;
624
625 /* Remap its variables. */
626 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
627 &BLOCK_NONLOCALIZED_VARS (new_block),
628 id);
629
630 if (id->transform_lang_insert_block)
631 id->transform_lang_insert_block (new_block);
632
633 /* Remember the remapped block. */
634 insert_decl_map (id, old_block, new_block);
635 }
636
637 /* Copy the whole block tree and root it in id->block. */
638 static tree
639 remap_blocks (tree block, copy_body_data *id)
640 {
641 tree t;
642 tree new_tree = block;
643
644 if (!block)
645 return NULL;
646
647 remap_block (&new_tree, id);
648 gcc_assert (new_tree != block);
649 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
650 prepend_lexical_block (new_tree, remap_blocks (t, id));
651 /* Blocks are in arbitrary order, but to make things slightly prettier, do
652 not swap their order when producing a copy. */
653 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
654 return new_tree;
655 }
656
657 /* Remap the block tree rooted at BLOCK to nothing. */
658 static void
659 remap_blocks_to_null (tree block, copy_body_data *id)
660 {
661 tree t;
662 insert_decl_map (id, block, NULL_TREE);
663 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
664 remap_blocks_to_null (t, id);
665 }
666
667 static void
668 copy_statement_list (tree *tp)
669 {
670 tree_stmt_iterator oi, ni;
671 tree new_tree;
672
673 new_tree = alloc_stmt_list ();
674 ni = tsi_start (new_tree);
675 oi = tsi_start (*tp);
676 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
677 *tp = new_tree;
678
679 for (; !tsi_end_p (oi); tsi_next (&oi))
680 {
681 tree stmt = tsi_stmt (oi);
682 if (TREE_CODE (stmt) == STATEMENT_LIST)
683 /* This copy is not redundant; tsi_link_after will smash this
684 STATEMENT_LIST into the end of the one we're building, and we
685 don't want to do that with the original. */
686 copy_statement_list (&stmt);
687 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
688 }
689 }
690
691 static void
692 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
693 {
694 tree block = BIND_EXPR_BLOCK (*tp);
695 /* Copy (and replace) the statement. */
696 copy_tree_r (tp, walk_subtrees, NULL);
697 if (block)
698 {
699 remap_block (&block, id);
700 BIND_EXPR_BLOCK (*tp) = block;
701 }
702
703 if (BIND_EXPR_VARS (*tp))
704 /* This will remap a lot of the same decls again, but this should be
705 harmless. */
706 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
707 }
708
709
710 /* Create a new gimple_seq by remapping all the statements in BODY
711 using the inlining information in ID. */
712
713 static gimple_seq
714 remap_gimple_seq (gimple_seq body, copy_body_data *id)
715 {
716 gimple_stmt_iterator si;
717 gimple_seq new_body = NULL;
718
719 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
720 {
721 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
722 gimple_seq_add_stmt (&new_body, new_stmt);
723 }
724
725 return new_body;
726 }
727
728
729 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
730 block using the mapping information in ID. */
731
732 static gimple
733 copy_gimple_bind (gimple stmt, copy_body_data *id)
734 {
735 gimple new_bind;
736 tree new_block, new_vars;
737 gimple_seq body, new_body;
738
739 /* Copy the statement. Note that we purposely don't use copy_stmt
740 here because we need to remap statements as we copy. */
741 body = gimple_bind_body (stmt);
742 new_body = remap_gimple_seq (body, id);
743
744 new_block = gimple_bind_block (stmt);
745 if (new_block)
746 remap_block (&new_block, id);
747
748 /* This will remap a lot of the same decls again, but this should be
749 harmless. */
750 new_vars = gimple_bind_vars (stmt);
751 if (new_vars)
752 new_vars = remap_decls (new_vars, NULL, id);
753
754 new_bind = gimple_build_bind (new_vars, new_body, new_block);
755
756 return new_bind;
757 }
758
759 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
760
761 static bool
762 is_parm (tree decl)
763 {
764 if (TREE_CODE (decl) == SSA_NAME)
765 {
766 decl = SSA_NAME_VAR (decl);
767 if (!decl)
768 return false;
769 }
770
771 return (TREE_CODE (decl) == PARM_DECL);
772 }
773
774 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
775 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
776 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
777 recursing into the child nodes of *TP. */
778
779 static tree
780 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
781 {
782 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
783 copy_body_data *id = (copy_body_data *) wi_p->info;
784 tree fn = id->src_fn;
785
786 if (TREE_CODE (*tp) == SSA_NAME)
787 {
788 *tp = remap_ssa_name (*tp, id);
789 *walk_subtrees = 0;
790 return NULL;
791 }
792 else if (auto_var_in_fn_p (*tp, fn))
793 {
794 /* Local variables and labels need to be replaced by equivalent
795 variables. We don't want to copy static variables; there's
796 only one of those, no matter how many times we inline the
797 containing function. Similarly for globals from an outer
798 function. */
799 tree new_decl;
800
801 /* Remap the declaration. */
802 new_decl = remap_decl (*tp, id);
803 gcc_assert (new_decl);
804 /* Replace this variable with the copy. */
805 STRIP_TYPE_NOPS (new_decl);
806 /* ??? The C++ frontend uses void * pointer zero to initialize
807 any other type. This confuses the middle-end type verification.
808 As cloned bodies do not go through gimplification again the fixup
809 there doesn't trigger. */
810 if (TREE_CODE (new_decl) == INTEGER_CST
811 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
812 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
813 *tp = new_decl;
814 *walk_subtrees = 0;
815 }
816 else if (TREE_CODE (*tp) == STATEMENT_LIST)
817 gcc_unreachable ();
818 else if (TREE_CODE (*tp) == SAVE_EXPR)
819 gcc_unreachable ();
820 else if (TREE_CODE (*tp) == LABEL_DECL
821 && (!DECL_CONTEXT (*tp)
822 || decl_function_context (*tp) == id->src_fn))
823 /* These may need to be remapped for EH handling. */
824 *tp = remap_decl (*tp, id);
825 else if (TREE_CODE (*tp) == FIELD_DECL)
826 {
827 /* If the enclosing record type is variably_modified_type_p, the field
828 has already been remapped. Otherwise, it need not be. */
829 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
830 if (n)
831 *tp = *n;
832 *walk_subtrees = 0;
833 }
834 else if (TYPE_P (*tp))
835 /* Types may need remapping as well. */
836 *tp = remap_type (*tp, id);
837 else if (CONSTANT_CLASS_P (*tp))
838 {
839 /* If this is a constant, we have to copy the node iff the type
840 will be remapped. copy_tree_r will not copy a constant. */
841 tree new_type = remap_type (TREE_TYPE (*tp), id);
842
843 if (new_type == TREE_TYPE (*tp))
844 *walk_subtrees = 0;
845
846 else if (TREE_CODE (*tp) == INTEGER_CST)
847 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
848 TREE_INT_CST_HIGH (*tp));
849 else
850 {
851 *tp = copy_node (*tp);
852 TREE_TYPE (*tp) = new_type;
853 }
854 }
855 else
856 {
857 /* Otherwise, just copy the node. Note that copy_tree_r already
858 knows not to copy VAR_DECLs, etc., so this is safe. */
859
860 if (TREE_CODE (*tp) == MEM_REF)
861 {
862 /* We need to re-canonicalize MEM_REFs from inline substitutions
863 that can happen when a pointer argument is an ADDR_EXPR.
864 Recurse here manually to allow that. */
865 tree ptr = TREE_OPERAND (*tp, 0);
866 tree type = remap_type (TREE_TYPE (*tp), id);
867 tree old = *tp;
868 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
869 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
870 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
871 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
872 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
873 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
874 remapped a parameter as the property might be valid only
875 for the parameter itself. */
876 if (TREE_THIS_NOTRAP (old)
877 && (!is_parm (TREE_OPERAND (old, 0))
878 || (!id->transform_parameter && is_parm (ptr))))
879 TREE_THIS_NOTRAP (*tp) = 1;
880 *walk_subtrees = 0;
881 return NULL;
882 }
883
884 /* Here is the "usual case". Copy this tree node, and then
885 tweak some special cases. */
886 copy_tree_r (tp, walk_subtrees, NULL);
887
888 if (TREE_CODE (*tp) != OMP_CLAUSE)
889 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
890
891 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
892 {
893 /* The copied TARGET_EXPR has never been expanded, even if the
894 original node was expanded already. */
895 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
896 TREE_OPERAND (*tp, 3) = NULL_TREE;
897 }
898 else if (TREE_CODE (*tp) == ADDR_EXPR)
899 {
900 /* Variable substitution need not be simple. In particular,
901 the MEM_REF substitution above. Make sure that
902 TREE_CONSTANT and friends are up-to-date. */
903 int invariant = is_gimple_min_invariant (*tp);
904 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
905 recompute_tree_invariant_for_addr_expr (*tp);
906
907 /* If this used to be invariant, but is not any longer,
908 then regimplification is probably needed. */
909 if (invariant && !is_gimple_min_invariant (*tp))
910 id->regimplify = true;
911
912 *walk_subtrees = 0;
913 }
914 }
915
916 /* Update the TREE_BLOCK for the cloned expr. */
917 if (EXPR_P (*tp))
918 {
919 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
920 tree old_block = TREE_BLOCK (*tp);
921 if (old_block)
922 {
923 tree *n;
924 n = (tree *) pointer_map_contains (id->decl_map,
925 TREE_BLOCK (*tp));
926 if (n)
927 new_block = *n;
928 }
929 TREE_SET_BLOCK (*tp, new_block);
930 }
931
932 /* Keep iterating. */
933 return NULL_TREE;
934 }
935
936
937 /* Called from copy_body_id via walk_tree. DATA is really a
938 `copy_body_data *'. */
939
940 tree
941 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
942 {
943 copy_body_data *id = (copy_body_data *) data;
944 tree fn = id->src_fn;
945 tree new_block;
946
947 /* Begin by recognizing trees that we'll completely rewrite for the
948 inlining context. Our output for these trees is completely
949 different from our input (e.g. RETURN_EXPR is deleted, and morphs
950 into an edge). Further down, we'll handle trees that get
951 duplicated and/or tweaked. */
952
953 /* When requested, RETURN_EXPRs should be transformed to just the
954 contained MODIFY_EXPR. The branch semantics of the return will
955 be handled elsewhere by manipulating the CFG rather than a statement. */
956 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
957 {
958 tree assignment = TREE_OPERAND (*tp, 0);
959
960 /* If we're returning something, just turn that into an
961 assignment into the equivalent of the original RESULT_DECL.
962 If the "assignment" is just the result decl, the result
963 decl has already been set (e.g. a recent "foo (&result_decl,
964 ...)"); just toss the entire RETURN_EXPR. */
965 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
966 {
967 /* Replace the RETURN_EXPR with (a copy of) the
968 MODIFY_EXPR hanging underneath. */
969 *tp = copy_node (assignment);
970 }
971 else /* Else the RETURN_EXPR returns no value. */
972 {
973 *tp = NULL;
974 return (tree) (void *)1;
975 }
976 }
977 else if (TREE_CODE (*tp) == SSA_NAME)
978 {
979 *tp = remap_ssa_name (*tp, id);
980 *walk_subtrees = 0;
981 return NULL;
982 }
983
984 /* Local variables and labels need to be replaced by equivalent
985 variables. We don't want to copy static variables; there's only
986 one of those, no matter how many times we inline the containing
987 function. Similarly for globals from an outer function. */
988 else if (auto_var_in_fn_p (*tp, fn))
989 {
990 tree new_decl;
991
992 /* Remap the declaration. */
993 new_decl = remap_decl (*tp, id);
994 gcc_assert (new_decl);
995 /* Replace this variable with the copy. */
996 STRIP_TYPE_NOPS (new_decl);
997 *tp = new_decl;
998 *walk_subtrees = 0;
999 }
1000 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1001 copy_statement_list (tp);
1002 else if (TREE_CODE (*tp) == SAVE_EXPR
1003 || TREE_CODE (*tp) == TARGET_EXPR)
1004 remap_save_expr (tp, id->decl_map, walk_subtrees);
1005 else if (TREE_CODE (*tp) == LABEL_DECL
1006 && (! DECL_CONTEXT (*tp)
1007 || decl_function_context (*tp) == id->src_fn))
1008 /* These may need to be remapped for EH handling. */
1009 *tp = remap_decl (*tp, id);
1010 else if (TREE_CODE (*tp) == BIND_EXPR)
1011 copy_bind_expr (tp, walk_subtrees, id);
1012 /* Types may need remapping as well. */
1013 else if (TYPE_P (*tp))
1014 *tp = remap_type (*tp, id);
1015
1016 /* If this is a constant, we have to copy the node iff the type will be
1017 remapped. copy_tree_r will not copy a constant. */
1018 else if (CONSTANT_CLASS_P (*tp))
1019 {
1020 tree new_type = remap_type (TREE_TYPE (*tp), id);
1021
1022 if (new_type == TREE_TYPE (*tp))
1023 *walk_subtrees = 0;
1024
1025 else if (TREE_CODE (*tp) == INTEGER_CST)
1026 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1027 TREE_INT_CST_HIGH (*tp));
1028 else
1029 {
1030 *tp = copy_node (*tp);
1031 TREE_TYPE (*tp) = new_type;
1032 }
1033 }
1034
1035 /* Otherwise, just copy the node. Note that copy_tree_r already
1036 knows not to copy VAR_DECLs, etc., so this is safe. */
1037 else
1038 {
1039 /* Here we handle trees that are not completely rewritten.
1040 First we detect some inlining-induced bogosities for
1041 discarding. */
1042 if (TREE_CODE (*tp) == MODIFY_EXPR
1043 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1044 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1045 {
1046 /* Some assignments VAR = VAR; don't generate any rtl code
1047 and thus don't count as variable modification. Avoid
1048 keeping bogosities like 0 = 0. */
1049 tree decl = TREE_OPERAND (*tp, 0), value;
1050 tree *n;
1051
1052 n = (tree *) pointer_map_contains (id->decl_map, decl);
1053 if (n)
1054 {
1055 value = *n;
1056 STRIP_TYPE_NOPS (value);
1057 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1058 {
1059 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1060 return copy_tree_body_r (tp, walk_subtrees, data);
1061 }
1062 }
1063 }
1064 else if (TREE_CODE (*tp) == INDIRECT_REF)
1065 {
1066 /* Get rid of *& from inline substitutions that can happen when a
1067 pointer argument is an ADDR_EXPR. */
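/* E.g. inlining "void set (int *p) { *p = 1; }" at a call
   "set (&x)" maps p to &x, so the copied body briefly contains
   "*&x = 1"; the code below folds that back to "x = 1".
   (Illustrative sketch only.)  */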
1068 tree decl = TREE_OPERAND (*tp, 0);
1069 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1070 if (n)
1071 {
1072 /* If we happen to get an ADDR_EXPR in n->value, strip
1073 it manually here as we'll eventually get ADDR_EXPRs
1074 which lie about their types pointed to. In this case
1075 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1076 but we absolutely rely on that. As fold_indirect_ref
1077 does other useful transformations, try that first, though. */
1078 tree type = TREE_TYPE (*tp);
1079 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1080 tree old = *tp;
1081 *tp = gimple_fold_indirect_ref (ptr);
1082 if (! *tp)
1083 {
1084 if (TREE_CODE (ptr) == ADDR_EXPR)
1085 {
1086 *tp
1087 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1088 /* ??? We should either assert here or build
1089 a VIEW_CONVERT_EXPR instead of blindly leaking
1090 incompatible types to our IL. */
1091 if (! *tp)
1092 *tp = TREE_OPERAND (ptr, 0);
1093 }
1094 else
1095 {
1096 *tp = build1 (INDIRECT_REF, type, ptr);
1097 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1098 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1099 TREE_READONLY (*tp) = TREE_READONLY (old);
1100 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1101 have remapped a parameter as the property might be
1102 valid only for the parameter itself. */
1103 if (TREE_THIS_NOTRAP (old)
1104 && (!is_parm (TREE_OPERAND (old, 0))
1105 || (!id->transform_parameter && is_parm (ptr))))
1106 TREE_THIS_NOTRAP (*tp) = 1;
1107 }
1108 }
1109 *walk_subtrees = 0;
1110 return NULL;
1111 }
1112 }
1113 else if (TREE_CODE (*tp) == MEM_REF)
1114 {
1115 /* We need to re-canonicalize MEM_REFs from inline substitutions
1116 that can happen when a pointer argument is an ADDR_EXPR.
1117 Recurse here manually to allow that. */
1118 tree ptr = TREE_OPERAND (*tp, 0);
1119 tree type = remap_type (TREE_TYPE (*tp), id);
1120 tree old = *tp;
1121 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1122 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1123 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1124 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1125 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1126 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1127 remapped a parameter as the property might be valid only
1128 for the parameter itself. */
1129 if (TREE_THIS_NOTRAP (old)
1130 && (!is_parm (TREE_OPERAND (old, 0))
1131 || (!id->transform_parameter && is_parm (ptr))))
1132 TREE_THIS_NOTRAP (*tp) = 1;
1133 *walk_subtrees = 0;
1134 return NULL;
1135 }
1136
1137 /* Here is the "usual case". Copy this tree node, and then
1138 tweak some special cases. */
1139 copy_tree_r (tp, walk_subtrees, NULL);
1140
1141 /* If EXPR has a block defined, map it to the newly constructed block.
1142 When inlining we want EXPRs without a block to appear in the block
1143 of the function call if we are not remapping a type. */
1144 if (EXPR_P (*tp))
1145 {
1146 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1147 if (TREE_BLOCK (*tp))
1148 {
1149 tree *n;
1150 n = (tree *) pointer_map_contains (id->decl_map,
1151 TREE_BLOCK (*tp));
1152 if (n)
1153 new_block = *n;
1154 }
1155 TREE_SET_BLOCK (*tp, new_block);
1156 }
1157
1158 if (TREE_CODE (*tp) != OMP_CLAUSE)
1159 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1160
1161 /* The copied TARGET_EXPR has never been expanded, even if the
1162 original node was expanded already. */
1163 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1164 {
1165 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1166 TREE_OPERAND (*tp, 3) = NULL_TREE;
1167 }
1168
1169 /* Variable substitution need not be simple. In particular, the
1170 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1171 and friends are up-to-date. */
1172 else if (TREE_CODE (*tp) == ADDR_EXPR)
1173 {
1174 int invariant = is_gimple_min_invariant (*tp);
1175 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1176
1177 /* Handle the case where we substituted an INDIRECT_REF
1178 into the operand of the ADDR_EXPR. */
1179 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1180 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1181 else
1182 recompute_tree_invariant_for_addr_expr (*tp);
1183
1184 /* If this used to be invariant, but is not any longer,
1185 then regimplification is probably needed. */
1186 if (invariant && !is_gimple_min_invariant (*tp))
1187 id->regimplify = true;
1188
1189 *walk_subtrees = 0;
1190 }
1191 }
1192
1193 /* Keep iterating. */
1194 return NULL_TREE;
1195 }
1196
1197 /* Helper for remap_gimple_stmt. Given an EH region number for the
1198 source function, map that to the duplicate EH region number in
1199 the destination function. */
1200
1201 static int
1202 remap_eh_region_nr (int old_nr, copy_body_data *id)
1203 {
1204 eh_region old_r, new_r;
1205 void **slot;
1206
1207 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1208 slot = pointer_map_contains (id->eh_map, old_r);
1209 new_r = (eh_region) *slot;
1210
1211 return new_r->index;
1212 }
1213
1214 /* Similar, but operate on INTEGER_CSTs. */
1215
1216 static tree
1217 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1218 {
1219 int old_nr, new_nr;
1220
1221 old_nr = tree_low_cst (old_t_nr, 0);
1222 new_nr = remap_eh_region_nr (old_nr, id);
1223
1224 return build_int_cst (integer_type_node, new_nr);
1225 }
1226
1227 /* Helper for copy_bb. Remap statement STMT using the inlining
1228 information in ID. Return the new statement copy. */
1229
1230 static gimple
1231 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1232 {
1233 gimple copy = NULL;
1234 struct walk_stmt_info wi;
1235 bool skip_first = false;
1236
1237 /* Begin by recognizing trees that we'll completely rewrite for the
1238 inlining context. Our output for these trees is completely
1239 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1240 into an edge). Further down, we'll handle trees that get
1241 duplicated and/or tweaked. */
1242
1243 /* When requested, GIMPLE_RETURNs should be transformed to just the
1244 contained GIMPLE_ASSIGN. The branch semantics of the return will
1245 be handled elsewhere by manipulating the CFG rather than the
1246 statement. */
1247 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1248 {
1249 tree retval = gimple_return_retval (stmt);
1250
1251 /* If we're returning something, just turn that into an
1252 assignment into the equivalent of the original RESULT_DECL.
1253 If RETVAL is just the result decl, the result decl has
1254 already been set (e.g. a recent "foo (&result_decl, ...)");
1255 just toss the entire GIMPLE_RETURN. */
1256 if (retval
1257 && (TREE_CODE (retval) != RESULT_DECL
1258 && (TREE_CODE (retval) != SSA_NAME
1259 || ! SSA_NAME_VAR (retval)
1260 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1261 {
1262 copy = gimple_build_assign (id->retvar, retval);
1263 /* id->retvar is already substituted. Skip it on later remapping. */
1264 skip_first = true;
1265 }
1266 else
1267 return gimple_build_nop ();
1268 }
1269 else if (gimple_has_substatements (stmt))
1270 {
1271 gimple_seq s1, s2;
1272
1273 /* When cloning bodies from the C++ front end, we will be handed bodies
1274 in High GIMPLE form. Handle here all the High GIMPLE statements that
1275 have embedded statements. */
1276 switch (gimple_code (stmt))
1277 {
1278 case GIMPLE_BIND:
1279 copy = copy_gimple_bind (stmt, id);
1280 break;
1281
1282 case GIMPLE_CATCH:
1283 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1284 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1285 break;
1286
1287 case GIMPLE_EH_FILTER:
1288 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1289 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1290 break;
1291
1292 case GIMPLE_TRY:
1293 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1294 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1295 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1296 break;
1297
1298 case GIMPLE_WITH_CLEANUP_EXPR:
1299 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1300 copy = gimple_build_wce (s1);
1301 break;
1302
1303 case GIMPLE_OMP_PARALLEL:
1304 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1305 copy = gimple_build_omp_parallel
1306 (s1,
1307 gimple_omp_parallel_clauses (stmt),
1308 gimple_omp_parallel_child_fn (stmt),
1309 gimple_omp_parallel_data_arg (stmt));
1310 break;
1311
1312 case GIMPLE_OMP_TASK:
1313 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1314 copy = gimple_build_omp_task
1315 (s1,
1316 gimple_omp_task_clauses (stmt),
1317 gimple_omp_task_child_fn (stmt),
1318 gimple_omp_task_data_arg (stmt),
1319 gimple_omp_task_copy_fn (stmt),
1320 gimple_omp_task_arg_size (stmt),
1321 gimple_omp_task_arg_align (stmt));
1322 break;
1323
1324 case GIMPLE_OMP_FOR:
1325 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1326 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1327 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1328 gimple_omp_for_clauses (stmt),
1329 gimple_omp_for_collapse (stmt), s2);
1330 {
1331 size_t i;
1332 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1333 {
1334 gimple_omp_for_set_index (copy, i,
1335 gimple_omp_for_index (stmt, i));
1336 gimple_omp_for_set_initial (copy, i,
1337 gimple_omp_for_initial (stmt, i));
1338 gimple_omp_for_set_final (copy, i,
1339 gimple_omp_for_final (stmt, i));
1340 gimple_omp_for_set_incr (copy, i,
1341 gimple_omp_for_incr (stmt, i));
1342 gimple_omp_for_set_cond (copy, i,
1343 gimple_omp_for_cond (stmt, i));
1344 }
1345 }
1346 break;
1347
1348 case GIMPLE_OMP_MASTER:
1349 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1350 copy = gimple_build_omp_master (s1);
1351 break;
1352
1353 case GIMPLE_OMP_TASKGROUP:
1354 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1355 copy = gimple_build_omp_taskgroup (s1);
1356 break;
1357
1358 case GIMPLE_OMP_ORDERED:
1359 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1360 copy = gimple_build_omp_ordered (s1);
1361 break;
1362
1363 case GIMPLE_OMP_SECTION:
1364 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1365 copy = gimple_build_omp_section (s1);
1366 break;
1367
1368 case GIMPLE_OMP_SECTIONS:
1369 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1370 copy = gimple_build_omp_sections
1371 (s1, gimple_omp_sections_clauses (stmt));
1372 break;
1373
1374 case GIMPLE_OMP_SINGLE:
1375 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1376 copy = gimple_build_omp_single
1377 (s1, gimple_omp_single_clauses (stmt));
1378 break;
1379
1380 case GIMPLE_OMP_TARGET:
1381 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1382 copy = gimple_build_omp_target
1383 (s1, gimple_omp_target_kind (stmt),
1384 gimple_omp_target_clauses (stmt));
1385 break;
1386
1387 case GIMPLE_OMP_TEAMS:
1388 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1389 copy = gimple_build_omp_teams
1390 (s1, gimple_omp_teams_clauses (stmt));
1391 break;
1392
1393 case GIMPLE_OMP_CRITICAL:
1394 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1395 copy
1396 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1397 break;
1398
1399 case GIMPLE_TRANSACTION:
1400 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1401 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1402 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1403 break;
1404
1405 default:
1406 gcc_unreachable ();
1407 }
1408 }
1409 else
1410 {
1411 if (gimple_assign_copy_p (stmt)
1412 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1413 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1414 {
1415 /* Here we handle statements that are not completely rewritten.
1416 First we detect some inlining-induced bogosities for
1417 discarding. */
1418
1419 /* Some assignments VAR = VAR; don't generate any rtl code
1420 and thus don't count as variable modification. Avoid
1421 keeping bogosities like 0 = 0. */
1422 tree decl = gimple_assign_lhs (stmt), value;
1423 tree *n;
1424
1425 n = (tree *) pointer_map_contains (id->decl_map, decl);
1426 if (n)
1427 {
1428 value = *n;
1429 STRIP_TYPE_NOPS (value);
1430 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1431 return gimple_build_nop ();
1432 }
1433 }
1434
1435 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1436 in a block that we aren't copying during tree_function_versioning,
1437 just drop the clobber stmt. */
1438 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1439 {
1440 tree lhs = gimple_assign_lhs (stmt);
1441 if (TREE_CODE (lhs) == MEM_REF
1442 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1443 {
1444 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1445 if (gimple_bb (def_stmt)
1446 && !bitmap_bit_p (id->blocks_to_copy,
1447 gimple_bb (def_stmt)->index))
1448 return gimple_build_nop ();
1449 }
1450 }
1451
1452 if (gimple_debug_bind_p (stmt))
1453 {
1454 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1455 gimple_debug_bind_get_value (stmt),
1456 stmt);
1457 id->debug_stmts.safe_push (copy);
1458 return copy;
1459 }
1460 if (gimple_debug_source_bind_p (stmt))
1461 {
1462 copy = gimple_build_debug_source_bind
1463 (gimple_debug_source_bind_get_var (stmt),
1464 gimple_debug_source_bind_get_value (stmt), stmt);
1465 id->debug_stmts.safe_push (copy);
1466 return copy;
1467 }
1468
1469 /* Create a new deep copy of the statement. */
1470 copy = gimple_copy (stmt);
1471
1472 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1473 RESX and EH_DISPATCH. */
1474 if (id->eh_map)
1475 switch (gimple_code (copy))
1476 {
1477 case GIMPLE_CALL:
1478 {
1479 tree r, fndecl = gimple_call_fndecl (copy);
1480 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1481 switch (DECL_FUNCTION_CODE (fndecl))
1482 {
1483 case BUILT_IN_EH_COPY_VALUES:
1484 r = gimple_call_arg (copy, 1);
1485 r = remap_eh_region_tree_nr (r, id);
1486 gimple_call_set_arg (copy, 1, r);
1487 /* FALLTHRU */
1488
1489 case BUILT_IN_EH_POINTER:
1490 case BUILT_IN_EH_FILTER:
1491 r = gimple_call_arg (copy, 0);
1492 r = remap_eh_region_tree_nr (r, id);
1493 gimple_call_set_arg (copy, 0, r);
1494 break;
1495
1496 default:
1497 break;
1498 }
1499
1500 /* Reset alias info if we didn't apply measures to
1501 keep it valid over inlining by setting DECL_PT_UID. */
1502 if (!id->src_cfun->gimple_df
1503 || !id->src_cfun->gimple_df->ipa_pta)
1504 gimple_call_reset_alias_info (copy);
1505 }
1506 break;
1507
1508 case GIMPLE_RESX:
1509 {
1510 int r = gimple_resx_region (copy);
1511 r = remap_eh_region_nr (r, id);
1512 gimple_resx_set_region (copy, r);
1513 }
1514 break;
1515
1516 case GIMPLE_EH_DISPATCH:
1517 {
1518 int r = gimple_eh_dispatch_region (copy);
1519 r = remap_eh_region_nr (r, id);
1520 gimple_eh_dispatch_set_region (copy, r);
1521 }
1522 break;
1523
1524 default:
1525 break;
1526 }
1527 }
1528
1529 /* If STMT has a block defined, map it to the newly constructed
1530 block. */
1531 if (gimple_block (copy))
1532 {
1533 tree *n;
1534 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1535 gcc_assert (n);
1536 gimple_set_block (copy, *n);
1537 }
1538
1539 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1540 return copy;
1541
1542 /* Remap all the operands in COPY. */
1543 memset (&wi, 0, sizeof (wi));
1544 wi.info = id;
1545 if (skip_first)
1546 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1547 else
1548 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1549
1550 /* Clear the copied virtual operands. We are not remapping them here
1551 but are going to recreate them from scratch. */
1552 if (gimple_has_mem_ops (copy))
1553 {
1554 gimple_set_vdef (copy, NULL_TREE);
1555 gimple_set_vuse (copy, NULL_TREE);
1556 }
1557
1558 return copy;
1559 }
1560
1561
1562 /* Copy a basic block, scaling the profile accordingly. Edges will be
1563 taken care of later. */
1564
1565 static basic_block
1566 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1567 gcov_type count_scale)
1568 {
1569 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1570 basic_block copy_basic_block;
1571 tree decl;
1572 gcov_type freq;
1573 basic_block prev;
1574
1575 /* Search for previous copied basic block. */
1576 prev = bb->prev_bb;
1577 while (!prev->aux)
1578 prev = prev->prev_bb;
1579
1580 /* create_basic_block() will append every new block to
1581 basic_block_info automatically. */
1582 copy_basic_block = create_basic_block (NULL, (void *) 0,
1583 (basic_block) prev->aux);
1584 copy_basic_block->count = apply_scale (bb->count, count_scale);
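  /* apply_scale presumably computes COUNT * COUNT_SCALE / REG_BR_PROB_BASE
     with rounding, so a count_scale of REG_BR_PROB_BASE keeps the counts
     unchanged and smaller values scale them down proportionally.  (This is
     an assumption noted here for illustration only.)  */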
1585
1586 /* We are going to rebuild frequencies from scratch. These values
1587 are of only minor importance; they merely drive canonicalize_loop_headers. */
1588 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1589
1590 /* We recompute frequencies after inlining, so this is quite safe. */
1591 if (freq > BB_FREQ_MAX)
1592 freq = BB_FREQ_MAX;
1593 copy_basic_block->frequency = freq;
1594
1595 copy_gsi = gsi_start_bb (copy_basic_block);
1596
1597 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1598 {
1599 gimple stmt = gsi_stmt (gsi);
1600 gimple orig_stmt = stmt;
1601
1602 id->regimplify = false;
1603 stmt = remap_gimple_stmt (stmt, id);
1604 if (gimple_nop_p (stmt))
1605 continue;
1606
1607 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1608 seq_gsi = copy_gsi;
1609
1610 /* With return slot optimization we can end up with
1611 non-gimple (foo *)&this->m, fix that here. */
1612 if (is_gimple_assign (stmt)
1613 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1614 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1615 {
1616 tree new_rhs;
1617 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1618 gimple_assign_rhs1 (stmt),
1619 true, NULL, false,
1620 GSI_CONTINUE_LINKING);
1621 gimple_assign_set_rhs1 (stmt, new_rhs);
1622 id->regimplify = false;
1623 }
1624
1625 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1626
1627 if (id->regimplify)
1628 gimple_regimplify_operands (stmt, &seq_gsi);
1629
1630 /* If copy_basic_block has been empty at the start of this iteration,
1631 call gsi_start_bb again to get at the newly added statements. */
1632 if (gsi_end_p (copy_gsi))
1633 copy_gsi = gsi_start_bb (copy_basic_block);
1634 else
1635 gsi_next (&copy_gsi);
1636
1637 /* Process the new statement. The call to gimple_regimplify_operands
1638 possibly turned the statement into multiple statements; we
1639 need to process all of them. */
1640 do
1641 {
1642 tree fn;
1643
1644 stmt = gsi_stmt (copy_gsi);
1645 if (is_gimple_call (stmt)
1646 && gimple_call_va_arg_pack_p (stmt)
1647 && id->gimple_call)
1648 {
1649 /* __builtin_va_arg_pack () should be replaced by
1650 all arguments corresponding to ... in the caller. */
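/* For example, if the statement being inlined came from a call
   "f (1, 2, 3)" to "void f (int first, ...)", then a forwarding call
   "g (__builtin_va_arg_pack ())" in f's body is rebuilt here as
   "g (2, 3)".  (Illustrative sketch only.)  */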
1651 tree p;
1652 gimple new_call;
1653 vec<tree> argarray;
1654 size_t nargs = gimple_call_num_args (id->gimple_call);
1655 size_t n;
1656
1657 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1658 nargs--;
1659
1660 /* Create the new array of arguments. */
1661 n = nargs + gimple_call_num_args (stmt);
1662 argarray.create (n);
1663 argarray.safe_grow_cleared (n);
1664
1665 /* Copy all the arguments before '...' */
1666 memcpy (argarray.address (),
1667 gimple_call_arg_ptr (stmt, 0),
1668 gimple_call_num_args (stmt) * sizeof (tree));
1669
1670 /* Append the arguments passed in '...' */
1671 memcpy (argarray.address () + gimple_call_num_args (stmt),
1672 gimple_call_arg_ptr (id->gimple_call, 0)
1673 + (gimple_call_num_args (id->gimple_call) - nargs),
1674 nargs * sizeof (tree));
1675
1676 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1677 argarray);
1678
1679 argarray.release ();
1680
1681 /* Copy all GIMPLE_CALL flags, location and block, except
1682 GF_CALL_VA_ARG_PACK. */
1683 gimple_call_copy_flags (new_call, stmt);
1684 gimple_call_set_va_arg_pack (new_call, false);
1685 gimple_set_location (new_call, gimple_location (stmt));
1686 gimple_set_block (new_call, gimple_block (stmt));
1687 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1688
1689 gsi_replace (&copy_gsi, new_call, false);
1690 stmt = new_call;
1691 }
1692 else if (is_gimple_call (stmt)
1693 && id->gimple_call
1694 && (decl = gimple_call_fndecl (stmt))
1695 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1696 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1697 {
1698 /* __builtin_va_arg_pack_len () should be replaced by
1699 the number of anonymous arguments. */
1700 size_t nargs = gimple_call_num_args (id->gimple_call);
1701 tree count, p;
1702 gimple new_stmt;
1703
1704 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1705 nargs--;
1706
1707 count = build_int_cst (integer_type_node, nargs);
1708 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1709 gsi_replace (&copy_gsi, new_stmt, false);
1710 stmt = new_stmt;
1711 }
1712
1713 /* Statements produced by inlining can be unfolded, especially
1714 when we have constant propagated some operands. We can't fold
1715 them right now for two reasons:
1716 1) folding requires SSA_NAME_DEF_STMTs to be correct
1717 2) we can't change function calls to builtins.
1718 So we just mark the statement for later folding. We mark
1719 all new statements, instead of just the statements that have changed
1720 by some nontrivial substitution, so even statements made
1721 foldable indirectly are updated. If this turns out to be
1722 expensive, copy_body can be told to watch for nontrivial
1723 changes. */
1724 if (id->statements_to_fold)
1725 pointer_set_insert (id->statements_to_fold, stmt);
1726
1727 /* We're duplicating a CALL_EXPR. Find any corresponding
1728 callgraph edges and update or duplicate them. */
1729 if (is_gimple_call (stmt))
1730 {
1731 struct cgraph_edge *edge;
1732 int flags;
1733
1734 switch (id->transform_call_graph_edges)
1735 {
1736 case CB_CGE_DUPLICATE:
1737 edge = cgraph_edge (id->src_node, orig_stmt);
1738 if (edge)
1739 {
1740 int edge_freq = edge->frequency;
1741 int new_freq;
1742 struct cgraph_edge *old_edge = edge;
1743 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1744 gimple_uid (stmt),
1745 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1746 true);
1747 /* We could also just rescale the frequency, but
1748 doing so would introduce roundoff errors and make
1749 the verifier unhappy. */
1750 new_freq = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
1751 copy_basic_block);
1752
1753 /* Speculative calls consist of two edges - direct and indirect.
1754 Duplicate the whole thing and distribute frequencies accordingly. */
1755 if (edge->speculative)
1756 {
1757 struct cgraph_edge *direct, *indirect;
1758 struct ipa_ref *ref;
1759
1760 gcc_assert (!edge->indirect_unknown_callee);
1761 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1762 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1763 gimple_uid (stmt),
1764 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1765 true);
1766 if (old_edge->frequency + indirect->frequency)
1767 {
1768 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1769 (old_edge->frequency + indirect->frequency)),
1770 CGRAPH_FREQ_MAX);
1771 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1772 (old_edge->frequency + indirect->frequency)),
1773 CGRAPH_FREQ_MAX);
1774 }
1775 ipa_clone_ref (ref, (symtab_node)id->dst_node, stmt);
1776 }
1777 else
1778 {
1779 edge->frequency = new_freq;
1780 if (dump_file
1781 && profile_status_for_function (cfun) != PROFILE_ABSENT
1782 && (edge_freq > edge->frequency + 10
1783 || edge_freq < edge->frequency - 10))
1784 {
1785 fprintf (dump_file, "Edge frequency estimated by "
1786 "cgraph %i diverge from inliner's estimate %i\n",
1787 edge_freq,
1788 edge->frequency);
1789 fprintf (dump_file,
1790 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1791 bb->index,
1792 bb->frequency,
1793 copy_basic_block->frequency);
1794 }
1795 }
1796 }
1797 break;
1798
1799 case CB_CGE_MOVE_CLONES:
1800 cgraph_set_call_stmt_including_clones (id->dst_node,
1801 orig_stmt, stmt);
1802 edge = cgraph_edge (id->dst_node, stmt);
1803 break;
1804
1805 case CB_CGE_MOVE:
1806 edge = cgraph_edge (id->dst_node, orig_stmt);
1807 if (edge)
1808 cgraph_set_call_stmt (edge, stmt);
1809 break;
1810
1811 default:
1812 gcc_unreachable ();
1813 }
1814
1815 /* Constant propagation on arguments done during inlining
1816 may create a new direct call. Produce an edge for it. */
1817 if ((!edge
1818 || (edge->indirect_inlining_edge
1819 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1820 && id->dst_node->symbol.definition
1821 && (fn = gimple_call_fndecl (stmt)) != NULL)
1822 {
1823 struct cgraph_node *dest = cgraph_get_node (fn);
1824
1825 /* We have a missing edge in the callgraph. This can happen
1826 when previous inlining turned an indirect call into a
1827 direct call by constant propagating arguments, or when we are
1828 producing a dead clone (for further cloning). In all
1829 other cases we hit a bug (incorrect node sharing is the
1830 most common reason for missing edges). */
1831 gcc_assert (!dest->symbol.definition
1832 || dest->symbol.address_taken
1833 || !id->src_node->symbol.definition
1834 || !id->dst_node->symbol.definition);
1835 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1836 cgraph_create_edge_including_clones
1837 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1838 compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
1839 copy_basic_block),
1840 CIF_ORIGINALLY_INDIRECT_CALL);
1841 else
1842 cgraph_create_edge (id->dst_node, dest, stmt,
1843 bb->count,
1844 compute_call_stmt_bb_frequency
1845 (id->dst_node->symbol.decl,
1846 copy_basic_block))->inline_failed
1847 = CIF_ORIGINALLY_INDIRECT_CALL;
1848 if (dump_file)
1849 {
1850 fprintf (dump_file, "Created new direct edge to %s\n",
1851 cgraph_node_name (dest));
1852 }
1853 }
1854
1855 flags = gimple_call_flags (stmt);
1856 if (flags & ECF_MAY_BE_ALLOCA)
1857 cfun->calls_alloca = true;
1858 if (flags & ECF_RETURNS_TWICE)
1859 cfun->calls_setjmp = true;
1860 }
1861
1862 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1863 id->eh_map, id->eh_lp_nr);
1864
1865 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1866 {
1867 ssa_op_iter i;
1868 tree def;
1869
1870 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1871 if (TREE_CODE (def) == SSA_NAME)
1872 SSA_NAME_DEF_STMT (def) = stmt;
1873 }
1874
1875 gsi_next (&copy_gsi);
1876 }
1877 while (!gsi_end_p (copy_gsi));
1878
1879 copy_gsi = gsi_last_bb (copy_basic_block);
1880 }
1881
1882 return copy_basic_block;
1883 }
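
/* Illustration (hedged, not part of the compiler): the GF_CALL_VA_ARG_PACK
   and __builtin_va_arg_pack_len handling at the top of copy_bb is easiest to
   see on a small user-level sketch; the function name below is made up and
   follows the documented usage pattern for these builtins.

     #include <stdio.h>

     extern int logf_ (FILE *f, const char *fmt, ...);
     extern __inline__ __attribute__ ((always_inline, gnu_inline)) int
     logf_ (FILE *f, const char *fmt, ...)
     {
       int n = fprintf (f, "[log] ");
       if (n < 0)
         return n;
       return n + fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   When a call such as logf_ (stderr, "%d %s\n", 1, "x") is inlined, the
   copied fprintf call carrying GF_CALL_VA_ARG_PACK is rebuilt with the
   caller's trailing arguments (1, "x") appended, and a use of
   __builtin_va_arg_pack_len () would be replaced by the constant 2.  */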
1884
1885 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1886 form is quite easy, since the dominator relationship for the old basic blocks
1887 does not change.
1888
1889 There is, however, an exception: inlining might change the dominator relation
1890 across EH edges leading from basic blocks within the inlined function
1891 to landing pads in the function we inline into.
1892
1893 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1894 to gimple regs; otherwise, it marks the PHI_RESULT of such
1895 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1896 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1897 set, and this means that there will be no overlapping live ranges
1898 for the underlying symbol.
1899
1900 This might change in the future if we allow redirecting of EH edges and
1901 we might then want to change the way the CFG is built pre-inlining to
1902 include all the possible edges. */
1903 static void
1904 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1905 bool can_throw, bool nonlocal_goto)
1906 {
1907 edge e;
1908 edge_iterator ei;
1909
1910 FOR_EACH_EDGE (e, ei, bb->succs)
1911 if (!e->dest->aux
1912 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1913 {
1914 gimple phi;
1915 gimple_stmt_iterator si;
1916
1917 if (!nonlocal_goto)
1918 gcc_assert (e->flags & EDGE_EH);
1919
1920 if (!can_throw)
1921 gcc_assert (!(e->flags & EDGE_EH));
1922
1923 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1924 {
1925 edge re;
1926
1927 phi = gsi_stmt (si);
1928
1929 /* For abnormal goto/call edges the receiver can be the
1930 ENTRY_BLOCK. Do not assert this cannot happen. */
1931
1932 gcc_assert ((e->flags & EDGE_EH)
1933 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1934
1935 re = find_edge (ret_bb, e->dest);
1936 gcc_checking_assert (re);
1937 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1938 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1939
1940 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1941 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1942 }
1943 }
1944 }
1945
1946
1947 /* Copy edges from BB into its copy constructed earlier, scaling the profile
1948 accordingly. EH edges are taken care of later, below. Assume the aux
1949 pointers point to the copies of each BB. Return true if any
1950 debug stmts are left after a statement that must end the basic block. */
1951
1952 static bool
1953 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1954 bool can_make_abnormal_goto)
1955 {
1956 basic_block new_bb = (basic_block) bb->aux;
1957 edge_iterator ei;
1958 edge old_edge;
1959 gimple_stmt_iterator si;
1960 int flags;
1961 bool need_debug_cleanup = false;
1962
1963 /* Use the indices from the original blocks to create edges for the
1964 new ones. */
1965 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1966 if (!(old_edge->flags & EDGE_EH))
1967 {
1968 edge new_edge;
1969
1970 flags = old_edge->flags;
1971
1972 /* Return edges do get a FALLTHRU flag when they get inlined. */
1973 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1974 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1975 flags |= EDGE_FALLTHRU;
1976 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1977 new_edge->count = apply_scale (old_edge->count, count_scale);
1978 new_edge->probability = old_edge->probability;
1979 }
1980
1981 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1982 return false;
1983
1984 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1985 {
1986 gimple copy_stmt;
1987 bool can_throw, nonlocal_goto;
1988
1989 copy_stmt = gsi_stmt (si);
1990 if (!is_gimple_debug (copy_stmt))
1991 update_stmt (copy_stmt);
1992
1993 /* Do this before the possible split_block. */
1994 gsi_next (&si);
1995
1996 /* If this tree could throw an exception, there are two
1997 cases where we need to add abnormal edge(s): the
1998 tree wasn't in a region and there is a "current
1999 region" in the caller; or the original tree had
2000 EH edges. In both cases split the block after the tree,
2001 and add abnormal edge(s) as needed; we need both
2002 those from the callee and the caller.
2003 We check whether the copy can throw, because the const
2004 propagation can change an INDIRECT_REF which throws
2005 into a COMPONENT_REF which doesn't. If the copy
2006 can throw, the original could also throw. */
2007 can_throw = stmt_can_throw_internal (copy_stmt);
2008 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
2009
2010 if (can_throw || nonlocal_goto)
2011 {
2012 if (!gsi_end_p (si))
2013 {
2014 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2015 gsi_next (&si);
2016 if (gsi_end_p (si))
2017 need_debug_cleanup = true;
2018 }
2019 if (!gsi_end_p (si))
2020 /* Note that bb's predecessor edges aren't necessarily
2021 right at this point; split_block doesn't care. */
2022 {
2023 edge e = split_block (new_bb, copy_stmt);
2024
2025 new_bb = e->dest;
2026 new_bb->aux = e->src->aux;
2027 si = gsi_start_bb (new_bb);
2028 }
2029 }
2030
2031 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2032 make_eh_dispatch_edges (copy_stmt);
2033 else if (can_throw)
2034 make_eh_edges (copy_stmt);
2035
2036 /* If the call we inline cannot make an abnormal goto, do not add
2037 additional abnormal edges but only retain those already present
2038 in the original function body. */
2039 nonlocal_goto &= can_make_abnormal_goto;
2040 if (nonlocal_goto)
2041 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
2042
2043 if ((can_throw || nonlocal_goto)
2044 && gimple_in_ssa_p (cfun))
2045 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2046 can_throw, nonlocal_goto);
2047 }
2048 return need_debug_cleanup;
2049 }
2050
2051 /* Copy the PHIs. All blocks and edges are copied, some blocks
2052 were possibly split and new outgoing EH edges inserted.
2053 BB points to a block of the original function and the AUX pointers link
2054 the original and newly copied blocks. */
2055
2056 static void
2057 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2058 {
2059 basic_block const new_bb = (basic_block) bb->aux;
2060 edge_iterator ei;
2061 gimple phi;
2062 gimple_stmt_iterator si;
2063 edge new_edge;
2064 bool inserted = false;
2065
2066 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2067 {
2068 tree res, new_res;
2069 gimple new_phi;
2070
2071 phi = gsi_stmt (si);
2072 res = PHI_RESULT (phi);
2073 new_res = res;
2074 if (!virtual_operand_p (res))
2075 {
2076 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2077 new_phi = create_phi_node (new_res, new_bb);
2078 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2079 {
2080 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2081 tree arg;
2082 tree new_arg;
2083 edge_iterator ei2;
2084 location_t locus;
2085
2086 /* When doing partial cloning, we allow PHIs on the entry block
2087 as long as all the arguments are the same. Find any input
2088 edge to determine the argument to copy. */
2089 if (!old_edge)
2090 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2091 if (!old_edge->src->aux)
2092 break;
2093
2094 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2095 new_arg = arg;
2096 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2097 gcc_assert (new_arg);
2098 /* With return slot optimization we can end up with
2099 non-gimple (foo *)&this->m, fix that here. */
2100 if (TREE_CODE (new_arg) != SSA_NAME
2101 && TREE_CODE (new_arg) != FUNCTION_DECL
2102 && !is_gimple_val (new_arg))
2103 {
2104 gimple_seq stmts = NULL;
2105 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2106 gsi_insert_seq_on_edge (new_edge, stmts);
2107 inserted = true;
2108 }
2109 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2110 if (LOCATION_BLOCK (locus))
2111 {
2112 tree *n;
2113 n = (tree *) pointer_map_contains (id->decl_map,
2114 LOCATION_BLOCK (locus));
2115 gcc_assert (n);
2116 if (*n)
2117 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2118 else
2119 locus = LOCATION_LOCUS (locus);
2120 }
2121 else
2122 locus = LOCATION_LOCUS (locus);
2123
2124 add_phi_arg (new_phi, new_arg, new_edge, locus);
2125 }
2126 }
2127 }
2128
2129 /* Commit the delayed edge insertions. */
2130 if (inserted)
2131 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2132 gsi_commit_one_edge_insert (new_edge, NULL);
2133 }
2134
2135
2136 /* Wrapper for remap_decl so it can be used as a callback. */
2137
2138 static tree
2139 remap_decl_1 (tree decl, void *data)
2140 {
2141 return remap_decl (decl, (copy_body_data *) data);
2142 }
2143
2144 /* Build the struct function and associated data structures for the new clone
2145 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2146 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2147
2148 static void
2149 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2150 {
2151 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2152 gcov_type count_scale;
2153
2154 if (!DECL_ARGUMENTS (new_fndecl))
2155 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2156 if (!DECL_RESULT (new_fndecl))
2157 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2158
2159 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2160 count_scale
2161 = GCOV_COMPUTE_SCALE (count,
2162 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2163 else
2164 count_scale = REG_BR_PROB_BASE;
2165
2166 /* Register specific tree functions. */
2167 gimple_register_cfg_hooks ();
2168
2169 /* Get clean struct function. */
2170 push_struct_function (new_fndecl);
2171
2172 /* We will rebuild these, so just sanity check that they are empty. */
2173 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2174 gcc_assert (cfun->local_decls == NULL);
2175 gcc_assert (cfun->cfg == NULL);
2176 gcc_assert (cfun->decl == new_fndecl);
2177
2178 /* Copy items we preserve during cloning. */
2179 cfun->static_chain_decl = src_cfun->static_chain_decl;
2180 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2181 cfun->function_end_locus = src_cfun->function_end_locus;
2182 cfun->curr_properties = src_cfun->curr_properties;
2183 cfun->last_verified = src_cfun->last_verified;
2184 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2185 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2186 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2187 cfun->stdarg = src_cfun->stdarg;
2188 cfun->after_inlining = src_cfun->after_inlining;
2189 cfun->can_throw_non_call_exceptions
2190 = src_cfun->can_throw_non_call_exceptions;
2191 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2192 cfun->returns_struct = src_cfun->returns_struct;
2193 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2194
2195 init_empty_tree_cfg ();
2196
2197 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2198 ENTRY_BLOCK_PTR->count =
2199 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2200 REG_BR_PROB_BASE);
2201 ENTRY_BLOCK_PTR->frequency
2202 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2203 EXIT_BLOCK_PTR->count =
2204 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2205 REG_BR_PROB_BASE);
2206 EXIT_BLOCK_PTR->frequency =
2207 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2208 if (src_cfun->eh)
2209 init_eh_for_function ();
2210
2211 if (src_cfun->gimple_df)
2212 {
2213 init_tree_ssa (cfun);
2214 cfun->gimple_df->in_ssa_p = true;
2215 init_ssa_operands (cfun);
2216 }
2217 }
2218
2219 /* Helper function for copy_cfg_body. Move debug stmts from the end
2220 of NEW_BB to the beginning of successor basic blocks when needed. If the
2221 successor has multiple predecessors, reset the debug stmts' values;
2222 otherwise keep them. */
2223
2224 static void
2225 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2226 {
2227 edge e;
2228 edge_iterator ei;
2229 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2230
2231 if (gsi_end_p (si)
2232 || gsi_one_before_end_p (si)
2233 || !(stmt_can_throw_internal (gsi_stmt (si))
2234 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2235 return;
2236
2237 FOR_EACH_EDGE (e, ei, new_bb->succs)
2238 {
2239 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2240 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2241 while (is_gimple_debug (gsi_stmt (ssi)))
2242 {
2243 gimple stmt = gsi_stmt (ssi), new_stmt;
2244 tree var;
2245 tree value;
2246
2247 /* For the last edge move the debug stmts instead of copying
2248 them. */
2249 if (ei_one_before_end_p (ei))
2250 {
2251 si = ssi;
2252 gsi_prev (&ssi);
2253 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2254 gimple_debug_bind_reset_value (stmt);
2255 gsi_remove (&si, false);
2256 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2257 continue;
2258 }
2259
2260 if (gimple_debug_bind_p (stmt))
2261 {
2262 var = gimple_debug_bind_get_var (stmt);
2263 if (single_pred_p (e->dest))
2264 {
2265 value = gimple_debug_bind_get_value (stmt);
2266 value = unshare_expr (value);
2267 }
2268 else
2269 value = NULL_TREE;
2270 new_stmt = gimple_build_debug_bind (var, value, stmt);
2271 }
2272 else if (gimple_debug_source_bind_p (stmt))
2273 {
2274 var = gimple_debug_source_bind_get_var (stmt);
2275 value = gimple_debug_source_bind_get_value (stmt);
2276 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2277 }
2278 else
2279 gcc_unreachable ();
2280 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2281 id->debug_stmts.safe_push (new_stmt);
2282 gsi_prev (&ssi);
2283 }
2284 }
2285 }
2286
2287 /* Make a copy of the sub-loops of SRC_PARENT and place them
2288 as sub-loops of DEST_PARENT. */
2289
2290 static void
2291 copy_loops (copy_body_data *id,
2292 struct loop *dest_parent, struct loop *src_parent)
2293 {
2294 struct loop *src_loop = src_parent->inner;
2295 while (src_loop)
2296 {
2297 if (!id->blocks_to_copy
2298 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2299 {
2300 struct loop *dest_loop = alloc_loop ();
2301
2302 /* Assign the new loop its header and latch and associate
2303 those with the new loop. */
2304 if (src_loop->header != NULL)
2305 {
2306 dest_loop->header = (basic_block)src_loop->header->aux;
2307 dest_loop->header->loop_father = dest_loop;
2308 }
2309 if (src_loop->latch != NULL)
2310 {
2311 dest_loop->latch = (basic_block)src_loop->latch->aux;
2312 dest_loop->latch->loop_father = dest_loop;
2313 }
2314
2315 /* Copy loop meta-data. */
2316 copy_loop_info (src_loop, dest_loop);
2317
2318 /* Finally place it into the loop array and the loop tree. */
2319 place_new_loop (cfun, dest_loop);
2320 flow_loop_tree_node_add (dest_parent, dest_loop);
2321
2322 if (src_loop->simduid)
2323 {
2324 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2325 cfun->has_simduid_loops = true;
2326 }
2327 if (src_loop->force_vect)
2328 {
2329 dest_loop->force_vect = true;
2330 cfun->has_force_vect_loops = true;
2331 }
2332
2333 /* Recurse. */
2334 copy_loops (id, dest_loop, src_loop);
2335 }
2336 src_loop = src_loop->next;
2337 }
2338 }
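
/* Hedged user-level sketch of the simduid/force_vect handling above; which
   pragmas and options set these bits at this revision is an assumption, so
   treat the example as illustrative only.

     void
     saxpy (float *a, float *b, int n)
     {
       int i;
     #pragma omp simd
       for (i = 0; i < n; i++)
         a[i] = a[i] + 2.0f * b[i];
     }

   If a loop like this (compiled with -fopenmp or -fopenmp-simd) ends up in
   the region being copied, the loop's simduid decl must be remapped into the
   destination function and cfun->has_simduid_loops/has_force_vect_loops must
   be set there as well, which is what copy_loops does above.  */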
2339
2340 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2341
2342 void
2343 redirect_all_calls (copy_body_data * id, basic_block bb)
2344 {
2345 gimple_stmt_iterator si;
2346 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2347 {
2348 if (is_gimple_call (gsi_stmt (si)))
2349 {
2350 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2351 if (edge)
2352 cgraph_redirect_edge_call_stmt_to_callee (edge);
2353 }
2354 }
2355 }
2356
2357 /* Make a copy of the body of FN so that it can be inserted inline in
2358 another function. Walks FN via CFG, returns new fndecl. */
2359
2360 static tree
2361 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2362 basic_block entry_block_map, basic_block exit_block_map,
2363 basic_block new_entry)
2364 {
2365 tree callee_fndecl = id->src_fn;
2366 /* Original cfun for the callee, doesn't change. */
2367 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2368 struct function *cfun_to_copy;
2369 basic_block bb;
2370 tree new_fndecl = NULL;
2371 bool need_debug_cleanup = false;
2372 gcov_type count_scale;
2373 int last;
2374 int incoming_frequency = 0;
2375 gcov_type incoming_count = 0;
2376
2377 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2378 count_scale
2379 = GCOV_COMPUTE_SCALE (count,
2380 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2381 else
2382 count_scale = REG_BR_PROB_BASE;
2383
2384 /* Register specific tree functions. */
2385 gimple_register_cfg_hooks ();
2386
2387 /* If we are inlining just a region of the function, make sure to connect the new
2388 entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we must
2389 compute the frequency and probability of ENTRY_BLOCK_PTR based on the frequencies
2390 and probabilities of edges incoming from the nonduplicated region. */
2391 if (new_entry)
2392 {
2393 edge e;
2394 edge_iterator ei;
2395
2396 FOR_EACH_EDGE (e, ei, new_entry->preds)
2397 if (!e->src->aux)
2398 {
2399 incoming_frequency += EDGE_FREQUENCY (e);
2400 incoming_count += e->count;
2401 }
2402 incoming_count = apply_scale (incoming_count, count_scale);
2403 incoming_frequency
2404 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2405 ENTRY_BLOCK_PTR->count = incoming_count;
2406 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2407 }
2408
2409 /* Must have a CFG here at this point. */
2410 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2411 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2412
2413 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2414
2415 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2416 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2417 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2418 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2419
2420 /* Duplicate any exception-handling regions. */
2421 if (cfun->eh)
2422 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2423 remap_decl_1, id);
2424
2425 /* Use aux pointers to map the original blocks to their copies. */
2426 FOR_EACH_BB_FN (bb, cfun_to_copy)
2427 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2428 {
2429 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2430 bb->aux = new_bb;
2431 new_bb->aux = bb;
2432 new_bb->loop_father = entry_block_map->loop_father;
2433 }
2434
2435 last = last_basic_block;
2436
2437 /* Now that we've duplicated the blocks, duplicate their edges. */
2438 bool can_make_abnormal_goto
2439 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2440 FOR_ALL_BB_FN (bb, cfun_to_copy)
2441 if (!id->blocks_to_copy
2442 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2443 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2444 can_make_abnormal_goto);
2445
2446 if (new_entry)
2447 {
2448 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2449 e->probability = REG_BR_PROB_BASE;
2450 e->count = incoming_count;
2451 }
2452
2453 /* Duplicate the loop tree, if available and wanted. */
2454 if (loops_for_fn (src_cfun) != NULL
2455 && current_loops != NULL)
2456 {
2457 copy_loops (id, entry_block_map->loop_father,
2458 get_loop (src_cfun, 0));
2459 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2460 loops_state_set (LOOPS_NEED_FIXUP);
2461 }
2462
2463 /* If the loop tree in the source function needed fixup, mark the
2464 destination loop tree for fixup, too. */
2465 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2466 loops_state_set (LOOPS_NEED_FIXUP);
2467
2468 if (gimple_in_ssa_p (cfun))
2469 FOR_ALL_BB_FN (bb, cfun_to_copy)
2470 if (!id->blocks_to_copy
2471 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2472 copy_phis_for_bb (bb, id);
2473
2474 FOR_ALL_BB_FN (bb, cfun_to_copy)
2475 if (bb->aux)
2476 {
2477 if (need_debug_cleanup
2478 && bb->index != ENTRY_BLOCK
2479 && bb->index != EXIT_BLOCK)
2480 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2481 /* Update call edge destinations. This cannot be done before loop
2482 info is updated, because we may split basic blocks. */
2483 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2484 redirect_all_calls (id, (basic_block)bb->aux);
2485 ((basic_block)bb->aux)->aux = NULL;
2486 bb->aux = NULL;
2487 }
2488
2489 /* Zero out the AUX fields of blocks newly created during EH edge
2490 insertion. */
2491 for (; last < last_basic_block; last++)
2492 {
2493 if (need_debug_cleanup)
2494 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2495 BASIC_BLOCK (last)->aux = NULL;
2496 /* Update call edge destinations. This cannot be done before loop
2497 info is updated, because we may split basic blocks. */
2498 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2499 redirect_all_calls (id, BASIC_BLOCK (last));
2500 }
2501 entry_block_map->aux = NULL;
2502 exit_block_map->aux = NULL;
2503
2504 if (id->eh_map)
2505 {
2506 pointer_map_destroy (id->eh_map);
2507 id->eh_map = NULL;
2508 }
2509
2510 return new_fndecl;
2511 }
2512
2513 /* Copy the debug STMT using ID. We deal with these statements in a
2514 special way: if any variable in their VALUE expression wasn't
2515 remapped yet, we won't remap it, because that would get decl uids
2516 out of sync, causing codegen differences between -g and -g0. If
2517 this arises, we drop the VALUE expression altogether. */
2518
2519 static void
2520 copy_debug_stmt (gimple stmt, copy_body_data *id)
2521 {
2522 tree t, *n;
2523 struct walk_stmt_info wi;
2524
2525 if (gimple_block (stmt))
2526 {
2527 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2528 gimple_set_block (stmt, n ? *n : id->block);
2529 }
2530
2531 /* Remap all the operands in COPY. */
2532 memset (&wi, 0, sizeof (wi));
2533 wi.info = id;
2534
2535 processing_debug_stmt = 1;
2536
2537 if (gimple_debug_source_bind_p (stmt))
2538 t = gimple_debug_source_bind_get_var (stmt);
2539 else
2540 t = gimple_debug_bind_get_var (stmt);
2541
2542 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2543 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2544 {
2545 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2546 t = *n;
2547 }
2548 else if (TREE_CODE (t) == VAR_DECL
2549 && !is_global_var (t)
2550 && !pointer_map_contains (id->decl_map, t))
2551 /* T is a non-localized variable. */;
2552 else
2553 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2554
2555 if (gimple_debug_bind_p (stmt))
2556 {
2557 gimple_debug_bind_set_var (stmt, t);
2558
2559 if (gimple_debug_bind_has_value_p (stmt))
2560 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2561 remap_gimple_op_r, &wi, NULL);
2562
2563 /* Punt if any decl couldn't be remapped. */
2564 if (processing_debug_stmt < 0)
2565 gimple_debug_bind_reset_value (stmt);
2566 }
2567 else if (gimple_debug_source_bind_p (stmt))
2568 {
2569 gimple_debug_source_bind_set_var (stmt, t);
2570 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2571 remap_gimple_op_r, &wi, NULL);
2572 /* When inlining, if the source bind refers to one of the optimized
2573 away parameters, change the source bind into a normal debug bind
2574 referring to the corresponding DEBUG_EXPR_DECL that should have
2575 been bound before the call stmt. */
2576 t = gimple_debug_source_bind_get_value (stmt);
2577 if (t != NULL_TREE
2578 && TREE_CODE (t) == PARM_DECL
2579 && id->gimple_call)
2580 {
2581 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2582 unsigned int i;
2583 if (debug_args != NULL)
2584 {
2585 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2586 if ((**debug_args)[i] == DECL_ORIGIN (t)
2587 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2588 {
2589 t = (**debug_args)[i + 1];
2590 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2591 gimple_debug_bind_set_value (stmt, t);
2592 break;
2593 }
2594 }
2595 }
2596 }
2597
2598 processing_debug_stmt = 0;
2599
2600 update_stmt (stmt);
2601 }
2602
2603 /* Process deferred debug stmts. In order to give values better odds
2604 of being successfully remapped, we delay the processing of debug
2605 stmts until all other stmts that might require remapping are
2606 processed. */
2607
2608 static void
2609 copy_debug_stmts (copy_body_data *id)
2610 {
2611 size_t i;
2612 gimple stmt;
2613
2614 if (!id->debug_stmts.exists ())
2615 return;
2616
2617 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2618 copy_debug_stmt (stmt, id);
2619
2620 id->debug_stmts.release ();
2621 }
2622
2623 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2624 another function. */
2625
2626 static tree
2627 copy_tree_body (copy_body_data *id)
2628 {
2629 tree fndecl = id->src_fn;
2630 tree body = DECL_SAVED_TREE (fndecl);
2631
2632 walk_tree (&body, copy_tree_body_r, id, NULL);
2633
2634 return body;
2635 }
2636
2637 /* Make a copy of the body of FN so that it can be inserted inline in
2638 another function. */
2639
2640 static tree
2641 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2642 basic_block entry_block_map, basic_block exit_block_map,
2643 basic_block new_entry)
2644 {
2645 tree fndecl = id->src_fn;
2646 tree body;
2647
2648 /* If this body has a CFG, walk CFG and copy. */
2649 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2650 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2651 new_entry);
2652 copy_debug_stmts (id);
2653
2654 return body;
2655 }
2656
2657 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2658 defined in function FN, or of a data member thereof. */
2659
2660 static bool
2661 self_inlining_addr_expr (tree value, tree fn)
2662 {
2663 tree var;
2664
2665 if (TREE_CODE (value) != ADDR_EXPR)
2666 return false;
2667
2668 var = get_base_address (TREE_OPERAND (value, 0));
2669
2670 return var && auto_var_in_fn_p (var, fn);
2671 }
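
/* Hedged sketch of why this predicate exists; the function below is made up.
   The dangerous case is an ADDR_EXPR whose base is an automatic variable of
   the very function being inlined, which shows up with recursive inlining:

     static inline int
     walk (int *const p, int n)
     {
       int local = n;
       if (n > 0)
         return walk (&local, n - 1);
       return *p;
     }

   When the recursive call is inlined into walk itself, propagating the
   readonly parameter P as the constant &local would make the inlined body
   refer to the wrong frame's LOCAL, so setup_one_parameter below declines
   the shortcut for such values and materializes an initialized VAR_DECL
   instead.  */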
2672
2673 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2674 lexical block and line number information from base_stmt, if given,
2675 or from the last stmt of the block otherwise. */
2676
2677 static gimple
2678 insert_init_debug_bind (copy_body_data *id,
2679 basic_block bb, tree var, tree value,
2680 gimple base_stmt)
2681 {
2682 gimple note;
2683 gimple_stmt_iterator gsi;
2684 tree tracked_var;
2685
2686 if (!gimple_in_ssa_p (id->src_cfun))
2687 return NULL;
2688
2689 if (!MAY_HAVE_DEBUG_STMTS)
2690 return NULL;
2691
2692 tracked_var = target_for_debug_bind (var);
2693 if (!tracked_var)
2694 return NULL;
2695
2696 if (bb)
2697 {
2698 gsi = gsi_last_bb (bb);
2699 if (!base_stmt && !gsi_end_p (gsi))
2700 base_stmt = gsi_stmt (gsi);
2701 }
2702
2703 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2704
2705 if (bb)
2706 {
2707 if (!gsi_end_p (gsi))
2708 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2709 else
2710 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2711 }
2712
2713 return note;
2714 }
2715
2716 static void
2717 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2718 {
2719 /* If VAR represents a zero-sized variable, it's possible that the
2720 assignment statement may result in no gimple statements. */
2721 if (init_stmt)
2722 {
2723 gimple_stmt_iterator si = gsi_last_bb (bb);
2724
2725 /* We can end up with init statements that store to a non-register
2726 from a rhs with a conversion. Handle that here by forcing the
2727 rhs into a temporary. gimple_regimplify_operands is not
2728 prepared to do this for us. */
2729 if (!is_gimple_debug (init_stmt)
2730 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2731 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2732 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2733 {
2734 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2735 gimple_expr_type (init_stmt),
2736 gimple_assign_rhs1 (init_stmt));
2737 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2738 GSI_NEW_STMT);
2739 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2740 gimple_assign_set_rhs1 (init_stmt, rhs);
2741 }
2742 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2743 gimple_regimplify_operands (init_stmt, &si);
2744
2745 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2746 {
2747 tree def = gimple_assign_lhs (init_stmt);
2748 insert_init_debug_bind (id, bb, def, def, init_stmt);
2749 }
2750 }
2751 }
2752
2753 /* Initialize parameter P with VALUE. If needed, produce the init statement
2754 at the end of BB. When BB is NULL, we return the init statement to be
2755 output later. */
2756 static gimple
2757 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2758 basic_block bb, tree *vars)
2759 {
2760 gimple init_stmt = NULL;
2761 tree var;
2762 tree rhs = value;
2763 tree def = (gimple_in_ssa_p (cfun)
2764 ? ssa_default_def (id->src_cfun, p) : NULL);
2765
2766 if (value
2767 && value != error_mark_node
2768 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2769 {
2770 /* If we can match up types by promotion/demotion do so. */
2771 if (fold_convertible_p (TREE_TYPE (p), value))
2772 rhs = fold_convert (TREE_TYPE (p), value);
2773 else
2774 {
2775 /* ??? For valid programs we should not end up here.
2776 Still if we end up with truly mismatched types here, fall back
2777 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2778 GIMPLE to the following passes. */
2779 if (!is_gimple_reg_type (TREE_TYPE (value))
2780 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2781 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2782 else
2783 rhs = build_zero_cst (TREE_TYPE (p));
2784 }
2785 }
2786
2787 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2788 here since the type of this decl must be visible to the calling
2789 function. */
2790 var = copy_decl_to_var (p, id);
2791
2792 /* Declare this new variable. */
2793 DECL_CHAIN (var) = *vars;
2794 *vars = var;
2795
2796 /* Make gimplifier happy about this variable. */
2797 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2798
2799 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2800 we would not need to create a new variable here at all, if it
2801 weren't for debug info. Still, we can just use the argument
2802 value. */
2803 if (TREE_READONLY (p)
2804 && !TREE_ADDRESSABLE (p)
2805 && value && !TREE_SIDE_EFFECTS (value)
2806 && !def)
2807 {
2808 /* We may produce non-gimple trees by adding NOPs or introduce
2809 invalid sharing when the operand is not really constant.
2810 It is not a big deal to prohibit constant propagation here, as
2811 we will constant propagate in the DOM1 pass anyway. */
2812 if (is_gimple_min_invariant (value)
2813 && useless_type_conversion_p (TREE_TYPE (p),
2814 TREE_TYPE (value))
2815 /* We have to be very careful about ADDR_EXPR. Make sure
2816 the base variable isn't a local variable of the inlined
2817 function, e.g., when doing recursive inlining, direct or
2818 mutually-recursive or whatever, which is why we don't
2819 just test whether fn == current_function_decl. */
2820 && ! self_inlining_addr_expr (value, fn))
2821 {
2822 insert_decl_map (id, p, value);
2823 insert_debug_decl_map (id, p, var);
2824 return insert_init_debug_bind (id, bb, var, value, NULL);
2825 }
2826 }
2827
2828 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2829 that way, when the PARM_DECL is encountered, it will be
2830 automatically replaced by the VAR_DECL. */
2831 insert_decl_map (id, p, var);
2832
2833 /* Even if P was TREE_READONLY, the new VAR should not be.
2834 In the original code, we would have constructed a
2835 temporary, and then the function body would have never
2836 changed the value of P. However, now, we will be
2837 constructing VAR directly. The constructor body may
2838 change its value multiple times as it is being
2839 constructed. Therefore, it must not be TREE_READONLY;
2840 the back-end assumes that a TREE_READONLY variable is
2841 assigned to only once. */
2842 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2843 TREE_READONLY (var) = 0;
2844
2845 /* If there is no setup required and we are in SSA, take the easy route
2846 replacing all SSA names representing the function parameter by the
2847 SSA name passed to the function.
2848
2849 We need to construct the map for the variable anyway, as it might be used
2850 in different SSA names when the parameter is set in the function.
2851
2852 Do the replacement at -O0 for const arguments replaced by a constant.
2853 This is important for builtin_constant_p and other constructs requiring
2854 the constant argument to be visible in the inlined function body. */
2855 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2856 && (optimize
2857 || (TREE_READONLY (p)
2858 && is_gimple_min_invariant (rhs)))
2859 && (TREE_CODE (rhs) == SSA_NAME
2860 || is_gimple_min_invariant (rhs))
2861 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2862 {
2863 insert_decl_map (id, def, rhs);
2864 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2865 }
2866
2867 /* If the value of the argument is never used, don't bother initializing
2868 it. */
2869 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2870 {
2871 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2872 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2873 }
2874
2875 /* Initialize this VAR_DECL from the equivalent argument. Convert
2876 the argument to the proper type in case it was promoted. */
2877 if (value)
2878 {
2879 if (rhs == error_mark_node)
2880 {
2881 insert_decl_map (id, p, var);
2882 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2883 }
2884
2885 STRIP_USELESS_TYPE_CONVERSION (rhs);
2886
2887 /* If we are in SSA form properly remap the default definition
2888 or assign to a dummy SSA name if the parameter is unused and
2889 we are not optimizing. */
2890 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2891 {
2892 if (def)
2893 {
2894 def = remap_ssa_name (def, id);
2895 init_stmt = gimple_build_assign (def, rhs);
2896 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2897 set_ssa_default_def (cfun, var, NULL);
2898 }
2899 else if (!optimize)
2900 {
2901 def = make_ssa_name (var, NULL);
2902 init_stmt = gimple_build_assign (def, rhs);
2903 }
2904 }
2905 else
2906 init_stmt = gimple_build_assign (var, rhs);
2907
2908 if (bb && init_stmt)
2909 insert_init_stmt (id, bb, init_stmt);
2910 }
2911 return init_stmt;
2912 }
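
/* Hedged example of the -O0 replacement for const arguments mentioned above;
   the function name is hypothetical.

     static inline __attribute__ ((always_inline)) int
     known_at_compile_time (const int x)
     {
       return __builtin_constant_p (x);
     }

   For a call like known_at_compile_time (42), the default-def SSA name of X
   is mapped straight to the constant 42 even at -O0, so the inlined
   __builtin_constant_p can later fold to 1.  Without that mapping the
   argument would only reach the body through an initialized variable and the
   builtin would likely fold to 0.  */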
2913
2914 /* Generate code to initialize the parameters of the function at the
2915 top of the stack in ID from the GIMPLE_CALL STMT. */
2916
2917 static void
2918 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2919 tree fn, basic_block bb)
2920 {
2921 tree parms;
2922 size_t i;
2923 tree p;
2924 tree vars = NULL_TREE;
2925 tree static_chain = gimple_call_chain (stmt);
2926
2927 /* Figure out what the parameters are. */
2928 parms = DECL_ARGUMENTS (fn);
2929
2930 /* Loop through the parameter declarations, replacing each with an
2931 equivalent VAR_DECL, appropriately initialized. */
2932 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2933 {
2934 tree val;
2935 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2936 setup_one_parameter (id, p, val, fn, bb, &vars);
2937 }
2938 /* After remapping parameters remap their types. This has to be done
2939 in a second loop over all parameters to appropriately remap
2940 variable sized arrays when the size is specified in a
2941 parameter following the array. */
2942 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2943 {
2944 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2945 if (varp
2946 && TREE_CODE (*varp) == VAR_DECL)
2947 {
2948 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2949 ? ssa_default_def (id->src_cfun, p) : NULL);
2950 tree var = *varp;
2951 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2952 /* Also remap the default definition if it was remapped
2953 to the default definition of the parameter replacement
2954 by the parameter setup. */
2955 if (def)
2956 {
2957 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2958 if (defp
2959 && TREE_CODE (*defp) == SSA_NAME
2960 && SSA_NAME_VAR (*defp) == var)
2961 TREE_TYPE (*defp) = TREE_TYPE (var);
2962 }
2963 }
2964 }
2965
2966 /* Initialize the static chain. */
2967 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2968 gcc_assert (fn != current_function_decl);
2969 if (p)
2970 {
2971 /* No static chain? Seems like a bug in tree-nested.c. */
2972 gcc_assert (static_chain);
2973
2974 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2975 }
2976
2977 declare_inline_vars (id->block, vars);
2978 }
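
/* Hedged illustration of the static chain setup above, using a GNU C nested
   function; the names are made up.

     int
     outer (int n)
     {
       int acc = 0;
       int add (int v) { acc += v; return acc; }
       return add (n) + add (1);
     }

   ADD accesses OUTER's frame through its static chain.  If a call to ADD is
   inlined into OUTER, the static chain operand of the GIMPLE_CALL is fed to
   setup_one_parameter for ADD's static_chain_decl, exactly like an ordinary
   argument, so references to ACC in the inlined body keep resolving through
   the chain pointer.  */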
2979
2980
2981 /* Declare a return variable to replace the RESULT_DECL for the
2982 function we are calling. The new variable is registered as the
2983 replacement for the RESULT_DECL, so uses of the result in the
2984 copied body are redirected to it.
2985
2986 RETURN_SLOT, if non-null, is the place where to store the result. It
2987 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2988 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2989
2990 The return value is a (possibly null) value that holds the result
2991 as seen by the caller. */
2992
2993 static tree
2994 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2995 basic_block entry_bb)
2996 {
2997 tree callee = id->src_fn;
2998 tree result = DECL_RESULT (callee);
2999 tree callee_type = TREE_TYPE (result);
3000 tree caller_type;
3001 tree var, use;
3002
3003 /* Handle type-mismatches in the function declaration return type
3004 vs. the call expression. */
3005 if (modify_dest)
3006 caller_type = TREE_TYPE (modify_dest);
3007 else
3008 caller_type = TREE_TYPE (TREE_TYPE (callee));
3009
3010 /* We don't need to do anything for functions that don't return anything. */
3011 if (VOID_TYPE_P (callee_type))
3012 return NULL_TREE;
3013
3014 /* If there was a return slot, then the return value is the
3015 dereferenced address of that object. */
3016 if (return_slot)
3017 {
3018 /* The front end shouldn't have used both return_slot and
3019 a modify expression. */
3020 gcc_assert (!modify_dest);
3021 if (DECL_BY_REFERENCE (result))
3022 {
3023 tree return_slot_addr = build_fold_addr_expr (return_slot);
3024 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3025
3026 /* We are going to construct *&return_slot and we can't do that
3027 for variables that are not believed to be addressable.
3028
3029 FIXME: This check can possibly trigger, because values returned
3030 via the return slot optimization are not believed to have their address
3031 taken by alias analysis. */
3032 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3033 var = return_slot_addr;
3034 }
3035 else
3036 {
3037 var = return_slot;
3038 gcc_assert (TREE_CODE (var) != SSA_NAME);
3039 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3040 }
3041 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3042 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3043 && !DECL_GIMPLE_REG_P (result)
3044 && DECL_P (var))
3045 DECL_GIMPLE_REG_P (var) = 0;
3046 use = NULL;
3047 goto done;
3048 }
3049
3050 /* All types requiring non-trivial constructors should have been handled. */
3051 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3052
3053 /* Attempt to avoid creating a new temporary variable. */
3054 if (modify_dest
3055 && TREE_CODE (modify_dest) != SSA_NAME)
3056 {
3057 bool use_it = false;
3058
3059 /* We can't use MODIFY_DEST if there's type promotion involved. */
3060 if (!useless_type_conversion_p (callee_type, caller_type))
3061 use_it = false;
3062
3063 /* ??? If we're assigning to a variable sized type, then we must
3064 reuse the destination variable, because we've no good way to
3065 create variable sized temporaries at this point. */
3066 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3067 use_it = true;
3068
3069 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3070 reuse it as the result of the call directly. Don't do this if
3071 it would promote MODIFY_DEST to addressable. */
3072 else if (TREE_ADDRESSABLE (result))
3073 use_it = false;
3074 else
3075 {
3076 tree base_m = get_base_address (modify_dest);
3077
3078 /* If the base isn't a decl, then it's a pointer, and we don't
3079 know where that's going to go. */
3080 if (!DECL_P (base_m))
3081 use_it = false;
3082 else if (is_global_var (base_m))
3083 use_it = false;
3084 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3085 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3086 && !DECL_GIMPLE_REG_P (result)
3087 && DECL_GIMPLE_REG_P (base_m))
3088 use_it = false;
3089 else if (!TREE_ADDRESSABLE (base_m))
3090 use_it = true;
3091 }
3092
3093 if (use_it)
3094 {
3095 var = modify_dest;
3096 use = NULL;
3097 goto done;
3098 }
3099 }
3100
3101 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3102
3103 var = copy_result_decl_to_var (result, id);
3104 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3105
3106 /* Do not have the rest of GCC warn about this variable as it should
3107 not be visible to the user. */
3108 TREE_NO_WARNING (var) = 1;
3109
3110 declare_inline_vars (id->block, var);
3111
3112 /* Build the use expr. If the return type of the function was
3113 promoted, convert it back to the expected type. */
3114 use = var;
3115 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3116 {
3117 /* If we can match up types by promotion/demotion do so. */
3118 if (fold_convertible_p (caller_type, var))
3119 use = fold_convert (caller_type, var);
3120 else
3121 {
3122 /* ??? For valid programs we should not end up here.
3123 Still if we end up with truly mismatched types here, fall back
3124 to using a MEM_REF to not leak invalid GIMPLE to the following
3125 passes. */
3126 /* Prevent var from being written into SSA form. */
3127 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3128 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3129 DECL_GIMPLE_REG_P (var) = false;
3130 else if (is_gimple_reg_type (TREE_TYPE (var)))
3131 TREE_ADDRESSABLE (var) = true;
3132 use = fold_build2 (MEM_REF, caller_type,
3133 build_fold_addr_expr (var),
3134 build_int_cst (ptr_type_node, 0));
3135 }
3136 }
3137
3138 STRIP_USELESS_TYPE_CONVERSION (use);
3139
3140 if (DECL_BY_REFERENCE (result))
3141 {
3142 TREE_ADDRESSABLE (var) = 1;
3143 var = build_fold_addr_expr (var);
3144 }
3145
3146 done:
3147 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3148 way, when the RESULT_DECL is encountered, it will be
3149 automatically replaced by the VAR_DECL.
3150
3151 When returning by reference, ensure that RESULT_DECL remaps to
3152 gimple_val. */
3153 if (DECL_BY_REFERENCE (result)
3154 && !is_gimple_val (var))
3155 {
3156 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3157 insert_decl_map (id, result, temp);
3158 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3159 its default_def SSA_NAME. */
3160 if (gimple_in_ssa_p (id->src_cfun)
3161 && is_gimple_reg (result))
3162 {
3163 temp = make_ssa_name (temp, NULL);
3164 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3165 }
3166 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3167 }
3168 else
3169 insert_decl_map (id, result, var);
3170
3171 /* Remember this so we can ignore it in remap_decls. */
3172 id->retvar = var;
3173
3174 return use;
3175 }
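
/* Hedged sketch of the MODIFY_DEST reuse decided above; the types and
   function names are invented.

     struct big { int v[16]; };

     static inline struct big
     make_big (int x)
     {
       struct big b = { { x } };
       return b;
     }

     void
     use_big (void)
     {
       struct big dst;
       dst = make_big (1);
     }

   When make_big is inlined, DST is the MODIFY_DEST: since no type promotion
   is involved and DST is neither addressable nor global, DST itself can be
   reused as the return variable and no extra temporary is created.  Had DST
   been a global or had its address taken, the code above would fall back to
   a fresh VAR_DECL.  */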
3176
3177 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3178 to a local label. */
3179
3180 static tree
3181 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3182 {
3183 tree node = *nodep;
3184 tree fn = (tree) fnp;
3185
3186 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3187 return node;
3188
3189 if (TYPE_P (node))
3190 *walk_subtrees = 0;
3191
3192 return NULL_TREE;
3193 }
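
/* Hedged example of the construct this callback looks for: a static local
   whose initializer captures the address of a local label, as in the classic
   computed-goto dispatch idiom; the names are made up.

     int
     interp (const unsigned char *op)
     {
       static void *dispatch[] = { &&op_halt, &&op_nop };
       goto *dispatch[*op];
     op_nop:
       return 1;
     op_halt:
       return 0;
     }

   The label addresses baked into DISPATCH refer to code of this particular
   instantiation of interp, so a copied body could not remap them; hence
   copy_forbidden below refuses to clone or version such a function.  */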
3194
3195 /* Determine if the function can be copied. If so return NULL. If
3196 not return a string describing the reason for failure. */
3197
3198 static const char *
3199 copy_forbidden (struct function *fun, tree fndecl)
3200 {
3201 const char *reason = fun->cannot_be_copied_reason;
3202 tree decl;
3203 unsigned ix;
3204
3205 /* Only examine the function once. */
3206 if (fun->cannot_be_copied_set)
3207 return reason;
3208
3209 /* We cannot copy a function that receives a non-local goto
3210 because we cannot remap the destination label used in the
3211 function that is performing the non-local goto. */
3212 /* ??? Actually, this should be possible, if we work at it.
3213 No doubt there's just a handful of places that simply
3214 assume it doesn't happen and don't substitute properly. */
3215 if (fun->has_nonlocal_label)
3216 {
3217 reason = G_("function %q+F can never be copied "
3218 "because it receives a non-local goto");
3219 goto fail;
3220 }
3221
3222 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3223 if (TREE_CODE (decl) == VAR_DECL
3224 && TREE_STATIC (decl)
3225 && !DECL_EXTERNAL (decl)
3226 && DECL_INITIAL (decl)
3227 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3228 has_label_address_in_static_1,
3229 fndecl))
3230 {
3231 reason = G_("function %q+F can never be copied because it saves "
3232 "address of local label in a static variable");
3233 goto fail;
3234 }
3235
3236 fail:
3237 fun->cannot_be_copied_reason = reason;
3238 fun->cannot_be_copied_set = true;
3239 return reason;
3240 }
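
/* Hedged example of the non-local goto case rejected above; GNU C nested
   functions are used and the names are invented.

     int
     parse (int c)
     {
       __label__ bail;
       void reject (int v) { if (v < 0) goto bail; }
       reject (c);
       return 0;
     bail:
       return -1;
     }

   PARSE receives a non-local goto from REJECT, so has_nonlocal_label is set
   and copy_forbidden refuses to duplicate its body: the destination label
   used by the goto in REJECT could not be remapped to a copy.  */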
3241
3242
3243 static const char *inline_forbidden_reason;
3244
3245 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3246 iff a function cannot be inlined. Also sets the reason why. */
3247
3248 static tree
3249 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3250 struct walk_stmt_info *wip)
3251 {
3252 tree fn = (tree) wip->info;
3253 tree t;
3254 gimple stmt = gsi_stmt (*gsi);
3255
3256 switch (gimple_code (stmt))
3257 {
3258 case GIMPLE_CALL:
3259 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3260 this may change the program's memory overhead drastically when the
3261 function using alloca is called in a loop. In the GCC present in
3262 SPEC2000, inlining into schedule_block caused it to require 2GB of
3263 RAM instead of 256MB. Don't do so for alloca calls emitted for
3264 VLA objects, as those can't cause unbounded growth (they're always
3265 wrapped inside stack_save/stack_restore regions). */
3266 if (gimple_alloca_call_p (stmt)
3267 && !gimple_call_alloca_for_var_p (stmt)
3268 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3269 {
3270 inline_forbidden_reason
3271 = G_("function %q+F can never be inlined because it uses "
3272 "alloca (override using the always_inline attribute)");
3273 *handled_ops_p = true;
3274 return fn;
3275 }
3276
3277 t = gimple_call_fndecl (stmt);
3278 if (t == NULL_TREE)
3279 break;
3280
3281 /* We cannot inline functions that call setjmp. */
3282 if (setjmp_call_p (t))
3283 {
3284 inline_forbidden_reason
3285 = G_("function %q+F can never be inlined because it uses setjmp");
3286 *handled_ops_p = true;
3287 return t;
3288 }
3289
3290 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3291 switch (DECL_FUNCTION_CODE (t))
3292 {
3293 /* We cannot inline functions that take a variable number of
3294 arguments. */
3295 case BUILT_IN_VA_START:
3296 case BUILT_IN_NEXT_ARG:
3297 case BUILT_IN_VA_END:
3298 inline_forbidden_reason
3299 = G_("function %q+F can never be inlined because it "
3300 "uses variable argument lists");
3301 *handled_ops_p = true;
3302 return t;
3303
3304 case BUILT_IN_LONGJMP:
3305 /* We can't inline functions that call __builtin_longjmp at
3306 all. The non-local goto machinery really requires the
3307 destination be in a different function. If we allow the
3308 function calling __builtin_longjmp to be inlined into the
3309 function calling __builtin_setjmp, Things will Go Awry. */
3310 inline_forbidden_reason
3311 = G_("function %q+F can never be inlined because "
3312 "it uses setjmp-longjmp exception handling");
3313 *handled_ops_p = true;
3314 return t;
3315
3316 case BUILT_IN_NONLOCAL_GOTO:
3317 /* Similarly. */
3318 inline_forbidden_reason
3319 = G_("function %q+F can never be inlined because "
3320 "it uses non-local goto");
3321 *handled_ops_p = true;
3322 return t;
3323
3324 case BUILT_IN_RETURN:
3325 case BUILT_IN_APPLY_ARGS:
3326 /* If a __builtin_apply_args caller would be inlined,
3327 it would be saving arguments of the function it has
3328 been inlined into. Similarly __builtin_return would
3329 return from the function the inlined code has been inlined into. */
3330 inline_forbidden_reason
3331 = G_("function %q+F can never be inlined because "
3332 "it uses __builtin_return or __builtin_apply_args");
3333 *handled_ops_p = true;
3334 return t;
3335
3336 default:
3337 break;
3338 }
3339 break;
3340
3341 case GIMPLE_GOTO:
3342 t = gimple_goto_dest (stmt);
3343
3344 /* We will not inline a function which uses computed goto. The
3345 addresses of its local labels, which may be tucked into
3346 global storage, are of course not constant across
3347 instantiations, which causes unexpected behavior. */
3348 if (TREE_CODE (t) != LABEL_DECL)
3349 {
3350 inline_forbidden_reason
3351 = G_("function %q+F can never be inlined "
3352 "because it contains a computed goto");
3353 *handled_ops_p = true;
3354 return t;
3355 }
3356 break;
3357
3358 default:
3359 break;
3360 }
3361
3362 *handled_ops_p = false;
3363 return NULL_TREE;
3364 }
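
/* Hedged examples of user code that trips the checks above; the names are
   invented and the diagnostics are the inline_forbidden_reason strings set
   in this function.

     #include <setjmp.h>
     static jmp_buf env;

     static inline int with_setjmp (void) { return setjmp (env); }

     static inline void *with_alloca (unsigned n) { return __builtin_alloca (n); }

     static inline int with_computed_goto (int i)
     {
       void *l[] = { &&a, &&b };
       goto *l[i & 1];
     a: return 0;
     b: return 1;
     }

   with_setjmp can never be inlined because it uses setjmp; with_alloca is
   refused unless it is marked always_inline; and with_computed_goto is
   rejected because it contains a computed goto.  */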
3365
3366 /* Return true if FNDECL is a function that cannot be inlined into
3367 another one. */
3368
3369 static bool
3370 inline_forbidden_p (tree fndecl)
3371 {
3372 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3373 struct walk_stmt_info wi;
3374 struct pointer_set_t *visited_nodes;
3375 basic_block bb;
3376 bool forbidden_p = false;
3377
3378 /* First check for shared reasons not to copy the code. */
3379 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3380 if (inline_forbidden_reason != NULL)
3381 return true;
3382
3383 /* Next, walk the statements of the function looking for
3384 constructs we can't handle or that are non-optimal for inlining. */
3385 visited_nodes = pointer_set_create ();
3386 memset (&wi, 0, sizeof (wi));
3387 wi.info = (void *) fndecl;
3388 wi.pset = visited_nodes;
3389
3390 FOR_EACH_BB_FN (bb, fun)
3391 {
3392 gimple ret;
3393 gimple_seq seq = bb_seq (bb);
3394 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3395 forbidden_p = (ret != NULL);
3396 if (forbidden_p)
3397 break;
3398 }
3399
3400 pointer_set_destroy (visited_nodes);
3401 return forbidden_p;
3402 }
3403 \f
3404 /* Return false if the function FNDECL cannot be inlined on account of its
3405 attributes, true otherwise. */
3406 static bool
3407 function_attribute_inlinable_p (const_tree fndecl)
3408 {
3409 if (targetm.attribute_table)
3410 {
3411 const_tree a;
3412
3413 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3414 {
3415 const_tree name = TREE_PURPOSE (a);
3416 int i;
3417
3418 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3419 if (is_attribute_p (targetm.attribute_table[i].name, name))
3420 return targetm.function_attribute_inlinable_p (fndecl);
3421 }
3422 }
3423
3424 return true;
3425 }
3426
3427 /* Returns nonzero if FN is a function that does not have any
3428 fundamental inline blocking properties. */
3429
3430 bool
3431 tree_inlinable_function_p (tree fn)
3432 {
3433 bool inlinable = true;
3434 bool do_warning;
3435 tree always_inline;
3436
3437 /* If we've already decided this function shouldn't be inlined,
3438 there's no need to check again. */
3439 if (DECL_UNINLINABLE (fn))
3440 return false;
3441
3442 /* We only warn for functions declared `inline' by the user. */
3443 do_warning = (warn_inline
3444 && DECL_DECLARED_INLINE_P (fn)
3445 && !DECL_NO_INLINE_WARNING_P (fn)
3446 && !DECL_IN_SYSTEM_HEADER (fn));
3447
3448 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3449
3450 if (flag_no_inline
3451 && always_inline == NULL)
3452 {
3453 if (do_warning)
3454 warning (OPT_Winline, "function %q+F can never be inlined because it "
3455 "is suppressed using -fno-inline", fn);
3456 inlinable = false;
3457 }
3458
3459 else if (!function_attribute_inlinable_p (fn))
3460 {
3461 if (do_warning)
3462 warning (OPT_Winline, "function %q+F can never be inlined because it "
3463 "uses attributes conflicting with inlining", fn);
3464 inlinable = false;
3465 }
3466
3467 else if (inline_forbidden_p (fn))
3468 {
3469 /* See if we should warn about uninlinable functions. Previously,
3470 some of these warnings would be issued while trying to expand
3471 the function inline, but that would cause multiple warnings
3472 about functions that would for example call alloca. But since
3473 this is a property of the function, just one warning is enough.
3474 As a bonus we can now give more details about the reason why a
3475 function is not inlinable. */
3476 if (always_inline)
3477 error (inline_forbidden_reason, fn);
3478 else if (do_warning)
3479 warning (OPT_Winline, inline_forbidden_reason, fn);
3480
3481 inlinable = false;
3482 }
3483
3484 /* Squirrel away the result so that we don't have to check again. */
3485 DECL_UNINLINABLE (fn) = !inlinable;
3486
3487 return inlinable;
3488 }
3489
3490 /* Estimate the cost of a memory move of TYPE. Use the machine-dependent
3491 word size and take a possible memcpy call into account. */
3492
3493 int
3494 estimate_move_cost (tree type)
3495 {
3496 HOST_WIDE_INT size;
3497
3498 gcc_assert (!VOID_TYPE_P (type));
3499
3500 if (TREE_CODE (type) == VECTOR_TYPE)
3501 {
3502 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3503 enum machine_mode simd
3504 = targetm.vectorize.preferred_simd_mode (inner);
3505 int simd_mode_size = GET_MODE_SIZE (simd);
3506 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3507 / simd_mode_size);
3508 }
3509
3510 size = int_size_in_bytes (type);
3511
3512 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3513 /* Cost of a memcpy call, 3 arguments and the call. */
3514 return 4;
3515 else
3516 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3517 }
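
/* Illustrative sketch only; the MOVE_MAX_PIECES and MOVE_RATIO values
   below are assumed rather than taken from any particular target.  With
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (!optimize_size) == 4, a 20-byte
   aggregate costs (20 + 8 - 1) / 8 == 3 piecewise moves, while a 64-byte
   aggregate exceeds 8 * 4 == 32 bytes and is charged the flat memcpy
   cost of 4.  */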
3518
3519 /* Returns the cost of operation CODE, according to WEIGHTS. */
3520
3521 static int
3522 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3523 tree op1 ATTRIBUTE_UNUSED, tree op2)
3524 {
3525 switch (code)
3526 {
3527 /* These are "free" conversions, or their presumed cost
3528 is folded into other operations. */
3529 case RANGE_EXPR:
3530 CASE_CONVERT:
3531 case COMPLEX_EXPR:
3532 case PAREN_EXPR:
3533 case VIEW_CONVERT_EXPR:
3534 return 0;
3535
3536 /* Assign cost of 1 to usual operations.
3537 ??? We may consider mapping RTL costs to this. */
3538 case COND_EXPR:
3539 case VEC_COND_EXPR:
3540 case VEC_PERM_EXPR:
3541
3542 case PLUS_EXPR:
3543 case POINTER_PLUS_EXPR:
3544 case MINUS_EXPR:
3545 case MULT_EXPR:
3546 case MULT_HIGHPART_EXPR:
3547 case FMA_EXPR:
3548
3549 case ADDR_SPACE_CONVERT_EXPR:
3550 case FIXED_CONVERT_EXPR:
3551 case FIX_TRUNC_EXPR:
3552
3553 case NEGATE_EXPR:
3554 case FLOAT_EXPR:
3555 case MIN_EXPR:
3556 case MAX_EXPR:
3557 case ABS_EXPR:
3558
3559 case LSHIFT_EXPR:
3560 case RSHIFT_EXPR:
3561 case LROTATE_EXPR:
3562 case RROTATE_EXPR:
3563 case VEC_LSHIFT_EXPR:
3564 case VEC_RSHIFT_EXPR:
3565
3566 case BIT_IOR_EXPR:
3567 case BIT_XOR_EXPR:
3568 case BIT_AND_EXPR:
3569 case BIT_NOT_EXPR:
3570
3571 case TRUTH_ANDIF_EXPR:
3572 case TRUTH_ORIF_EXPR:
3573 case TRUTH_AND_EXPR:
3574 case TRUTH_OR_EXPR:
3575 case TRUTH_XOR_EXPR:
3576 case TRUTH_NOT_EXPR:
3577
3578 case LT_EXPR:
3579 case LE_EXPR:
3580 case GT_EXPR:
3581 case GE_EXPR:
3582 case EQ_EXPR:
3583 case NE_EXPR:
3584 case ORDERED_EXPR:
3585 case UNORDERED_EXPR:
3586
3587 case UNLT_EXPR:
3588 case UNLE_EXPR:
3589 case UNGT_EXPR:
3590 case UNGE_EXPR:
3591 case UNEQ_EXPR:
3592 case LTGT_EXPR:
3593
3594 case CONJ_EXPR:
3595
3596 case PREDECREMENT_EXPR:
3597 case PREINCREMENT_EXPR:
3598 case POSTDECREMENT_EXPR:
3599 case POSTINCREMENT_EXPR:
3600
3601 case REALIGN_LOAD_EXPR:
3602
3603 case REDUC_MAX_EXPR:
3604 case REDUC_MIN_EXPR:
3605 case REDUC_PLUS_EXPR:
3606 case WIDEN_SUM_EXPR:
3607 case WIDEN_MULT_EXPR:
3608 case DOT_PROD_EXPR:
3609 case WIDEN_MULT_PLUS_EXPR:
3610 case WIDEN_MULT_MINUS_EXPR:
3611 case WIDEN_LSHIFT_EXPR:
3612
3613 case VEC_WIDEN_MULT_HI_EXPR:
3614 case VEC_WIDEN_MULT_LO_EXPR:
3615 case VEC_WIDEN_MULT_EVEN_EXPR:
3616 case VEC_WIDEN_MULT_ODD_EXPR:
3617 case VEC_UNPACK_HI_EXPR:
3618 case VEC_UNPACK_LO_EXPR:
3619 case VEC_UNPACK_FLOAT_HI_EXPR:
3620 case VEC_UNPACK_FLOAT_LO_EXPR:
3621 case VEC_PACK_TRUNC_EXPR:
3622 case VEC_PACK_SAT_EXPR:
3623 case VEC_PACK_FIX_TRUNC_EXPR:
3624 case VEC_WIDEN_LSHIFT_HI_EXPR:
3625 case VEC_WIDEN_LSHIFT_LO_EXPR:
3626
3627 return 1;
3628
3629 /* A few special cases of expensive operations. This is useful
3630 to avoid inlining functions that contain too many of these. */
3631 case TRUNC_DIV_EXPR:
3632 case CEIL_DIV_EXPR:
3633 case FLOOR_DIV_EXPR:
3634 case ROUND_DIV_EXPR:
3635 case EXACT_DIV_EXPR:
3636 case TRUNC_MOD_EXPR:
3637 case CEIL_MOD_EXPR:
3638 case FLOOR_MOD_EXPR:
3639 case ROUND_MOD_EXPR:
3640 case RDIV_EXPR:
3641 if (TREE_CODE (op2) != INTEGER_CST)
3642 return weights->div_mod_cost;
3643 return 1;
3644
3645 default:
3646 /* We expect a copy assignment with no operator. */
3647 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3648 return 0;
3649 }
3650 }
3651
3652
3653 /* Estimate number of instructions that will be created by expanding
3654 the statements in the statement sequence STMTS.
3655 WEIGHTS contains weights attributed to various constructs. */
3656
3657 static int
3658 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3659 {
3660 int cost;
3661 gimple_stmt_iterator gsi;
3662
3663 cost = 0;
3664 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3665 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3666
3667 return cost;
3668 }
3669
3670
3671 /* Estimate number of instructions that will be created by expanding STMT.
3672 WEIGHTS contains weights attributed to various constructs. */
3673
3674 int
3675 estimate_num_insns (gimple stmt, eni_weights *weights)
3676 {
3677 unsigned cost, i;
3678 enum gimple_code code = gimple_code (stmt);
3679 tree lhs;
3680 tree rhs;
3681
3682 switch (code)
3683 {
3684 case GIMPLE_ASSIGN:
3685 /* Try to estimate the cost of assignments. We have two cases to
3686 deal with:
3687 1) Simple assignments to registers;
3688 2) Stores to things that must live in memory. This includes
3689 "normal" stores to scalars, but also assignments of large
3690 structures, or constructors of big arrays.
3691
3692 Let us look at these two cases, assuming we have "a = b + C":
3693 <GIMPLE_ASSIGN <var_decl "a">
3694 <plus_expr <var_decl "b"> <constant C>>
3695 If "a" is a GIMPLE register, the assignment to it is free on almost
3696 any target, because "a" usually ends up in a real register. Hence
3697 the only cost of this expression comes from the PLUS_EXPR, and we
3698 can ignore the GIMPLE_ASSIGN.
3699 If "a" is not a GIMPLE register, the assignment to "a" will most
3700 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3701 of moving something into "a", which we compute using the function
3702 estimate_move_cost. */
3703 if (gimple_clobber_p (stmt))
3704 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3705
3706 lhs = gimple_assign_lhs (stmt);
3707 rhs = gimple_assign_rhs1 (stmt);
3708
3709 cost = 0;
3710
3711 /* Account for the cost of moving to / from memory. */
3712 if (gimple_store_p (stmt))
3713 cost += estimate_move_cost (TREE_TYPE (lhs));
3714 if (gimple_assign_load_p (stmt))
3715 cost += estimate_move_cost (TREE_TYPE (rhs));
3716
3717 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3718 gimple_assign_rhs1 (stmt),
3719 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3720 == GIMPLE_BINARY_RHS
3721 ? gimple_assign_rhs2 (stmt) : NULL);
3722 break;
3723
3724 case GIMPLE_COND:
3725 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3726 gimple_op (stmt, 0),
3727 gimple_op (stmt, 1));
3728 break;
3729
3730 case GIMPLE_SWITCH:
3731 /* Take into account cost of the switch + guess 2 conditional jumps for
3732 each case label.
3733
3734 TODO: once the switch expansion logic is sufficiently separated, we can
3735 do a better job of estimating the cost of the switch. */
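      /* Worked example with figures chosen purely for illustration: a
         switch with 16 case labels is charged floor_log2 (16) * 2 == 8
         when estimating time, but 16 * 2 == 32 when estimating size.  */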
3736 if (weights->time_based)
3737 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3738 else
3739 cost = gimple_switch_num_labels (stmt) * 2;
3740 break;
3741
3742 case GIMPLE_CALL:
3743 {
3744 tree decl = gimple_call_fndecl (stmt);
3745 struct cgraph_node *node = NULL;
3746
3747 /* Do not special case builtins where we see the body.
3748 This just confuses the inliner. */
3749 if (!decl || !(node = cgraph_get_node (decl)) || node->symbol.definition)
3750 ;
3751 /* For builtins that are likely expanded to nothing or
3752 inlined, do not account for operand costs. */
3753 else if (is_simple_builtin (decl))
3754 return 0;
3755 else if (is_inexpensive_builtin (decl))
3756 return weights->target_builtin_call_cost;
3757 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3758 {
3759 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3760 specialize the cheap expansion we do here.
3761 ??? This asks for a more general solution. */
3762 switch (DECL_FUNCTION_CODE (decl))
3763 {
3764 case BUILT_IN_POW:
3765 case BUILT_IN_POWF:
3766 case BUILT_IN_POWL:
3767 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3768 && REAL_VALUES_EQUAL
3769 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3770 return estimate_operator_cost (MULT_EXPR, weights,
3771 gimple_call_arg (stmt, 0),
3772 gimple_call_arg (stmt, 0));
3773 break;
3774
3775 default:
3776 break;
3777 }
3778 }
3779
3780 cost = node ? weights->call_cost : weights->indirect_call_cost;
3781 if (gimple_call_lhs (stmt))
3782 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3783 for (i = 0; i < gimple_call_num_args (stmt); i++)
3784 {
3785 tree arg = gimple_call_arg (stmt, i);
3786 cost += estimate_move_cost (TREE_TYPE (arg));
3787 }
3788 break;
3789 }
3790
3791 case GIMPLE_RETURN:
3792 return weights->return_cost;
3793
3794 case GIMPLE_GOTO:
3795 case GIMPLE_LABEL:
3796 case GIMPLE_NOP:
3797 case GIMPLE_PHI:
3798 case GIMPLE_PREDICT:
3799 case GIMPLE_DEBUG:
3800 return 0;
3801
3802 case GIMPLE_ASM:
3803 {
3804 int count = asm_str_count (gimple_asm_string (stmt));
3805 /* 1000 means infinity. This avoids overflows later
3806 with very long asm statements. */
3807 if (count > 1000)
3808 count = 1000;
3809 return count;
3810 }
3811
3812 case GIMPLE_RESX:
3813 /* This is either going to be an external function call with one
3814 argument, or two register copy statements plus a goto. */
3815 return 2;
3816
3817 case GIMPLE_EH_DISPATCH:
3818 /* ??? This is going to turn into a switch statement. Ideally
3819 we'd have a look at the eh region and estimate the number of
3820 edges involved. */
3821 return 10;
3822
3823 case GIMPLE_BIND:
3824 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3825
3826 case GIMPLE_EH_FILTER:
3827 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3828
3829 case GIMPLE_CATCH:
3830 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3831
3832 case GIMPLE_TRY:
3833 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3834 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3835
3836 /* OpenMP directives are generally very expensive. */
3837
3838 case GIMPLE_OMP_RETURN:
3839 case GIMPLE_OMP_SECTIONS_SWITCH:
3840 case GIMPLE_OMP_ATOMIC_STORE:
3841 case GIMPLE_OMP_CONTINUE:
3842 /* ...except these, which are cheap. */
3843 return 0;
3844
3845 case GIMPLE_OMP_ATOMIC_LOAD:
3846 return weights->omp_cost;
3847
3848 case GIMPLE_OMP_FOR:
3849 return (weights->omp_cost
3850 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3851 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3852
3853 case GIMPLE_OMP_PARALLEL:
3854 case GIMPLE_OMP_TASK:
3855 case GIMPLE_OMP_CRITICAL:
3856 case GIMPLE_OMP_MASTER:
3857 case GIMPLE_OMP_TASKGROUP:
3858 case GIMPLE_OMP_ORDERED:
3859 case GIMPLE_OMP_SECTION:
3860 case GIMPLE_OMP_SECTIONS:
3861 case GIMPLE_OMP_SINGLE:
3862 case GIMPLE_OMP_TARGET:
3863 case GIMPLE_OMP_TEAMS:
3864 return (weights->omp_cost
3865 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3866
3867 case GIMPLE_TRANSACTION:
3868 return (weights->tm_cost
3869 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3870 weights));
3871
3872 default:
3873 gcc_unreachable ();
3874 }
3875
3876 return cost;
3877 }
3878
3879 /* Estimate number of instructions that will be created by expanding
3880 function FNDECL. WEIGHTS contains weights attributed to various
3881 constructs. */
3882
3883 int
3884 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3885 {
3886 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3887 gimple_stmt_iterator bsi;
3888 basic_block bb;
3889 int n = 0;
3890
3891 gcc_assert (my_function && my_function->cfg);
3892 FOR_EACH_BB_FN (bb, my_function)
3893 {
3894 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3895 n += estimate_num_insns (gsi_stmt (bsi), weights);
3896 }
3897
3898 return n;
3899 }
3900
3901
3902 /* Initializes weights used by estimate_num_insns. */
3903
3904 void
3905 init_inline_once (void)
3906 {
3907 eni_size_weights.call_cost = 1;
3908 eni_size_weights.indirect_call_cost = 3;
3909 eni_size_weights.target_builtin_call_cost = 1;
3910 eni_size_weights.div_mod_cost = 1;
3911 eni_size_weights.omp_cost = 40;
3912 eni_size_weights.tm_cost = 10;
3913 eni_size_weights.time_based = false;
3914 eni_size_weights.return_cost = 1;
3915
3916 /* Estimating time for call is difficult, since we have no idea what the
3917 called function does. In the current uses of eni_time_weights,
3918 underestimating the cost does less harm than overestimating it, so
3919 we choose a rather small value here. */
3920 eni_time_weights.call_cost = 10;
3921 eni_time_weights.indirect_call_cost = 15;
3922 eni_time_weights.target_builtin_call_cost = 1;
3923 eni_time_weights.div_mod_cost = 10;
3924 eni_time_weights.omp_cost = 40;
3925 eni_time_weights.tm_cost = 40;
3926 eni_time_weights.time_based = true;
3927 eni_time_weights.return_cost = 2;
3928 }
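
/* A minimal usage sketch, not part of the original sources: once
   init_inline_once has run, callers pick the weight set matching what
   they want to estimate, e.g.

     int size = estimate_num_insns (stmt, &eni_size_weights);
     int time = estimate_num_insns (stmt, &eni_time_weights);

   where eni_size_weights approximates code-size growth and
   eni_time_weights approximates execution time.  */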
3929
3930 /* Estimate the number of instructions in a gimple_seq. */
3931
3932 int
3933 count_insns_seq (gimple_seq seq, eni_weights *weights)
3934 {
3935 gimple_stmt_iterator gsi;
3936 int n = 0;
3937 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3938 n += estimate_num_insns (gsi_stmt (gsi), weights);
3939
3940 return n;
3941 }
3942
3943
3944 /* Install the new lexical block NEW_BLOCK underneath CURRENT_BLOCK. */
3945
3946 static void
3947 prepend_lexical_block (tree current_block, tree new_block)
3948 {
3949 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3950 BLOCK_SUBBLOCKS (current_block) = new_block;
3951 BLOCK_SUPERCONTEXT (new_block) = current_block;
3952 }
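
/* Sketch of the effect, for illustration: if CURRENT_BLOCK already has
   subblocks B1 -> B2, then after prepend_lexical_block (CURRENT_BLOCK, N)
   the subblock chain becomes N -> B1 -> B2, and BLOCK_SUPERCONTEXT (N)
   points back at CURRENT_BLOCK.  */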
3953
3954 /* Add local variables from CALLEE to CALLER. */
3955
3956 static inline void
3957 add_local_variables (struct function *callee, struct function *caller,
3958 copy_body_data *id)
3959 {
3960 tree var;
3961 unsigned ix;
3962
3963 FOR_EACH_LOCAL_DECL (callee, ix, var)
3964 if (!can_be_nonlocal (var, id))
3965 {
3966 tree new_var = remap_decl (var, id);
3967
3968 /* Remap debug-expressions. */
3969 if (TREE_CODE (new_var) == VAR_DECL
3970 && DECL_HAS_DEBUG_EXPR_P (var)
3971 && new_var != var)
3972 {
3973 tree tem = DECL_DEBUG_EXPR (var);
3974 bool old_regimplify = id->regimplify;
3975 id->remapping_type_depth++;
3976 walk_tree (&tem, copy_tree_body_r, id, NULL);
3977 id->remapping_type_depth--;
3978 id->regimplify = old_regimplify;
3979 SET_DECL_DEBUG_EXPR (new_var, tem);
3980 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
3981 }
3982 add_local_decl (caller, new_var);
3983 }
3984 }
3985
3986 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3987
3988 static bool
3989 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3990 {
3991 tree use_retvar;
3992 tree fn;
3993 struct pointer_map_t *st, *dst;
3994 tree return_slot;
3995 tree modify_dest;
3996 location_t saved_location;
3997 struct cgraph_edge *cg_edge;
3998 cgraph_inline_failed_t reason;
3999 basic_block return_block;
4000 edge e;
4001 gimple_stmt_iterator gsi, stmt_gsi;
4002 bool successfully_inlined = FALSE;
4003 bool purge_dead_abnormal_edges;
4004
4005 /* Set input_location here so we get the right instantiation context
4006 if we call instantiate_decl from inlinable_function_p. */
4007 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4008 saved_location = input_location;
4009 input_location = gimple_location (stmt);
4010
4011 /* From here on, we're only interested in CALL_EXPRs. */
4012 if (gimple_code (stmt) != GIMPLE_CALL)
4013 goto egress;
4014
4015 cg_edge = cgraph_edge (id->dst_node, stmt);
4016 gcc_checking_assert (cg_edge);
4017 /* First, see if we can figure out what function is being called.
4018 If we cannot, then there is no hope of inlining the function. */
4019 if (cg_edge->indirect_unknown_callee)
4020 goto egress;
4021 fn = cg_edge->callee->symbol.decl;
4022 gcc_checking_assert (fn);
4023
4024 /* If FN is a declaration of a function in a nested scope that was
4025 globally declared inline, we don't set its DECL_INITIAL.
4026 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4027 C++ front-end uses it for cdtors to refer to their internal
4028 declarations, which are not real functions. Fortunately those
4029 don't have trees to be saved, so we can tell by checking their
4030 gimple_body. */
4031 if (!DECL_INITIAL (fn)
4032 && DECL_ABSTRACT_ORIGIN (fn)
4033 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4034 fn = DECL_ABSTRACT_ORIGIN (fn);
4035
4036 /* Don't try to inline functions that are not well-suited to inlining. */
4037 if (cg_edge->inline_failed)
4038 {
4039 reason = cg_edge->inline_failed;
4040 /* If this call was originally indirect, we do not want to emit any
4041 inlining related warnings or sorry messages because there are no
4042 guarantees regarding those. */
4043 if (cg_edge->indirect_inlining_edge)
4044 goto egress;
4045
4046 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4047 /* For extern inline functions that get redefined we always
4048 silently ignore the always_inline flag. Better behaviour would
4049 be to keep both bodies and use the extern inline body
4050 for inlining, but we can't do that because frontends overwrite
4051 the body. */
4052 && !cg_edge->callee->local.redefined_extern_inline
4053 /* During early inline pass, report only when optimization is
4054 not turned on. */
4055 && (cgraph_global_info_ready
4056 || !optimize)
4057 /* PR 20090218-1_0.c. Body can be provided by another module. */
4058 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4059 {
4060 error ("inlining failed in call to always_inline %q+F: %s", fn,
4061 cgraph_inline_failed_string (reason));
4062 error ("called from here");
4063 }
4064 else if (warn_inline
4065 && DECL_DECLARED_INLINE_P (fn)
4066 && !DECL_NO_INLINE_WARNING_P (fn)
4067 && !DECL_IN_SYSTEM_HEADER (fn)
4068 && reason != CIF_UNSPECIFIED
4069 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4070 /* Do not warn about not inlined recursive calls. */
4071 && !cgraph_edge_recursive_p (cg_edge)
4072 /* Avoid warnings during early inline pass. */
4073 && cgraph_global_info_ready)
4074 {
4075 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4076 fn, _(cgraph_inline_failed_string (reason)));
4077 warning (OPT_Winline, "called from here");
4078 }
4079 goto egress;
4080 }
4081 fn = cg_edge->callee->symbol.decl;
4082 cgraph_get_body (cg_edge->callee);
4083
4084 #ifdef ENABLE_CHECKING
4085 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
4086 verify_cgraph_node (cg_edge->callee);
4087 #endif
4088
4089 /* We will be inlining this callee. */
4090 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4091
4092 /* Update the caller's EH personality. */
4093 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
4094 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
4095 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
4096
4097 /* Split the block holding the GIMPLE_CALL. */
4098 e = split_block (bb, stmt);
4099 bb = e->src;
4100 return_block = e->dest;
4101 remove_edge (e);
4102
4103 /* split_block splits after the statement; work around this by
4104 moving the call into the second block manually. Not pretty,
4105 but seems easier than doing the CFG manipulation by hand
4106 when the GIMPLE_CALL is in the last statement of BB. */
4107 stmt_gsi = gsi_last_bb (bb);
4108 gsi_remove (&stmt_gsi, false);
4109
4110 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4111 been the source of abnormal edges. In this case, schedule
4112 the removal of dead abnormal edges. */
4113 gsi = gsi_start_bb (return_block);
4114 if (gsi_end_p (gsi))
4115 {
4116 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4117 purge_dead_abnormal_edges = true;
4118 }
4119 else
4120 {
4121 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4122 purge_dead_abnormal_edges = false;
4123 }
4124
4125 stmt_gsi = gsi_start_bb (return_block);
4126
4127 /* Build a block containing code to initialize the arguments, the
4128 actual inline expansion of the body, and a label for the return
4129 statements within the function to jump to. The type of the
4130 statement expression is the return type of the function call.
4131 ??? If the call does not have an associated block then we will
4132 remap all callee blocks to NULL, effectively dropping most of
4133 its debug information. This should only happen for calls to
4134 artificial decls inserted by the compiler itself. We need to
4135 either link the inlined blocks into the caller block tree or
4136 not refer to them in any way to not break GC for locations. */
4137 if (gimple_block (stmt))
4138 {
4139 id->block = make_node (BLOCK);
4140 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4141 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4142 prepend_lexical_block (gimple_block (stmt), id->block);
4143 }
4144
4145 /* Local declarations will be replaced by their equivalents in this
4146 map. */
4147 st = id->decl_map;
4148 id->decl_map = pointer_map_create ();
4149 dst = id->debug_map;
4150 id->debug_map = NULL;
4151
4152 /* Record the function we are about to inline. */
4153 id->src_fn = fn;
4154 id->src_node = cg_edge->callee;
4155 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4156 id->gimple_call = stmt;
4157
4158 gcc_assert (!id->src_cfun->after_inlining);
4159
4160 id->entry_bb = bb;
4161 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4162 {
4163 gimple_stmt_iterator si = gsi_last_bb (bb);
4164 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4165 NOT_TAKEN),
4166 GSI_NEW_STMT);
4167 }
4168 initialize_inlined_parameters (id, stmt, fn, bb);
4169
4170 if (DECL_INITIAL (fn))
4171 {
4172 if (gimple_block (stmt))
4173 {
4174 tree *var;
4175
4176 prepend_lexical_block (id->block,
4177 remap_blocks (DECL_INITIAL (fn), id));
4178 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4179 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4180 == NULL_TREE));
4181 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4182 otherwise in DWARF the DW_TAG_formal_parameter dies will not be children
4183 of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4184 under it. The parameters can then be evaluated in the debugger,
4185 but don't show up in backtraces. */
4186 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4187 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4188 {
4189 tree v = *var;
4190 *var = TREE_CHAIN (v);
4191 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4192 BLOCK_VARS (id->block) = v;
4193 }
4194 else
4195 var = &TREE_CHAIN (*var);
4196 }
4197 else
4198 remap_blocks_to_null (DECL_INITIAL (fn), id);
4199 }
4200
4201 /* Return statements in the function body will be replaced by jumps
4202 to the RET_LABEL. */
4203 gcc_assert (DECL_INITIAL (fn));
4204 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4205
4206 /* Find the LHS to which the result of this call is assigned. */
4207 return_slot = NULL;
4208 if (gimple_call_lhs (stmt))
4209 {
4210 modify_dest = gimple_call_lhs (stmt);
4211
4212 /* The function which we are inlining might not return a value,
4213 in which case we should issue a warning that the function
4214 does not return a value. In that case the optimizers will
4215 see that the variable to which the value is assigned was not
4216 initialized. We do not want to issue a warning about that
4217 uninitialized variable. */
4218 if (DECL_P (modify_dest))
4219 TREE_NO_WARNING (modify_dest) = 1;
4220
4221 if (gimple_call_return_slot_opt_p (stmt))
4222 {
4223 return_slot = modify_dest;
4224 modify_dest = NULL;
4225 }
4226 }
4227 else
4228 modify_dest = NULL;
4229
4230 /* If we are inlining a call to the C++ operator new, we don't want
4231 to use type based alias analysis on the return value. Otherwise
4232 we may get confused if the compiler sees that the inlined new
4233 function returns a pointer which was just deleted. See bug
4234 33407. */
4235 if (DECL_IS_OPERATOR_NEW (fn))
4236 {
4237 return_slot = NULL;
4238 modify_dest = NULL;
4239 }
4240
4241 /* Declare the return variable for the function. */
4242 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4243
4244 /* Add local vars in this inlined callee to caller. */
4245 add_local_variables (id->src_cfun, cfun, id);
4246
4247 if (dump_file && (dump_flags & TDF_DETAILS))
4248 {
4249 fprintf (dump_file, "Inlining ");
4250 print_generic_expr (dump_file, id->src_fn, 0);
4251 fprintf (dump_file, " to ");
4252 print_generic_expr (dump_file, id->dst_fn, 0);
4253 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4254 }
4255
4256 /* This is it. Duplicate the callee body. Assume callee is
4257 pre-gimplified. Note that we must not alter the caller
4258 function in any way before this point, as this CALL_EXPR may be
4259 a self-referential call; if we're calling ourselves, we need to
4260 duplicate our body before altering anything. */
4261 copy_body (id, bb->count,
4262 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4263 bb, return_block, NULL);
4264
4265 /* Reset the escaped solution. */
4266 if (cfun->gimple_df)
4267 pt_solution_reset (&cfun->gimple_df->escaped);
4268
4269 /* Clean up. */
4270 if (id->debug_map)
4271 {
4272 pointer_map_destroy (id->debug_map);
4273 id->debug_map = dst;
4274 }
4275 pointer_map_destroy (id->decl_map);
4276 id->decl_map = st;
4277
4278 /* Unlink the call's virtual operands before replacing it. */
4279 unlink_stmt_vdef (stmt);
4280
4281 /* If the inlined function returns a result that we care about,
4282 substitute the GIMPLE_CALL with an assignment of the return
4283 variable to the LHS of the call. That is, if STMT was
4284 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4285 if (use_retvar && gimple_call_lhs (stmt))
4286 {
4287 gimple old_stmt = stmt;
4288 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4289 gsi_replace (&stmt_gsi, stmt, false);
4290 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4291 }
4292 else
4293 {
4294 /* Handle the case of inlining a function with no return
4295 statement, which causes the return value to become undefined. */
4296 if (gimple_call_lhs (stmt)
4297 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4298 {
4299 tree name = gimple_call_lhs (stmt);
4300 tree var = SSA_NAME_VAR (name);
4301 tree def = ssa_default_def (cfun, var);
4302
4303 if (def)
4304 {
4305 /* If the variable is used undefined, make this name
4306 undefined via a move. */
4307 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4308 gsi_replace (&stmt_gsi, stmt, true);
4309 }
4310 else
4311 {
4312 /* Otherwise make this variable undefined. */
4313 gsi_remove (&stmt_gsi, true);
4314 set_ssa_default_def (cfun, var, name);
4315 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4316 }
4317 }
4318 else
4319 gsi_remove (&stmt_gsi, true);
4320 }
4321
4322 if (purge_dead_abnormal_edges)
4323 {
4324 gimple_purge_dead_eh_edges (return_block);
4325 gimple_purge_dead_abnormal_call_edges (return_block);
4326 }
4327
4328 /* If the value of the new expression is ignored, that's OK. We
4329 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4330 the equivalent inlined version either. */
4331 if (is_gimple_assign (stmt))
4332 {
4333 gcc_assert (gimple_assign_single_p (stmt)
4334 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4335 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4336 }
4337
4338 /* Output the inlining info for this abstract function, since it has been
4339 inlined. If we don't do this now, we can lose the information about the
4340 variables in the function when the blocks get blown away as soon as we
4341 remove the cgraph node. */
4342 if (gimple_block (stmt))
4343 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4344
4345 /* Update callgraph if needed. */
4346 cgraph_remove_node (cg_edge->callee);
4347
4348 id->block = NULL_TREE;
4349 successfully_inlined = TRUE;
4350
4351 egress:
4352 input_location = saved_location;
4353 return successfully_inlined;
4354 }
4355
4356 /* Expand call statements reachable from STMT_P.
4357 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4358 in a MODIFY_EXPR. */
4359
4360 static bool
4361 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4362 {
4363 gimple_stmt_iterator gsi;
4364
4365 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4366 {
4367 gimple stmt = gsi_stmt (gsi);
4368
4369 if (is_gimple_call (stmt)
4370 && expand_call_inline (bb, stmt, id))
4371 return true;
4372 }
4373
4374 return false;
4375 }
4376
4377
4378 /* Walk all basic blocks created after FIRST and try to fold every statement
4379 in the STATEMENTS pointer set. */
4380
4381 static void
4382 fold_marked_statements (int first, struct pointer_set_t *statements)
4383 {
4384 for (; first < n_basic_blocks; first++)
4385 if (BASIC_BLOCK (first))
4386 {
4387 gimple_stmt_iterator gsi;
4388
4389 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4390 !gsi_end_p (gsi);
4391 gsi_next (&gsi))
4392 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4393 {
4394 gimple old_stmt = gsi_stmt (gsi);
4395 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4396
4397 if (old_decl && DECL_BUILT_IN (old_decl))
4398 {
4399 /* Folding builtins can create multiple instructions;
4400 we need to look at all of them. */
4401 gimple_stmt_iterator i2 = gsi;
4402 gsi_prev (&i2);
4403 if (fold_stmt (&gsi))
4404 {
4405 gimple new_stmt;
4406 /* If a builtin at the end of a bb folded into nothing,
4407 the following loop won't work. */
4408 if (gsi_end_p (gsi))
4409 {
4410 cgraph_update_edges_for_call_stmt (old_stmt,
4411 old_decl, NULL);
4412 break;
4413 }
4414 if (gsi_end_p (i2))
4415 i2 = gsi_start_bb (BASIC_BLOCK (first));
4416 else
4417 gsi_next (&i2);
4418 while (1)
4419 {
4420 new_stmt = gsi_stmt (i2);
4421 update_stmt (new_stmt);
4422 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4423 new_stmt);
4424
4425 if (new_stmt == gsi_stmt (gsi))
4426 {
4427 /* It is okay to check only for the very last
4428 of these statements. If it is a throwing
4429 statement nothing will change. If it isn't
4430 this can remove EH edges. The only way this
4431 could be wrong is if some intermediate stmts
4432 throw, but not the last one. That would mean
4433 we'd have to split the block, which we can't
4434 do here and we'd lose anyway. And as builtins
4435 probably never throw, this all
4436 is moot anyway. */
4437 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4438 new_stmt))
4439 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4440 break;
4441 }
4442 gsi_next (&i2);
4443 }
4444 }
4445 }
4446 else if (fold_stmt (&gsi))
4447 {
4448 /* Re-read the statement from GSI as fold_stmt() may
4449 have changed it. */
4450 gimple new_stmt = gsi_stmt (gsi);
4451 update_stmt (new_stmt);
4452
4453 if (is_gimple_call (old_stmt)
4454 || is_gimple_call (new_stmt))
4455 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4456 new_stmt);
4457
4458 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4459 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4460 }
4461 }
4462 }
4463 }
4464
4465 /* Return true if BB has at least one abnormal outgoing edge. */
4466
4467 static inline bool
4468 has_abnormal_outgoing_edge_p (basic_block bb)
4469 {
4470 edge e;
4471 edge_iterator ei;
4472
4473 FOR_EACH_EDGE (e, ei, bb->succs)
4474 if (e->flags & EDGE_ABNORMAL)
4475 return true;
4476
4477 return false;
4478 }
4479
4480 /* Expand calls to inline functions in the body of FN. */
4481
4482 unsigned int
4483 optimize_inline_calls (tree fn)
4484 {
4485 copy_body_data id;
4486 basic_block bb;
4487 int last = n_basic_blocks;
4488 struct gimplify_ctx gctx;
4489 bool inlined_p = false;
4490
4491 /* Clear out ID. */
4492 memset (&id, 0, sizeof (id));
4493
4494 id.src_node = id.dst_node = cgraph_get_node (fn);
4495 gcc_assert (id.dst_node->symbol.definition);
4496 id.dst_fn = fn;
4497 /* Or any functions that aren't finished yet. */
4498 if (current_function_decl)
4499 id.dst_fn = current_function_decl;
4500
4501 id.copy_decl = copy_decl_maybe_to_var;
4502 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4503 id.transform_new_cfg = false;
4504 id.transform_return_to_modify = true;
4505 id.transform_parameter = true;
4506 id.transform_lang_insert_block = NULL;
4507 id.statements_to_fold = pointer_set_create ();
4508
4509 push_gimplify_context (&gctx);
4510
4511 /* We make no attempts to keep dominance info up-to-date. */
4512 free_dominance_info (CDI_DOMINATORS);
4513 free_dominance_info (CDI_POST_DOMINATORS);
4514
4515 /* Register specific gimple functions. */
4516 gimple_register_cfg_hooks ();
4517
4518 /* Reach the trees by walking over the CFG, and note the
4519 enclosing basic-blocks in the call edges. */
4520 /* We walk the blocks going forward, because inlined function bodies
4521 will split id->current_basic_block, and the new blocks will
4522 follow it; we'll trudge through them, processing their CALL_EXPRs
4523 along the way. */
4524 FOR_EACH_BB (bb)
4525 inlined_p |= gimple_expand_calls_inline (bb, &id);
4526
4527 pop_gimplify_context (NULL);
4528
4529 #ifdef ENABLE_CHECKING
4530 {
4531 struct cgraph_edge *e;
4532
4533 verify_cgraph_node (id.dst_node);
4534
4535 /* Double check that we inlined everything we are supposed to inline. */
4536 for (e = id.dst_node->callees; e; e = e->next_callee)
4537 gcc_assert (e->inline_failed);
4538 }
4539 #endif
4540
4541 /* Fold queued statements. */
4542 fold_marked_statements (last, id.statements_to_fold);
4543 pointer_set_destroy (id.statements_to_fold);
4544
4545 gcc_assert (!id.debug_stmts.exists ());
4546
4547 /* If we didn't inline into the function there is nothing to do. */
4548 if (!inlined_p)
4549 return 0;
4550
4551 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4552 number_blocks (fn);
4553
4554 delete_unreachable_blocks_update_callgraph (&id);
4555 #ifdef ENABLE_CHECKING
4556 verify_cgraph_node (id.dst_node);
4557 #endif
4558
4559 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4560 not possible yet - the IPA passes might make various functions no longer
4561 throw, and they don't care to proactively update local EH info. This is
4562 done later in the fixup_cfg pass, which also executes the verification. */
4563 return (TODO_update_ssa
4564 | TODO_cleanup_cfg
4565 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4566 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4567 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4568 }
4569
4570 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4571
4572 tree
4573 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4574 {
4575 enum tree_code code = TREE_CODE (*tp);
4576 enum tree_code_class cl = TREE_CODE_CLASS (code);
4577
4578 /* We make copies of most nodes. */
4579 if (IS_EXPR_CODE_CLASS (cl)
4580 || code == TREE_LIST
4581 || code == TREE_VEC
4582 || code == TYPE_DECL
4583 || code == OMP_CLAUSE)
4584 {
4585 /* Because the chain gets clobbered when we make a copy, we save it
4586 here. */
4587 tree chain = NULL_TREE, new_tree;
4588
4589 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4590 chain = TREE_CHAIN (*tp);
4591
4592 /* Copy the node. */
4593 new_tree = copy_node (*tp);
4594
4595 /* Propagate mudflap marked-ness. */
4596 if (flag_mudflap && mf_marked_p (*tp))
4597 mf_mark (new_tree);
4598
4599 *tp = new_tree;
4600
4601 /* Now, restore the chain, if appropriate. That will cause
4602 walk_tree to walk into the chain as well. */
4603 if (code == PARM_DECL
4604 || code == TREE_LIST
4605 || code == OMP_CLAUSE)
4606 TREE_CHAIN (*tp) = chain;
4607
4608 /* For now, we don't update BLOCKs when we make copies. So, we
4609 have to nullify all BIND_EXPRs. */
4610 if (TREE_CODE (*tp) == BIND_EXPR)
4611 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4612 }
4613 else if (code == CONSTRUCTOR)
4614 {
4615 /* CONSTRUCTOR nodes need special handling because
4616 we need to duplicate the vector of elements. */
4617 tree new_tree;
4618
4619 new_tree = copy_node (*tp);
4620
4621 /* Propagate mudflap marked-ness. */
4622 if (flag_mudflap && mf_marked_p (*tp))
4623 mf_mark (new_tree);
4624
4625 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4626 *tp = new_tree;
4627 }
4628 else if (code == STATEMENT_LIST)
4629 /* We used to just abort on STATEMENT_LIST, but we can run into them
4630 with statement-expressions (c++/40975). */
4631 copy_statement_list (tp);
4632 else if (TREE_CODE_CLASS (code) == tcc_type)
4633 *walk_subtrees = 0;
4634 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4635 *walk_subtrees = 0;
4636 else if (TREE_CODE_CLASS (code) == tcc_constant)
4637 *walk_subtrees = 0;
4638 return NULL_TREE;
4639 }
4640
4641 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4642 information indicating to what new SAVE_EXPR this one should be mapped,
4643 use that one. Otherwise, create a new node and enter it in ST. FN is
4644 the function into which the copy will be placed. */
4645
4646 static void
4647 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4648 {
4649 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4650 tree *n;
4651 tree t;
4652
4653 /* See if we already encountered this SAVE_EXPR. */
4654 n = (tree *) pointer_map_contains (st, *tp);
4655
4656 /* If we didn't already remap this SAVE_EXPR, do so now. */
4657 if (!n)
4658 {
4659 t = copy_node (*tp);
4660
4661 /* Remember this SAVE_EXPR. */
4662 *pointer_map_insert (st, *tp) = t;
4663 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4664 *pointer_map_insert (st, t) = t;
4665 }
4666 else
4667 {
4668 /* We've already walked into this SAVE_EXPR; don't do it again. */
4669 *walk_subtrees = 0;
4670 t = *n;
4671 }
4672
4673 /* Replace this SAVE_EXPR with the copy. */
4674 *tp = t;
4675 }
4676
4677 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4678 label, copies the declaration and enters it in the decl map of the
4679 copy_body_data pointed to by WI->info. */
4680
4681 static tree
4682 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4683 bool *handled_ops_p ATTRIBUTE_UNUSED,
4684 struct walk_stmt_info *wi)
4685 {
4686 copy_body_data *id = (copy_body_data *) wi->info;
4687 gimple stmt = gsi_stmt (*gsip);
4688
4689 if (gimple_code (stmt) == GIMPLE_LABEL)
4690 {
4691 tree decl = gimple_label_label (stmt);
4692
4693 /* Copy the decl and remember the copy. */
4694 insert_decl_map (id, decl, id->copy_decl (decl, id));
4695 }
4696
4697 return NULL_TREE;
4698 }
4699
4700
4701 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4702 Using the decl map in the copy_body_data pointed to by WI->info,
4703 remaps all local declarations to appropriate replacements in gimple
4704 operands. */
4705
4706 static tree
4707 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4708 {
4709 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4710 copy_body_data *id = (copy_body_data *) wi->info;
4711 struct pointer_map_t *st = id->decl_map;
4712 tree *n;
4713 tree expr = *tp;
4714
4715 /* Only a local declaration (variable or label). */
4716 if ((TREE_CODE (expr) == VAR_DECL
4717 && !TREE_STATIC (expr))
4718 || TREE_CODE (expr) == LABEL_DECL)
4719 {
4720 /* Lookup the declaration. */
4721 n = (tree *) pointer_map_contains (st, expr);
4722
4723 /* If it's there, remap it. */
4724 if (n)
4725 *tp = *n;
4726 *walk_subtrees = 0;
4727 }
4728 else if (TREE_CODE (expr) == STATEMENT_LIST
4729 || TREE_CODE (expr) == BIND_EXPR
4730 || TREE_CODE (expr) == SAVE_EXPR)
4731 gcc_unreachable ();
4732 else if (TREE_CODE (expr) == TARGET_EXPR)
4733 {
4734 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4735 It's OK for this to happen if it was part of a subtree that
4736 isn't immediately expanded, such as operand 2 of another
4737 TARGET_EXPR. */
4738 if (!TREE_OPERAND (expr, 1))
4739 {
4740 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4741 TREE_OPERAND (expr, 3) = NULL_TREE;
4742 }
4743 }
4744
4745 /* Keep iterating. */
4746 return NULL_TREE;
4747 }
4748
4749
4750 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4751 Using the decl map in the copy_body_data pointed to by WI->info,
4752 remaps all local declarations to appropriate replacements in gimple
4753 statements. */
4754
4755 static tree
4756 replace_locals_stmt (gimple_stmt_iterator *gsip,
4757 bool *handled_ops_p ATTRIBUTE_UNUSED,
4758 struct walk_stmt_info *wi)
4759 {
4760 copy_body_data *id = (copy_body_data *) wi->info;
4761 gimple stmt = gsi_stmt (*gsip);
4762
4763 if (gimple_code (stmt) == GIMPLE_BIND)
4764 {
4765 tree block = gimple_bind_block (stmt);
4766
4767 if (block)
4768 {
4769 remap_block (&block, id);
4770 gimple_bind_set_block (stmt, block);
4771 }
4772
4773 /* This will remap a lot of the same decls again, but this should be
4774 harmless. */
4775 if (gimple_bind_vars (stmt))
4776 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4777 NULL, id));
4778 }
4779
4780 /* Keep iterating. */
4781 return NULL_TREE;
4782 }
4783
4784
4785 /* Copies everything in SEQ and replaces variables and labels local to
4786 current_function_decl. */
4787
4788 gimple_seq
4789 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4790 {
4791 copy_body_data id;
4792 struct walk_stmt_info wi;
4793 struct pointer_set_t *visited;
4794 gimple_seq copy;
4795
4796 /* There's nothing to do for an empty sequence. */
4797 if (seq == NULL)
4798 return seq;
4799
4800 /* Set up ID. */
4801 memset (&id, 0, sizeof (id));
4802 id.src_fn = current_function_decl;
4803 id.dst_fn = current_function_decl;
4804 id.decl_map = pointer_map_create ();
4805 id.debug_map = NULL;
4806
4807 id.copy_decl = copy_decl_no_change;
4808 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4809 id.transform_new_cfg = false;
4810 id.transform_return_to_modify = false;
4811 id.transform_parameter = false;
4812 id.transform_lang_insert_block = NULL;
4813
4814 /* Walk the tree once to find local labels. */
4815 memset (&wi, 0, sizeof (wi));
4816 visited = pointer_set_create ();
4817 wi.info = &id;
4818 wi.pset = visited;
4819 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4820 pointer_set_destroy (visited);
4821
4822 copy = gimple_seq_copy (seq);
4823
4824 /* Walk the copy, remapping decls. */
4825 memset (&wi, 0, sizeof (wi));
4826 wi.info = &id;
4827 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4828
4829 /* Clean up. */
4830 pointer_map_destroy (id.decl_map);
4831 if (id.debug_map)
4832 pointer_map_destroy (id.debug_map);
4833
4834 return copy;
4835 }
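
/* A hedged usage sketch; BODY below is a hypothetical name, not a symbol
   from this file.  A caller that needs to duplicate a statement sequence
   BODY inside the current function, without the copy sharing local
   VAR_DECLs and LABEL_DECLs with the original, can simply do

     gimple_seq body_copy = copy_gimple_seq_and_replace_locals (body);

   and insert BODY_COPY wherever it is needed.  */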
4836
4837
4838 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4839
4840 static tree
4841 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4842 {
4843 if (*tp == data)
4844 return (tree) data;
4845 else
4846 return NULL;
4847 }
4848
4849 DEBUG_FUNCTION bool
4850 debug_find_tree (tree top, tree search)
4851 {
4852 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4853 }
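
/* Intended for interactive use.  For example, from gdb (assuming TOP and
   SEARCH are tree values in scope):

     (gdb) call debug_find_tree (top, search)

   which evaluates to true iff SEARCH occurs somewhere underneath TOP.  */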
4854
4855
4856 /* Declare the variables created by the inliner. Add all the variables in
4857 VARS to BIND_EXPR. */
4858
4859 static void
4860 declare_inline_vars (tree block, tree vars)
4861 {
4862 tree t;
4863 for (t = vars; t; t = DECL_CHAIN (t))
4864 {
4865 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4866 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4867 add_local_decl (cfun, t);
4868 }
4869
4870 if (block)
4871 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4872 }
4873
4874 /* Finish setting up COPY, the duplicate of the declaration DECL: copy over
4875 debug-related flags, record the abstract origin, and give COPY the proper
4876 DECL_CONTEXT in ID->dst_fn where appropriate. */
4877
4878 static tree
4879 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4880 {
4881 /* Don't generate debug information for the copy if we wouldn't have
4882 generated it for the original either. */
4883 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4884 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4885
4886 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4887 declaration inspired this copy. */
4888 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4889
4890 /* The new variable/label has no RTL, yet. */
4891 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4892 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4893 SET_DECL_RTL (copy, 0);
4894
4895 /* These args would always appear unused, if not for this. */
4896 TREE_USED (copy) = 1;
4897
4898 /* Set the context for the new declaration. */
4899 if (!DECL_CONTEXT (decl))
4900 /* Globals stay global. */
4901 ;
4902 else if (DECL_CONTEXT (decl) != id->src_fn)
4903 /* Things that weren't in the scope of the function we're inlining
4904 from aren't in the scope we're inlining to, either. */
4905 ;
4906 else if (TREE_STATIC (decl))
4907 /* Function-scoped static variables should stay in the original
4908 function. */
4909 ;
4910 else
4911 /* Ordinary automatic local variables are now in the scope of the
4912 new function. */
4913 DECL_CONTEXT (copy) = id->dst_fn;
4914
4915 return copy;
4916 }
4917
4918 static tree
4919 copy_decl_to_var (tree decl, copy_body_data *id)
4920 {
4921 tree copy, type;
4922
4923 gcc_assert (TREE_CODE (decl) == PARM_DECL
4924 || TREE_CODE (decl) == RESULT_DECL);
4925
4926 type = TREE_TYPE (decl);
4927
4928 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4929 VAR_DECL, DECL_NAME (decl), type);
4930 if (DECL_PT_UID_SET_P (decl))
4931 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4932 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4933 TREE_READONLY (copy) = TREE_READONLY (decl);
4934 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4935 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4936
4937 return copy_decl_for_dup_finish (id, decl, copy);
4938 }
4939
4940 /* Like copy_decl_to_var, but create a return slot object instead of a
4941 pointer variable for return by invisible reference. */
4942
4943 static tree
4944 copy_result_decl_to_var (tree decl, copy_body_data *id)
4945 {
4946 tree copy, type;
4947
4948 gcc_assert (TREE_CODE (decl) == PARM_DECL
4949 || TREE_CODE (decl) == RESULT_DECL);
4950
4951 type = TREE_TYPE (decl);
4952 if (DECL_BY_REFERENCE (decl))
4953 type = TREE_TYPE (type);
4954
4955 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4956 VAR_DECL, DECL_NAME (decl), type);
4957 if (DECL_PT_UID_SET_P (decl))
4958 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4959 TREE_READONLY (copy) = TREE_READONLY (decl);
4960 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4961 if (!DECL_BY_REFERENCE (decl))
4962 {
4963 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4964 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4965 }
4966
4967 return copy_decl_for_dup_finish (id, decl, copy);
4968 }
4969
4970 tree
4971 copy_decl_no_change (tree decl, copy_body_data *id)
4972 {
4973 tree copy;
4974
4975 copy = copy_node (decl);
4976
4977 /* The COPY is not abstract; it will be generated in DST_FN. */
4978 DECL_ABSTRACT (copy) = 0;
4979 lang_hooks.dup_lang_specific_decl (copy);
4980
4981 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4982 been taken; it's for internal bookkeeping in expand_goto_internal. */
4983 if (TREE_CODE (copy) == LABEL_DECL)
4984 {
4985 TREE_ADDRESSABLE (copy) = 0;
4986 LABEL_DECL_UID (copy) = -1;
4987 }
4988
4989 return copy_decl_for_dup_finish (id, decl, copy);
4990 }
4991
4992 static tree
4993 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4994 {
4995 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4996 return copy_decl_to_var (decl, id);
4997 else
4998 return copy_decl_no_change (decl, id);
4999 }
5000
5001 /* Return a copy of the function's argument tree. */
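/* For illustration; parameter numbering is 0-based, matching the loop in
   copy_arguments_for_versioning below.  With bit 1 set in ARGS_TO_SKIP, a
   function taking (a, b, c) gets the new argument list (a, c), while b is
   remapped to an equivalent local VAR_DECL chained onto *VARS.  */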
5002 static tree
5003 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5004 bitmap args_to_skip, tree *vars)
5005 {
5006 tree arg, *parg;
5007 tree new_parm = NULL;
5008 int i = 0;
5009
5010 parg = &new_parm;
5011
5012 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5013 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5014 {
5015 tree new_tree = remap_decl (arg, id);
5016 if (TREE_CODE (new_tree) != PARM_DECL)
5017 new_tree = id->copy_decl (arg, id);
5018 lang_hooks.dup_lang_specific_decl (new_tree);
5019 *parg = new_tree;
5020 parg = &DECL_CHAIN (new_tree);
5021 }
5022 else if (!pointer_map_contains (id->decl_map, arg))
5023 {
5024 /* Make an equivalent VAR_DECL. If the argument was used
5025 as a temporary variable later in the function, the uses will be
5026 replaced by the local variable. */
5027 tree var = copy_decl_to_var (arg, id);
5028 insert_decl_map (id, arg, var);
5029 /* Declare this new variable. */
5030 DECL_CHAIN (var) = *vars;
5031 *vars = var;
5032 }
5033 return new_parm;
5034 }
5035
5036 /* Return a copy of the function's static chain. */
5037 static tree
5038 copy_static_chain (tree static_chain, copy_body_data * id)
5039 {
5040 tree *chain_copy, *pvar;
5041
5042 chain_copy = &static_chain;
5043 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5044 {
5045 tree new_tree = remap_decl (*pvar, id);
5046 lang_hooks.dup_lang_specific_decl (new_tree);
5047 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5048 *pvar = new_tree;
5049 }
5050 return static_chain;
5051 }
5052
5053 /* Return true if the function is allowed to be versioned.
5054 This is a guard for the versioning functionality. */
5055
5056 bool
5057 tree_versionable_function_p (tree fndecl)
5058 {
5059 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5060 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5061 }
5062
5063 /* Delete all unreachable basic blocks and update callgraph.
5064 Doing so is somewhat nontrivial because we need to update all clones and
5065 remove inline functions that become unreachable. */
5066
5067 static bool
5068 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5069 {
5070 bool changed = false;
5071 basic_block b, next_bb;
5072
5073 find_unreachable_blocks ();
5074
5075 /* Delete all unreachable basic blocks. */
5076
5077 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
5078 {
5079 next_bb = b->next_bb;
5080
5081 if (!(b->flags & BB_REACHABLE))
5082 {
5083 gimple_stmt_iterator bsi;
5084
5085 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5086 {
5087 struct cgraph_edge *e;
5088 struct cgraph_node *node;
5089
5090 ipa_remove_stmt_references ((symtab_node)id->dst_node, gsi_stmt (bsi));
5091
5092 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5093 && (e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5094 {
5095 if (!e->inline_failed)
5096 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5097 else
5098 cgraph_remove_edge (e);
5099 }
5100 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5101 && id->dst_node->clones)
5102 for (node = id->dst_node->clones; node != id->dst_node;)
5103 {
5104 ipa_remove_stmt_references ((symtab_node)node, gsi_stmt (bsi));
5105 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5106 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5107 {
5108 if (!e->inline_failed)
5109 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5110 else
5111 cgraph_remove_edge (e);
5112 }
5113
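              /* Walk the clone tree depth-first: descend into clones of the
                 current node first, then advance to the next sibling clone,
                 otherwise climb back up via clone_of until we return to
                 id->dst_node.  */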
5114 if (node->clones)
5115 node = node->clones;
5116 else if (node->next_sibling_clone)
5117 node = node->next_sibling_clone;
5118 else
5119 {
5120 while (node != id->dst_node && !node->next_sibling_clone)
5121 node = node->clone_of;
5122 if (node != id->dst_node)
5123 node = node->next_sibling_clone;
5124 }
5125 }
5126 }
5127 delete_basic_block (b);
5128 changed = true;
5129 }
5130 }
5131
5132 return changed;
5133 }
5134
5135 /* Update clone info after duplication. */
5136
5137 static void
5138 update_clone_info (copy_body_data * id)
5139 {
5140 struct cgraph_node *node;
5141 if (!id->dst_node->clones)
5142 return;
5143 for (node = id->dst_node->clones; node != id->dst_node;)
5144 {
5145 /* First update replace maps to match the new body. */
5146 if (node->clone.tree_map)
5147 {
5148 unsigned int i;
5149 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5150 {
5151 struct ipa_replace_map *replace_info;
5152 replace_info = (*node->clone.tree_map)[i];
5153 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5154 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5155 }
5156 }
5157 if (node->clones)
5158 node = node->clones;
5159 else if (node->next_sibling_clone)
5160 node = node->next_sibling_clone;
5161 else
5162 {
5163 while (node != id->dst_node && !node->next_sibling_clone)
5164 node = node->clone_of;
5165 if (node != id->dst_node)
5166 node = node->next_sibling_clone;
5167 }
5168 }
5169 }
5170
5171 /* Create a copy of a function's tree.
5172 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5173 of the original function and the new copied function
5174 respectively. In case we want to replace a DECL
5175 tree with another tree while duplicating the function's
5176 body, TREE_MAP represents the mapping between these
5177 trees. If UPDATE_CLONES is set, the call_stmt fields
5178 of edges of clones of the function will be updated.
5179
5180 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5181 from the new version.
5182 If SKIP_RETURN is true, the new version will return void.
5183 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5184 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5185 */
5186 void
5187 tree_function_versioning (tree old_decl, tree new_decl,
5188 vec<ipa_replace_map_p, va_gc> *tree_map,
5189 bool update_clones, bitmap args_to_skip,
5190 bool skip_return, bitmap blocks_to_copy,
5191 basic_block new_entry)
5192 {
5193 struct cgraph_node *old_version_node;
5194 struct cgraph_node *new_version_node;
5195 copy_body_data id;
5196 tree p;
5197 unsigned i;
5198 struct ipa_replace_map *replace_info;
5199 basic_block old_entry_block, bb;
5200 vec<gimple> init_stmts;
5201 init_stmts.create (10);
5202 tree vars = NULL_TREE;
5203
5204 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5205 && TREE_CODE (new_decl) == FUNCTION_DECL);
5206 DECL_POSSIBLY_INLINED (old_decl) = 1;
5207
5208 old_version_node = cgraph_get_node (old_decl);
5209 gcc_checking_assert (old_version_node);
5210 new_version_node = cgraph_get_node (new_decl);
5211 gcc_checking_assert (new_version_node);
5212
5213 /* Copy over debug args. */
5214 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5215 {
5216 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5217 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5218 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5219 old_debug_args = decl_debug_args_lookup (old_decl);
5220 if (old_debug_args)
5221 {
5222 new_debug_args = decl_debug_args_insert (new_decl);
5223 *new_debug_args = vec_safe_copy (*old_debug_args);
5224 }
5225 }
5226
5227 /* Output the inlining info for this abstract function, since it has been
5228 inlined. If we don't do this now, we can lose the information about the
5229 variables in the function when the blocks get blown away as soon as we
5230 remove the cgraph node. */
5231 (*debug_hooks->outlining_inline_function) (old_decl);
5232
5233 DECL_ARTIFICIAL (new_decl) = 1;
5234 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5235 if (DECL_ORIGIN (old_decl) == old_decl)
5236 old_version_node->used_as_abstract_origin = true;
5237 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5238
5239 /* Prepare the data structures for the tree copy. */
5240 memset (&id, 0, sizeof (id));
5241
5242 /* Generate a new name for the new version. */
5243 id.statements_to_fold = pointer_set_create ();
5244
5245 id.decl_map = pointer_map_create ();
5246 id.debug_map = NULL;
5247 id.src_fn = old_decl;
5248 id.dst_fn = new_decl;
5249 id.src_node = old_version_node;
5250 id.dst_node = new_version_node;
5251 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5252 id.blocks_to_copy = blocks_to_copy;
5253 if (id.src_node->ipa_transforms_to_apply.exists ())
5254 {
5255 vec<ipa_opt_pass> old_transforms_to_apply
5256 = id.dst_node->ipa_transforms_to_apply;
5257 unsigned int i;
5258
5259 id.dst_node->ipa_transforms_to_apply
5260 = id.src_node->ipa_transforms_to_apply.copy ();
5261 for (i = 0; i < old_transforms_to_apply.length (); i++)
5262 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5263 old_transforms_to_apply.release ();
5264 }
5265
5266 id.copy_decl = copy_decl_no_change;
5267 id.transform_call_graph_edges
5268 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5269 id.transform_new_cfg = true;
5270 id.transform_return_to_modify = false;
5271 id.transform_parameter = false;
5272 id.transform_lang_insert_block = NULL;
5273
5274 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5275 (DECL_STRUCT_FUNCTION (old_decl));
5276 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5277 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5278 initialize_cfun (new_decl, old_decl,
5279 old_entry_block->count);
5280 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5281 = id.src_cfun->gimple_df->ipa_pta;
5282
5283 /* Copy the function's static chain. */
5284 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5285 if (p)
5286 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5287 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5288 &id);
5289
5290 /* If there's a tree_map, prepare for substitution. */
5291 if (tree_map)
5292 for (i = 0; i < tree_map->length (); i++)
5293 {
5294 gimple init;
5295 replace_info = (*tree_map)[i];
5296 if (replace_info->replace_p)
5297 {
5298 if (!replace_info->old_tree)
5299 {
5300 int i = replace_info->parm_num;
5301 tree parm;
5302 tree req_type;
5303
5304 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5305 i --;
5306 replace_info->old_tree = parm;
5307 req_type = TREE_TYPE (parm);
5308 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5309 {
5310 if (fold_convertible_p (req_type, replace_info->new_tree))
5311 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5312 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5313 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5314 else
5315 {
5316 if (dump_file)
5317 {
5318 fprintf (dump_file, " const ");
5319 print_generic_expr (dump_file, replace_info->new_tree, 0);
5320 fprintf (dump_file, " can't be converted to param ");
5321 print_generic_expr (dump_file, parm, 0);
5322 fprintf (dump_file, "\n");
5323 }
5324 replace_info->old_tree = NULL;
5325 }
5326 }
5327 }
5328 else
5329 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5330 if (replace_info->old_tree)
5331 {
5332 init = setup_one_parameter (&id, replace_info->old_tree,
5333 replace_info->new_tree, id.src_fn,
5334 NULL,
5335 &vars);
5336 if (init)
5337 init_stmts.safe_push (init);
5338 }
5339 }
5340 }
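  /* For instance (values purely illustrative): if REPLACE_MAP records that
     parameter 2, of type long, is to be replaced by the int constant 7,
     the fold_convertible_p test above succeeds and the constant is wrapped
     in a NOP_EXPR to long before setup_one_parameter emits the
     initializer; a same-sized but non-convertible replacement is instead
     re-interpreted with a VIEW_CONVERT_EXPR, and anything else is noted in
     the dump file and the substitution is dropped.  */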
5341 /* Copy the function's arguments. */
5342 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5343 DECL_ARGUMENTS (new_decl) =
5344 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5345 args_to_skip, &vars);
5346
5347 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5348 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5349
5350 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5351
5352 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5353 /* Add local vars. */
5354 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5355
5356 if (DECL_RESULT (old_decl) == NULL_TREE)
5357 ;
5358 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5359 {
5360 DECL_RESULT (new_decl)
5361 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5362 RESULT_DECL, NULL_TREE, void_type_node);
5363 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5364 cfun->returns_struct = 0;
5365 cfun->returns_pcc_struct = 0;
5366 }
5367 else
5368 {
5369 tree old_name;
5370 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5371 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5372 if (gimple_in_ssa_p (id.src_cfun)
5373 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5374 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5375 {
5376 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5377 insert_decl_map (&id, old_name, new_name);
5378 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5379 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5380 }
5381 }
5382
5383 /* Set up the destination function's loop tree. */
5384 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5385 {
5386 cfun->curr_properties &= ~PROP_loops;
5387 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5388 cfun->curr_properties |= PROP_loops;
5389 }
5390
5391 /* Copy the function's body. */
5392 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5393 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
5394
5395 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5396 number_blocks (new_decl);
5397
5398 /* We want to create the BB unconditionally, so that the addition of
5399 debug stmts doesn't affect BB count, which may in the end cause
5400 codegen differences. */
5401 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5402 while (init_stmts.length ())
5403 insert_init_stmt (&id, bb, init_stmts.pop ());
5404 update_clone_info (&id);
5405
5406 /* Remap the nonlocal_goto_save_area, if any. */
5407 if (cfun->nonlocal_goto_save_area)
5408 {
5409 struct walk_stmt_info wi;
5410
5411 memset (&wi, 0, sizeof (wi));
5412 wi.info = &id;
5413 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5414 }
5415
5416 /* Clean up. */
5417 pointer_map_destroy (id.decl_map);
5418 if (id.debug_map)
5419 pointer_map_destroy (id.debug_map);
5420 free_dominance_info (CDI_DOMINATORS);
5421 free_dominance_info (CDI_POST_DOMINATORS);
5422
5423 fold_marked_statements (0, id.statements_to_fold);
5424 pointer_set_destroy (id.statements_to_fold);
5425 fold_cond_expr_cond ();
5426 delete_unreachable_blocks_update_callgraph (&id);
5427 if (id.dst_node->symbol.definition)
5428 cgraph_rebuild_references ();
5429 update_ssa (TODO_update_ssa);
5430
5431 /* After partial cloning we need to rescale frequencies, so that they
5432 are within the proper range in the cloned function. */
5433 if (new_entry)
5434 {
5435 struct cgraph_edge *e;
5436 rebuild_frequencies ();
5437
5438 new_version_node->count = ENTRY_BLOCK_PTR->count;
5439 for (e = new_version_node->callees; e; e = e->next_callee)
5440 {
5441 basic_block bb = gimple_bb (e->call_stmt);
5442 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5443 bb);
5444 e->count = bb->count;
5445 }
5446 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5447 {
5448 basic_block bb = gimple_bb (e->call_stmt);
5449 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5450 bb);
5451 e->count = bb->count;
5452 }
5453 }
5454
5455 free_dominance_info (CDI_DOMINATORS);
5456 free_dominance_info (CDI_POST_DOMINATORS);
5457
5458 gcc_assert (!id.debug_stmts.exists ());
5459 init_stmts.release ();
5460 pop_cfun ();
5461 return;
5462 }
5463
5464 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5465 the callee and return the inlined body on success. */
5466
5467 tree
5468 maybe_inline_call_in_expr (tree exp)
5469 {
5470 tree fn = get_callee_fndecl (exp);
5471
5472 /* We can only try to inline "const" functions. */
5473 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5474 {
5475 struct pointer_map_t *decl_map = pointer_map_create ();
5476 call_expr_arg_iterator iter;
5477 copy_body_data id;
5478 tree param, arg, t;
5479
5480 /* Remap the parameters. */
5481 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5482 param;
5483 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5484 *pointer_map_insert (decl_map, param) = arg;
5485
5486 memset (&id, 0, sizeof (id));
5487 id.src_fn = fn;
5488 id.dst_fn = current_function_decl;
5489 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5490 id.decl_map = decl_map;
5491
5492 id.copy_decl = copy_decl_no_change;
5493 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5494 id.transform_new_cfg = false;
5495 id.transform_return_to_modify = true;
5496 id.transform_parameter = true;
5497 id.transform_lang_insert_block = NULL;
5498
5499 /* Make sure not to unshare trees behind the front-end's back
5500 since front-end specific mechanisms may rely on sharing. */
5501 id.regimplify = false;
5502 id.do_not_unshare = true;
5503
5504 /* We're not inside any EH region. */
5505 id.eh_lp_nr = 0;
5506
5507 t = copy_tree_body (&id);
5508 pointer_map_destroy (decl_map);
5509
5510 /* We can only return something suitable for use in a GENERIC
5511 expression tree. */
5512 if (TREE_CODE (t) == MODIFY_EXPR)
5513 return TREE_OPERAND (t, 1);
5514 }
5515
5516 return NULL_TREE;
5517 }
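/* For example (a hypothetical front-end situation): given the GENERIC tree
   for a call "square (3)", where square is a TREE_READONLY ("const")
   function whose DECL_SAVED_TREE is still available, a front end can try

	tree folded = maybe_inline_call_in_expr (call);

   If the copied body reduces to a MODIFY_EXPR, FOLDED is the remapped
   return-value expression (here 3 * 3) and can be used in place of the
   call; otherwise NULL_TREE is returned and the call is emitted as
   usual.  */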
5518
5519 /* Duplicate a type, fields and all. */
5520
5521 tree
5522 build_duplicate_type (tree type)
5523 {
5524 struct copy_body_data id;
5525
5526 memset (&id, 0, sizeof (id));
5527 id.src_fn = current_function_decl;
5528 id.dst_fn = current_function_decl;
5529 id.src_cfun = cfun;
5530 id.decl_map = pointer_map_create ();
5531 id.debug_map = NULL;
5532 id.copy_decl = copy_decl_no_change;
5533
5534 type = remap_type_1 (type, &id);
5535
5536 pointer_map_destroy (id.decl_map);
5537 if (id.debug_map)
5538 pointer_map_destroy (id.debug_map);
5539
5540 TYPE_CANONICAL (type) = type;
5541
5542 return type;
5543 }
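/* A minimal usage sketch (ORIG is hypothetical): a caller that needs a
   modifiable copy of a structure type, e.g. to adjust it without affecting
   other users of ORIG, can write

	tree copy = build_duplicate_type (orig);

   COPY gets freshly remapped FIELD_DECLs and is made its own
   TYPE_CANONICAL, so it is not treated as the same canonical type as
   ORIG.  */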