[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "insn-config.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-iterator.h"
37 #include "cgraph.h"
38 #include "intl.h"
39 #include "tree-mudflap.h"
40 #include "tree-flow.h"
41 #include "function.h"
42 #include "tree-flow.h"
43 #include "tree-pretty-print.h"
44 #include "except.h"
45 #include "debug.h"
46 #include "pointer-set.h"
47 #include "ipa-prop.h"
48 #include "value-prof.h"
49 #include "tree-pass.h"
50 #include "target.h"
51 #include "integrate.h"
52
53 #include "rtl.h" /* FIXME: For asm_str_count. */
54
55 /* I'm not real happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"
58
59 /* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements are adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
73 Versioning: a function body is duplicated and the result is a new
74 function, rather than being inserted into the blocks of an existing
75 function as with inlining. Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined) those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
90
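/* As a minimal illustrative sketch of the inlining transform described
   above (hypothetical code, not taken from any testcase; the names are
   made up):

       int callee (int x) { return x + 1; }
       ...
       y = callee (a);

   after the callee body is duplicated into the caller becomes roughly

       x.1 = a;               x, a PARM_DECL, remapped to a local VAR_DECL
       retval.2 = x.1 + 1;    the RETURN_EXPR becomes a MODIFY_EXPR that
       y = retval.2;          stores to the returned-value variable

   with the branch semantics of the return handled by manipulating the
   CFG rather than by a statement.  */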
91 /* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104 /* Weights that estimate_num_insns uses to estimate the size of the
105 produced code. */
106
107 eni_weights eni_size_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the time necessary
110 to execute the produced code. */
111
112 eni_weights eni_time_weights;
113
114 /* Prototypes. */
115
116 static tree declare_return_variable (copy_body_data *, tree, tree);
117 static void remap_block (tree *, copy_body_data *);
118 static void copy_bind_expr (tree *, int *, copy_body_data *);
119 static tree mark_local_for_remap_r (tree *, int *, void *);
120 static void unsave_expr_1 (tree);
121 static tree unsave_r (tree *, int *, void *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130
131 /* Insert a tree->tree mapping for ID. Although the name suggests
132 that the trees should be variables, this is used for more than that. */
133
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
136 {
137 *pointer_map_insert (id->decl_map, key) = value;
138
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
143 }
144
145 /* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
147
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150 {
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
153
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
156
157 if (!target_for_debug_bind (key))
158 return;
159
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
162
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
165
166 *pointer_map_insert (id->debug_map, key) = value;
167 }
168
169 /* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173 static int processing_debug_stmt = 0;
174
175 /* Construct new SSA name for old NAME. ID is the inline context. */
176
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
179 {
180 tree new_tree;
181 tree *n;
182
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
184
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
188
189 if (processing_debug_stmt)
190 {
191 processing_debug_stmt = -1;
192 return name;
193 }
194
195 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
196 in copy_bb. */
197 new_tree = remap_decl (SSA_NAME_VAR (name), id);
198
199 /* We might have substituted a constant or another SSA_NAME for
200 the variable.
201
202 Replace the SSA name representing RESULT_DECL by the variable during
203 inlining: this saves us from the need to introduce a PHI node in the
204 case where the return value is only partly initialized. */
205 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
206 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
207 || !id->transform_return_to_modify))
208 {
209 struct ptr_info_def *pi;
210 new_tree = make_ssa_name (new_tree, NULL);
211 insert_decl_map (id, name, new_tree);
212 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
213 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
214 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
215 /* At least IPA points-to info can be directly transferred. */
216 if (id->src_cfun->gimple_df
217 && id->src_cfun->gimple_df->ipa_pta
218 && (pi = SSA_NAME_PTR_INFO (name))
219 && !pi->pt.anything)
220 {
221 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
222 new_pi->pt = pi->pt;
223 }
224 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
225 {
226 /* By inlining a function having an uninitialized variable, we might
227 extend its lifetime (the variable might get reused). This causes an
228 ICE in the case we end up extending the lifetime of an SSA name across
229 an abnormal edge, and it also increases register pressure.
230
231 We simply initialize all uninitialized vars to 0, except for the
232 case where we are inlining into the very first BB. We can avoid
233 this for all BBs that are not inside strongly connected
234 regions of the CFG, but this is expensive to test. */
235 if (id->entry_bb
236 && is_gimple_reg (SSA_NAME_VAR (name))
237 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
238 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
239 || EDGE_COUNT (id->entry_bb->preds) != 1))
240 {
241 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
242 gimple init_stmt;
243
244 init_stmt = gimple_build_assign (new_tree,
245 fold_convert (TREE_TYPE (new_tree),
246 integer_zero_node));
247 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
248 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
249 }
250 else
251 {
252 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
253 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
254 == name)
255 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
256 }
257 }
258 }
259 else
260 insert_decl_map (id, name, new_tree);
261 return new_tree;
262 }
263
264 /* Remap DECL during the copying of the BLOCK tree for the function. */
265
266 tree
267 remap_decl (tree decl, copy_body_data *id)
268 {
269 tree *n;
270
271 /* We only remap local variables in the current function. */
272
273 /* See if we have remapped this declaration. */
274
275 n = (tree *) pointer_map_contains (id->decl_map, decl);
276
277 if (!n && processing_debug_stmt)
278 {
279 processing_debug_stmt = -1;
280 return decl;
281 }
282
283 /* If we didn't already have an equivalent for this declaration,
284 create one now. */
285 if (!n)
286 {
287 /* Make a copy of the variable or label. */
288 tree t = id->copy_decl (decl, id);
289
290 /* Remember it, so that if we encounter this local entity again
291 we can reuse this copy. Do this early because remap_type may
292 need this decl for TYPE_STUB_DECL. */
293 insert_decl_map (id, decl, t);
294
295 if (!DECL_P (t))
296 return t;
297
298 /* Remap types, if necessary. */
299 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
300 if (TREE_CODE (t) == TYPE_DECL)
301 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
302
303 /* Remap sizes as necessary. */
304 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
305 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
306
307 /* If fields, do likewise for offset and qualifier. */
308 if (TREE_CODE (t) == FIELD_DECL)
309 {
310 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
311 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
312 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
313 }
314
315 if (cfun && gimple_in_ssa_p (cfun)
316 && (TREE_CODE (t) == VAR_DECL
317 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
318 {
319 get_var_ann (t);
320 add_referenced_var (t);
321 }
322 return t;
323 }
324
325 if (id->do_not_unshare)
326 return *n;
327 else
328 return unshare_expr (*n);
329 }
330
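/* Helper for remap_type.  TYPE is known to need a new copy (remap_type has
   already determined that it is variably modified); build the remapped type
   and register the mapping in ID.  */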
331 static tree
332 remap_type_1 (tree type, copy_body_data *id)
333 {
334 tree new_tree, t;
335
336 /* We do need a copy. Build and register it now. If this is a pointer or
337 reference type, remap the designated type and make a new pointer or
338 reference type. */
339 if (TREE_CODE (type) == POINTER_TYPE)
340 {
341 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
342 TYPE_MODE (type),
343 TYPE_REF_CAN_ALIAS_ALL (type));
344 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
345 new_tree = build_type_attribute_qual_variant (new_tree,
346 TYPE_ATTRIBUTES (type),
347 TYPE_QUALS (type));
348 insert_decl_map (id, type, new_tree);
349 return new_tree;
350 }
351 else if (TREE_CODE (type) == REFERENCE_TYPE)
352 {
353 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
354 TYPE_MODE (type),
355 TYPE_REF_CAN_ALIAS_ALL (type));
356 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
357 new_tree = build_type_attribute_qual_variant (new_tree,
358 TYPE_ATTRIBUTES (type),
359 TYPE_QUALS (type));
360 insert_decl_map (id, type, new_tree);
361 return new_tree;
362 }
363 else
364 new_tree = copy_node (type);
365
366 insert_decl_map (id, type, new_tree);
367
368 /* This is a new type, not a copy of an old type. Need to reassociate
369 variants. We can handle everything except the main variant lazily. */
370 t = TYPE_MAIN_VARIANT (type);
371 if (type != t)
372 {
373 t = remap_type (t, id);
374 TYPE_MAIN_VARIANT (new_tree) = t;
375 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
376 TYPE_NEXT_VARIANT (t) = new_tree;
377 }
378 else
379 {
380 TYPE_MAIN_VARIANT (new_tree) = new_tree;
381 TYPE_NEXT_VARIANT (new_tree) = NULL;
382 }
383
384 if (TYPE_STUB_DECL (type))
385 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
386
387 /* Lazily create pointer and reference types. */
388 TYPE_POINTER_TO (new_tree) = NULL;
389 TYPE_REFERENCE_TO (new_tree) = NULL;
390
391 switch (TREE_CODE (new_tree))
392 {
393 case INTEGER_TYPE:
394 case REAL_TYPE:
395 case FIXED_POINT_TYPE:
396 case ENUMERAL_TYPE:
397 case BOOLEAN_TYPE:
398 t = TYPE_MIN_VALUE (new_tree);
399 if (t && TREE_CODE (t) != INTEGER_CST)
400 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
401
402 t = TYPE_MAX_VALUE (new_tree);
403 if (t && TREE_CODE (t) != INTEGER_CST)
404 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
405 return new_tree;
406
407 case FUNCTION_TYPE:
408 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
409 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
410 return new_tree;
411
412 case ARRAY_TYPE:
413 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
414 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
415 break;
416
417 case RECORD_TYPE:
418 case UNION_TYPE:
419 case QUAL_UNION_TYPE:
420 {
421 tree f, nf = NULL;
422
423 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
424 {
425 t = remap_decl (f, id);
426 DECL_CONTEXT (t) = new_tree;
427 TREE_CHAIN (t) = nf;
428 nf = t;
429 }
430 TYPE_FIELDS (new_tree) = nreverse (nf);
431 }
432 break;
433
434 case OFFSET_TYPE:
435 default:
436 /* Shouldn't have been thought variable sized. */
437 gcc_unreachable ();
438 }
439
440 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
441 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
442
443 return new_tree;
444 }
445
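/* Remap TYPE in the context of ID.  Reuse a previously created mapping if
   one exists; otherwise the type only needs remapping if it is variably
   modified in ID->src_fn.  */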
446 tree
447 remap_type (tree type, copy_body_data *id)
448 {
449 tree *node;
450 tree tmp;
451
452 if (type == NULL)
453 return type;
454
455 /* See if we have remapped this type. */
456 node = (tree *) pointer_map_contains (id->decl_map, type);
457 if (node)
458 return *node;
459
460 /* The type only needs remapping if it's variably modified. */
461 if (! variably_modified_type_p (type, id->src_fn))
462 {
463 insert_decl_map (id, type, type);
464 return type;
465 }
466
467 id->remapping_type_depth++;
468 tmp = remap_type_1 (type, id);
469 id->remapping_type_depth--;
470
471 return tmp;
472 }
473
474 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
475 is NULL or TYPE has not been remapped before. */
476
477 static tree
478 remapped_type (tree type, copy_body_data *id)
479 {
480 tree *node;
481
482 if (type == NULL)
483 return type;
484
485 /* See if we have remapped this type. */
486 node = (tree *) pointer_map_contains (id->decl_map, type);
487 if (node)
488 return *node;
489 else
490 return NULL;
491 }
492
494 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
495
496 static bool
497 can_be_nonlocal (tree decl, copy_body_data *id)
498 {
499 /* We can not duplicate function decls. */
500 if (TREE_CODE (decl) == FUNCTION_DECL)
501 return true;
502
503 /* Local static vars must be non-local or we get multiple declaration
504 problems. */
505 if (TREE_CODE (decl) == VAR_DECL
506 && !auto_var_in_fn_p (decl, id->src_fn))
507 return true;
508
509 /* At the moment dwarf2out can handle only these types of nodes. We
510 can support more later. */
511 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
512 return false;
513
514 /* We must use global type. We call remapped_type instead of
515 remap_type since we don't want to remap this type here if it
516 hasn't been remapped before. */
517 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
518 return false;
519
520 /* Without SSA we can't tell if a variable is used. */
521 if (!gimple_in_ssa_p (cfun))
522 return false;
523
524 /* Live variables must be copied so we can attach DECL_RTL. */
525 if (var_ann (decl))
526 return false;
527
528 return true;
529 }
530
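/* Remap the chain of declarations DECLS using ID.  Declarations that can
   stay non-local are pushed onto *NONLOCALIZED_LIST (when non-NULL) instead
   of being remapped.  Return the new chain.  */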
531 static tree
532 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
533 {
534 tree old_var;
535 tree new_decls = NULL_TREE;
536
537 /* Remap its variables. */
538 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
539 {
540 tree new_var;
541
542 if (can_be_nonlocal (old_var, id))
543 {
544 if (TREE_CODE (old_var) == VAR_DECL
545 && ! DECL_EXTERNAL (old_var)
546 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
547 cfun->local_decls = tree_cons (NULL_TREE, old_var,
548 cfun->local_decls);
549 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
550 && !DECL_IGNORED_P (old_var)
551 && nonlocalized_list)
552 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
553 continue;
554 }
555
556 /* Remap the variable. */
557 new_var = remap_decl (old_var, id);
558
559 /* If we didn't remap this variable, we can't mess with its
560 TREE_CHAIN. If we remapped this variable to the return slot, it's
561 already declared somewhere else, so don't declare it here. */
562
563 if (new_var == id->retvar)
564 ;
565 else if (!new_var)
566 {
567 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
568 && !DECL_IGNORED_P (old_var)
569 && nonlocalized_list)
570 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
571 }
572 else
573 {
574 gcc_assert (DECL_P (new_var));
575 TREE_CHAIN (new_var) = new_decls;
576 new_decls = new_var;
577
578 /* Also copy value-expressions. */
579 if (TREE_CODE (new_var) == VAR_DECL
580 && DECL_HAS_VALUE_EXPR_P (new_var))
581 {
582 tree tem = DECL_VALUE_EXPR (new_var);
583 bool old_regimplify = id->regimplify;
584 id->remapping_type_depth++;
585 walk_tree (&tem, copy_tree_body_r, id, NULL);
586 id->remapping_type_depth--;
587 id->regimplify = old_regimplify;
588 SET_DECL_VALUE_EXPR (new_var, tem);
589 }
590 }
591 }
592
593 return nreverse (new_decls);
594 }
595
596 /* Copy the BLOCK to contain remapped versions of the variables
597 therein. And hook the new block into the block-tree. */
598
599 static void
600 remap_block (tree *block, copy_body_data *id)
601 {
602 tree old_block;
603 tree new_block;
604
605 /* Make the new block. */
606 old_block = *block;
607 new_block = make_node (BLOCK);
608 TREE_USED (new_block) = TREE_USED (old_block);
609 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
610 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
611 BLOCK_NONLOCALIZED_VARS (new_block)
612 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
613 *block = new_block;
614
615 /* Remap its variables. */
616 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
617 &BLOCK_NONLOCALIZED_VARS (new_block),
618 id);
619
620 if (id->transform_lang_insert_block)
621 id->transform_lang_insert_block (new_block);
622
623 /* Remember the remapped block. */
624 insert_decl_map (id, old_block, new_block);
625 }
626
627 /* Copy the whole block tree and root it in id->block. */
628 static tree
629 remap_blocks (tree block, copy_body_data *id)
630 {
631 tree t;
632 tree new_tree = block;
633
634 if (!block)
635 return NULL;
636
637 remap_block (&new_tree, id);
638 gcc_assert (new_tree != block);
639 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
640 prepend_lexical_block (new_tree, remap_blocks (t, id));
641 /* Blocks are in arbitrary order, but to make things slightly prettier do
642 not swap their order when producing a copy. */
643 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
644 return new_tree;
645 }
646
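/* Replace the STATEMENT_LIST pointed to by *TP with a deep copy of it,
   copying nested STATEMENT_LISTs recursively.  */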
647 static void
648 copy_statement_list (tree *tp)
649 {
650 tree_stmt_iterator oi, ni;
651 tree new_tree;
652
653 new_tree = alloc_stmt_list ();
654 ni = tsi_start (new_tree);
655 oi = tsi_start (*tp);
656 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
657 *tp = new_tree;
658
659 for (; !tsi_end_p (oi); tsi_next (&oi))
660 {
661 tree stmt = tsi_stmt (oi);
662 if (TREE_CODE (stmt) == STATEMENT_LIST)
663 copy_statement_list (&stmt);
664 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
665 }
666 }
667
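/* Copy the BIND_EXPR pointed to by *TP, remapping its block and the
   declarations it binds using ID.  */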
668 static void
669 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
670 {
671 tree block = BIND_EXPR_BLOCK (*tp);
672 /* Copy (and replace) the statement. */
673 copy_tree_r (tp, walk_subtrees, NULL);
674 if (block)
675 {
676 remap_block (&block, id);
677 BIND_EXPR_BLOCK (*tp) = block;
678 }
679
680 if (BIND_EXPR_VARS (*tp))
681 /* This will remap a lot of the same decls again, but this should be
682 harmless. */
683 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
684 }
685
686
687 /* Create a new gimple_seq by remapping all the statements in BODY
688 using the inlining information in ID. */
689
690 static gimple_seq
691 remap_gimple_seq (gimple_seq body, copy_body_data *id)
692 {
693 gimple_stmt_iterator si;
694 gimple_seq new_body = NULL;
695
696 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
697 {
698 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
699 gimple_seq_add_stmt (&new_body, new_stmt);
700 }
701
702 return new_body;
703 }
704
705
706 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
707 block using the mapping information in ID. */
708
709 static gimple
710 copy_gimple_bind (gimple stmt, copy_body_data *id)
711 {
712 gimple new_bind;
713 tree new_block, new_vars;
714 gimple_seq body, new_body;
715
716 /* Copy the statement. Note that we purposely don't use copy_stmt
717 here because we need to remap statements as we copy. */
718 body = gimple_bind_body (stmt);
719 new_body = remap_gimple_seq (body, id);
720
721 new_block = gimple_bind_block (stmt);
722 if (new_block)
723 remap_block (&new_block, id);
724
725 /* This will remap a lot of the same decls again, but this should be
726 harmless. */
727 new_vars = gimple_bind_vars (stmt);
728 if (new_vars)
729 new_vars = remap_decls (new_vars, NULL, id);
730
731 new_bind = gimple_build_bind (new_vars, new_body, new_block);
732
733 return new_bind;
734 }
735
736
737 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
738 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
739 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
740 recursing into the child nodes of *TP. */
741
742 static tree
743 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
744 {
745 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
746 copy_body_data *id = (copy_body_data *) wi_p->info;
747 tree fn = id->src_fn;
748
749 if (TREE_CODE (*tp) == SSA_NAME)
750 {
751 *tp = remap_ssa_name (*tp, id);
752 *walk_subtrees = 0;
753 return NULL;
754 }
755 else if (auto_var_in_fn_p (*tp, fn))
756 {
757 /* Local variables and labels need to be replaced by equivalent
758 variables. We don't want to copy static variables; there's
759 only one of those, no matter how many times we inline the
760 containing function. Similarly for globals from an outer
761 function. */
762 tree new_decl;
763
764 /* Remap the declaration. */
765 new_decl = remap_decl (*tp, id);
766 gcc_assert (new_decl);
767 /* Replace this variable with the copy. */
768 STRIP_TYPE_NOPS (new_decl);
769 /* ??? The C++ frontend uses void * pointer zero to initialize
770 any other type. This confuses the middle-end type verification.
771 As cloned bodies do not go through gimplification again the fixup
772 there doesn't trigger. */
773 if (TREE_CODE (new_decl) == INTEGER_CST
774 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
775 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
776 *tp = new_decl;
777 *walk_subtrees = 0;
778 }
779 else if (TREE_CODE (*tp) == STATEMENT_LIST)
780 gcc_unreachable ();
781 else if (TREE_CODE (*tp) == SAVE_EXPR)
782 gcc_unreachable ();
783 else if (TREE_CODE (*tp) == LABEL_DECL
784 && (!DECL_CONTEXT (*tp)
785 || decl_function_context (*tp) == id->src_fn))
786 /* These may need to be remapped for EH handling. */
787 *tp = remap_decl (*tp, id);
788 else if (TYPE_P (*tp))
789 /* Types may need remapping as well. */
790 *tp = remap_type (*tp, id);
791 else if (CONSTANT_CLASS_P (*tp))
792 {
793 /* If this is a constant, we have to copy the node iff the type
794 will be remapped. copy_tree_r will not copy a constant. */
795 tree new_type = remap_type (TREE_TYPE (*tp), id);
796
797 if (new_type == TREE_TYPE (*tp))
798 *walk_subtrees = 0;
799
800 else if (TREE_CODE (*tp) == INTEGER_CST)
801 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
802 TREE_INT_CST_HIGH (*tp));
803 else
804 {
805 *tp = copy_node (*tp);
806 TREE_TYPE (*tp) = new_type;
807 }
808 }
809 else
810 {
811 /* Otherwise, just copy the node. Note that copy_tree_r already
812 knows not to copy VAR_DECLs, etc., so this is safe. */
813 if (TREE_CODE (*tp) == INDIRECT_REF)
814 {
815 /* Get rid of *& from inline substitutions that can happen when a
816 pointer argument is an ADDR_EXPR. */
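/* For example (hypothetical): inlining f (int *p) { ... *p ... } for a
   call f (&x) substitutes &x for p, so *p would become *&x; the code
   below folds that back to plain x.  */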
817 tree decl = TREE_OPERAND (*tp, 0);
818 tree *n;
819
820 n = (tree *) pointer_map_contains (id->decl_map, decl);
821 if (n)
822 {
823 tree type, new_tree, old;
824
825 /* If we happen to get an ADDR_EXPR in n->value, strip
826 it manually here as we'll eventually get ADDR_EXPRs
827 which lie about their types pointed to. In this case
828 build_fold_indirect_ref wouldn't strip the
829 INDIRECT_REF, but we absolutely rely on that. As
830 fold_indirect_ref does other useful transformations,
831 try that first, though. */
832 type = TREE_TYPE (TREE_TYPE (*n));
833 new_tree = unshare_expr (*n);
834 old = *tp;
835 *tp = gimple_fold_indirect_ref (new_tree);
836 if (!*tp)
837 {
838 if (TREE_CODE (new_tree) == ADDR_EXPR)
839 {
840 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
841 type, new_tree);
842 /* ??? We should either assert here or build
843 a VIEW_CONVERT_EXPR instead of blindly leaking
844 incompatible types to our IL. */
845 if (! *tp)
846 *tp = TREE_OPERAND (new_tree, 0);
847 }
848 else
849 {
850 *tp = build1 (INDIRECT_REF, type, new_tree);
851 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
852 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
853 }
854 }
855 *walk_subtrees = 0;
856 return NULL;
857 }
858 }
859
860 /* Here is the "usual case". Copy this tree node, and then
861 tweak some special cases. */
862 copy_tree_r (tp, walk_subtrees, NULL);
863
864 /* Global variables we haven't seen yet need to go into referenced
865 vars, unless they are referenced only from types. */
866 if (gimple_in_ssa_p (cfun)
867 && TREE_CODE (*tp) == VAR_DECL
868 && id->remapping_type_depth == 0
869 && !processing_debug_stmt)
870 add_referenced_var (*tp);
871
872 /* We should never have TREE_BLOCK set on non-statements. */
873 if (EXPR_P (*tp))
874 gcc_assert (!TREE_BLOCK (*tp));
875
876 if (TREE_CODE (*tp) != OMP_CLAUSE)
877 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
878
879 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
880 {
881 /* The copied TARGET_EXPR has never been expanded, even if the
882 original node was expanded already. */
883 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
884 TREE_OPERAND (*tp, 3) = NULL_TREE;
885 }
886 else if (TREE_CODE (*tp) == ADDR_EXPR)
887 {
888 /* Variable substitution need not be simple. In particular,
889 the INDIRECT_REF substitution above. Make sure that
890 TREE_CONSTANT and friends are up-to-date. But make sure
891 to not improperly set TREE_BLOCK on some sub-expressions. */
892 int invariant = is_gimple_min_invariant (*tp);
893 tree block = id->block;
894 id->block = NULL_TREE;
895 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
896 id->block = block;
897
898 /* Handle the case where we substituted an INDIRECT_REF
899 into the operand of the ADDR_EXPR. */
900 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
901 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
902 else
903 recompute_tree_invariant_for_addr_expr (*tp);
904
905 /* If this used to be invariant, but is not any longer,
906 then regimplification is probably needed. */
907 if (invariant && !is_gimple_min_invariant (*tp))
908 id->regimplify = true;
909
910 *walk_subtrees = 0;
911 }
912 }
913
914 /* Keep iterating. */
915 return NULL_TREE;
916 }
917
918
919 /* Called from copy_body_id via walk_tree. DATA is really a
920 `copy_body_data *'. */
921
922 tree
923 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
924 {
925 copy_body_data *id = (copy_body_data *) data;
926 tree fn = id->src_fn;
927 tree new_block;
928
929 /* Begin by recognizing trees that we'll completely rewrite for the
930 inlining context. Our output for these trees is completely
931 different from our input (e.g. RETURN_EXPR is deleted, and morphs
932 into an edge). Further down, we'll handle trees that get
933 duplicated and/or tweaked. */
934
935 /* When requested, RETURN_EXPRs should be transformed to just the
936 contained MODIFY_EXPR. The branch semantics of the return will
937 be handled elsewhere by manipulating the CFG rather than a statement. */
938 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
939 {
940 tree assignment = TREE_OPERAND (*tp, 0);
941
942 /* If we're returning something, just turn that into an
943 assignment into the equivalent of the original RESULT_DECL.
944 If the "assignment" is just the result decl, the result
945 decl has already been set (e.g. a recent "foo (&result_decl,
946 ...)"); just toss the entire RETURN_EXPR. */
947 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
948 {
949 /* Replace the RETURN_EXPR with (a copy of) the
950 MODIFY_EXPR hanging underneath. */
951 *tp = copy_node (assignment);
952 }
953 else /* Else the RETURN_EXPR returns no value. */
954 {
955 *tp = NULL;
956 return (tree) (void *)1;
957 }
958 }
959 else if (TREE_CODE (*tp) == SSA_NAME)
960 {
961 *tp = remap_ssa_name (*tp, id);
962 *walk_subtrees = 0;
963 return NULL;
964 }
965
966 /* Local variables and labels need to be replaced by equivalent
967 variables. We don't want to copy static variables; there's only
968 one of those, no matter how many times we inline the containing
969 function. Similarly for globals from an outer function. */
970 else if (auto_var_in_fn_p (*tp, fn))
971 {
972 tree new_decl;
973
974 /* Remap the declaration. */
975 new_decl = remap_decl (*tp, id);
976 gcc_assert (new_decl);
977 /* Replace this variable with the copy. */
978 STRIP_TYPE_NOPS (new_decl);
979 *tp = new_decl;
980 *walk_subtrees = 0;
981 }
982 else if (TREE_CODE (*tp) == STATEMENT_LIST)
983 copy_statement_list (tp);
984 else if (TREE_CODE (*tp) == SAVE_EXPR
985 || TREE_CODE (*tp) == TARGET_EXPR)
986 remap_save_expr (tp, id->decl_map, walk_subtrees);
987 else if (TREE_CODE (*tp) == LABEL_DECL
988 && (! DECL_CONTEXT (*tp)
989 || decl_function_context (*tp) == id->src_fn))
990 /* These may need to be remapped for EH handling. */
991 *tp = remap_decl (*tp, id);
992 else if (TREE_CODE (*tp) == BIND_EXPR)
993 copy_bind_expr (tp, walk_subtrees, id);
994 /* Types may need remapping as well. */
995 else if (TYPE_P (*tp))
996 *tp = remap_type (*tp, id);
997
998 /* If this is a constant, we have to copy the node iff the type will be
999 remapped. copy_tree_r will not copy a constant. */
1000 else if (CONSTANT_CLASS_P (*tp))
1001 {
1002 tree new_type = remap_type (TREE_TYPE (*tp), id);
1003
1004 if (new_type == TREE_TYPE (*tp))
1005 *walk_subtrees = 0;
1006
1007 else if (TREE_CODE (*tp) == INTEGER_CST)
1008 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1009 TREE_INT_CST_HIGH (*tp));
1010 else
1011 {
1012 *tp = copy_node (*tp);
1013 TREE_TYPE (*tp) = new_type;
1014 }
1015 }
1016
1017 /* Otherwise, just copy the node. Note that copy_tree_r already
1018 knows not to copy VAR_DECLs, etc., so this is safe. */
1019 else
1020 {
1021 /* Here we handle trees that are not completely rewritten.
1022 First we detect some inlining-induced bogosities for
1023 discarding. */
1024 if (TREE_CODE (*tp) == MODIFY_EXPR
1025 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1026 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1027 {
1028 /* Some assignments VAR = VAR; don't generate any rtl code
1029 and thus don't count as variable modification. Avoid
1030 keeping bogosities like 0 = 0. */
1031 tree decl = TREE_OPERAND (*tp, 0), value;
1032 tree *n;
1033
1034 n = (tree *) pointer_map_contains (id->decl_map, decl);
1035 if (n)
1036 {
1037 value = *n;
1038 STRIP_TYPE_NOPS (value);
1039 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1040 {
1041 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1042 return copy_tree_body_r (tp, walk_subtrees, data);
1043 }
1044 }
1045 }
1046 else if (TREE_CODE (*tp) == INDIRECT_REF)
1047 {
1048 /* Get rid of *& from inline substitutions that can happen when a
1049 pointer argument is an ADDR_EXPR. */
1050 tree decl = TREE_OPERAND (*tp, 0);
1051 tree *n;
1052
1053 n = (tree *) pointer_map_contains (id->decl_map, decl);
1054 if (n)
1055 {
1056 tree new_tree;
1057 tree old;
1058 /* If we happen to get an ADDR_EXPR in n->value, strip
1059 it manually here as we'll eventually get ADDR_EXPRs
1060 which lie about their types pointed to. In this case
1061 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1062 but we absolutely rely on that. As fold_indirect_ref
1063 does other useful transformations, try that first, though. */
1064 tree type = TREE_TYPE (TREE_TYPE (*n));
1065 if (id->do_not_unshare)
1066 new_tree = *n;
1067 else
1068 new_tree = unshare_expr (*n);
1069 old = *tp;
1070 *tp = gimple_fold_indirect_ref (new_tree);
1071 if (! *tp)
1072 {
1073 if (TREE_CODE (new_tree) == ADDR_EXPR)
1074 {
1075 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1076 type, new_tree);
1077 /* ??? We should either assert here or build
1078 a VIEW_CONVERT_EXPR instead of blindly leaking
1079 incompatible types to our IL. */
1080 if (! *tp)
1081 *tp = TREE_OPERAND (new_tree, 0);
1082 }
1083 else
1084 {
1085 *tp = build1 (INDIRECT_REF, type, new_tree);
1086 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1087 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1088 }
1089 }
1090 *walk_subtrees = 0;
1091 return NULL;
1092 }
1093 }
1094
1095 /* Here is the "usual case". Copy this tree node, and then
1096 tweak some special cases. */
1097 copy_tree_r (tp, walk_subtrees, NULL);
1098
1099 /* Global variables we haven't seen yet need to go into referenced
1100 vars, unless they are referenced only from types or debug stmts. */
1101 if (gimple_in_ssa_p (cfun)
1102 && TREE_CODE (*tp) == VAR_DECL
1103 && id->remapping_type_depth == 0
1104 && !processing_debug_stmt)
1105 add_referenced_var (*tp);
1106
1107 /* If EXPR has a block defined, map it to the newly constructed block.
1108 When inlining we want EXPRs without a block to appear in the block
1109 of the function call if we are not remapping a type. */
1110 if (EXPR_P (*tp))
1111 {
1112 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1113 if (TREE_BLOCK (*tp))
1114 {
1115 tree *n;
1116 n = (tree *) pointer_map_contains (id->decl_map,
1117 TREE_BLOCK (*tp));
1118 gcc_assert (n || id->remapping_type_depth != 0);
1119 if (n)
1120 new_block = *n;
1121 }
1122 TREE_BLOCK (*tp) = new_block;
1123 }
1124
1125 if (TREE_CODE (*tp) != OMP_CLAUSE)
1126 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1127
1128 /* The copied TARGET_EXPR has never been expanded, even if the
1129 original node was expanded already. */
1130 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1131 {
1132 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1133 TREE_OPERAND (*tp, 3) = NULL_TREE;
1134 }
1135
1136 /* Variable substitution need not be simple. In particular, the
1137 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1138 and friends are up-to-date. */
1139 else if (TREE_CODE (*tp) == ADDR_EXPR)
1140 {
1141 int invariant = is_gimple_min_invariant (*tp);
1142 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1143
1144 /* Handle the case where we substituted an INDIRECT_REF
1145 into the operand of the ADDR_EXPR. */
1146 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1147 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1148 else
1149 recompute_tree_invariant_for_addr_expr (*tp);
1150
1151 /* If this used to be invariant, but is not any longer,
1152 then regimplification is probably needed. */
1153 if (invariant && !is_gimple_min_invariant (*tp))
1154 id->regimplify = true;
1155
1156 *walk_subtrees = 0;
1157 }
1158 }
1159
1160 /* Keep iterating. */
1161 return NULL_TREE;
1162 }
1163
1164 /* Helper for remap_gimple_stmt. Given an EH region number for the
1165 source function, map that to the duplicate EH region number in
1166 the destination function. */
1167
1168 static int
1169 remap_eh_region_nr (int old_nr, copy_body_data *id)
1170 {
1171 eh_region old_r, new_r;
1172 void **slot;
1173
1174 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1175 slot = pointer_map_contains (id->eh_map, old_r);
1176 new_r = (eh_region) *slot;
1177
1178 return new_r->index;
1179 }
1180
1181 /* Similar, but operate on INTEGER_CSTs. */
1182
1183 static tree
1184 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1185 {
1186 int old_nr, new_nr;
1187
1188 old_nr = tree_low_cst (old_t_nr, 0);
1189 new_nr = remap_eh_region_nr (old_nr, id);
1190
1191 return build_int_cst (NULL, new_nr);
1192 }
1193
1194 /* Helper for copy_bb. Remap statement STMT using the inlining
1195 information in ID. Return the new statement copy. */
1196
1197 static gimple
1198 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1199 {
1200 gimple copy = NULL;
1201 struct walk_stmt_info wi;
1202 tree new_block;
1203 bool skip_first = false;
1204
1205 /* Begin by recognizing trees that we'll completely rewrite for the
1206 inlining context. Our output for these trees is completely
1207 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1208 into an edge). Further down, we'll handle trees that get
1209 duplicated and/or tweaked. */
1210
1211 /* When requested, GIMPLE_RETURNs should be transformed to just the
1212 contained GIMPLE_ASSIGN. The branch semantics of the return will
1213 be handled elsewhere by manipulating the CFG rather than the
1214 statement. */
1215 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1216 {
1217 tree retval = gimple_return_retval (stmt);
1218
1219 /* If we're returning something, just turn that into an
1220 assignment into the equivalent of the original RESULT_DECL.
1221 If RETVAL is just the result decl, the result decl has
1222 already been set (e.g. a recent "foo (&result_decl, ...)");
1223 just toss the entire GIMPLE_RETURN. */
1224 if (retval && TREE_CODE (retval) != RESULT_DECL)
1225 {
1226 copy = gimple_build_assign (id->retvar, retval);
1227 /* id->retvar is already substituted. Skip it on later remapping. */
1228 skip_first = true;
1229 }
1230 else
1231 return gimple_build_nop ();
1232 }
1233 else if (gimple_has_substatements (stmt))
1234 {
1235 gimple_seq s1, s2;
1236
1237 /* When cloning bodies from the C++ front end, we will be handed bodies
1238 in High GIMPLE form. Handle here all the High GIMPLE statements that
1239 have embedded statements. */
1240 switch (gimple_code (stmt))
1241 {
1242 case GIMPLE_BIND:
1243 copy = copy_gimple_bind (stmt, id);
1244 break;
1245
1246 case GIMPLE_CATCH:
1247 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1248 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1249 break;
1250
1251 case GIMPLE_EH_FILTER:
1252 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1253 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1254 break;
1255
1256 case GIMPLE_TRY:
1257 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1258 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1259 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1260 break;
1261
1262 case GIMPLE_WITH_CLEANUP_EXPR:
1263 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1264 copy = gimple_build_wce (s1);
1265 break;
1266
1267 case GIMPLE_OMP_PARALLEL:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_parallel
1270 (s1,
1271 gimple_omp_parallel_clauses (stmt),
1272 gimple_omp_parallel_child_fn (stmt),
1273 gimple_omp_parallel_data_arg (stmt));
1274 break;
1275
1276 case GIMPLE_OMP_TASK:
1277 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1278 copy = gimple_build_omp_task
1279 (s1,
1280 gimple_omp_task_clauses (stmt),
1281 gimple_omp_task_child_fn (stmt),
1282 gimple_omp_task_data_arg (stmt),
1283 gimple_omp_task_copy_fn (stmt),
1284 gimple_omp_task_arg_size (stmt),
1285 gimple_omp_task_arg_align (stmt));
1286 break;
1287
1288 case GIMPLE_OMP_FOR:
1289 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1290 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1291 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1292 gimple_omp_for_collapse (stmt), s2);
1293 {
1294 size_t i;
1295 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1296 {
1297 gimple_omp_for_set_index (copy, i,
1298 gimple_omp_for_index (stmt, i));
1299 gimple_omp_for_set_initial (copy, i,
1300 gimple_omp_for_initial (stmt, i));
1301 gimple_omp_for_set_final (copy, i,
1302 gimple_omp_for_final (stmt, i));
1303 gimple_omp_for_set_incr (copy, i,
1304 gimple_omp_for_incr (stmt, i));
1305 gimple_omp_for_set_cond (copy, i,
1306 gimple_omp_for_cond (stmt, i));
1307 }
1308 }
1309 break;
1310
1311 case GIMPLE_OMP_MASTER:
1312 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1313 copy = gimple_build_omp_master (s1);
1314 break;
1315
1316 case GIMPLE_OMP_ORDERED:
1317 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1318 copy = gimple_build_omp_ordered (s1);
1319 break;
1320
1321 case GIMPLE_OMP_SECTION:
1322 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1323 copy = gimple_build_omp_section (s1);
1324 break;
1325
1326 case GIMPLE_OMP_SECTIONS:
1327 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1328 copy = gimple_build_omp_sections
1329 (s1, gimple_omp_sections_clauses (stmt));
1330 break;
1331
1332 case GIMPLE_OMP_SINGLE:
1333 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1334 copy = gimple_build_omp_single
1335 (s1, gimple_omp_single_clauses (stmt));
1336 break;
1337
1338 case GIMPLE_OMP_CRITICAL:
1339 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1340 copy
1341 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1342 break;
1343
1344 default:
1345 gcc_unreachable ();
1346 }
1347 }
1348 else
1349 {
1350 if (gimple_assign_copy_p (stmt)
1351 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1352 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1353 {
1354 /* Here we handle statements that are not completely rewritten.
1355 First we detect some inlining-induced bogosities for
1356 discarding. */
1357
1358 /* Some assignments VAR = VAR; don't generate any rtl code
1359 and thus don't count as variable modification. Avoid
1360 keeping bogosities like 0 = 0. */
1361 tree decl = gimple_assign_lhs (stmt), value;
1362 tree *n;
1363
1364 n = (tree *) pointer_map_contains (id->decl_map, decl);
1365 if (n)
1366 {
1367 value = *n;
1368 STRIP_TYPE_NOPS (value);
1369 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1370 return gimple_build_nop ();
1371 }
1372 }
1373
1374 if (gimple_debug_bind_p (stmt))
1375 {
1376 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1377 gimple_debug_bind_get_value (stmt),
1378 stmt);
1379 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1380 return copy;
1381 }
1382
1383 /* Create a new deep copy of the statement. */
1384 copy = gimple_copy (stmt);
1385
1386 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1387 RESX and EH_DISPATCH. */
1388 if (id->eh_map)
1389 switch (gimple_code (copy))
1390 {
1391 case GIMPLE_CALL:
1392 {
1393 tree r, fndecl = gimple_call_fndecl (copy);
1394 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1395 switch (DECL_FUNCTION_CODE (fndecl))
1396 {
1397 case BUILT_IN_EH_COPY_VALUES:
1398 r = gimple_call_arg (copy, 1);
1399 r = remap_eh_region_tree_nr (r, id);
1400 gimple_call_set_arg (copy, 1, r);
1401 /* FALLTHRU */
1402
1403 case BUILT_IN_EH_POINTER:
1404 case BUILT_IN_EH_FILTER:
1405 r = gimple_call_arg (copy, 0);
1406 r = remap_eh_region_tree_nr (r, id);
1407 gimple_call_set_arg (copy, 0, r);
1408 break;
1409
1410 default:
1411 break;
1412 }
1413
1414 /* Reset alias info if we didn't apply measures to
1415 keep it valid over inlining by setting DECL_PT_UID. */
1416 if (!id->src_cfun->gimple_df
1417 || !id->src_cfun->gimple_df->ipa_pta)
1418 gimple_call_reset_alias_info (copy);
1419 }
1420 break;
1421
1422 case GIMPLE_RESX:
1423 {
1424 int r = gimple_resx_region (copy);
1425 r = remap_eh_region_nr (r, id);
1426 gimple_resx_set_region (copy, r);
1427 }
1428 break;
1429
1430 case GIMPLE_EH_DISPATCH:
1431 {
1432 int r = gimple_eh_dispatch_region (copy);
1433 r = remap_eh_region_nr (r, id);
1434 gimple_eh_dispatch_set_region (copy, r);
1435 }
1436 break;
1437
1438 default:
1439 break;
1440 }
1441 }
1442
1443 /* If STMT has a block defined, map it to the newly constructed
1444 block. When inlining we want statements without a block to
1445 appear in the block of the function call. */
1446 new_block = id->block;
1447 if (gimple_block (copy))
1448 {
1449 tree *n;
1450 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1451 gcc_assert (n);
1452 new_block = *n;
1453 }
1454
1455 gimple_set_block (copy, new_block);
1456
1457 if (gimple_debug_bind_p (copy))
1458 return copy;
1459
1460 /* Remap all the operands in COPY. */
1461 memset (&wi, 0, sizeof (wi));
1462 wi.info = id;
1463 if (skip_first)
1464 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1465 else
1466 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1467
1468 /* Clear the copied virtual operands. We are not remapping them here
1469 but are going to recreate them from scratch. */
1470 if (gimple_has_mem_ops (copy))
1471 {
1472 gimple_set_vdef (copy, NULL_TREE);
1473 gimple_set_vuse (copy, NULL_TREE);
1474 }
1475
1476 return copy;
1477 }
1478
1479
1480 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1481 later */
1482
1483 static basic_block
1484 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1485 gcov_type count_scale)
1486 {
1487 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1488 basic_block copy_basic_block;
1489 tree decl;
1490 gcov_type freq;
1491 basic_block prev;
1492
1493 /* Search for previous copied basic block. */
1494 prev = bb->prev_bb;
1495 while (!prev->aux)
1496 prev = prev->prev_bb;
1497
1498 /* create_basic_block() will append every new block to
1499 basic_block_info automatically. */
1500 copy_basic_block = create_basic_block (NULL, (void *) 0,
1501 (basic_block) prev->aux);
1502 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1503
1504 /* We are going to rebuild frequencies from scratch. These values
1505 are only of minor importance for driving canonicalize_loop_headers. */
1506 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1507
1508 /* We recompute frequencies after inlining, so this is quite safe. */
1509 if (freq > BB_FREQ_MAX)
1510 freq = BB_FREQ_MAX;
1511 copy_basic_block->frequency = freq;
1512
1513 copy_gsi = gsi_start_bb (copy_basic_block);
1514
1515 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1516 {
1517 gimple stmt = gsi_stmt (gsi);
1518 gimple orig_stmt = stmt;
1519
1520 id->regimplify = false;
1521 stmt = remap_gimple_stmt (stmt, id);
1522 if (gimple_nop_p (stmt))
1523 continue;
1524
1525 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1526 seq_gsi = copy_gsi;
1527
1528 /* With return slot optimization we can end up with
1529 non-gimple (foo *)&this->m, fix that here. */
1530 if (is_gimple_assign (stmt)
1531 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1532 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1533 {
1534 tree new_rhs;
1535 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1536 gimple_assign_rhs1 (stmt),
1537 true, NULL, false, GSI_NEW_STMT);
1538 gimple_assign_set_rhs1 (stmt, new_rhs);
1539 id->regimplify = false;
1540 }
1541
1542 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1543
1544 if (id->regimplify)
1545 gimple_regimplify_operands (stmt, &seq_gsi);
1546
1547 /* If copy_basic_block has been empty at the start of this iteration,
1548 call gsi_start_bb again to get at the newly added statements. */
1549 if (gsi_end_p (copy_gsi))
1550 copy_gsi = gsi_start_bb (copy_basic_block);
1551 else
1552 gsi_next (&copy_gsi);
1553
1554 /* Process the new statement. The call to gimple_regimplify_operands
1555 possibly turned the statement into multiple statements; we
1556 need to process all of them. */
1557 do
1558 {
1559 tree fn;
1560
1561 stmt = gsi_stmt (copy_gsi);
1562 if (is_gimple_call (stmt)
1563 && gimple_call_va_arg_pack_p (stmt)
1564 && id->gimple_call)
1565 {
1566 /* __builtin_va_arg_pack () should be replaced by
1567 all arguments corresponding to ... in the caller. */
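/* For example (hypothetical): if the inlined function has one named
   parameter and the caller's call is foo (fmt, 1, 2), the pack expands
   to the trailing anonymous arguments 1, 2.  */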
1568 tree p;
1569 gimple new_call;
1570 VEC(tree, heap) *argarray;
1571 size_t nargs = gimple_call_num_args (id->gimple_call);
1572 size_t n;
1573
1574 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1575 nargs--;
1576
1577 /* Create the new array of arguments. */
1578 n = nargs + gimple_call_num_args (stmt);
1579 argarray = VEC_alloc (tree, heap, n);
1580 VEC_safe_grow (tree, heap, argarray, n);
1581
1582 /* Copy all the arguments before '...' */
1583 memcpy (VEC_address (tree, argarray),
1584 gimple_call_arg_ptr (stmt, 0),
1585 gimple_call_num_args (stmt) * sizeof (tree));
1586
1587 /* Append the arguments passed in '...' */
1588 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1589 gimple_call_arg_ptr (id->gimple_call, 0)
1590 + (gimple_call_num_args (id->gimple_call) - nargs),
1591 nargs * sizeof (tree));
1592
1593 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1594 argarray);
1595
1596 VEC_free (tree, heap, argarray);
1597
1598 /* Copy all GIMPLE_CALL flags, location and block, except
1599 GF_CALL_VA_ARG_PACK. */
1600 gimple_call_copy_flags (new_call, stmt);
1601 gimple_call_set_va_arg_pack (new_call, false);
1602 gimple_set_location (new_call, gimple_location (stmt));
1603 gimple_set_block (new_call, gimple_block (stmt));
1604 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1605
1606 gsi_replace (&copy_gsi, new_call, false);
1607 stmt = new_call;
1608 }
1609 else if (is_gimple_call (stmt)
1610 && id->gimple_call
1611 && (decl = gimple_call_fndecl (stmt))
1612 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1613 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1614 {
1615 /* __builtin_va_arg_pack_len () should be replaced by
1616 the number of anonymous arguments. */
1617 size_t nargs = gimple_call_num_args (id->gimple_call);
1618 tree count, p;
1619 gimple new_stmt;
1620
1621 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1622 nargs--;
1623
1624 count = build_int_cst (integer_type_node, nargs);
1625 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1626 gsi_replace (&copy_gsi, new_stmt, false);
1627 stmt = new_stmt;
1628 }
1629
1630 /* Statements produced by inlining can be unfolded, especially
1631 when we constant propagated some operands. We can't fold
1632 them right now for two reasons:
1633 1) folding requires SSA_NAME_DEF_STMTs to be correct
1634 2) we can't change function calls to builtins.
1635 So we just mark the statement for later folding. We mark
1636 all new statements, instead of just statements that have changed
1637 by some nontrivial substitution, so even statements made
1638 foldable indirectly are updated. If this turns out to be
1639 expensive, copy_body can be told to watch for nontrivial
1640 changes. */
1641 if (id->statements_to_fold)
1642 pointer_set_insert (id->statements_to_fold, stmt);
1643
1644 /* We're duplicating a CALL_EXPR. Find any corresponding
1645 callgraph edges and update or duplicate them. */
1646 if (is_gimple_call (stmt))
1647 {
1648 struct cgraph_edge *edge;
1649 int flags;
1650
1651 switch (id->transform_call_graph_edges)
1652 {
1653 case CB_CGE_DUPLICATE:
1654 edge = cgraph_edge (id->src_node, orig_stmt);
1655 if (edge)
1656 {
1657 int edge_freq = edge->frequency;
1658 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1659 gimple_uid (stmt),
1660 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1661 edge->frequency, true);
1662 /* We could also just rescale the frequency, but
1663 doing so would introduce roundoff errors and make the
1664 verifier unhappy. */
1665 edge->frequency
1666 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1667 copy_basic_block);
1668 if (dump_file
1669 && profile_status_for_function (cfun) != PROFILE_ABSENT
1670 && (edge_freq > edge->frequency + 10
1671 || edge_freq < edge->frequency - 10))
1672 {
1673 fprintf (dump_file, "Edge frequency estimated by "
1674 "cgraph %i diverge from inliner's estimate %i\n",
1675 edge_freq,
1676 edge->frequency);
1677 fprintf (dump_file,
1678 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1679 bb->index,
1680 bb->frequency,
1681 copy_basic_block->frequency);
1682 }
1683 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1684 }
1685 break;
1686
1687 case CB_CGE_MOVE_CLONES:
1688 cgraph_set_call_stmt_including_clones (id->dst_node,
1689 orig_stmt, stmt);
1690 edge = cgraph_edge (id->dst_node, stmt);
1691 break;
1692
1693 case CB_CGE_MOVE:
1694 edge = cgraph_edge (id->dst_node, orig_stmt);
1695 if (edge)
1696 cgraph_set_call_stmt (edge, stmt);
1697 break;
1698
1699 default:
1700 gcc_unreachable ();
1701 }
1702
1703 /* Constant propagation on arguments done during inlining
1704 may create a new direct call. Produce an edge for it. */
1705 if ((!edge
1706 || (edge->indirect_inlining_edge
1707 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1708 && (fn = gimple_call_fndecl (stmt)) != NULL)
1709 {
1710 struct cgraph_node *dest = cgraph_node (fn);
1711
1712 /* We have a missing edge in the callgraph. This can happen
1713 when previous inlining turned an indirect call into a
1714 direct call by constant propagating arguments, or when we are
1715 producing a dead clone (for further cloning). In all
1716 other cases we hit a bug (incorrect node sharing is the
1717 most common reason for missing edges). */
1718 gcc_assert (dest->needed || !dest->analyzed
1719 || dest->address_taken
1720 || !id->src_node->analyzed);
1721 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1722 cgraph_create_edge_including_clones
1723 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1724 compute_call_stmt_bb_frequency (id->dst_node->decl,
1725 copy_basic_block),
1726 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1727 else
1728 cgraph_create_edge (id->dst_node, dest, stmt,
1729 bb->count,
1730 compute_call_stmt_bb_frequency
1731 (id->dst_node->decl, copy_basic_block),
1732 bb->loop_depth)->inline_failed
1733 = CIF_ORIGINALLY_INDIRECT_CALL;
1734 if (dump_file)
1735 {
1736 fprintf (dump_file, "Created new direct edge to %s\n",
1737 cgraph_node_name (dest));
1738 }
1739 }
1740
1741 flags = gimple_call_flags (stmt);
1742 if (flags & ECF_MAY_BE_ALLOCA)
1743 cfun->calls_alloca = true;
1744 if (flags & ECF_RETURNS_TWICE)
1745 cfun->calls_setjmp = true;
1746 }
1747
1748 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1749 id->eh_map, id->eh_lp_nr);
1750
1751 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1752 {
1753 ssa_op_iter i;
1754 tree def;
1755
1756 find_new_referenced_vars (gsi_stmt (copy_gsi));
1757 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1758 if (TREE_CODE (def) == SSA_NAME)
1759 SSA_NAME_DEF_STMT (def) = stmt;
1760 }
1761
1762 gsi_next (&copy_gsi);
1763 }
1764 while (!gsi_end_p (copy_gsi));
1765
1766 copy_gsi = gsi_last_bb (copy_basic_block);
1767 }
1768
1769 return copy_basic_block;
1770 }
1771
1772 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1773 form is quite easy, since the dominator relationship for the old basic blocks
1774 does not change.
1775
1776 There is, however, an exception: inlining might change the dominator relation
1777 across EH edges from basic blocks within the inlined function that lead
1778 to landing pads in the function we inline into.
1779
1780 The function fills in PHI_RESULTs of such PHI nodes if they refer
1781 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1782 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1783 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1784 set, and this means that there will be no overlapping live ranges
1785 for the underlying symbol.
1786
1787 This might change in the future if we allow redirecting of EH edges;
1788 we might then want to change the way the CFG is built pre-inlining to include
1789 all the possible edges. */
1790 static void
1791 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1792 bool can_throw, bool nonlocal_goto)
1793 {
1794 edge e;
1795 edge_iterator ei;
1796
1797 FOR_EACH_EDGE (e, ei, bb->succs)
1798 if (!e->dest->aux
1799 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1800 {
1801 gimple phi;
1802 gimple_stmt_iterator si;
1803
1804 if (!nonlocal_goto)
1805 gcc_assert (e->flags & EDGE_EH);
1806
1807 if (!can_throw)
1808 gcc_assert (!(e->flags & EDGE_EH));
1809
1810 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1811 {
1812 edge re;
1813
1814 phi = gsi_stmt (si);
1815
1816 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1817 gcc_assert (!e->dest->aux);
1818
1819 gcc_assert ((e->flags & EDGE_EH)
1820 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1821
1822 if (!is_gimple_reg (PHI_RESULT (phi)))
1823 {
1824 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1825 continue;
1826 }
1827
1828 re = find_edge (ret_bb, e->dest);
1829 gcc_assert (re);
1830 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1831 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1832
1833 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1834 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1835 }
1836 }
1837 }
1838
1839
1840 /* Copy edges from BB into its copy constructed earlier, scale profile
1841 accordingly. EH and abnormal edges are taken care of later. Assume the aux
1842 pointers point to the copies of each BB. Return true if any
1843 debug stmts are left after a statement that must end the basic block. */
1844
1845 static bool
1846 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1847 {
1848 basic_block new_bb = (basic_block) bb->aux;
1849 edge_iterator ei;
1850 edge old_edge;
1851 gimple_stmt_iterator si;
1852 int flags;
1853 bool need_debug_cleanup = false;
1854
1855 /* Use the indices from the original blocks to create edges for the
1856 new ones. */
1857 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1858 if (!(old_edge->flags & EDGE_EH))
1859 {
1860 edge new_edge;
1861
1862 flags = old_edge->flags;
1863
1864 /* Return edges do get a FALLTHRU flag when they get inlined. */
1865 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1866 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1867 flags |= EDGE_FALLTHRU;
1868 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1869 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1870 new_edge->probability = old_edge->probability;
1871 }
1872
1873 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1874 return false;
1875
1876 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1877 {
1878 gimple copy_stmt;
1879 bool can_throw, nonlocal_goto;
1880
1881 copy_stmt = gsi_stmt (si);
1882 if (!is_gimple_debug (copy_stmt))
1883 {
1884 update_stmt (copy_stmt);
1885 if (gimple_in_ssa_p (cfun))
1886 mark_symbols_for_renaming (copy_stmt);
1887 }
1888
1889 /* Do this before the possible split_block. */
1890 gsi_next (&si);
1891
1892 /* If this tree could throw an exception, there are two
1893 cases where we need to add abnormal edge(s): the
1894 tree wasn't in a region and there is a "current
1895 region" in the caller; or the original tree had
1896 EH edges. In both cases split the block after the tree,
1897 and add abnormal edge(s) as needed; we need both
1898 those from the callee and the caller.
1899 We check whether the copy can throw, because the const
1900 propagation can change an INDIRECT_REF which throws
1901 into a COMPONENT_REF which doesn't. If the copy
1902 can throw, the original could also throw. */
1903 can_throw = stmt_can_throw_internal (copy_stmt);
1904 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1905
1906 if (can_throw || nonlocal_goto)
1907 {
1908 if (!gsi_end_p (si))
1909 {
1910 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1911 gsi_next (&si);
1912 if (gsi_end_p (si))
1913 need_debug_cleanup = true;
1914 }
1915 if (!gsi_end_p (si))
1916 /* Note that bb's predecessor edges aren't necessarily
1917 right at this point; split_block doesn't care. */
1918 {
1919 edge e = split_block (new_bb, copy_stmt);
1920
1921 new_bb = e->dest;
1922 new_bb->aux = e->src->aux;
1923 si = gsi_start_bb (new_bb);
1924 }
1925 }
1926
1927 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1928 make_eh_dispatch_edges (copy_stmt);
1929 else if (can_throw)
1930 make_eh_edges (copy_stmt);
1931
1932 if (nonlocal_goto)
1933 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1934
1935 if ((can_throw || nonlocal_goto)
1936 && gimple_in_ssa_p (cfun))
1937 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1938 can_throw, nonlocal_goto);
1939 }
1940 return need_debug_cleanup;
1941 }
1942
1943 /* Copy the PHIs. All blocks and edges have been copied, some blocks
1944 were possibly split and new outgoing EH edges inserted.
1945 BB points to the block of the original function and AUX pointers link
1946 the original and newly copied blocks. */
1947
1948 static void
1949 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1950 {
1951 basic_block const new_bb = (basic_block) bb->aux;
1952 edge_iterator ei;
1953 gimple phi;
1954 gimple_stmt_iterator si;
1955
1956 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1957 {
1958 tree res, new_res;
1959 gimple new_phi;
1960 edge new_edge;
1961
1962 phi = gsi_stmt (si);
1963 res = PHI_RESULT (phi);
1964 new_res = res;
1965 if (is_gimple_reg (res))
1966 {
1967 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1968 SSA_NAME_DEF_STMT (new_res)
1969 = new_phi = create_phi_node (new_res, new_bb);
1970 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1971 {
1972 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1973 tree arg;
1974 tree new_arg;
1975 tree block = id->block;
1976 edge_iterator ei2;
1977
1978 /* When doing partial cloning, we allow PHIs on the entry block
1979 as long as all the arguments are the same. Find any input
1980 edge to see which argument to copy. */
1981 if (!old_edge)
1982 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
1983 if (!old_edge->src->aux)
1984 break;
1985
1986 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1987 new_arg = arg;
1988 id->block = NULL_TREE;
1989 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1990 id->block = block;
1991 gcc_assert (new_arg);
1992 /* With return slot optimization we can end up with
1993 non-gimple (foo *)&this->m, fix that here. */
1994 if (TREE_CODE (new_arg) != SSA_NAME
1995 && TREE_CODE (new_arg) != FUNCTION_DECL
1996 && !is_gimple_val (new_arg))
1997 {
1998 gimple_seq stmts = NULL;
1999 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2000 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
2001 }
2002 add_phi_arg (new_phi, new_arg, new_edge,
2003 gimple_phi_arg_location_from_edge (phi, old_edge));
2004 }
2005 }
2006 }
2007 }
2008
2009
2010 /* Wrapper for remap_decl so it can be used as a callback. */
2011
2012 static tree
2013 remap_decl_1 (tree decl, void *data)
2014 {
2015 return remap_decl (decl, (copy_body_data *) data);
2016 }
2017
2018 /* Build struct function and associated data structures for the new clone
2019 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2020
2021 static void
2022 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2023 {
2024 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2025 gcov_type count_scale;
2026
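/* Compute the ratio used to scale profile counts copied from the
   source function into the new clone. */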
2027 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2028 count_scale = (REG_BR_PROB_BASE * count
2029 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2030 else
2031 count_scale = REG_BR_PROB_BASE;
2032
2033 /* Register specific tree functions. */
2034 gimple_register_cfg_hooks ();
2035
2036 /* Get clean struct function. */
2037 push_struct_function (new_fndecl);
2038
2039 /* We will rebuild these, so just sanity check that they are empty. */
2040 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2041 gcc_assert (cfun->local_decls == NULL);
2042 gcc_assert (cfun->cfg == NULL);
2043 gcc_assert (cfun->decl == new_fndecl);
2044
2045 /* Copy items we preserve during cloning. */
2046 cfun->static_chain_decl = src_cfun->static_chain_decl;
2047 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2048 cfun->function_end_locus = src_cfun->function_end_locus;
2049 cfun->curr_properties = src_cfun->curr_properties;
2050 cfun->last_verified = src_cfun->last_verified;
2051 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2052 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2053 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2054 cfun->stdarg = src_cfun->stdarg;
2055 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2056 cfun->after_inlining = src_cfun->after_inlining;
2057 cfun->can_throw_non_call_exceptions
2058 = src_cfun->can_throw_non_call_exceptions;
2059 cfun->returns_struct = src_cfun->returns_struct;
2060 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2061 cfun->after_tree_profile = src_cfun->after_tree_profile;
2062
2063 init_empty_tree_cfg ();
2064
2065 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2066 ENTRY_BLOCK_PTR->count =
2067 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2068 REG_BR_PROB_BASE);
2069 ENTRY_BLOCK_PTR->frequency
2070 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2071 EXIT_BLOCK_PTR->count =
2072 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2073 REG_BR_PROB_BASE);
2074 EXIT_BLOCK_PTR->frequency =
2075 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2076 if (src_cfun->eh)
2077 init_eh_for_function ();
2078
2079 if (src_cfun->gimple_df)
2080 {
2081 init_tree_ssa (cfun);
2082 cfun->gimple_df->in_ssa_p = true;
2083 init_ssa_operands ();
2084 }
2085 pop_cfun ();
2086 }
2087
2088 /* Helper function for copy_cfg_body. Move debug stmts from the end
2089 of NEW_BB to the beginning of successor basic blocks when needed. If the
2090 successor has multiple predecessors, reset the debug stmt values,
2091 otherwise keep them. */
2092
2093 static void
2094 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2095 {
2096 edge e;
2097 edge_iterator ei;
2098 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2099
2100 if (gsi_end_p (si)
2101 || gsi_one_before_end_p (si)
2102 || !(stmt_can_throw_internal (gsi_stmt (si))
2103 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2104 return;
2105
2106 FOR_EACH_EDGE (e, ei, new_bb->succs)
2107 {
2108 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2109 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2110 while (is_gimple_debug (gsi_stmt (ssi)))
2111 {
2112 gimple stmt = gsi_stmt (ssi), new_stmt;
2113 tree var;
2114 tree value;
2115
2116 /* For the last edge move the debug stmts instead of copying
2117 them. */
2118 if (ei_one_before_end_p (ei))
2119 {
2120 si = ssi;
2121 gsi_prev (&ssi);
2122 if (!single_pred_p (e->dest))
2123 gimple_debug_bind_reset_value (stmt);
2124 gsi_remove (&si, false);
2125 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2126 continue;
2127 }
2128
2129 var = gimple_debug_bind_get_var (stmt);
2130 if (single_pred_p (e->dest))
2131 {
2132 value = gimple_debug_bind_get_value (stmt);
2133 value = unshare_expr (value);
2134 }
2135 else
2136 value = NULL_TREE;
2137 new_stmt = gimple_build_debug_bind (var, value, stmt);
2138 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2139 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2140 gsi_prev (&ssi);
2141 }
2142 }
2143 }
2144
2145 /* Make a copy of the body of FN so that it can be inserted inline in
2146 another function. Walks FN via CFG, returns new fndecl. */
2147
2148 static tree
2149 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2150 basic_block entry_block_map, basic_block exit_block_map,
2151 bitmap blocks_to_copy, basic_block new_entry)
2152 {
2153 tree callee_fndecl = id->src_fn;
2154 /* Original cfun for the callee, doesn't change. */
2155 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2156 struct function *cfun_to_copy;
2157 basic_block bb;
2158 tree new_fndecl = NULL;
2159 bool need_debug_cleanup = false;
2160 gcov_type count_scale;
2161 int last;
2162
2163 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2164 count_scale = (REG_BR_PROB_BASE * count
2165 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2166 else
2167 count_scale = REG_BR_PROB_BASE;
2168
2169 /* Register specific tree functions. */
2170 gimple_register_cfg_hooks ();
2171
2172 /* Must have a CFG here at this point. */
2173 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2174 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2175
2176 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2177
2178 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2179 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2180 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2181 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2182
2183 /* Duplicate any exception-handling regions. */
2184 if (cfun->eh)
2185 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2186 remap_decl_1, id);
2187
2188 /* Use aux pointers to map the original blocks to their copies. */
2189 FOR_EACH_BB_FN (bb, cfun_to_copy)
2190 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2191 {
2192 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2193 bb->aux = new_bb;
2194 new_bb->aux = bb;
2195 }
2196
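/* Remember the current last_basic_block; blocks created from now on
   (e.g. during EH edge insertion) are handled by a separate cleanup
   loop below. */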
2197 last = last_basic_block;
2198
2199 /* Now that we've duplicated the blocks, duplicate their edges. */
2200 FOR_ALL_BB_FN (bb, cfun_to_copy)
2201 if (!blocks_to_copy
2202 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2203 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2204
2205 if (new_entry)
2206 {
2207 edge e;
2208 e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2209 e->probability = REG_BR_PROB_BASE;
2210 e->count = entry_block_map->count;
2211 }
2212
2213 if (gimple_in_ssa_p (cfun))
2214 FOR_ALL_BB_FN (bb, cfun_to_copy)
2215 if (!blocks_to_copy
2216 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2217 copy_phis_for_bb (bb, id);
2218
2219 FOR_ALL_BB_FN (bb, cfun_to_copy)
2220 if (bb->aux)
2221 {
2222 if (need_debug_cleanup
2223 && bb->index != ENTRY_BLOCK
2224 && bb->index != EXIT_BLOCK)
2225 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2226 ((basic_block)bb->aux)->aux = NULL;
2227 bb->aux = NULL;
2228 }
2229
2230 /* Zero out AUX fields of blocks newly created during EH edge
2231 insertion. */
2232 for (; last < last_basic_block; last++)
2233 {
2234 if (need_debug_cleanup)
2235 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2236 BASIC_BLOCK (last)->aux = NULL;
2237 }
2238 entry_block_map->aux = NULL;
2239 exit_block_map->aux = NULL;
2240
2241 if (id->eh_map)
2242 {
2243 pointer_map_destroy (id->eh_map);
2244 id->eh_map = NULL;
2245 }
2246
2247 return new_fndecl;
2248 }
2249
2250 /* Copy the debug STMT using ID. We deal with these statements in a
2251 special way: if any variable in their VALUE expression wasn't
2252 remapped yet, we won't remap it, because that would get decl uids
2253 out of sync, causing codegen differences between -g and -g0. If
2254 this arises, we drop the VALUE expression altogether. */
2255
2256 static void
2257 copy_debug_stmt (gimple stmt, copy_body_data *id)
2258 {
2259 tree t, *n;
2260 struct walk_stmt_info wi;
2261
2262 t = id->block;
2263 if (gimple_block (stmt))
2264 {
2265 tree *n;
2266 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2267 if (n)
2268 t = *n;
2269 }
2270 gimple_set_block (stmt, t);
2271
2272 /* Remap all the operands in COPY. */
2273 memset (&wi, 0, sizeof (wi));
2274 wi.info = id;
2275
2276 processing_debug_stmt = 1;
2277
2278 t = gimple_debug_bind_get_var (stmt);
2279
2280 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2281 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2282 {
2283 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2284 t = *n;
2285 }
2286 else if (TREE_CODE (t) == VAR_DECL
2287 && !TREE_STATIC (t)
2288 && gimple_in_ssa_p (cfun)
2289 && !pointer_map_contains (id->decl_map, t)
2290 && !var_ann (t))
2291 /* T is a non-localized variable. */;
2292 else
2293 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2294
2295 gimple_debug_bind_set_var (stmt, t);
2296
2297 if (gimple_debug_bind_has_value_p (stmt))
2298 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2299 remap_gimple_op_r, &wi, NULL);
2300
2301 /* Punt if any decl couldn't be remapped. */
2302 if (processing_debug_stmt < 0)
2303 gimple_debug_bind_reset_value (stmt);
2304
2305 processing_debug_stmt = 0;
2306
2307 update_stmt (stmt);
2308 if (gimple_in_ssa_p (cfun))
2309 mark_symbols_for_renaming (stmt);
2310 }
2311
2312 /* Process deferred debug stmts. In order to give values better odds
2313 of being successfully remapped, we delay the processing of debug
2314 stmts until all other stmts that might require remapping are
2315 processed. */
2316
2317 static void
2318 copy_debug_stmts (copy_body_data *id)
2319 {
2320 size_t i;
2321 gimple stmt;
2322
2323 if (!id->debug_stmts)
2324 return;
2325
2326 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2327 copy_debug_stmt (stmt, id);
2328
2329 VEC_free (gimple, heap, id->debug_stmts);
2330 }
2331
2332 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2333 another function. */
2334
2335 static tree
2336 copy_tree_body (copy_body_data *id)
2337 {
2338 tree fndecl = id->src_fn;
2339 tree body = DECL_SAVED_TREE (fndecl);
2340
2341 walk_tree (&body, copy_tree_body_r, id, NULL);
2342
2343 return body;
2344 }
2345
2346 /* Make a copy of the body of FN so that it can be inserted inline in
2347 another function. */
2348
2349 static tree
2350 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2351 basic_block entry_block_map, basic_block exit_block_map,
2352 bitmap blocks_to_copy, basic_block new_entry)
2353 {
2354 tree fndecl = id->src_fn;
2355 tree body;
2356
2357 /* If this body has a CFG, walk CFG and copy. */
2358 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2359 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2360 blocks_to_copy, new_entry);
2361 copy_debug_stmts (id);
2362
2363 return body;
2364 }
2365
2366 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2367 defined in function FN, or of a data member thereof. */
2368
2369 static bool
2370 self_inlining_addr_expr (tree value, tree fn)
2371 {
2372 tree var;
2373
2374 if (TREE_CODE (value) != ADDR_EXPR)
2375 return false;
2376
2377 var = get_base_address (TREE_OPERAND (value, 0));
2378
2379 return var && auto_var_in_fn_p (var, fn);
2380 }
2381
2382 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2383 lexical block and line number information from base_stmt, if given,
2384 or from the last stmt of the block otherwise. */
2385
2386 static gimple
2387 insert_init_debug_bind (copy_body_data *id,
2388 basic_block bb, tree var, tree value,
2389 gimple base_stmt)
2390 {
2391 gimple note;
2392 gimple_stmt_iterator gsi;
2393 tree tracked_var;
2394
2395 if (!gimple_in_ssa_p (id->src_cfun))
2396 return NULL;
2397
2398 if (!MAY_HAVE_DEBUG_STMTS)
2399 return NULL;
2400
2401 tracked_var = target_for_debug_bind (var);
2402 if (!tracked_var)
2403 return NULL;
2404
2405 if (bb)
2406 {
2407 gsi = gsi_last_bb (bb);
2408 if (!base_stmt && !gsi_end_p (gsi))
2409 base_stmt = gsi_stmt (gsi);
2410 }
2411
2412 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2413
2414 if (bb)
2415 {
2416 if (!gsi_end_p (gsi))
2417 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2418 else
2419 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2420 }
2421
2422 return note;
2423 }
2424
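/* Insert INIT_STMT, the initialization of a parameter copy, at the end
   of basic block BB and regimplify its operands; when debug info is
   enabled, also emit a debug bind for the initialized variable. */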
2425 static void
2426 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2427 {
2428 /* If VAR represents a zero-sized variable, it's possible that the
2429 assignment statement may result in no gimple statements. */
2430 if (init_stmt)
2431 {
2432 gimple_stmt_iterator si = gsi_last_bb (bb);
2433
2434 /* We can end up with init statements that store to a non-register
2435 from a rhs with a conversion. Handle that here by forcing the
2436 rhs into a temporary. gimple_regimplify_operands is not
2437 prepared to do this for us. */
2438 if (!is_gimple_debug (init_stmt)
2439 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2440 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2441 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2442 {
2443 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2444 gimple_expr_type (init_stmt),
2445 gimple_assign_rhs1 (init_stmt));
2446 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2447 GSI_NEW_STMT);
2448 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2449 gimple_assign_set_rhs1 (init_stmt, rhs);
2450 }
2451 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2452 gimple_regimplify_operands (init_stmt, &si);
2453 mark_symbols_for_renaming (init_stmt);
2454
2455 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2456 {
2457 tree var, def = gimple_assign_lhs (init_stmt);
2458
2459 if (TREE_CODE (def) == SSA_NAME)
2460 var = SSA_NAME_VAR (def);
2461 else
2462 var = def;
2463
2464 insert_init_debug_bind (id, bb, var, def, init_stmt);
2465 }
2466 }
2467 }
2468
2469 /* Initialize parameter P with VALUE. If needed, produce the init statement
2470 at the end of BB. When BB is NULL, we return the init statement to be
2471 output later. */
2472 static gimple
2473 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2474 basic_block bb, tree *vars)
2475 {
2476 gimple init_stmt = NULL;
2477 tree var;
2478 tree rhs = value;
2479 tree def = (gimple_in_ssa_p (cfun)
2480 ? gimple_default_def (id->src_cfun, p) : NULL);
2481
2482 if (value
2483 && value != error_mark_node
2484 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2485 {
2486 if (fold_convertible_p (TREE_TYPE (p), value))
2487 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2488 else
2489 /* ??? For valid (GIMPLE) programs we should not end up here.
2490 Still if something has gone wrong and we end up with truly
2491 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2492 to not leak invalid GIMPLE to the following passes. */
2493 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2494 }
2495
2496 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2497 here since the type of this decl must be visible to the calling
2498 function. */
2499 var = copy_decl_to_var (p, id);
2500
2501 /* We're actually using the newly-created var. */
2502 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2503 {
2504 get_var_ann (var);
2505 add_referenced_var (var);
2506 }
2507
2508 /* Declare this new variable. */
2509 TREE_CHAIN (var) = *vars;
2510 *vars = var;
2511
2512 /* Make gimplifier happy about this variable. */
2513 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2514
2515 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2516 we would not need to create a new variable here at all, if it
2517 weren't for debug info. Still, we can just use the argument
2518 value. */
2519 if (TREE_READONLY (p)
2520 && !TREE_ADDRESSABLE (p)
2521 && value && !TREE_SIDE_EFFECTS (value)
2522 && !def)
2523 {
2524 /* We may produce non-gimple trees by adding NOPs or introduce
2525 invalid sharing when the operand is not really constant.
2526 It is not a big deal to prohibit constant propagation here, as
2527 we will constant propagate in the DOM1 pass anyway. */
2528 if (is_gimple_min_invariant (value)
2529 && useless_type_conversion_p (TREE_TYPE (p),
2530 TREE_TYPE (value))
2531 /* We have to be very careful about ADDR_EXPR. Make sure
2532 the base variable isn't a local variable of the inlined
2533 function, e.g., when doing recursive inlining, direct or
2534 mutually-recursive or whatever, which is why we don't
2535 just test whether fn == current_function_decl. */
2536 && ! self_inlining_addr_expr (value, fn))
2537 {
2538 insert_decl_map (id, p, value);
2539 insert_debug_decl_map (id, p, var);
2540 return insert_init_debug_bind (id, bb, var, value, NULL);
2541 }
2542 }
2543
2544 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2545 that way, when the PARM_DECL is encountered, it will be
2546 automatically replaced by the VAR_DECL. */
2547 insert_decl_map (id, p, var);
2548
2549 /* Even if P was TREE_READONLY, the new VAR should not be.
2550 In the original code, we would have constructed a
2551 temporary, and then the function body would have never
2552 changed the value of P. However, now, we will be
2553 constructing VAR directly. The constructor body may
2554 change its value multiple times as it is being
2555 constructed. Therefore, it must not be TREE_READONLY;
2556 the back-end assumes that TREE_READONLY variable is
2557 assigned to only once. */
2558 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2559 TREE_READONLY (var) = 0;
2560
2561 /* If there is no setup required and we are in SSA, take the easy route,
2562 replacing all SSA names representing the function parameter by the
2563 SSA name passed to the function.
2564
2565 We need to construct a map for the variable anyway, as it might be used
2566 in different SSA names when the parameter is set in the function.
2567
2568 Do the replacement at -O0 for const arguments replaced by constants.
2569 This is important for builtin_constant_p and other constructs requiring
2570 a constant argument to be visible in the inlined function body. */
2571 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2572 && (optimize
2573 || (TREE_READONLY (p)
2574 && is_gimple_min_invariant (rhs)))
2575 && (TREE_CODE (rhs) == SSA_NAME
2576 || is_gimple_min_invariant (rhs))
2577 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2578 {
2579 insert_decl_map (id, def, rhs);
2580 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2581 }
2582
2583 /* If the value of the argument is never used, don't bother initializing
2584 it. */
2585 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2586 {
2587 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2588 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2589 }
2590
2591 /* Initialize this VAR_DECL from the equivalent argument. Convert
2592 the argument to the proper type in case it was promoted. */
2593 if (value)
2594 {
2595 if (rhs == error_mark_node)
2596 {
2597 insert_decl_map (id, p, var);
2598 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2599 }
2600
2601 STRIP_USELESS_TYPE_CONVERSION (rhs);
2602
2603 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2604 keep our trees in gimple form. */
2605 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2606 {
2607 def = remap_ssa_name (def, id);
2608 init_stmt = gimple_build_assign (def, rhs);
2609 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2610 set_default_def (var, NULL);
2611 }
2612 else
2613 init_stmt = gimple_build_assign (var, rhs);
2614
2615 if (bb && init_stmt)
2616 insert_init_stmt (id, bb, init_stmt);
2617 }
2618 return init_stmt;
2619 }
2620
2621 /* Generate code to initialize the parameters of the function at the
2622 top of the stack in ID from the GIMPLE_CALL STMT. */
2623
2624 static void
2625 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2626 tree fn, basic_block bb)
2627 {
2628 tree parms;
2629 size_t i;
2630 tree p;
2631 tree vars = NULL_TREE;
2632 tree static_chain = gimple_call_chain (stmt);
2633
2634 /* Figure out what the parameters are. */
2635 parms = DECL_ARGUMENTS (fn);
2636
2637 /* Loop through the parameter declarations, replacing each with an
2638 equivalent VAR_DECL, appropriately initialized. */
2639 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2640 {
2641 tree val;
2642 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2643 setup_one_parameter (id, p, val, fn, bb, &vars);
2644 }
2645 /* After remapping parameters remap their types. This has to be done
2646 in a second loop over all parameters to appropriately remap
2647 variable sized arrays when the size is specified in a
2648 parameter following the array. */
2649 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2650 {
2651 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2652 if (varp
2653 && TREE_CODE (*varp) == VAR_DECL)
2654 {
2655 tree def = (gimple_in_ssa_p (cfun)
2656 ? gimple_default_def (id->src_cfun, p) : NULL);
2657 TREE_TYPE (*varp) = remap_type (TREE_TYPE (*varp), id);
2658 /* Also remap the default definition if it was remapped
2659 to the default definition of the parameter replacement
2660 by the parameter setup. */
2661 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2662 {
2663 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2664 if (defp
2665 && TREE_CODE (*defp) == SSA_NAME
2666 && SSA_NAME_VAR (*defp) == *varp)
2667 TREE_TYPE (*defp) = TREE_TYPE (*varp);
2668 }
2669 }
2670 }
2671
2672 /* Initialize the static chain. */
2673 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2674 gcc_assert (fn != current_function_decl);
2675 if (p)
2676 {
2677 /* No static chain? Seems like a bug in tree-nested.c. */
2678 gcc_assert (static_chain);
2679
2680 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2681 }
2682
2683 declare_inline_vars (id->block, vars);
2684 }
2685
2686
2687 /* Declare a return variable to replace the RESULT_DECL for the
2688 function we are calling. An appropriate DECL_STMT is returned.
2689 The USE_STMT is filled to contain a use of the declaration to
2690 indicate the return value of the function.
2691
2692 RETURN_SLOT, if non-null, is the place where to store the result. It
2693 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2694 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2695
2696 The return value is a (possibly null) value that holds the result
2697 as seen by the caller. */
2698
2699 static tree
2700 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2701 {
2702 tree callee = id->src_fn;
2703 tree caller = id->dst_fn;
2704 tree result = DECL_RESULT (callee);
2705 tree callee_type = TREE_TYPE (result);
2706 tree caller_type;
2707 tree var, use;
2708
2709 /* Handle type-mismatches in the function declaration return type
2710 vs. the call expression. */
2711 if (modify_dest)
2712 caller_type = TREE_TYPE (modify_dest);
2713 else
2714 caller_type = TREE_TYPE (TREE_TYPE (callee));
2715
2716 /* We don't need to do anything for functions that don't return
2717 anything. */
2718 if (!result || VOID_TYPE_P (callee_type))
2719 return NULL_TREE;
2720
2721 /* If there was a return slot, then the return value is the
2722 dereferenced address of that object. */
2723 if (return_slot)
2724 {
2725 /* The front end shouldn't have used both return_slot and
2726 a modify expression. */
2727 gcc_assert (!modify_dest);
2728 if (DECL_BY_REFERENCE (result))
2729 {
2730 tree return_slot_addr = build_fold_addr_expr (return_slot);
2731 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2732
2733 /* We are going to construct *&return_slot and we can't do that
2734 for variables not believed to be addressable.
2735
2736 FIXME: This check can possibly match, because values returned
2737 via return slot optimization are not believed to have their
2738 address taken by alias analysis. */
2739 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2740 var = return_slot_addr;
2741 }
2742 else
2743 {
2744 var = return_slot;
2745 gcc_assert (TREE_CODE (var) != SSA_NAME);
2746 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2747 }
2748 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2749 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2750 && !DECL_GIMPLE_REG_P (result)
2751 && DECL_P (var))
2752 DECL_GIMPLE_REG_P (var) = 0;
2753 use = NULL;
2754 goto done;
2755 }
2756
2757 /* All types requiring non-trivial constructors should have been handled. */
2758 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2759
2760 /* Attempt to avoid creating a new temporary variable. */
2761 if (modify_dest
2762 && TREE_CODE (modify_dest) != SSA_NAME)
2763 {
2764 bool use_it = false;
2765
2766 /* We can't use MODIFY_DEST if there's type promotion involved. */
2767 if (!useless_type_conversion_p (callee_type, caller_type))
2768 use_it = false;
2769
2770 /* ??? If we're assigning to a variable sized type, then we must
2771 reuse the destination variable, because we've no good way to
2772 create variable sized temporaries at this point. */
2773 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2774 use_it = true;
2775
2776 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2777 reuse it as the result of the call directly. Don't do this if
2778 it would promote MODIFY_DEST to addressable. */
2779 else if (TREE_ADDRESSABLE (result))
2780 use_it = false;
2781 else
2782 {
2783 tree base_m = get_base_address (modify_dest);
2784
2785 /* If the base isn't a decl, then it's a pointer, and we don't
2786 know where that's going to go. */
2787 if (!DECL_P (base_m))
2788 use_it = false;
2789 else if (is_global_var (base_m))
2790 use_it = false;
2791 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2792 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2793 && !DECL_GIMPLE_REG_P (result)
2794 && DECL_GIMPLE_REG_P (base_m))
2795 use_it = false;
2796 else if (!TREE_ADDRESSABLE (base_m))
2797 use_it = true;
2798 }
2799
2800 if (use_it)
2801 {
2802 var = modify_dest;
2803 use = NULL;
2804 goto done;
2805 }
2806 }
2807
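/* From here on we create a new temporary for the result, so its size
   must be constant; variable-sized results are presumably caught by
   the return slot or MODIFY_DEST paths above. */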
2808 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2809
2810 var = copy_result_decl_to_var (result, id);
2811 if (gimple_in_ssa_p (cfun))
2812 {
2813 get_var_ann (var);
2814 add_referenced_var (var);
2815 }
2816
2817 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2818 DECL_STRUCT_FUNCTION (caller)->local_decls
2819 = tree_cons (NULL_TREE, var,
2820 DECL_STRUCT_FUNCTION (caller)->local_decls);
2821
2822 /* Do not have the rest of GCC warn about this variable as it should
2823 not be visible to the user. */
2824 TREE_NO_WARNING (var) = 1;
2825
2826 declare_inline_vars (id->block, var);
2827
2828 /* Build the use expr. If the return type of the function was
2829 promoted, convert it back to the expected type. */
2830 use = var;
2831 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2832 use = fold_convert (caller_type, var);
2833
2834 STRIP_USELESS_TYPE_CONVERSION (use);
2835
2836 if (DECL_BY_REFERENCE (result))
2837 {
2838 TREE_ADDRESSABLE (var) = 1;
2839 var = build_fold_addr_expr (var);
2840 }
2841
2842 done:
2843 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2844 way, when the RESULT_DECL is encountered, it will be
2845 automatically replaced by the VAR_DECL. */
2846 insert_decl_map (id, result, var);
2847
2848 /* Remember this so we can ignore it in remap_decls. */
2849 id->retvar = var;
2850
2851 return use;
2852 }
2853
2854 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2855 to a local label. */
2856
2857 static tree
2858 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2859 {
2860 tree node = *nodep;
2861 tree fn = (tree) fnp;
2862
2863 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2864 return node;
2865
2866 if (TYPE_P (node))
2867 *walk_subtrees = 0;
2868
2869 return NULL_TREE;
2870 }
2871
2872 /* Determine if the function can be copied. If so return NULL. If
2873 not return a string describing the reason for failure. */
2874
2875 static const char *
2876 copy_forbidden (struct function *fun, tree fndecl)
2877 {
2878 const char *reason = fun->cannot_be_copied_reason;
2879 tree step;
2880
2881 /* Only examine the function once. */
2882 if (fun->cannot_be_copied_set)
2883 return reason;
2884
2885 /* We cannot copy a function that receives a non-local goto
2886 because we cannot remap the destination label used in the
2887 function that is performing the non-local goto. */
2888 /* ??? Actually, this should be possible, if we work at it.
2889 No doubt there's just a handful of places that simply
2890 assume it doesn't happen and don't substitute properly. */
2891 if (fun->has_nonlocal_label)
2892 {
2893 reason = G_("function %q+F can never be copied "
2894 "because it receives a non-local goto");
2895 goto fail;
2896 }
2897
2898 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2899 {
2900 tree decl = TREE_VALUE (step);
2901
2902 if (TREE_CODE (decl) == VAR_DECL
2903 && TREE_STATIC (decl)
2904 && !DECL_EXTERNAL (decl)
2905 && DECL_INITIAL (decl)
2906 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2907 has_label_address_in_static_1,
2908 fndecl))
2909 {
2910 reason = G_("function %q+F can never be copied because it saves "
2911 "address of local label in a static variable");
2912 goto fail;
2913 }
2914 }
2915
2916 fail:
2917 fun->cannot_be_copied_reason = reason;
2918 fun->cannot_be_copied_set = true;
2919 return reason;
2920 }
2921
2922
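/* Holds the reason why a function cannot be inlined, as set by
   inline_forbidden_p and its statement walker. */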
2923 static const char *inline_forbidden_reason;
2924
2925 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2926 iff a function cannot be inlined. Also sets the reason why. */
2927
2928 static tree
2929 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2930 struct walk_stmt_info *wip)
2931 {
2932 tree fn = (tree) wip->info;
2933 tree t;
2934 gimple stmt = gsi_stmt (*gsi);
2935
2936 switch (gimple_code (stmt))
2937 {
2938 case GIMPLE_CALL:
2939 /* Refuse to inline an alloca call unless the user explicitly forced it,
2940 as this may change the program's memory overhead drastically when the
2941 function using alloca is called in a loop. In the GCC present in
2942 SPEC2000, inlining into schedule_block caused it to require 2GB of
2943 RAM instead of 256MB. */
2944 if (gimple_alloca_call_p (stmt)
2945 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2946 {
2947 inline_forbidden_reason
2948 = G_("function %q+F can never be inlined because it uses "
2949 "alloca (override using the always_inline attribute)");
2950 *handled_ops_p = true;
2951 return fn;
2952 }
2953
2954 t = gimple_call_fndecl (stmt);
2955 if (t == NULL_TREE)
2956 break;
2957
2958 /* We cannot inline functions that call setjmp. */
2959 if (setjmp_call_p (t))
2960 {
2961 inline_forbidden_reason
2962 = G_("function %q+F can never be inlined because it uses setjmp");
2963 *handled_ops_p = true;
2964 return t;
2965 }
2966
2967 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2968 switch (DECL_FUNCTION_CODE (t))
2969 {
2970 /* We cannot inline functions that take a variable number of
2971 arguments. */
2972 case BUILT_IN_VA_START:
2973 case BUILT_IN_NEXT_ARG:
2974 case BUILT_IN_VA_END:
2975 inline_forbidden_reason
2976 = G_("function %q+F can never be inlined because it "
2977 "uses variable argument lists");
2978 *handled_ops_p = true;
2979 return t;
2980
2981 case BUILT_IN_LONGJMP:
2982 /* We can't inline functions that call __builtin_longjmp at
2983 all. The non-local goto machinery really requires the
2984 destination be in a different function. If we allow the
2985 function calling __builtin_longjmp to be inlined into the
2986 function calling __builtin_setjmp, Things will Go Awry. */
2987 inline_forbidden_reason
2988 = G_("function %q+F can never be inlined because "
2989 "it uses setjmp-longjmp exception handling");
2990 *handled_ops_p = true;
2991 return t;
2992
2993 case BUILT_IN_NONLOCAL_GOTO:
2994 /* Similarly. */
2995 inline_forbidden_reason
2996 = G_("function %q+F can never be inlined because "
2997 "it uses non-local goto");
2998 *handled_ops_p = true;
2999 return t;
3000
3001 case BUILT_IN_RETURN:
3002 case BUILT_IN_APPLY_ARGS:
3003 /* If a __builtin_apply_args caller would be inlined,
3004 it would be saving arguments of the function it has
3005 been inlined into. Similarly __builtin_return would
3006 return from the function the callee has been inlined into. */
3007 inline_forbidden_reason
3008 = G_("function %q+F can never be inlined because "
3009 "it uses __builtin_return or __builtin_apply_args");
3010 *handled_ops_p = true;
3011 return t;
3012
3013 default:
3014 break;
3015 }
3016 break;
3017
3018 case GIMPLE_GOTO:
3019 t = gimple_goto_dest (stmt);
3020
3021 /* We will not inline a function which uses computed goto. The
3022 addresses of its local labels, which may be tucked into
3023 global storage, are of course not constant across
3024 instantiations, which causes unexpected behavior. */
3025 if (TREE_CODE (t) != LABEL_DECL)
3026 {
3027 inline_forbidden_reason
3028 = G_("function %q+F can never be inlined "
3029 "because it contains a computed goto");
3030 *handled_ops_p = true;
3031 return t;
3032 }
3033 break;
3034
3035 default:
3036 break;
3037 }
3038
3039 *handled_ops_p = false;
3040 return NULL_TREE;
3041 }
3042
3043 /* Return true if FNDECL is a function that cannot be inlined into
3044 another one. */
3045
3046 static bool
3047 inline_forbidden_p (tree fndecl)
3048 {
3049 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3050 struct walk_stmt_info wi;
3051 struct pointer_set_t *visited_nodes;
3052 basic_block bb;
3053 bool forbidden_p = false;
3054
3055 /* First check for shared reasons not to copy the code. */
3056 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3057 if (inline_forbidden_reason != NULL)
3058 return true;
3059
3060 /* Next, walk the statements of the function looking for
3061 constructs we can't handle, or that are non-optimal for inlining. */
3062 visited_nodes = pointer_set_create ();
3063 memset (&wi, 0, sizeof (wi));
3064 wi.info = (void *) fndecl;
3065 wi.pset = visited_nodes;
3066
3067 FOR_EACH_BB_FN (bb, fun)
3068 {
3069 gimple ret;
3070 gimple_seq seq = bb_seq (bb);
3071 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3072 forbidden_p = (ret != NULL);
3073 if (forbidden_p)
3074 break;
3075 }
3076
3077 pointer_set_destroy (visited_nodes);
3078 return forbidden_p;
3079 }
3080
3081 /* Return true if CALLEE cannot be inlined into CALLER. */
3082
3083 static bool
3084 inline_forbidden_into_p (tree caller, tree callee)
3085 {
3086 /* Don't inline if the functions have different EH personalities. */
3087 if (DECL_FUNCTION_PERSONALITY (caller)
3088 && DECL_FUNCTION_PERSONALITY (callee)
3089 && (DECL_FUNCTION_PERSONALITY (caller)
3090 != DECL_FUNCTION_PERSONALITY (callee)))
3091 return true;
3092
3093 /* Don't inline if the callee can throw non-call exceptions but the
3094 caller cannot. */
3095 if (DECL_STRUCT_FUNCTION (callee)
3096 && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
3097 && !(DECL_STRUCT_FUNCTION (caller)
3098 && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
3099 return true;
3100
3101 return false;
3102 }
3103
3104 /* Returns nonzero if FN is a function that does not have any
3105 fundamental inline blocking properties. */
3106
3107 bool
3108 tree_inlinable_function_p (tree fn)
3109 {
3110 bool inlinable = true;
3111 bool do_warning;
3112 tree always_inline;
3113
3114 /* If we've already decided this function shouldn't be inlined,
3115 there's no need to check again. */
3116 if (DECL_UNINLINABLE (fn))
3117 return false;
3118
3119 /* We only warn for functions declared `inline' by the user. */
3120 do_warning = (warn_inline
3121 && DECL_DECLARED_INLINE_P (fn)
3122 && !DECL_NO_INLINE_WARNING_P (fn)
3123 && !DECL_IN_SYSTEM_HEADER (fn));
3124
3125 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3126
3127 if (flag_no_inline
3128 && always_inline == NULL)
3129 {
3130 if (do_warning)
3131 warning (OPT_Winline, "function %q+F can never be inlined because it "
3132 "is suppressed using -fno-inline", fn);
3133 inlinable = false;
3134 }
3135
3136 /* Don't auto-inline anything that might not be bound within
3137 this unit of translation. */
3138 else if (!DECL_DECLARED_INLINE_P (fn)
3139 && DECL_REPLACEABLE_P (fn))
3140 inlinable = false;
3141
3142 else if (!function_attribute_inlinable_p (fn))
3143 {
3144 if (do_warning)
3145 warning (OPT_Winline, "function %q+F can never be inlined because it "
3146 "uses attributes conflicting with inlining", fn);
3147 inlinable = false;
3148 }
3149
3150 else if (inline_forbidden_p (fn))
3151 {
3152 /* See if we should warn about uninlinable functions. Previously,
3153 some of these warnings would be issued while trying to expand
3154 the function inline, but that would cause multiple warnings
3155 about functions that would for example call alloca. But since
3156 this a property of the function, just one warning is enough.
3157 As a bonus we can now give more details about the reason why a
3158 function is not inlinable. */
3159 if (always_inline)
3160 sorry (inline_forbidden_reason, fn);
3161 else if (do_warning)
3162 warning (OPT_Winline, inline_forbidden_reason, fn);
3163
3164 inlinable = false;
3165 }
3166
3167 /* Squirrel away the result so that we don't have to check again. */
3168 DECL_UNINLINABLE (fn) = !inlinable;
3169
3170 return inlinable;
3171 }
3172
3173 /* Estimate the cost of a memory move. Use the machine-dependent
3174 word size and take a possible memcpy call into account. */
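/* For example, with an 8-byte MOVE_MAX_PIECES a 24-byte aggregate that
   is still moved piecewise costs (24 + 8 - 1) / 8 = 3; anything larger
   than MOVE_MAX_PIECES * MOVE_RATIO is charged a flat 4, modelling a
   memcpy call. */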
3175
3176 int
3177 estimate_move_cost (tree type)
3178 {
3179 HOST_WIDE_INT size;
3180
3181 gcc_assert (!VOID_TYPE_P (type));
3182
3183 size = int_size_in_bytes (type);
3184
3185 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3186 /* Cost of a memcpy call, 3 arguments and the call. */
3187 return 4;
3188 else
3189 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3190 }
3191
3192 /* Returns the cost of operation CODE, according to WEIGHTS. OP2, when given, is used to detect division or modulo by a constant. */
3193
3194 static int
3195 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3196 tree op1 ATTRIBUTE_UNUSED, tree op2)
3197 {
3198 switch (code)
3199 {
3200 /* These are "free" conversions, or their presumed cost
3201 is folded into other operations. */
3202 case RANGE_EXPR:
3203 CASE_CONVERT:
3204 case COMPLEX_EXPR:
3205 case PAREN_EXPR:
3206 return 0;
3207
3208 /* Assign cost of 1 to usual operations.
3209 ??? We may consider mapping RTL costs to this. */
3210 case COND_EXPR:
3211 case VEC_COND_EXPR:
3212
3213 case PLUS_EXPR:
3214 case POINTER_PLUS_EXPR:
3215 case MINUS_EXPR:
3216 case MULT_EXPR:
3217
3218 case ADDR_SPACE_CONVERT_EXPR:
3219 case FIXED_CONVERT_EXPR:
3220 case FIX_TRUNC_EXPR:
3221
3222 case NEGATE_EXPR:
3223 case FLOAT_EXPR:
3224 case MIN_EXPR:
3225 case MAX_EXPR:
3226 case ABS_EXPR:
3227
3228 case LSHIFT_EXPR:
3229 case RSHIFT_EXPR:
3230 case LROTATE_EXPR:
3231 case RROTATE_EXPR:
3232 case VEC_LSHIFT_EXPR:
3233 case VEC_RSHIFT_EXPR:
3234
3235 case BIT_IOR_EXPR:
3236 case BIT_XOR_EXPR:
3237 case BIT_AND_EXPR:
3238 case BIT_NOT_EXPR:
3239
3240 case TRUTH_ANDIF_EXPR:
3241 case TRUTH_ORIF_EXPR:
3242 case TRUTH_AND_EXPR:
3243 case TRUTH_OR_EXPR:
3244 case TRUTH_XOR_EXPR:
3245 case TRUTH_NOT_EXPR:
3246
3247 case LT_EXPR:
3248 case LE_EXPR:
3249 case GT_EXPR:
3250 case GE_EXPR:
3251 case EQ_EXPR:
3252 case NE_EXPR:
3253 case ORDERED_EXPR:
3254 case UNORDERED_EXPR:
3255
3256 case UNLT_EXPR:
3257 case UNLE_EXPR:
3258 case UNGT_EXPR:
3259 case UNGE_EXPR:
3260 case UNEQ_EXPR:
3261 case LTGT_EXPR:
3262
3263 case CONJ_EXPR:
3264
3265 case PREDECREMENT_EXPR:
3266 case PREINCREMENT_EXPR:
3267 case POSTDECREMENT_EXPR:
3268 case POSTINCREMENT_EXPR:
3269
3270 case REALIGN_LOAD_EXPR:
3271
3272 case REDUC_MAX_EXPR:
3273 case REDUC_MIN_EXPR:
3274 case REDUC_PLUS_EXPR:
3275 case WIDEN_SUM_EXPR:
3276 case WIDEN_MULT_EXPR:
3277 case DOT_PROD_EXPR:
3278 case WIDEN_MULT_PLUS_EXPR:
3279 case WIDEN_MULT_MINUS_EXPR:
3280
3281 case VEC_WIDEN_MULT_HI_EXPR:
3282 case VEC_WIDEN_MULT_LO_EXPR:
3283 case VEC_UNPACK_HI_EXPR:
3284 case VEC_UNPACK_LO_EXPR:
3285 case VEC_UNPACK_FLOAT_HI_EXPR:
3286 case VEC_UNPACK_FLOAT_LO_EXPR:
3287 case VEC_PACK_TRUNC_EXPR:
3288 case VEC_PACK_SAT_EXPR:
3289 case VEC_PACK_FIX_TRUNC_EXPR:
3290 case VEC_EXTRACT_EVEN_EXPR:
3291 case VEC_EXTRACT_ODD_EXPR:
3292 case VEC_INTERLEAVE_HIGH_EXPR:
3293 case VEC_INTERLEAVE_LOW_EXPR:
3294
3295 return 1;
3296
3297 /* A few special cases of expensive operations. This is useful
3298 to avoid inlining functions having too many of these. */
3299 case TRUNC_DIV_EXPR:
3300 case CEIL_DIV_EXPR:
3301 case FLOOR_DIV_EXPR:
3302 case ROUND_DIV_EXPR:
3303 case EXACT_DIV_EXPR:
3304 case TRUNC_MOD_EXPR:
3305 case CEIL_MOD_EXPR:
3306 case FLOOR_MOD_EXPR:
3307 case ROUND_MOD_EXPR:
3308 case RDIV_EXPR:
3309 if (TREE_CODE (op2) != INTEGER_CST)
3310 return weights->div_mod_cost;
3311 return 1;
3312
3313 default:
3314 /* We expect a copy assignment with no operator. */
3315 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3316 return 0;
3317 }
3318 }
3319
3320
3321 /* Estimate number of instructions that will be created by expanding
3322 the statements in the statement sequence STMTS.
3323 WEIGHTS contains weights attributed to various constructs. */
3324
3325 static
3326 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3327 {
3328 int cost;
3329 gimple_stmt_iterator gsi;
3330
3331 cost = 0;
3332 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3333 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3334
3335 return cost;
3336 }
3337
3338
3339 /* Estimate number of instructions that will be created by expanding STMT.
3340 WEIGHTS contains weights attributed to various constructs. */
3341
3342 int
3343 estimate_num_insns (gimple stmt, eni_weights *weights)
3344 {
3345 unsigned cost, i;
3346 enum gimple_code code = gimple_code (stmt);
3347 tree lhs;
3348 tree rhs;
3349
3350 switch (code)
3351 {
3352 case GIMPLE_ASSIGN:
3353 /* Try to estimate the cost of assignments. We have two cases to
3354 deal with:
3355 1) Simple assignments to registers;
3356 2) Stores to things that must live in memory. This includes
3357 "normal" stores to scalars, but also assignments of large
3358 structures, or constructors of big arrays;
3359
3360 Let us look at both cases, assuming we have "a = b + C":
3361 <GIMPLE_ASSIGN <var_decl "a">
3362 <plus_expr <var_decl "b"> <constant C>>
3363 If "a" is a GIMPLE register, the assignment to it is free on almost
3364 any target, because "a" usually ends up in a real register. Hence
3365 the only cost of this expression comes from the PLUS_EXPR, and we
3366 can ignore the GIMPLE_ASSIGN.
3367 If "a" is not a GIMPLE register, the assignment to "a" will most
3368 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3369 of moving something into "a", which we compute using the function
3370 estimate_move_cost. */
3371 lhs = gimple_assign_lhs (stmt);
3372 rhs = gimple_assign_rhs1 (stmt);
3373
3374 if (is_gimple_reg (lhs))
3375 cost = 0;
3376 else
3377 cost = estimate_move_cost (TREE_TYPE (lhs));
3378
3379 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3380 cost += estimate_move_cost (TREE_TYPE (rhs));
3381
3382 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3383 gimple_assign_rhs1 (stmt),
3384 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3385 == GIMPLE_BINARY_RHS
3386 ? gimple_assign_rhs2 (stmt) : NULL);
3387 break;
3388
3389 case GIMPLE_COND:
3390 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3391 gimple_op (stmt, 0),
3392 gimple_op (stmt, 1));
3393 break;
3394
3395 case GIMPLE_SWITCH:
3396 /* Take into account cost of the switch + guess 2 conditional jumps for
3397 each case label.
3398
3399 TODO: once the switch expansion logic is sufficiently separated, we can
3400 do a better job of estimating the cost of the switch. */
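/* E.g. a switch with 8 labels is charged floor_log2 (8) * 2 = 6 when
   estimating time, or 8 * 2 = 16 when estimating size. */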
3401 if (weights->time_based)
3402 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3403 else
3404 cost = gimple_switch_num_labels (stmt) * 2;
3405 break;
3406
3407 case GIMPLE_CALL:
3408 {
3409 tree decl = gimple_call_fndecl (stmt);
3410 tree addr = gimple_call_fn (stmt);
3411 tree funtype = TREE_TYPE (addr);
3412 bool stdarg = false;
3413
3414 if (POINTER_TYPE_P (funtype))
3415 funtype = TREE_TYPE (funtype);
3416
3417 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3418 cost = weights->target_builtin_call_cost;
3419 else
3420 cost = weights->call_cost;
3421
3422 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3423 switch (DECL_FUNCTION_CODE (decl))
3424 {
3425 /* Builtins that expand to constants. */
3426 case BUILT_IN_CONSTANT_P:
3427 case BUILT_IN_EXPECT:
3428 case BUILT_IN_OBJECT_SIZE:
3429 case BUILT_IN_UNREACHABLE:
3430 /* Simple register moves or loads from stack. */
3431 case BUILT_IN_RETURN_ADDRESS:
3432 case BUILT_IN_EXTRACT_RETURN_ADDR:
3433 case BUILT_IN_FROB_RETURN_ADDR:
3434 case BUILT_IN_RETURN:
3435 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3436 case BUILT_IN_FRAME_ADDRESS:
3437 case BUILT_IN_VA_END:
3438 case BUILT_IN_STACK_SAVE:
3439 case BUILT_IN_STACK_RESTORE:
3440 /* Exception state returns or moves registers around. */
3441 case BUILT_IN_EH_FILTER:
3442 case BUILT_IN_EH_POINTER:
3443 case BUILT_IN_EH_COPY_VALUES:
3444 return 0;
3445
3446 /* Builtins that are not expensive (that is, they are most probably
3447 expanded inline into reasonably simple code). */
3448 case BUILT_IN_ABS:
3449 case BUILT_IN_ALLOCA:
3450 case BUILT_IN_BSWAP32:
3451 case BUILT_IN_BSWAP64:
3452 case BUILT_IN_CLZ:
3453 case BUILT_IN_CLZIMAX:
3454 case BUILT_IN_CLZL:
3455 case BUILT_IN_CLZLL:
3456 case BUILT_IN_CTZ:
3457 case BUILT_IN_CTZIMAX:
3458 case BUILT_IN_CTZL:
3459 case BUILT_IN_CTZLL:
3460 case BUILT_IN_FFS:
3461 case BUILT_IN_FFSIMAX:
3462 case BUILT_IN_FFSL:
3463 case BUILT_IN_FFSLL:
3464 case BUILT_IN_IMAXABS:
3465 case BUILT_IN_FINITE:
3466 case BUILT_IN_FINITEF:
3467 case BUILT_IN_FINITEL:
3468 case BUILT_IN_FINITED32:
3469 case BUILT_IN_FINITED64:
3470 case BUILT_IN_FINITED128:
3471 case BUILT_IN_FPCLASSIFY:
3472 case BUILT_IN_ISFINITE:
3473 case BUILT_IN_ISINF_SIGN:
3474 case BUILT_IN_ISINF:
3475 case BUILT_IN_ISINFF:
3476 case BUILT_IN_ISINFL:
3477 case BUILT_IN_ISINFD32:
3478 case BUILT_IN_ISINFD64:
3479 case BUILT_IN_ISINFD128:
3480 case BUILT_IN_ISNAN:
3481 case BUILT_IN_ISNANF:
3482 case BUILT_IN_ISNANL:
3483 case BUILT_IN_ISNAND32:
3484 case BUILT_IN_ISNAND64:
3485 case BUILT_IN_ISNAND128:
3486 case BUILT_IN_ISNORMAL:
3487 case BUILT_IN_ISGREATER:
3488 case BUILT_IN_ISGREATEREQUAL:
3489 case BUILT_IN_ISLESS:
3490 case BUILT_IN_ISLESSEQUAL:
3491 case BUILT_IN_ISLESSGREATER:
3492 case BUILT_IN_ISUNORDERED:
3493 case BUILT_IN_VA_ARG_PACK:
3494 case BUILT_IN_VA_ARG_PACK_LEN:
3495 case BUILT_IN_VA_COPY:
3496 case BUILT_IN_TRAP:
3497 case BUILT_IN_SAVEREGS:
3498 case BUILT_IN_POPCOUNTL:
3499 case BUILT_IN_POPCOUNTLL:
3500 case BUILT_IN_POPCOUNTIMAX:
3501 case BUILT_IN_POPCOUNT:
3502 case BUILT_IN_PARITYL:
3503 case BUILT_IN_PARITYLL:
3504 case BUILT_IN_PARITYIMAX:
3505 case BUILT_IN_PARITY:
3506 case BUILT_IN_LABS:
3507 case BUILT_IN_LLABS:
3508 case BUILT_IN_PREFETCH:
3509 cost = weights->target_builtin_call_cost;
3510 break;
3511
3512 default:
3513 break;
3514 }
3515
3516 if (decl)
3517 funtype = TREE_TYPE (decl);
3518
3519 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3520 cost += estimate_move_cost (TREE_TYPE (funtype));
3521
3522 if (funtype)
3523 stdarg = stdarg_p (funtype);
3524
3525 /* Our cost must be kept in sync with
3526 cgraph_estimate_size_after_inlining, which does use the function
3527 declaration to figure out the arguments.
3528
3529 For functions taking a variable list of arguments we must
3530 look into the call statement itself. This is safe because
3531 we will get only higher costs and in most cases we will
3532 not inline these anyway. */
3533 if (decl && DECL_ARGUMENTS (decl) && !stdarg)
3534 {
3535 tree arg;
3536 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3537 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3538 cost += estimate_move_cost (TREE_TYPE (arg));
3539 }
3540 else if (funtype && prototype_p (funtype) && !stdarg)
3541 {
3542 tree t;
3543 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3544 t = TREE_CHAIN (t))
3545 if (!VOID_TYPE_P (TREE_VALUE (t)))
3546 cost += estimate_move_cost (TREE_VALUE (t));
3547 }
3548 else
3549 {
3550 for (i = 0; i < gimple_call_num_args (stmt); i++)
3551 {
3552 tree arg = gimple_call_arg (stmt, i);
3553 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3554 cost += estimate_move_cost (TREE_TYPE (arg));
3555 }
3556 }
3557
3558 break;
3559 }
3560
3561 case GIMPLE_GOTO:
3562 case GIMPLE_LABEL:
3563 case GIMPLE_NOP:
3564 case GIMPLE_PHI:
3565 case GIMPLE_RETURN:
3566 case GIMPLE_PREDICT:
3567 case GIMPLE_DEBUG:
3568 return 0;
3569
3570 case GIMPLE_ASM:
3571 return asm_str_count (gimple_asm_string (stmt));
3572
3573 case GIMPLE_RESX:
3574 /* This is either going to be an external function call with one
3575 argument, or two register copy statements plus a goto. */
3576 return 2;
3577
3578 case GIMPLE_EH_DISPATCH:
3579 /* ??? This is going to turn into a switch statement. Ideally
3580 we'd have a look at the eh region and estimate the number of
3581 edges involved. */
3582 return 10;
3583
3584 case GIMPLE_BIND:
3585 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3586
3587 case GIMPLE_EH_FILTER:
3588 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3589
3590 case GIMPLE_CATCH:
3591 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3592
3593 case GIMPLE_TRY:
3594 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3595 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3596
3597 /* OpenMP directives are generally very expensive. */
3598
3599 case GIMPLE_OMP_RETURN:
3600 case GIMPLE_OMP_SECTIONS_SWITCH:
3601 case GIMPLE_OMP_ATOMIC_STORE:
3602 case GIMPLE_OMP_CONTINUE:
3603 /* ...except these, which are cheap. */
3604 return 0;
3605
3606 case GIMPLE_OMP_ATOMIC_LOAD:
3607 return weights->omp_cost;
3608
3609 case GIMPLE_OMP_FOR:
3610 return (weights->omp_cost
3611 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3612 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3613
3614 case GIMPLE_OMP_PARALLEL:
3615 case GIMPLE_OMP_TASK:
3616 case GIMPLE_OMP_CRITICAL:
3617 case GIMPLE_OMP_MASTER:
3618 case GIMPLE_OMP_ORDERED:
3619 case GIMPLE_OMP_SECTION:
3620 case GIMPLE_OMP_SECTIONS:
3621 case GIMPLE_OMP_SINGLE:
3622 return (weights->omp_cost
3623 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3624
3625 default:
3626 gcc_unreachable ();
3627 }
3628
3629 return cost;
3630 }
3631
3632 /* Estimate number of instructions that will be created by expanding
3633 function FNDECL. WEIGHTS contains weights attributed to various
3634 constructs. */
3635
3636 int
3637 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3638 {
3639 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3640 gimple_stmt_iterator bsi;
3641 basic_block bb;
3642 int n = 0;
3643
3644 gcc_assert (my_function && my_function->cfg);
3645 FOR_EACH_BB_FN (bb, my_function)
3646 {
3647 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3648 n += estimate_num_insns (gsi_stmt (bsi), weights);
3649 }
3650
3651 return n;
3652 }
3653
3654
3655 /* Initializes weights used by estimate_num_insns. */
3656
3657 void
3658 init_inline_once (void)
3659 {
3660 eni_size_weights.call_cost = 1;
3661 eni_size_weights.target_builtin_call_cost = 1;
3662 eni_size_weights.div_mod_cost = 1;
3663 eni_size_weights.omp_cost = 40;
3664 eni_size_weights.time_based = false;
3665
3666 /* Estimating time for a call is difficult, since we have no idea what the
3667 called function does. In the current uses of eni_time_weights,
3668 underestimating the cost does less harm than overestimating it, so
3669 we choose a rather small value here. */
3670 eni_time_weights.call_cost = 10;
3671 eni_time_weights.target_builtin_call_cost = 10;
3672 eni_time_weights.div_mod_cost = 10;
3673 eni_time_weights.omp_cost = 40;
3674 eni_time_weights.time_based = true;
3675 }
3676
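/* Illustrative sketch (editor's addition, not part of the original file):
   how a caller might pick between the two weight sets initialized above.
   The helper name and the SIZE_BASED flag are hypothetical.  */

static int
sketch_estimate_fn_cost (tree fndecl, bool size_based)
{
  /* eni_size_weights answers "how big", eni_time_weights "how slow".  */
  eni_weights *weights = size_based ? &eni_size_weights : &eni_time_weights;
  return estimate_num_insns_fn (fndecl, weights);
}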
3677 /* Estimate the number of instructions in a gimple_seq. */
3678
3679 int
3680 count_insns_seq (gimple_seq seq, eni_weights *weights)
3681 {
3682 gimple_stmt_iterator gsi;
3683 int n = 0;
3684 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3685 n += estimate_num_insns (gsi_stmt (gsi), weights);
3686
3687 return n;
3688 }
3689
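/* Illustrative sketch (editor's addition): count_insns_seq can be applied to
   any statement sequence, e.g. the body of a GIMPLE_BIND, using the size
   weights set up in init_inline_once above.  The helper name is
   hypothetical.  */

static int
sketch_count_bind_body_insns (gimple bind_stmt)
{
  return count_insns_seq (gimple_bind_body (bind_stmt), &eni_size_weights);
}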
3690
3691 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3692
3693 static void
3694 prepend_lexical_block (tree current_block, tree new_block)
3695 {
3696 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3697 BLOCK_SUBBLOCKS (current_block) = new_block;
3698 BLOCK_SUPERCONTEXT (new_block) = current_block;
3699 }
3700
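/* Illustrative sketch (editor's addition): after prepend_lexical_block the
   new block is the first subblock of CURRENT_BLOCK and points back to it.
   The helper name is hypothetical.  */

static void
sketch_prepend_lexical_block_example (void)
{
  tree outer = make_node (BLOCK);
  tree inner = make_node (BLOCK);

  prepend_lexical_block (outer, inner);
  gcc_assert (BLOCK_SUBBLOCKS (outer) == inner);
  gcc_assert (BLOCK_SUPERCONTEXT (inner) == outer);
}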
3701 /* Fetch the callee declaration from the call graph edge going from NODE and
3702 associated with the call statement STMT. Return NULL_TREE if not found. */
3703 static tree
3704 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3705 {
3706 struct cgraph_edge *cs;
3707
3708 cs = cgraph_edge (node, stmt);
3709 if (cs && !cs->indirect_unknown_callee)
3710 return cs->callee->decl;
3711
3712 return NULL_TREE;
3713 }
3714
3715 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3716
3717 static bool
3718 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3719 {
3720 tree use_retvar;
3721 tree fn;
3722 struct pointer_map_t *st, *dst;
3723 tree return_slot;
3724 tree modify_dest;
3725 location_t saved_location;
3726 struct cgraph_edge *cg_edge;
3727 cgraph_inline_failed_t reason;
3728 basic_block return_block;
3729 edge e;
3730 gimple_stmt_iterator gsi, stmt_gsi;
3731 bool successfully_inlined = FALSE;
3732 bool purge_dead_abnormal_edges;
3733 tree t_step;
3734 tree var;
3735
3736 /* Set input_location here so we get the right instantiation context
3737 if we call instantiate_decl from inlinable_function_p. */
3738 saved_location = input_location;
3739 if (gimple_has_location (stmt))
3740 input_location = gimple_location (stmt);
3741
3742 /* From here on, we're only interested in CALL_EXPRs. */
3743 if (gimple_code (stmt) != GIMPLE_CALL)
3744 goto egress;
3745
3746 /* First, see if we can figure out what function is being called.
3747 If we cannot, then there is no hope of inlining the function. */
3748 fn = gimple_call_fndecl (stmt);
3749 if (!fn)
3750 {
3751 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3752 if (!fn)
3753 goto egress;
3754 }
3755
3756 /* Turn forward declarations into real ones. */
3757 fn = cgraph_node (fn)->decl;
3758
3759 /* If FN is a declaration of a function in a nested scope that was
3760 globally declared inline, we don't set its DECL_INITIAL.
3761 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3762 C++ front-end uses it for cdtors to refer to their internal
3763 declarations, which are not real functions. Fortunately those
3764 don't have trees to be saved, so we can tell by checking their
3765 gimple_body. */
3766 if (!DECL_INITIAL (fn)
3767 && DECL_ABSTRACT_ORIGIN (fn)
3768 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3769 fn = DECL_ABSTRACT_ORIGIN (fn);
3770
3771 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3772 Kill this check once this is fixed. */
3773 if (!id->dst_node->analyzed)
3774 goto egress;
3775
3776 cg_edge = cgraph_edge (id->dst_node, stmt);
3777
3778 /* First check that inlining isn't simply forbidden in this case. */
3779 if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
3780 goto egress;
3781
3782 /* Don't try to inline functions that are not well-suited to inlining. */
3783 if (!cgraph_inline_p (cg_edge, &reason))
3784 {
3785 /* If this call was originally indirect, we do not want to emit any
3786 inlining related warnings or sorry messages because there are no
3787 guarantees regarding those. */
3788 if (cg_edge->indirect_inlining_edge)
3789 goto egress;
3790
3791 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3792 /* Avoid warnings during early inline pass. */
3793 && cgraph_global_info_ready)
3794 {
3795 sorry ("inlining failed in call to %q+F: %s", fn,
3796 _(cgraph_inline_failed_string (reason)));
3797 sorry ("called from here");
3798 }
3799 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3800 && !DECL_IN_SYSTEM_HEADER (fn)
3801 && reason != CIF_UNSPECIFIED
3802 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3803 /* Avoid warnings during early inline pass. */
3804 && cgraph_global_info_ready)
3805 {
3806 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3807 fn, _(cgraph_inline_failed_string (reason)));
3808 warning (OPT_Winline, "called from here");
3809 }
3810 goto egress;
3811 }
3812 fn = cg_edge->callee->decl;
3813
3814 #ifdef ENABLE_CHECKING
3815 if (cg_edge->callee->decl != id->dst_node->decl)
3816 verify_cgraph_node (cg_edge->callee);
3817 #endif
3818
3819 /* We will be inlining this callee. */
3820 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3821
3822 /* Update the caller's EH personality. */
3823 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3824 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3825 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3826
3827 /* Split the block holding the GIMPLE_CALL. */
3828 e = split_block (bb, stmt);
3829 bb = e->src;
3830 return_block = e->dest;
3831 remove_edge (e);
3832
3833 /* split_block splits after the statement; work around this by
3834 moving the call into the second block manually. Not pretty,
3835 but seems easier than doing the CFG manipulation by hand
3836 when the GIMPLE_CALL is the last statement of BB. */
3837 stmt_gsi = gsi_last_bb (bb);
3838 gsi_remove (&stmt_gsi, false);
3839
3840 /* If the GIMPLE_CALL was the last statement of BB, it may have
3841 been the source of abnormal edges. In this case, schedule
3842 the removal of dead abnormal edges. */
3843 gsi = gsi_start_bb (return_block);
3844 if (gsi_end_p (gsi))
3845 {
3846 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3847 purge_dead_abnormal_edges = true;
3848 }
3849 else
3850 {
3851 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3852 purge_dead_abnormal_edges = false;
3853 }
3854
3855 stmt_gsi = gsi_start_bb (return_block);
3856
3857 /* Build a block containing code to initialize the arguments, the
3858 actual inline expansion of the body, and a label for the return
3859 statements within the function to jump to. The type of the
3860 statement expression is the return type of the function call. */
3861 id->block = make_node (BLOCK);
3862 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3863 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3864 prepend_lexical_block (gimple_block (stmt), id->block);
3865
3866 /* Local declarations will be replaced by their equivalents in this
3867 map. */
3868 st = id->decl_map;
3869 id->decl_map = pointer_map_create ();
3870 dst = id->debug_map;
3871 id->debug_map = NULL;
3872
3873 /* Record the function we are about to inline. */
3874 id->src_fn = fn;
3875 id->src_node = cg_edge->callee;
3876 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3877 id->gimple_call = stmt;
3878
3879 gcc_assert (!id->src_cfun->after_inlining);
3880
3881 id->entry_bb = bb;
3882 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3883 {
3884 gimple_stmt_iterator si = gsi_last_bb (bb);
3885 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3886 NOT_TAKEN),
3887 GSI_NEW_STMT);
3888 }
3889 initialize_inlined_parameters (id, stmt, fn, bb);
3890
3891 if (DECL_INITIAL (fn))
3892 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3893
3894 /* Return statements in the function body will be replaced by jumps
3895 to the RET_LABEL. */
3896 gcc_assert (DECL_INITIAL (fn));
3897 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3898
3899 /* Find the LHS to which the result of this call is assigned. */
3900 return_slot = NULL;
3901 if (gimple_call_lhs (stmt))
3902 {
3903 modify_dest = gimple_call_lhs (stmt);
3904
3905 /* The function which we are inlining might not return a value,
3906 in which case we should issue a warning that the function
3907 does not return a value. In that case the optimizers will
3908 see that the variable to which the value is assigned was not
3909 initialized. We do not want to issue a warning about that
3910 uninitialized variable. */
3911 if (DECL_P (modify_dest))
3912 TREE_NO_WARNING (modify_dest) = 1;
3913
3914 if (gimple_call_return_slot_opt_p (stmt))
3915 {
3916 return_slot = modify_dest;
3917 modify_dest = NULL;
3918 }
3919 }
3920 else
3921 modify_dest = NULL;
3922
3923 /* If we are inlining a call to the C++ operator new, we don't want
3924 to use type based alias analysis on the return value. Otherwise
3925 we may get confused if the compiler sees that the inlined new
3926 function returns a pointer which was just deleted. See bug
3927 33407. */
3928 if (DECL_IS_OPERATOR_NEW (fn))
3929 {
3930 return_slot = NULL;
3931 modify_dest = NULL;
3932 }
3933
3934 /* Declare the return variable for the function. */
3935 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3936
3937 /* Add local vars in this inlined callee to caller. */
3938 t_step = id->src_cfun->local_decls;
3939 for (; t_step; t_step = TREE_CHAIN (t_step))
3940 {
3941 var = TREE_VALUE (t_step);
3942 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3943 {
3944 if (var_ann (var) && add_referenced_var (var))
3945 cfun->local_decls = tree_cons (NULL_TREE, var,
3946 cfun->local_decls);
3947 }
3948 else if (!can_be_nonlocal (var, id))
3949 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3950 cfun->local_decls);
3951 }
3952
3953 if (dump_file && (dump_flags & TDF_DETAILS))
3954 {
3955 fprintf (dump_file, "Inlining ");
3956 print_generic_expr (dump_file, id->src_fn, 0);
3957 fprintf (dump_file, " to ");
3958 print_generic_expr (dump_file, id->dst_fn, 0);
3959 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3960 }
3961
3962 /* This is it. Duplicate the callee body. Assume callee is
3963 pre-gimplified. Note that we must not alter the caller
3964 function in any way before this point, as this CALL_EXPR may be
3965 a self-referential call; if we're calling ourselves, we need to
3966 duplicate our body before altering anything. */
3967 copy_body (id, bb->count,
3968 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3969 bb, return_block, NULL, NULL);
3970
3971 /* Reset the escaped solution. */
3972 if (cfun->gimple_df)
3973 pt_solution_reset (&cfun->gimple_df->escaped);
3974
3975 /* Clean up. */
3976 if (id->debug_map)
3977 {
3978 pointer_map_destroy (id->debug_map);
3979 id->debug_map = dst;
3980 }
3981 pointer_map_destroy (id->decl_map);
3982 id->decl_map = st;
3983
3984 /* Unlink the call's virtual operands before replacing it. */
3985 unlink_stmt_vdef (stmt);
3986
3987 /* If the inlined function returns a result that we care about,
3988 substitute the GIMPLE_CALL with an assignment of the return
3989 variable to the LHS of the call. That is, if STMT was
3990 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3991 if (use_retvar && gimple_call_lhs (stmt))
3992 {
3993 gimple old_stmt = stmt;
3994 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3995 gsi_replace (&stmt_gsi, stmt, false);
3996 if (gimple_in_ssa_p (cfun))
3997 mark_symbols_for_renaming (stmt);
3998 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3999 }
4000 else
4001 {
4002 /* Handle the case of inlining a function with no return
4003 statement, which causes the return value to become undefined. */
4004 if (gimple_call_lhs (stmt)
4005 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4006 {
4007 tree name = gimple_call_lhs (stmt);
4008 tree var = SSA_NAME_VAR (name);
4009 tree def = gimple_default_def (cfun, var);
4010
4011 if (def)
4012 {
4013 /* If the variable is used undefined, make this name
4014 undefined via a move. */
4015 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4016 gsi_replace (&stmt_gsi, stmt, true);
4017 }
4018 else
4019 {
4020 /* Otherwise make this variable undefined. */
4021 gsi_remove (&stmt_gsi, true);
4022 set_default_def (var, name);
4023 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4024 }
4025 }
4026 else
4027 gsi_remove (&stmt_gsi, true);
4028 }
4029
4030 if (purge_dead_abnormal_edges)
4031 gimple_purge_dead_abnormal_call_edges (return_block);
4032
4033 /* If the value of the new expression is ignored, that's OK. We
4034 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4035 the equivalent inlined version either. */
4036 if (is_gimple_assign (stmt))
4037 {
4038 gcc_assert (gimple_assign_single_p (stmt)
4039 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4040 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4041 }
4042
4043 /* Output the inlining info for this abstract function, since it has been
4044 inlined. If we don't do this now, we can lose the information about the
4045 variables in the function when the blocks get blown away as soon as we
4046 remove the cgraph node. */
4047 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4048
4049 /* Update callgraph if needed. */
4050 cgraph_remove_node (cg_edge->callee);
4051
4052 id->block = NULL_TREE;
4053 successfully_inlined = TRUE;
4054
4055 egress:
4056 input_location = saved_location;
4057 return successfully_inlined;
4058 }
4059
4060 /* Expand call statements reachable from basic block BB.
4061 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4062 in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). We can
4063 unfortunately not use that function here because we need a pointer
4064 to the CALL_EXPR, not the tree itself. */
4065
4066 static bool
4067 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4068 {
4069 gimple_stmt_iterator gsi;
4070
4071 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4072 {
4073 gimple stmt = gsi_stmt (gsi);
4074
4075 if (is_gimple_call (stmt)
4076 && expand_call_inline (bb, stmt, id))
4077 return true;
4078 }
4079
4080 return false;
4081 }
4082
4083
4084 /* Walk all basic blocks created after FIRST and try to fold every statement
4085 in the STATEMENTS pointer set. */
4086
4087 static void
4088 fold_marked_statements (int first, struct pointer_set_t *statements)
4089 {
4090 for (; first < n_basic_blocks; first++)
4091 if (BASIC_BLOCK (first))
4092 {
4093 gimple_stmt_iterator gsi;
4094
4095 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4096 !gsi_end_p (gsi);
4097 gsi_next (&gsi))
4098 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4099 {
4100 gimple old_stmt = gsi_stmt (gsi);
4101 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4102
4103 if (old_decl && DECL_BUILT_IN (old_decl))
4104 {
4105 /* Folding builtins can create multiple instructions;
4106 we need to look at all of them. */
4107 gimple_stmt_iterator i2 = gsi;
4108 gsi_prev (&i2);
4109 if (fold_stmt (&gsi))
4110 {
4111 gimple new_stmt;
4112 if (gsi_end_p (i2))
4113 i2 = gsi_start_bb (BASIC_BLOCK (first));
4114 else
4115 gsi_next (&i2);
4116 while (1)
4117 {
4118 new_stmt = gsi_stmt (i2);
4119 update_stmt (new_stmt);
4120 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4121 new_stmt);
4122
4123 if (new_stmt == gsi_stmt (gsi))
4124 {
4125 /* It is okay to check only the very last
4126 of these statements. If it is a throwing
4127 statement nothing will change. If it isn't,
4128 this can remove EH edges. The only way this
4129 could be wrong is if some intermediate stmts
4130 throw, but not the last one. That would mean
4131 we'd have to split the block, which we can't
4132 do here, and we'd lose anyway. And as builtins
4133 probably never throw, this all
4134 is moot anyway. */
4135 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4136 new_stmt))
4137 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4138 break;
4139 }
4140 gsi_next (&i2);
4141 }
4142 }
4143 }
4144 else if (fold_stmt (&gsi))
4145 {
4146 /* Re-read the statement from GSI as fold_stmt() may
4147 have changed it. */
4148 gimple new_stmt = gsi_stmt (gsi);
4149 update_stmt (new_stmt);
4150
4151 if (is_gimple_call (old_stmt)
4152 || is_gimple_call (new_stmt))
4153 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4154 new_stmt);
4155
4156 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4157 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4158 }
4159 }
4160 }
4161 }
4162
4163 /* Return true if BB has at least one abnormal outgoing edge. */
4164
4165 static inline bool
4166 has_abnormal_outgoing_edge_p (basic_block bb)
4167 {
4168 edge e;
4169 edge_iterator ei;
4170
4171 FOR_EACH_EDGE (e, ei, bb->succs)
4172 if (e->flags & EDGE_ABNORMAL)
4173 return true;
4174
4175 return false;
4176 }
4177
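/* Illustrative sketch (editor's addition): the same edge-iterator pattern
   can count abnormal successors rather than just test for one.  The helper
   name is hypothetical.  */

static int
sketch_count_abnormal_succs (basic_block bb)
{
  edge e;
  edge_iterator ei;
  int n = 0;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      n++;

  return n;
}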
4178 /* Expand calls to inline functions in the body of FN. */
4179
4180 unsigned int
4181 optimize_inline_calls (tree fn)
4182 {
4183 copy_body_data id;
4184 basic_block bb;
4185 int last = n_basic_blocks;
4186 struct gimplify_ctx gctx;
4187
4188 /* There is no point in performing inlining if errors have already
4189 occurred -- and we might crash if we try to inline invalid
4190 code. */
4191 if (seen_error ())
4192 return 0;
4193
4194 /* Clear out ID. */
4195 memset (&id, 0, sizeof (id));
4196
4197 id.src_node = id.dst_node = cgraph_node (fn);
4198 id.dst_fn = fn;
4199 /* Or any functions that aren't finished yet. */
4200 if (current_function_decl)
4201 id.dst_fn = current_function_decl;
4202
4203 id.copy_decl = copy_decl_maybe_to_var;
4204 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4205 id.transform_new_cfg = false;
4206 id.transform_return_to_modify = true;
4207 id.transform_lang_insert_block = NULL;
4208 id.statements_to_fold = pointer_set_create ();
4209
4210 push_gimplify_context (&gctx);
4211
4212 /* We make no attempts to keep dominance info up-to-date. */
4213 free_dominance_info (CDI_DOMINATORS);
4214 free_dominance_info (CDI_POST_DOMINATORS);
4215
4216 /* Register specific gimple functions. */
4217 gimple_register_cfg_hooks ();
4218
4219 /* Reach the trees by walking over the CFG, and note the
4220 enclosing basic-blocks in the call edges. */
4221 /* We walk the blocks going forward, because inlined function bodies
4222 will split id->current_basic_block, and the new blocks will
4223 follow it; we'll trudge through them, processing their CALL_EXPRs
4224 along the way. */
4225 FOR_EACH_BB (bb)
4226 gimple_expand_calls_inline (bb, &id);
4227
4228 pop_gimplify_context (NULL);
4229
4230 #ifdef ENABLE_CHECKING
4231 {
4232 struct cgraph_edge *e;
4233
4234 verify_cgraph_node (id.dst_node);
4235
4236 /* Double check that we inlined everything we are supposed to inline. */
4237 for (e = id.dst_node->callees; e; e = e->next_callee)
4238 gcc_assert (e->inline_failed);
4239 }
4240 #endif
4241
4242 /* Fold the statements before compacting/renumbering the basic blocks. */
4243 fold_marked_statements (last, id.statements_to_fold);
4244 pointer_set_destroy (id.statements_to_fold);
4245
4246 gcc_assert (!id.debug_stmts);
4247
4248 /* Renumber the (code) basic_blocks consecutively. */
4249 compact_blocks ();
4250 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4251 number_blocks (fn);
4252
4253 fold_cond_expr_cond ();
4254 delete_unreachable_blocks_update_callgraph (&id);
4255 #ifdef ENABLE_CHECKING
4256 verify_cgraph_node (id.dst_node);
4257 #endif
4258
4259 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4260 not possible yet - the IPA passes might make various functions not
4261 throw, and they don't care to proactively update local EH info. This is
4262 done later in the fixup_cfg pass, which also executes the verification. */
4263 return (TODO_update_ssa
4264 | TODO_cleanup_cfg
4265 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4266 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4267 }
4268
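/* Illustrative sketch (editor's addition): optimize_inline_calls expects
   CFUN to be set to the function being compiled and returns TODO flags, so
   a hypothetical pass execute hook could simply forward its result.  */

static unsigned int
sketch_execute_inline_pass (void)
{
  return optimize_inline_calls (current_function_decl);
}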
4269 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4270
4271 tree
4272 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4273 {
4274 enum tree_code code = TREE_CODE (*tp);
4275 enum tree_code_class cl = TREE_CODE_CLASS (code);
4276
4277 /* We make copies of most nodes. */
4278 if (IS_EXPR_CODE_CLASS (cl)
4279 || code == TREE_LIST
4280 || code == TREE_VEC
4281 || code == TYPE_DECL
4282 || code == OMP_CLAUSE)
4283 {
4284 /* Because the chain gets clobbered when we make a copy, we save it
4285 here. */
4286 tree chain = NULL_TREE, new_tree;
4287
4288 chain = TREE_CHAIN (*tp);
4289
4290 /* Copy the node. */
4291 new_tree = copy_node (*tp);
4292
4293 /* Propagate mudflap marked-ness. */
4294 if (flag_mudflap && mf_marked_p (*tp))
4295 mf_mark (new_tree);
4296
4297 *tp = new_tree;
4298
4299 /* Now, restore the chain, if appropriate. That will cause
4300 walk_tree to walk into the chain as well. */
4301 if (code == PARM_DECL
4302 || code == TREE_LIST
4303 || code == OMP_CLAUSE)
4304 TREE_CHAIN (*tp) = chain;
4305
4306 /* For now, we don't update BLOCKs when we make copies. So, we
4307 have to nullify all BIND_EXPRs. */
4308 if (TREE_CODE (*tp) == BIND_EXPR)
4309 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4310 }
4311 else if (code == CONSTRUCTOR)
4312 {
4313 /* CONSTRUCTOR nodes need special handling because
4314 we need to duplicate the vector of elements. */
4315 tree new_tree;
4316
4317 new_tree = copy_node (*tp);
4318
4319 /* Propagate mudflap marked-ness. */
4320 if (flag_mudflap && mf_marked_p (*tp))
4321 mf_mark (new_tree);
4322
4323 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4324 CONSTRUCTOR_ELTS (*tp));
4325 *tp = new_tree;
4326 }
4327 else if (TREE_CODE_CLASS (code) == tcc_type)
4328 *walk_subtrees = 0;
4329 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4330 *walk_subtrees = 0;
4331 else if (TREE_CODE_CLASS (code) == tcc_constant)
4332 *walk_subtrees = 0;
4333 else
4334 gcc_assert (code != STATEMENT_LIST);
4335 return NULL_TREE;
4336 }
4337
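/* Illustrative sketch (editor's addition, assuming copy_tree_r is used in
   the usual way as a walk_tree callback): walking an expression with it
   copies the expression nodes while sharing declarations, types and
   constants.  The wrapper name is hypothetical.  */

static tree
sketch_copy_generic_expr (tree expr)
{
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}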
4338 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4339 information indicating to what new SAVE_EXPR this one should be mapped,
4340 use that one. Otherwise, create a new node and enter it in ST. FN is
4341 the function into which the copy will be placed. */
4342
4343 static void
4344 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4345 {
4346 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4347 tree *n;
4348 tree t;
4349
4350 /* See if we already encountered this SAVE_EXPR. */
4351 n = (tree *) pointer_map_contains (st, *tp);
4352
4353 /* If we didn't already remap this SAVE_EXPR, do so now. */
4354 if (!n)
4355 {
4356 t = copy_node (*tp);
4357
4358 /* Remember this SAVE_EXPR. */
4359 *pointer_map_insert (st, *tp) = t;
4360 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4361 *pointer_map_insert (st, t) = t;
4362 }
4363 else
4364 {
4365 /* We've already walked into this SAVE_EXPR; don't do it again. */
4366 *walk_subtrees = 0;
4367 t = *n;
4368 }
4369
4370 /* Replace this SAVE_EXPR with the copy. */
4371 *tp = t;
4372 }
4373
4374 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4375 copies the declaration and enters it in the decl map in DATA (which is
4376 really a `copy_body_data *'). */
4377
4378 static tree
4379 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4380 void *data)
4381 {
4382 copy_body_data *id = (copy_body_data *) data;
4383
4384 /* Don't walk into types. */
4385 if (TYPE_P (*tp))
4386 *walk_subtrees = 0;
4387
4388 else if (TREE_CODE (*tp) == LABEL_EXPR)
4389 {
4390 tree decl = TREE_OPERAND (*tp, 0);
4391
4392 /* Copy the decl and remember the copy. */
4393 insert_decl_map (id, decl, id->copy_decl (decl, id));
4394 }
4395
4396 return NULL_TREE;
4397 }
4398
4399 /* Perform any modifications to EXPR required when it is unsaved. Does
4400 not recurse into EXPR's subtrees. */
4401
4402 static void
4403 unsave_expr_1 (tree expr)
4404 {
4405 switch (TREE_CODE (expr))
4406 {
4407 case TARGET_EXPR:
4408 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4409 It's OK for this to happen if it was part of a subtree that
4410 isn't immediately expanded, such as operand 2 of another
4411 TARGET_EXPR. */
4412 if (TREE_OPERAND (expr, 1))
4413 break;
4414
4415 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4416 TREE_OPERAND (expr, 3) = NULL_TREE;
4417 break;
4418
4419 default:
4420 break;
4421 }
4422 }
4423
4424 /* Called via walk_tree when an expression is unsaved. Using the
4425 decl map in the `copy_body_data' passed in DATA, remaps all
4426 local declarations to appropriate replacements. */
4427
4428 static tree
4429 unsave_r (tree *tp, int *walk_subtrees, void *data)
4430 {
4431 copy_body_data *id = (copy_body_data *) data;
4432 struct pointer_map_t *st = id->decl_map;
4433 tree *n;
4434
4435 /* Only a local declaration (variable or label). */
4436 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4437 || TREE_CODE (*tp) == LABEL_DECL)
4438 {
4439 /* Lookup the declaration. */
4440 n = (tree *) pointer_map_contains (st, *tp);
4441
4442 /* If it's there, remap it. */
4443 if (n)
4444 *tp = *n;
4445 }
4446
4447 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4448 gcc_unreachable ();
4449 else if (TREE_CODE (*tp) == BIND_EXPR)
4450 copy_bind_expr (tp, walk_subtrees, id);
4451 else if (TREE_CODE (*tp) == SAVE_EXPR
4452 || TREE_CODE (*tp) == TARGET_EXPR)
4453 remap_save_expr (tp, st, walk_subtrees);
4454 else
4455 {
4456 copy_tree_r (tp, walk_subtrees, NULL);
4457
4458 /* Do whatever unsaving is required. */
4459 unsave_expr_1 (*tp);
4460 }
4461
4462 /* Keep iterating. */
4463 return NULL_TREE;
4464 }
4465
4466 /* Copies everything in EXPR and replaces variables, labels
4467 and SAVE_EXPRs local to EXPR. */
4468
4469 tree
4470 unsave_expr_now (tree expr)
4471 {
4472 copy_body_data id;
4473
4474 /* There's nothing to do for NULL_TREE. */
4475 if (expr == 0)
4476 return expr;
4477
4478 /* Set up ID. */
4479 memset (&id, 0, sizeof (id));
4480 id.src_fn = current_function_decl;
4481 id.dst_fn = current_function_decl;
4482 id.decl_map = pointer_map_create ();
4483 id.debug_map = NULL;
4484
4485 id.copy_decl = copy_decl_no_change;
4486 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4487 id.transform_new_cfg = false;
4488 id.transform_return_to_modify = false;
4489 id.transform_lang_insert_block = NULL;
4490
4491 /* Walk the tree once to find local labels. */
4492 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4493
4494 /* Walk the tree again, copying, remapping, and unsaving. */
4495 walk_tree (&expr, unsave_r, &id, NULL);
4496
4497 /* Clean up. */
4498 pointer_map_destroy (id.decl_map);
4499 if (id.debug_map)
4500 pointer_map_destroy (id.debug_map);
4501
4502 return expr;
4503 }
4504
4505 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4506 label, copies the declaration and enters it in the decl map in DATA (which
4507 is really a 'copy_body_data *'). */
4508
4509 static tree
4510 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4511 bool *handled_ops_p ATTRIBUTE_UNUSED,
4512 struct walk_stmt_info *wi)
4513 {
4514 copy_body_data *id = (copy_body_data *) wi->info;
4515 gimple stmt = gsi_stmt (*gsip);
4516
4517 if (gimple_code (stmt) == GIMPLE_LABEL)
4518 {
4519 tree decl = gimple_label_label (stmt);
4520
4521 /* Copy the decl and remember the copy. */
4522 insert_decl_map (id, decl, id->copy_decl (decl, id));
4523 }
4524
4525 return NULL_TREE;
4526 }
4527
4528
4529 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4530 Using the decl map in the `copy_body_data' carried in the walk info,
4531 remaps all local declarations to appropriate replacements in gimple
4532 operands. */
4533
4534 static tree
4535 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4536 {
4537 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4538 copy_body_data *id = (copy_body_data *) wi->info;
4539 struct pointer_map_t *st = id->decl_map;
4540 tree *n;
4541 tree expr = *tp;
4542
4543 /* Only a local declaration (variable or label). */
4544 if ((TREE_CODE (expr) == VAR_DECL
4545 && !TREE_STATIC (expr))
4546 || TREE_CODE (expr) == LABEL_DECL)
4547 {
4548 /* Lookup the declaration. */
4549 n = (tree *) pointer_map_contains (st, expr);
4550
4551 /* If it's there, remap it. */
4552 if (n)
4553 *tp = *n;
4554 *walk_subtrees = 0;
4555 }
4556 else if (TREE_CODE (expr) == STATEMENT_LIST
4557 || TREE_CODE (expr) == BIND_EXPR
4558 || TREE_CODE (expr) == SAVE_EXPR)
4559 gcc_unreachable ();
4560 else if (TREE_CODE (expr) == TARGET_EXPR)
4561 {
4562 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4563 It's OK for this to happen if it was part of a subtree that
4564 isn't immediately expanded, such as operand 2 of another
4565 TARGET_EXPR. */
4566 if (!TREE_OPERAND (expr, 1))
4567 {
4568 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4569 TREE_OPERAND (expr, 3) = NULL_TREE;
4570 }
4571 }
4572
4573 /* Keep iterating. */
4574 return NULL_TREE;
4575 }
4576
4577
4578 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4579 Using the decl map in the `copy_body_data' carried in the walk info,
4580 remaps all local declarations to appropriate replacements in gimple
4581 statements. */
4582
4583 static tree
4584 replace_locals_stmt (gimple_stmt_iterator *gsip,
4585 bool *handled_ops_p ATTRIBUTE_UNUSED,
4586 struct walk_stmt_info *wi)
4587 {
4588 copy_body_data *id = (copy_body_data *) wi->info;
4589 gimple stmt = gsi_stmt (*gsip);
4590
4591 if (gimple_code (stmt) == GIMPLE_BIND)
4592 {
4593 tree block = gimple_bind_block (stmt);
4594
4595 if (block)
4596 {
4597 remap_block (&block, id);
4598 gimple_bind_set_block (stmt, block);
4599 }
4600
4601 /* This will remap a lot of the same decls again, but this should be
4602 harmless. */
4603 if (gimple_bind_vars (stmt))
4604 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4605 }
4606
4607 /* Keep iterating. */
4608 return NULL_TREE;
4609 }
4610
4611
4612 /* Copies everything in SEQ and replaces variables and labels local to
4613 current_function_decl. */
4614
4615 gimple_seq
4616 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4617 {
4618 copy_body_data id;
4619 struct walk_stmt_info wi;
4620 struct pointer_set_t *visited;
4621 gimple_seq copy;
4622
4623 /* There's nothing to do for an empty sequence. */
4624 if (seq == NULL)
4625 return seq;
4626
4627 /* Set up ID. */
4628 memset (&id, 0, sizeof (id));
4629 id.src_fn = current_function_decl;
4630 id.dst_fn = current_function_decl;
4631 id.decl_map = pointer_map_create ();
4632 id.debug_map = NULL;
4633
4634 id.copy_decl = copy_decl_no_change;
4635 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4636 id.transform_new_cfg = false;
4637 id.transform_return_to_modify = false;
4638 id.transform_lang_insert_block = NULL;
4639
4640 /* Walk the tree once to find local labels. */
4641 memset (&wi, 0, sizeof (wi));
4642 visited = pointer_set_create ();
4643 wi.info = &id;
4644 wi.pset = visited;
4645 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4646 pointer_set_destroy (visited);
4647
4648 copy = gimple_seq_copy (seq);
4649
4650 /* Walk the copy, remapping decls. */
4651 memset (&wi, 0, sizeof (wi));
4652 wi.info = &id;
4653 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4654
4655 /* Clean up. */
4656 pointer_map_destroy (id.decl_map);
4657 if (id.debug_map)
4658 pointer_map_destroy (id.debug_map);
4659
4660 return copy;
4661 }
4662
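/* Illustrative sketch (editor's addition): copy_gimple_seq_and_replace_locals
   is the GIMPLE counterpart of unsave_expr_now above; SEQ must belong to
   current_function_decl.  The wrapper name is hypothetical.  */

static gimple_seq
sketch_duplicate_local_seq (gimple_seq seq)
{
  /* The copy uses fresh local decls and labels, so it can be inserted at
     another point in the same function without creating sharing.  */
  return copy_gimple_seq_and_replace_locals (seq);
}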
4663
4664 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4665
4666 static tree
4667 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4668 {
4669 if (*tp == data)
4670 return (tree) data;
4671 else
4672 return NULL;
4673 }
4674
4675 DEBUG_FUNCTION bool
4676 debug_find_tree (tree top, tree search)
4677 {
4678 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4679 }
4680
4681
4682 /* Declare the variables created by the inliner. Add all the variables in
4683 VARS to BLOCK. */
4684
4685 static void
4686 declare_inline_vars (tree block, tree vars)
4687 {
4688 tree t;
4689 for (t = vars; t; t = TREE_CHAIN (t))
4690 {
4691 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4692 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4693 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4694 }
4695
4696 if (block)
4697 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4698 }
4699
4700 /* Finish up copying DECL into COPY. The DECL originally lived in ID->src_fn
4701 but the COPY will live in ID->dst_fn. Copy the debug-related flags, record
4702 the abstract origin, and set the context of the COPY accordingly. */
4703
4704 static tree
4705 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4706 {
4707 /* Don't generate debug information for the copy if we wouldn't have
4708 generated it for the original either. */
4709 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4710 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4711
4712 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4713 declaration inspired this copy. */
4714 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4715
4716 /* The new variable/label has no RTL, yet. */
4717 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4718 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4719 SET_DECL_RTL (copy, 0);
4720
4721 /* These args would always appear unused, if not for this. */
4722 TREE_USED (copy) = 1;
4723
4724 /* Set the context for the new declaration. */
4725 if (!DECL_CONTEXT (decl))
4726 /* Globals stay global. */
4727 ;
4728 else if (DECL_CONTEXT (decl) != id->src_fn)
4729 /* Things that weren't in the scope of the function we're inlining
4730 from aren't in the scope we're inlining to, either. */
4731 ;
4732 else if (TREE_STATIC (decl))
4733 /* Function-scoped static variables should stay in the original
4734 function. */
4735 ;
4736 else
4737 /* Ordinary automatic local variables are now in the scope of the
4738 new function. */
4739 DECL_CONTEXT (copy) = id->dst_fn;
4740
4741 return copy;
4742 }
4743
4744 static tree
4745 copy_decl_to_var (tree decl, copy_body_data *id)
4746 {
4747 tree copy, type;
4748
4749 gcc_assert (TREE_CODE (decl) == PARM_DECL
4750 || TREE_CODE (decl) == RESULT_DECL);
4751
4752 type = TREE_TYPE (decl);
4753
4754 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4755 VAR_DECL, DECL_NAME (decl), type);
4756 if (DECL_PT_UID_SET_P (decl))
4757 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4758 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4759 TREE_READONLY (copy) = TREE_READONLY (decl);
4760 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4761 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4762
4763 return copy_decl_for_dup_finish (id, decl, copy);
4764 }
4765
4766 /* Like copy_decl_to_var, but create a return slot object instead of a
4767 pointer variable for return by invisible reference. */
4768
4769 static tree
4770 copy_result_decl_to_var (tree decl, copy_body_data *id)
4771 {
4772 tree copy, type;
4773
4774 gcc_assert (TREE_CODE (decl) == PARM_DECL
4775 || TREE_CODE (decl) == RESULT_DECL);
4776
4777 type = TREE_TYPE (decl);
4778 if (DECL_BY_REFERENCE (decl))
4779 type = TREE_TYPE (type);
4780
4781 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4782 VAR_DECL, DECL_NAME (decl), type);
4783 if (DECL_PT_UID_SET_P (decl))
4784 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4785 TREE_READONLY (copy) = TREE_READONLY (decl);
4786 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4787 if (!DECL_BY_REFERENCE (decl))
4788 {
4789 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4790 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4791 }
4792
4793 return copy_decl_for_dup_finish (id, decl, copy);
4794 }
4795
4796 tree
4797 copy_decl_no_change (tree decl, copy_body_data *id)
4798 {
4799 tree copy;
4800
4801 copy = copy_node (decl);
4802
4803 /* The COPY is not abstract; it will be generated in DST_FN. */
4804 DECL_ABSTRACT (copy) = 0;
4805 lang_hooks.dup_lang_specific_decl (copy);
4806
4807 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4808 been taken; it's for internal bookkeeping in expand_goto_internal. */
4809 if (TREE_CODE (copy) == LABEL_DECL)
4810 {
4811 TREE_ADDRESSABLE (copy) = 0;
4812 LABEL_DECL_UID (copy) = -1;
4813 }
4814
4815 return copy_decl_for_dup_finish (id, decl, copy);
4816 }
4817
4818 static tree
4819 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4820 {
4821 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4822 return copy_decl_to_var (decl, id);
4823 else
4824 return copy_decl_no_change (decl, id);
4825 }
4826
4827 /* Return a copy of the function's argument tree. */
4828 static tree
4829 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4830 bitmap args_to_skip, tree *vars)
4831 {
4832 tree arg, *parg;
4833 tree new_parm = NULL;
4834 int i = 0;
4835
4836 parg = &new_parm;
4837
4838 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4839 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4840 {
4841 tree new_tree = remap_decl (arg, id);
4842 lang_hooks.dup_lang_specific_decl (new_tree);
4843 *parg = new_tree;
4844 parg = &TREE_CHAIN (new_tree);
4845 }
4846 else if (!pointer_map_contains (id->decl_map, arg))
4847 {
4848 /* Make an equivalent VAR_DECL. If the argument was used
4849 as a temporary variable later in the function, the uses will be
4850 replaced by the local variable. */
4851 tree var = copy_decl_to_var (arg, id);
4852 get_var_ann (var);
4853 add_referenced_var (var);
4854 insert_decl_map (id, arg, var);
4855 /* Declare this new variable. */
4856 TREE_CHAIN (var) = *vars;
4857 *vars = var;
4858 }
4859 return new_parm;
4860 }
4861
4862 /* Return a copy of the function's static chain. */
4863 static tree
4864 copy_static_chain (tree static_chain, copy_body_data * id)
4865 {
4866 tree *chain_copy, *pvar;
4867
4868 chain_copy = &static_chain;
4869 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4870 {
4871 tree new_tree = remap_decl (*pvar, id);
4872 lang_hooks.dup_lang_specific_decl (new_tree);
4873 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4874 *pvar = new_tree;
4875 }
4876 return static_chain;
4877 }
4878
4879 /* Return true if the function is allowed to be versioned.
4880 This is a guard for the versioning functionality. */
4881
4882 bool
4883 tree_versionable_function_p (tree fndecl)
4884 {
4885 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4886 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4887 }
4888
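/* Illustrative sketch (editor's addition): callers are expected to check
   tree_versionable_function_p before trying to create a clone; the helper
   name is hypothetical.  */

static bool
sketch_can_clone_p (tree fndecl)
{
  return (tree_versionable_function_p (fndecl)
          && gimple_has_body_p (fndecl));
}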
4889 /* Delete all unreachable basic blocks and update the callgraph.
4890 Doing so is somewhat nontrivial because we need to update all clones and
4891 remove inline functions that become unreachable. */
4892
4893 static bool
4894 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4895 {
4896 bool changed = false;
4897 basic_block b, next_bb;
4898
4899 find_unreachable_blocks ();
4900
4901 /* Delete all unreachable basic blocks. */
4902
4903 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4904 {
4905 next_bb = b->next_bb;
4906
4907 if (!(b->flags & BB_REACHABLE))
4908 {
4909 gimple_stmt_iterator bsi;
4910
4911 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4912 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4913 {
4914 struct cgraph_edge *e;
4915 struct cgraph_node *node;
4916
4917 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4918 {
4919 if (!e->inline_failed)
4920 cgraph_remove_node_and_inline_clones (e->callee);
4921 else
4922 cgraph_remove_edge (e);
4923 }
4924 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4925 && id->dst_node->clones)
4926 for (node = id->dst_node->clones; node != id->dst_node;)
4927 {
4928 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4929 {
4930 if (!e->inline_failed)
4931 cgraph_remove_node_and_inline_clones (e->callee);
4932 else
4933 cgraph_remove_edge (e);
4934 }
4935
4936 if (node->clones)
4937 node = node->clones;
4938 else if (node->next_sibling_clone)
4939 node = node->next_sibling_clone;
4940 else
4941 {
4942 while (node != id->dst_node && !node->next_sibling_clone)
4943 node = node->clone_of;
4944 if (node != id->dst_node)
4945 node = node->next_sibling_clone;
4946 }
4947 }
4948 }
4949 delete_basic_block (b);
4950 changed = true;
4951 }
4952 }
4953
4954 if (changed)
4955 tidy_fallthru_edges ();
4956 return changed;
4957 }
4958
4959 /* Update clone info after duplication. */
4960
4961 static void
4962 update_clone_info (copy_body_data * id)
4963 {
4964 struct cgraph_node *node;
4965 if (!id->dst_node->clones)
4966 return;
4967 for (node = id->dst_node->clones; node != id->dst_node;)
4968 {
4969 /* First update replace maps to match the new body. */
4970 if (node->clone.tree_map)
4971 {
4972 unsigned int i;
4973 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4974 {
4975 struct ipa_replace_map *replace_info;
4976 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4977 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4978 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4979 }
4980 }
4981 if (node->clones)
4982 node = node->clones;
4983 else if (node->next_sibling_clone)
4984 node = node->next_sibling_clone;
4985 else
4986 {
4987 while (node != id->dst_node && !node->next_sibling_clone)
4988 node = node->clone_of;
4989 if (node != id->dst_node)
4990 node = node->next_sibling_clone;
4991 }
4992 }
4993 }
4994
4995 /* Create a copy of a function's tree.
4996 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4997 of the original function and the new copied function
4998 respectively. In case we want to replace a DECL
4999 tree with another tree while duplicating the function's
5000 body, TREE_MAP represents the mapping between these
5001 trees. If UPDATE_CLONES is set, the call_stmt fields
5002 of edges of clones of the function will be updated.
5003
5004 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5005 from the new version.
5006 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5007 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5008 */
5009 void
5010 tree_function_versioning (tree old_decl, tree new_decl,
5011 VEC(ipa_replace_map_p,gc)* tree_map,
5012 bool update_clones, bitmap args_to_skip,
5013 bitmap blocks_to_copy, basic_block new_entry)
5014 {
5015 struct cgraph_node *old_version_node;
5016 struct cgraph_node *new_version_node;
5017 copy_body_data id;
5018 tree p;
5019 unsigned i;
5020 struct ipa_replace_map *replace_info;
5021 basic_block old_entry_block, bb;
5022 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5023
5024 tree t_step;
5025 tree old_current_function_decl = current_function_decl;
5026 tree vars = NULL_TREE;
5027
5028 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5029 && TREE_CODE (new_decl) == FUNCTION_DECL);
5030 DECL_POSSIBLY_INLINED (old_decl) = 1;
5031
5032 old_version_node = cgraph_node (old_decl);
5033 new_version_node = cgraph_node (new_decl);
5034
5035 /* Output the inlining info for this abstract function, since it has been
5036 inlined. If we don't do this now, we can lose the information about the
5037 variables in the function when the blocks get blown away as soon as we
5038 remove the cgraph node. */
5039 (*debug_hooks->outlining_inline_function) (old_decl);
5040
5041 DECL_ARTIFICIAL (new_decl) = 1;
5042 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5043 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5044
5045 /* Prepare the data structures for the tree copy. */
5046 memset (&id, 0, sizeof (id));
5047
5048 /* Generate a new name for the new version. */
5049 id.statements_to_fold = pointer_set_create ();
5050
5051 id.decl_map = pointer_map_create ();
5052 id.debug_map = NULL;
5053 id.src_fn = old_decl;
5054 id.dst_fn = new_decl;
5055 id.src_node = old_version_node;
5056 id.dst_node = new_version_node;
5057 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5058 if (id.src_node->ipa_transforms_to_apply)
5059 {
5060 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5061 unsigned int i;
5062
5063 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5064 id.src_node->ipa_transforms_to_apply);
5065 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5066 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5067 VEC_index (ipa_opt_pass,
5068 old_transforms_to_apply,
5069 i));
5070 }
5071
5072 id.copy_decl = copy_decl_no_change;
5073 id.transform_call_graph_edges
5074 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5075 id.transform_new_cfg = true;
5076 id.transform_return_to_modify = false;
5077 id.transform_lang_insert_block = NULL;
5078
5079 current_function_decl = new_decl;
5080 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5081 (DECL_STRUCT_FUNCTION (old_decl));
5082 initialize_cfun (new_decl, old_decl,
5083 old_entry_block->count);
5084 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5085 = id.src_cfun->gimple_df->ipa_pta;
5086 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5087
5088 /* Copy the function's static chain. */
5089 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5090 if (p)
5091 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5092 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5093 &id);
5094
5095 /* If there's a tree_map, prepare for substitution. */
5096 if (tree_map)
5097 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5098 {
5099 gimple init;
5100 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5101 if (replace_info->replace_p)
5102 {
5103 tree op = replace_info->new_tree;
5104 if (!replace_info->old_tree)
5105 {
5106 int i = replace_info->parm_num;
5107 tree parm;
5108 for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
5109 i --;
5110 replace_info->old_tree = parm;
5111 }
5112
5113
5114 STRIP_NOPS (op);
5115
5116 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5117 op = TREE_OPERAND (op, 0);
5118
5119 if (TREE_CODE (op) == ADDR_EXPR)
5120 {
5121 op = TREE_OPERAND (op, 0);
5122 while (handled_component_p (op))
5123 op = TREE_OPERAND (op, 0);
5124 if (TREE_CODE (op) == VAR_DECL)
5125 add_referenced_var (op);
5126 }
5127 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5128 init = setup_one_parameter (&id, replace_info->old_tree,
5129 replace_info->new_tree, id.src_fn,
5130 NULL,
5131 &vars);
5132 if (init)
5133 VEC_safe_push (gimple, heap, init_stmts, init);
5134 }
5135 }
5136 /* Copy the function's arguments. */
5137 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5138 DECL_ARGUMENTS (new_decl) =
5139 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5140 args_to_skip, &vars);
5141
5142 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5143
5144 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5145 number_blocks (id.dst_fn);
5146
5147 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5148
5149 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
5150 /* Add local vars. */
5151 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
5152 t_step; t_step = TREE_CHAIN (t_step))
5153 {
5154 tree var = TREE_VALUE (t_step);
5155 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
5156 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
5157 else if (!can_be_nonlocal (var, &id))
5158 cfun->local_decls =
5159 tree_cons (NULL_TREE, remap_decl (var, &id),
5160 cfun->local_decls);
5161 }
5162
5163 /* Copy the Function's body. */
5164 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5165 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5166
5167 if (DECL_RESULT (old_decl) != NULL_TREE)
5168 {
5169 tree *res_decl = &DECL_RESULT (old_decl);
5170 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5171 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5172 }
5173
5174 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5175 number_blocks (new_decl);
5176
5177 /* We want to create the BB unconditionally, so that the addition of
5178 debug stmts doesn't affect BB count, which may in the end cause
5179 codegen differences. */
5180 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5181 while (VEC_length (gimple, init_stmts))
5182 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5183 update_clone_info (&id);
5184
5185 /* Remap the nonlocal_goto_save_area, if any. */
5186 if (cfun->nonlocal_goto_save_area)
5187 {
5188 struct walk_stmt_info wi;
5189
5190 memset (&wi, 0, sizeof (wi));
5191 wi.info = &id;
5192 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5193 }
5194
5195 /* Clean up. */
5196 pointer_map_destroy (id.decl_map);
5197 if (id.debug_map)
5198 pointer_map_destroy (id.debug_map);
5199 free_dominance_info (CDI_DOMINATORS);
5200 free_dominance_info (CDI_POST_DOMINATORS);
5201
5202 fold_marked_statements (0, id.statements_to_fold);
5203 pointer_set_destroy (id.statements_to_fold);
5204 fold_cond_expr_cond ();
5205 delete_unreachable_blocks_update_callgraph (&id);
5206 if (id.dst_node->analyzed)
5207 cgraph_rebuild_references ();
5208 update_ssa (TODO_update_ssa);
5209 free_dominance_info (CDI_DOMINATORS);
5210 free_dominance_info (CDI_POST_DOMINATORS);
5211
5212 gcc_assert (!id.debug_stmts);
5213 VEC_free (gimple, heap, init_stmts);
5214 pop_cfun ();
5215 current_function_decl = old_current_function_decl;
5216 gcc_assert (!current_function_decl
5217 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5218 return;
5219 }
5220
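/* Illustrative sketch (editor's addition): the simplest use of
   tree_function_versioning passes no replacement map, skips no arguments
   and copies every block.  NEW_DECL is assumed to already have its own
   cgraph node (created outside this file); the wrapper name is
   hypothetical.  */

static void
sketch_make_plain_copy (tree old_decl, tree new_decl)
{
  tree_function_versioning (old_decl, new_decl,
                            NULL /* tree_map */, false /* update_clones */,
                            NULL /* args_to_skip */,
                            NULL /* blocks_to_copy */,
                            NULL /* new_entry */);
}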
5221 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5222 the callee and return the inlined body on success. */
5223
5224 tree
5225 maybe_inline_call_in_expr (tree exp)
5226 {
5227 tree fn = get_callee_fndecl (exp);
5228
5229 /* We can only try to inline "const" functions. */
5230 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5231 {
5232 struct pointer_map_t *decl_map = pointer_map_create ();
5233 call_expr_arg_iterator iter;
5234 copy_body_data id;
5235 tree param, arg, t;
5236
5237 /* Remap the parameters. */
5238 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5239 param;
5240 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5241 *pointer_map_insert (decl_map, param) = arg;
5242
5243 memset (&id, 0, sizeof (id));
5244 id.src_fn = fn;
5245 id.dst_fn = current_function_decl;
5246 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5247 id.decl_map = decl_map;
5248
5249 id.copy_decl = copy_decl_no_change;
5250 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5251 id.transform_new_cfg = false;
5252 id.transform_return_to_modify = true;
5253 id.transform_lang_insert_block = false;
5254
5255 /* Make sure not to unshare trees behind the front-end's back
5256 since front-end specific mechanisms may rely on sharing. */
5257 id.regimplify = false;
5258 id.do_not_unshare = true;
5259
5260 /* We're not inside any EH region. */
5261 id.eh_lp_nr = 0;
5262
5263 t = copy_tree_body (&id);
5264 pointer_map_destroy (decl_map);
5265
5266 /* We can only return something suitable for use in a GENERIC
5267 expression tree. */
5268 if (TREE_CODE (t) == MODIFY_EXPR)
5269 return TREE_OPERAND (t, 1);
5270 }
5271
5272 return NULL_TREE;
5273 }
5274
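/* Illustrative sketch (editor's addition): a front end holding a GENERIC
   CALL_EXPR to a const function with a saved body can try to substitute the
   inlined body and fall back to the original call otherwise.  The wrapper
   name is hypothetical.  */

static tree
sketch_fold_const_call (tree call)
{
  tree inlined = maybe_inline_call_in_expr (call);
  return inlined ? inlined : call;
}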
5275 /* Duplicate a type, fields and all. */
5276
5277 tree
5278 build_duplicate_type (tree type)
5279 {
5280 struct copy_body_data id;
5281
5282 memset (&id, 0, sizeof (id));
5283 id.src_fn = current_function_decl;
5284 id.dst_fn = current_function_decl;
5285 id.src_cfun = cfun;
5286 id.decl_map = pointer_map_create ();
5287 id.debug_map = NULL;
5288 id.copy_decl = copy_decl_no_change;
5289
5290 type = remap_type_1 (type, &id);
5291
5292 pointer_map_destroy (id.decl_map);
5293 if (id.debug_map)
5294 pointer_map_destroy (id.debug_map);
5295
5296 TYPE_CANONICAL (type) = type;
5297
5298 return type;
5299 }
5300
5301 /* Return whether it is safe to inline the call represented by edge E.
5302 Inlining may be unsafe because the callee uses different target-specific
5303 options or because call-site argument types mismatch the parameter types. */
5304 bool
5305 tree_can_inline_p (struct cgraph_edge *e)
5306 {
5307 #if 0
5308 /* This causes a regression in SPEC in that it prevents a cold function from
5309 inlining a hot function. Perhaps this should only apply to functions
5310 that the user declares hot/cold/optimize explicitly. */
5311
5312 /* Don't inline a function with a higher optimization level than the
5313 caller, or with different space constraints (hot/cold functions). */
5314 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5315 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5316
5317 if (caller_tree != callee_tree)
5318 {
5319 struct cl_optimization *caller_opt
5320 = TREE_OPTIMIZATION ((caller_tree)
5321 ? caller_tree
5322 : optimization_default_node);
5323
5324 struct cl_optimization *callee_opt
5325 = TREE_OPTIMIZATION ((callee_tree)
5326 ? callee_tree
5327 : optimization_default_node);
5328
5329 if ((caller_opt->optimize > callee_opt->optimize)
5330 || (caller_opt->optimize_size != callee_opt->optimize_size))
5331 return false;
5332 }
5333 #endif
5334 tree caller, callee, lhs;
5335
5336 caller = e->caller->decl;
5337 callee = e->callee->decl;
5338
5339 /* First check that inlining isn't simply forbidden in this case. */
5340 if (inline_forbidden_into_p (caller, callee))
5341 {
5342 e->inline_failed = CIF_UNSPECIFIED;
5343 gimple_call_set_cannot_inline (e->call_stmt, true);
5344 return false;
5345 }
5346
5347 /* Allow the backend to decide if inlining is ok. */
5348 if (!targetm.target_option.can_inline_p (caller, callee))
5349 {
5350 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5351 gimple_call_set_cannot_inline (e->call_stmt, true);
5352 e->call_stmt_cannot_inline_p = true;
5353 return false;
5354 }
5355
5356 /* Do not inline calls where we cannot trivially work around mismatches
5357 in argument or return types. */
5358 if (e->call_stmt
5359 && ((DECL_RESULT (callee)
5360 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5361 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5362 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5363 TREE_TYPE (lhs))
5364 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5365 || !gimple_check_call_args (e->call_stmt)))
5366 {
5367 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5368 gimple_call_set_cannot_inline (e->call_stmt, true);
5369 e->call_stmt_cannot_inline_p = true;
5370 return false;
5371 }
5372
5373 return true;
5374 }