1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "insn-config.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-iterator.h"
37 #include "cgraph.h"
38 #include "intl.h"
39 #include "tree-mudflap.h"
40 #include "tree-flow.h"
41 #include "function.h"
42 #include "tree-flow.h"
43 #include "tree-pretty-print.h"
44 #include "except.h"
45 #include "debug.h"
46 #include "pointer-set.h"
47 #include "ipa-prop.h"
48 #include "value-prof.h"
49 #include "tree-pass.h"
50 #include "target.h"
51 #include "integrate.h"
52
53 #include "rtl.h" /* FIXME: For asm_str_count. */
54
55 /* I'm not real happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"
58
59 /* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements are adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
73 Versioning: a function body is duplicated and the result is a new
74 function, rather than being inserted into the blocks of an existing
75 function as with inlining. Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined), those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
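/* For a rough illustration of the inlining transform sketched above
   (a sketch, not the exact GIMPLE the compiler emits): inlining

     int sq (int x) { return x * x; }   at a call site   y = sq (a);

   duplicates the body with the PARM_DECL x remapped to a fresh local
   VAR_DECL and the non-void RETURN_EXPR rewritten into a MODIFY_EXPR
   of a dedicated return variable, roughly:

     x.1 = a;
     retval.2 = x.1 * x.1;
     y = retval.2;  */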
90
91 /* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104 /* Weights that estimate_num_insns uses to estimate the size of the
105 produced code. */
106
107 eni_weights eni_size_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the time necessary
110 to execute the produced code. */
111
112 eni_weights eni_time_weights;
113
114 /* Prototypes. */
115
116 static tree declare_return_variable (copy_body_data *, tree, tree);
117 static void remap_block (tree *, copy_body_data *);
118 static void copy_bind_expr (tree *, int *, copy_body_data *);
119 static tree mark_local_for_remap_r (tree *, int *, void *);
120 static void unsave_expr_1 (tree);
121 static tree unsave_r (tree *, int *, void *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130
131 /* Insert a tree->tree mapping for ID. Although the name suggests
132 that the trees should be variables, this map is used for more than that. */
133
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
136 {
137 *pointer_map_insert (id->decl_map, key) = value;
138
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
143 }
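/* For illustration (a sketch of the resulting mapping, not a call to a
   real helper): after insert_decl_map (id, old_var, new_var) with
   old_var != new_var, both keys map to new_var,

     decl_map[old_var] == new_var
     decl_map[new_var] == new_var

   so if the copied tree already refers to new_var we will not try to
   remap (and thereby duplicate) it a second time.  */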
144
145 /* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
147
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150 {
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
153
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
156
157 if (!target_for_debug_bind (key))
158 return;
159
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
162
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
165
166 *pointer_map_insert (id->debug_map, key) = value;
167 }
168
169 /* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173 static int processing_debug_stmt = 0;
174
175 /* Construct new SSA name for old NAME. ID is the inline context. */
176
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
179 {
180 tree new_tree;
181 tree *n;
182
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
184
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
188
189 if (processing_debug_stmt)
190 {
191 processing_debug_stmt = -1;
192 return name;
193 }
194
195 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
196 that in copy_bb. */
197 new_tree = remap_decl (SSA_NAME_VAR (name), id);
198
199 /* We might've substituted a constant or another SSA_NAME for
200 the variable.
201
202 Replace the SSA name representing the RESULT_DECL by the variable during
203 inlining: this saves us from having to introduce a PHI node in case the
204 return value is only partly initialized. */
205 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
206 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
207 || !id->transform_return_to_modify))
208 {
209 struct ptr_info_def *pi;
210 new_tree = make_ssa_name (new_tree, NULL);
211 insert_decl_map (id, name, new_tree);
212 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
213 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
214 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
215 /* At least IPA points-to info can be directly transferred. */
216 if (id->src_cfun->gimple_df
217 && id->src_cfun->gimple_df->ipa_pta
218 && (pi = SSA_NAME_PTR_INFO (name))
219 && !pi->pt.anything)
220 {
221 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
222 new_pi->pt = pi->pt;
223 }
224 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
225 {
226 /* By inlining a function with an uninitialized variable, we might
227 extend its lifetime (the variable might get reused). This causes an
228 ICE if we end up extending the lifetime of an SSA name across an
229 abnormal edge, and it also increases register pressure.
230
231 We simply initialize all uninitialized vars to 0, except when we
232 are inlining into the very first BB. We could avoid this for all
233 BBs that are not inside strongly connected regions of the CFG, but
234 that is expensive to test. */
235 if (id->entry_bb
236 && is_gimple_reg (SSA_NAME_VAR (name))
237 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
238 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
239 || EDGE_COUNT (id->entry_bb->preds) != 1))
240 {
241 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
242 gimple init_stmt;
243
244 init_stmt = gimple_build_assign (new_tree,
245 fold_convert (TREE_TYPE (new_tree),
246 integer_zero_node));
247 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
248 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
249 }
250 else
251 {
252 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
253 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
254 == name)
255 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
256 }
257 }
258 }
259 else
260 insert_decl_map (id, name, new_tree);
261 return new_tree;
262 }
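/* Illustrative sketch of the uninitialized-variable handling above: if
   the inlined body uses an SSA name whose defining statement is a
   GIMPLE_NOP (i.e. the variable was never initialized), the copy gets
   an explicit zero initialization, roughly

     new_var_N = 0;

   inserted at the end of id->entry_bb (when one is set), so that the
   SSA name's lifetime is not extended across abnormal edges in the
   caller.  */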
263
264 /* Remap DECL during the copying of the BLOCK tree for the function. */
265
266 tree
267 remap_decl (tree decl, copy_body_data *id)
268 {
269 tree *n;
270
271 /* We only remap local variables in the current function. */
272
273 /* See if we have remapped this declaration. */
274
275 n = (tree *) pointer_map_contains (id->decl_map, decl);
276
277 if (!n && processing_debug_stmt)
278 {
279 processing_debug_stmt = -1;
280 return decl;
281 }
282
283 /* If we didn't already have an equivalent for this declaration,
284 create one now. */
285 if (!n)
286 {
287 /* Make a copy of the variable or label. */
288 tree t = id->copy_decl (decl, id);
289
290 /* Remember it, so that if we encounter this local entity again
291 we can reuse this copy. Do this early because remap_type may
292 need this decl for TYPE_STUB_DECL. */
293 insert_decl_map (id, decl, t);
294
295 if (!DECL_P (t))
296 return t;
297
298 /* Remap types, if necessary. */
299 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
300 if (TREE_CODE (t) == TYPE_DECL)
301 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
302
303 /* Remap sizes as necessary. */
304 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
305 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
306
307 /* If fields, do likewise for offset and qualifier. */
308 if (TREE_CODE (t) == FIELD_DECL)
309 {
310 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
311 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
312 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
313 }
314
315 if (cfun && gimple_in_ssa_p (cfun)
316 && (TREE_CODE (t) == VAR_DECL
317 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
318 {
319 get_var_ann (t);
320 add_referenced_var (t);
321 }
322 return t;
323 }
324
325 if (id->do_not_unshare)
326 return *n;
327 else
328 return unshare_expr (*n);
329 }
330
331 static tree
332 remap_type_1 (tree type, copy_body_data *id)
333 {
334 tree new_tree, t;
335
336 /* We do need a copy. Build and register it now. If this is a pointer or
337 reference type, remap the designated type and make a new pointer or
338 reference type. */
339 if (TREE_CODE (type) == POINTER_TYPE)
340 {
341 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
342 TYPE_MODE (type),
343 TYPE_REF_CAN_ALIAS_ALL (type));
344 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
345 new_tree = build_type_attribute_qual_variant (new_tree,
346 TYPE_ATTRIBUTES (type),
347 TYPE_QUALS (type));
348 insert_decl_map (id, type, new_tree);
349 return new_tree;
350 }
351 else if (TREE_CODE (type) == REFERENCE_TYPE)
352 {
353 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
354 TYPE_MODE (type),
355 TYPE_REF_CAN_ALIAS_ALL (type));
356 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
357 new_tree = build_type_attribute_qual_variant (new_tree,
358 TYPE_ATTRIBUTES (type),
359 TYPE_QUALS (type));
360 insert_decl_map (id, type, new_tree);
361 return new_tree;
362 }
363 else
364 new_tree = copy_node (type);
365
366 insert_decl_map (id, type, new_tree);
367
368 /* This is a new type, not a copy of an old type. Need to reassociate
369 variants. We can handle everything except the main variant lazily. */
370 t = TYPE_MAIN_VARIANT (type);
371 if (type != t)
372 {
373 t = remap_type (t, id);
374 TYPE_MAIN_VARIANT (new_tree) = t;
375 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
376 TYPE_NEXT_VARIANT (t) = new_tree;
377 }
378 else
379 {
380 TYPE_MAIN_VARIANT (new_tree) = new_tree;
381 TYPE_NEXT_VARIANT (new_tree) = NULL;
382 }
383
384 if (TYPE_STUB_DECL (type))
385 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
386
387 /* Lazily create pointer and reference types. */
388 TYPE_POINTER_TO (new_tree) = NULL;
389 TYPE_REFERENCE_TO (new_tree) = NULL;
390
391 switch (TREE_CODE (new_tree))
392 {
393 case INTEGER_TYPE:
394 case REAL_TYPE:
395 case FIXED_POINT_TYPE:
396 case ENUMERAL_TYPE:
397 case BOOLEAN_TYPE:
398 t = TYPE_MIN_VALUE (new_tree);
399 if (t && TREE_CODE (t) != INTEGER_CST)
400 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
401
402 t = TYPE_MAX_VALUE (new_tree);
403 if (t && TREE_CODE (t) != INTEGER_CST)
404 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
405 return new_tree;
406
407 case FUNCTION_TYPE:
408 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
409 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
410 return new_tree;
411
412 case ARRAY_TYPE:
413 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
414 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
415 break;
416
417 case RECORD_TYPE:
418 case UNION_TYPE:
419 case QUAL_UNION_TYPE:
420 {
421 tree f, nf = NULL;
422
423 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
424 {
425 t = remap_decl (f, id);
426 DECL_CONTEXT (t) = new_tree;
427 TREE_CHAIN (t) = nf;
428 nf = t;
429 }
430 TYPE_FIELDS (new_tree) = nreverse (nf);
431 }
432 break;
433
434 case OFFSET_TYPE:
435 default:
436 /* Shouldn't have been thought variable sized. */
437 gcc_unreachable ();
438 }
439
440 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
441 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
442
443 return new_tree;
444 }
445
446 tree
447 remap_type (tree type, copy_body_data *id)
448 {
449 tree *node;
450 tree tmp;
451
452 if (type == NULL)
453 return type;
454
455 /* See if we have remapped this type. */
456 node = (tree *) pointer_map_contains (id->decl_map, type);
457 if (node)
458 return *node;
459
460 /* The type only needs remapping if it's variably modified. */
461 if (! variably_modified_type_p (type, id->src_fn))
462 {
463 insert_decl_map (id, type, type);
464 return type;
465 }
466
467 id->remapping_type_depth++;
468 tmp = remap_type_1 (type, id);
469 id->remapping_type_depth--;
470
471 return tmp;
472 }
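/* For example (a sketch): the variably modified type of

     char buf[n];

   where n is a PARM_DECL of the inlined function has a TYPE_SIZE that
   refers to n, so it must be remapped along with n, whereas a type
   like 'int' or 'char[16]' is entered into the map unchanged.  */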
473
474 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
475 is NULL or TYPE has not been remapped before. */
476
477 static tree
478 remapped_type (tree type, copy_body_data *id)
479 {
480 tree *node;
481
482 if (type == NULL)
483 return type;
484
485 /* See if we have remapped this type. */
486 node = (tree *) pointer_map_contains (id->decl_map, type);
487 if (node)
488 return *node;
489 else
490 return NULL;
491 }
492
494 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
495
496 static bool
497 can_be_nonlocal (tree decl, copy_body_data *id)
498 {
499 /* We can not duplicate function decls. */
500 if (TREE_CODE (decl) == FUNCTION_DECL)
501 return true;
502
503 /* Local static vars must be non-local or we get multiple declaration
504 problems. */
505 if (TREE_CODE (decl) == VAR_DECL
506 && !auto_var_in_fn_p (decl, id->src_fn))
507 return true;
508
509 /* At the moment dwarf2out can handle only these types of nodes. We
510 can support more later. */
511 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
512 return false;
513
514 /* We must use the global type. We call remapped_type instead of
515 remap_type since we don't want to remap this type here if it
516 hasn't been remapped before. */
517 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
518 return false;
519
520 /* Without SSA we can't tell if the variable is used. */
521 if (!gimple_in_ssa_p (cfun))
522 return false;
523
524 /* Live variables must be copied so we can attach DECL_RTL. */
525 if (var_ann (decl))
526 return false;
527
528 return true;
529 }
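/* For example (a sketch, using GNU C nested functions): when inlining a
   function that contains

     static int counter;
     void helper (void) { counter++; }

   neither decl is duplicated into the caller; can_be_nonlocal returns
   true for both, and remap_decls below may instead record them in
   BLOCK_NONLOCALIZED_VARS of the copied BLOCK so that debug info keeps
   referring to the originals.  */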
530
531 static tree
532 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
533 {
534 tree old_var;
535 tree new_decls = NULL_TREE;
536
537 /* Remap its variables. */
538 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
539 {
540 tree new_var;
541
542 if (can_be_nonlocal (old_var, id))
543 {
544 if (TREE_CODE (old_var) == VAR_DECL
545 && ! DECL_EXTERNAL (old_var)
546 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
547 cfun->local_decls = tree_cons (NULL_TREE, old_var,
548 cfun->local_decls);
549 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
550 && !DECL_IGNORED_P (old_var)
551 && nonlocalized_list)
552 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
553 continue;
554 }
555
556 /* Remap the variable. */
557 new_var = remap_decl (old_var, id);
558
559 /* If we didn't remap this variable, we can't mess with its
560 TREE_CHAIN. If we remapped this variable to the return slot, it's
561 already declared somewhere else, so don't declare it here. */
562
563 if (new_var == id->retvar)
564 ;
565 else if (!new_var)
566 {
567 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
568 && !DECL_IGNORED_P (old_var)
569 && nonlocalized_list)
570 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
571 }
572 else
573 {
574 gcc_assert (DECL_P (new_var));
575 TREE_CHAIN (new_var) = new_decls;
576 new_decls = new_var;
577
578 /* Also copy value-expressions. */
579 if (TREE_CODE (new_var) == VAR_DECL
580 && DECL_HAS_VALUE_EXPR_P (new_var))
581 {
582 tree tem = DECL_VALUE_EXPR (new_var);
583 bool old_regimplify = id->regimplify;
584 id->remapping_type_depth++;
585 walk_tree (&tem, copy_tree_body_r, id, NULL);
586 id->remapping_type_depth--;
587 id->regimplify = old_regimplify;
588 SET_DECL_VALUE_EXPR (new_var, tem);
589 }
590 }
591 }
592
593 return nreverse (new_decls);
594 }
595
596 /* Copy the BLOCK to contain remapped versions of the variables
597 therein. And hook the new block into the block-tree. */
598
599 static void
600 remap_block (tree *block, copy_body_data *id)
601 {
602 tree old_block;
603 tree new_block;
604
605 /* Make the new block. */
606 old_block = *block;
607 new_block = make_node (BLOCK);
608 TREE_USED (new_block) = TREE_USED (old_block);
609 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
610 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
611 BLOCK_NONLOCALIZED_VARS (new_block)
612 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
613 *block = new_block;
614
615 /* Remap its variables. */
616 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
617 &BLOCK_NONLOCALIZED_VARS (new_block),
618 id);
619
620 if (id->transform_lang_insert_block)
621 id->transform_lang_insert_block (new_block);
622
623 /* Remember the remapped block. */
624 insert_decl_map (id, old_block, new_block);
625 }
626
627 /* Copy the whole block tree and root it in id->block. */
628 static tree
629 remap_blocks (tree block, copy_body_data *id)
630 {
631 tree t;
632 tree new_tree = block;
633
634 if (!block)
635 return NULL;
636
637 remap_block (&new_tree, id);
638 gcc_assert (new_tree != block);
639 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
640 prepend_lexical_block (new_tree, remap_blocks (t, id));
641 /* Blocks are in arbitrary order, but make things slightly prettier and do
642 not swap order when producing a copy. */
643 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
644 return new_tree;
645 }
646
647 static void
648 copy_statement_list (tree *tp)
649 {
650 tree_stmt_iterator oi, ni;
651 tree new_tree;
652
653 new_tree = alloc_stmt_list ();
654 ni = tsi_start (new_tree);
655 oi = tsi_start (*tp);
656 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
657 *tp = new_tree;
658
659 for (; !tsi_end_p (oi); tsi_next (&oi))
660 {
661 tree stmt = tsi_stmt (oi);
662 if (TREE_CODE (stmt) == STATEMENT_LIST)
663 copy_statement_list (&stmt);
664 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
665 }
666 }
667
668 static void
669 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
670 {
671 tree block = BIND_EXPR_BLOCK (*tp);
672 /* Copy (and replace) the statement. */
673 copy_tree_r (tp, walk_subtrees, NULL);
674 if (block)
675 {
676 remap_block (&block, id);
677 BIND_EXPR_BLOCK (*tp) = block;
678 }
679
680 if (BIND_EXPR_VARS (*tp))
681 /* This will remap a lot of the same decls again, but this should be
682 harmless. */
683 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
684 }
685
686
687 /* Create a new gimple_seq by remapping all the statements in BODY
688 using the inlining information in ID. */
689
690 static gimple_seq
691 remap_gimple_seq (gimple_seq body, copy_body_data *id)
692 {
693 gimple_stmt_iterator si;
694 gimple_seq new_body = NULL;
695
696 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
697 {
698 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
699 gimple_seq_add_stmt (&new_body, new_stmt);
700 }
701
702 return new_body;
703 }
704
705
706 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
707 block using the mapping information in ID. */
708
709 static gimple
710 copy_gimple_bind (gimple stmt, copy_body_data *id)
711 {
712 gimple new_bind;
713 tree new_block, new_vars;
714 gimple_seq body, new_body;
715
716 /* Copy the statement. Note that we purposely don't use copy_stmt
717 here because we need to remap statements as we copy. */
718 body = gimple_bind_body (stmt);
719 new_body = remap_gimple_seq (body, id);
720
721 new_block = gimple_bind_block (stmt);
722 if (new_block)
723 remap_block (&new_block, id);
724
725 /* This will remap a lot of the same decls again, but this should be
726 harmless. */
727 new_vars = gimple_bind_vars (stmt);
728 if (new_vars)
729 new_vars = remap_decls (new_vars, NULL, id);
730
731 new_bind = gimple_build_bind (new_vars, new_body, new_block);
732
733 return new_bind;
734 }
735
736
737 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
738 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
739 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
740 recursing into the children nodes of *TP. */
741
742 static tree
743 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
744 {
745 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
746 copy_body_data *id = (copy_body_data *) wi_p->info;
747 tree fn = id->src_fn;
748
749 if (TREE_CODE (*tp) == SSA_NAME)
750 {
751 *tp = remap_ssa_name (*tp, id);
752 *walk_subtrees = 0;
753 return NULL;
754 }
755 else if (auto_var_in_fn_p (*tp, fn))
756 {
757 /* Local variables and labels need to be replaced by equivalent
758 variables. We don't want to copy static variables; there's
759 only one of those, no matter how many times we inline the
760 containing function. Similarly for globals from an outer
761 function. */
762 tree new_decl;
763
764 /* Remap the declaration. */
765 new_decl = remap_decl (*tp, id);
766 gcc_assert (new_decl);
767 /* Replace this variable with the copy. */
768 STRIP_TYPE_NOPS (new_decl);
769 /* ??? The C++ frontend uses void * pointer zero to initialize
770 any other type. This confuses the middle-end type verification.
771 As cloned bodies do not go through gimplification again the fixup
772 there doesn't trigger. */
773 if (TREE_CODE (new_decl) == INTEGER_CST
774 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
775 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
776 *tp = new_decl;
777 *walk_subtrees = 0;
778 }
779 else if (TREE_CODE (*tp) == STATEMENT_LIST)
780 gcc_unreachable ();
781 else if (TREE_CODE (*tp) == SAVE_EXPR)
782 gcc_unreachable ();
783 else if (TREE_CODE (*tp) == LABEL_DECL
784 && (!DECL_CONTEXT (*tp)
785 || decl_function_context (*tp) == id->src_fn))
786 /* These may need to be remapped for EH handling. */
787 *tp = remap_decl (*tp, id);
788 else if (TYPE_P (*tp))
789 /* Types may need remapping as well. */
790 *tp = remap_type (*tp, id);
791 else if (CONSTANT_CLASS_P (*tp))
792 {
793 /* If this is a constant, we have to copy the node iff the type
794 will be remapped. copy_tree_r will not copy a constant. */
795 tree new_type = remap_type (TREE_TYPE (*tp), id);
796
797 if (new_type == TREE_TYPE (*tp))
798 *walk_subtrees = 0;
799
800 else if (TREE_CODE (*tp) == INTEGER_CST)
801 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
802 TREE_INT_CST_HIGH (*tp));
803 else
804 {
805 *tp = copy_node (*tp);
806 TREE_TYPE (*tp) = new_type;
807 }
808 }
809 else
810 {
811 /* Otherwise, just copy the node. Note that copy_tree_r already
812 knows not to copy VAR_DECLs, etc., so this is safe. */
813 if (TREE_CODE (*tp) == INDIRECT_REF)
814 {
815 /* Get rid of *& from inline substitutions that can happen when a
816 pointer argument is an ADDR_EXPR. */
817 tree decl = TREE_OPERAND (*tp, 0);
818 tree *n;
819
820 n = (tree *) pointer_map_contains (id->decl_map, decl);
821 if (n)
822 {
823 tree type, new_tree, old;
824
825 /* If we happen to get an ADDR_EXPR in n->value, strip
826 it manually here as we'll eventually get ADDR_EXPRs
827 which lie about their types pointed to. In this case
828 build_fold_indirect_ref wouldn't strip the
829 INDIRECT_REF, but we absolutely rely on that. As
830 fold_indirect_ref does other useful transformations,
831 try that first, though. */
832 type = TREE_TYPE (TREE_TYPE (*n));
833 new_tree = unshare_expr (*n);
834 old = *tp;
835 *tp = gimple_fold_indirect_ref (new_tree);
836 if (!*tp)
837 {
838 if (TREE_CODE (new_tree) == ADDR_EXPR)
839 {
840 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
841 type, new_tree);
842 /* ??? We should either assert here or build
843 a VIEW_CONVERT_EXPR instead of blindly leaking
844 incompatible types to our IL. */
845 if (! *tp)
846 *tp = TREE_OPERAND (new_tree, 0);
847 }
848 else
849 {
850 *tp = build1 (INDIRECT_REF, type, new_tree);
851 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
852 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
853 }
854 }
855 *walk_subtrees = 0;
856 return NULL;
857 }
858 }
859
860 /* Here is the "usual case". Copy this tree node, and then
861 tweak some special cases. */
862 copy_tree_r (tp, walk_subtrees, NULL);
863
864 /* Global variables we haven't seen yet need to go into referenced
865 vars, unless they are referenced only from types. */
866 if (gimple_in_ssa_p (cfun)
867 && TREE_CODE (*tp) == VAR_DECL
868 && id->remapping_type_depth == 0
869 && !processing_debug_stmt)
870 add_referenced_var (*tp);
871
872 /* We should never have TREE_BLOCK set on non-statements. */
873 if (EXPR_P (*tp))
874 gcc_assert (!TREE_BLOCK (*tp));
875
876 if (TREE_CODE (*tp) != OMP_CLAUSE)
877 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
878
879 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
880 {
881 /* The copied TARGET_EXPR has never been expanded, even if the
882 original node was expanded already. */
883 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
884 TREE_OPERAND (*tp, 3) = NULL_TREE;
885 }
886 else if (TREE_CODE (*tp) == ADDR_EXPR)
887 {
888 /* Variable substitution need not be simple. In particular,
889 the INDIRECT_REF substitution above. Make sure that
890 TREE_CONSTANT and friends are up-to-date. But make sure
891 to not improperly set TREE_BLOCK on some sub-expressions. */
892 int invariant = is_gimple_min_invariant (*tp);
893 tree block = id->block;
894 id->block = NULL_TREE;
895 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
896 id->block = block;
897
898 /* Handle the case where we substituted an INDIRECT_REF
899 into the operand of the ADDR_EXPR. */
900 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
901 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
902 else
903 recompute_tree_invariant_for_addr_expr (*tp);
904
905 /* If this used to be invariant, but is not any longer,
906 then regimplification is probably needed. */
907 if (invariant && !is_gimple_min_invariant (*tp))
908 id->regimplify = true;
909
910 *walk_subtrees = 0;
911 }
912 }
913
914 /* Keep iterating. */
915 return NULL_TREE;
916 }
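/* A sketch of the *& simplification handled above: if the caller passes
   the address of a local,

     foo (&x);        where    void foo (int *p) { ... *p ... }

   then after substituting p -> &x in the inlined body, a use of *p
   becomes *&x, which is folded back to plain x rather than leaving an
   INDIRECT_REF of an ADDR_EXPR in the IL.  */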
917
918
919 /* Called from copy_body_id via walk_tree. DATA is really a
920 `copy_body_data *'. */
921
922 tree
923 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
924 {
925 copy_body_data *id = (copy_body_data *) data;
926 tree fn = id->src_fn;
927 tree new_block;
928
929 /* Begin by recognizing trees that we'll completely rewrite for the
930 inlining context. Our output for these trees is completely
931 different from our input (e.g. RETURN_EXPR is deleted, and morphs
932 into an edge). Further down, we'll handle trees that get
933 duplicated and/or tweaked. */
934
935 /* When requested, RETURN_EXPRs should be transformed to just the
936 contained MODIFY_EXPR. The branch semantics of the return will
937 be handled elsewhere by manipulating the CFG rather than a statement. */
938 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
939 {
940 tree assignment = TREE_OPERAND (*tp, 0);
941
942 /* If we're returning something, just turn that into an
943 assignment into the equivalent of the original RESULT_DECL.
944 If the "assignment" is just the result decl, the result
945 decl has already been set (e.g. a recent "foo (&result_decl,
946 ...)"); just toss the entire RETURN_EXPR. */
947 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
948 {
949 /* Replace the RETURN_EXPR with (a copy of) the
950 MODIFY_EXPR hanging underneath. */
951 *tp = copy_node (assignment);
952 }
953 else /* Else the RETURN_EXPR returns no value. */
954 {
955 *tp = NULL;
956 return (tree) (void *)1;
957 }
958 }
959 else if (TREE_CODE (*tp) == SSA_NAME)
960 {
961 *tp = remap_ssa_name (*tp, id);
962 *walk_subtrees = 0;
963 return NULL;
964 }
965
966 /* Local variables and labels need to be replaced by equivalent
967 variables. We don't want to copy static variables; there's only
968 one of those, no matter how many times we inline the containing
969 function. Similarly for globals from an outer function. */
970 else if (auto_var_in_fn_p (*tp, fn))
971 {
972 tree new_decl;
973
974 /* Remap the declaration. */
975 new_decl = remap_decl (*tp, id);
976 gcc_assert (new_decl);
977 /* Replace this variable with the copy. */
978 STRIP_TYPE_NOPS (new_decl);
979 *tp = new_decl;
980 *walk_subtrees = 0;
981 }
982 else if (TREE_CODE (*tp) == STATEMENT_LIST)
983 copy_statement_list (tp);
984 else if (TREE_CODE (*tp) == SAVE_EXPR
985 || TREE_CODE (*tp) == TARGET_EXPR)
986 remap_save_expr (tp, id->decl_map, walk_subtrees);
987 else if (TREE_CODE (*tp) == LABEL_DECL
988 && (! DECL_CONTEXT (*tp)
989 || decl_function_context (*tp) == id->src_fn))
990 /* These may need to be remapped for EH handling. */
991 *tp = remap_decl (*tp, id);
992 else if (TREE_CODE (*tp) == BIND_EXPR)
993 copy_bind_expr (tp, walk_subtrees, id);
994 /* Types may need remapping as well. */
995 else if (TYPE_P (*tp))
996 *tp = remap_type (*tp, id);
997
998 /* If this is a constant, we have to copy the node iff the type will be
999 remapped. copy_tree_r will not copy a constant. */
1000 else if (CONSTANT_CLASS_P (*tp))
1001 {
1002 tree new_type = remap_type (TREE_TYPE (*tp), id);
1003
1004 if (new_type == TREE_TYPE (*tp))
1005 *walk_subtrees = 0;
1006
1007 else if (TREE_CODE (*tp) == INTEGER_CST)
1008 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1009 TREE_INT_CST_HIGH (*tp));
1010 else
1011 {
1012 *tp = copy_node (*tp);
1013 TREE_TYPE (*tp) = new_type;
1014 }
1015 }
1016
1017 /* Otherwise, just copy the node. Note that copy_tree_r already
1018 knows not to copy VAR_DECLs, etc., so this is safe. */
1019 else
1020 {
1021 /* Here we handle trees that are not completely rewritten.
1022 First we detect some inlining-induced bogosities for
1023 discarding. */
1024 if (TREE_CODE (*tp) == MODIFY_EXPR
1025 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1026 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1027 {
1028 /* Some assignments VAR = VAR; don't generate any rtl code
1029 and thus don't count as variable modification. Avoid
1030 keeping bogosities like 0 = 0. */
1031 tree decl = TREE_OPERAND (*tp, 0), value;
1032 tree *n;
1033
1034 n = (tree *) pointer_map_contains (id->decl_map, decl);
1035 if (n)
1036 {
1037 value = *n;
1038 STRIP_TYPE_NOPS (value);
1039 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1040 {
1041 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1042 return copy_tree_body_r (tp, walk_subtrees, data);
1043 }
1044 }
1045 }
1046 else if (TREE_CODE (*tp) == INDIRECT_REF)
1047 {
1048 /* Get rid of *& from inline substitutions that can happen when a
1049 pointer argument is an ADDR_EXPR. */
1050 tree decl = TREE_OPERAND (*tp, 0);
1051 tree *n;
1052
1053 n = (tree *) pointer_map_contains (id->decl_map, decl);
1054 if (n)
1055 {
1056 tree new_tree;
1057 tree old;
1058 /* If we happen to get an ADDR_EXPR in n->value, strip
1059 it manually here as we'll eventually get ADDR_EXPRs
1060 which lie about their types pointed to. In this case
1061 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1062 but we absolutely rely on that. As fold_indirect_ref
1063 does other useful transformations, try that first, though. */
1064 tree type = TREE_TYPE (TREE_TYPE (*n));
1065 if (id->do_not_unshare)
1066 new_tree = *n;
1067 else
1068 new_tree = unshare_expr (*n);
1069 old = *tp;
1070 *tp = gimple_fold_indirect_ref (new_tree);
1071 if (! *tp)
1072 {
1073 if (TREE_CODE (new_tree) == ADDR_EXPR)
1074 {
1075 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1076 type, new_tree);
1077 /* ??? We should either assert here or build
1078 a VIEW_CONVERT_EXPR instead of blindly leaking
1079 incompatible types to our IL. */
1080 if (! *tp)
1081 *tp = TREE_OPERAND (new_tree, 0);
1082 }
1083 else
1084 {
1085 *tp = build1 (INDIRECT_REF, type, new_tree);
1086 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1087 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1088 }
1089 }
1090 *walk_subtrees = 0;
1091 return NULL;
1092 }
1093 }
1094
1095 /* Here is the "usual case". Copy this tree node, and then
1096 tweak some special cases. */
1097 copy_tree_r (tp, walk_subtrees, NULL);
1098
1099 /* Global variables we haven't seen yet need to go into referenced
1100 vars, unless they are referenced only from types or debug stmts. */
1101 if (gimple_in_ssa_p (cfun)
1102 && TREE_CODE (*tp) == VAR_DECL
1103 && id->remapping_type_depth == 0
1104 && !processing_debug_stmt)
1105 add_referenced_var (*tp);
1106
1107 /* If EXPR has a block defined, map it to the newly constructed block.
1108 When inlining we want EXPRs without a block to appear in the block
1109 of the function call if we are not remapping a type. */
1110 if (EXPR_P (*tp))
1111 {
1112 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1113 if (TREE_BLOCK (*tp))
1114 {
1115 tree *n;
1116 n = (tree *) pointer_map_contains (id->decl_map,
1117 TREE_BLOCK (*tp));
1118 gcc_assert (n || id->remapping_type_depth != 0);
1119 if (n)
1120 new_block = *n;
1121 }
1122 TREE_BLOCK (*tp) = new_block;
1123 }
1124
1125 if (TREE_CODE (*tp) != OMP_CLAUSE)
1126 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1127
1128 /* The copied TARGET_EXPR has never been expanded, even if the
1129 original node was expanded already. */
1130 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1131 {
1132 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1133 TREE_OPERAND (*tp, 3) = NULL_TREE;
1134 }
1135
1136 /* Variable substitution need not be simple. In particular, the
1137 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1138 and friends are up-to-date. */
1139 else if (TREE_CODE (*tp) == ADDR_EXPR)
1140 {
1141 int invariant = is_gimple_min_invariant (*tp);
1142 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1143
1144 /* Handle the case where we substituted an INDIRECT_REF
1145 into the operand of the ADDR_EXPR. */
1146 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1147 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1148 else
1149 recompute_tree_invariant_for_addr_expr (*tp);
1150
1151 /* If this used to be invariant, but is not any longer,
1152 then regimplification is probably needed. */
1153 if (invariant && !is_gimple_min_invariant (*tp))
1154 id->regimplify = true;
1155
1156 *walk_subtrees = 0;
1157 }
1158 }
1159
1160 /* Keep iterating. */
1161 return NULL_TREE;
1162 }
1163
1164 /* Helper for remap_gimple_stmt. Given an EH region number for the
1165 source function, map that to the duplicate EH region number in
1166 the destination function. */
1167
1168 static int
1169 remap_eh_region_nr (int old_nr, copy_body_data *id)
1170 {
1171 eh_region old_r, new_r;
1172 void **slot;
1173
1174 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1175 slot = pointer_map_contains (id->eh_map, old_r);
1176 new_r = (eh_region) *slot;
1177
1178 return new_r->index;
1179 }
1180
1181 /* Similar, but operate on INTEGER_CSTs. */
1182
1183 static tree
1184 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1185 {
1186 int old_nr, new_nr;
1187
1188 old_nr = tree_low_cst (old_t_nr, 0);
1189 new_nr = remap_eh_region_nr (old_nr, id);
1190
1191 return build_int_cst (NULL, new_nr);
1192 }
1193
1194 /* Helper for copy_bb. Remap statement STMT using the inlining
1195 information in ID. Return the new statement copy. */
1196
1197 static gimple
1198 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1199 {
1200 gimple copy = NULL;
1201 struct walk_stmt_info wi;
1202 tree new_block;
1203 bool skip_first = false;
1204
1205 /* Begin by recognizing trees that we'll completely rewrite for the
1206 inlining context. Our output for these trees is completely
1207 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1208 into an edge). Further down, we'll handle trees that get
1209 duplicated and/or tweaked. */
1210
1211 /* When requested, GIMPLE_RETURNs should be transformed to just the
1212 contained GIMPLE_ASSIGN. The branch semantics of the return will
1213 be handled elsewhere by manipulating the CFG rather than the
1214 statement. */
1215 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1216 {
1217 tree retval = gimple_return_retval (stmt);
1218
1219 /* If we're returning something, just turn that into an
1220 assignment into the equivalent of the original RESULT_DECL.
1221 If RETVAL is just the result decl, the result decl has
1222 already been set (e.g. a recent "foo (&result_decl, ...)");
1223 just toss the entire GIMPLE_RETURN. */
1224 if (retval && TREE_CODE (retval) != RESULT_DECL)
1225 {
1226 copy = gimple_build_assign (id->retvar, retval);
1227 /* id->retvar is already substituted. Skip it on later remapping. */
1228 skip_first = true;
1229 }
1230 else
1231 return gimple_build_nop ();
1232 }
1233 else if (gimple_has_substatements (stmt))
1234 {
1235 gimple_seq s1, s2;
1236
1237 /* When cloning bodies from the C++ front end, we will be handed bodies
1238 in High GIMPLE form. Handle here all the High GIMPLE statements that
1239 have embedded statements. */
1240 switch (gimple_code (stmt))
1241 {
1242 case GIMPLE_BIND:
1243 copy = copy_gimple_bind (stmt, id);
1244 break;
1245
1246 case GIMPLE_CATCH:
1247 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1248 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1249 break;
1250
1251 case GIMPLE_EH_FILTER:
1252 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1253 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1254 break;
1255
1256 case GIMPLE_TRY:
1257 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1258 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1259 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1260 break;
1261
1262 case GIMPLE_WITH_CLEANUP_EXPR:
1263 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1264 copy = gimple_build_wce (s1);
1265 break;
1266
1267 case GIMPLE_OMP_PARALLEL:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_parallel
1270 (s1,
1271 gimple_omp_parallel_clauses (stmt),
1272 gimple_omp_parallel_child_fn (stmt),
1273 gimple_omp_parallel_data_arg (stmt));
1274 break;
1275
1276 case GIMPLE_OMP_TASK:
1277 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1278 copy = gimple_build_omp_task
1279 (s1,
1280 gimple_omp_task_clauses (stmt),
1281 gimple_omp_task_child_fn (stmt),
1282 gimple_omp_task_data_arg (stmt),
1283 gimple_omp_task_copy_fn (stmt),
1284 gimple_omp_task_arg_size (stmt),
1285 gimple_omp_task_arg_align (stmt));
1286 break;
1287
1288 case GIMPLE_OMP_FOR:
1289 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1290 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1291 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1292 gimple_omp_for_collapse (stmt), s2);
1293 {
1294 size_t i;
1295 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1296 {
1297 gimple_omp_for_set_index (copy, i,
1298 gimple_omp_for_index (stmt, i));
1299 gimple_omp_for_set_initial (copy, i,
1300 gimple_omp_for_initial (stmt, i));
1301 gimple_omp_for_set_final (copy, i,
1302 gimple_omp_for_final (stmt, i));
1303 gimple_omp_for_set_incr (copy, i,
1304 gimple_omp_for_incr (stmt, i));
1305 gimple_omp_for_set_cond (copy, i,
1306 gimple_omp_for_cond (stmt, i));
1307 }
1308 }
1309 break;
1310
1311 case GIMPLE_OMP_MASTER:
1312 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1313 copy = gimple_build_omp_master (s1);
1314 break;
1315
1316 case GIMPLE_OMP_ORDERED:
1317 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1318 copy = gimple_build_omp_ordered (s1);
1319 break;
1320
1321 case GIMPLE_OMP_SECTION:
1322 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1323 copy = gimple_build_omp_section (s1);
1324 break;
1325
1326 case GIMPLE_OMP_SECTIONS:
1327 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1328 copy = gimple_build_omp_sections
1329 (s1, gimple_omp_sections_clauses (stmt));
1330 break;
1331
1332 case GIMPLE_OMP_SINGLE:
1333 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1334 copy = gimple_build_omp_single
1335 (s1, gimple_omp_single_clauses (stmt));
1336 break;
1337
1338 case GIMPLE_OMP_CRITICAL:
1339 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1340 copy
1341 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1342 break;
1343
1344 default:
1345 gcc_unreachable ();
1346 }
1347 }
1348 else
1349 {
1350 if (gimple_assign_copy_p (stmt)
1351 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1352 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1353 {
1354 /* Here we handle statements that are not completely rewritten.
1355 First we detect some inlining-induced bogosities for
1356 discarding. */
1357
1358 /* Some assignments VAR = VAR; don't generate any rtl code
1359 and thus don't count as variable modification. Avoid
1360 keeping bogosities like 0 = 0. */
1361 tree decl = gimple_assign_lhs (stmt), value;
1362 tree *n;
1363
1364 n = (tree *) pointer_map_contains (id->decl_map, decl);
1365 if (n)
1366 {
1367 value = *n;
1368 STRIP_TYPE_NOPS (value);
1369 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1370 return gimple_build_nop ();
1371 }
1372 }
1373
1374 if (gimple_debug_bind_p (stmt))
1375 {
1376 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1377 gimple_debug_bind_get_value (stmt),
1378 stmt);
1379 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1380 return copy;
1381 }
1382
1383 /* Create a new deep copy of the statement. */
1384 copy = gimple_copy (stmt);
1385
1386 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1387 RESX and EH_DISPATCH. */
1388 if (id->eh_map)
1389 switch (gimple_code (copy))
1390 {
1391 case GIMPLE_CALL:
1392 {
1393 tree r, fndecl = gimple_call_fndecl (copy);
1394 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1395 switch (DECL_FUNCTION_CODE (fndecl))
1396 {
1397 case BUILT_IN_EH_COPY_VALUES:
1398 r = gimple_call_arg (copy, 1);
1399 r = remap_eh_region_tree_nr (r, id);
1400 gimple_call_set_arg (copy, 1, r);
1401 /* FALLTHRU */
1402
1403 case BUILT_IN_EH_POINTER:
1404 case BUILT_IN_EH_FILTER:
1405 r = gimple_call_arg (copy, 0);
1406 r = remap_eh_region_tree_nr (r, id);
1407 gimple_call_set_arg (copy, 0, r);
1408 break;
1409
1410 default:
1411 break;
1412 }
1413
1414 /* Reset alias info if we didn't apply measures to
1415 keep it valid over inlining by setting DECL_PT_UID. */
1416 if (!id->src_cfun->gimple_df
1417 || !id->src_cfun->gimple_df->ipa_pta)
1418 gimple_call_reset_alias_info (copy);
1419 }
1420 break;
1421
1422 case GIMPLE_RESX:
1423 {
1424 int r = gimple_resx_region (copy);
1425 r = remap_eh_region_nr (r, id);
1426 gimple_resx_set_region (copy, r);
1427 }
1428 break;
1429
1430 case GIMPLE_EH_DISPATCH:
1431 {
1432 int r = gimple_eh_dispatch_region (copy);
1433 r = remap_eh_region_nr (r, id);
1434 gimple_eh_dispatch_set_region (copy, r);
1435 }
1436 break;
1437
1438 default:
1439 break;
1440 }
1441 }
1442
1443 /* If STMT has a block defined, map it to the newly constructed
1444 block. When inlining we want statements without a block to
1445 appear in the block of the function call. */
1446 new_block = id->block;
1447 if (gimple_block (copy))
1448 {
1449 tree *n;
1450 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1451 gcc_assert (n);
1452 new_block = *n;
1453 }
1454
1455 gimple_set_block (copy, new_block);
1456
1457 if (gimple_debug_bind_p (copy))
1458 return copy;
1459
1460 /* Remap all the operands in COPY. */
1461 memset (&wi, 0, sizeof (wi));
1462 wi.info = id;
1463 if (skip_first)
1464 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1465 else
1466 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1467
1468 /* Clear the copied virtual operands. We are not remapping them here
1469 but are going to recreate them from scratch. */
1470 if (gimple_has_mem_ops (copy))
1471 {
1472 gimple_set_vdef (copy, NULL_TREE);
1473 gimple_set_vuse (copy, NULL_TREE);
1474 }
1475
1476 return copy;
1477 }
1478
1479
1480 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1481 later. */
1482
1483 static basic_block
1484 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1485 gcov_type count_scale)
1486 {
1487 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1488 basic_block copy_basic_block;
1489 tree decl;
1490 gcov_type freq;
1491 basic_block prev;
1492
1493 /* Search for previous copied basic block. */
1494 prev = bb->prev_bb;
1495 while (!prev->aux)
1496 prev = prev->prev_bb;
1497
1498 /* create_basic_block() will append every new block to
1499 basic_block_info automatically. */
1500 copy_basic_block = create_basic_block (NULL, (void *) 0,
1501 (basic_block) prev->aux);
1502 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1503
1504 /* We are going to rebuild frequencies from scratch. These values
1505 have only minor importance for driving canonicalize_loop_headers. */
1506 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1507
1508 /* We recompute frequencies after inlining, so this is quite safe. */
1509 if (freq > BB_FREQ_MAX)
1510 freq = BB_FREQ_MAX;
1511 copy_basic_block->frequency = freq;
1512
1513 copy_gsi = gsi_start_bb (copy_basic_block);
1514
1515 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1516 {
1517 gimple stmt = gsi_stmt (gsi);
1518 gimple orig_stmt = stmt;
1519
1520 id->regimplify = false;
1521 stmt = remap_gimple_stmt (stmt, id);
1522 if (gimple_nop_p (stmt))
1523 continue;
1524
1525 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1526 seq_gsi = copy_gsi;
1527
1528 /* With return slot optimization we can end up with
1529 non-gimple (foo *)&this->m, fix that here. */
1530 if (is_gimple_assign (stmt)
1531 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1532 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1533 {
1534 tree new_rhs;
1535 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1536 gimple_assign_rhs1 (stmt),
1537 true, NULL, false, GSI_NEW_STMT);
1538 gimple_assign_set_rhs1 (stmt, new_rhs);
1539 id->regimplify = false;
1540 }
1541
1542 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1543
1544 if (id->regimplify)
1545 gimple_regimplify_operands (stmt, &seq_gsi);
1546
1547 /* If copy_basic_block has been empty at the start of this iteration,
1548 call gsi_start_bb again to get at the newly added statements. */
1549 if (gsi_end_p (copy_gsi))
1550 copy_gsi = gsi_start_bb (copy_basic_block);
1551 else
1552 gsi_next (&copy_gsi);
1553
1554 /* Process the new statement. The call to gimple_regimplify_operands
1555 possibly turned the statement into multiple statements; we
1556 need to process all of them. */
1557 do
1558 {
1559 tree fn;
1560
1561 stmt = gsi_stmt (copy_gsi);
1562 if (is_gimple_call (stmt)
1563 && gimple_call_va_arg_pack_p (stmt)
1564 && id->gimple_call)
1565 {
1566 /* __builtin_va_arg_pack () should be replaced by
1567 all arguments corresponding to ... in the caller. */
1568 tree p;
1569 gimple new_call;
1570 VEC(tree, heap) *argarray;
1571 size_t nargs = gimple_call_num_args (id->gimple_call);
1572 size_t n;
1573
1574 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1575 nargs--;
1576
1577 /* Create the new array of arguments. */
1578 n = nargs + gimple_call_num_args (stmt);
1579 argarray = VEC_alloc (tree, heap, n);
1580 VEC_safe_grow (tree, heap, argarray, n);
1581
1582 /* Copy all the arguments before '...' */
1583 memcpy (VEC_address (tree, argarray),
1584 gimple_call_arg_ptr (stmt, 0),
1585 gimple_call_num_args (stmt) * sizeof (tree));
1586
1587 /* Append the arguments passed in '...' */
1588 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1589 gimple_call_arg_ptr (id->gimple_call, 0)
1590 + (gimple_call_num_args (id->gimple_call) - nargs),
1591 nargs * sizeof (tree));
1592
1593 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1594 argarray);
1595
1596 VEC_free (tree, heap, argarray);
1597
1598 /* Copy all GIMPLE_CALL flags, location and block, except
1599 GF_CALL_VA_ARG_PACK. */
1600 gimple_call_copy_flags (new_call, stmt);
1601 gimple_call_set_va_arg_pack (new_call, false);
1602 gimple_set_location (new_call, gimple_location (stmt));
1603 gimple_set_block (new_call, gimple_block (stmt));
1604 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1605
1606 gsi_replace (&copy_gsi, new_call, false);
1607 stmt = new_call;
1608 }
1609 else if (is_gimple_call (stmt)
1610 && id->gimple_call
1611 && (decl = gimple_call_fndecl (stmt))
1612 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1613 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1614 {
1615 /* __builtin_va_arg_pack_len () should be replaced by
1616 the number of anonymous arguments. */
1617 size_t nargs = gimple_call_num_args (id->gimple_call);
1618 tree count, p;
1619 gimple new_stmt;
1620
1621 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1622 nargs--;
1623
1624 count = build_int_cst (integer_type_node, nargs);
1625 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1626 gsi_replace (&copy_gsi, new_stmt, false);
1627 stmt = new_stmt;
1628 }
1629
1630 /* Statements produced by inlining can be unfolded, especially
1631 when we constant propagated some operands. We can't fold
1632 them right now for two reasons:
1633 1) folding requires SSA_NAME_DEF_STMTs to be correct
1634 2) we can't change function calls to builtins.
1635 So we just mark the statement for later folding. We mark
1636 all new statements, instead of just the statements that changed
1637 by some nontrivial substitution, so that even statements made
1638 foldable indirectly are updated. If this turns out to be
1639 expensive, copy_body can be told to watch for nontrivial
1640 changes. */
1641 if (id->statements_to_fold)
1642 pointer_set_insert (id->statements_to_fold, stmt);
1643
1644 /* We're duplicating a CALL_EXPR. Find any corresponding
1645 callgraph edges and update or duplicate them. */
1646 if (is_gimple_call (stmt))
1647 {
1648 struct cgraph_edge *edge;
1649 int flags;
1650
1651 switch (id->transform_call_graph_edges)
1652 {
1653 case CB_CGE_DUPLICATE:
1654 edge = cgraph_edge (id->src_node, orig_stmt);
1655 if (edge)
1656 {
1657 int edge_freq = edge->frequency;
1658 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1659 gimple_uid (stmt),
1660 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1661 edge->frequency, true);
1662 /* We could also just rescale the frequency, but
1663 doing so would introduce roundoff errors and make
1664 verifier unhappy. */
1665 edge->frequency
1666 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1667 copy_basic_block);
1668 if (dump_file
1669 && profile_status_for_function (cfun) != PROFILE_ABSENT
1670 && (edge_freq > edge->frequency + 10
1671 || edge_freq < edge->frequency - 10))
1672 {
1673 fprintf (dump_file, "Edge frequency estimated by "
1674 "cgraph %i diverge from inliner's estimate %i\n",
1675 edge_freq,
1676 edge->frequency);
1677 fprintf (dump_file,
1678 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1679 bb->index,
1680 bb->frequency,
1681 copy_basic_block->frequency);
1682 }
1683 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1684 }
1685 break;
1686
1687 case CB_CGE_MOVE_CLONES:
1688 cgraph_set_call_stmt_including_clones (id->dst_node,
1689 orig_stmt, stmt);
1690 edge = cgraph_edge (id->dst_node, stmt);
1691 break;
1692
1693 case CB_CGE_MOVE:
1694 edge = cgraph_edge (id->dst_node, orig_stmt);
1695 if (edge)
1696 cgraph_set_call_stmt (edge, stmt);
1697 break;
1698
1699 default:
1700 gcc_unreachable ();
1701 }
1702
1703 /* Constant propagation on arguments done during inlining
1704 may create a new direct call. Produce an edge for it. */
1705 if ((!edge
1706 || (edge->indirect_inlining_edge
1707 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1708 && (fn = gimple_call_fndecl (stmt)) != NULL)
1709 {
1710 struct cgraph_node *dest = cgraph_node (fn);
1711
1712 /* We have a missing edge in the callgraph. This can happen
1713 when previous inlining turned an indirect call into a
1714 direct call by constant propagating arguments, or when we are
1715 producing a dead clone (for further cloning). In all
1716 other cases we hit a bug (incorrect node sharing is the
1717 most common reason for missing edges). */
1718 gcc_assert (dest->needed || !dest->analyzed
1719 || dest->address_taken
1720 || !id->src_node->analyzed);
1721 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1722 cgraph_create_edge_including_clones
1723 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1724 compute_call_stmt_bb_frequency (id->dst_node->decl,
1725 copy_basic_block),
1726 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1727 else
1728 cgraph_create_edge (id->dst_node, dest, stmt,
1729 bb->count,
1730 compute_call_stmt_bb_frequency
1731 (id->dst_node->decl, copy_basic_block),
1732 bb->loop_depth)->inline_failed
1733 = CIF_ORIGINALLY_INDIRECT_CALL;
1734 if (dump_file)
1735 {
1736 fprintf (dump_file, "Created new direct edge to %s\n",
1737 cgraph_node_name (dest));
1738 }
1739 }
1740
1741 flags = gimple_call_flags (stmt);
1742 if (flags & ECF_MAY_BE_ALLOCA)
1743 cfun->calls_alloca = true;
1744 if (flags & ECF_RETURNS_TWICE)
1745 cfun->calls_setjmp = true;
1746 }
1747
1748 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1749 id->eh_map, id->eh_lp_nr);
1750
1751 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1752 {
1753 ssa_op_iter i;
1754 tree def;
1755
1756 find_new_referenced_vars (gsi_stmt (copy_gsi));
1757 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1758 if (TREE_CODE (def) == SSA_NAME)
1759 SSA_NAME_DEF_STMT (def) = stmt;
1760 }
1761
1762 gsi_next (&copy_gsi);
1763 }
1764 while (!gsi_end_p (copy_gsi));
1765
1766 copy_gsi = gsi_last_bb (copy_basic_block);
1767 }
1768
1769 return copy_basic_block;
1770 }
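/* A rough numerical sketch of the scaling above (count_scale itself is
   computed by copy_bb's caller from the call site's profile relative to
   the callee entry block, so the exact value here is an assumption): if
   the call site executes half as often as the inlined function's entry,
   count_scale is about REG_BR_PROB_BASE / 2, and a block with
   bb->count == 1000 is copied with

     copy_basic_block->count = 1000 * count_scale / REG_BR_PROB_BASE == 500

   Frequencies are similarly scaled, capped at BB_FREQ_MAX, and later
   recomputed after inlining.  */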
1771
1772 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1773 form is quite easy, since dominator relationship for old basic blocks does
1774 not change.
1775
1776 There is, however, an exception where inlining might change the dominator
1777 relation across EH edges from basic blocks within the inlined function
1778 that lead to landing pads in the function we inline into.
1779
1780 The function fills in PHI_RESULTs of such PHI nodes if they refer
1781 to gimple regs. Otherwise, the function marks PHI_RESULT of such
1782 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1783 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1784 set, and this means that there will be no overlapping live ranges
1785 for the underlying symbol.
1786
1787 This might change in the future if we allow redirecting of EH edges and
1788 we might then want to change the way we build the CFG pre-inlining to
1789 include all the possible edges. */
1790 static void
1791 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1792 bool can_throw, bool nonlocal_goto)
1793 {
1794 edge e;
1795 edge_iterator ei;
1796
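   /* Only look at successor edges that leave the copied region, i.e.
      whose destination is a pre-existing block of the function we are
      inlining into.  */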
1797 FOR_EACH_EDGE (e, ei, bb->succs)
1798 if (!e->dest->aux
1799 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1800 {
1801 gimple phi;
1802 gimple_stmt_iterator si;
1803
1804 if (!nonlocal_goto)
1805 gcc_assert (e->flags & EDGE_EH);
1806
1807 if (!can_throw)
1808 gcc_assert (!(e->flags & EDGE_EH));
1809
1810 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1811 {
1812 edge re;
1813
1814 phi = gsi_stmt (si);
1815
1816 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1817 gcc_assert (!e->dest->aux);
1818
1819 gcc_assert ((e->flags & EDGE_EH)
1820 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1821
1822 if (!is_gimple_reg (PHI_RESULT (phi)))
1823 {
1824 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1825 continue;
1826 }
1827
1828 re = find_edge (ret_bb, e->dest);
1829 gcc_assert (re);
1830 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1831 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1832
1833 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1834 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1835 }
1836 }
1837 }
1838
1839
1840 /* Copy edges from BB into its copy constructed earlier, scale profile
1841 accordingly. Edges will be taken care of later. Assume the aux
1842 pointers point to the copies of each BB. Return true if any
1843 debug stmts are left after a statement that must end the basic block. */
1844
1845 static bool
1846 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1847 {
1848 basic_block new_bb = (basic_block) bb->aux;
1849 edge_iterator ei;
1850 edge old_edge;
1851 gimple_stmt_iterator si;
1852 int flags;
1853 bool need_debug_cleanup = false;
1854
1855 /* Use the indices from the original blocks to create edges for the
1856 new ones. */
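   /* EH edges are not copied here; they are recreated further below
      from the copied statements via make_eh_edges and
      make_eh_dispatch_edges.  */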
1857 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1858 if (!(old_edge->flags & EDGE_EH))
1859 {
1860 edge new_edge;
1861
1862 flags = old_edge->flags;
1863
1864 /* Return edges do get a FALLTHRU flag when they get inlined. */
1865 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1866 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1867 flags |= EDGE_FALLTHRU;
1868 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1869 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1870 new_edge->probability = old_edge->probability;
1871 }
1872
1873 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1874 return false;
1875
1876 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1877 {
1878 gimple copy_stmt;
1879 bool can_throw, nonlocal_goto;
1880
1881 copy_stmt = gsi_stmt (si);
1882 if (!is_gimple_debug (copy_stmt))
1883 {
1884 update_stmt (copy_stmt);
1885 if (gimple_in_ssa_p (cfun))
1886 mark_symbols_for_renaming (copy_stmt);
1887 }
1888
1889 /* Do this before the possible split_block. */
1890 gsi_next (&si);
1891
1892 /* If this tree could throw an exception, there are two
1893 cases where we need to add abnormal edge(s): the
1894 tree wasn't in a region and there is a "current
1895 region" in the caller; or the original tree had
1896 EH edges. In both cases split the block after the tree,
1897 and add abnormal edge(s) as needed; we need both
1898 those from the callee and the caller.
1899 We check whether the copy can throw, because the const
1900 propagation can change an INDIRECT_REF which throws
1901 into a COMPONENT_REF which doesn't. If the copy
1902 can throw, the original could also throw. */
1903 can_throw = stmt_can_throw_internal (copy_stmt);
1904 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1905
1906 if (can_throw || nonlocal_goto)
1907 {
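   /* Skip trailing debug stmts when looking for the statement after
      COPY_STMT; if only debug stmts remain, they will have to be moved
      to the successor block afterwards.  */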
1908 if (!gsi_end_p (si))
1909 {
1910 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1911 gsi_next (&si);
1912 if (gsi_end_p (si))
1913 need_debug_cleanup = true;
1914 }
1915 if (!gsi_end_p (si))
1916 /* Note that bb's predecessor edges aren't necessarily
1917 right at this point; split_block doesn't care. */
1918 {
1919 edge e = split_block (new_bb, copy_stmt);
1920
1921 new_bb = e->dest;
1922 new_bb->aux = e->src->aux;
1923 si = gsi_start_bb (new_bb);
1924 }
1925 }
1926
1927 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1928 make_eh_dispatch_edges (copy_stmt);
1929 else if (can_throw)
1930 make_eh_edges (copy_stmt);
1931
1932 if (nonlocal_goto)
1933 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1934
1935 if ((can_throw || nonlocal_goto)
1936 && gimple_in_ssa_p (cfun))
1937 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1938 can_throw, nonlocal_goto);
1939 }
1940 return need_debug_cleanup;
1941 }
1942
1943 /* Copy the PHIs. All blocks and edges are copied, some blocks
1944 were possibly split and new outgoing EH edges inserted.
1945 BB points to the block of the original function and the AUX pointers link
1946 the original and newly copied blocks. */
1947
1948 static void
1949 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1950 {
1951 basic_block const new_bb = (basic_block) bb->aux;
1952 edge_iterator ei;
1953 gimple phi;
1954 gimple_stmt_iterator si;
1955
1956 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1957 {
1958 tree res, new_res;
1959 gimple new_phi;
1960 edge new_edge;
1961
1962 phi = gsi_stmt (si);
1963 res = PHI_RESULT (phi);
1964 new_res = res;
1965 if (is_gimple_reg (res))
1966 {
1967 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1968 SSA_NAME_DEF_STMT (new_res)
1969 = new_phi = create_phi_node (new_res, new_bb);
1970 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1971 {
1972 edge const old_edge
1973 = find_edge ((basic_block) new_edge->src->aux, bb);
1974 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1975 tree new_arg = arg;
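   /* Remap the PHI argument; id->block is cleared around the walk and
      restored afterwards so the remapped argument is not associated
      with any lexical block.  */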
1976 tree block = id->block;
1977 id->block = NULL_TREE;
1978 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1979 id->block = block;
1980 gcc_assert (new_arg);
1981 /* With return slot optimization we can end up with
1982 non-gimple (foo *)&this->m, fix that here. */
1983 if (TREE_CODE (new_arg) != SSA_NAME
1984 && TREE_CODE (new_arg) != FUNCTION_DECL
1985 && !is_gimple_val (new_arg))
1986 {
1987 gimple_seq stmts = NULL;
1988 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1989 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1990 }
1991 add_phi_arg (new_phi, new_arg, new_edge,
1992 gimple_phi_arg_location_from_edge (phi, old_edge));
1993 }
1994 }
1995 }
1996 }
1997
1998
1999 /* Wrapper for remap_decl so it can be used as a callback. */
2000
2001 static tree
2002 remap_decl_1 (tree decl, void *data)
2003 {
2004 return remap_decl (decl, (copy_body_data *) data);
2005 }
2006
2007 /* Build struct function and associated datastructures for the new clone
2008 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2009
2010 static void
2011 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2012 {
2013 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2014 gcov_type count_scale;
2015
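  /* Scale factor between the execution count requested for the clone and
     the entry count of the source function, in REG_BR_PROB_BASE units.  */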
2016 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2017 count_scale = (REG_BR_PROB_BASE * count
2018 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2019 else
2020 count_scale = REG_BR_PROB_BASE;
2021
2022 /* Register specific tree functions. */
2023 gimple_register_cfg_hooks ();
2024
2025 /* Get clean struct function. */
2026 push_struct_function (new_fndecl);
2027
2028 /* We will rebuild these, so just sanity check that they are empty. */
2029 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2030 gcc_assert (cfun->local_decls == NULL);
2031 gcc_assert (cfun->cfg == NULL);
2032 gcc_assert (cfun->decl == new_fndecl);
2033
2034 /* Copy items we preserve during cloning. */
2035 cfun->static_chain_decl = src_cfun->static_chain_decl;
2036 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2037 cfun->function_end_locus = src_cfun->function_end_locus;
2038 cfun->curr_properties = src_cfun->curr_properties;
2039 cfun->last_verified = src_cfun->last_verified;
2040 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2041 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2042 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2043 cfun->stdarg = src_cfun->stdarg;
2044 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2045 cfun->after_inlining = src_cfun->after_inlining;
2046 cfun->can_throw_non_call_exceptions
2047 = src_cfun->can_throw_non_call_exceptions;
2048 cfun->returns_struct = src_cfun->returns_struct;
2049 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2050 cfun->after_tree_profile = src_cfun->after_tree_profile;
2051
2052 init_empty_tree_cfg ();
2053
2054 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2055 ENTRY_BLOCK_PTR->count =
2056 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2057 REG_BR_PROB_BASE);
2058 ENTRY_BLOCK_PTR->frequency
2059 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2060 EXIT_BLOCK_PTR->count =
2061 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2062 REG_BR_PROB_BASE);
2063 EXIT_BLOCK_PTR->frequency =
2064 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2065 if (src_cfun->eh)
2066 init_eh_for_function ();
2067
2068 if (src_cfun->gimple_df)
2069 {
2070 init_tree_ssa (cfun);
2071 cfun->gimple_df->in_ssa_p = true;
2072 init_ssa_operands ();
2073 }
2074 pop_cfun ();
2075 }
2076
2077 /* Helper function for copy_cfg_body. Move debug stmts from the end
2078 of NEW_BB to the beginning of successor basic blocks when needed. If the
2079 successor has multiple predecessors, reset the debug stmts' values,
2080 otherwise keep them. */
2081
2082 static void
2083 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2084 {
2085 edge e;
2086 edge_iterator ei;
2087 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2088
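  /* There is nothing to move unless the last non-debug statement can
     throw or make an abnormal goto and is followed by trailing debug
     stmts.  */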
2089 if (gsi_end_p (si)
2090 || gsi_one_before_end_p (si)
2091 || !(stmt_can_throw_internal (gsi_stmt (si))
2092 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2093 return;
2094
2095 FOR_EACH_EDGE (e, ei, new_bb->succs)
2096 {
2097 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2098 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2099 while (is_gimple_debug (gsi_stmt (ssi)))
2100 {
2101 gimple stmt = gsi_stmt (ssi), new_stmt;
2102 tree var;
2103 tree value;
2104
2105 /* For the last edge move the debug stmts instead of copying
2106 them. */
2107 if (ei_one_before_end_p (ei))
2108 {
2109 si = ssi;
2110 gsi_prev (&ssi);
2111 if (!single_pred_p (e->dest))
2112 gimple_debug_bind_reset_value (stmt);
2113 gsi_remove (&si, false);
2114 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2115 continue;
2116 }
2117
2118 var = gimple_debug_bind_get_var (stmt);
2119 if (single_pred_p (e->dest))
2120 {
2121 value = gimple_debug_bind_get_value (stmt);
2122 value = unshare_expr (value);
2123 }
2124 else
2125 value = NULL_TREE;
2126 new_stmt = gimple_build_debug_bind (var, value, stmt);
2127 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2128 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2129 gsi_prev (&ssi);
2130 }
2131 }
2132 }
2133
2134 /* Make a copy of the body of FN so that it can be inserted inline in
2135 another function. Walks FN via CFG, returns new fndecl. */
2136
2137 static tree
2138 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2139 basic_block entry_block_map, basic_block exit_block_map,
2140 bitmap blocks_to_copy, basic_block new_entry)
2141 {
2142 tree callee_fndecl = id->src_fn;
2143 /* Original cfun for the callee, doesn't change. */
2144 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2145 struct function *cfun_to_copy;
2146 basic_block bb;
2147 tree new_fndecl = NULL;
2148 bool need_debug_cleanup = false;
2149 gcov_type count_scale;
2150 int last;
2151
2152 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2153 count_scale = (REG_BR_PROB_BASE * count
2154 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2155 else
2156 count_scale = REG_BR_PROB_BASE;
2157
2158 /* Register specific tree functions. */
2159 gimple_register_cfg_hooks ();
2160
2161 /* Must have a CFG here at this point. */
2162 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2163 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2164
2165 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2166
2167 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2168 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2169 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2170 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2171
2172 /* Duplicate any exception-handling regions. */
2173 if (cfun->eh)
2174 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2175 remap_decl_1, id);
2176
2177 /* Use aux pointers to map the original blocks to their copies. */
2178 FOR_EACH_BB_FN (bb, cfun_to_copy)
2179 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2180 {
2181 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2182 bb->aux = new_bb;
2183 new_bb->aux = bb;
2184 }
2185
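  /* Remember the number of basic blocks before copying edges; blocks
     created after this point (e.g. by split_block for EH edges) get
     their aux fields cleared further below.  */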
2186 last = last_basic_block;
2187
2188 /* Now that we've duplicated the blocks, duplicate their edges. */
2189 FOR_ALL_BB_FN (bb, cfun_to_copy)
2190 if (!blocks_to_copy
2191 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2192 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2193
2194 if (gimple_in_ssa_p (cfun))
2195 FOR_ALL_BB_FN (bb, cfun_to_copy)
2196 if (!blocks_to_copy
2197 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2198 copy_phis_for_bb (bb, id);
2199
2200 if (new_entry)
2201 {
2202 edge e;
2203 e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2204 e->probability = REG_BR_PROB_BASE;
2205 e->count = entry_block_map->count;
2206 }
2207
2208 FOR_ALL_BB_FN (bb, cfun_to_copy)
2209 if (bb->aux)
2210 {
2211 if (need_debug_cleanup
2212 && bb->index != ENTRY_BLOCK
2213 && bb->index != EXIT_BLOCK)
2214 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2215 ((basic_block)bb->aux)->aux = NULL;
2216 bb->aux = NULL;
2217 }
2218
2219 /* Zero out AUX fields of newly created block during EH edge
2220 insertion. */
2221 for (; last < last_basic_block; last++)
2222 {
2223 if (need_debug_cleanup)
2224 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2225 BASIC_BLOCK (last)->aux = NULL;
2226 }
2227 entry_block_map->aux = NULL;
2228 exit_block_map->aux = NULL;
2229
2230 if (id->eh_map)
2231 {
2232 pointer_map_destroy (id->eh_map);
2233 id->eh_map = NULL;
2234 }
2235
2236 return new_fndecl;
2237 }
2238
2239 /* Copy the debug STMT using ID. We deal with these statements in a
2240 special way: if any variable in their VALUE expression wasn't
2241 remapped yet, we won't remap it, because that would get decl uids
2242 out of sync, causing codegen differences between -g and -g0. If
2243 this arises, we drop the VALUE expression altogether. */
2244
2245 static void
2246 copy_debug_stmt (gimple stmt, copy_body_data *id)
2247 {
2248 tree t, *n;
2249 struct walk_stmt_info wi;
2250
2251 t = id->block;
2252 if (gimple_block (stmt))
2253 {
2254 tree *n;
2255 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2256 if (n)
2257 t = *n;
2258 }
2259 gimple_set_block (stmt, t);
2260
2261 /* Remap all the operands in COPY. */
2262 memset (&wi, 0, sizeof (wi));
2263 wi.info = id;
2264
2265 processing_debug_stmt = 1;
2266
2267 t = gimple_debug_bind_get_var (stmt);
2268
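  /* If the bound variable is an inlined parameter with a debug-only
     replacement, use that replacement; a non-localized variable is kept
     as is; anything else is remapped like a normal operand.  */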
2269 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2270 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2271 {
2272 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2273 t = *n;
2274 }
2275 else if (TREE_CODE (t) == VAR_DECL
2276 && !TREE_STATIC (t)
2277 && gimple_in_ssa_p (cfun)
2278 && !pointer_map_contains (id->decl_map, t)
2279 && !var_ann (t))
2280 /* T is a non-localized variable. */;
2281 else
2282 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2283
2284 gimple_debug_bind_set_var (stmt, t);
2285
2286 if (gimple_debug_bind_has_value_p (stmt))
2287 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2288 remap_gimple_op_r, &wi, NULL);
2289
2290 /* Punt if any decl couldn't be remapped. */
2291 if (processing_debug_stmt < 0)
2292 gimple_debug_bind_reset_value (stmt);
2293
2294 processing_debug_stmt = 0;
2295
2296 update_stmt (stmt);
2297 if (gimple_in_ssa_p (cfun))
2298 mark_symbols_for_renaming (stmt);
2299 }
2300
2301 /* Process deferred debug stmts. In order to give values better odds
2302 of being successfully remapped, we delay the processing of debug
2303 stmts until all other stmts that might require remapping are
2304 processed. */
2305
2306 static void
2307 copy_debug_stmts (copy_body_data *id)
2308 {
2309 size_t i;
2310 gimple stmt;
2311
2312 if (!id->debug_stmts)
2313 return;
2314
2315 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2316 copy_debug_stmt (stmt, id);
2317
2318 VEC_free (gimple, heap, id->debug_stmts);
2319 }
2320
2321 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2322 another function. */
2323
2324 static tree
2325 copy_tree_body (copy_body_data *id)
2326 {
2327 tree fndecl = id->src_fn;
2328 tree body = DECL_SAVED_TREE (fndecl);
2329
2330 walk_tree (&body, copy_tree_body_r, id, NULL);
2331
2332 return body;
2333 }
2334
2335 /* Make a copy of the body of FN so that it can be inserted inline in
2336 another function. */
2337
2338 static tree
2339 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2340 basic_block entry_block_map, basic_block exit_block_map,
2341 bitmap blocks_to_copy, basic_block new_entry)
2342 {
2343 tree fndecl = id->src_fn;
2344 tree body;
2345
2346 /* If this body has a CFG, walk CFG and copy. */
2347 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2348 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2349 blocks_to_copy, new_entry);
2350 copy_debug_stmts (id);
2351
2352 return body;
2353 }
2354
2355 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2356 defined in function FN, or of a data member thereof. */
2357
2358 static bool
2359 self_inlining_addr_expr (tree value, tree fn)
2360 {
2361 tree var;
2362
2363 if (TREE_CODE (value) != ADDR_EXPR)
2364 return false;
2365
2366 var = get_base_address (TREE_OPERAND (value, 0));
2367
2368 return var && auto_var_in_fn_p (var, fn);
2369 }
2370
2371 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2372 lexical block and line number information from base_stmt, if given,
2373 or from the last stmt of the block otherwise. */
2374
2375 static gimple
2376 insert_init_debug_bind (copy_body_data *id,
2377 basic_block bb, tree var, tree value,
2378 gimple base_stmt)
2379 {
2380 gimple note;
2381 gimple_stmt_iterator gsi;
2382 tree tracked_var;
2383
2384 if (!gimple_in_ssa_p (id->src_cfun))
2385 return NULL;
2386
2387 if (!MAY_HAVE_DEBUG_STMTS)
2388 return NULL;
2389
2390 tracked_var = target_for_debug_bind (var);
2391 if (!tracked_var)
2392 return NULL;
2393
2394 if (bb)
2395 {
2396 gsi = gsi_last_bb (bb);
2397 if (!base_stmt && !gsi_end_p (gsi))
2398 base_stmt = gsi_stmt (gsi);
2399 }
2400
2401 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2402
2403 if (bb)
2404 {
2405 if (!gsi_end_p (gsi))
2406 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2407 else
2408 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2409 }
2410
2411 return note;
2412 }
2413
2414 static void
2415 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2416 {
2417 /* If VAR represents a zero-sized variable, it's possible that the
2418 assignment statement may result in no gimple statements. */
2419 if (init_stmt)
2420 {
2421 gimple_stmt_iterator si = gsi_last_bb (bb);
2422
2423 /* We can end up with init statements that store to a non-register
2424 from a rhs with a conversion. Handle that here by forcing the
2425 rhs into a temporary. gimple_regimplify_operands is not
2426 prepared to do this for us. */
2427 if (!is_gimple_debug (init_stmt)
2428 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2429 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2430 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2431 {
2432 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2433 gimple_expr_type (init_stmt),
2434 gimple_assign_rhs1 (init_stmt));
2435 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2436 GSI_NEW_STMT);
2437 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2438 gimple_assign_set_rhs1 (init_stmt, rhs);
2439 }
2440 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2441 gimple_regimplify_operands (init_stmt, &si);
2442 mark_symbols_for_renaming (init_stmt);
2443
2444 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2445 {
2446 tree var, def = gimple_assign_lhs (init_stmt);
2447
2448 if (TREE_CODE (def) == SSA_NAME)
2449 var = SSA_NAME_VAR (def);
2450 else
2451 var = def;
2452
2453 insert_init_debug_bind (id, bb, var, def, init_stmt);
2454 }
2455 }
2456 }
2457
2458 /* Initialize parameter P with VALUE. If needed, produce an init statement
2459 at the end of BB. When BB is NULL, we return the init statement to be
2460 output later. */
2461 static gimple
2462 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2463 basic_block bb, tree *vars)
2464 {
2465 gimple init_stmt = NULL;
2466 tree var;
2467 tree rhs = value;
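  /* DEF is the default-definition SSA name of P in the source function
     (if that function is in SSA form), i.e. the SSA name representing
     P's incoming value.  */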
2468 tree def = (gimple_in_ssa_p (cfun)
2469 ? gimple_default_def (id->src_cfun, p) : NULL);
2470
2471 if (value
2472 && value != error_mark_node
2473 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2474 {
2475 if (fold_convertible_p (TREE_TYPE (p), value))
2476 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2477 else
2478 /* ??? For valid (GIMPLE) programs we should not end up here.
2479 Still if something has gone wrong and we end up with truly
2480 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2481 to not leak invalid GIMPLE to the following passes. */
2482 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2483 }
2484
2485 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2486 here since the type of this decl must be visible to the calling
2487 function. */
2488 var = copy_decl_to_var (p, id);
2489
2490 /* We're actually using the newly-created var. */
2491 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2492 {
2493 get_var_ann (var);
2494 add_referenced_var (var);
2495 }
2496
2497 /* Declare this new variable. */
2498 TREE_CHAIN (var) = *vars;
2499 *vars = var;
2500
2501 /* Make gimplifier happy about this variable. */
2502 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2503
2504 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2505 we would not need to create a new variable here at all, if it
2506 weren't for debug info. Still, we can just use the argument
2507 value. */
2508 if (TREE_READONLY (p)
2509 && !TREE_ADDRESSABLE (p)
2510 && value && !TREE_SIDE_EFFECTS (value)
2511 && !def)
2512 {
2513 /* We may produce non-gimple trees by adding NOPs or introduce
2514 invalid sharing when the operand is not really constant.
2515 It is not a big deal to prohibit constant propagation here, as
2516 we will constant propagate in the DOM1 pass anyway. */
2517 if (is_gimple_min_invariant (value)
2518 && useless_type_conversion_p (TREE_TYPE (p),
2519 TREE_TYPE (value))
2520 /* We have to be very careful about ADDR_EXPR. Make sure
2521 the base variable isn't a local variable of the inlined
2522 function, e.g., when doing recursive inlining, direct or
2523 mutually-recursive or whatever, which is why we don't
2524 just test whether fn == current_function_decl. */
2525 && ! self_inlining_addr_expr (value, fn))
2526 {
2527 insert_decl_map (id, p, value);
2528 insert_debug_decl_map (id, p, var);
2529 return insert_init_debug_bind (id, bb, var, value, NULL);
2530 }
2531 }
2532
2533 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2534 that way, when the PARM_DECL is encountered, it will be
2535 automatically replaced by the VAR_DECL. */
2536 insert_decl_map (id, p, var);
2537
2538 /* Even if P was TREE_READONLY, the new VAR should not be.
2539 In the original code, we would have constructed a
2540 temporary, and then the function body would have never
2541 changed the value of P. However, now, we will be
2542 constructing VAR directly. The constructor body may
2543 change its value multiple times as it is being
2544 constructed. Therefore, it must not be TREE_READONLY;
2545 the back-end assumes that TREE_READONLY variable is
2546 assigned to only once. */
2547 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2548 TREE_READONLY (var) = 0;
2549
2550 /* If there is no setup required and we are in SSA, take the easy route
2551 replacing all SSA names representing the function parameter by the
2552 SSA name passed to the function.
2553
2554 We need to construct a map for the variable anyway, as it might be used
2555 in different SSA names when the parameter is set in the function.
2556
2557 Do the replacement at -O0 for const arguments replaced by a constant.
2558 This is important for builtin_constant_p and other constructs requiring
2559 a constant argument to be visible in the inlined function body. */
2560 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2561 && (optimize
2562 || (TREE_READONLY (p)
2563 && is_gimple_min_invariant (rhs)))
2564 && (TREE_CODE (rhs) == SSA_NAME
2565 || is_gimple_min_invariant (rhs))
2566 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2567 {
2568 insert_decl_map (id, def, rhs);
2569 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2570 }
2571
2572 /* If the value of the argument is never used, don't bother initializing
2573 it. */
2574 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2575 {
2576 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2577 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2578 }
2579
2580 /* Initialize this VAR_DECL from the equivalent argument. Convert
2581 the argument to the proper type in case it was promoted. */
2582 if (value)
2583 {
2584 if (rhs == error_mark_node)
2585 {
2586 insert_decl_map (id, p, var);
2587 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2588 }
2589
2590 STRIP_USELESS_TYPE_CONVERSION (rhs);
2591
2592 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2593 keep our trees in gimple form. */
2594 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2595 {
2596 def = remap_ssa_name (def, id);
2597 init_stmt = gimple_build_assign (def, rhs);
2598 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2599 set_default_def (var, NULL);
2600 }
2601 else
2602 init_stmt = gimple_build_assign (var, rhs);
2603
2604 if (bb && init_stmt)
2605 insert_init_stmt (id, bb, init_stmt);
2606 }
2607 return init_stmt;
2608 }
2609
2610 /* Generate code to initialize the parameters of the function at the
2611 top of the stack in ID from the GIMPLE_CALL STMT. */
2612
2613 static void
2614 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2615 tree fn, basic_block bb)
2616 {
2617 tree parms;
2618 size_t i;
2619 tree p;
2620 tree vars = NULL_TREE;
2621 tree static_chain = gimple_call_chain (stmt);
2622
2623 /* Figure out what the parameters are. */
2624 parms = DECL_ARGUMENTS (fn);
2625
2626 /* Loop through the parameter declarations, replacing each with an
2627 equivalent VAR_DECL, appropriately initialized. */
2628 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2629 {
2630 tree val;
2631 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2632 setup_one_parameter (id, p, val, fn, bb, &vars);
2633 }
2634
2635 /* Initialize the static chain. */
2636 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2637 gcc_assert (fn != current_function_decl);
2638 if (p)
2639 {
2640 /* No static chain? Seems like a bug in tree-nested.c. */
2641 gcc_assert (static_chain);
2642
2643 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2644 }
2645
2646 declare_inline_vars (id->block, vars);
2647 }
2648
2649
2650 /* Declare a return variable to replace the RESULT_DECL for the
2651 function we are calling. An appropriate DECL_STMT is returned.
2652 The USE_STMT is filled to contain a use of the declaration to
2653 indicate the return value of the function.
2654
2655 RETURN_SLOT, if non-null, is the place where to store the result. It
2656 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2657 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2658
2659 The return value is a (possibly null) value that holds the result
2660 as seen by the caller. */
2661
2662 static tree
2663 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2664 {
2665 tree callee = id->src_fn;
2666 tree caller = id->dst_fn;
2667 tree result = DECL_RESULT (callee);
2668 tree callee_type = TREE_TYPE (result);
2669 tree caller_type;
2670 tree var, use;
2671
2672 /* Handle type-mismatches in the function declaration return type
2673 vs. the call expression. */
2674 if (modify_dest)
2675 caller_type = TREE_TYPE (modify_dest);
2676 else
2677 caller_type = TREE_TYPE (TREE_TYPE (callee));
2678
2679 /* We don't need to do anything for functions that don't return
2680 anything. */
2681 if (!result || VOID_TYPE_P (callee_type))
2682 return NULL_TREE;
2683
2684 /* If there was a return slot, then the return value is the
2685 dereferenced address of that object. */
2686 if (return_slot)
2687 {
2688 /* The front end shouldn't have used both return_slot and
2689 a modify expression. */
2690 gcc_assert (!modify_dest);
2691 if (DECL_BY_REFERENCE (result))
2692 {
2693 tree return_slot_addr = build_fold_addr_expr (return_slot);
2694 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2695
2696 /* We are going to construct *&return_slot and we can't do that
2697 for variables believed to be not addressable.
2698
2699 FIXME: This check possibly can match, because values returned
2700 via return slot optimization are not believed to have address
2701 taken by alias analysis. */
2702 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2703 var = return_slot_addr;
2704 }
2705 else
2706 {
2707 var = return_slot;
2708 gcc_assert (TREE_CODE (var) != SSA_NAME);
2709 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2710 }
2711 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2712 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2713 && !DECL_GIMPLE_REG_P (result)
2714 && DECL_P (var))
2715 DECL_GIMPLE_REG_P (var) = 0;
2716 use = NULL;
2717 goto done;
2718 }
2719
2720 /* All types requiring non-trivial constructors should have been handled. */
2721 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2722
2723 /* Attempt to avoid creating a new temporary variable. */
2724 if (modify_dest
2725 && TREE_CODE (modify_dest) != SSA_NAME)
2726 {
2727 bool use_it = false;
2728
2729 /* We can't use MODIFY_DEST if there's type promotion involved. */
2730 if (!useless_type_conversion_p (callee_type, caller_type))
2731 use_it = false;
2732
2733 /* ??? If we're assigning to a variable sized type, then we must
2734 reuse the destination variable, because we've no good way to
2735 create variable sized temporaries at this point. */
2736 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2737 use_it = true;
2738
2739 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2740 reuse it as the result of the call directly. Don't do this if
2741 it would promote MODIFY_DEST to addressable. */
2742 else if (TREE_ADDRESSABLE (result))
2743 use_it = false;
2744 else
2745 {
2746 tree base_m = get_base_address (modify_dest);
2747
2748 /* If the base isn't a decl, then it's a pointer, and we don't
2749 know where that's going to go. */
2750 if (!DECL_P (base_m))
2751 use_it = false;
2752 else if (is_global_var (base_m))
2753 use_it = false;
2754 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2755 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2756 && !DECL_GIMPLE_REG_P (result)
2757 && DECL_GIMPLE_REG_P (base_m))
2758 use_it = false;
2759 else if (!TREE_ADDRESSABLE (base_m))
2760 use_it = true;
2761 }
2762
2763 if (use_it)
2764 {
2765 var = modify_dest;
2766 use = NULL;
2767 goto done;
2768 }
2769 }
2770
2771 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2772
2773 var = copy_result_decl_to_var (result, id);
2774 if (gimple_in_ssa_p (cfun))
2775 {
2776 get_var_ann (var);
2777 add_referenced_var (var);
2778 }
2779
2780 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2781 DECL_STRUCT_FUNCTION (caller)->local_decls
2782 = tree_cons (NULL_TREE, var,
2783 DECL_STRUCT_FUNCTION (caller)->local_decls);
2784
2785 /* Do not have the rest of GCC warn about this variable as it should
2786 not be visible to the user. */
2787 TREE_NO_WARNING (var) = 1;
2788
2789 declare_inline_vars (id->block, var);
2790
2791 /* Build the use expr. If the return type of the function was
2792 promoted, convert it back to the expected type. */
2793 use = var;
2794 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2795 use = fold_convert (caller_type, var);
2796
2797 STRIP_USELESS_TYPE_CONVERSION (use);
2798
2799 if (DECL_BY_REFERENCE (result))
2800 {
2801 TREE_ADDRESSABLE (var) = 1;
2802 var = build_fold_addr_expr (var);
2803 }
2804
2805 done:
2806 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2807 way, when the RESULT_DECL is encountered, it will be
2808 automatically replaced by the VAR_DECL. */
2809 insert_decl_map (id, result, var);
2810
2811 /* Remember this so we can ignore it in remap_decls. */
2812 id->retvar = var;
2813
2814 return use;
2815 }
2816
2817 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2818 to a local label. */
2819
2820 static tree
2821 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2822 {
2823 tree node = *nodep;
2824 tree fn = (tree) fnp;
2825
2826 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2827 return node;
2828
2829 if (TYPE_P (node))
2830 *walk_subtrees = 0;
2831
2832 return NULL_TREE;
2833 }
2834
2835 /* Determine if the function can be copied. If so return NULL. If
2836 not return a string describing the reason for failure. */
2837
2838 static const char *
2839 copy_forbidden (struct function *fun, tree fndecl)
2840 {
2841 const char *reason = fun->cannot_be_copied_reason;
2842 tree step;
2843
2844 /* Only examine the function once. */
2845 if (fun->cannot_be_copied_set)
2846 return reason;
2847
2848 /* We cannot copy a function that receives a non-local goto
2849 because we cannot remap the destination label used in the
2850 function that is performing the non-local goto. */
2851 /* ??? Actually, this should be possible, if we work at it.
2852 No doubt there's just a handful of places that simply
2853 assume it doesn't happen and don't substitute properly. */
2854 if (fun->has_nonlocal_label)
2855 {
2856 reason = G_("function %q+F can never be copied "
2857 "because it receives a non-local goto");
2858 goto fail;
2859 }
2860
2861 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2862 {
2863 tree decl = TREE_VALUE (step);
2864
2865 if (TREE_CODE (decl) == VAR_DECL
2866 && TREE_STATIC (decl)
2867 && !DECL_EXTERNAL (decl)
2868 && DECL_INITIAL (decl)
2869 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2870 has_label_address_in_static_1,
2871 fndecl))
2872 {
2873 reason = G_("function %q+F can never be copied because it saves "
2874 "address of local label in a static variable");
2875 goto fail;
2876 }
2877 }
2878
2879 fail:
2880 fun->cannot_be_copied_reason = reason;
2881 fun->cannot_be_copied_set = true;
2882 return reason;
2883 }
2884
2885
2886 static const char *inline_forbidden_reason;
2887
2888 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2889 iff a function cannot be inlined. Also sets the reason why. */
2890
2891 static tree
2892 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2893 struct walk_stmt_info *wip)
2894 {
2895 tree fn = (tree) wip->info;
2896 tree t;
2897 gimple stmt = gsi_stmt (*gsi);
2898
2899 switch (gimple_code (stmt))
2900 {
2901 case GIMPLE_CALL:
2902 /* Refuse to inline an alloca call unless the user has explicitly forced it,
2903 as this may change the program's memory overhead drastically when the
2904 function using alloca is called in a loop. In the GCC present in
2905 SPEC2000, inlining into schedule_block caused it to require 2GB of
2906 RAM instead of 256MB. */
2907 if (gimple_alloca_call_p (stmt)
2908 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2909 {
2910 inline_forbidden_reason
2911 = G_("function %q+F can never be inlined because it uses "
2912 "alloca (override using the always_inline attribute)");
2913 *handled_ops_p = true;
2914 return fn;
2915 }
2916
2917 t = gimple_call_fndecl (stmt);
2918 if (t == NULL_TREE)
2919 break;
2920
2921 /* We cannot inline functions that call setjmp. */
2922 if (setjmp_call_p (t))
2923 {
2924 inline_forbidden_reason
2925 = G_("function %q+F can never be inlined because it uses setjmp");
2926 *handled_ops_p = true;
2927 return t;
2928 }
2929
2930 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2931 switch (DECL_FUNCTION_CODE (t))
2932 {
2933 /* We cannot inline functions that take a variable number of
2934 arguments. */
2935 case BUILT_IN_VA_START:
2936 case BUILT_IN_NEXT_ARG:
2937 case BUILT_IN_VA_END:
2938 inline_forbidden_reason
2939 = G_("function %q+F can never be inlined because it "
2940 "uses variable argument lists");
2941 *handled_ops_p = true;
2942 return t;
2943
2944 case BUILT_IN_LONGJMP:
2945 /* We can't inline functions that call __builtin_longjmp at
2946 all. The non-local goto machinery really requires the
2947 destination be in a different function. If we allow the
2948 function calling __builtin_longjmp to be inlined into the
2949 function calling __builtin_setjmp, Things will Go Awry. */
2950 inline_forbidden_reason
2951 = G_("function %q+F can never be inlined because "
2952 "it uses setjmp-longjmp exception handling");
2953 *handled_ops_p = true;
2954 return t;
2955
2956 case BUILT_IN_NONLOCAL_GOTO:
2957 /* Similarly. */
2958 inline_forbidden_reason
2959 = G_("function %q+F can never be inlined because "
2960 "it uses non-local goto");
2961 *handled_ops_p = true;
2962 return t;
2963
2964 case BUILT_IN_RETURN:
2965 case BUILT_IN_APPLY_ARGS:
2966 /* If a __builtin_apply_args caller would be inlined,
2967 it would be saving arguments of the function it has
2968 been inlined into. Similarly __builtin_return would
2969 return from the function it has been inlined into. */
2970 inline_forbidden_reason
2971 = G_("function %q+F can never be inlined because "
2972 "it uses __builtin_return or __builtin_apply_args");
2973 *handled_ops_p = true;
2974 return t;
2975
2976 default:
2977 break;
2978 }
2979 break;
2980
2981 case GIMPLE_GOTO:
2982 t = gimple_goto_dest (stmt);
2983
2984 /* We will not inline a function which uses computed goto. The
2985 addresses of its local labels, which may be tucked into
2986 global storage, are of course not constant across
2987 instantiations, which causes unexpected behavior. */
2988 if (TREE_CODE (t) != LABEL_DECL)
2989 {
2990 inline_forbidden_reason
2991 = G_("function %q+F can never be inlined "
2992 "because it contains a computed goto");
2993 *handled_ops_p = true;
2994 return t;
2995 }
2996 break;
2997
2998 default:
2999 break;
3000 }
3001
3002 *handled_ops_p = false;
3003 return NULL_TREE;
3004 }
3005
3006 /* Return true if FNDECL is a function that cannot be inlined into
3007 another one. */
3008
3009 static bool
3010 inline_forbidden_p (tree fndecl)
3011 {
3012 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3013 struct walk_stmt_info wi;
3014 struct pointer_set_t *visited_nodes;
3015 basic_block bb;
3016 bool forbidden_p = false;
3017
3018 /* First check for shared reasons not to copy the code. */
3019 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3020 if (inline_forbidden_reason != NULL)
3021 return true;
3022
3023 /* Next, walk the statements of the function looking for
3024 constructs we can't handle or that are non-optimal for inlining. */
3025 visited_nodes = pointer_set_create ();
3026 memset (&wi, 0, sizeof (wi));
3027 wi.info = (void *) fndecl;
3028 wi.pset = visited_nodes;
3029
3030 FOR_EACH_BB_FN (bb, fun)
3031 {
3032 gimple ret;
3033 gimple_seq seq = bb_seq (bb);
3034 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3035 forbidden_p = (ret != NULL);
3036 if (forbidden_p)
3037 break;
3038 }
3039
3040 pointer_set_destroy (visited_nodes);
3041 return forbidden_p;
3042 }
3043
3044 /* Return true if CALLEE cannot be inlined into CALLER. */
3045
3046 static bool
3047 inline_forbidden_into_p (tree caller, tree callee)
3048 {
3049 /* Don't inline if the functions have different EH personalities. */
3050 if (DECL_FUNCTION_PERSONALITY (caller)
3051 && DECL_FUNCTION_PERSONALITY (callee)
3052 && (DECL_FUNCTION_PERSONALITY (caller)
3053 != DECL_FUNCTION_PERSONALITY (callee)))
3054 return true;
3055
3056 /* Don't inline if the callee can throw non-call exceptions but the
3057 caller cannot. */
3058 if (DECL_STRUCT_FUNCTION (callee)
3059 && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
3060 && !(DECL_STRUCT_FUNCTION (caller)
3061 && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
3062 return true;
3063
3064 return false;
3065 }
3066
3067 /* Returns nonzero if FN is a function that does not have any
3068 fundamental inline blocking properties. */
3069
3070 bool
3071 tree_inlinable_function_p (tree fn)
3072 {
3073 bool inlinable = true;
3074 bool do_warning;
3075 tree always_inline;
3076
3077 /* If we've already decided this function shouldn't be inlined,
3078 there's no need to check again. */
3079 if (DECL_UNINLINABLE (fn))
3080 return false;
3081
3082 /* We only warn for functions declared `inline' by the user. */
3083 do_warning = (warn_inline
3084 && DECL_DECLARED_INLINE_P (fn)
3085 && !DECL_NO_INLINE_WARNING_P (fn)
3086 && !DECL_IN_SYSTEM_HEADER (fn));
3087
3088 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3089
3090 if (flag_no_inline
3091 && always_inline == NULL)
3092 {
3093 if (do_warning)
3094 warning (OPT_Winline, "function %q+F can never be inlined because it "
3095 "is suppressed using -fno-inline", fn);
3096 inlinable = false;
3097 }
3098
3099 /* Don't auto-inline anything that might not be bound within
3100 this unit of translation. */
3101 else if (!DECL_DECLARED_INLINE_P (fn)
3102 && DECL_REPLACEABLE_P (fn))
3103 inlinable = false;
3104
3105 else if (!function_attribute_inlinable_p (fn))
3106 {
3107 if (do_warning)
3108 warning (OPT_Winline, "function %q+F can never be inlined because it "
3109 "uses attributes conflicting with inlining", fn);
3110 inlinable = false;
3111 }
3112
3113 else if (inline_forbidden_p (fn))
3114 {
3115 /* See if we should warn about uninlinable functions. Previously,
3116 some of these warnings would be issued while trying to expand
3117 the function inline, but that would cause multiple warnings
3118 about functions that would for example call alloca. But since
3119 this is a property of the function, just one warning is enough.
3120 As a bonus we can now give more details about the reason why a
3121 function is not inlinable. */
3122 if (always_inline)
3123 sorry (inline_forbidden_reason, fn);
3124 else if (do_warning)
3125 warning (OPT_Winline, inline_forbidden_reason, fn);
3126
3127 inlinable = false;
3128 }
3129
3130 /* Squirrel away the result so that we don't have to check again. */
3131 DECL_UNINLINABLE (fn) = !inlinable;
3132
3133 return inlinable;
3134 }
3135
3136 /* Estimate the cost of a memory move. Use machine-dependent
3137 word size and take a possible memcpy call into account. */
3138
3139 int
3140 estimate_move_cost (tree type)
3141 {
3142 HOST_WIDE_INT size;
3143
3144 gcc_assert (!VOID_TYPE_P (type));
3145
3146 size = int_size_in_bytes (type);
3147
3148 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3149 /* Cost of a memcpy call, 3 arguments and the call. */
3150 return 4;
3151 else
3152 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3153 }
3154
3155 /* Returns cost of operation CODE, according to WEIGHTS */
3156
3157 static int
3158 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3159 tree op1 ATTRIBUTE_UNUSED, tree op2)
3160 {
3161 switch (code)
3162 {
3163 /* These are "free" conversions, or their presumed cost
3164 is folded into other operations. */
3165 case RANGE_EXPR:
3166 CASE_CONVERT:
3167 case COMPLEX_EXPR:
3168 case PAREN_EXPR:
3169 return 0;
3170
3171 /* Assign cost of 1 to usual operations.
3172 ??? We may consider mapping RTL costs to this. */
3173 case COND_EXPR:
3174 case VEC_COND_EXPR:
3175
3176 case PLUS_EXPR:
3177 case POINTER_PLUS_EXPR:
3178 case MINUS_EXPR:
3179 case MULT_EXPR:
3180
3181 case ADDR_SPACE_CONVERT_EXPR:
3182 case FIXED_CONVERT_EXPR:
3183 case FIX_TRUNC_EXPR:
3184
3185 case NEGATE_EXPR:
3186 case FLOAT_EXPR:
3187 case MIN_EXPR:
3188 case MAX_EXPR:
3189 case ABS_EXPR:
3190
3191 case LSHIFT_EXPR:
3192 case RSHIFT_EXPR:
3193 case LROTATE_EXPR:
3194 case RROTATE_EXPR:
3195 case VEC_LSHIFT_EXPR:
3196 case VEC_RSHIFT_EXPR:
3197
3198 case BIT_IOR_EXPR:
3199 case BIT_XOR_EXPR:
3200 case BIT_AND_EXPR:
3201 case BIT_NOT_EXPR:
3202
3203 case TRUTH_ANDIF_EXPR:
3204 case TRUTH_ORIF_EXPR:
3205 case TRUTH_AND_EXPR:
3206 case TRUTH_OR_EXPR:
3207 case TRUTH_XOR_EXPR:
3208 case TRUTH_NOT_EXPR:
3209
3210 case LT_EXPR:
3211 case LE_EXPR:
3212 case GT_EXPR:
3213 case GE_EXPR:
3214 case EQ_EXPR:
3215 case NE_EXPR:
3216 case ORDERED_EXPR:
3217 case UNORDERED_EXPR:
3218
3219 case UNLT_EXPR:
3220 case UNLE_EXPR:
3221 case UNGT_EXPR:
3222 case UNGE_EXPR:
3223 case UNEQ_EXPR:
3224 case LTGT_EXPR:
3225
3226 case CONJ_EXPR:
3227
3228 case PREDECREMENT_EXPR:
3229 case PREINCREMENT_EXPR:
3230 case POSTDECREMENT_EXPR:
3231 case POSTINCREMENT_EXPR:
3232
3233 case REALIGN_LOAD_EXPR:
3234
3235 case REDUC_MAX_EXPR:
3236 case REDUC_MIN_EXPR:
3237 case REDUC_PLUS_EXPR:
3238 case WIDEN_SUM_EXPR:
3239 case WIDEN_MULT_EXPR:
3240 case DOT_PROD_EXPR:
3241 case WIDEN_MULT_PLUS_EXPR:
3242 case WIDEN_MULT_MINUS_EXPR:
3243
3244 case VEC_WIDEN_MULT_HI_EXPR:
3245 case VEC_WIDEN_MULT_LO_EXPR:
3246 case VEC_UNPACK_HI_EXPR:
3247 case VEC_UNPACK_LO_EXPR:
3248 case VEC_UNPACK_FLOAT_HI_EXPR:
3249 case VEC_UNPACK_FLOAT_LO_EXPR:
3250 case VEC_PACK_TRUNC_EXPR:
3251 case VEC_PACK_SAT_EXPR:
3252 case VEC_PACK_FIX_TRUNC_EXPR:
3253 case VEC_EXTRACT_EVEN_EXPR:
3254 case VEC_EXTRACT_ODD_EXPR:
3255 case VEC_INTERLEAVE_HIGH_EXPR:
3256 case VEC_INTERLEAVE_LOW_EXPR:
3257
3258 return 1;
3259
3260 /* A few special cases of expensive operations. This is useful
3261 to avoid inlining functions having too many of these. */
3262 case TRUNC_DIV_EXPR:
3263 case CEIL_DIV_EXPR:
3264 case FLOOR_DIV_EXPR:
3265 case ROUND_DIV_EXPR:
3266 case EXACT_DIV_EXPR:
3267 case TRUNC_MOD_EXPR:
3268 case CEIL_MOD_EXPR:
3269 case FLOOR_MOD_EXPR:
3270 case ROUND_MOD_EXPR:
3271 case RDIV_EXPR:
3272 if (TREE_CODE (op2) != INTEGER_CST)
3273 return weights->div_mod_cost;
3274 return 1;
3275
3276 default:
3277 /* We expect a copy assignment with no operator. */
3278 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3279 return 0;
3280 }
3281 }
3282
3283
3284 /* Estimate number of instructions that will be created by expanding
3285 the statements in the statement sequence STMTS.
3286 WEIGHTS contains weights attributed to various constructs. */
3287
3288 static
3289 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3290 {
3291 int cost;
3292 gimple_stmt_iterator gsi;
3293
3294 cost = 0;
3295 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3296 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3297
3298 return cost;
3299 }
3300
3301
3302 /* Estimate number of instructions that will be created by expanding STMT.
3303 WEIGHTS contains weights attributed to various constructs. */
3304
3305 int
3306 estimate_num_insns (gimple stmt, eni_weights *weights)
3307 {
3308 unsigned cost, i;
3309 enum gimple_code code = gimple_code (stmt);
3310 tree lhs;
3311 tree rhs;
3312
3313 switch (code)
3314 {
3315 case GIMPLE_ASSIGN:
3316 /* Try to estimate the cost of assignments. We have three cases to
3317 deal with:
3318 1) Simple assignments to registers;
3319 2) Stores to things that must live in memory. This includes
3320 "normal" stores to scalars, but also assignments of large
3321 structures, or constructors of big arrays;
3322
3323 Let us look at the first two cases, assuming we have "a = b + C":
3324 <GIMPLE_ASSIGN <var_decl "a">
3325 <plus_expr <var_decl "b"> <constant C>>
3326 If "a" is a GIMPLE register, the assignment to it is free on almost
3327 any target, because "a" usually ends up in a real register. Hence
3328 the only cost of this expression comes from the PLUS_EXPR, and we
3329 can ignore the GIMPLE_ASSIGN.
3330 If "a" is not a GIMPLE register, the assignment to "a" will most
3331 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3332 of moving something into "a", which we compute using the function
3333 estimate_move_cost. */
3334 lhs = gimple_assign_lhs (stmt);
3335 rhs = gimple_assign_rhs1 (stmt);
3336
3337 if (is_gimple_reg (lhs))
3338 cost = 0;
3339 else
3340 cost = estimate_move_cost (TREE_TYPE (lhs));
3341
3342 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3343 cost += estimate_move_cost (TREE_TYPE (rhs));
3344
3345 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3346 gimple_assign_rhs1 (stmt),
3347 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3348 == GIMPLE_BINARY_RHS
3349 ? gimple_assign_rhs2 (stmt) : NULL);
3350 break;
3351
3352 case GIMPLE_COND:
3353 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3354 gimple_op (stmt, 0),
3355 gimple_op (stmt, 1));
3356 break;
3357
3358 case GIMPLE_SWITCH:
3359 /* Take into account cost of the switch + guess 2 conditional jumps for
3360 each case label.
3361
3362 TODO: once the switch expansion logic is sufficiently separated, we can
3363 do a better job of estimating the cost of the switch. */
3364 if (weights->time_based)
3365 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3366 else
3367 cost = gimple_switch_num_labels (stmt) * 2;
3368 break;
3369
3370 case GIMPLE_CALL:
3371 {
3372 tree decl = gimple_call_fndecl (stmt);
3373 tree addr = gimple_call_fn (stmt);
3374 tree funtype = TREE_TYPE (addr);
3375 bool stdarg = false;
3376
3377 if (POINTER_TYPE_P (funtype))
3378 funtype = TREE_TYPE (funtype);
3379
3380 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3381 cost = weights->target_builtin_call_cost;
3382 else
3383 cost = weights->call_cost;
3384
3385 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3386 switch (DECL_FUNCTION_CODE (decl))
3387 {
3388 /* Builtins that expand to constants. */
3389 case BUILT_IN_CONSTANT_P:
3390 case BUILT_IN_EXPECT:
3391 case BUILT_IN_OBJECT_SIZE:
3392 case BUILT_IN_UNREACHABLE:
3393 /* Simple register moves or loads from stack. */
3394 case BUILT_IN_RETURN_ADDRESS:
3395 case BUILT_IN_EXTRACT_RETURN_ADDR:
3396 case BUILT_IN_FROB_RETURN_ADDR:
3397 case BUILT_IN_RETURN:
3398 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3399 case BUILT_IN_FRAME_ADDRESS:
3400 case BUILT_IN_VA_END:
3401 case BUILT_IN_STACK_SAVE:
3402 case BUILT_IN_STACK_RESTORE:
3403 /* Exception state returns or moves registers around. */
3404 case BUILT_IN_EH_FILTER:
3405 case BUILT_IN_EH_POINTER:
3406 case BUILT_IN_EH_COPY_VALUES:
3407 return 0;
3408
3409 /* Builtins that are not expensive (that is, they are most probably
3410 expanded inline into reasonably simple code). */
3411 case BUILT_IN_ABS:
3412 case BUILT_IN_ALLOCA:
3413 case BUILT_IN_BSWAP32:
3414 case BUILT_IN_BSWAP64:
3415 case BUILT_IN_CLZ:
3416 case BUILT_IN_CLZIMAX:
3417 case BUILT_IN_CLZL:
3418 case BUILT_IN_CLZLL:
3419 case BUILT_IN_CTZ:
3420 case BUILT_IN_CTZIMAX:
3421 case BUILT_IN_CTZL:
3422 case BUILT_IN_CTZLL:
3423 case BUILT_IN_FFS:
3424 case BUILT_IN_FFSIMAX:
3425 case BUILT_IN_FFSL:
3426 case BUILT_IN_FFSLL:
3427 case BUILT_IN_IMAXABS:
3428 case BUILT_IN_FINITE:
3429 case BUILT_IN_FINITEF:
3430 case BUILT_IN_FINITEL:
3431 case BUILT_IN_FINITED32:
3432 case BUILT_IN_FINITED64:
3433 case BUILT_IN_FINITED128:
3434 case BUILT_IN_FPCLASSIFY:
3435 case BUILT_IN_ISFINITE:
3436 case BUILT_IN_ISINF_SIGN:
3437 case BUILT_IN_ISINF:
3438 case BUILT_IN_ISINFF:
3439 case BUILT_IN_ISINFL:
3440 case BUILT_IN_ISINFD32:
3441 case BUILT_IN_ISINFD64:
3442 case BUILT_IN_ISINFD128:
3443 case BUILT_IN_ISNAN:
3444 case BUILT_IN_ISNANF:
3445 case BUILT_IN_ISNANL:
3446 case BUILT_IN_ISNAND32:
3447 case BUILT_IN_ISNAND64:
3448 case BUILT_IN_ISNAND128:
3449 case BUILT_IN_ISNORMAL:
3450 case BUILT_IN_ISGREATER:
3451 case BUILT_IN_ISGREATEREQUAL:
3452 case BUILT_IN_ISLESS:
3453 case BUILT_IN_ISLESSEQUAL:
3454 case BUILT_IN_ISLESSGREATER:
3455 case BUILT_IN_ISUNORDERED:
3456 case BUILT_IN_VA_ARG_PACK:
3457 case BUILT_IN_VA_ARG_PACK_LEN:
3458 case BUILT_IN_VA_COPY:
3459 case BUILT_IN_TRAP:
3460 case BUILT_IN_SAVEREGS:
3461 case BUILT_IN_POPCOUNTL:
3462 case BUILT_IN_POPCOUNTLL:
3463 case BUILT_IN_POPCOUNTIMAX:
3464 case BUILT_IN_POPCOUNT:
3465 case BUILT_IN_PARITYL:
3466 case BUILT_IN_PARITYLL:
3467 case BUILT_IN_PARITYIMAX:
3468 case BUILT_IN_PARITY:
3469 case BUILT_IN_LABS:
3470 case BUILT_IN_LLABS:
3471 case BUILT_IN_PREFETCH:
3472 cost = weights->target_builtin_call_cost;
3473 break;
3474
3475 default:
3476 break;
3477 }
3478
3479 if (decl)
3480 funtype = TREE_TYPE (decl);
3481
3482 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3483 cost += estimate_move_cost (TREE_TYPE (funtype));
3484
3485 if (funtype)
3486 stdarg = stdarg_p (funtype);
3487
3488 /* Our cost must be kept in sync with
3489 cgraph_estimate_size_after_inlining that does use function
3490 declaration to figure out the arguments.
3491
3492 For functions taking a variable list of arguments we must
3493 look into the call statement itself. This is safe because
3494 we will get only higher costs and in most cases we will
3495 not inline these anyway. */
3496 if (decl && DECL_ARGUMENTS (decl) && !stdarg)
3497 {
3498 tree arg;
3499 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3500 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3501 cost += estimate_move_cost (TREE_TYPE (arg));
3502 }
3503 else if (funtype && prototype_p (funtype) && !stdarg)
3504 {
3505 tree t;
3506 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3507 t = TREE_CHAIN (t))
3508 if (!VOID_TYPE_P (TREE_VALUE (t)))
3509 cost += estimate_move_cost (TREE_VALUE (t));
3510 }
3511 else
3512 {
3513 for (i = 0; i < gimple_call_num_args (stmt); i++)
3514 {
3515 tree arg = gimple_call_arg (stmt, i);
3516 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3517 cost += estimate_move_cost (TREE_TYPE (arg));
3518 }
3519 }
3520
3521 break;
3522 }
3523
3524 case GIMPLE_GOTO:
3525 case GIMPLE_LABEL:
3526 case GIMPLE_NOP:
3527 case GIMPLE_PHI:
3528 case GIMPLE_RETURN:
3529 case GIMPLE_PREDICT:
3530 case GIMPLE_DEBUG:
3531 return 0;
3532
3533 case GIMPLE_ASM:
3534 return asm_str_count (gimple_asm_string (stmt));
3535
3536 case GIMPLE_RESX:
3537 /* This is either going to be an external function call with one
3538 argument, or two register copy statements plus a goto. */
3539 return 2;
3540
3541 case GIMPLE_EH_DISPATCH:
3542 /* ??? This is going to turn into a switch statement. Ideally
3543 we'd have a look at the eh region and estimate the number of
3544 edges involved. */
3545 return 10;
3546
3547 case GIMPLE_BIND:
3548 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3549
3550 case GIMPLE_EH_FILTER:
3551 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3552
3553 case GIMPLE_CATCH:
3554 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3555
3556 case GIMPLE_TRY:
3557 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3558 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3559
3560 /* OpenMP directives are generally very expensive. */
3561
3562 case GIMPLE_OMP_RETURN:
3563 case GIMPLE_OMP_SECTIONS_SWITCH:
3564 case GIMPLE_OMP_ATOMIC_STORE:
3565 case GIMPLE_OMP_CONTINUE:
3566 /* ...except these, which are cheap. */
3567 return 0;
3568
3569 case GIMPLE_OMP_ATOMIC_LOAD:
3570 return weights->omp_cost;
3571
3572 case GIMPLE_OMP_FOR:
3573 return (weights->omp_cost
3574 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3575 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3576
3577 case GIMPLE_OMP_PARALLEL:
3578 case GIMPLE_OMP_TASK:
3579 case GIMPLE_OMP_CRITICAL:
3580 case GIMPLE_OMP_MASTER:
3581 case GIMPLE_OMP_ORDERED:
3582 case GIMPLE_OMP_SECTION:
3583 case GIMPLE_OMP_SECTIONS:
3584 case GIMPLE_OMP_SINGLE:
3585 return (weights->omp_cost
3586 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3587
3588 default:
3589 gcc_unreachable ();
3590 }
3591
3592 return cost;
3593 }
3594
3595 /* Estimate number of instructions that will be created by expanding
3596 function FNDECL. WEIGHTS contains weights attributed to various
3597 constructs. */
3598
3599 int
3600 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3601 {
3602 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3603 gimple_stmt_iterator bsi;
3604 basic_block bb;
3605 int n = 0;
3606
3607 gcc_assert (my_function && my_function->cfg);
3608 FOR_EACH_BB_FN (bb, my_function)
3609 {
3610 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3611 n += estimate_num_insns (gsi_stmt (bsi), weights);
3612 }
3613
3614 return n;
3615 }
3616
3617
3618 /* Initializes weights used by estimate_num_insns. */
3619
3620 void
3621 init_inline_once (void)
3622 {
3623 eni_size_weights.call_cost = 1;
3624 eni_size_weights.target_builtin_call_cost = 1;
3625 eni_size_weights.div_mod_cost = 1;
3626 eni_size_weights.omp_cost = 40;
3627 eni_size_weights.time_based = false;
3628
3629 /* Estimating the time for a call is difficult, since we have no idea what the
3630 called function does. In the current uses of eni_time_weights,
3631 underestimating the cost does less harm than overestimating it, so
3632 we choose a rather small value here. */
3633 eni_time_weights.call_cost = 10;
3634 eni_time_weights.target_builtin_call_cost = 10;
3635 eni_time_weights.div_mod_cost = 10;
3636 eni_time_weights.omp_cost = 40;
3637 eni_time_weights.time_based = true;
3638 }
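/* For illustration only: with the defaults above, a simple call such as
'g ();' contributes roughly 1 to a size estimate (eni_size_weights) but
about 10 to a time estimate (eni_time_weights), while any non-trivial
OpenMP construct contributes 40 to either; actual totals also include the
per-argument move costs computed in estimate_num_insns. */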
3639
3640 /* Estimate the number of instructions in a gimple_seq. */
3641
3642 int
3643 count_insns_seq (gimple_seq seq, eni_weights *weights)
3644 {
3645 gimple_stmt_iterator gsi;
3646 int n = 0;
3647 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3648 n += estimate_num_insns (gsi_stmt (gsi), weights);
3649
3650 return n;
3651 }
3652
3653
3654 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3655
3656 static void
3657 prepend_lexical_block (tree current_block, tree new_block)
3658 {
3659 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3660 BLOCK_SUBBLOCKS (current_block) = new_block;
3661 BLOCK_SUPERCONTEXT (new_block) = current_block;
3662 }
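/* For example, if CURRENT_BLOCK already had subblocks B1 -> B2, then after
prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) the subblock chain is
NEW_BLOCK -> B1 -> B2 and NEW_BLOCK's supercontext is CURRENT_BLOCK. */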
3663
3664 /* Fetch the callee declaration from the call graph edge going from NODE and
3665 associated with call statement STMT. Return NULL_TREE if not found. */
3666 static tree
3667 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3668 {
3669 struct cgraph_edge *cs;
3670
3671 cs = cgraph_edge (node, stmt);
3672 if (cs && !cs->indirect_unknown_callee)
3673 return cs->callee->decl;
3674
3675 return NULL_TREE;
3676 }
3677
3678 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3679
3680 static bool
3681 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3682 {
3683 tree use_retvar;
3684 tree fn;
3685 struct pointer_map_t *st, *dst;
3686 tree return_slot;
3687 tree modify_dest;
3688 location_t saved_location;
3689 struct cgraph_edge *cg_edge;
3690 cgraph_inline_failed_t reason;
3691 basic_block return_block;
3692 edge e;
3693 gimple_stmt_iterator gsi, stmt_gsi;
3694 bool successfully_inlined = FALSE;
3695 bool purge_dead_abnormal_edges;
3696 tree t_step;
3697 tree var;
3698
3699 /* Set input_location here so we get the right instantiation context
3700 if we call instantiate_decl from inlinable_function_p. */
3701 saved_location = input_location;
3702 if (gimple_has_location (stmt))
3703 input_location = gimple_location (stmt);
3704
3705 /* From here on, we're only interested in CALL_EXPRs. */
3706 if (gimple_code (stmt) != GIMPLE_CALL)
3707 goto egress;
3708
3709 /* First, see if we can figure out what function is being called.
3710 If we cannot, then there is no hope of inlining the function. */
3711 fn = gimple_call_fndecl (stmt);
3712 if (!fn)
3713 {
3714 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3715 if (!fn)
3716 goto egress;
3717 }
3718
3719 /* Turn forward declarations into real ones. */
3720 fn = cgraph_node (fn)->decl;
3721
3722 /* If FN is a declaration of a function in a nested scope that was
3723 globally declared inline, we don't set its DECL_INITIAL.
3724 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3725 C++ front-end uses it for cdtors to refer to their internal
3726 declarations, which are not real functions. Fortunately those
3727 don't have trees to be saved, so we can tell by checking their
3728 gimple_body. */
3729 if (!DECL_INITIAL (fn)
3730 && DECL_ABSTRACT_ORIGIN (fn)
3731 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3732 fn = DECL_ABSTRACT_ORIGIN (fn);
3733
3734 /* Objective C and Fortran still call tree_rest_of_compilation directly.
3735 Kill this check once this is fixed. */
3736 if (!id->dst_node->analyzed)
3737 goto egress;
3738
3739 cg_edge = cgraph_edge (id->dst_node, stmt);
3740
3741 /* First check that inlining isn't simply forbidden in this case. */
3742 if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
3743 goto egress;
3744
3745 /* Don't try to inline functions that are not well-suited to inlining. */
3746 if (!cgraph_inline_p (cg_edge, &reason))
3747 {
3748 /* If this call was originally indirect, we do not want to emit any
3749 inlining-related warnings or sorry messages because there are no
3750 guarantees regarding those. */
3751 if (cg_edge->indirect_inlining_edge)
3752 goto egress;
3753
3754 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3755 /* Avoid warnings during early inline pass. */
3756 && cgraph_global_info_ready)
3757 {
3758 sorry ("inlining failed in call to %q+F: %s", fn,
3759 _(cgraph_inline_failed_string (reason)));
3760 sorry ("called from here");
3761 }
3762 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3763 && !DECL_IN_SYSTEM_HEADER (fn)
3764 && reason != CIF_UNSPECIFIED
3765 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3766 /* Avoid warnings during early inline pass. */
3767 && cgraph_global_info_ready)
3768 {
3769 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3770 fn, _(cgraph_inline_failed_string (reason)));
3771 warning (OPT_Winline, "called from here");
3772 }
3773 goto egress;
3774 }
3775 fn = cg_edge->callee->decl;
3776
3777 #ifdef ENABLE_CHECKING
3778 if (cg_edge->callee->decl != id->dst_node->decl)
3779 verify_cgraph_node (cg_edge->callee);
3780 #endif
3781
3782 /* We will be inlining this callee. */
3783 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3784
3785 /* Update the caller's EH personality. */
3786 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3787 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3788 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3789
3790 /* Split the block holding the GIMPLE_CALL. */
3791 e = split_block (bb, stmt);
3792 bb = e->src;
3793 return_block = e->dest;
3794 remove_edge (e);
3795
3796 /* split_block splits after the statement; work around this by
3797 moving the call into the second block manually. Not pretty,
3798 but seems easier than doing the CFG manipulation by hand
3799 when the GIMPLE_CALL is the last statement of BB. */
3800 stmt_gsi = gsi_last_bb (bb);
3801 gsi_remove (&stmt_gsi, false);
3802
3803 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3804 been the source of abnormal edges. In this case, schedule
3805 the removal of dead abnormal edges. */
3806 gsi = gsi_start_bb (return_block);
3807 if (gsi_end_p (gsi))
3808 {
3809 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3810 purge_dead_abnormal_edges = true;
3811 }
3812 else
3813 {
3814 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3815 purge_dead_abnormal_edges = false;
3816 }
3817
3818 stmt_gsi = gsi_start_bb (return_block);
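/* At this point BB ends just before the original call, RETURN_BLOCK begins
with the GIMPLE_CALL itself, and STMT_GSI points at that call so it can
later be replaced with 'lhs = retvar' or removed; the inlined body will
be copied in between the two blocks. */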
3819
3820 /* Build a block containing code to initialize the arguments, the
3821 actual inline expansion of the body, and a label for the return
3822 statements within the function to jump to. The type of the
3823 statement expression is the return type of the function call. */
3824 id->block = make_node (BLOCK);
3825 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3826 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3827 prepend_lexical_block (gimple_block (stmt), id->block);
3828
3829 /* Local declarations will be replaced by their equivalents in this
3830 map. */
3831 st = id->decl_map;
3832 id->decl_map = pointer_map_create ();
3833 dst = id->debug_map;
3834 id->debug_map = NULL;
3835
3836 /* Record the function we are about to inline. */
3837 id->src_fn = fn;
3838 id->src_node = cg_edge->callee;
3839 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3840 id->gimple_call = stmt;
3841
3842 gcc_assert (!id->src_cfun->after_inlining);
3843
3844 id->entry_bb = bb;
3845 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3846 {
3847 gimple_stmt_iterator si = gsi_last_bb (bb);
3848 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3849 NOT_TAKEN),
3850 GSI_NEW_STMT);
3851 }
3852 initialize_inlined_parameters (id, stmt, fn, bb);
3853
3854 if (DECL_INITIAL (fn))
3855 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3856
3857 /* Return statements in the function body will be replaced by jumps
3858 to the RET_LABEL. */
3859 gcc_assert (DECL_INITIAL (fn));
3860 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3861
3862 /* Find the LHS to which the result of this call is assigned. */
3863 return_slot = NULL;
3864 if (gimple_call_lhs (stmt))
3865 {
3866 modify_dest = gimple_call_lhs (stmt);
3867
3868 /* The function which we are inlining might not return a value,
3869 in which case we should issue a warning that the function
3870 does not return a value. In that case the optimizers will
3871 see that the variable to which the value is assigned was not
3872 initialized. We do not want to issue a warning about that
3873 uninitialized variable. */
3874 if (DECL_P (modify_dest))
3875 TREE_NO_WARNING (modify_dest) = 1;
3876
3877 if (gimple_call_return_slot_opt_p (stmt))
3878 {
3879 return_slot = modify_dest;
3880 modify_dest = NULL;
3881 }
3882 }
3883 else
3884 modify_dest = NULL;
3885
3886 /* If we are inlining a call to the C++ operator new, we don't want
3887 to use type based alias analysis on the return value. Otherwise
3888 we may get confused if the compiler sees that the inlined new
3889 function returns a pointer which was just deleted. See bug
3890 33407. */
3891 if (DECL_IS_OPERATOR_NEW (fn))
3892 {
3893 return_slot = NULL;
3894 modify_dest = NULL;
3895 }
3896
3897 /* Declare the return variable for the function. */
3898 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3899
3900 /* Add local vars in this inlined callee to caller. */
3901 t_step = id->src_cfun->local_decls;
3902 for (; t_step; t_step = TREE_CHAIN (t_step))
3903 {
3904 var = TREE_VALUE (t_step);
3905 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3906 {
3907 if (var_ann (var) && add_referenced_var (var))
3908 cfun->local_decls = tree_cons (NULL_TREE, var,
3909 cfun->local_decls);
3910 }
3911 else if (!can_be_nonlocal (var, id))
3912 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3913 cfun->local_decls);
3914 }
3915
3916 if (dump_file && (dump_flags & TDF_DETAILS))
3917 {
3918 fprintf (dump_file, "Inlining ");
3919 print_generic_expr (dump_file, id->src_fn, 0);
3920 fprintf (dump_file, " to ");
3921 print_generic_expr (dump_file, id->dst_fn, 0);
3922 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3923 }
3924
3925 /* This is it. Duplicate the callee body. Assume callee is
3926 pre-gimplified. Note that we must not alter the caller
3927 function in any way before this point, as this CALL_EXPR may be
3928 a self-referential call; if we're calling ourselves, we need to
3929 duplicate our body before altering anything. */
3930 copy_body (id, bb->count,
3931 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3932 bb, return_block, NULL, NULL);
3933
3934 /* Reset the escaped solution. */
3935 if (cfun->gimple_df)
3936 pt_solution_reset (&cfun->gimple_df->escaped);
3937
3938 /* Clean up. */
3939 if (id->debug_map)
3940 {
3941 pointer_map_destroy (id->debug_map);
3942 id->debug_map = dst;
3943 }
3944 pointer_map_destroy (id->decl_map);
3945 id->decl_map = st;
3946
3947 /* Unlink the call's virtual operands before replacing it. */
3948 unlink_stmt_vdef (stmt);
3949
3950 /* If the inlined function returns a result that we care about,
3951 substitute the GIMPLE_CALL with an assignment of the return
3952 variable to the LHS of the call. That is, if STMT was
3953 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3954 if (use_retvar && gimple_call_lhs (stmt))
3955 {
3956 gimple old_stmt = stmt;
3957 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3958 gsi_replace (&stmt_gsi, stmt, false);
3959 if (gimple_in_ssa_p (cfun))
3960 mark_symbols_for_renaming (stmt);
3961 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3962 }
3963 else
3964 {
3965 /* Handle the case of inlining a function with no return
3966 statement, which causes the return value to become undefined. */
3967 if (gimple_call_lhs (stmt)
3968 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3969 {
3970 tree name = gimple_call_lhs (stmt);
3971 tree var = SSA_NAME_VAR (name);
3972 tree def = gimple_default_def (cfun, var);
3973
3974 if (def)
3975 {
3976 /* If the variable is used undefined, make this name
3977 undefined via a move. */
3978 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3979 gsi_replace (&stmt_gsi, stmt, true);
3980 }
3981 else
3982 {
3983 /* Otherwise make this variable undefined. */
3984 gsi_remove (&stmt_gsi, true);
3985 set_default_def (var, name);
3986 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3987 }
3988 }
3989 else
3990 gsi_remove (&stmt_gsi, true);
3991 }
3992
3993 if (purge_dead_abnormal_edges)
3994 gimple_purge_dead_abnormal_call_edges (return_block);
3995
3996 /* If the value of the new expression is ignored, that's OK. We
3997 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3998 the equivalent inlined version either. */
3999 if (is_gimple_assign (stmt))
4000 {
4001 gcc_assert (gimple_assign_single_p (stmt)
4002 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4003 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4004 }
4005
4006 /* Output the inlining info for this abstract function, since it has been
4007 inlined. If we don't do this now, we can lose the information about the
4008 variables in the function when the blocks get blown away as soon as we
4009 remove the cgraph node. */
4010 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4011
4012 /* Update callgraph if needed. */
4013 cgraph_remove_node (cg_edge->callee);
4014
4015 id->block = NULL_TREE;
4016 successfully_inlined = TRUE;
4017
4018 egress:
4019 input_location = saved_location;
4020 return successfully_inlined;
4021 }
4022
4023 /* Expand call statements in basic block BB.
4024 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4025 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately
4026 we cannot use that function here because we need a pointer
4027 to the CALL_EXPR, not the tree itself. */
4028
4029 static bool
4030 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4031 {
4032 gimple_stmt_iterator gsi;
4033
4034 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4035 {
4036 gimple stmt = gsi_stmt (gsi);
4037
4038 if (is_gimple_call (stmt)
4039 && expand_call_inline (bb, stmt, id))
4040 return true;
4041 }
4042
4043 return false;
4044 }
4045
4046
4047 /* Walk all basic blocks created after FIRST and try to fold every statement
4048 in the STATEMENTS pointer set. */
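/* FIRST is typically the n_basic_blocks value recorded before inlining, so
the loop below only visits blocks created since then; see
optimize_inline_calls, which passes its saved count, and
tree_function_versioning, which passes 0 to fold everything. */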
4049
4050 static void
4051 fold_marked_statements (int first, struct pointer_set_t *statements)
4052 {
4053 for (; first < n_basic_blocks; first++)
4054 if (BASIC_BLOCK (first))
4055 {
4056 gimple_stmt_iterator gsi;
4057
4058 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4059 !gsi_end_p (gsi);
4060 gsi_next (&gsi))
4061 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4062 {
4063 gimple old_stmt = gsi_stmt (gsi);
4064 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4065
4066 if (old_decl && DECL_BUILT_IN (old_decl))
4067 {
4068 /* Folding builtins can create multiple instructions,
4069 we need to look at all of them. */
4070 gimple_stmt_iterator i2 = gsi;
4071 gsi_prev (&i2);
4072 if (fold_stmt (&gsi))
4073 {
4074 gimple new_stmt;
4075 if (gsi_end_p (i2))
4076 i2 = gsi_start_bb (BASIC_BLOCK (first));
4077 else
4078 gsi_next (&i2);
4079 while (1)
4080 {
4081 new_stmt = gsi_stmt (i2);
4082 update_stmt (new_stmt);
4083 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4084 new_stmt);
4085
4086 if (new_stmt == gsi_stmt (gsi))
4087 {
4088 /* It is okay to check only the very last
4089 of these statements. If it is a throwing
4090 statement nothing will change. If it isn't,
4091 this can remove EH edges. The only case where
4092 that would be wrong is if some intermediate
4093 statement threw but the last one did not; then
4094 we would have to split the block, which we
4095 cannot do here and would lose anyway. And as
4096 builtins probably never throw, this all
4097 is moot anyway. */
4098 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4099 new_stmt))
4100 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4101 break;
4102 }
4103 gsi_next (&i2);
4104 }
4105 }
4106 }
4107 else if (fold_stmt (&gsi))
4108 {
4109 /* Re-read the statement from GSI as fold_stmt() may
4110 have changed it. */
4111 gimple new_stmt = gsi_stmt (gsi);
4112 update_stmt (new_stmt);
4113
4114 if (is_gimple_call (old_stmt)
4115 || is_gimple_call (new_stmt))
4116 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4117 new_stmt);
4118
4119 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4120 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4121 }
4122 }
4123 }
4124 }
4125
4126 /* Return true if BB has at least one abnormal outgoing edge. */
4127
4128 static inline bool
4129 has_abnormal_outgoing_edge_p (basic_block bb)
4130 {
4131 edge e;
4132 edge_iterator ei;
4133
4134 FOR_EACH_EDGE (e, ei, bb->succs)
4135 if (e->flags & EDGE_ABNORMAL)
4136 return true;
4137
4138 return false;
4139 }
4140
4141 /* Expand calls to inline functions in the body of FN. */
4142
4143 unsigned int
4144 optimize_inline_calls (tree fn)
4145 {
4146 copy_body_data id;
4147 basic_block bb;
4148 int last = n_basic_blocks;
4149 struct gimplify_ctx gctx;
4150
4151 /* There is no point in performing inlining if errors have already
4152 occurred -- and we might crash if we try to inline invalid
4153 code. */
4154 if (seen_error ())
4155 return 0;
4156
4157 /* Clear out ID. */
4158 memset (&id, 0, sizeof (id));
4159
4160 id.src_node = id.dst_node = cgraph_node (fn);
4161 id.dst_fn = fn;
4162 /* Or any functions that aren't finished yet. */
4163 if (current_function_decl)
4164 id.dst_fn = current_function_decl;
4165
4166 id.copy_decl = copy_decl_maybe_to_var;
4167 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4168 id.transform_new_cfg = false;
4169 id.transform_return_to_modify = true;
4170 id.transform_lang_insert_block = NULL;
4171 id.statements_to_fold = pointer_set_create ();
4172
4173 push_gimplify_context (&gctx);
4174
4175 /* We make no attempts to keep dominance info up-to-date. */
4176 free_dominance_info (CDI_DOMINATORS);
4177 free_dominance_info (CDI_POST_DOMINATORS);
4178
4179 /* Register specific gimple functions. */
4180 gimple_register_cfg_hooks ();
4181
4182 /* Reach the trees by walking over the CFG, and note the
4183 enclosing basic-blocks in the call edges. */
4184 /* We walk the blocks going forward, because inlined function bodies
4185 will split id->current_basic_block, and the new blocks will
4186 follow it; we'll trudge through them, processing their CALL_EXPRs
4187 along the way. */
4188 FOR_EACH_BB (bb)
4189 gimple_expand_calls_inline (bb, &id);
4190
4191 pop_gimplify_context (NULL);
4192
4193 #ifdef ENABLE_CHECKING
4194 {
4195 struct cgraph_edge *e;
4196
4197 verify_cgraph_node (id.dst_node);
4198
4199 /* Double check that we inlined everything we are supposed to inline. */
4200 for (e = id.dst_node->callees; e; e = e->next_callee)
4201 gcc_assert (e->inline_failed);
4202 }
4203 #endif
4204
4205 /* Fold the statements before compacting/renumbering the basic blocks. */
4206 fold_marked_statements (last, id.statements_to_fold);
4207 pointer_set_destroy (id.statements_to_fold);
4208
4209 gcc_assert (!id.debug_stmts);
4210
4211 /* Renumber the (code) basic_blocks consecutively. */
4212 compact_blocks ();
4213 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4214 number_blocks (fn);
4215
4216 fold_cond_expr_cond ();
4217 delete_unreachable_blocks_update_callgraph (&id);
4218 #ifdef ENABLE_CHECKING
4219 verify_cgraph_node (id.dst_node);
4220 #endif
4221
4222 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4223 not possible yet - the IPA passes might make various functions not
4224 throw, and they don't care to proactively update local EH info. This is
4225 done later in the fixup_cfg pass, which also executes the verification. */
4226 return (TODO_update_ssa
4227 | TODO_cleanup_cfg
4228 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4229 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4230 }
4231
4232 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4233
4234 tree
4235 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4236 {
4237 enum tree_code code = TREE_CODE (*tp);
4238 enum tree_code_class cl = TREE_CODE_CLASS (code);
4239
4240 /* We make copies of most nodes. */
4241 if (IS_EXPR_CODE_CLASS (cl)
4242 || code == TREE_LIST
4243 || code == TREE_VEC
4244 || code == TYPE_DECL
4245 || code == OMP_CLAUSE)
4246 {
4247 /* Because the chain gets clobbered when we make a copy, we save it
4248 here. */
4249 tree chain = NULL_TREE, new_tree;
4250
4251 chain = TREE_CHAIN (*tp);
4252
4253 /* Copy the node. */
4254 new_tree = copy_node (*tp);
4255
4256 /* Propagate mudflap marked-ness. */
4257 if (flag_mudflap && mf_marked_p (*tp))
4258 mf_mark (new_tree);
4259
4260 *tp = new_tree;
4261
4262 /* Now, restore the chain, if appropriate. That will cause
4263 walk_tree to walk into the chain as well. */
4264 if (code == PARM_DECL
4265 || code == TREE_LIST
4266 || code == OMP_CLAUSE)
4267 TREE_CHAIN (*tp) = chain;
4268
4269 /* For now, we don't update BLOCKs when we make copies. So, we
4270 have to nullify all BIND_EXPRs. */
4271 if (TREE_CODE (*tp) == BIND_EXPR)
4272 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4273 }
4274 else if (code == CONSTRUCTOR)
4275 {
4276 /* CONSTRUCTOR nodes need special handling because
4277 we need to duplicate the vector of elements. */
4278 tree new_tree;
4279
4280 new_tree = copy_node (*tp);
4281
4282 /* Propagate mudflap marked-ness. */
4283 if (flag_mudflap && mf_marked_p (*tp))
4284 mf_mark (new_tree);
4285
4286 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4287 CONSTRUCTOR_ELTS (*tp));
4288 *tp = new_tree;
4289 }
4290 else if (TREE_CODE_CLASS (code) == tcc_type)
4291 *walk_subtrees = 0;
4292 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4293 *walk_subtrees = 0;
4294 else if (TREE_CODE_CLASS (code) == tcc_constant)
4295 *walk_subtrees = 0;
4296 else
4297 gcc_assert (code != STATEMENT_LIST);
4298 return NULL_TREE;
4299 }
4300
4301 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4302 information indicating to what new SAVE_EXPR this one should be mapped,
4303 use that one. Otherwise, create a new node and enter it in ST. FN is
4304 the function into which the copy will be placed. */
4305
4306 static void
4307 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4308 {
4309 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4310 tree *n;
4311 tree t;
4312
4313 /* See if we already encountered this SAVE_EXPR. */
4314 n = (tree *) pointer_map_contains (st, *tp);
4315
4316 /* If we didn't already remap this SAVE_EXPR, do so now. */
4317 if (!n)
4318 {
4319 t = copy_node (*tp);
4320
4321 /* Remember this SAVE_EXPR. */
4322 *pointer_map_insert (st, *tp) = t;
4323 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4324 *pointer_map_insert (st, t) = t;
4325 }
4326 else
4327 {
4328 /* We've already walked into this SAVE_EXPR; don't do it again. */
4329 *walk_subtrees = 0;
4330 t = *n;
4331 }
4332
4333 /* Replace this SAVE_EXPR with the copy. */
4334 *tp = t;
4335 }
4336
4337 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4338 copies the declaration and enters it in the decl map in DATA (which is
4339 really a `copy_body_data *'). */
4340
4341 static tree
4342 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4343 void *data)
4344 {
4345 copy_body_data *id = (copy_body_data *) data;
4346
4347 /* Don't walk into types. */
4348 if (TYPE_P (*tp))
4349 *walk_subtrees = 0;
4350
4351 else if (TREE_CODE (*tp) == LABEL_EXPR)
4352 {
4353 tree decl = TREE_OPERAND (*tp, 0);
4354
4355 /* Copy the decl and remember the copy. */
4356 insert_decl_map (id, decl, id->copy_decl (decl, id));
4357 }
4358
4359 return NULL_TREE;
4360 }
4361
4362 /* Perform any modifications to EXPR required when it is unsaved. Does
4363 not recurse into EXPR's subtrees. */
4364
4365 static void
4366 unsave_expr_1 (tree expr)
4367 {
4368 switch (TREE_CODE (expr))
4369 {
4370 case TARGET_EXPR:
4371 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4372 It's OK for this to happen if it was part of a subtree that
4373 isn't immediately expanded, such as operand 2 of another
4374 TARGET_EXPR. */
4375 if (TREE_OPERAND (expr, 1))
4376 break;
4377
4378 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4379 TREE_OPERAND (expr, 3) = NULL_TREE;
4380 break;
4381
4382 default:
4383 break;
4384 }
4385 }
4386
4387 /* Called via walk_tree when an expression is unsaved. Using the
4388 decl map in DATA (which is really a `copy_body_data *'),
4389 remaps all local declarations to appropriate replacements. */
4390
4391 static tree
4392 unsave_r (tree *tp, int *walk_subtrees, void *data)
4393 {
4394 copy_body_data *id = (copy_body_data *) data;
4395 struct pointer_map_t *st = id->decl_map;
4396 tree *n;
4397
4398 /* Only a local declaration (variable or label). */
4399 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4400 || TREE_CODE (*tp) == LABEL_DECL)
4401 {
4402 /* Lookup the declaration. */
4403 n = (tree *) pointer_map_contains (st, *tp);
4404
4405 /* If it's there, remap it. */
4406 if (n)
4407 *tp = *n;
4408 }
4409
4410 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4411 gcc_unreachable ();
4412 else if (TREE_CODE (*tp) == BIND_EXPR)
4413 copy_bind_expr (tp, walk_subtrees, id);
4414 else if (TREE_CODE (*tp) == SAVE_EXPR
4415 || TREE_CODE (*tp) == TARGET_EXPR)
4416 remap_save_expr (tp, st, walk_subtrees);
4417 else
4418 {
4419 copy_tree_r (tp, walk_subtrees, NULL);
4420
4421 /* Do whatever unsaving is required. */
4422 unsave_expr_1 (*tp);
4423 }
4424
4425 /* Keep iterating. */
4426 return NULL_TREE;
4427 }
4428
4429 /* Copies everything in EXPR and replaces variables, labels
4430 and SAVE_EXPRs local to EXPR. */
4431
4432 tree
4433 unsave_expr_now (tree expr)
4434 {
4435 copy_body_data id;
4436
4437 /* There's nothing to do for NULL_TREE. */
4438 if (expr == 0)
4439 return expr;
4440
4441 /* Set up ID. */
4442 memset (&id, 0, sizeof (id));
4443 id.src_fn = current_function_decl;
4444 id.dst_fn = current_function_decl;
4445 id.decl_map = pointer_map_create ();
4446 id.debug_map = NULL;
4447
4448 id.copy_decl = copy_decl_no_change;
4449 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4450 id.transform_new_cfg = false;
4451 id.transform_return_to_modify = false;
4452 id.transform_lang_insert_block = NULL;
4453
4454 /* Walk the tree once to find local labels. */
4455 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4456
4457 /* Walk the tree again, copying, remapping, and unsaving. */
4458 walk_tree (&expr, unsave_r, &id, NULL);
4459
4460 /* Clean up. */
4461 pointer_map_destroy (id.decl_map);
4462 if (id.debug_map)
4463 pointer_map_destroy (id.debug_map);
4464
4465 return expr;
4466 }
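/* Illustrative use (a sketch only): given a GENERIC tree EXPR that may share
SAVE_EXPRs or local labels with other trees, 'tree copy = unsave_expr_now
(expr);' yields a deep copy in which those local nodes have been replaced,
so the copy can be modified without disturbing the original. */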
4467
4468 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4469 label, copies the declaration and enters it in the decl map in DATA (which
4470 is really a `copy_body_data *'). */
4471
4472 static tree
4473 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4474 bool *handled_ops_p ATTRIBUTE_UNUSED,
4475 struct walk_stmt_info *wi)
4476 {
4477 copy_body_data *id = (copy_body_data *) wi->info;
4478 gimple stmt = gsi_stmt (*gsip);
4479
4480 if (gimple_code (stmt) == GIMPLE_LABEL)
4481 {
4482 tree decl = gimple_label_label (stmt);
4483
4484 /* Copy the decl and remember the copy. */
4485 insert_decl_map (id, decl, id->copy_decl (decl, id));
4486 }
4487
4488 return NULL_TREE;
4489 }
4490
4491
4492 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4493 Using the decl map in the `copy_body_data *' carried by WI->info,
4494 remaps all local declarations to appropriate replacements in gimple
4495 operands. */
4496
4497 static tree
4498 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4499 {
4500 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4501 copy_body_data *id = (copy_body_data *) wi->info;
4502 struct pointer_map_t *st = id->decl_map;
4503 tree *n;
4504 tree expr = *tp;
4505
4506 /* Only a local declaration (variable or label). */
4507 if ((TREE_CODE (expr) == VAR_DECL
4508 && !TREE_STATIC (expr))
4509 || TREE_CODE (expr) == LABEL_DECL)
4510 {
4511 /* Lookup the declaration. */
4512 n = (tree *) pointer_map_contains (st, expr);
4513
4514 /* If it's there, remap it. */
4515 if (n)
4516 *tp = *n;
4517 *walk_subtrees = 0;
4518 }
4519 else if (TREE_CODE (expr) == STATEMENT_LIST
4520 || TREE_CODE (expr) == BIND_EXPR
4521 || TREE_CODE (expr) == SAVE_EXPR)
4522 gcc_unreachable ();
4523 else if (TREE_CODE (expr) == TARGET_EXPR)
4524 {
4525 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4526 It's OK for this to happen if it was part of a subtree that
4527 isn't immediately expanded, such as operand 2 of another
4528 TARGET_EXPR. */
4529 if (!TREE_OPERAND (expr, 1))
4530 {
4531 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4532 TREE_OPERAND (expr, 3) = NULL_TREE;
4533 }
4534 }
4535
4536 /* Keep iterating. */
4537 return NULL_TREE;
4538 }
4539
4540
4541 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4542 Using the decl map in the `copy_body_data *' carried by WI->info,
4543 remaps all local declarations to appropriate replacements in gimple
4544 statements. */
4545
4546 static tree
4547 replace_locals_stmt (gimple_stmt_iterator *gsip,
4548 bool *handled_ops_p ATTRIBUTE_UNUSED,
4549 struct walk_stmt_info *wi)
4550 {
4551 copy_body_data *id = (copy_body_data *) wi->info;
4552 gimple stmt = gsi_stmt (*gsip);
4553
4554 if (gimple_code (stmt) == GIMPLE_BIND)
4555 {
4556 tree block = gimple_bind_block (stmt);
4557
4558 if (block)
4559 {
4560 remap_block (&block, id);
4561 gimple_bind_set_block (stmt, block);
4562 }
4563
4564 /* This will remap a lot of the same decls again, but this should be
4565 harmless. */
4566 if (gimple_bind_vars (stmt))
4567 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4568 }
4569
4570 /* Keep iterating. */
4571 return NULL_TREE;
4572 }
4573
4574
4575 /* Copies everything in SEQ and replaces variables and labels local to
4576 current_function_decl. */
4577
4578 gimple_seq
4579 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4580 {
4581 copy_body_data id;
4582 struct walk_stmt_info wi;
4583 struct pointer_set_t *visited;
4584 gimple_seq copy;
4585
4586 /* There's nothing to do for an empty sequence. */
4587 if (seq == NULL)
4588 return seq;
4589
4590 /* Set up ID. */
4591 memset (&id, 0, sizeof (id));
4592 id.src_fn = current_function_decl;
4593 id.dst_fn = current_function_decl;
4594 id.decl_map = pointer_map_create ();
4595 id.debug_map = NULL;
4596
4597 id.copy_decl = copy_decl_no_change;
4598 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4599 id.transform_new_cfg = false;
4600 id.transform_return_to_modify = false;
4601 id.transform_lang_insert_block = NULL;
4602
4603 /* Walk the tree once to find local labels. */
4604 memset (&wi, 0, sizeof (wi));
4605 visited = pointer_set_create ();
4606 wi.info = &id;
4607 wi.pset = visited;
4608 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4609 pointer_set_destroy (visited);
4610
4611 copy = gimple_seq_copy (seq);
4612
4613 /* Walk the copy, remapping decls. */
4614 memset (&wi, 0, sizeof (wi));
4615 wi.info = &id;
4616 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4617
4618 /* Clean up. */
4619 pointer_map_destroy (id.decl_map);
4620 if (id.debug_map)
4621 pointer_map_destroy (id.debug_map);
4622
4623 return copy;
4624 }
4625
4626
4627 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4628
4629 static tree
4630 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4631 {
4632 if (*tp == data)
4633 return (tree) data;
4634 else
4635 return NULL;
4636 }
4637
4638 DEBUG_FUNCTION bool
4639 debug_find_tree (tree top, tree search)
4640 {
4641 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4642 }
4643
4644
4645 /* Declare the variables created by the inliner. Add all the variables in
4646 VARS to BLOCK. */
4647
4648 static void
4649 declare_inline_vars (tree block, tree vars)
4650 {
4651 tree t;
4652 for (t = vars; t; t = TREE_CHAIN (t))
4653 {
4654 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4655 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4656 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4657 }
4658
4659 if (block)
4660 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4661 }
4662
4663 /* Finish up the copy COPY of declaration DECL. The DECL was originally
4664 in ID->src_fn, but the copy will live in ID->dst_fn. Used by the various
4665 copy_decl_* routines below. */
4666
4667 static tree
4668 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4669 {
4670 /* Don't generate debug information for the copy if we wouldn't have
4671 generated it for the original either. */
4672 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4673 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4674
4675 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4676 declaration inspired this copy. */
4677 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4678
4679 /* The new variable/label has no RTL, yet. */
4680 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4681 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4682 SET_DECL_RTL (copy, 0);
4683
4684 /* These args would always appear unused, if not for this. */
4685 TREE_USED (copy) = 1;
4686
4687 /* Set the context for the new declaration. */
4688 if (!DECL_CONTEXT (decl))
4689 /* Globals stay global. */
4690 ;
4691 else if (DECL_CONTEXT (decl) != id->src_fn)
4692 /* Things that weren't in the scope of the function we're inlining
4693 from aren't in the scope we're inlining to, either. */
4694 ;
4695 else if (TREE_STATIC (decl))
4696 /* Function-scoped static variables should stay in the original
4697 function. */
4698 ;
4699 else
4700 /* Ordinary automatic local variables are now in the scope of the
4701 new function. */
4702 DECL_CONTEXT (copy) = id->dst_fn;
4703
4704 return copy;
4705 }
4706
4707 static tree
4708 copy_decl_to_var (tree decl, copy_body_data *id)
4709 {
4710 tree copy, type;
4711
4712 gcc_assert (TREE_CODE (decl) == PARM_DECL
4713 || TREE_CODE (decl) == RESULT_DECL);
4714
4715 type = TREE_TYPE (decl);
4716
4717 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4718 VAR_DECL, DECL_NAME (decl), type);
4719 if (DECL_PT_UID_SET_P (decl))
4720 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4721 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4722 TREE_READONLY (copy) = TREE_READONLY (decl);
4723 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4724 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4725
4726 return copy_decl_for_dup_finish (id, decl, copy);
4727 }
4728
4729 /* Like copy_decl_to_var, but create a return slot object instead of a
4730 pointer variable for return by invisible reference. */
4731
4732 static tree
4733 copy_result_decl_to_var (tree decl, copy_body_data *id)
4734 {
4735 tree copy, type;
4736
4737 gcc_assert (TREE_CODE (decl) == PARM_DECL
4738 || TREE_CODE (decl) == RESULT_DECL);
4739
4740 type = TREE_TYPE (decl);
4741 if (DECL_BY_REFERENCE (decl))
4742 type = TREE_TYPE (type);
4743
4744 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4745 VAR_DECL, DECL_NAME (decl), type);
4746 if (DECL_PT_UID_SET_P (decl))
4747 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4748 TREE_READONLY (copy) = TREE_READONLY (decl);
4749 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4750 if (!DECL_BY_REFERENCE (decl))
4751 {
4752 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4753 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4754 }
4755
4756 return copy_decl_for_dup_finish (id, decl, copy);
4757 }
4758
4759 tree
4760 copy_decl_no_change (tree decl, copy_body_data *id)
4761 {
4762 tree copy;
4763
4764 copy = copy_node (decl);
4765
4766 /* The COPY is not abstract; it will be generated in DST_FN. */
4767 DECL_ABSTRACT (copy) = 0;
4768 lang_hooks.dup_lang_specific_decl (copy);
4769
4770 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4771 been taken; it's for internal bookkeeping in expand_goto_internal. */
4772 if (TREE_CODE (copy) == LABEL_DECL)
4773 {
4774 TREE_ADDRESSABLE (copy) = 0;
4775 LABEL_DECL_UID (copy) = -1;
4776 }
4777
4778 return copy_decl_for_dup_finish (id, decl, copy);
4779 }
4780
4781 static tree
4782 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4783 {
4784 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4785 return copy_decl_to_var (decl, id);
4786 else
4787 return copy_decl_no_change (decl, id);
4788 }
4789
4790 /* Return a copy of the function's argument tree. */
4791 static tree
4792 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4793 bitmap args_to_skip, tree *vars)
4794 {
4795 tree arg, *parg;
4796 tree new_parm = NULL;
4797 int i = 0;
4798
4799 parg = &new_parm;
4800
4801 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4802 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4803 {
4804 tree new_tree = remap_decl (arg, id);
4805 lang_hooks.dup_lang_specific_decl (new_tree);
4806 *parg = new_tree;
4807 parg = &TREE_CHAIN (new_tree);
4808 }
4809 else if (!pointer_map_contains (id->decl_map, arg))
4810 {
4811 /* Make an equivalent VAR_DECL. If the argument was used
4812 as a temporary variable later in the function, the uses will be
4813 replaced by the local variable. */
4814 tree var = copy_decl_to_var (arg, id);
4815 get_var_ann (var);
4816 add_referenced_var (var);
4817 insert_decl_map (id, arg, var);
4818 /* Declare this new variable. */
4819 TREE_CHAIN (var) = *vars;
4820 *vars = var;
4821 }
4822 return new_parm;
4823 }
4824
4825 /* Return a copy of the function's static chain. */
4826 static tree
4827 copy_static_chain (tree static_chain, copy_body_data * id)
4828 {
4829 tree *chain_copy, *pvar;
4830
4831 chain_copy = &static_chain;
4832 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4833 {
4834 tree new_tree = remap_decl (*pvar, id);
4835 lang_hooks.dup_lang_specific_decl (new_tree);
4836 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4837 *pvar = new_tree;
4838 }
4839 return static_chain;
4840 }
4841
4842 /* Return true if the function is allowed to be versioned.
4843 This is a guard for the versioning functionality. */
4844
4845 bool
4846 tree_versionable_function_p (tree fndecl)
4847 {
4848 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4849 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4850 }
4851
4852 /* Delete all unreachable basic blocks and update callgraph.
4853 Doing so is somewhat nontrivial because we need to update all clones and
4854 remove inline functions that become unreachable. */
4855
4856 static bool
4857 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4858 {
4859 bool changed = false;
4860 basic_block b, next_bb;
4861
4862 find_unreachable_blocks ();
4863
4864 /* Delete all unreachable basic blocks. */
4865
4866 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4867 {
4868 next_bb = b->next_bb;
4869
4870 if (!(b->flags & BB_REACHABLE))
4871 {
4872 gimple_stmt_iterator bsi;
4873
4874 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4875 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4876 {
4877 struct cgraph_edge *e;
4878 struct cgraph_node *node;
4879
4880 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4881 {
4882 if (!e->inline_failed)
4883 cgraph_remove_node_and_inline_clones (e->callee);
4884 else
4885 cgraph_remove_edge (e);
4886 }
4887 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4888 && id->dst_node->clones)
4889 for (node = id->dst_node->clones; node != id->dst_node;)
4890 {
4891 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4892 {
4893 if (!e->inline_failed)
4894 cgraph_remove_node_and_inline_clones (e->callee);
4895 else
4896 cgraph_remove_edge (e);
4897 }
4898
4899 if (node->clones)
4900 node = node->clones;
4901 else if (node->next_sibling_clone)
4902 node = node->next_sibling_clone;
4903 else
4904 {
4905 while (node != id->dst_node && !node->next_sibling_clone)
4906 node = node->clone_of;
4907 if (node != id->dst_node)
4908 node = node->next_sibling_clone;
4909 }
4910 }
4911 }
4912 delete_basic_block (b);
4913 changed = true;
4914 }
4915 }
4916
4917 if (changed)
4918 tidy_fallthru_edges ();
4919 return changed;
4920 }
4921
4922 /* Update clone info after duplication. */
4923
4924 static void
4925 update_clone_info (copy_body_data * id)
4926 {
4927 struct cgraph_node *node;
4928 if (!id->dst_node->clones)
4929 return;
4930 for (node = id->dst_node->clones; node != id->dst_node;)
4931 {
4932 /* First update replace maps to match the new body. */
4933 if (node->clone.tree_map)
4934 {
4935 unsigned int i;
4936 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4937 {
4938 struct ipa_replace_map *replace_info;
4939 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4940 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4941 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4942 }
4943 }
4944 if (node->clones)
4945 node = node->clones;
4946 else if (node->next_sibling_clone)
4947 node = node->next_sibling_clone;
4948 else
4949 {
4950 while (node != id->dst_node && !node->next_sibling_clone)
4951 node = node->clone_of;
4952 if (node != id->dst_node)
4953 node = node->next_sibling_clone;
4954 }
4955 }
4956 }
4957
4958 /* Create a copy of a function's tree.
4959 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4960 of the original function and the new copied function
4961 respectively. In case we want to replace a DECL
4962 tree with another tree while duplicating the function's
4963 body, TREE_MAP represents the mapping between these
4964 trees. If UPDATE_CLONES is set, the call_stmt fields
4965 of edges of clones of the function will be updated.
4966
4967 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
4968 from the new version.
4969 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4970 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4971 */
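/* A hedged usage sketch (not prescribed by this file): an IPA pass that has
already created NEW_DECL as a copy of OLD_DECL's FUNCTION_DECL would
typically call tree_function_versioning (old_decl, new_decl, tree_map,
false, args_to_skip, NULL, NULL), where TREE_MAP supplies replacement
values (e.g. known constants) for some parameters and ARGS_TO_SKIP marks
parameters to drop entirely from the new version. */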
4972 void
4973 tree_function_versioning (tree old_decl, tree new_decl,
4974 VEC(ipa_replace_map_p,gc)* tree_map,
4975 bool update_clones, bitmap args_to_skip,
4976 bitmap blocks_to_copy, basic_block new_entry)
4977 {
4978 struct cgraph_node *old_version_node;
4979 struct cgraph_node *new_version_node;
4980 copy_body_data id;
4981 tree p;
4982 unsigned i;
4983 struct ipa_replace_map *replace_info;
4984 basic_block old_entry_block, bb;
4985 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4986
4987 tree t_step;
4988 tree old_current_function_decl = current_function_decl;
4989 tree vars = NULL_TREE;
4990
4991 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4992 && TREE_CODE (new_decl) == FUNCTION_DECL);
4993 DECL_POSSIBLY_INLINED (old_decl) = 1;
4994
4995 old_version_node = cgraph_node (old_decl);
4996 new_version_node = cgraph_node (new_decl);
4997
4998 /* Output the inlining info for this abstract function, since it has been
4999 inlined. If we don't do this now, we can lose the information about the
5000 variables in the function when the blocks get blown away as soon as we
5001 remove the cgraph node. */
5002 (*debug_hooks->outlining_inline_function) (old_decl);
5003
5004 DECL_ARTIFICIAL (new_decl) = 1;
5005 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5006 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5007
5008 /* Prepare the data structures for the tree copy. */
5009 memset (&id, 0, sizeof (id));
5010
5011 /* Generate a new name for the new version. */
5012 id.statements_to_fold = pointer_set_create ();
5013
5014 id.decl_map = pointer_map_create ();
5015 id.debug_map = NULL;
5016 id.src_fn = old_decl;
5017 id.dst_fn = new_decl;
5018 id.src_node = old_version_node;
5019 id.dst_node = new_version_node;
5020 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5021 if (id.src_node->ipa_transforms_to_apply)
5022 {
5023 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5024 unsigned int i;
5025
5026 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5027 id.src_node->ipa_transforms_to_apply);
5028 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5029 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5030 VEC_index (ipa_opt_pass,
5031 old_transforms_to_apply,
5032 i));
5033 }
5034
5035 id.copy_decl = copy_decl_no_change;
5036 id.transform_call_graph_edges
5037 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5038 id.transform_new_cfg = true;
5039 id.transform_return_to_modify = false;
5040 id.transform_lang_insert_block = NULL;
5041
5042 current_function_decl = new_decl;
5043 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5044 (DECL_STRUCT_FUNCTION (old_decl));
5045 initialize_cfun (new_decl, old_decl,
5046 old_entry_block->count);
5047 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5048 = id.src_cfun->gimple_df->ipa_pta;
5049 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5050
5051 /* Copy the function's static chain. */
5052 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5053 if (p)
5054 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5055 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5056 &id);
5057
5058 /* If there's a tree_map, prepare for substitution. */
5059 if (tree_map)
5060 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5061 {
5062 gimple init;
5063 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5064 if (replace_info->replace_p)
5065 {
5066 tree op = replace_info->new_tree;
5067 if (!replace_info->old_tree)
5068 {
5069 int i = replace_info->parm_num;
5070 tree parm;
5071 for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
5072 i --;
5073 replace_info->old_tree = parm;
5074 }
5075
5076
5077 STRIP_NOPS (op);
5078
5079 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5080 op = TREE_OPERAND (op, 0);
5081
5082 if (TREE_CODE (op) == ADDR_EXPR)
5083 {
5084 op = TREE_OPERAND (op, 0);
5085 while (handled_component_p (op))
5086 op = TREE_OPERAND (op, 0);
5087 if (TREE_CODE (op) == VAR_DECL)
5088 add_referenced_var (op);
5089 }
5090 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5091 init = setup_one_parameter (&id, replace_info->old_tree,
5092 replace_info->new_tree, id.src_fn,
5093 NULL,
5094 &vars);
5095 if (init)
5096 VEC_safe_push (gimple, heap, init_stmts, init);
5097 }
5098 }
5099 /* Copy the function's arguments. */
5100 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5101 DECL_ARGUMENTS (new_decl) =
5102 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5103 args_to_skip, &vars);
5104
5105 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5106
5107 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5108 number_blocks (id.dst_fn);
5109
5110 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5111
5112 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
5113 /* Add local vars. */
5114 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
5115 t_step; t_step = TREE_CHAIN (t_step))
5116 {
5117 tree var = TREE_VALUE (t_step);
5118 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
5119 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
5120 else if (!can_be_nonlocal (var, &id))
5121 cfun->local_decls =
5122 tree_cons (NULL_TREE, remap_decl (var, &id),
5123 cfun->local_decls);
5124 }
5125
5126 /* Copy the function's body. */
5127 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5128 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5129
5130 if (DECL_RESULT (old_decl) != NULL_TREE)
5131 {
5132 tree *res_decl = &DECL_RESULT (old_decl);
5133 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5134 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5135 }
5136
5137 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5138 number_blocks (new_decl);
5139
5140 /* We want to create the BB unconditionally, so that the addition of
5141 debug stmts doesn't affect BB count, which may in the end cause
5142 codegen differences. */
5143 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5144 while (VEC_length (gimple, init_stmts))
5145 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5146 update_clone_info (&id);
5147
5148 /* Remap the nonlocal_goto_save_area, if any. */
5149 if (cfun->nonlocal_goto_save_area)
5150 {
5151 struct walk_stmt_info wi;
5152
5153 memset (&wi, 0, sizeof (wi));
5154 wi.info = &id;
5155 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5156 }
5157
5158 /* Clean up. */
5159 pointer_map_destroy (id.decl_map);
5160 if (id.debug_map)
5161 pointer_map_destroy (id.debug_map);
5162 free_dominance_info (CDI_DOMINATORS);
5163 free_dominance_info (CDI_POST_DOMINATORS);
5164
5165 fold_marked_statements (0, id.statements_to_fold);
5166 pointer_set_destroy (id.statements_to_fold);
5167 fold_cond_expr_cond ();
5168 delete_unreachable_blocks_update_callgraph (&id);
5169 if (id.dst_node->analyzed)
5170 cgraph_rebuild_references ();
5171 update_ssa (TODO_update_ssa);
5172 free_dominance_info (CDI_DOMINATORS);
5173 free_dominance_info (CDI_POST_DOMINATORS);
5174
5175 gcc_assert (!id.debug_stmts);
5176 VEC_free (gimple, heap, init_stmts);
5177 pop_cfun ();
5178 current_function_decl = old_current_function_decl;
5179 gcc_assert (!current_function_decl
5180 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5181 return;
5182 }
5183
5184 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5185 (inline) the callee and return the inlined body on success. */
5186
5187 tree
5188 maybe_inline_call_in_expr (tree exp)
5189 {
5190 tree fn = get_callee_fndecl (exp);
5191
5192 /* We can only try to inline "const" functions. */
5193 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5194 {
5195 struct pointer_map_t *decl_map = pointer_map_create ();
5196 call_expr_arg_iterator iter;
5197 copy_body_data id;
5198 tree param, arg, t;
5199
5200 /* Remap the parameters. */
5201 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5202 param;
5203 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5204 *pointer_map_insert (decl_map, param) = arg;
5205
5206 memset (&id, 0, sizeof (id));
5207 id.src_fn = fn;
5208 id.dst_fn = current_function_decl;
5209 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5210 id.decl_map = decl_map;
5211
5212 id.copy_decl = copy_decl_no_change;
5213 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5214 id.transform_new_cfg = false;
5215 id.transform_return_to_modify = true;
5216 id.transform_lang_insert_block = false;
5217
5218 /* Make sure not to unshare trees behind the front-end's back
5219 since front-end specific mechanisms may rely on sharing. */
5220 id.regimplify = false;
5221 id.do_not_unshare = true;
5222
5223 /* We're not inside any EH region. */
5224 id.eh_lp_nr = 0;
5225
5226 t = copy_tree_body (&id);
5227 pointer_map_destroy (decl_map);
5228
5229 /* We can only return something suitable for use in a GENERIC
5230 expression tree. */
5231 if (TREE_CODE (t) == MODIFY_EXPR)
5232 return TREE_OPERAND (t, 1);
5233 }
5234
5235 return NULL_TREE;
5236 }
5237
5238 /* Duplicate a type, fields and all. */
5239
5240 tree
5241 build_duplicate_type (tree type)
5242 {
5243 struct copy_body_data id;
5244
5245 memset (&id, 0, sizeof (id));
5246 id.src_fn = current_function_decl;
5247 id.dst_fn = current_function_decl;
5248 id.src_cfun = cfun;
5249 id.decl_map = pointer_map_create ();
5250 id.debug_map = NULL;
5251 id.copy_decl = copy_decl_no_change;
5252
5253 type = remap_type_1 (type, &id);
5254
5255 pointer_map_destroy (id.decl_map);
5256 if (id.debug_map)
5257 pointer_map_destroy (id.debug_map);
5258
5259 TYPE_CANONICAL (type) = type;
5260
5261 return type;
5262 }
5263
5264 /* Return whether it is OK to inline the function called by edge E, taking
5265 into account mismatched target-specific options and call-site argument
5266 types that do not match the parameter types. */
5267 bool
5268 tree_can_inline_p (struct cgraph_edge *e)
5269 {
5270 #if 0
5271 /* This causes a regression in SPEC in that it prevents a cold function from
5272 inlining a hot function. Perhaps this should only apply to functions
5273 that the user declares hot/cold/optimize explicitly. */
5274
5275 /* Don't inline a function with a higher optimization level than the
5276 caller, or with different space constraints (hot/cold functions). */
5277 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5278 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5279
5280 if (caller_tree != callee_tree)
5281 {
5282 struct cl_optimization *caller_opt
5283 = TREE_OPTIMIZATION ((caller_tree)
5284 ? caller_tree
5285 : optimization_default_node);
5286
5287 struct cl_optimization *callee_opt
5288 = TREE_OPTIMIZATION ((callee_tree)
5289 ? callee_tree
5290 : optimization_default_node);
5291
5292 if ((caller_opt->optimize > callee_opt->optimize)
5293 || (caller_opt->optimize_size != callee_opt->optimize_size))
5294 return false;
5295 }
5296 #endif
5297 tree caller, callee, lhs;
5298
5299 caller = e->caller->decl;
5300 callee = e->callee->decl;
5301
5302 /* First check that inlining isn't simply forbidden in this case. */
5303 if (inline_forbidden_into_p (caller, callee))
5304 {
5305 e->inline_failed = CIF_UNSPECIFIED;
5306 gimple_call_set_cannot_inline (e->call_stmt, true);
5307 return false;
5308 }
5309
5310 /* Allow the backend to decide if inlining is ok. */
5311 if (!targetm.target_option.can_inline_p (caller, callee))
5312 {
5313 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5314 gimple_call_set_cannot_inline (e->call_stmt, true);
5315 e->call_stmt_cannot_inline_p = true;
5316 return false;
5317 }
5318
5319 /* Do not inline calls where we cannot trivially work around mismatches
5320 in argument or return types. */
5321 if (e->call_stmt
5322 && ((DECL_RESULT (callee)
5323 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5324 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5325 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5326 TREE_TYPE (lhs))
5327 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5328 || !gimple_check_call_args (e->call_stmt)))
5329 {
5330 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5331 gimple_call_set_cannot_inline (e->call_stmt, true);
5332 e->call_stmt_cannot_inline_p = true;
5333 return false;
5334 }
5335
5336 return true;
5337 }