gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "insn-config.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-iterator.h"
37 #include "cgraph.h"
38 #include "intl.h"
39 #include "tree-mudflap.h"
40 #include "tree-flow.h"
41 #include "function.h"
42 #include "tree-flow.h"
43 #include "tree-pretty-print.h"
44 #include "except.h"
45 #include "debug.h"
46 #include "pointer-set.h"
47 #include "ipa-prop.h"
48 #include "value-prof.h"
49 #include "tree-pass.h"
50 #include "target.h"
51 #include "integrate.h"
52
53 #include "rtl.h" /* FIXME: For asm_str_count. */
54
55 /* I'm not really happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"
58
59 /* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements is adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
73 Versioning: a function body is duplicated and the result is a new
74 function, rather than being inserted into blocks of an existing
75 function as with inlining. Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined) those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
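/* Illustrative sketch (not from the original sources): given a callee

       int square (int x) { return x * x; }

   inlining the call "y = square (a);" conceptually produces

       x.1 = a;                  the PARM_DECL remapped to a VAR_DECL
       retval.2 = x.1 * x.1;     the RETURN_EXPR turned into a MODIFY_EXPR
       y = retval.2;

   where x.1 and retval.2 are hypothetical names for the remapped
   parameter and the dedicated returned-value variable.  */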
90
91 /* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104 /* Weights that estimate_num_insns uses to estimate the size of the
105 produced code. */
106
107 eni_weights eni_size_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the time necessary
110 to execute the produced code. */
111
112 eni_weights eni_time_weights;
113
114 /* Prototypes. */
115
116 static tree declare_return_variable (copy_body_data *, tree, tree);
117 static void remap_block (tree *, copy_body_data *);
118 static void copy_bind_expr (tree *, int *, copy_body_data *);
119 static tree mark_local_for_remap_r (tree *, int *, void *);
120 static void unsave_expr_1 (tree);
121 static tree unsave_r (tree *, int *, void *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130
131 /* Insert a tree->tree mapping for ID. Although the name suggests
132 that the trees should be variables, it is used for more than that. */
133
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
136 {
137 *pointer_map_insert (id->decl_map, key) = value;
138
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
143 }
144
145 /* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
147
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150 {
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
153
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
156
157 if (!target_for_debug_bind (key))
158 return;
159
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
162
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
165
166 *pointer_map_insert (id->debug_map, key) = value;
167 }
168
169 /* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173 static int processing_debug_stmt = 0;
174
175 /* Construct new SSA name for old NAME. ID is the inline context. */
176
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
179 {
180 tree new_tree;
181 tree *n;
182
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
184
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
188
189 if (processing_debug_stmt)
190 {
191 processing_debug_stmt = -1;
192 return name;
193 }
194
195 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
196 in copy_bb. */
197 new_tree = remap_decl (SSA_NAME_VAR (name), id);
198
199 /* We might've substituted a constant or another SSA_NAME for
200 the variable.
201 
202 Replace the SSA name representing RESULT_DECL by the variable during
203 inlining: this saves us from the need to introduce a PHI node in case
204 the return value is only partly initialized. */
205 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
206 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
207 || !id->transform_return_to_modify))
208 {
209 struct ptr_info_def *pi;
210 new_tree = make_ssa_name (new_tree, NULL);
211 insert_decl_map (id, name, new_tree);
212 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
213 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
214 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
215 /* At least IPA points-to info can be directly transferred. */
216 if (id->src_cfun->gimple_df
217 && id->src_cfun->gimple_df->ipa_pta
218 && (pi = SSA_NAME_PTR_INFO (name))
219 && !pi->pt.anything)
220 {
221 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
222 new_pi->pt = pi->pt;
223 }
224 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
225 {
226 /* By inlining a function having an uninitialized variable, we might
227 extend its lifetime (the variable might get reused). This causes an
228 ICE in the case we end up extending the lifetime of an SSA name
229 across an abnormal edge, but it also increases register pressure.
230 
231 We simply initialize all uninitialized vars by 0, except for the
232 case where we are inlining into the very first BB. We can avoid
233 this for all BBs that are not inside strongly connected
234 regions of the CFG, but this is expensive to test. */
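/* Illustrative example only: if the inlined body reads a local "tmp"
   that is never assigned, the copy gets an explicit "tmp_N = 0"
   inserted into the entry block of the inlined region, tmp_N being
   the freshly created SSA name.  */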
235 if (id->entry_bb
236 && is_gimple_reg (SSA_NAME_VAR (name))
237 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
238 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
239 || EDGE_COUNT (id->entry_bb->preds) != 1))
240 {
241 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
242 gimple init_stmt;
243
244 init_stmt = gimple_build_assign (new_tree,
245 fold_convert (TREE_TYPE (new_tree),
246 integer_zero_node));
247 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
248 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
249 }
250 else
251 {
252 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
253 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
254 == name)
255 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
256 }
257 }
258 }
259 else
260 insert_decl_map (id, name, new_tree);
261 return new_tree;
262 }
263
264 /* Remap DECL during the copying of the BLOCK tree for the function. */
265
266 tree
267 remap_decl (tree decl, copy_body_data *id)
268 {
269 tree *n;
270
271 /* We only remap local variables in the current function. */
272
273 /* See if we have remapped this declaration. */
274
275 n = (tree *) pointer_map_contains (id->decl_map, decl);
276
277 if (!n && processing_debug_stmt)
278 {
279 processing_debug_stmt = -1;
280 return decl;
281 }
282
283 /* If we didn't already have an equivalent for this declaration,
284 create one now. */
285 if (!n)
286 {
287 /* Make a copy of the variable or label. */
288 tree t = id->copy_decl (decl, id);
289
290 /* Remember it, so that if we encounter this local entity again
291 we can reuse this copy. Do this early because remap_type may
292 need this decl for TYPE_STUB_DECL. */
293 insert_decl_map (id, decl, t);
294
295 if (!DECL_P (t))
296 return t;
297
298 /* Remap types, if necessary. */
299 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
300 if (TREE_CODE (t) == TYPE_DECL)
301 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
302
303 /* Remap sizes as necessary. */
304 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
305 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
306
307 /* If a FIELD_DECL, do likewise for the offset and qualifier. */
308 if (TREE_CODE (t) == FIELD_DECL)
309 {
310 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
311 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
312 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
313 }
314
315 if (cfun && gimple_in_ssa_p (cfun)
316 && (TREE_CODE (t) == VAR_DECL
317 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
318 {
319 get_var_ann (t);
320 add_referenced_var (t);
321 }
322 return t;
323 }
324
325 if (id->do_not_unshare)
326 return *n;
327 else
328 return unshare_expr (*n);
329 }
330
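/* Helper for remap_type.  TYPE is known to need remapping (it is
   variably modified); build the remapped type, register it in
   ID->decl_map and walk into its component types and sizes.  */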
331 static tree
332 remap_type_1 (tree type, copy_body_data *id)
333 {
334 tree new_tree, t;
335
336 /* We do need a copy: build and register it now. If this is a pointer or
337 reference type, remap the designated type and make a new pointer or
338 reference type. */
339 if (TREE_CODE (type) == POINTER_TYPE)
340 {
341 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
342 TYPE_MODE (type),
343 TYPE_REF_CAN_ALIAS_ALL (type));
344 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
345 new_tree = build_type_attribute_qual_variant (new_tree,
346 TYPE_ATTRIBUTES (type),
347 TYPE_QUALS (type));
348 insert_decl_map (id, type, new_tree);
349 return new_tree;
350 }
351 else if (TREE_CODE (type) == REFERENCE_TYPE)
352 {
353 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
354 TYPE_MODE (type),
355 TYPE_REF_CAN_ALIAS_ALL (type));
356 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
357 new_tree = build_type_attribute_qual_variant (new_tree,
358 TYPE_ATTRIBUTES (type),
359 TYPE_QUALS (type));
360 insert_decl_map (id, type, new_tree);
361 return new_tree;
362 }
363 else
364 new_tree = copy_node (type);
365
366 insert_decl_map (id, type, new_tree);
367
368 /* This is a new type, not a copy of an old type. Need to reassociate
369 variants. We can handle everything except the main variant lazily. */
370 t = TYPE_MAIN_VARIANT (type);
371 if (type != t)
372 {
373 t = remap_type (t, id);
374 TYPE_MAIN_VARIANT (new_tree) = t;
375 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
376 TYPE_NEXT_VARIANT (t) = new_tree;
377 }
378 else
379 {
380 TYPE_MAIN_VARIANT (new_tree) = new_tree;
381 TYPE_NEXT_VARIANT (new_tree) = NULL;
382 }
383
384 if (TYPE_STUB_DECL (type))
385 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
386
387 /* Lazily create pointer and reference types. */
388 TYPE_POINTER_TO (new_tree) = NULL;
389 TYPE_REFERENCE_TO (new_tree) = NULL;
390
391 switch (TREE_CODE (new_tree))
392 {
393 case INTEGER_TYPE:
394 case REAL_TYPE:
395 case FIXED_POINT_TYPE:
396 case ENUMERAL_TYPE:
397 case BOOLEAN_TYPE:
398 t = TYPE_MIN_VALUE (new_tree);
399 if (t && TREE_CODE (t) != INTEGER_CST)
400 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
401
402 t = TYPE_MAX_VALUE (new_tree);
403 if (t && TREE_CODE (t) != INTEGER_CST)
404 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
405 return new_tree;
406
407 case FUNCTION_TYPE:
408 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
409 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
410 return new_tree;
411
412 case ARRAY_TYPE:
413 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
414 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
415 break;
416
417 case RECORD_TYPE:
418 case UNION_TYPE:
419 case QUAL_UNION_TYPE:
420 {
421 tree f, nf = NULL;
422
423 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
424 {
425 t = remap_decl (f, id);
426 DECL_CONTEXT (t) = new_tree;
427 TREE_CHAIN (t) = nf;
428 nf = t;
429 }
430 TYPE_FIELDS (new_tree) = nreverse (nf);
431 }
432 break;
433
434 case OFFSET_TYPE:
435 default:
436 /* Shouldn't have been thought variable sized. */
437 gcc_unreachable ();
438 }
439
440 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
441 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
442
443 return new_tree;
444 }
445
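/* Remap TYPE for the duplicated body.  Types that are not variably
   modified simply map to themselves; variably modified types are
   rebuilt by remap_type_1 and the result is cached in ID->decl_map.  */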
446 tree
447 remap_type (tree type, copy_body_data *id)
448 {
449 tree *node;
450 tree tmp;
451
452 if (type == NULL)
453 return type;
454
455 /* See if we have remapped this type. */
456 node = (tree *) pointer_map_contains (id->decl_map, type);
457 if (node)
458 return *node;
459
460 /* The type only needs remapping if it's variably modified. */
461 if (! variably_modified_type_p (type, id->src_fn))
462 {
463 insert_decl_map (id, type, type);
464 return type;
465 }
466
467 id->remapping_type_depth++;
468 tmp = remap_type_1 (type, id);
469 id->remapping_type_depth--;
470
471 return tmp;
472 }
473
474 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
475 is NULL or TYPE has not been remapped before. */
476
477 static tree
478 remapped_type (tree type, copy_body_data *id)
479 {
480 tree *node;
481
482 if (type == NULL)
483 return type;
484
485 /* See if we have remapped this type. */
486 node = (tree *) pointer_map_contains (id->decl_map, type);
487 if (node)
488 return *node;
489 else
490 return NULL;
491 }
492
494 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
495
496 static bool
497 can_be_nonlocal (tree decl, copy_body_data *id)
498 {
499 /* We cannot duplicate function decls. */
500 if (TREE_CODE (decl) == FUNCTION_DECL)
501 return true;
502
503 /* Local static vars must be non-local or we get multiple declaration
504 problems. */
505 if (TREE_CODE (decl) == VAR_DECL
506 && !auto_var_in_fn_p (decl, id->src_fn))
507 return true;
508
509 /* At the moment dwarf2out can handle only these types of nodes. We
510 can support more later. */
511 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
512 return false;
513
514 /* We must use the global type. We call remapped_type instead of
515 remap_type since we don't want to remap this type here if it
516 hasn't been remapped before. */
517 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
518 return false;
519
520 /* Without SSA we can't tell if the variable is used. */
521 if (!gimple_in_ssa_p (cfun))
522 return false;
523
524 /* Live variables must be copied so we can attach DECL_RTL. */
525 if (var_ann (decl))
526 return false;
527
528 return true;
529 }
530
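/* Remap the chain of declarations DECLS for the duplicated body.
   Declarations that can stay non-local are recorded in
   *NONLOCALIZED_LIST (when it is non-NULL) instead of being copied;
   the remapped chain is returned.  */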
531 static tree
532 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
533 {
534 tree old_var;
535 tree new_decls = NULL_TREE;
536
537 /* Remap its variables. */
538 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
539 {
540 tree new_var;
541
542 if (can_be_nonlocal (old_var, id))
543 {
544 if (TREE_CODE (old_var) == VAR_DECL
545 && ! DECL_EXTERNAL (old_var)
546 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
547 cfun->local_decls = tree_cons (NULL_TREE, old_var,
548 cfun->local_decls);
549 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
550 && !DECL_IGNORED_P (old_var)
551 && nonlocalized_list)
552 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
553 continue;
554 }
555
556 /* Remap the variable. */
557 new_var = remap_decl (old_var, id);
558
559 /* If we didn't remap this variable, we can't mess with its
560 TREE_CHAIN. If we remapped this variable to the return slot, it's
561 already declared somewhere else, so don't declare it here. */
562
563 if (new_var == id->retvar)
564 ;
565 else if (!new_var)
566 {
567 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
568 && !DECL_IGNORED_P (old_var)
569 && nonlocalized_list)
570 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
571 }
572 else
573 {
574 gcc_assert (DECL_P (new_var));
575 TREE_CHAIN (new_var) = new_decls;
576 new_decls = new_var;
577
578 /* Also copy value-expressions. */
579 if (TREE_CODE (new_var) == VAR_DECL
580 && DECL_HAS_VALUE_EXPR_P (new_var))
581 {
582 tree tem = DECL_VALUE_EXPR (new_var);
583 bool old_regimplify = id->regimplify;
584 id->remapping_type_depth++;
585 walk_tree (&tem, copy_tree_body_r, id, NULL);
586 id->remapping_type_depth--;
587 id->regimplify = old_regimplify;
588 SET_DECL_VALUE_EXPR (new_var, tem);
589 }
590 }
591 }
592
593 return nreverse (new_decls);
594 }
595
596 /* Copy the BLOCK to contain remapped versions of the variables
597 therein, and hook the new block into the block-tree. */
598
599 static void
600 remap_block (tree *block, copy_body_data *id)
601 {
602 tree old_block;
603 tree new_block;
604
605 /* Make the new block. */
606 old_block = *block;
607 new_block = make_node (BLOCK);
608 TREE_USED (new_block) = TREE_USED (old_block);
609 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
610 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
611 BLOCK_NONLOCALIZED_VARS (new_block)
612 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
613 *block = new_block;
614
615 /* Remap its variables. */
616 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
617 &BLOCK_NONLOCALIZED_VARS (new_block),
618 id);
619
620 if (id->transform_lang_insert_block)
621 id->transform_lang_insert_block (new_block);
622
623 /* Remember the remapped block. */
624 insert_decl_map (id, old_block, new_block);
625 }
626
627 /* Copy the whole block tree and root it in id->block. */
628 static tree
629 remap_blocks (tree block, copy_body_data *id)
630 {
631 tree t;
632 tree new_tree = block;
633
634 if (!block)
635 return NULL;
636
637 remap_block (&new_tree, id);
638 gcc_assert (new_tree != block);
639 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
640 prepend_lexical_block (new_tree, remap_blocks (t, id));
641 /* Blocks are in arbitrary order, but make things slightly prettier and do
642 not swap order when producing a copy. */
643 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
644 return new_tree;
645 }
646
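/* Deep-copy the STATEMENT_LIST pointed to by *TP, copying nested
   statement lists as well, and store the copy back into *TP.  */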
647 static void
648 copy_statement_list (tree *tp)
649 {
650 tree_stmt_iterator oi, ni;
651 tree new_tree;
652
653 new_tree = alloc_stmt_list ();
654 ni = tsi_start (new_tree);
655 oi = tsi_start (*tp);
656 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
657 *tp = new_tree;
658
659 for (; !tsi_end_p (oi); tsi_next (&oi))
660 {
661 tree stmt = tsi_stmt (oi);
662 if (TREE_CODE (stmt) == STATEMENT_LIST)
663 copy_statement_list (&stmt);
664 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
665 }
666 }
667
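/* Copy the BIND_EXPR pointed to by *TP, remapping its block and the
   variables it declares using the mapping information in ID.  */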
668 static void
669 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
670 {
671 tree block = BIND_EXPR_BLOCK (*tp);
672 /* Copy (and replace) the statement. */
673 copy_tree_r (tp, walk_subtrees, NULL);
674 if (block)
675 {
676 remap_block (&block, id);
677 BIND_EXPR_BLOCK (*tp) = block;
678 }
679
680 if (BIND_EXPR_VARS (*tp))
681 /* This will remap a lot of the same decls again, but this should be
682 harmless. */
683 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
684 }
685
686
687 /* Create a new gimple_seq by remapping all the statements in BODY
688 using the inlining information in ID. */
689
690 static gimple_seq
691 remap_gimple_seq (gimple_seq body, copy_body_data *id)
692 {
693 gimple_stmt_iterator si;
694 gimple_seq new_body = NULL;
695
696 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
697 {
698 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
699 gimple_seq_add_stmt (&new_body, new_stmt);
700 }
701
702 return new_body;
703 }
704
705
706 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
707 block using the mapping information in ID. */
708
709 static gimple
710 copy_gimple_bind (gimple stmt, copy_body_data *id)
711 {
712 gimple new_bind;
713 tree new_block, new_vars;
714 gimple_seq body, new_body;
715
716 /* Copy the statement. Note that we purposely don't use copy_stmt
717 here because we need to remap statements as we copy. */
718 body = gimple_bind_body (stmt);
719 new_body = remap_gimple_seq (body, id);
720
721 new_block = gimple_bind_block (stmt);
722 if (new_block)
723 remap_block (&new_block, id);
724
725 /* This will remap a lot of the same decls again, but this should be
726 harmless. */
727 new_vars = gimple_bind_vars (stmt);
728 if (new_vars)
729 new_vars = remap_decls (new_vars, NULL, id);
730
731 new_bind = gimple_build_bind (new_vars, new_body, new_block);
732
733 return new_bind;
734 }
735
736
737 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
738 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
739 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
740 recursing into the child nodes of *TP. */
741
742 static tree
743 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
744 {
745 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
746 copy_body_data *id = (copy_body_data *) wi_p->info;
747 tree fn = id->src_fn;
748
749 if (TREE_CODE (*tp) == SSA_NAME)
750 {
751 *tp = remap_ssa_name (*tp, id);
752 *walk_subtrees = 0;
753 return NULL;
754 }
755 else if (auto_var_in_fn_p (*tp, fn))
756 {
757 /* Local variables and labels need to be replaced by equivalent
758 variables. We don't want to copy static variables; there's
759 only one of those, no matter how many times we inline the
760 containing function. Similarly for globals from an outer
761 function. */
762 tree new_decl;
763
764 /* Remap the declaration. */
765 new_decl = remap_decl (*tp, id);
766 gcc_assert (new_decl);
767 /* Replace this variable with the copy. */
768 STRIP_TYPE_NOPS (new_decl);
769 /* ??? The C++ frontend uses void * pointer zero to initialize
770 any other type. This confuses the middle-end type verification.
771 As cloned bodies do not go through gimplification again the fixup
772 there doesn't trigger. */
773 if (TREE_CODE (new_decl) == INTEGER_CST
774 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
775 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
776 *tp = new_decl;
777 *walk_subtrees = 0;
778 }
779 else if (TREE_CODE (*tp) == STATEMENT_LIST)
780 gcc_unreachable ();
781 else if (TREE_CODE (*tp) == SAVE_EXPR)
782 gcc_unreachable ();
783 else if (TREE_CODE (*tp) == LABEL_DECL
784 && (!DECL_CONTEXT (*tp)
785 || decl_function_context (*tp) == id->src_fn))
786 /* These may need to be remapped for EH handling. */
787 *tp = remap_decl (*tp, id);
788 else if (TYPE_P (*tp))
789 /* Types may need remapping as well. */
790 *tp = remap_type (*tp, id);
791 else if (CONSTANT_CLASS_P (*tp))
792 {
793 /* If this is a constant, we have to copy the node iff the type
794 will be remapped. copy_tree_r will not copy a constant. */
795 tree new_type = remap_type (TREE_TYPE (*tp), id);
796
797 if (new_type == TREE_TYPE (*tp))
798 *walk_subtrees = 0;
799
800 else if (TREE_CODE (*tp) == INTEGER_CST)
801 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
802 TREE_INT_CST_HIGH (*tp));
803 else
804 {
805 *tp = copy_node (*tp);
806 TREE_TYPE (*tp) = new_type;
807 }
808 }
809 else
810 {
811 /* Otherwise, just copy the node. Note that copy_tree_r already
812 knows not to copy VAR_DECLs, etc., so this is safe. */
813 if (TREE_CODE (*tp) == INDIRECT_REF)
814 {
815 /* Get rid of *& from inline substitutions that can happen when a
816 pointer argument is an ADDR_EXPR. */
817 tree decl = TREE_OPERAND (*tp, 0);
818 tree *n;
819
820 n = (tree *) pointer_map_contains (id->decl_map, decl);
821 if (n)
822 {
823 tree type, new_tree, old;
824
825 /* If we happen to get an ADDR_EXPR in n->value, strip
826 it manually here as we'll eventually get ADDR_EXPRs
827 which lie about their types pointed to. In this case
828 build_fold_indirect_ref wouldn't strip the
829 INDIRECT_REF, but we absolutely rely on that. As
830 fold_indirect_ref does other useful transformations,
831 try that first, though. */
832 type = TREE_TYPE (TREE_TYPE (*n));
833 new_tree = unshare_expr (*n);
834 old = *tp;
835 *tp = gimple_fold_indirect_ref (new_tree);
836 if (!*tp)
837 {
838 if (TREE_CODE (new_tree) == ADDR_EXPR)
839 {
840 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
841 type, new_tree);
842 /* ??? We should either assert here or build
843 a VIEW_CONVERT_EXPR instead of blindly leaking
844 incompatible types to our IL. */
845 if (! *tp)
846 *tp = TREE_OPERAND (new_tree, 0);
847 }
848 else
849 {
850 *tp = build1 (INDIRECT_REF, type, new_tree);
851 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
852 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
853 }
854 }
855 *walk_subtrees = 0;
856 return NULL;
857 }
858 }
859
860 /* Here is the "usual case". Copy this tree node, and then
861 tweak some special cases. */
862 copy_tree_r (tp, walk_subtrees, NULL);
863
864 /* Global variables we haven't seen yet need to go into referenced
865 vars, unless they are referenced only from types. */
866 if (gimple_in_ssa_p (cfun)
867 && TREE_CODE (*tp) == VAR_DECL
868 && id->remapping_type_depth == 0
869 && !processing_debug_stmt)
870 add_referenced_var (*tp);
871
872 /* We should never have TREE_BLOCK set on non-statements. */
873 if (EXPR_P (*tp))
874 gcc_assert (!TREE_BLOCK (*tp));
875
876 if (TREE_CODE (*tp) != OMP_CLAUSE)
877 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
878
879 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
880 {
881 /* The copied TARGET_EXPR has never been expanded, even if the
882 original node was expanded already. */
883 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
884 TREE_OPERAND (*tp, 3) = NULL_TREE;
885 }
886 else if (TREE_CODE (*tp) == ADDR_EXPR)
887 {
888 /* Variable substitution need not be simple. In particular,
889 the INDIRECT_REF substitution above. Make sure that
890 TREE_CONSTANT and friends are up-to-date. But make sure
891 to not improperly set TREE_BLOCK on some sub-expressions. */
892 int invariant = is_gimple_min_invariant (*tp);
893 tree block = id->block;
894 id->block = NULL_TREE;
895 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
896 id->block = block;
897
898 /* Handle the case where we substituted an INDIRECT_REF
899 into the operand of the ADDR_EXPR. */
900 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
901 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
902 else
903 recompute_tree_invariant_for_addr_expr (*tp);
904
905 /* If this used to be invariant, but is not any longer,
906 then regimplification is probably needed. */
907 if (invariant && !is_gimple_min_invariant (*tp))
908 id->regimplify = true;
909
910 *walk_subtrees = 0;
911 }
912 }
913
914 /* Keep iterating. */
915 return NULL_TREE;
916 }
917
918
919 /* Callback for walk_tree used to copy a tree body; DATA is really a
920 `copy_body_data *'. */
921
922 tree
923 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
924 {
925 copy_body_data *id = (copy_body_data *) data;
926 tree fn = id->src_fn;
927 tree new_block;
928
929 /* Begin by recognizing trees that we'll completely rewrite for the
930 inlining context. Our output for these trees is completely
931 different from our input (e.g. RETURN_EXPR is deleted, and morphs
932 into an edge). Further down, we'll handle trees that get
933 duplicated and/or tweaked. */
934
935 /* When requested, RETURN_EXPRs should be transformed to just the
936 contained MODIFY_EXPR. The branch semantics of the return will
937 be handled elsewhere by manipulating the CFG rather than a statement. */
938 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
939 {
940 tree assignment = TREE_OPERAND (*tp, 0);
941
942 /* If we're returning something, just turn that into an
943 assignment into the equivalent of the original RESULT_DECL.
944 If the "assignment" is just the result decl, the result
945 decl has already been set (e.g. a recent "foo (&result_decl,
946 ...)"); just toss the entire RETURN_EXPR. */
947 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
948 {
949 /* Replace the RETURN_EXPR with (a copy of) the
950 MODIFY_EXPR hanging underneath. */
951 *tp = copy_node (assignment);
952 }
953 else /* Else the RETURN_EXPR returns no value. */
954 {
955 *tp = NULL;
956 return (tree) (void *)1;
957 }
958 }
959 else if (TREE_CODE (*tp) == SSA_NAME)
960 {
961 *tp = remap_ssa_name (*tp, id);
962 *walk_subtrees = 0;
963 return NULL;
964 }
965
966 /* Local variables and labels need to be replaced by equivalent
967 variables. We don't want to copy static variables; there's only
968 one of those, no matter how many times we inline the containing
969 function. Similarly for globals from an outer function. */
970 else if (auto_var_in_fn_p (*tp, fn))
971 {
972 tree new_decl;
973
974 /* Remap the declaration. */
975 new_decl = remap_decl (*tp, id);
976 gcc_assert (new_decl);
977 /* Replace this variable with the copy. */
978 STRIP_TYPE_NOPS (new_decl);
979 *tp = new_decl;
980 *walk_subtrees = 0;
981 }
982 else if (TREE_CODE (*tp) == STATEMENT_LIST)
983 copy_statement_list (tp);
984 else if (TREE_CODE (*tp) == SAVE_EXPR
985 || TREE_CODE (*tp) == TARGET_EXPR)
986 remap_save_expr (tp, id->decl_map, walk_subtrees);
987 else if (TREE_CODE (*tp) == LABEL_DECL
988 && (! DECL_CONTEXT (*tp)
989 || decl_function_context (*tp) == id->src_fn))
990 /* These may need to be remapped for EH handling. */
991 *tp = remap_decl (*tp, id);
992 else if (TREE_CODE (*tp) == BIND_EXPR)
993 copy_bind_expr (tp, walk_subtrees, id);
994 /* Types may need remapping as well. */
995 else if (TYPE_P (*tp))
996 *tp = remap_type (*tp, id);
997
998 /* If this is a constant, we have to copy the node iff the type will be
999 remapped. copy_tree_r will not copy a constant. */
1000 else if (CONSTANT_CLASS_P (*tp))
1001 {
1002 tree new_type = remap_type (TREE_TYPE (*tp), id);
1003
1004 if (new_type == TREE_TYPE (*tp))
1005 *walk_subtrees = 0;
1006
1007 else if (TREE_CODE (*tp) == INTEGER_CST)
1008 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1009 TREE_INT_CST_HIGH (*tp));
1010 else
1011 {
1012 *tp = copy_node (*tp);
1013 TREE_TYPE (*tp) = new_type;
1014 }
1015 }
1016
1017 /* Otherwise, just copy the node. Note that copy_tree_r already
1018 knows not to copy VAR_DECLs, etc., so this is safe. */
1019 else
1020 {
1021 /* Here we handle trees that are not completely rewritten.
1022 First we detect some inlining-induced bogosities for
1023 discarding. */
1024 if (TREE_CODE (*tp) == MODIFY_EXPR
1025 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1026 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1027 {
1028 /* Some assignments VAR = VAR; don't generate any rtl code
1029 and thus don't count as variable modification. Avoid
1030 keeping bogosities like 0 = 0. */
1031 tree decl = TREE_OPERAND (*tp, 0), value;
1032 tree *n;
1033
1034 n = (tree *) pointer_map_contains (id->decl_map, decl);
1035 if (n)
1036 {
1037 value = *n;
1038 STRIP_TYPE_NOPS (value);
1039 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1040 {
1041 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1042 return copy_tree_body_r (tp, walk_subtrees, data);
1043 }
1044 }
1045 }
1046 else if (TREE_CODE (*tp) == INDIRECT_REF)
1047 {
1048 /* Get rid of *& from inline substitutions that can happen when a
1049 pointer argument is an ADDR_EXPR. */
1050 tree decl = TREE_OPERAND (*tp, 0);
1051 tree *n;
1052
1053 n = (tree *) pointer_map_contains (id->decl_map, decl);
1054 if (n)
1055 {
1056 tree new_tree;
1057 tree old;
1058 /* If we happen to get an ADDR_EXPR in n->value, strip
1059 it manually here as we'll eventually get ADDR_EXPRs
1060 which lie about their types pointed to. In this case
1061 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1062 but we absolutely rely on that. As fold_indirect_ref
1063 does other useful transformations, try that first, though. */
1064 tree type = TREE_TYPE (TREE_TYPE (*n));
1065 if (id->do_not_unshare)
1066 new_tree = *n;
1067 else
1068 new_tree = unshare_expr (*n);
1069 old = *tp;
1070 *tp = gimple_fold_indirect_ref (new_tree);
1071 if (! *tp)
1072 {
1073 if (TREE_CODE (new_tree) == ADDR_EXPR)
1074 {
1075 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1076 type, new_tree);
1077 /* ??? We should either assert here or build
1078 a VIEW_CONVERT_EXPR instead of blindly leaking
1079 incompatible types to our IL. */
1080 if (! *tp)
1081 *tp = TREE_OPERAND (new_tree, 0);
1082 }
1083 else
1084 {
1085 *tp = build1 (INDIRECT_REF, type, new_tree);
1086 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1087 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1088 }
1089 }
1090 *walk_subtrees = 0;
1091 return NULL;
1092 }
1093 }
1094
1095 /* Here is the "usual case". Copy this tree node, and then
1096 tweak some special cases. */
1097 copy_tree_r (tp, walk_subtrees, NULL);
1098
1099 /* Global variables we haven't seen yet need to go into referenced
1100 vars, unless they are referenced only from types or debug stmts. */
1101 if (gimple_in_ssa_p (cfun)
1102 && TREE_CODE (*tp) == VAR_DECL
1103 && id->remapping_type_depth == 0
1104 && !processing_debug_stmt)
1105 add_referenced_var (*tp);
1106
1107 /* If EXPR has a block defined, map it to the newly constructed block.
1108 When inlining we want EXPRs without a block to appear in the block
1109 of the function call if we are not remapping a type. */
1110 if (EXPR_P (*tp))
1111 {
1112 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1113 if (TREE_BLOCK (*tp))
1114 {
1115 tree *n;
1116 n = (tree *) pointer_map_contains (id->decl_map,
1117 TREE_BLOCK (*tp));
1118 gcc_assert (n || id->remapping_type_depth != 0);
1119 if (n)
1120 new_block = *n;
1121 }
1122 TREE_BLOCK (*tp) = new_block;
1123 }
1124
1125 if (TREE_CODE (*tp) != OMP_CLAUSE)
1126 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1127
1128 /* The copied TARGET_EXPR has never been expanded, even if the
1129 original node was expanded already. */
1130 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1131 {
1132 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1133 TREE_OPERAND (*tp, 3) = NULL_TREE;
1134 }
1135
1136 /* Variable substitution need not be simple. In particular, the
1137 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1138 and friends are up-to-date. */
1139 else if (TREE_CODE (*tp) == ADDR_EXPR)
1140 {
1141 int invariant = is_gimple_min_invariant (*tp);
1142 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1143
1144 /* Handle the case where we substituted an INDIRECT_REF
1145 into the operand of the ADDR_EXPR. */
1146 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1147 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1148 else
1149 recompute_tree_invariant_for_addr_expr (*tp);
1150
1151 /* If this used to be invariant, but is not any longer,
1152 then regimplification is probably needed. */
1153 if (invariant && !is_gimple_min_invariant (*tp))
1154 id->regimplify = true;
1155
1156 *walk_subtrees = 0;
1157 }
1158 }
1159
1160 /* Keep iterating. */
1161 return NULL_TREE;
1162 }
1163
1164 /* Helper for remap_gimple_stmt. Given an EH region number for the
1165 source function, map that to the duplicate EH region number in
1166 the destination function. */
1167
1168 static int
1169 remap_eh_region_nr (int old_nr, copy_body_data *id)
1170 {
1171 eh_region old_r, new_r;
1172 void **slot;
1173
1174 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1175 slot = pointer_map_contains (id->eh_map, old_r);
1176 new_r = (eh_region) *slot;
1177
1178 return new_r->index;
1179 }
1180
1181 /* Similar, but operate on INTEGER_CSTs. */
1182
1183 static tree
1184 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1185 {
1186 int old_nr, new_nr;
1187
1188 old_nr = tree_low_cst (old_t_nr, 0);
1189 new_nr = remap_eh_region_nr (old_nr, id);
1190
1191 return build_int_cst (NULL, new_nr);
1192 }
1193
1194 /* Helper for copy_bb. Remap statement STMT using the inlining
1195 information in ID. Return the new statement copy. */
1196
1197 static gimple
1198 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1199 {
1200 gimple copy = NULL;
1201 struct walk_stmt_info wi;
1202 tree new_block;
1203 bool skip_first = false;
1204
1205 /* Begin by recognizing trees that we'll completely rewrite for the
1206 inlining context. Our output for these trees is completely
1207 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1208 into an edge). Further down, we'll handle trees that get
1209 duplicated and/or tweaked. */
1210
1211 /* When requested, GIMPLE_RETURNs should be transformed to just the
1212 contained GIMPLE_ASSIGN. The branch semantics of the return will
1213 be handled elsewhere by manipulating the CFG rather than the
1214 statement. */
1215 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1216 {
1217 tree retval = gimple_return_retval (stmt);
1218
1219 /* If we're returning something, just turn that into an
1220 assignment into the equivalent of the original RESULT_DECL.
1221 If RETVAL is just the result decl, the result decl has
1222 already been set (e.g. a recent "foo (&result_decl, ...)");
1223 just toss the entire GIMPLE_RETURN. */
1224 if (retval && TREE_CODE (retval) != RESULT_DECL)
1225 {
1226 copy = gimple_build_assign (id->retvar, retval);
1227 /* id->retvar is already substituted. Skip it on later remapping. */
1228 skip_first = true;
1229 }
1230 else
1231 return gimple_build_nop ();
1232 }
1233 else if (gimple_has_substatements (stmt))
1234 {
1235 gimple_seq s1, s2;
1236
1237 /* When cloning bodies from the C++ front end, we will be handed bodies
1238 in High GIMPLE form. Handle here all the High GIMPLE statements that
1239 have embedded statements. */
1240 switch (gimple_code (stmt))
1241 {
1242 case GIMPLE_BIND:
1243 copy = copy_gimple_bind (stmt, id);
1244 break;
1245
1246 case GIMPLE_CATCH:
1247 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1248 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1249 break;
1250
1251 case GIMPLE_EH_FILTER:
1252 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1253 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1254 break;
1255
1256 case GIMPLE_TRY:
1257 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1258 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1259 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1260 break;
1261
1262 case GIMPLE_WITH_CLEANUP_EXPR:
1263 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1264 copy = gimple_build_wce (s1);
1265 break;
1266
1267 case GIMPLE_OMP_PARALLEL:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_parallel
1270 (s1,
1271 gimple_omp_parallel_clauses (stmt),
1272 gimple_omp_parallel_child_fn (stmt),
1273 gimple_omp_parallel_data_arg (stmt));
1274 break;
1275
1276 case GIMPLE_OMP_TASK:
1277 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1278 copy = gimple_build_omp_task
1279 (s1,
1280 gimple_omp_task_clauses (stmt),
1281 gimple_omp_task_child_fn (stmt),
1282 gimple_omp_task_data_arg (stmt),
1283 gimple_omp_task_copy_fn (stmt),
1284 gimple_omp_task_arg_size (stmt),
1285 gimple_omp_task_arg_align (stmt));
1286 break;
1287
1288 case GIMPLE_OMP_FOR:
1289 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1290 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1291 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1292 gimple_omp_for_collapse (stmt), s2);
1293 {
1294 size_t i;
1295 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1296 {
1297 gimple_omp_for_set_index (copy, i,
1298 gimple_omp_for_index (stmt, i));
1299 gimple_omp_for_set_initial (copy, i,
1300 gimple_omp_for_initial (stmt, i));
1301 gimple_omp_for_set_final (copy, i,
1302 gimple_omp_for_final (stmt, i));
1303 gimple_omp_for_set_incr (copy, i,
1304 gimple_omp_for_incr (stmt, i));
1305 gimple_omp_for_set_cond (copy, i,
1306 gimple_omp_for_cond (stmt, i));
1307 }
1308 }
1309 break;
1310
1311 case GIMPLE_OMP_MASTER:
1312 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1313 copy = gimple_build_omp_master (s1);
1314 break;
1315
1316 case GIMPLE_OMP_ORDERED:
1317 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1318 copy = gimple_build_omp_ordered (s1);
1319 break;
1320
1321 case GIMPLE_OMP_SECTION:
1322 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1323 copy = gimple_build_omp_section (s1);
1324 break;
1325
1326 case GIMPLE_OMP_SECTIONS:
1327 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1328 copy = gimple_build_omp_sections
1329 (s1, gimple_omp_sections_clauses (stmt));
1330 break;
1331
1332 case GIMPLE_OMP_SINGLE:
1333 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1334 copy = gimple_build_omp_single
1335 (s1, gimple_omp_single_clauses (stmt));
1336 break;
1337
1338 case GIMPLE_OMP_CRITICAL:
1339 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1340 copy
1341 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1342 break;
1343
1344 default:
1345 gcc_unreachable ();
1346 }
1347 }
1348 else
1349 {
1350 if (gimple_assign_copy_p (stmt)
1351 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1352 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1353 {
1354 /* Here we handle statements that are not completely rewritten.
1355 First we detect some inlining-induced bogosities for
1356 discarding. */
1357
1358 /* Some assignments VAR = VAR; don't generate any rtl code
1359 and thus don't count as variable modification. Avoid
1360 keeping bogosities like 0 = 0. */
1361 tree decl = gimple_assign_lhs (stmt), value;
1362 tree *n;
1363
1364 n = (tree *) pointer_map_contains (id->decl_map, decl);
1365 if (n)
1366 {
1367 value = *n;
1368 STRIP_TYPE_NOPS (value);
1369 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1370 return gimple_build_nop ();
1371 }
1372 }
1373
1374 if (gimple_debug_bind_p (stmt))
1375 {
1376 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1377 gimple_debug_bind_get_value (stmt),
1378 stmt);
1379 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1380 return copy;
1381 }
1382
1383 /* Create a new deep copy of the statement. */
1384 copy = gimple_copy (stmt);
1385
1386 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1387 RESX and EH_DISPATCH. */
1388 if (id->eh_map)
1389 switch (gimple_code (copy))
1390 {
1391 case GIMPLE_CALL:
1392 {
1393 tree r, fndecl = gimple_call_fndecl (copy);
1394 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1395 switch (DECL_FUNCTION_CODE (fndecl))
1396 {
1397 case BUILT_IN_EH_COPY_VALUES:
1398 r = gimple_call_arg (copy, 1);
1399 r = remap_eh_region_tree_nr (r, id);
1400 gimple_call_set_arg (copy, 1, r);
1401 /* FALLTHRU */
1402
1403 case BUILT_IN_EH_POINTER:
1404 case BUILT_IN_EH_FILTER:
1405 r = gimple_call_arg (copy, 0);
1406 r = remap_eh_region_tree_nr (r, id);
1407 gimple_call_set_arg (copy, 0, r);
1408 break;
1409
1410 default:
1411 break;
1412 }
1413
1414 /* Reset alias info if we didn't apply measures to
1415 keep it valid over inlining by setting DECL_PT_UID. */
1416 if (!id->src_cfun->gimple_df
1417 || !id->src_cfun->gimple_df->ipa_pta)
1418 gimple_call_reset_alias_info (copy);
1419 }
1420 break;
1421
1422 case GIMPLE_RESX:
1423 {
1424 int r = gimple_resx_region (copy);
1425 r = remap_eh_region_nr (r, id);
1426 gimple_resx_set_region (copy, r);
1427 }
1428 break;
1429
1430 case GIMPLE_EH_DISPATCH:
1431 {
1432 int r = gimple_eh_dispatch_region (copy);
1433 r = remap_eh_region_nr (r, id);
1434 gimple_eh_dispatch_set_region (copy, r);
1435 }
1436 break;
1437
1438 default:
1439 break;
1440 }
1441 }
1442
1443 /* If STMT has a block defined, map it to the newly constructed
1444 block. When inlining we want statements without a block to
1445 appear in the block of the function call. */
1446 new_block = id->block;
1447 if (gimple_block (copy))
1448 {
1449 tree *n;
1450 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1451 gcc_assert (n);
1452 new_block = *n;
1453 }
1454
1455 gimple_set_block (copy, new_block);
1456
1457 if (gimple_debug_bind_p (copy))
1458 return copy;
1459
1460 /* Remap all the operands in COPY. */
1461 memset (&wi, 0, sizeof (wi));
1462 wi.info = id;
1463 if (skip_first)
1464 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1465 else
1466 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1467
1468 /* Clear the copied virtual operands. We are not remapping them here
1469 but are going to recreate them from scratch. */
1470 if (gimple_has_mem_ops (copy))
1471 {
1472 gimple_set_vdef (copy, NULL_TREE);
1473 gimple_set_vuse (copy, NULL_TREE);
1474 }
1475
1476 return copy;
1477 }
1478
1479
1480 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1481 later. */
1482
1483 static basic_block
1484 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1485 gcov_type count_scale)
1486 {
1487 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1488 basic_block copy_basic_block;
1489 tree decl;
1490 gcov_type freq;
1491 basic_block prev;
1492
1493 /* Search for previous copied basic block. */
1494 prev = bb->prev_bb;
1495 while (!prev->aux)
1496 prev = prev->prev_bb;
1497
1498 /* create_basic_block() will append every new block to
1499 basic_block_info automatically. */
1500 copy_basic_block = create_basic_block (NULL, (void *) 0,
1501 (basic_block) prev->aux);
1502 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
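/* Illustrative numbers for the scaling above (not from the sources):
   with a count_scale of REG_BR_PROB_BASE / 2, a block whose original
   count is 1000 ends up with a count of 500 in the copy.  */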
1503
1504 /* We are going to rebuild frequencies from scratch. These values
1505 matter only a little; they are used to drive canonicalize_loop_headers. */
1506 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1507
1508 /* We recompute frequencies after inlining, so this is quite safe. */
1509 if (freq > BB_FREQ_MAX)
1510 freq = BB_FREQ_MAX;
1511 copy_basic_block->frequency = freq;
1512
1513 copy_gsi = gsi_start_bb (copy_basic_block);
1514
1515 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1516 {
1517 gimple stmt = gsi_stmt (gsi);
1518 gimple orig_stmt = stmt;
1519
1520 id->regimplify = false;
1521 stmt = remap_gimple_stmt (stmt, id);
1522 if (gimple_nop_p (stmt))
1523 continue;
1524
1525 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1526 seq_gsi = copy_gsi;
1527
1528 /* With return slot optimization we can end up with
1529 non-gimple (foo *)&this->m, fix that here. */
1530 if (is_gimple_assign (stmt)
1531 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1532 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1533 {
1534 tree new_rhs;
1535 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1536 gimple_assign_rhs1 (stmt),
1537 true, NULL, false, GSI_NEW_STMT);
1538 gimple_assign_set_rhs1 (stmt, new_rhs);
1539 id->regimplify = false;
1540 }
1541
1542 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1543
1544 if (id->regimplify)
1545 gimple_regimplify_operands (stmt, &seq_gsi);
1546
1547 /* If copy_basic_block has been empty at the start of this iteration,
1548 call gsi_start_bb again to get at the newly added statements. */
1549 if (gsi_end_p (copy_gsi))
1550 copy_gsi = gsi_start_bb (copy_basic_block);
1551 else
1552 gsi_next (&copy_gsi);
1553
1554 /* Process the new statement. The call to gimple_regimplify_operands
1555 possibly turned the statement into multiple statements; we
1556 need to process all of them. */
1557 do
1558 {
1559 tree fn;
1560
1561 stmt = gsi_stmt (copy_gsi);
1562 if (is_gimple_call (stmt)
1563 && gimple_call_va_arg_pack_p (stmt)
1564 && id->gimple_call)
1565 {
1566 /* __builtin_va_arg_pack () should be replaced by
1567 all arguments corresponding to ... in the caller. */
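/* Illustrative sketch, assuming a hypothetical always_inline variadic
   wrapper such as

       static inline int wrap (int a, ...)
       { return real_fn (a, 5, __builtin_va_arg_pack ()); }

   A call "wrap (x, y, z)" is inlined so that the inner call becomes
   "real_fn (x, 5, y, z)": the arguments matching the caller's "..."
   replace the __builtin_va_arg_pack () reference.  */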
1568 tree p;
1569 gimple new_call;
1570 VEC(tree, heap) *argarray;
1571 size_t nargs = gimple_call_num_args (id->gimple_call);
1572 size_t n;
1573
1574 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1575 nargs--;
1576
1577 /* Create the new array of arguments. */
1578 n = nargs + gimple_call_num_args (stmt);
1579 argarray = VEC_alloc (tree, heap, n);
1580 VEC_safe_grow (tree, heap, argarray, n);
1581
1582 /* Copy all the arguments before '...' */
1583 memcpy (VEC_address (tree, argarray),
1584 gimple_call_arg_ptr (stmt, 0),
1585 gimple_call_num_args (stmt) * sizeof (tree));
1586
1587 /* Append the arguments passed in '...' */
1588 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1589 gimple_call_arg_ptr (id->gimple_call, 0)
1590 + (gimple_call_num_args (id->gimple_call) - nargs),
1591 nargs * sizeof (tree));
1592
1593 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1594 argarray);
1595
1596 VEC_free (tree, heap, argarray);
1597
1598 /* Copy all GIMPLE_CALL flags, location and block, except
1599 GF_CALL_VA_ARG_PACK. */
1600 gimple_call_copy_flags (new_call, stmt);
1601 gimple_call_set_va_arg_pack (new_call, false);
1602 gimple_set_location (new_call, gimple_location (stmt));
1603 gimple_set_block (new_call, gimple_block (stmt));
1604 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1605
1606 gsi_replace (&copy_gsi, new_call, false);
1607 gimple_set_bb (stmt, NULL);
1608 stmt = new_call;
1609 }
1610 else if (is_gimple_call (stmt)
1611 && id->gimple_call
1612 && (decl = gimple_call_fndecl (stmt))
1613 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1614 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1615 {
1616 /* __builtin_va_arg_pack_len () should be replaced by
1617 the number of anonymous arguments. */
1618 size_t nargs = gimple_call_num_args (id->gimple_call);
1619 tree count, p;
1620 gimple new_stmt;
1621
1622 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1623 nargs--;
1624
1625 count = build_int_cst (integer_type_node, nargs);
1626 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1627 gsi_replace (&copy_gsi, new_stmt, false);
1628 stmt = new_stmt;
1629 }
1630
1631 /* Statements produced by inlining can be unfolded, especially
1632 when we constant propagated some operands. We can't fold
1633 them right now for two reasons:
1634 1) folding requires SSA_NAME_DEF_STMTs to be correct
1635 2) we can't change function calls to builtins.
1636 So we just mark the statement for later folding. We mark
1637 all new statements, instead of just statements that have changed
1638 by some nontrivial substitution, so even statements made
1639 foldable indirectly are updated. If this turns out to be
1640 expensive, copy_body can be told to watch for nontrivial
1641 changes. */
1642 if (id->statements_to_fold)
1643 pointer_set_insert (id->statements_to_fold, stmt);
1644
1645 /* We're duplicating a CALL_EXPR. Find any corresponding
1646 callgraph edges and update or duplicate them. */
1647 if (is_gimple_call (stmt))
1648 {
1649 struct cgraph_edge *edge;
1650 int flags;
1651
1652 switch (id->transform_call_graph_edges)
1653 {
1654 case CB_CGE_DUPLICATE:
1655 edge = cgraph_edge (id->src_node, orig_stmt);
1656 if (edge)
1657 {
1658 int edge_freq = edge->frequency;
1659 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1660 gimple_uid (stmt),
1661 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1662 edge->frequency, true);
1663 /* We could also just rescale the frequency, but
1664 doing so would introduce roundoff errors and make
1665 the verifier unhappy. */
1666 edge->frequency
1667 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1668 copy_basic_block);
1669 if (dump_file
1670 && profile_status_for_function (cfun) != PROFILE_ABSENT
1671 && (edge_freq > edge->frequency + 10
1672 || edge_freq < edge->frequency - 10))
1673 {
1674 fprintf (dump_file, "Edge frequency estimated by "
1675 "cgraph %i diverge from inliner's estimate %i\n",
1676 edge_freq,
1677 edge->frequency);
1678 fprintf (dump_file,
1679 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1680 bb->index,
1681 bb->frequency,
1682 copy_basic_block->frequency);
1683 }
1684 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1685 }
1686 break;
1687
1688 case CB_CGE_MOVE_CLONES:
1689 cgraph_set_call_stmt_including_clones (id->dst_node,
1690 orig_stmt, stmt);
1691 edge = cgraph_edge (id->dst_node, stmt);
1692 break;
1693
1694 case CB_CGE_MOVE:
1695 edge = cgraph_edge (id->dst_node, orig_stmt);
1696 if (edge)
1697 cgraph_set_call_stmt (edge, stmt);
1698 break;
1699
1700 default:
1701 gcc_unreachable ();
1702 }
1703
1704 /* Constant propagation on arguments done during inlining
1705 may create a new direct call. Produce an edge for it. */
1706 if ((!edge
1707 || (edge->indirect_inlining_edge
1708 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1709 && (fn = gimple_call_fndecl (stmt)) != NULL)
1710 {
1711 struct cgraph_node *dest = cgraph_node (fn);
1712
1713 /* We have a missing edge in the callgraph. This can happen
1714 when previous inlining turned an indirect call into a
1715 direct call by constant propagating arguments, or when we are
1716 producing a dead clone (for further cloning). In all
1717 other cases we hit a bug (incorrect node sharing is the
1718 most common reason for missing edges). */
1719 gcc_assert (dest->needed || !dest->analyzed
1720 || dest->address_taken
1721 || !id->src_node->analyzed);
1722 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1723 cgraph_create_edge_including_clones
1724 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1725 compute_call_stmt_bb_frequency (id->dst_node->decl,
1726 copy_basic_block),
1727 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1728 else
1729 cgraph_create_edge (id->dst_node, dest, stmt,
1730 bb->count,
1731 compute_call_stmt_bb_frequency
1732 (id->dst_node->decl, copy_basic_block),
1733 bb->loop_depth)->inline_failed
1734 = CIF_ORIGINALLY_INDIRECT_CALL;
1735 if (dump_file)
1736 {
1737 fprintf (dump_file, "Created new direct edge to %s\n",
1738 cgraph_node_name (dest));
1739 }
1740 }
1741
1742 flags = gimple_call_flags (stmt);
1743 if (flags & ECF_MAY_BE_ALLOCA)
1744 cfun->calls_alloca = true;
1745 if (flags & ECF_RETURNS_TWICE)
1746 cfun->calls_setjmp = true;
1747 }
1748
1749 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1750 id->eh_map, id->eh_lp_nr);
1751
1752 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1753 {
1754 ssa_op_iter i;
1755 tree def;
1756
1757 find_new_referenced_vars (gsi_stmt (copy_gsi));
1758 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1759 if (TREE_CODE (def) == SSA_NAME)
1760 SSA_NAME_DEF_STMT (def) = stmt;
1761 }
1762
1763 gsi_next (&copy_gsi);
1764 }
1765 while (!gsi_end_p (copy_gsi));
1766
1767 copy_gsi = gsi_last_bb (copy_basic_block);
1768 }
1769
1770 return copy_basic_block;
1771 }
1772
1773 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1774 form is quite easy, since the dominator relationship for the old basic blocks
1775 does not change.
1776 
1777 There is however an exception where inlining might change the dominator
1778 relation across EH edges from basic blocks within inlined functions destined
1779 for landing pads in the function we inline into.
1780 
1781 The function fills in PHI_RESULTs of such PHI nodes if they refer
1782 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1783 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1784 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1785 set, which means that there will be no overlapping live ranges
1786 for the underlying symbol.
1787 
1788 This might change in the future if we allow redirecting of EH edges, and
1789 we might then want to change the way the CFG is built pre-inlining to
1790 include all the possible edges. */
1791 static void
1792 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1793 bool can_throw, bool nonlocal_goto)
1794 {
1795 edge e;
1796 edge_iterator ei;
1797
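/* Look only at successor edges that leave the copied region: either the
   destination has no AUX copy at all, or its AUX points back at the
   entry block, so the destination is a block of the function we are
   inlining into rather than a freshly made copy.  */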
1798 FOR_EACH_EDGE (e, ei, bb->succs)
1799 if (!e->dest->aux
1800 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1801 {
1802 gimple phi;
1803 gimple_stmt_iterator si;
1804
1805 if (!nonlocal_goto)
1806 gcc_assert (e->flags & EDGE_EH);
1807
1808 if (!can_throw)
1809 gcc_assert (!(e->flags & EDGE_EH));
1810
1811 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1812 {
1813 edge re;
1814
1815 phi = gsi_stmt (si);
1816
1817 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1818 gcc_assert (!e->dest->aux);
1819
1820 gcc_assert ((e->flags & EDGE_EH)
1821 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1822
1823 if (!is_gimple_reg (PHI_RESULT (phi)))
1824 {
1825 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1826 continue;
1827 }
1828
1829 re = find_edge (ret_bb, e->dest);
1830 gcc_assert (re);
1831 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1832 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1833
1834 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1835 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1836 }
1837 }
1838 }
1839
1840
1841 /* Copy edges from BB into its copy constructed earlier, scaling the profile
1842 accordingly. EH edges will be taken care of later. Assume the aux
1843 pointers point to the copies of each BB. Return true if any
1844 debug stmts are left after a statement that must end the basic block. */
1845
1846 static bool
1847 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1848 {
1849 basic_block new_bb = (basic_block) bb->aux;
1850 edge_iterator ei;
1851 edge old_edge;
1852 gimple_stmt_iterator si;
1853 int flags;
1854 bool need_debug_cleanup = false;
1855
1856 /* Use the indices from the original blocks to create edges for the
1857 new ones. */
1858 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1859 if (!(old_edge->flags & EDGE_EH))
1860 {
1861 edge new_edge;
1862
1863 flags = old_edge->flags;
1864
1865 /* Return edges do get a FALLTHRU flag when they get inlined. */
1866 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1867 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1868 flags |= EDGE_FALLTHRU;
1869 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
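/* COUNT_SCALE is a REG_BR_PROB_BASE-based fixed-point ratio, so this
   rescales the copied edge count in proportion to the new entry count.  */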
1870 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1871 new_edge->probability = old_edge->probability;
1872 }
1873
1874 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1875 return false;
1876
1877 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1878 {
1879 gimple copy_stmt;
1880 bool can_throw, nonlocal_goto;
1881
1882 copy_stmt = gsi_stmt (si);
1883 if (!is_gimple_debug (copy_stmt))
1884 {
1885 update_stmt (copy_stmt);
1886 if (gimple_in_ssa_p (cfun))
1887 mark_symbols_for_renaming (copy_stmt);
1888 }
1889
1890 /* Do this before the possible split_block. */
1891 gsi_next (&si);
1892
1893 /* If this tree could throw an exception, there are two
1894 cases where we need to add abnormal edge(s): the
1895 tree wasn't in a region and there is a "current
1896 region" in the caller; or the original tree had
1897 EH edges. In both cases split the block after the tree,
1898 and add abnormal edge(s) as needed; we need both
1899 those from the callee and the caller.
1900 We check whether the copy can throw, because the const
1901 propagation can change an INDIRECT_REF which throws
1902 into a COMPONENT_REF which doesn't. If the copy
1903 can throw, the original could also throw. */
1904 can_throw = stmt_can_throw_internal (copy_stmt);
1905 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1906
1907 if (can_throw || nonlocal_goto)
1908 {
1909 if (!gsi_end_p (si))
1910 {
1911 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1912 gsi_next (&si);
1913 if (gsi_end_p (si))
1914 need_debug_cleanup = true;
1915 }
1916 if (!gsi_end_p (si))
1917 /* Note that bb's predecessor edges aren't necessarily
1918 right at this point; split_block doesn't care. */
1919 {
1920 edge e = split_block (new_bb, copy_stmt);
1921
1922 new_bb = e->dest;
1923 new_bb->aux = e->src->aux;
1924 si = gsi_start_bb (new_bb);
1925 }
1926 }
1927
1928 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1929 make_eh_dispatch_edges (copy_stmt);
1930 else if (can_throw)
1931 make_eh_edges (copy_stmt);
1932
1933 if (nonlocal_goto)
1934 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1935
1936 if ((can_throw || nonlocal_goto)
1937 && gimple_in_ssa_p (cfun))
1938 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1939 can_throw, nonlocal_goto);
1940 }
1941 return need_debug_cleanup;
1942 }
1943
1944 /* Copy the PHIs. All blocks and edges are copied, some blocks
1945 were possibly split and new outgoing EH edges inserted.
1946 BB points to the block of the original function and AUX pointers link
1947 the original and newly copied blocks. */
1948
1949 static void
1950 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1951 {
1952 basic_block const new_bb = (basic_block) bb->aux;
1953 edge_iterator ei;
1954 gimple phi;
1955 gimple_stmt_iterator si;
1956
1957 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1958 {
1959 tree res, new_res;
1960 gimple new_phi;
1961 edge new_edge;
1962
1963 phi = gsi_stmt (si);
1964 res = PHI_RESULT (phi);
1965 new_res = res;
1966 if (is_gimple_reg (res))
1967 {
1968 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1969 SSA_NAME_DEF_STMT (new_res)
1970 = new_phi = create_phi_node (new_res, new_bb);
1971 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1972 {
1973 edge const old_edge
1974 = find_edge ((basic_block) new_edge->src->aux, bb);
1975 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1976 tree new_arg = arg;
1977 tree block = id->block;
1978 id->block = NULL_TREE;
1979 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1980 id->block = block;
1981 gcc_assert (new_arg);
1982 /* With return slot optimization we can end up with
1983 non-gimple (foo *)&this->m, fix that here. */
1984 if (TREE_CODE (new_arg) != SSA_NAME
1985 && TREE_CODE (new_arg) != FUNCTION_DECL
1986 && !is_gimple_val (new_arg))
1987 {
1988 gimple_seq stmts = NULL;
1989 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1990 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1991 }
1992 add_phi_arg (new_phi, new_arg, new_edge,
1993 gimple_phi_arg_location_from_edge (phi, old_edge));
1994 }
1995 }
1996 }
1997 }
1998
1999
2000 /* Wrapper for remap_decl so it can be used as a callback. */
2001
2002 static tree
2003 remap_decl_1 (tree decl, void *data)
2004 {
2005 return remap_decl (decl, (copy_body_data *) data);
2006 }
2007
2008 /* Build the struct function and associated datastructures for the new clone
2009 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2010
2011 static void
2012 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2013 {
2014 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2015 gcov_type count_scale;
2016
2017 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2018 count_scale = (REG_BR_PROB_BASE * count
2019 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2020 else
2021 count_scale = REG_BR_PROB_BASE;
2022
2023 /* Register specific tree functions. */
2024 gimple_register_cfg_hooks ();
2025
2026 /* Get clean struct function. */
2027 push_struct_function (new_fndecl);
2028
2029 /* We will rebuild these, so just sanity check that they are empty. */
2030 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2031 gcc_assert (cfun->local_decls == NULL);
2032 gcc_assert (cfun->cfg == NULL);
2033 gcc_assert (cfun->decl == new_fndecl);
2034
2035 /* Copy items we preserve during cloning. */
2036 cfun->static_chain_decl = src_cfun->static_chain_decl;
2037 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2038 cfun->function_end_locus = src_cfun->function_end_locus;
2039 cfun->curr_properties = src_cfun->curr_properties;
2040 cfun->last_verified = src_cfun->last_verified;
2041 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2042 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2043 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2044 cfun->stdarg = src_cfun->stdarg;
2045 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2046 cfun->after_inlining = src_cfun->after_inlining;
2047 cfun->can_throw_non_call_exceptions
2048 = src_cfun->can_throw_non_call_exceptions;
2049 cfun->returns_struct = src_cfun->returns_struct;
2050 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2051 cfun->after_tree_profile = src_cfun->after_tree_profile;
2052
2053 init_empty_tree_cfg ();
2054
2055 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2056 ENTRY_BLOCK_PTR->count =
2057 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2058 REG_BR_PROB_BASE);
2059 ENTRY_BLOCK_PTR->frequency
2060 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2061 EXIT_BLOCK_PTR->count =
2062 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2063 REG_BR_PROB_BASE);
2064 EXIT_BLOCK_PTR->frequency =
2065 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2066 if (src_cfun->eh)
2067 init_eh_for_function ();
2068
2069 if (src_cfun->gimple_df)
2070 {
2071 init_tree_ssa (cfun);
2072 cfun->gimple_df->in_ssa_p = true;
2073 init_ssa_operands ();
2074 }
2075 pop_cfun ();
2076 }
2077
2078 /* Helper function for copy_cfg_body. Move debug stmts from the end
2079 of NEW_BB to the beginning of successor basic blocks when needed. If a
2080 successor has multiple predecessors, reset the debug stmts' values,
2081 otherwise keep them. */
2082
2083 static void
2084 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2085 {
2086 edge e;
2087 edge_iterator ei;
2088 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2089
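/* Nothing to do unless the last non-debug statement can throw or make
   an abnormal goto and at least one (debug) statement follows it at
   the end of the block.  */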
2090 if (gsi_end_p (si)
2091 || gsi_one_before_end_p (si)
2092 || !(stmt_can_throw_internal (gsi_stmt (si))
2093 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2094 return;
2095
2096 FOR_EACH_EDGE (e, ei, new_bb->succs)
2097 {
2098 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2099 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2100 while (is_gimple_debug (gsi_stmt (ssi)))
2101 {
2102 gimple stmt = gsi_stmt (ssi), new_stmt;
2103 tree var;
2104 tree value;
2105
2106 /* For the last edge move the debug stmts instead of copying
2107 them. */
2108 if (ei_one_before_end_p (ei))
2109 {
2110 si = ssi;
2111 gsi_prev (&ssi);
2112 if (!single_pred_p (e->dest))
2113 gimple_debug_bind_reset_value (stmt);
2114 gsi_remove (&si, false);
2115 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2116 continue;
2117 }
2118
2119 var = gimple_debug_bind_get_var (stmt);
2120 if (single_pred_p (e->dest))
2121 {
2122 value = gimple_debug_bind_get_value (stmt);
2123 value = unshare_expr (value);
2124 }
2125 else
2126 value = NULL_TREE;
2127 new_stmt = gimple_build_debug_bind (var, value, stmt);
2128 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2129 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2130 gsi_prev (&ssi);
2131 }
2132 }
2133 }
2134
2135 /* Make a copy of the body of FN so that it can be inserted inline in
2136 another function. Walks FN via CFG, returns new fndecl. */
2137
2138 static tree
2139 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2140 basic_block entry_block_map, basic_block exit_block_map,
2141 bitmap blocks_to_copy, basic_block new_entry)
2142 {
2143 tree callee_fndecl = id->src_fn;
2144 /* Original cfun for the callee, doesn't change. */
2145 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2146 struct function *cfun_to_copy;
2147 basic_block bb;
2148 tree new_fndecl = NULL;
2149 bool need_debug_cleanup = false;
2150 gcov_type count_scale;
2151 int last;
2152
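/* Compute the profile scale factor: the ratio between the entry count
   requested for the copy and the entry count of the source function,
   expressed as a REG_BR_PROB_BASE fixed-point value.  */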
2153 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2154 count_scale = (REG_BR_PROB_BASE * count
2155 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2156 else
2157 count_scale = REG_BR_PROB_BASE;
2158
2159 /* Register specific tree functions. */
2160 gimple_register_cfg_hooks ();
2161
2162 /* Must have a CFG here at this point. */
2163 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2164 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2165
2166 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2167
2168 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2169 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2170 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2171 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2172
2173 /* Duplicate any exception-handling regions. */
2174 if (cfun->eh)
2175 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2176 remap_decl_1, id);
2177
2178 /* Use aux pointers to link the original blocks and their copies. */
2179 FOR_EACH_BB_FN (bb, cfun_to_copy)
2180 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2181 {
2182 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2183 bb->aux = new_bb;
2184 new_bb->aux = bb;
2185 }
2186
2187 last = last_basic_block;
2188
2189 /* Now that we've duplicated the blocks, duplicate their edges. */
2190 FOR_ALL_BB_FN (bb, cfun_to_copy)
2191 if (!blocks_to_copy
2192 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2193 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2194
2195 if (gimple_in_ssa_p (cfun))
2196 FOR_ALL_BB_FN (bb, cfun_to_copy)
2197 if (!blocks_to_copy
2198 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2199 copy_phis_for_bb (bb, id);
2200
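/* If an alternate entry point was requested, wire the entry block to it
   with a fallthru edge carrying the whole entry count.  */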
2201 if (new_entry)
2202 {
2203 edge e;
2204 e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2205 e->probability = REG_BR_PROB_BASE;
2206 e->count = entry_block_map->count;
2207 }
2208
2209 FOR_ALL_BB_FN (bb, cfun_to_copy)
2210 if (bb->aux)
2211 {
2212 if (need_debug_cleanup
2213 && bb->index != ENTRY_BLOCK
2214 && bb->index != EXIT_BLOCK)
2215 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2216 ((basic_block)bb->aux)->aux = NULL;
2217 bb->aux = NULL;
2218 }
2219
2220 /* Zero out the AUX fields of blocks newly created during EH edge
2221 insertion. */
2222 for (; last < last_basic_block; last++)
2223 {
2224 if (need_debug_cleanup)
2225 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2226 BASIC_BLOCK (last)->aux = NULL;
2227 }
2228 entry_block_map->aux = NULL;
2229 exit_block_map->aux = NULL;
2230
2231 if (id->eh_map)
2232 {
2233 pointer_map_destroy (id->eh_map);
2234 id->eh_map = NULL;
2235 }
2236
2237 return new_fndecl;
2238 }
2239
2240 /* Copy the debug STMT using ID. We deal with these statements in a
2241 special way: if any variable in their VALUE expression wasn't
2242 remapped yet, we won't remap it, because that would get decl uids
2243 out of sync, causing codegen differences between -g and -g0. If
2244 this arises, we drop the VALUE expression altogether. */
2245
2246 static void
2247 copy_debug_stmt (gimple stmt, copy_body_data *id)
2248 {
2249 tree t, *n;
2250 struct walk_stmt_info wi;
2251
2252 t = id->block;
2253 if (gimple_block (stmt))
2254 {
2255 tree *n;
2256 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2257 if (n)
2258 t = *n;
2259 }
2260 gimple_set_block (stmt, t);
2261
2262 /* Remap all the operands in COPY. */
2263 memset (&wi, 0, sizeof (wi));
2264 wi.info = id;
2265
2266 processing_debug_stmt = 1;
2267
2268 t = gimple_debug_bind_get_var (stmt);
2269
2270 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2271 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2272 {
2273 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2274 t = *n;
2275 }
2276 else if (TREE_CODE (t) == VAR_DECL
2277 && !TREE_STATIC (t)
2278 && gimple_in_ssa_p (cfun)
2279 && !pointer_map_contains (id->decl_map, t)
2280 && !var_ann (t))
2281 /* T is a non-localized variable. */;
2282 else
2283 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2284
2285 gimple_debug_bind_set_var (stmt, t);
2286
2287 if (gimple_debug_bind_has_value_p (stmt))
2288 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2289 remap_gimple_op_r, &wi, NULL);
2290
2291 /* Punt if any decl couldn't be remapped. */
2292 if (processing_debug_stmt < 0)
2293 gimple_debug_bind_reset_value (stmt);
2294
2295 processing_debug_stmt = 0;
2296
2297 update_stmt (stmt);
2298 if (gimple_in_ssa_p (cfun))
2299 mark_symbols_for_renaming (stmt);
2300 }
2301
2302 /* Process deferred debug stmts. In order to give values better odds
2303 of being successfully remapped, we delay the processing of debug
2304 stmts until all other stmts that might require remapping are
2305 processed. */
2306
2307 static void
2308 copy_debug_stmts (copy_body_data *id)
2309 {
2310 size_t i;
2311 gimple stmt;
2312
2313 if (!id->debug_stmts)
2314 return;
2315
2316 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2317 copy_debug_stmt (stmt, id);
2318
2319 VEC_free (gimple, heap, id->debug_stmts);
2320 }
2321
2322 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2323 another function. */
2324
2325 static tree
2326 copy_tree_body (copy_body_data *id)
2327 {
2328 tree fndecl = id->src_fn;
2329 tree body = DECL_SAVED_TREE (fndecl);
2330
2331 walk_tree (&body, copy_tree_body_r, id, NULL);
2332
2333 return body;
2334 }
2335
2336 /* Make a copy of the body of FN so that it can be inserted inline in
2337 another function. */
2338
2339 static tree
2340 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2341 basic_block entry_block_map, basic_block exit_block_map,
2342 bitmap blocks_to_copy, basic_block new_entry)
2343 {
2344 tree fndecl = id->src_fn;
2345 tree body;
2346
2347 /* If this body has a CFG, walk CFG and copy. */
2348 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2349 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2350 blocks_to_copy, new_entry);
2351 copy_debug_stmts (id);
2352
2353 return body;
2354 }
2355
2356 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2357 defined in function FN, or of a data member thereof. */
2358
2359 static bool
2360 self_inlining_addr_expr (tree value, tree fn)
2361 {
2362 tree var;
2363
2364 if (TREE_CODE (value) != ADDR_EXPR)
2365 return false;
2366
2367 var = get_base_address (TREE_OPERAND (value, 0));
2368
2369 return var && auto_var_in_fn_p (var, fn);
2370 }
2371
2372 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2373 lexical block and line number information from BASE_STMT, if given,
2374 or from the last stmt of the block otherwise. */
2375
2376 static gimple
2377 insert_init_debug_bind (copy_body_data *id,
2378 basic_block bb, tree var, tree value,
2379 gimple base_stmt)
2380 {
2381 gimple note;
2382 gimple_stmt_iterator gsi;
2383 tree tracked_var;
2384
2385 if (!gimple_in_ssa_p (id->src_cfun))
2386 return NULL;
2387
2388 if (!MAY_HAVE_DEBUG_STMTS)
2389 return NULL;
2390
2391 tracked_var = target_for_debug_bind (var);
2392 if (!tracked_var)
2393 return NULL;
2394
2395 if (bb)
2396 {
2397 gsi = gsi_last_bb (bb);
2398 if (!base_stmt && !gsi_end_p (gsi))
2399 base_stmt = gsi_stmt (gsi);
2400 }
2401
2402 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2403
2404 if (bb)
2405 {
2406 if (!gsi_end_p (gsi))
2407 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2408 else
2409 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2410 }
2411
2412 return note;
2413 }
2414
2415 static void
2416 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2417 {
2418 /* If VAR represents a zero-sized variable, it's possible that the
2419 assignment statement may result in no gimple statements. */
2420 if (init_stmt)
2421 {
2422 gimple_stmt_iterator si = gsi_last_bb (bb);
2423
2424 /* We can end up with init statements that store to a non-register
2425 from a rhs with a conversion. Handle that here by forcing the
2426 rhs into a temporary. gimple_regimplify_operands is not
2427 prepared to do this for us. */
2428 if (!is_gimple_debug (init_stmt)
2429 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2430 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2431 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2432 {
2433 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2434 gimple_expr_type (init_stmt),
2435 gimple_assign_rhs1 (init_stmt));
2436 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2437 GSI_NEW_STMT);
2438 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2439 gimple_assign_set_rhs1 (init_stmt, rhs);
2440 }
2441 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2442 gimple_regimplify_operands (init_stmt, &si);
2443 mark_symbols_for_renaming (init_stmt);
2444
2445 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2446 {
2447 tree var, def = gimple_assign_lhs (init_stmt);
2448
2449 if (TREE_CODE (def) == SSA_NAME)
2450 var = SSA_NAME_VAR (def);
2451 else
2452 var = def;
2453
2454 insert_init_debug_bind (id, bb, var, def, init_stmt);
2455 }
2456 }
2457 }
2458
2459 /* Initialize parameter P with VALUE. If needed, produce an init statement
2460 at the end of BB. When BB is NULL, we return the init statement to be
2461 output later. */
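/* As a rough illustration (the identifiers are hypothetical): for a call
   "foo (x_3)" to "void foo (int p)", P is the PARM_DECL "p" and VALUE is
   the SSA name "x_3".  If we are in SSA form and the replacement needs no
   extra setup, the parameter's default definition is simply mapped to
   "x_3"; otherwise a VAR_DECL copy of "p" is created and an init
   statement like "p.N = x_3" is appended to BB.  */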
2462 static gimple
2463 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2464 basic_block bb, tree *vars)
2465 {
2466 gimple init_stmt = NULL;
2467 tree var;
2468 tree rhs = value;
2469 tree def = (gimple_in_ssa_p (cfun)
2470 ? gimple_default_def (id->src_cfun, p) : NULL);
2471
2472 if (value
2473 && value != error_mark_node
2474 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2475 {
2476 if (fold_convertible_p (TREE_TYPE (p), value))
2477 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2478 else
2479 /* ??? For valid (GIMPLE) programs we should not end up here.
2480 Still if something has gone wrong and we end up with truly
2481 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2482 to not leak invalid GIMPLE to the following passes. */
2483 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2484 }
2485
2486 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2487 here since the type of this decl must be visible to the calling
2488 function. */
2489 var = copy_decl_to_var (p, id);
2490
2491 /* We're actually using the newly-created var. */
2492 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2493 {
2494 get_var_ann (var);
2495 add_referenced_var (var);
2496 }
2497
2498 /* Declare this new variable. */
2499 TREE_CHAIN (var) = *vars;
2500 *vars = var;
2501
2502 /* Make gimplifier happy about this variable. */
2503 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2504
2505 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2506 we would not need to create a new variable here at all, if it
2507 weren't for debug info. Still, we can just use the argument
2508 value. */
2509 if (TREE_READONLY (p)
2510 && !TREE_ADDRESSABLE (p)
2511 && value && !TREE_SIDE_EFFECTS (value)
2512 && !def)
2513 {
2514 /* We may produce non-gimple trees by adding NOPs or introduce
2515 invalid sharing when the operand is not really constant.
2516 It is not a big deal to prohibit constant propagation here as
2517 we will constant propagate in the DOM1 pass anyway. */
2518 if (is_gimple_min_invariant (value)
2519 && useless_type_conversion_p (TREE_TYPE (p),
2520 TREE_TYPE (value))
2521 /* We have to be very careful about ADDR_EXPR. Make sure
2522 the base variable isn't a local variable of the inlined
2523 function, e.g., when doing recursive inlining, direct or
2524 mutually-recursive or whatever, which is why we don't
2525 just test whether fn == current_function_decl. */
2526 && ! self_inlining_addr_expr (value, fn))
2527 {
2528 insert_decl_map (id, p, value);
2529 insert_debug_decl_map (id, p, var);
2530 return insert_init_debug_bind (id, bb, var, value, NULL);
2531 }
2532 }
2533
2534 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2535 that way, when the PARM_DECL is encountered, it will be
2536 automatically replaced by the VAR_DECL. */
2537 insert_decl_map (id, p, var);
2538
2539 /* Even if P was TREE_READONLY, the new VAR should not be.
2540 In the original code, we would have constructed a
2541 temporary, and then the function body would have never
2542 changed the value of P. However, now, we will be
2543 constructing VAR directly. The constructor body may
2544 change its value multiple times as it is being
2545 constructed. Therefore, it must not be TREE_READONLY;
2546 the back-end assumes that TREE_READONLY variable is
2547 assigned to only once. */
2548 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2549 TREE_READONLY (var) = 0;
2550
2551 /* If there is no setup required and we are in SSA, take the easy route
2552 replacing all SSA names representing the function parameter by the
2553 SSA name passed to the function.
2554
2555 We need to construct a map for the variable anyway, as it might be used
2556 in different SSA names when the parameter is set in the function.
2557
2558 Do the replacement at -O0 for const arguments replaced by constants.
2559 This is important for builtin_constant_p and other constructs requiring
2560 a constant argument to be visible in the inlined function body. */
2561 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2562 && (optimize
2563 || (TREE_READONLY (p)
2564 && is_gimple_min_invariant (rhs)))
2565 && (TREE_CODE (rhs) == SSA_NAME
2566 || is_gimple_min_invariant (rhs))
2567 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2568 {
2569 insert_decl_map (id, def, rhs);
2570 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2571 }
2572
2573 /* If the value of the argument is never used, don't bother initializing
2574 it. */
2575 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2576 {
2577 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2578 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2579 }
2580
2581 /* Initialize this VAR_DECL from the equivalent argument. Convert
2582 the argument to the proper type in case it was promoted. */
2583 if (value)
2584 {
2585 if (rhs == error_mark_node)
2586 {
2587 insert_decl_map (id, p, var);
2588 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2589 }
2590
2591 STRIP_USELESS_TYPE_CONVERSION (rhs);
2592
2593 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2594 keep our trees in gimple form. */
2595 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2596 {
2597 def = remap_ssa_name (def, id);
2598 init_stmt = gimple_build_assign (def, rhs);
2599 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2600 set_default_def (var, NULL);
2601 }
2602 else
2603 init_stmt = gimple_build_assign (var, rhs);
2604
2605 if (bb && init_stmt)
2606 insert_init_stmt (id, bb, init_stmt);
2607 }
2608 return init_stmt;
2609 }
2610
2611 /* Generate code to initialize the parameters of the function at the
2612 top of the stack in ID from the GIMPLE_CALL STMT. */
2613
2614 static void
2615 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2616 tree fn, basic_block bb)
2617 {
2618 tree parms;
2619 size_t i;
2620 tree p;
2621 tree vars = NULL_TREE;
2622 tree static_chain = gimple_call_chain (stmt);
2623
2624 /* Figure out what the parameters are. */
2625 parms = DECL_ARGUMENTS (fn);
2626
2627 /* Loop through the parameter declarations, replacing each with an
2628 equivalent VAR_DECL, appropriately initialized. */
2629 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2630 {
2631 tree val;
2632 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2633 setup_one_parameter (id, p, val, fn, bb, &vars);
2634 }
2635
2636 /* Initialize the static chain. */
2637 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2638 gcc_assert (fn != current_function_decl);
2639 if (p)
2640 {
2641 /* No static chain? Seems like a bug in tree-nested.c. */
2642 gcc_assert (static_chain);
2643
2644 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2645 }
2646
2647 declare_inline_vars (id->block, vars);
2648 }
2649
2650
2651 /* Declare a return variable to replace the RESULT_DECL for the
2652 function we are calling. An appropriate DECL_STMT is returned.
2653 The USE_STMT is filled to contain a use of the declaration to
2654 indicate the return value of the function.
2655
2656 RETURN_SLOT, if non-null, is the place where to store the result. It
2657 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2658 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2659
2660 The return value is a (possibly null) value that holds the result
2661 as seen by the caller. */
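/* For illustration (names are hypothetical): for a call "y = bar ();"
   MODIFY_DEST is "y"; when "y" is a local, non-addressable object of a
   compatible type it is reused directly as the return variable, and
   otherwise a temporary modelled on bar's RESULT_DECL is declared and
   its use is what the caller sees as the result.  */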
2662
2663 static tree
2664 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2665 {
2666 tree callee = id->src_fn;
2667 tree caller = id->dst_fn;
2668 tree result = DECL_RESULT (callee);
2669 tree callee_type = TREE_TYPE (result);
2670 tree caller_type;
2671 tree var, use;
2672
2673 /* Handle type-mismatches in the function declaration return type
2674 vs. the call expression. */
2675 if (modify_dest)
2676 caller_type = TREE_TYPE (modify_dest);
2677 else
2678 caller_type = TREE_TYPE (TREE_TYPE (callee));
2679
2680 /* We don't need to do anything for functions that don't return
2681 anything. */
2682 if (!result || VOID_TYPE_P (callee_type))
2683 return NULL_TREE;
2684
2685 /* If there was a return slot, then the return value is the
2686 dereferenced address of that object. */
2687 if (return_slot)
2688 {
2689 /* The front end shouldn't have used both return_slot and
2690 a modify expression. */
2691 gcc_assert (!modify_dest);
2692 if (DECL_BY_REFERENCE (result))
2693 {
2694 tree return_slot_addr = build_fold_addr_expr (return_slot);
2695 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2696
2697 /* We are going to construct *&return_slot and we can't do that
2698 for variables believed to be not addressable.
2699
2700 FIXME: This check can possibly match, because values returned
2701 via return slot optimization are not believed to have their address
2702 taken by alias analysis. */
2703 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2704 if (gimple_in_ssa_p (cfun))
2705 {
2706 HOST_WIDE_INT bitsize;
2707 HOST_WIDE_INT bitpos;
2708 tree offset;
2709 enum machine_mode mode;
2710 int unsignedp;
2711 int volatilep;
2712 tree base;
2713 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2714 &offset,
2715 &mode, &unsignedp, &volatilep,
2716 false);
2717 if (TREE_CODE (base) == INDIRECT_REF)
2718 base = TREE_OPERAND (base, 0);
2719 if (TREE_CODE (base) == SSA_NAME)
2720 base = SSA_NAME_VAR (base);
2721 mark_sym_for_renaming (base);
2722 }
2723 var = return_slot_addr;
2724 }
2725 else
2726 {
2727 var = return_slot;
2728 gcc_assert (TREE_CODE (var) != SSA_NAME);
2729 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2730 }
2731 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2732 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2733 && !DECL_GIMPLE_REG_P (result)
2734 && DECL_P (var))
2735 DECL_GIMPLE_REG_P (var) = 0;
2736 use = NULL;
2737 goto done;
2738 }
2739
2740 /* All types requiring non-trivial constructors should have been handled. */
2741 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2742
2743 /* Attempt to avoid creating a new temporary variable. */
2744 if (modify_dest
2745 && TREE_CODE (modify_dest) != SSA_NAME)
2746 {
2747 bool use_it = false;
2748
2749 /* We can't use MODIFY_DEST if there's type promotion involved. */
2750 if (!useless_type_conversion_p (callee_type, caller_type))
2751 use_it = false;
2752
2753 /* ??? If we're assigning to a variable sized type, then we must
2754 reuse the destination variable, because we've no good way to
2755 create variable sized temporaries at this point. */
2756 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2757 use_it = true;
2758
2759 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2760 reuse it as the result of the call directly. Don't do this if
2761 it would promote MODIFY_DEST to addressable. */
2762 else if (TREE_ADDRESSABLE (result))
2763 use_it = false;
2764 else
2765 {
2766 tree base_m = get_base_address (modify_dest);
2767
2768 /* If the base isn't a decl, then it's a pointer, and we don't
2769 know where that's going to go. */
2770 if (!DECL_P (base_m))
2771 use_it = false;
2772 else if (is_global_var (base_m))
2773 use_it = false;
2774 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2775 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2776 && !DECL_GIMPLE_REG_P (result)
2777 && DECL_GIMPLE_REG_P (base_m))
2778 use_it = false;
2779 else if (!TREE_ADDRESSABLE (base_m))
2780 use_it = true;
2781 }
2782
2783 if (use_it)
2784 {
2785 var = modify_dest;
2786 use = NULL;
2787 goto done;
2788 }
2789 }
2790
2791 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2792
2793 var = copy_result_decl_to_var (result, id);
2794 if (gimple_in_ssa_p (cfun))
2795 {
2796 get_var_ann (var);
2797 add_referenced_var (var);
2798 }
2799
2800 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2801 DECL_STRUCT_FUNCTION (caller)->local_decls
2802 = tree_cons (NULL_TREE, var,
2803 DECL_STRUCT_FUNCTION (caller)->local_decls);
2804
2805 /* Do not have the rest of GCC warn about this variable as it should
2806 not be visible to the user. */
2807 TREE_NO_WARNING (var) = 1;
2808
2809 declare_inline_vars (id->block, var);
2810
2811 /* Build the use expr. If the return type of the function was
2812 promoted, convert it back to the expected type. */
2813 use = var;
2814 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2815 use = fold_convert (caller_type, var);
2816
2817 STRIP_USELESS_TYPE_CONVERSION (use);
2818
2819 if (DECL_BY_REFERENCE (result))
2820 {
2821 TREE_ADDRESSABLE (var) = 1;
2822 var = build_fold_addr_expr (var);
2823 }
2824
2825 done:
2826 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2827 way, when the RESULT_DECL is encountered, it will be
2828 automatically replaced by the VAR_DECL. */
2829 insert_decl_map (id, result, var);
2830
2831 /* Remember this so we can ignore it in remap_decls. */
2832 id->retvar = var;
2833
2834 return use;
2835 }
2836
2837 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2838 to a local label. */
2839
2840 static tree
2841 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2842 {
2843 tree node = *nodep;
2844 tree fn = (tree) fnp;
2845
2846 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2847 return node;
2848
2849 if (TYPE_P (node))
2850 *walk_subtrees = 0;
2851
2852 return NULL_TREE;
2853 }
2854
2855 /* Determine if the function can be copied. If so return NULL. If
2856 not, return a string describing the reason for failure. */
2857
2858 static const char *
2859 copy_forbidden (struct function *fun, tree fndecl)
2860 {
2861 const char *reason = fun->cannot_be_copied_reason;
2862 tree step;
2863
2864 /* Only examine the function once. */
2865 if (fun->cannot_be_copied_set)
2866 return reason;
2867
2868 /* We cannot copy a function that receives a non-local goto
2869 because we cannot remap the destination label used in the
2870 function that is performing the non-local goto. */
2871 /* ??? Actually, this should be possible, if we work at it.
2872 No doubt there's just a handful of places that simply
2873 assume it doesn't happen and don't substitute properly. */
2874 if (fun->has_nonlocal_label)
2875 {
2876 reason = G_("function %q+F can never be copied "
2877 "because it receives a non-local goto");
2878 goto fail;
2879 }
2880
2881 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2882 {
2883 tree decl = TREE_VALUE (step);
2884
2885 if (TREE_CODE (decl) == VAR_DECL
2886 && TREE_STATIC (decl)
2887 && !DECL_EXTERNAL (decl)
2888 && DECL_INITIAL (decl)
2889 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2890 has_label_address_in_static_1,
2891 fndecl))
2892 {
2893 reason = G_("function %q+F can never be copied because it saves "
2894 "address of local label in a static variable");
2895 goto fail;
2896 }
2897 }
2898
2899 fail:
2900 fun->cannot_be_copied_reason = reason;
2901 fun->cannot_be_copied_set = true;
2902 return reason;
2903 }
2904
2905
2906 static const char *inline_forbidden_reason;
2907
2908 /* A callback for walk_gimple_seq to handle statements. Returns non-NULL
2909 iff a function cannot be inlined. Also sets the reason why. */
2910
2911 static tree
2912 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2913 struct walk_stmt_info *wip)
2914 {
2915 tree fn = (tree) wip->info;
2916 tree t;
2917 gimple stmt = gsi_stmt (*gsi);
2918
2919 switch (gimple_code (stmt))
2920 {
2921 case GIMPLE_CALL:
2922 /* Refuse to inline an alloca call unless the user explicitly forced it,
2923 as this may change the program's memory overhead drastically when the
2924 function using alloca is called in a loop. In the GCC present in
2925 SPEC2000, inlining into schedule_block caused it to require 2GB of
2926 RAM instead of 256MB. */
2927 if (gimple_alloca_call_p (stmt)
2928 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2929 {
2930 inline_forbidden_reason
2931 = G_("function %q+F can never be inlined because it uses "
2932 "alloca (override using the always_inline attribute)");
2933 *handled_ops_p = true;
2934 return fn;
2935 }
2936
2937 t = gimple_call_fndecl (stmt);
2938 if (t == NULL_TREE)
2939 break;
2940
2941 /* We cannot inline functions that call setjmp. */
2942 if (setjmp_call_p (t))
2943 {
2944 inline_forbidden_reason
2945 = G_("function %q+F can never be inlined because it uses setjmp");
2946 *handled_ops_p = true;
2947 return t;
2948 }
2949
2950 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2951 switch (DECL_FUNCTION_CODE (t))
2952 {
2953 /* We cannot inline functions that take a variable number of
2954 arguments. */
2955 case BUILT_IN_VA_START:
2956 case BUILT_IN_NEXT_ARG:
2957 case BUILT_IN_VA_END:
2958 inline_forbidden_reason
2959 = G_("function %q+F can never be inlined because it "
2960 "uses variable argument lists");
2961 *handled_ops_p = true;
2962 return t;
2963
2964 case BUILT_IN_LONGJMP:
2965 /* We can't inline functions that call __builtin_longjmp at
2966 all. The non-local goto machinery really requires the
2967 destination be in a different function. If we allow the
2968 function calling __builtin_longjmp to be inlined into the
2969 function calling __builtin_setjmp, Things will Go Awry. */
2970 inline_forbidden_reason
2971 = G_("function %q+F can never be inlined because "
2972 "it uses setjmp-longjmp exception handling");
2973 *handled_ops_p = true;
2974 return t;
2975
2976 case BUILT_IN_NONLOCAL_GOTO:
2977 /* Similarly. */
2978 inline_forbidden_reason
2979 = G_("function %q+F can never be inlined because "
2980 "it uses non-local goto");
2981 *handled_ops_p = true;
2982 return t;
2983
2984 case BUILT_IN_RETURN:
2985 case BUILT_IN_APPLY_ARGS:
2986 /* If a __builtin_apply_args caller would be inlined,
2987 it would be saving arguments of the function it has
2988 been inlined into. Similarly __builtin_return would
2989 return from the function the inline has been inlined into. */
2990 inline_forbidden_reason
2991 = G_("function %q+F can never be inlined because "
2992 "it uses __builtin_return or __builtin_apply_args");
2993 *handled_ops_p = true;
2994 return t;
2995
2996 default:
2997 break;
2998 }
2999 break;
3000
3001 case GIMPLE_GOTO:
3002 t = gimple_goto_dest (stmt);
3003
3004 /* We will not inline a function which uses computed goto. The
3005 addresses of its local labels, which may be tucked into
3006 global storage, are of course not constant across
3007 instantiations, which causes unexpected behavior. */
3008 if (TREE_CODE (t) != LABEL_DECL)
3009 {
3010 inline_forbidden_reason
3011 = G_("function %q+F can never be inlined "
3012 "because it contains a computed goto");
3013 *handled_ops_p = true;
3014 return t;
3015 }
3016 break;
3017
3018 default:
3019 break;
3020 }
3021
3022 *handled_ops_p = false;
3023 return NULL_TREE;
3024 }
3025
3026 /* Return true if FNDECL is a function that cannot be inlined into
3027 another one. */
3028
3029 static bool
3030 inline_forbidden_p (tree fndecl)
3031 {
3032 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3033 struct walk_stmt_info wi;
3034 struct pointer_set_t *visited_nodes;
3035 basic_block bb;
3036 bool forbidden_p = false;
3037
3038 /* First check for shared reasons not to copy the code. */
3039 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3040 if (inline_forbidden_reason != NULL)
3041 return true;
3042
3043 /* Next, walk the statements of the function looking for
3044 constructs we can't handle or that are non-optimal for inlining. */
3045 visited_nodes = pointer_set_create ();
3046 memset (&wi, 0, sizeof (wi));
3047 wi.info = (void *) fndecl;
3048 wi.pset = visited_nodes;
3049
3050 FOR_EACH_BB_FN (bb, fun)
3051 {
3052 gimple ret;
3053 gimple_seq seq = bb_seq (bb);
3054 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3055 forbidden_p = (ret != NULL);
3056 if (forbidden_p)
3057 break;
3058 }
3059
3060 pointer_set_destroy (visited_nodes);
3061 return forbidden_p;
3062 }
3063
3064 /* Return true if CALLEE cannot be inlined into CALLER. */
3065
3066 static bool
3067 inline_forbidden_into_p (tree caller, tree callee)
3068 {
3069 /* Don't inline if the functions have different EH personalities. */
3070 if (DECL_FUNCTION_PERSONALITY (caller)
3071 && DECL_FUNCTION_PERSONALITY (callee)
3072 && (DECL_FUNCTION_PERSONALITY (caller)
3073 != DECL_FUNCTION_PERSONALITY (callee)))
3074 return true;
3075
3076 /* Don't inline if the callee can throw non-call exceptions but the
3077 caller cannot. */
3078 if (DECL_STRUCT_FUNCTION (callee)
3079 && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
3080 && !(DECL_STRUCT_FUNCTION (caller)
3081 && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
3082 return true;
3083
3084 return false;
3085 }
3086
3087 /* Returns nonzero if FN is a function that does not have any
3088 fundamental inline blocking properties. */
3089
3090 bool
3091 tree_inlinable_function_p (tree fn)
3092 {
3093 bool inlinable = true;
3094 bool do_warning;
3095 tree always_inline;
3096
3097 /* If we've already decided this function shouldn't be inlined,
3098 there's no need to check again. */
3099 if (DECL_UNINLINABLE (fn))
3100 return false;
3101
3102 /* We only warn for functions declared `inline' by the user. */
3103 do_warning = (warn_inline
3104 && DECL_DECLARED_INLINE_P (fn)
3105 && !DECL_NO_INLINE_WARNING_P (fn)
3106 && !DECL_IN_SYSTEM_HEADER (fn));
3107
3108 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3109
3110 if (flag_no_inline
3111 && always_inline == NULL)
3112 {
3113 if (do_warning)
3114 warning (OPT_Winline, "function %q+F can never be inlined because it "
3115 "is suppressed using -fno-inline", fn);
3116 inlinable = false;
3117 }
3118
3119 /* Don't auto-inline anything that might not be bound within
3120 this unit of translation. */
3121 else if (!DECL_DECLARED_INLINE_P (fn)
3122 && DECL_REPLACEABLE_P (fn))
3123 inlinable = false;
3124
3125 else if (!function_attribute_inlinable_p (fn))
3126 {
3127 if (do_warning)
3128 warning (OPT_Winline, "function %q+F can never be inlined because it "
3129 "uses attributes conflicting with inlining", fn);
3130 inlinable = false;
3131 }
3132
3133 else if (inline_forbidden_p (fn))
3134 {
3135 /* See if we should warn about uninlinable functions. Previously,
3136 some of these warnings would be issued while trying to expand
3137 the function inline, but that would cause multiple warnings
3138 about functions that would for example call alloca. But since
3139 this is a property of the function, just one warning is enough.
3140 As a bonus we can now give more details about the reason why a
3141 function is not inlinable. */
3142 if (always_inline)
3143 sorry (inline_forbidden_reason, fn);
3144 else if (do_warning)
3145 warning (OPT_Winline, inline_forbidden_reason, fn);
3146
3147 inlinable = false;
3148 }
3149
3150 /* Squirrel away the result so that we don't have to check again. */
3151 DECL_UNINLINABLE (fn) = !inlinable;
3152
3153 return inlinable;
3154 }
3155
3156 /* Estimate the cost of a memory move. Use the machine-dependent
3157 word size and take a possible memcpy call into account. */
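/* For example, with MOVE_MAX_PIECES == 8 a 24-byte structure below the
   memcpy threshold costs (24 + 8 - 1) / 8 == 3, while anything large
   enough to become a memcpy call costs a flat 4 (three arguments plus
   the call itself).  */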
3158
3159 int
3160 estimate_move_cost (tree type)
3161 {
3162 HOST_WIDE_INT size;
3163
3164 gcc_assert (!VOID_TYPE_P (type));
3165
3166 size = int_size_in_bytes (type);
3167
3168 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3169 /* Cost of a memcpy call, 3 arguments and the call. */
3170 return 4;
3171 else
3172 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3173 }
3174
3175 /* Returns the cost of operation CODE, according to WEIGHTS. */
3176
3177 static int
3178 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3179 tree op1 ATTRIBUTE_UNUSED, tree op2)
3180 {
3181 switch (code)
3182 {
3183 /* These are "free" conversions, or their presumed cost
3184 is folded into other operations. */
3185 case RANGE_EXPR:
3186 CASE_CONVERT:
3187 case COMPLEX_EXPR:
3188 case PAREN_EXPR:
3189 return 0;
3190
3191 /* Assign cost of 1 to usual operations.
3192 ??? We may consider mapping RTL costs to this. */
3193 case COND_EXPR:
3194 case VEC_COND_EXPR:
3195
3196 case PLUS_EXPR:
3197 case POINTER_PLUS_EXPR:
3198 case MINUS_EXPR:
3199 case MULT_EXPR:
3200
3201 case ADDR_SPACE_CONVERT_EXPR:
3202 case FIXED_CONVERT_EXPR:
3203 case FIX_TRUNC_EXPR:
3204
3205 case NEGATE_EXPR:
3206 case FLOAT_EXPR:
3207 case MIN_EXPR:
3208 case MAX_EXPR:
3209 case ABS_EXPR:
3210
3211 case LSHIFT_EXPR:
3212 case RSHIFT_EXPR:
3213 case LROTATE_EXPR:
3214 case RROTATE_EXPR:
3215 case VEC_LSHIFT_EXPR:
3216 case VEC_RSHIFT_EXPR:
3217
3218 case BIT_IOR_EXPR:
3219 case BIT_XOR_EXPR:
3220 case BIT_AND_EXPR:
3221 case BIT_NOT_EXPR:
3222
3223 case TRUTH_ANDIF_EXPR:
3224 case TRUTH_ORIF_EXPR:
3225 case TRUTH_AND_EXPR:
3226 case TRUTH_OR_EXPR:
3227 case TRUTH_XOR_EXPR:
3228 case TRUTH_NOT_EXPR:
3229
3230 case LT_EXPR:
3231 case LE_EXPR:
3232 case GT_EXPR:
3233 case GE_EXPR:
3234 case EQ_EXPR:
3235 case NE_EXPR:
3236 case ORDERED_EXPR:
3237 case UNORDERED_EXPR:
3238
3239 case UNLT_EXPR:
3240 case UNLE_EXPR:
3241 case UNGT_EXPR:
3242 case UNGE_EXPR:
3243 case UNEQ_EXPR:
3244 case LTGT_EXPR:
3245
3246 case CONJ_EXPR:
3247
3248 case PREDECREMENT_EXPR:
3249 case PREINCREMENT_EXPR:
3250 case POSTDECREMENT_EXPR:
3251 case POSTINCREMENT_EXPR:
3252
3253 case REALIGN_LOAD_EXPR:
3254
3255 case REDUC_MAX_EXPR:
3256 case REDUC_MIN_EXPR:
3257 case REDUC_PLUS_EXPR:
3258 case WIDEN_SUM_EXPR:
3259 case WIDEN_MULT_EXPR:
3260 case DOT_PROD_EXPR:
3261
3262 case VEC_WIDEN_MULT_HI_EXPR:
3263 case VEC_WIDEN_MULT_LO_EXPR:
3264 case VEC_UNPACK_HI_EXPR:
3265 case VEC_UNPACK_LO_EXPR:
3266 case VEC_UNPACK_FLOAT_HI_EXPR:
3267 case VEC_UNPACK_FLOAT_LO_EXPR:
3268 case VEC_PACK_TRUNC_EXPR:
3269 case VEC_PACK_SAT_EXPR:
3270 case VEC_PACK_FIX_TRUNC_EXPR:
3271 case VEC_EXTRACT_EVEN_EXPR:
3272 case VEC_EXTRACT_ODD_EXPR:
3273 case VEC_INTERLEAVE_HIGH_EXPR:
3274 case VEC_INTERLEAVE_LOW_EXPR:
3275
3276 return 1;
3277
3278 /* A few special cases of expensive operations. This is useful
3279 to avoid inlining functions having too many of these. */
3280 case TRUNC_DIV_EXPR:
3281 case CEIL_DIV_EXPR:
3282 case FLOOR_DIV_EXPR:
3283 case ROUND_DIV_EXPR:
3284 case EXACT_DIV_EXPR:
3285 case TRUNC_MOD_EXPR:
3286 case CEIL_MOD_EXPR:
3287 case FLOOR_MOD_EXPR:
3288 case ROUND_MOD_EXPR:
3289 case RDIV_EXPR:
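/* Division or modulo by a constant is typically strength-reduced to
   multiplies and shifts, so only a non-constant divisor gets the
   expensive DIV_MOD_COST weight.  */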
3290 if (TREE_CODE (op2) != INTEGER_CST)
3291 return weights->div_mod_cost;
3292 return 1;
3293
3294 default:
3295 /* We expect a copy assignment with no operator. */
3296 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3297 return 0;
3298 }
3299 }
3300
3301
3302 /* Estimate number of instructions that will be created by expanding
3303 the statements in the statement sequence STMTS.
3304 WEIGHTS contains weights attributed to various constructs. */
3305
3306 static
3307 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3308 {
3309 int cost;
3310 gimple_stmt_iterator gsi;
3311
3312 cost = 0;
3313 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3314 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3315
3316 return cost;
3317 }
3318
3319
3320 /* Estimate number of instructions that will be created by expanding STMT.
3321 WEIGHTS contains weights attributed to various constructs. */
3322
3323 int
3324 estimate_num_insns (gimple stmt, eni_weights *weights)
3325 {
3326 unsigned cost, i;
3327 enum gimple_code code = gimple_code (stmt);
3328 tree lhs;
3329 tree rhs;
3330
3331 switch (code)
3332 {
3333 case GIMPLE_ASSIGN:
3334 /* Try to estimate the cost of assignments. We have two cases to
3335 deal with:
3336 1) Simple assignments to registers;
3337 2) Stores to things that must live in memory. This includes
3338 "normal" stores to scalars, but also assignments of large
3339 structures, or constructors of big arrays;
3340
3341 Let us look at these two cases, assuming we have "a = b + C":
3342 <GIMPLE_ASSIGN <var_decl "a">
3343 <plus_expr <var_decl "b"> <constant C>>
3344 If "a" is a GIMPLE register, the assignment to it is free on almost
3345 any target, because "a" usually ends up in a real register. Hence
3346 the only cost of this expression comes from the PLUS_EXPR, and we
3347 can ignore the GIMPLE_ASSIGN.
3348 If "a" is not a GIMPLE register, the assignment to "a" will most
3349 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3350 of moving something into "a", which we compute using the function
3351 estimate_move_cost. */
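/* For instance, "a_1 = b_2 + 1" with a register lhs costs just 1 (the
   PLUS_EXPR), while "x.f = b_2 + 1" with "x.f" living in memory
   additionally pays estimate_move_cost for the store.  */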
3352 lhs = gimple_assign_lhs (stmt);
3353 rhs = gimple_assign_rhs1 (stmt);
3354
3355 if (is_gimple_reg (lhs))
3356 cost = 0;
3357 else
3358 cost = estimate_move_cost (TREE_TYPE (lhs));
3359
3360 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3361 cost += estimate_move_cost (TREE_TYPE (rhs));
3362
3363 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3364 gimple_assign_rhs1 (stmt),
3365 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3366 == GIMPLE_BINARY_RHS
3367 ? gimple_assign_rhs2 (stmt) : NULL);
3368 break;
3369
3370 case GIMPLE_COND:
3371 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3372 gimple_op (stmt, 0),
3373 gimple_op (stmt, 1));
3374 break;
3375
3376 case GIMPLE_SWITCH:
3377 /* Take into account the cost of the switch + guess 2 conditional jumps for
3378 each case label.
3379
3380 TODO: once the switch expansion logic is sufficiently separated, we can
3381 do a better job of estimating the cost of the switch. */
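/* E.g. a GIMPLE_SWITCH with 8 labels is sized as 8 * 2 == 16 insns but
   timed as floor_log2 (8) * 2 == 6, assuming a balanced decision tree.  */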
3382 if (weights->time_based)
3383 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3384 else
3385 cost = gimple_switch_num_labels (stmt) * 2;
3386 break;
3387
3388 case GIMPLE_CALL:
3389 {
3390 tree decl = gimple_call_fndecl (stmt);
3391 tree addr = gimple_call_fn (stmt);
3392 tree funtype = TREE_TYPE (addr);
3393 bool stdarg = false;
3394
3395 if (POINTER_TYPE_P (funtype))
3396 funtype = TREE_TYPE (funtype);
3397
3398 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3399 cost = weights->target_builtin_call_cost;
3400 else
3401 cost = weights->call_cost;
3402
3403 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3404 switch (DECL_FUNCTION_CODE (decl))
3405 {
3406 /* Builtins that expand to constants. */
3407 case BUILT_IN_CONSTANT_P:
3408 case BUILT_IN_EXPECT:
3409 case BUILT_IN_OBJECT_SIZE:
3410 case BUILT_IN_UNREACHABLE:
3411 /* Simple register moves or loads from stack. */
3412 case BUILT_IN_RETURN_ADDRESS:
3413 case BUILT_IN_EXTRACT_RETURN_ADDR:
3414 case BUILT_IN_FROB_RETURN_ADDR:
3415 case BUILT_IN_RETURN:
3416 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3417 case BUILT_IN_FRAME_ADDRESS:
3418 case BUILT_IN_VA_END:
3419 case BUILT_IN_STACK_SAVE:
3420 case BUILT_IN_STACK_RESTORE:
3421 /* Exception state returns or moves registers around. */
3422 case BUILT_IN_EH_FILTER:
3423 case BUILT_IN_EH_POINTER:
3424 case BUILT_IN_EH_COPY_VALUES:
3425 return 0;
3426
3427 /* Builtins that are not expensive (that is, they are most probably
3428 expanded inline into reasonably simple code). */
3429 case BUILT_IN_ABS:
3430 case BUILT_IN_ALLOCA:
3431 case BUILT_IN_BSWAP32:
3432 case BUILT_IN_BSWAP64:
3433 case BUILT_IN_CLZ:
3434 case BUILT_IN_CLZIMAX:
3435 case BUILT_IN_CLZL:
3436 case BUILT_IN_CLZLL:
3437 case BUILT_IN_CTZ:
3438 case BUILT_IN_CTZIMAX:
3439 case BUILT_IN_CTZL:
3440 case BUILT_IN_CTZLL:
3441 case BUILT_IN_FFS:
3442 case BUILT_IN_FFSIMAX:
3443 case BUILT_IN_FFSL:
3444 case BUILT_IN_FFSLL:
3445 case BUILT_IN_IMAXABS:
3446 case BUILT_IN_FINITE:
3447 case BUILT_IN_FINITEF:
3448 case BUILT_IN_FINITEL:
3449 case BUILT_IN_FINITED32:
3450 case BUILT_IN_FINITED64:
3451 case BUILT_IN_FINITED128:
3452 case BUILT_IN_FPCLASSIFY:
3453 case BUILT_IN_ISFINITE:
3454 case BUILT_IN_ISINF_SIGN:
3455 case BUILT_IN_ISINF:
3456 case BUILT_IN_ISINFF:
3457 case BUILT_IN_ISINFL:
3458 case BUILT_IN_ISINFD32:
3459 case BUILT_IN_ISINFD64:
3460 case BUILT_IN_ISINFD128:
3461 case BUILT_IN_ISNAN:
3462 case BUILT_IN_ISNANF:
3463 case BUILT_IN_ISNANL:
3464 case BUILT_IN_ISNAND32:
3465 case BUILT_IN_ISNAND64:
3466 case BUILT_IN_ISNAND128:
3467 case BUILT_IN_ISNORMAL:
3468 case BUILT_IN_ISGREATER:
3469 case BUILT_IN_ISGREATEREQUAL:
3470 case BUILT_IN_ISLESS:
3471 case BUILT_IN_ISLESSEQUAL:
3472 case BUILT_IN_ISLESSGREATER:
3473 case BUILT_IN_ISUNORDERED:
3474 case BUILT_IN_VA_ARG_PACK:
3475 case BUILT_IN_VA_ARG_PACK_LEN:
3476 case BUILT_IN_VA_COPY:
3477 case BUILT_IN_TRAP:
3478 case BUILT_IN_SAVEREGS:
3479 case BUILT_IN_POPCOUNTL:
3480 case BUILT_IN_POPCOUNTLL:
3481 case BUILT_IN_POPCOUNTIMAX:
3482 case BUILT_IN_POPCOUNT:
3483 case BUILT_IN_PARITYL:
3484 case BUILT_IN_PARITYLL:
3485 case BUILT_IN_PARITYIMAX:
3486 case BUILT_IN_PARITY:
3487 case BUILT_IN_LABS:
3488 case BUILT_IN_LLABS:
3489 case BUILT_IN_PREFETCH:
3490 cost = weights->target_builtin_call_cost;
3491 break;
3492
3493 default:
3494 break;
3495 }
3496
3497 if (decl)
3498 funtype = TREE_TYPE (decl);
3499
3500 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3501 cost += estimate_move_cost (TREE_TYPE (funtype));
3502
3503 if (funtype)
3504 stdarg = stdarg_p (funtype);
3505
3506 /* Our cost must be kept in sync with
3507 cgraph_estimate_size_after_inlining, which uses the function
3508 declaration to figure out the arguments.
3509
3510 For functions taking a variable list of arguments we must
3511 look into the call statement itself. This is safe because
3512 we will only get higher costs and in most cases we will
3513 not inline these anyway. */
3514 if (decl && DECL_ARGUMENTS (decl) && !stdarg)
3515 {
3516 tree arg;
3517 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3518 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3519 cost += estimate_move_cost (TREE_TYPE (arg));
3520 }
3521 else if (funtype && prototype_p (funtype) && !stdarg)
3522 {
3523 tree t;
3524 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3525 t = TREE_CHAIN (t))
3526 if (!VOID_TYPE_P (TREE_VALUE (t)))
3527 cost += estimate_move_cost (TREE_VALUE (t));
3528 }
3529 else
3530 {
3531 for (i = 0; i < gimple_call_num_args (stmt); i++)
3532 {
3533 tree arg = gimple_call_arg (stmt, i);
3534 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3535 cost += estimate_move_cost (TREE_TYPE (arg));
3536 }
3537 }
3538
3539 break;
3540 }
3541
3542 case GIMPLE_GOTO:
3543 case GIMPLE_LABEL:
3544 case GIMPLE_NOP:
3545 case GIMPLE_PHI:
3546 case GIMPLE_RETURN:
3547 case GIMPLE_PREDICT:
3548 case GIMPLE_DEBUG:
3549 return 0;
3550
3551 case GIMPLE_ASM:
3552 return asm_str_count (gimple_asm_string (stmt));
3553
3554 case GIMPLE_RESX:
3555 /* This is either going to be an external function call with one
3556 argument, or two register copy statements plus a goto. */
3557 return 2;
3558
3559 case GIMPLE_EH_DISPATCH:
3560 /* ??? This is going to turn into a switch statement. Ideally
3561 we'd have a look at the eh region and estimate the number of
3562 edges involved. */
3563 return 10;
3564
3565 case GIMPLE_BIND:
3566 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3567
3568 case GIMPLE_EH_FILTER:
3569 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3570
3571 case GIMPLE_CATCH:
3572 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3573
3574 case GIMPLE_TRY:
3575 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3576 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3577
3578 /* OpenMP directives are generally very expensive. */
3579
3580 case GIMPLE_OMP_RETURN:
3581 case GIMPLE_OMP_SECTIONS_SWITCH:
3582 case GIMPLE_OMP_ATOMIC_STORE:
3583 case GIMPLE_OMP_CONTINUE:
3584 /* ...except these, which are cheap. */
3585 return 0;
3586
3587 case GIMPLE_OMP_ATOMIC_LOAD:
3588 return weights->omp_cost;
3589
3590 case GIMPLE_OMP_FOR:
3591 return (weights->omp_cost
3592 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3593 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3594
3595 case GIMPLE_OMP_PARALLEL:
3596 case GIMPLE_OMP_TASK:
3597 case GIMPLE_OMP_CRITICAL:
3598 case GIMPLE_OMP_MASTER:
3599 case GIMPLE_OMP_ORDERED:
3600 case GIMPLE_OMP_SECTION:
3601 case GIMPLE_OMP_SECTIONS:
3602 case GIMPLE_OMP_SINGLE:
3603 return (weights->omp_cost
3604 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3605
3606 default:
3607 gcc_unreachable ();
3608 }
3609
3610 return cost;
3611 }
3612
3613 /* Estimate number of instructions that will be created by expanding
3614 function FNDECL. WEIGHTS contains weights attributed to various
3615 constructs. */
3616
3617 int
3618 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3619 {
3620 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3621 gimple_stmt_iterator bsi;
3622 basic_block bb;
3623 int n = 0;
3624
3625 gcc_assert (my_function && my_function->cfg);
3626 FOR_EACH_BB_FN (bb, my_function)
3627 {
3628 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3629 n += estimate_num_insns (gsi_stmt (bsi), weights);
3630 }
3631
3632 return n;
3633 }
3634
3635
3636 /* Initializes weights used by estimate_num_insns. */
3637
3638 void
3639 init_inline_once (void)
3640 {
3641 eni_size_weights.call_cost = 1;
3642 eni_size_weights.target_builtin_call_cost = 1;
3643 eni_size_weights.div_mod_cost = 1;
3644 eni_size_weights.omp_cost = 40;
3645 eni_size_weights.time_based = false;
3646
3647 /* Estimating the time for a call is difficult, since we have no idea what the
3648 called function does. In the current uses of eni_time_weights,
3649 underestimating the cost does less harm than overestimating it, so
3650 we choose a rather small value here. */
3651 eni_time_weights.call_cost = 10;
3652 eni_time_weights.target_builtin_call_cost = 10;
3653 eni_time_weights.div_mod_cost = 10;
3654 eni_time_weights.omp_cost = 40;
3655 eni_time_weights.time_based = true;
3656 }
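
/* For illustration, a client would typically consume these weights roughly
   as follows (a minimal sketch; FN stands for any FUNCTION_DECL whose body
   already has a CFG, and init_inline_once is assumed to have run once at
   startup):

     int size_estimate = estimate_num_insns_fn (fn, &eni_size_weights);
     int time_estimate = estimate_num_insns_fn (fn, &eni_time_weights);

   With the values chosen above, a call contributes 1 to the size estimate
   but 10 to the time estimate, so calls look cheap to emit yet relatively
   expensive to execute.  */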
3657
3658 /* Estimate the number of instructions in a gimple_seq. */
3659
3660 int
3661 count_insns_seq (gimple_seq seq, eni_weights *weights)
3662 {
3663 gimple_stmt_iterator gsi;
3664 int n = 0;
3665 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3666 n += estimate_num_insns (gsi_stmt (gsi), weights);
3667
3668 return n;
3669 }
3670
3671
3672 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3673
3674 static void
3675 prepend_lexical_block (tree current_block, tree new_block)
3676 {
3677 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3678 BLOCK_SUBBLOCKS (current_block) = new_block;
3679 BLOCK_SUPERCONTEXT (new_block) = current_block;
3680 }
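
/* For example, if CURRENT_BLOCK already has subblocks B1 -> B2, then

     prepend_lexical_block (current_block, nb);

   leaves the subblock chain as NB -> B1 -> B2 and points NB's
   BLOCK_SUPERCONTEXT back at CURRENT_BLOCK.  */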
3681
3682 /* Fetch the callee declaration from the call graph edge going from NODE and
3683 associated with the call statement STMT. Return NULL_TREE if not found. */
3684 static tree
3685 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3686 {
3687 struct cgraph_edge *cs;
3688
3689 cs = cgraph_edge (node, stmt);
3690 if (cs && !cs->indirect_unknown_callee)
3691 return cs->callee->decl;
3692
3693 return NULL_TREE;
3694 }
3695
3696 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3697
3698 static bool
3699 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3700 {
3701 tree use_retvar;
3702 tree fn;
3703 struct pointer_map_t *st, *dst;
3704 tree return_slot;
3705 tree modify_dest;
3706 location_t saved_location;
3707 struct cgraph_edge *cg_edge;
3708 cgraph_inline_failed_t reason;
3709 basic_block return_block;
3710 edge e;
3711 gimple_stmt_iterator gsi, stmt_gsi;
3712 bool successfully_inlined = FALSE;
3713 bool purge_dead_abnormal_edges;
3714 tree t_step;
3715 tree var;
3716
3717 /* Set input_location here so we get the right instantiation context
3718 if we call instantiate_decl from inlinable_function_p. */
3719 saved_location = input_location;
3720 if (gimple_has_location (stmt))
3721 input_location = gimple_location (stmt);
3722
3723 /* From here on, we're only interested in CALL_EXPRs. */
3724 if (gimple_code (stmt) != GIMPLE_CALL)
3725 goto egress;
3726
3727 /* First, see if we can figure out what function is being called.
3728 If we cannot, then there is no hope of inlining the function. */
3729 fn = gimple_call_fndecl (stmt);
3730 if (!fn)
3731 {
3732 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3733 if (!fn)
3734 goto egress;
3735 }
3736
3737 /* Turn forward declarations into real ones. */
3738 fn = cgraph_node (fn)->decl;
3739
3740 /* If FN is a declaration of a function in a nested scope that was
3741 globally declared inline, we don't set its DECL_INITIAL.
3742 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3743 C++ front-end uses it for cdtors to refer to their internal
3744 declarations, which are not real functions. Fortunately those
3745 don't have trees to be saved, so we can tell by checking their
3746 gimple_body. */
3747 if (!DECL_INITIAL (fn)
3748 && DECL_ABSTRACT_ORIGIN (fn)
3749 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3750 fn = DECL_ABSTRACT_ORIGIN (fn);
3751
3752 /* Objective C and Fortran still call tree_rest_of_compilation directly.
3753 Kill this check once this is fixed. */
3754 if (!id->dst_node->analyzed)
3755 goto egress;
3756
3757 cg_edge = cgraph_edge (id->dst_node, stmt);
3758
3759 /* First check that inlining isn't simply forbidden in this case. */
3760 if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
3761 goto egress;
3762
3763 /* Don't try to inline functions that are not well-suited to inlining. */
3764 if (!cgraph_inline_p (cg_edge, &reason))
3765 {
3766 /* If this call was originally indirect, we do not want to emit any
3767 inlining related warnings or sorry messages because there are no
3768 guarantees regarding those. */
3769 if (cg_edge->indirect_inlining_edge)
3770 goto egress;
3771
3772 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3773 /* Avoid warnings during early inline pass. */
3774 && cgraph_global_info_ready)
3775 {
3776 sorry ("inlining failed in call to %q+F: %s", fn,
3777 _(cgraph_inline_failed_string (reason)));
3778 sorry ("called from here");
3779 }
3780 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3781 && !DECL_IN_SYSTEM_HEADER (fn)
3782 && reason != CIF_UNSPECIFIED
3783 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3784 /* Avoid warnings during early inline pass. */
3785 && cgraph_global_info_ready)
3786 {
3787 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3788 fn, _(cgraph_inline_failed_string (reason)));
3789 warning (OPT_Winline, "called from here");
3790 }
3791 goto egress;
3792 }
3793 fn = cg_edge->callee->decl;
3794
3795 #ifdef ENABLE_CHECKING
3796 if (cg_edge->callee->decl != id->dst_node->decl)
3797 verify_cgraph_node (cg_edge->callee);
3798 #endif
3799
3800 /* We will be inlining this callee. */
3801 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3802
3803 /* Update the caller's EH personality. */
3804 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3805 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3806 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3807
3808 /* Split the block holding the GIMPLE_CALL. */
3809 e = split_block (bb, stmt);
3810 bb = e->src;
3811 return_block = e->dest;
3812 remove_edge (e);
3813
3814 /* split_block splits after the statement; work around this by
3815 moving the call into the second block manually. Not pretty,
3816 but seems easier than doing the CFG manipulation by hand
3817 when the GIMPLE_CALL is the last statement of BB. */
3818 stmt_gsi = gsi_last_bb (bb);
3819 gsi_remove (&stmt_gsi, false);
3820
3821 /* If the GIMPLE_CALL was the last statement of BB, it may have
3822 been the source of abnormal edges. In this case, schedule
3823 the removal of dead abnormal edges. */
3824 gsi = gsi_start_bb (return_block);
3825 if (gsi_end_p (gsi))
3826 {
3827 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3828 purge_dead_abnormal_edges = true;
3829 }
3830 else
3831 {
3832 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3833 purge_dead_abnormal_edges = false;
3834 }
3835
3836 stmt_gsi = gsi_start_bb (return_block);
3837
3838 /* Build a block containing code to initialize the arguments, the
3839 actual inline expansion of the body, and a label for the return
3840 statements within the function to jump to. The type of the
3841 statement expression is the return type of the function call. */
3842 id->block = make_node (BLOCK);
3843 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3844 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3845 prepend_lexical_block (gimple_block (stmt), id->block);
3846
3847 /* Local declarations will be replaced by their equivalents in this
3848 map. */
3849 st = id->decl_map;
3850 id->decl_map = pointer_map_create ();
3851 dst = id->debug_map;
3852 id->debug_map = NULL;
3853
3854 /* Record the function we are about to inline. */
3855 id->src_fn = fn;
3856 id->src_node = cg_edge->callee;
3857 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3858 id->gimple_call = stmt;
3859
3860 gcc_assert (!id->src_cfun->after_inlining);
3861
3862 id->entry_bb = bb;
3863 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3864 {
3865 gimple_stmt_iterator si = gsi_last_bb (bb);
3866 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3867 NOT_TAKEN),
3868 GSI_NEW_STMT);
3869 }
3870 initialize_inlined_parameters (id, stmt, fn, bb);
3871
3872 if (DECL_INITIAL (fn))
3873 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3874
3875 /* Return statements in the function body will be replaced by jumps
3876 to the RET_LABEL. */
3877 gcc_assert (DECL_INITIAL (fn));
3878 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3879
3880 /* Find the LHS to which the result of this call is assigned. */
3881 return_slot = NULL;
3882 if (gimple_call_lhs (stmt))
3883 {
3884 modify_dest = gimple_call_lhs (stmt);
3885
3886 /* The function which we are inlining might not return a value,
3887 in which case the right diagnostic is a warning that the
3888 function does not return a value. In that case the optimizers
3889 will see that the variable to which the value is assigned was
3890 not initialized; we do not want to also warn about that
3891 uninitialized variable. */
3892 if (DECL_P (modify_dest))
3893 TREE_NO_WARNING (modify_dest) = 1;
3894
3895 if (gimple_call_return_slot_opt_p (stmt))
3896 {
3897 return_slot = modify_dest;
3898 modify_dest = NULL;
3899 }
3900 }
3901 else
3902 modify_dest = NULL;
3903
3904 /* If we are inlining a call to the C++ operator new, we don't want
3905 to use type based alias analysis on the return value. Otherwise
3906 we may get confused if the compiler sees that the inlined new
3907 function returns a pointer which was just deleted. See bug
3908 33407. */
3909 if (DECL_IS_OPERATOR_NEW (fn))
3910 {
3911 return_slot = NULL;
3912 modify_dest = NULL;
3913 }
3914
3915 /* Declare the return variable for the function. */
3916 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3917
3918 /* Add local vars in this inlined callee to caller. */
3919 t_step = id->src_cfun->local_decls;
3920 for (; t_step; t_step = TREE_CHAIN (t_step))
3921 {
3922 var = TREE_VALUE (t_step);
3923 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3924 {
3925 if (var_ann (var) && add_referenced_var (var))
3926 cfun->local_decls = tree_cons (NULL_TREE, var,
3927 cfun->local_decls);
3928 }
3929 else if (!can_be_nonlocal (var, id))
3930 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3931 cfun->local_decls);
3932 }
3933
3934 if (dump_file && (dump_flags & TDF_DETAILS))
3935 {
3936 fprintf (dump_file, "Inlining ");
3937 print_generic_expr (dump_file, id->src_fn, 0);
3938 fprintf (dump_file, " to ");
3939 print_generic_expr (dump_file, id->dst_fn, 0);
3940 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3941 }
3942
3943 /* This is it. Duplicate the callee body. Assume callee is
3944 pre-gimplified. Note that we must not alter the caller
3945 function in any way before this point, as this CALL_EXPR may be
3946 a self-referential call; if we're calling ourselves, we need to
3947 duplicate our body before altering anything. */
3948 copy_body (id, bb->count,
3949 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3950 bb, return_block, NULL, NULL);
3951
3952 /* Reset the escaped solution. */
3953 if (cfun->gimple_df)
3954 pt_solution_reset (&cfun->gimple_df->escaped);
3955
3956 /* Clean up. */
3957 if (id->debug_map)
3958 {
3959 pointer_map_destroy (id->debug_map);
3960 id->debug_map = dst;
3961 }
3962 pointer_map_destroy (id->decl_map);
3963 id->decl_map = st;
3964
3965 /* Unlink the call's virtual operands before replacing it. */
3966 unlink_stmt_vdef (stmt);
3967
3968 /* If the inlined function returns a result that we care about,
3969 substitute the GIMPLE_CALL with an assignment of the return
3970 variable to the LHS of the call. That is, if STMT was
3971 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3972 if (use_retvar && gimple_call_lhs (stmt))
3973 {
3974 gimple old_stmt = stmt;
3975 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3976 gsi_replace (&stmt_gsi, stmt, false);
3977 if (gimple_in_ssa_p (cfun))
3978 mark_symbols_for_renaming (stmt);
3979 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3980 }
3981 else
3982 {
3983 /* Handle the case of inlining a function with no return
3984 statement, which causes the return value to become undefined. */
3985 if (gimple_call_lhs (stmt)
3986 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3987 {
3988 tree name = gimple_call_lhs (stmt);
3989 tree var = SSA_NAME_VAR (name);
3990 tree def = gimple_default_def (cfun, var);
3991
3992 if (def)
3993 {
3994 /* If the variable is used undefined, make this name
3995 undefined via a move. */
3996 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3997 gsi_replace (&stmt_gsi, stmt, true);
3998 }
3999 else
4000 {
4001 /* Otherwise make this variable undefined. */
4002 gsi_remove (&stmt_gsi, true);
4003 set_default_def (var, name);
4004 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4005 }
4006 }
4007 else
4008 gsi_remove (&stmt_gsi, true);
4009 }
4010
4011 if (purge_dead_abnormal_edges)
4012 gimple_purge_dead_abnormal_call_edges (return_block);
4013
4014 /* If the value of the new expression is ignored, that's OK. We
4015 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4016 the equivalent inlined version either. */
4017 if (is_gimple_assign (stmt))
4018 {
4019 gcc_assert (gimple_assign_single_p (stmt)
4020 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4021 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4022 }
4023
4024 /* Output the inlining info for this abstract function, since it has been
4025 inlined. If we don't do this now, we can lose the information about the
4026 variables in the function when the blocks get blown away as soon as we
4027 remove the cgraph node. */
4028 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4029
4030 /* Update callgraph if needed. */
4031 cgraph_remove_node (cg_edge->callee);
4032
4033 id->block = NULL_TREE;
4034 successfully_inlined = TRUE;
4035
4036 egress:
4037 input_location = saved_location;
4038 return successfully_inlined;
4039 }
4040
4041 /* Expand call statements reachable from basic block BB.
4042 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4043 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately
4044 we cannot use that function here because we need a pointer
4045 to the CALL_EXPR, not the tree itself. */
4046
4047 static bool
4048 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4049 {
4050 gimple_stmt_iterator gsi;
4051
4052 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4053 {
4054 gimple stmt = gsi_stmt (gsi);
4055
4056 if (is_gimple_call (stmt)
4057 && expand_call_inline (bb, stmt, id))
4058 return true;
4059 }
4060
4061 return false;
4062 }
4063
4064
4065 /* Walk all basic blocks created after FIRST and try to fold every statement
4066 in the STATEMENTS pointer set. */
4067
4068 static void
4069 fold_marked_statements (int first, struct pointer_set_t *statements)
4070 {
4071 for (; first < n_basic_blocks; first++)
4072 if (BASIC_BLOCK (first))
4073 {
4074 gimple_stmt_iterator gsi;
4075
4076 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4077 !gsi_end_p (gsi);
4078 gsi_next (&gsi))
4079 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4080 {
4081 gimple old_stmt = gsi_stmt (gsi);
4082 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4083
4084 if (old_decl && DECL_BUILT_IN (old_decl))
4085 {
4086 /* Folding builtins can create multiple instructions;
4087 we need to look at all of them. */
4088 gimple_stmt_iterator i2 = gsi;
4089 gsi_prev (&i2);
4090 if (fold_stmt (&gsi))
4091 {
4092 gimple new_stmt;
4093 if (gsi_end_p (i2))
4094 i2 = gsi_start_bb (BASIC_BLOCK (first));
4095 else
4096 gsi_next (&i2);
4097 while (1)
4098 {
4099 new_stmt = gsi_stmt (i2);
4100 update_stmt (new_stmt);
4101 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4102 new_stmt);
4103
4104 if (new_stmt == gsi_stmt (gsi))
4105 {
4106 /* It is okay to check only the very last
4107 of these statements. If it is a throwing
4108 statement nothing will change. If it isn't,
4109 this can remove EH edges. The only case not
4110 handled correctly would be some intermediate
4111 statement throwing while the last one does
4112 not; that would require splitting the block,
4113 which we can't do here and we'd lose anyway.
4114 And as builtins probably never throw, this
4115 is all moot anyway. */
4116 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4117 new_stmt))
4118 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4119 break;
4120 }
4121 gsi_next (&i2);
4122 }
4123 }
4124 }
4125 else if (fold_stmt (&gsi))
4126 {
4127 /* Re-read the statement from GSI as fold_stmt() may
4128 have changed it. */
4129 gimple new_stmt = gsi_stmt (gsi);
4130 update_stmt (new_stmt);
4131
4132 if (is_gimple_call (old_stmt)
4133 || is_gimple_call (new_stmt))
4134 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4135 new_stmt);
4136
4137 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4138 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4139 }
4140 }
4141 }
4142 }
4143
4144 /* Return true if BB has at least one abnormal outgoing edge. */
4145
4146 static inline bool
4147 has_abnormal_outgoing_edge_p (basic_block bb)
4148 {
4149 edge e;
4150 edge_iterator ei;
4151
4152 FOR_EACH_EDGE (e, ei, bb->succs)
4153 if (e->flags & EDGE_ABNORMAL)
4154 return true;
4155
4156 return false;
4157 }
4158
4159 /* Expand calls to inline functions in the body of FN. */
4160
4161 unsigned int
4162 optimize_inline_calls (tree fn)
4163 {
4164 copy_body_data id;
4165 basic_block bb;
4166 int last = n_basic_blocks;
4167 struct gimplify_ctx gctx;
4168
4169 /* There is no point in performing inlining if errors have already
4170 occurred -- and we might crash if we try to inline invalid
4171 code. */
4172 if (seen_error ())
4173 return 0;
4174
4175 /* Clear out ID. */
4176 memset (&id, 0, sizeof (id));
4177
4178 id.src_node = id.dst_node = cgraph_node (fn);
4179 id.dst_fn = fn;
4180 /* Or any functions that aren't finished yet. */
4181 if (current_function_decl)
4182 id.dst_fn = current_function_decl;
4183
4184 id.copy_decl = copy_decl_maybe_to_var;
4185 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4186 id.transform_new_cfg = false;
4187 id.transform_return_to_modify = true;
4188 id.transform_lang_insert_block = NULL;
4189 id.statements_to_fold = pointer_set_create ();
4190
4191 push_gimplify_context (&gctx);
4192
4193 /* We make no attempts to keep dominance info up-to-date. */
4194 free_dominance_info (CDI_DOMINATORS);
4195 free_dominance_info (CDI_POST_DOMINATORS);
4196
4197 /* Register specific gimple functions. */
4198 gimple_register_cfg_hooks ();
4199
4200 /* Reach the trees by walking over the CFG, and note the
4201 enclosing basic-blocks in the call edges. */
4202 /* We walk the blocks going forward, because inlined function bodies
4203 will split id->current_basic_block, and the new blocks will
4204 follow it; we'll trudge through them, processing their CALL_EXPRs
4205 along the way. */
4206 FOR_EACH_BB (bb)
4207 gimple_expand_calls_inline (bb, &id);
4208
4209 pop_gimplify_context (NULL);
4210
4211 #ifdef ENABLE_CHECKING
4212 {
4213 struct cgraph_edge *e;
4214
4215 verify_cgraph_node (id.dst_node);
4216
4217 /* Double check that we inlined everything we are supposed to inline. */
4218 for (e = id.dst_node->callees; e; e = e->next_callee)
4219 gcc_assert (e->inline_failed);
4220 }
4221 #endif
4222
4223 /* Fold the statements before compacting/renumbering the basic blocks. */
4224 fold_marked_statements (last, id.statements_to_fold);
4225 pointer_set_destroy (id.statements_to_fold);
4226
4227 gcc_assert (!id.debug_stmts);
4228
4229 /* Renumber the (code) basic_blocks consecutively. */
4230 compact_blocks ();
4231 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4232 number_blocks (fn);
4233
4234 fold_cond_expr_cond ();
4235 delete_unreachable_blocks_update_callgraph (&id);
4236 #ifdef ENABLE_CHECKING
4237 verify_cgraph_node (id.dst_node);
4238 #endif
4239
4240 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4241 not possible yet; the IPA passes might make various functions not
4242 throw and they don't care to proactively update local EH info. This is
4243 done later in the fixup_cfg pass, which also executes the verification. */
4244 return (TODO_update_ssa
4245 | TODO_cleanup_cfg
4246 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4247 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4248 }
4249
4250 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4251
4252 tree
4253 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4254 {
4255 enum tree_code code = TREE_CODE (*tp);
4256 enum tree_code_class cl = TREE_CODE_CLASS (code);
4257
4258 /* We make copies of most nodes. */
4259 if (IS_EXPR_CODE_CLASS (cl)
4260 || code == TREE_LIST
4261 || code == TREE_VEC
4262 || code == TYPE_DECL
4263 || code == OMP_CLAUSE)
4264 {
4265 /* Because the chain gets clobbered when we make a copy, we save it
4266 here. */
4267 tree chain = NULL_TREE, new_tree;
4268
4269 chain = TREE_CHAIN (*tp);
4270
4271 /* Copy the node. */
4272 new_tree = copy_node (*tp);
4273
4274 /* Propagate mudflap marked-ness. */
4275 if (flag_mudflap && mf_marked_p (*tp))
4276 mf_mark (new_tree);
4277
4278 *tp = new_tree;
4279
4280 /* Now, restore the chain, if appropriate. That will cause
4281 walk_tree to walk into the chain as well. */
4282 if (code == PARM_DECL
4283 || code == TREE_LIST
4284 || code == OMP_CLAUSE)
4285 TREE_CHAIN (*tp) = chain;
4286
4287 /* For now, we don't update BLOCKs when we make copies. So, we
4288 have to nullify all BIND_EXPRs. */
4289 if (TREE_CODE (*tp) == BIND_EXPR)
4290 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4291 }
4292 else if (code == CONSTRUCTOR)
4293 {
4294 /* CONSTRUCTOR nodes need special handling because
4295 we need to duplicate the vector of elements. */
4296 tree new_tree;
4297
4298 new_tree = copy_node (*tp);
4299
4300 /* Propagate mudflap marked-ness. */
4301 if (flag_mudflap && mf_marked_p (*tp))
4302 mf_mark (new_tree);
4303
4304 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4305 CONSTRUCTOR_ELTS (*tp));
4306 *tp = new_tree;
4307 }
4308 else if (TREE_CODE_CLASS (code) == tcc_type)
4309 *walk_subtrees = 0;
4310 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4311 *walk_subtrees = 0;
4312 else if (TREE_CODE_CLASS (code) == tcc_constant)
4313 *walk_subtrees = 0;
4314 else
4315 gcc_assert (code != STATEMENT_LIST);
4316 return NULL_TREE;
4317 }
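
/* A typical use of copy_tree_r is as the callback of walk_tree to deep-copy
   a GENERIC expression (a minimal sketch; EXPR is any expression tree):

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   Afterwards EXPR refers to freshly copied expression nodes, while
   declarations, types and constants remain shared, since the callback above
   does not descend into them.  */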
4318
4319 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4320 information indicating to what new SAVE_EXPR this one should be mapped,
4321 use that one. Otherwise, create a new node and enter it in ST. FN is
4322 the function into which the copy will be placed. */
4323
4324 static void
4325 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4326 {
4327 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4328 tree *n;
4329 tree t;
4330
4331 /* See if we already encountered this SAVE_EXPR. */
4332 n = (tree *) pointer_map_contains (st, *tp);
4333
4334 /* If we didn't already remap this SAVE_EXPR, do so now. */
4335 if (!n)
4336 {
4337 t = copy_node (*tp);
4338
4339 /* Remember this SAVE_EXPR. */
4340 *pointer_map_insert (st, *tp) = t;
4341 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4342 *pointer_map_insert (st, t) = t;
4343 }
4344 else
4345 {
4346 /* We've already walked into this SAVE_EXPR; don't do it again. */
4347 *walk_subtrees = 0;
4348 t = *n;
4349 }
4350
4351 /* Replace this SAVE_EXPR with the copy. */
4352 *tp = t;
4353 }
4354
4355 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4356 copies the declaration and enters it in the decl map in DATA (which is
4357 really a `copy_body_data *'). */
4358
4359 static tree
4360 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4361 void *data)
4362 {
4363 copy_body_data *id = (copy_body_data *) data;
4364
4365 /* Don't walk into types. */
4366 if (TYPE_P (*tp))
4367 *walk_subtrees = 0;
4368
4369 else if (TREE_CODE (*tp) == LABEL_EXPR)
4370 {
4371 tree decl = TREE_OPERAND (*tp, 0);
4372
4373 /* Copy the decl and remember the copy. */
4374 insert_decl_map (id, decl, id->copy_decl (decl, id));
4375 }
4376
4377 return NULL_TREE;
4378 }
4379
4380 /* Perform any modifications to EXPR required when it is unsaved. Does
4381 not recurse into EXPR's subtrees. */
4382
4383 static void
4384 unsave_expr_1 (tree expr)
4385 {
4386 switch (TREE_CODE (expr))
4387 {
4388 case TARGET_EXPR:
4389 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4390 It's OK for this to happen if it was part of a subtree that
4391 isn't immediately expanded, such as operand 2 of another
4392 TARGET_EXPR. */
4393 if (TREE_OPERAND (expr, 1))
4394 break;
4395
4396 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4397 TREE_OPERAND (expr, 3) = NULL_TREE;
4398 break;
4399
4400 default:
4401 break;
4402 }
4403 }
4404
4405 /* Called via walk_tree when an expression is unsaved. Using the
4406 decl map of the `copy_body_data' in DATA, remaps all local
4407 declarations to appropriate replacements. */
4408
4409 static tree
4410 unsave_r (tree *tp, int *walk_subtrees, void *data)
4411 {
4412 copy_body_data *id = (copy_body_data *) data;
4413 struct pointer_map_t *st = id->decl_map;
4414 tree *n;
4415
4416 /* Only a local declaration (variable or label). */
4417 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4418 || TREE_CODE (*tp) == LABEL_DECL)
4419 {
4420 /* Lookup the declaration. */
4421 n = (tree *) pointer_map_contains (st, *tp);
4422
4423 /* If it's there, remap it. */
4424 if (n)
4425 *tp = *n;
4426 }
4427
4428 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4429 gcc_unreachable ();
4430 else if (TREE_CODE (*tp) == BIND_EXPR)
4431 copy_bind_expr (tp, walk_subtrees, id);
4432 else if (TREE_CODE (*tp) == SAVE_EXPR
4433 || TREE_CODE (*tp) == TARGET_EXPR)
4434 remap_save_expr (tp, st, walk_subtrees);
4435 else
4436 {
4437 copy_tree_r (tp, walk_subtrees, NULL);
4438
4439 /* Do whatever unsaving is required. */
4440 unsave_expr_1 (*tp);
4441 }
4442
4443 /* Keep iterating. */
4444 return NULL_TREE;
4445 }
4446
4447 /* Copies everything in EXPR and replaces variables, labels
4448 and SAVE_EXPRs local to EXPR. */
4449
4450 tree
4451 unsave_expr_now (tree expr)
4452 {
4453 copy_body_data id;
4454
4455 /* There's nothing to do for NULL_TREE. */
4456 if (expr == 0)
4457 return expr;
4458
4459 /* Set up ID. */
4460 memset (&id, 0, sizeof (id));
4461 id.src_fn = current_function_decl;
4462 id.dst_fn = current_function_decl;
4463 id.decl_map = pointer_map_create ();
4464 id.debug_map = NULL;
4465
4466 id.copy_decl = copy_decl_no_change;
4467 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4468 id.transform_new_cfg = false;
4469 id.transform_return_to_modify = false;
4470 id.transform_lang_insert_block = NULL;
4471
4472 /* Walk the tree once to find local labels. */
4473 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4474
4475 /* Walk the tree again, copying, remapping, and unsaving. */
4476 walk_tree (&expr, unsave_r, &id, NULL);
4477
4478 /* Clean up. */
4479 pointer_map_destroy (id.decl_map);
4480 if (id.debug_map)
4481 pointer_map_destroy (id.debug_map);
4482
4483 return expr;
4484 }
4485
4486 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4487 label, copies the declaration and enters it in the decl map in DATA (which
4488 is really a 'copy_body_data *'). */
4489
4490 static tree
4491 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4492 bool *handled_ops_p ATTRIBUTE_UNUSED,
4493 struct walk_stmt_info *wi)
4494 {
4495 copy_body_data *id = (copy_body_data *) wi->info;
4496 gimple stmt = gsi_stmt (*gsip);
4497
4498 if (gimple_code (stmt) == GIMPLE_LABEL)
4499 {
4500 tree decl = gimple_label_label (stmt);
4501
4502 /* Copy the decl and remember the copy. */
4503 insert_decl_map (id, decl, id->copy_decl (decl, id));
4504 }
4505
4506 return NULL_TREE;
4507 }
4508
4509
4510 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4511 Using the decl map of the `copy_body_data' carried in WI->info,
4512 remaps all local declarations to appropriate replacements in gimple
4513 operands. */
4514
4515 static tree
4516 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4517 {
4518 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4519 copy_body_data *id = (copy_body_data *) wi->info;
4520 struct pointer_map_t *st = id->decl_map;
4521 tree *n;
4522 tree expr = *tp;
4523
4524 /* Only a local declaration (variable or label). */
4525 if ((TREE_CODE (expr) == VAR_DECL
4526 && !TREE_STATIC (expr))
4527 || TREE_CODE (expr) == LABEL_DECL)
4528 {
4529 /* Lookup the declaration. */
4530 n = (tree *) pointer_map_contains (st, expr);
4531
4532 /* If it's there, remap it. */
4533 if (n)
4534 *tp = *n;
4535 *walk_subtrees = 0;
4536 }
4537 else if (TREE_CODE (expr) == STATEMENT_LIST
4538 || TREE_CODE (expr) == BIND_EXPR
4539 || TREE_CODE (expr) == SAVE_EXPR)
4540 gcc_unreachable ();
4541 else if (TREE_CODE (expr) == TARGET_EXPR)
4542 {
4543 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4544 It's OK for this to happen if it was part of a subtree that
4545 isn't immediately expanded, such as operand 2 of another
4546 TARGET_EXPR. */
4547 if (!TREE_OPERAND (expr, 1))
4548 {
4549 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4550 TREE_OPERAND (expr, 3) = NULL_TREE;
4551 }
4552 }
4553
4554 /* Keep iterating. */
4555 return NULL_TREE;
4556 }
4557
4558
4559 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4560 Using the decl map of the `copy_body_data' carried in WI->info,
4561 remaps all local declarations to appropriate replacements in gimple
4562 statements. */
4563
4564 static tree
4565 replace_locals_stmt (gimple_stmt_iterator *gsip,
4566 bool *handled_ops_p ATTRIBUTE_UNUSED,
4567 struct walk_stmt_info *wi)
4568 {
4569 copy_body_data *id = (copy_body_data *) wi->info;
4570 gimple stmt = gsi_stmt (*gsip);
4571
4572 if (gimple_code (stmt) == GIMPLE_BIND)
4573 {
4574 tree block = gimple_bind_block (stmt);
4575
4576 if (block)
4577 {
4578 remap_block (&block, id);
4579 gimple_bind_set_block (stmt, block);
4580 }
4581
4582 /* This will remap a lot of the same decls again, but this should be
4583 harmless. */
4584 if (gimple_bind_vars (stmt))
4585 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4586 }
4587
4588 /* Keep iterating. */
4589 return NULL_TREE;
4590 }
4591
4592
4593 /* Copies everything in SEQ and replaces variables and labels local to
4594 current_function_decl. */
4595
4596 gimple_seq
4597 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4598 {
4599 copy_body_data id;
4600 struct walk_stmt_info wi;
4601 struct pointer_set_t *visited;
4602 gimple_seq copy;
4603
4604 /* There's nothing to do for an empty sequence. */
4605 if (seq == NULL)
4606 return seq;
4607
4608 /* Set up ID. */
4609 memset (&id, 0, sizeof (id));
4610 id.src_fn = current_function_decl;
4611 id.dst_fn = current_function_decl;
4612 id.decl_map = pointer_map_create ();
4613 id.debug_map = NULL;
4614
4615 id.copy_decl = copy_decl_no_change;
4616 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4617 id.transform_new_cfg = false;
4618 id.transform_return_to_modify = false;
4619 id.transform_lang_insert_block = NULL;
4620
4621 /* Walk the sequence once to find local labels. */
4622 memset (&wi, 0, sizeof (wi));
4623 visited = pointer_set_create ();
4624 wi.info = &id;
4625 wi.pset = visited;
4626 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4627 pointer_set_destroy (visited);
4628
4629 copy = gimple_seq_copy (seq);
4630
4631 /* Walk the copy, remapping decls. */
4632 memset (&wi, 0, sizeof (wi));
4633 wi.info = &id;
4634 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4635
4636 /* Clean up. */
4637 pointer_map_destroy (id.decl_map);
4638 if (id.debug_map)
4639 pointer_map_destroy (id.debug_map);
4640
4641 return copy;
4642 }
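
/* For instance, a caller that needs a second, independent copy of a
   statement sequence within the current function could use this as
   (a sketch; SEQ is any gimple_seq belonging to current_function_decl):

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   The copy gets its own labels and its own local variables, so both
   sequences can be emitted in the same function body without clashes.  */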
4643
4644
4645 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4646
4647 static tree
4648 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4649 {
4650 if (*tp == data)
4651 return (tree) data;
4652 else
4653 return NULL;
4654 }
4655
4656 DEBUG_FUNCTION bool
4657 debug_find_tree (tree top, tree search)
4658 {
4659 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4660 }
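
/* A typical interactive use, with TOP and SEARCH being tree values visible
   in the current gdb frame, would be:

     (gdb) call debug_find_tree (top, search)

   which evaluates to true when SEARCH occurs somewhere underneath TOP.  */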
4661
4662
4663 /* Declare the variables created by the inliner. Add all the variables in
4664 VARS to BLOCK. */
4665
4666 static void
4667 declare_inline_vars (tree block, tree vars)
4668 {
4669 tree t;
4670 for (t = vars; t; t = TREE_CHAIN (t))
4671 {
4672 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4673 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4674 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4675 }
4676
4677 if (block)
4678 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4679 }
4680
4681 /* Finish the copy COPY of declaration DECL. DECL originally was in
4682 ID->src_fn, but the copy will live in ID->dst_fn; set up the
4683 attributes and context of COPY accordingly. */
4684
4685 static tree
4686 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4687 {
4688 /* Don't generate debug information for the copy if we wouldn't have
4689 generated it for the original either. */
4690 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4691 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4692
4693 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4694 declaration inspired this copy. */
4695 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4696
4697 /* The new variable/label has no RTL, yet. */
4698 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4699 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4700 SET_DECL_RTL (copy, 0);
4701
4702 /* These args would always appear unused, if not for this. */
4703 TREE_USED (copy) = 1;
4704
4705 /* Set the context for the new declaration. */
4706 if (!DECL_CONTEXT (decl))
4707 /* Globals stay global. */
4708 ;
4709 else if (DECL_CONTEXT (decl) != id->src_fn)
4710 /* Things that weren't in the scope of the function we're inlining
4711 from aren't in the scope we're inlining to, either. */
4712 ;
4713 else if (TREE_STATIC (decl))
4714 /* Function-scoped static variables should stay in the original
4715 function. */
4716 ;
4717 else
4718 /* Ordinary automatic local variables are now in the scope of the
4719 new function. */
4720 DECL_CONTEXT (copy) = id->dst_fn;
4721
4722 return copy;
4723 }
4724
4725 static tree
4726 copy_decl_to_var (tree decl, copy_body_data *id)
4727 {
4728 tree copy, type;
4729
4730 gcc_assert (TREE_CODE (decl) == PARM_DECL
4731 || TREE_CODE (decl) == RESULT_DECL);
4732
4733 type = TREE_TYPE (decl);
4734
4735 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4736 VAR_DECL, DECL_NAME (decl), type);
4737 if (DECL_PT_UID_SET_P (decl))
4738 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4739 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4740 TREE_READONLY (copy) = TREE_READONLY (decl);
4741 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4742 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4743
4744 return copy_decl_for_dup_finish (id, decl, copy);
4745 }
4746
4747 /* Like copy_decl_to_var, but create a return slot object instead of a
4748 pointer variable for return by invisible reference. */
4749
4750 static tree
4751 copy_result_decl_to_var (tree decl, copy_body_data *id)
4752 {
4753 tree copy, type;
4754
4755 gcc_assert (TREE_CODE (decl) == PARM_DECL
4756 || TREE_CODE (decl) == RESULT_DECL);
4757
4758 type = TREE_TYPE (decl);
4759 if (DECL_BY_REFERENCE (decl))
4760 type = TREE_TYPE (type);
4761
4762 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4763 VAR_DECL, DECL_NAME (decl), type);
4764 if (DECL_PT_UID_SET_P (decl))
4765 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4766 TREE_READONLY (copy) = TREE_READONLY (decl);
4767 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4768 if (!DECL_BY_REFERENCE (decl))
4769 {
4770 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4771 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4772 }
4773
4774 return copy_decl_for_dup_finish (id, decl, copy);
4775 }
4776
4777 tree
4778 copy_decl_no_change (tree decl, copy_body_data *id)
4779 {
4780 tree copy;
4781
4782 copy = copy_node (decl);
4783
4784 /* The COPY is not abstract; it will be generated in DST_FN. */
4785 DECL_ABSTRACT (copy) = 0;
4786 lang_hooks.dup_lang_specific_decl (copy);
4787
4788 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4789 been taken; it's for internal bookkeeping in expand_goto_internal. */
4790 if (TREE_CODE (copy) == LABEL_DECL)
4791 {
4792 TREE_ADDRESSABLE (copy) = 0;
4793 LABEL_DECL_UID (copy) = -1;
4794 }
4795
4796 return copy_decl_for_dup_finish (id, decl, copy);
4797 }
4798
4799 static tree
4800 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4801 {
4802 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4803 return copy_decl_to_var (decl, id);
4804 else
4805 return copy_decl_no_change (decl, id);
4806 }
4807
4808 /* Return a copy of the function's argument tree. */
4809 static tree
4810 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4811 bitmap args_to_skip, tree *vars)
4812 {
4813 tree arg, *parg;
4814 tree new_parm = NULL;
4815 int i = 0;
4816
4817 parg = &new_parm;
4818
4819 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4820 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4821 {
4822 tree new_tree = remap_decl (arg, id);
4823 lang_hooks.dup_lang_specific_decl (new_tree);
4824 *parg = new_tree;
4825 parg = &TREE_CHAIN (new_tree);
4826 }
4827 else if (!pointer_map_contains (id->decl_map, arg))
4828 {
4829 /* Make an equivalent VAR_DECL. If the argument was used
4830 as a temporary variable later in the function, the uses will be
4831 replaced by the local variable. */
4832 tree var = copy_decl_to_var (arg, id);
4833 get_var_ann (var);
4834 add_referenced_var (var);
4835 insert_decl_map (id, arg, var);
4836 /* Declare this new variable. */
4837 TREE_CHAIN (var) = *vars;
4838 *vars = var;
4839 }
4840 return new_parm;
4841 }
4842
4843 /* Return a copy of the function's static chain. */
4844 static tree
4845 copy_static_chain (tree static_chain, copy_body_data * id)
4846 {
4847 tree *chain_copy, *pvar;
4848
4849 chain_copy = &static_chain;
4850 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4851 {
4852 tree new_tree = remap_decl (*pvar, id);
4853 lang_hooks.dup_lang_specific_decl (new_tree);
4854 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4855 *pvar = new_tree;
4856 }
4857 return static_chain;
4858 }
4859
4860 /* Return true if the function is allowed to be versioned.
4861 This is a guard for the versioning functionality. */
4862
4863 bool
4864 tree_versionable_function_p (tree fndecl)
4865 {
4866 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4867 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4868 }
4869
4870 /* Delete all unreachable basic blocks and update the callgraph.
4871 Doing so is somewhat nontrivial because we need to update all clones and
4872 remove inlined functions that become unreachable. */
4873
4874 static bool
4875 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4876 {
4877 bool changed = false;
4878 basic_block b, next_bb;
4879
4880 find_unreachable_blocks ();
4881
4882 /* Delete all unreachable basic blocks. */
4883
4884 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4885 {
4886 next_bb = b->next_bb;
4887
4888 if (!(b->flags & BB_REACHABLE))
4889 {
4890 gimple_stmt_iterator bsi;
4891
4892 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4893 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4894 {
4895 struct cgraph_edge *e;
4896 struct cgraph_node *node;
4897
4898 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4899 {
4900 if (!e->inline_failed)
4901 cgraph_remove_node_and_inline_clones (e->callee);
4902 else
4903 cgraph_remove_edge (e);
4904 }
4905 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4906 && id->dst_node->clones)
4907 for (node = id->dst_node->clones; node != id->dst_node;)
4908 {
4909 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4910 {
4911 if (!e->inline_failed)
4912 cgraph_remove_node_and_inline_clones (e->callee);
4913 else
4914 cgraph_remove_edge (e);
4915 }
4916
4917 if (node->clones)
4918 node = node->clones;
4919 else if (node->next_sibling_clone)
4920 node = node->next_sibling_clone;
4921 else
4922 {
4923 while (node != id->dst_node && !node->next_sibling_clone)
4924 node = node->clone_of;
4925 if (node != id->dst_node)
4926 node = node->next_sibling_clone;
4927 }
4928 }
4929 }
4930 delete_basic_block (b);
4931 changed = true;
4932 }
4933 }
4934
4935 if (changed)
4936 tidy_fallthru_edges ();
4937 return changed;
4938 }
4939
4940 /* Update clone info after duplication. */
4941
4942 static void
4943 update_clone_info (copy_body_data * id)
4944 {
4945 struct cgraph_node *node;
4946 if (!id->dst_node->clones)
4947 return;
4948 for (node = id->dst_node->clones; node != id->dst_node;)
4949 {
4950 /* First update replace maps to match the new body. */
4951 if (node->clone.tree_map)
4952 {
4953 unsigned int i;
4954 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4955 {
4956 struct ipa_replace_map *replace_info;
4957 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4958 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4959 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4960 }
4961 }
4962 if (node->clones)
4963 node = node->clones;
4964 else if (node->next_sibling_clone)
4965 node = node->next_sibling_clone;
4966 else
4967 {
4968 while (node != id->dst_node && !node->next_sibling_clone)
4969 node = node->clone_of;
4970 if (node != id->dst_node)
4971 node = node->next_sibling_clone;
4972 }
4973 }
4974 }
4975
4976 /* Create a copy of a function's tree.
4977 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4978 of the original function and the new copied function
4979 respectively. In case we want to replace a DECL
4980 tree with another tree while duplicating the function's
4981 body, TREE_MAP represents the mapping between these
4982 trees. If UPDATE_CLONES is set, the call_stmt fields
4983 of edges of clones of the function will be updated.
4984
4985 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
4986 from the new version.
4987 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4988 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4989 */
4990 void
4991 tree_function_versioning (tree old_decl, tree new_decl,
4992 VEC(ipa_replace_map_p,gc)* tree_map,
4993 bool update_clones, bitmap args_to_skip,
4994 bitmap blocks_to_copy, basic_block new_entry)
4995 {
4996 struct cgraph_node *old_version_node;
4997 struct cgraph_node *new_version_node;
4998 copy_body_data id;
4999 tree p;
5000 unsigned i;
5001 struct ipa_replace_map *replace_info;
5002 basic_block old_entry_block, bb;
5003 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5004
5005 tree t_step;
5006 tree old_current_function_decl = current_function_decl;
5007 tree vars = NULL_TREE;
5008
5009 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5010 && TREE_CODE (new_decl) == FUNCTION_DECL);
5011 DECL_POSSIBLY_INLINED (old_decl) = 1;
5012
5013 old_version_node = cgraph_node (old_decl);
5014 new_version_node = cgraph_node (new_decl);
5015
5016 /* Output the inlining info for this abstract function, since it has been
5017 inlined. If we don't do this now, we can lose the information about the
5018 variables in the function when the blocks get blown away as soon as we
5019 remove the cgraph node. */
5020 (*debug_hooks->outlining_inline_function) (old_decl);
5021
5022 DECL_ARTIFICIAL (new_decl) = 1;
5023 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5024 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5025
5026 /* Prepare the data structures for the tree copy. */
5027 memset (&id, 0, sizeof (id));
5028
5029 /* Generate a new name for the new version. */
5030 id.statements_to_fold = pointer_set_create ();
5031
5032 id.decl_map = pointer_map_create ();
5033 id.debug_map = NULL;
5034 id.src_fn = old_decl;
5035 id.dst_fn = new_decl;
5036 id.src_node = old_version_node;
5037 id.dst_node = new_version_node;
5038 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5039 if (id.src_node->ipa_transforms_to_apply)
5040 {
5041 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5042 unsigned int i;
5043
5044 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5045 id.src_node->ipa_transforms_to_apply);
5046 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5047 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5048 VEC_index (ipa_opt_pass,
5049 old_transforms_to_apply,
5050 i));
5051 }
5052
5053 id.copy_decl = copy_decl_no_change;
5054 id.transform_call_graph_edges
5055 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5056 id.transform_new_cfg = true;
5057 id.transform_return_to_modify = false;
5058 id.transform_lang_insert_block = NULL;
5059
5060 current_function_decl = new_decl;
5061 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5062 (DECL_STRUCT_FUNCTION (old_decl));
5063 initialize_cfun (new_decl, old_decl,
5064 old_entry_block->count);
5065 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5066 = id.src_cfun->gimple_df->ipa_pta;
5067 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5068
5069 /* Copy the function's static chain. */
5070 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5071 if (p)
5072 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5073 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5074 &id);
5075
5076 /* If there's a tree_map, prepare for substitution. */
5077 if (tree_map)
5078 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5079 {
5080 gimple init;
5081 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5082 if (replace_info->replace_p)
5083 {
5084 tree op = replace_info->new_tree;
5085 if (!replace_info->old_tree)
5086 {
5087 int i = replace_info->parm_num;
5088 tree parm;
5089 for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
5090 i --;
5091 replace_info->old_tree = parm;
5092 }
5093
5094
5095 STRIP_NOPS (op);
5096
5097 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5098 op = TREE_OPERAND (op, 0);
5099
5100 if (TREE_CODE (op) == ADDR_EXPR)
5101 {
5102 op = TREE_OPERAND (op, 0);
5103 while (handled_component_p (op))
5104 op = TREE_OPERAND (op, 0);
5105 if (TREE_CODE (op) == VAR_DECL)
5106 add_referenced_var (op);
5107 }
5108 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5109 init = setup_one_parameter (&id, replace_info->old_tree,
5110 replace_info->new_tree, id.src_fn,
5111 NULL,
5112 &vars);
5113 if (init)
5114 VEC_safe_push (gimple, heap, init_stmts, init);
5115 }
5116 }
5117 /* Copy the function's arguments. */
5118 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5119 DECL_ARGUMENTS (new_decl) =
5120 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5121 args_to_skip, &vars);
5122
5123 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5124
5125 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5126 number_blocks (id.dst_fn);
5127
5128 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5129
5130 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
5131 /* Add local vars. */
5132 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
5133 t_step; t_step = TREE_CHAIN (t_step))
5134 {
5135 tree var = TREE_VALUE (t_step);
5136 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
5137 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
5138 else if (!can_be_nonlocal (var, &id))
5139 cfun->local_decls =
5140 tree_cons (NULL_TREE, remap_decl (var, &id),
5141 cfun->local_decls);
5142 }
5143
5144 /* Copy the function's body. */
5145 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5146 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5147
5148 if (DECL_RESULT (old_decl) != NULL_TREE)
5149 {
5150 tree *res_decl = &DECL_RESULT (old_decl);
5151 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5152 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5153 }
5154
5155 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5156 number_blocks (new_decl);
5157
5158 /* We want to create the BB unconditionally, so that the addition of
5159 debug stmts doesn't affect BB count, which may in the end cause
5160 codegen differences. */
5161 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5162 while (VEC_length (gimple, init_stmts))
5163 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5164 update_clone_info (&id);
5165
5166 /* Remap the nonlocal_goto_save_area, if any. */
5167 if (cfun->nonlocal_goto_save_area)
5168 {
5169 struct walk_stmt_info wi;
5170
5171 memset (&wi, 0, sizeof (wi));
5172 wi.info = &id;
5173 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5174 }
5175
5176 /* Clean up. */
5177 pointer_map_destroy (id.decl_map);
5178 if (id.debug_map)
5179 pointer_map_destroy (id.debug_map);
5180 free_dominance_info (CDI_DOMINATORS);
5181 free_dominance_info (CDI_POST_DOMINATORS);
5182
5183 fold_marked_statements (0, id.statements_to_fold);
5184 pointer_set_destroy (id.statements_to_fold);
5185 fold_cond_expr_cond ();
5186 delete_unreachable_blocks_update_callgraph (&id);
5187 if (id.dst_node->analyzed)
5188 cgraph_rebuild_references ();
5189 update_ssa (TODO_update_ssa);
5190 free_dominance_info (CDI_DOMINATORS);
5191 free_dominance_info (CDI_POST_DOMINATORS);
5192
5193 gcc_assert (!id.debug_stmts);
5194 VEC_free (gimple, heap, init_stmts);
5195 pop_cfun ();
5196 current_function_decl = old_current_function_decl;
5197 gcc_assert (!current_function_decl
5198 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5199 return;
5200 }
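
/* As an illustration, a clone of OLD_DECL that drops its first parameter
   could be produced roughly like this (a sketch only; NEW_DECL is assumed
   to have been created beforehand with the reduced prototype):

     bitmap args = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args, 0);
     tree_function_versioning (old_decl, new_decl, NULL, false,
                               args, NULL, NULL);
     BITMAP_FREE (args);

   This mirrors the way the cgraph clone materialization code drives the
   function when arguments are skipped.  */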
5201
5202 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5203 the callee and return the inlined body on success. */
5204
5205 tree
5206 maybe_inline_call_in_expr (tree exp)
5207 {
5208 tree fn = get_callee_fndecl (exp);
5209
5210 /* We can only try to inline "const" functions. */
5211 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5212 {
5213 struct pointer_map_t *decl_map = pointer_map_create ();
5214 call_expr_arg_iterator iter;
5215 copy_body_data id;
5216 tree param, arg, t;
5217
5218 /* Remap the parameters. */
5219 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5220 param;
5221 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5222 *pointer_map_insert (decl_map, param) = arg;
5223
5224 memset (&id, 0, sizeof (id));
5225 id.src_fn = fn;
5226 id.dst_fn = current_function_decl;
5227 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5228 id.decl_map = decl_map;
5229
5230 id.copy_decl = copy_decl_no_change;
5231 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5232 id.transform_new_cfg = false;
5233 id.transform_return_to_modify = true;
5234 id.transform_lang_insert_block = false;
5235
5236 /* Make sure not to unshare trees behind the front-end's back
5237 since front-end specific mechanisms may rely on sharing. */
5238 id.regimplify = false;
5239 id.do_not_unshare = true;
5240
5241 /* We're not inside any EH region. */
5242 id.eh_lp_nr = 0;
5243
5244 t = copy_tree_body (&id);
5245 pointer_map_destroy (decl_map);
5246
5247 /* We can only return something suitable for use in a GENERIC
5248 expression tree. */
5249 if (TREE_CODE (t) == MODIFY_EXPR)
5250 return TREE_OPERAND (t, 1);
5251 }
5252
5253 return NULL_TREE;
5254 }
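
/* A front end folding calls in a GENERIC context might use this as
   (a sketch; CALL is a CALL_EXPR node):

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       call = folded;

   Only "const" callees whose GENERIC body is still available through
   DECL_SAVED_TREE are considered, per the check above.  */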
5255
5256 /* Duplicate a type, fields and all. */
5257
5258 tree
5259 build_duplicate_type (tree type)
5260 {
5261 struct copy_body_data id;
5262
5263 memset (&id, 0, sizeof (id));
5264 id.src_fn = current_function_decl;
5265 id.dst_fn = current_function_decl;
5266 id.src_cfun = cfun;
5267 id.decl_map = pointer_map_create ();
5268 id.debug_map = NULL;
5269 id.copy_decl = copy_decl_no_change;
5270
5271 type = remap_type_1 (type, &id);
5272
5273 pointer_map_destroy (id.decl_map);
5274 if (id.debug_map)
5275 pointer_map_destroy (id.debug_map);
5276
5277 TYPE_CANONICAL (type) = type;
5278
5279 return type;
5280 }
5281
5282 /* Return whether it is safe to inline a function: it is not safe if it
5283 uses different target-specific options than the caller or if the call-site
5284 actual argument types mismatch the parameter types. E is the call edge to be checked. */
5285 bool
5286 tree_can_inline_p (struct cgraph_edge *e)
5287 {
5288 #if 0
5289 /* This causes a regression in SPEC in that it prevents a cold function from
5290 inlining a hot function. Perhaps this should only apply to functions
5291 that the user declares hot/cold/optimize explicitly. */
5292
5293 /* Don't inline a function with a higher optimization level than the
5294 caller, or with different space constraints (hot/cold functions). */
5295 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5296 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5297
5298 if (caller_tree != callee_tree)
5299 {
5300 struct cl_optimization *caller_opt
5301 = TREE_OPTIMIZATION ((caller_tree)
5302 ? caller_tree
5303 : optimization_default_node);
5304
5305 struct cl_optimization *callee_opt
5306 = TREE_OPTIMIZATION ((callee_tree)
5307 ? callee_tree
5308 : optimization_default_node);
5309
5310 if ((caller_opt->optimize > callee_opt->optimize)
5311 || (caller_opt->optimize_size != callee_opt->optimize_size))
5312 return false;
5313 }
5314 #endif
5315 tree caller, callee, lhs;
5316
5317 caller = e->caller->decl;
5318 callee = e->callee->decl;
5319
5320 /* First check that inlining isn't simply forbidden in this case. */
5321 if (inline_forbidden_into_p (caller, callee))
5322 {
5323 e->inline_failed = CIF_UNSPECIFIED;
5324 gimple_call_set_cannot_inline (e->call_stmt, true);
5325 return false;
5326 }
5327
5328 /* Allow the backend to decide if inlining is ok. */
5329 if (!targetm.target_option.can_inline_p (caller, callee))
5330 {
5331 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5332 gimple_call_set_cannot_inline (e->call_stmt, true);
5333 e->call_stmt_cannot_inline_p = true;
5334 return false;
5335 }
5336
5337 /* Do not inline calls where we cannot trivially work around mismatches
5338 in argument or return types. */
5339 if (e->call_stmt
5340 && ((DECL_RESULT (callee)
5341 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5342 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5343 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5344 TREE_TYPE (lhs))
5345 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5346 || !gimple_check_call_args (e->call_stmt)))
5347 {
5348 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5349 gimple_call_set_cannot_inline (e->call_stmt, true);
5350 e->call_stmt_cannot_inline_p = true;
5351 return false;
5352 }
5353
5354 return true;
5355 }