re PR tree-optimization/33434 (inlining miscompilation)
[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "langhooks.h"
38 #include "basic-block.h"
39 #include "tree-iterator.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
44 #include "function.h"
45 #include "ggc.h"
46 #include "tree-flow.h"
47 #include "diagnostic.h"
48 #include "except.h"
49 #include "debug.h"
50 #include "pointer-set.h"
51 #include "ipa-prop.h"
52 #include "value-prof.h"
53 #include "tree-pass.h"
54 #include "target.h"
55 #include "integrate.h"
56
57 /* I'm not really happy about this, but we need to handle gimple and
58 non-gimple trees. */
59 #include "tree-gimple.h"
60
61 /* Inlining, Cloning, Versioning, Parallelization
62
63 Inlining: a function body is duplicated, but the PARM_DECLs are
64 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
65 GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
66 The duplicated eh_region info of the copy will later be appended
67 to the info for the caller; the eh_region info in copied throwing
68 statements and RESX_EXPRs is adjusted accordingly.
69
70 Cloning: (only in C++) We have one body for a con/de/structor, and
71 multiple function decls, each with a unique parameter list.
72 Duplicate the body, using the given splay tree; some parameters
73 will become constants (like 0 or 1).
74
75 Versioning: a function body is duplicated, but the result is a new
76 function rather than being inserted into the blocks of an existing
77 function as with inlining. Some parameters will become constants.
78
79 Parallelization: a region of a function is duplicated resulting in
80 a new function. Variables may be replaced with complex expressions
81 to enable shared variable semantics.
82
83 All of these will simultaneously look up any callgraph edges. If
84 we're going to inline the duplicated function body, and the given
85 function has some cloned callgraph nodes (one for each place this
86 function will be inlined), those callgraph edges will be duplicated.
87 If we're cloning the body, those callgraph edges will be
88 updated to point into the new body. (Note that the original
89 callgraph node and edge list will not be altered.)
90
91 See the CALL_EXPR handling case in copy_body_r (). */
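
/* A minimal source-level sketch of the inlining transformation described
   above (illustrative only; the names x.1 and retval.2 are made up):

     int sq (int x) { return x * x; }

   a call "y = sq (a);" is expanded roughly into

     int x.1 = a;              x.1: the PARM_DECL remapped to a VAR_DECL
     int retval.2;
     retval.2 = x.1 * x.1;     the RETURN_EXPR became a GIMPLE_MODIFY_STMT
     y = retval.2;

   with the callee's duplicated eh_region info appended to the caller's. */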
92
93 /* 0 if we should not perform inlining.
94 1 if we should expand function calls inline at the tree level.
95 2 if we should consider *all* functions to be inline
96 candidates. */
97
98 int flag_inline_trees = 0;
99
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses for heuristics in inlining. */
114
115 eni_weights eni_inlining_weights;
116
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
119
120 eni_weights eni_size_weights;
121
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
124
125 eni_weights eni_time_weights;
126
127 /* Prototypes. */
128
129 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
130 static tree copy_generic_body (copy_body_data *);
131 static bool inlinable_function_p (tree);
132 static void remap_block (tree *, copy_body_data *);
133 static tree remap_decls (tree, copy_body_data *);
134 static void copy_bind_expr (tree *, int *, copy_body_data *);
135 static tree mark_local_for_remap_r (tree *, int *, void *);
136 static void unsave_expr_1 (tree);
137 static tree unsave_r (tree *, int *, void *);
138 static void declare_inline_vars (tree, tree);
139 static void remap_save_expr (tree *, void *, int *);
140 static void add_lexical_block (tree current_block, tree new_block);
141 static tree copy_decl_to_var (tree, copy_body_data *);
142 static tree copy_result_decl_to_var (tree, copy_body_data *);
143 static tree copy_decl_no_change (tree, copy_body_data *);
144 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
145
146 /* Insert a tree->tree mapping for ID. Although the name suggests
147 that the trees should be variables, it is used for more than that. */
148
149 void
150 insert_decl_map (copy_body_data *id, tree key, tree value)
151 {
152 *pointer_map_insert (id->decl_map, key) = value;
153
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 *pointer_map_insert (id->decl_map, value) = value;
158 }
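
/* Illustrative use (a sketch; ORIG is a hypothetical decl):

     tree copy = copy_decl_no_change (orig, id);
     insert_decl_map (id, orig, copy);

   Afterwards both ORIG and COPY map to COPY, so if a later walk meets
   COPY itself it is left alone instead of being duplicated again. */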
159
160 /* Construct new SSA name for old NAME. ID is the inline context. */
161
162 static tree
163 remap_ssa_name (tree name, copy_body_data *id)
164 {
165 tree new;
166 tree *n;
167
168 gcc_assert (TREE_CODE (name) == SSA_NAME);
169
170 n = (tree *) pointer_map_contains (id->decl_map, name);
171 if (n)
172 return *n;
173
174 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
175 that in copy_bb. */
176 new = remap_decl (SSA_NAME_VAR (name), id);
177 /* We might've substituted a constant or another SSA_NAME for
178 the variable.
179
180 Replace the SSA name representing the RESULT_DECL by the variable
181 during inlining: this saves us from needing to introduce a PHI node
182 in case the return value is only partly initialized. */
183 if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
184 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
185 || !id->transform_return_to_modify))
186 {
187 new = make_ssa_name (new, NULL);
188 insert_decl_map (id, name, new);
189 if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
190 {
191 SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
192 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
193 set_default_def (SSA_NAME_VAR (new), new);
194 }
195 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
196 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
197 TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
198 }
199 else
200 insert_decl_map (id, name, new);
201 return new;
202 }
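
/* Illustration of the RESULT_DECL case above (a sketch): in

     int f (int c) { int r; if (c) r = 1; return r; }

   the return value is only partly initialized. Mapping the SSA name of
   the RESULT_DECL back to a plain variable while inlining avoids having
   to synthesize a PHI node merging the initialized and uninitialized
   paths at the call site. */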
203
204 /* Remap DECL during the copying of the BLOCK tree for the function. */
205
206 tree
207 remap_decl (tree decl, copy_body_data *id)
208 {
209 tree *n;
210 tree fn;
211
212 /* We only remap local variables in the current function. */
213 fn = id->src_fn;
214
215 /* See if we have remapped this declaration. */
216
217 n = (tree *) pointer_map_contains (id->decl_map, decl);
218
219 /* If we didn't already have an equivalent for this declaration,
220 create one now. */
221 if (!n)
222 {
223 /* Make a copy of the variable or label. */
224 tree t = id->copy_decl (decl, id);
225
226 /* Remember it, so that if we encounter this local entity again
227 we can reuse this copy. Do this early because remap_type may
228 need this decl for TYPE_STUB_DECL. */
229 insert_decl_map (id, decl, t);
230
231 if (!DECL_P (t))
232 return t;
233
234 /* Remap types, if necessary. */
235 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
236 if (TREE_CODE (t) == TYPE_DECL)
237 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
238
239 /* Remap sizes as necessary. */
240 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
241 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
242
243 /* If fields, do likewise for offset and qualifier. */
244 if (TREE_CODE (t) == FIELD_DECL)
245 {
246 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
247 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
248 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
249 }
250
251 if (cfun && gimple_in_ssa_p (cfun)
252 && (TREE_CODE (t) == VAR_DECL
253 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
254 {
255 tree def = gimple_default_def (id->src_cfun, decl);
256 get_var_ann (t);
257 if (TREE_CODE (decl) != PARM_DECL && def)
258 {
259 tree map = remap_ssa_name (def, id);
260 /* Watch out for RESULT_DECLs whose SSA names map directly
261 to them. */
262 if (TREE_CODE (map) == SSA_NAME)
263 set_default_def (t, map);
264 }
265 add_referenced_var (t);
266 }
267 return t;
268 }
269
270 return unshare_expr (*n);
271 }
272
273 static tree
274 remap_type_1 (tree type, copy_body_data *id)
275 {
276 tree new, t;
277
278 /* We do need a copy. Build and register it now. If this is a pointer or
279 reference type, remap the designated type and make a new pointer or
280 reference type. */
281 if (TREE_CODE (type) == POINTER_TYPE)
282 {
283 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
284 TYPE_MODE (type),
285 TYPE_REF_CAN_ALIAS_ALL (type));
286 insert_decl_map (id, type, new);
287 return new;
288 }
289 else if (TREE_CODE (type) == REFERENCE_TYPE)
290 {
291 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
292 TYPE_MODE (type),
293 TYPE_REF_CAN_ALIAS_ALL (type));
294 insert_decl_map (id, type, new);
295 return new;
296 }
297 else
298 new = copy_node (type);
299
300 insert_decl_map (id, type, new);
301
302 /* This is a new type, not a copy of an old type. Need to reassociate
303 variants. We can handle everything except the main variant lazily. */
304 t = TYPE_MAIN_VARIANT (type);
305 if (type != t)
306 {
307 t = remap_type (t, id);
308 TYPE_MAIN_VARIANT (new) = t;
309 TYPE_NEXT_VARIANT (new) = TYPE_NEXT_VARIANT (t);
310 TYPE_NEXT_VARIANT (t) = new;
311 }
312 else
313 {
314 TYPE_MAIN_VARIANT (new) = new;
315 TYPE_NEXT_VARIANT (new) = NULL;
316 }
317
318 if (TYPE_STUB_DECL (type))
319 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
320
321 /* Lazily create pointer and reference types. */
322 TYPE_POINTER_TO (new) = NULL;
323 TYPE_REFERENCE_TO (new) = NULL;
324
325 switch (TREE_CODE (new))
326 {
327 case INTEGER_TYPE:
328 case REAL_TYPE:
329 case FIXED_POINT_TYPE:
330 case ENUMERAL_TYPE:
331 case BOOLEAN_TYPE:
332 t = TYPE_MIN_VALUE (new);
333 if (t && TREE_CODE (t) != INTEGER_CST)
334 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
335
336 t = TYPE_MAX_VALUE (new);
337 if (t && TREE_CODE (t) != INTEGER_CST)
338 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
339 return new;
340
341 case FUNCTION_TYPE:
342 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
343 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
344 return new;
345
346 case ARRAY_TYPE:
347 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
348 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
349 break;
350
351 case RECORD_TYPE:
352 case UNION_TYPE:
353 case QUAL_UNION_TYPE:
354 {
355 tree f, nf = NULL;
356
357 for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
358 {
359 t = remap_decl (f, id);
360 DECL_CONTEXT (t) = new;
361 TREE_CHAIN (t) = nf;
362 nf = t;
363 }
364 TYPE_FIELDS (new) = nreverse (nf);
365 }
366 break;
367
368 case OFFSET_TYPE:
369 default:
370 /* Shouldn't have been thought variable sized. */
371 gcc_unreachable ();
372 }
373
374 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
375 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
376
377 return new;
378 }
379
380 tree
381 remap_type (tree type, copy_body_data *id)
382 {
383 tree *node;
384
385 if (type == NULL)
386 return type;
387
388 /* See if we have remapped this type. */
389 node = (tree *) pointer_map_contains (id->decl_map, type);
390 if (node)
391 return *node;
392
393 /* The type only needs remapping if it's variably modified. */
394 if (! variably_modified_type_p (type, id->src_fn))
395 {
396 insert_decl_map (id, type, type);
397 return type;
398 }
399
400 return remap_type_1 (type, id);
401 }
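
/* Illustration: only variably modified types are remapped. In

     void f (int n) { char buf[n]; }

   the type char[n] refers to the PARM_DECL n and so must be remapped
   when the body is copied; a fixed type such as char[16] is entered
   into the map unchanged and stays shared. */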
402
403 static tree
404 remap_decls (tree decls, copy_body_data *id)
405 {
406 tree old_var;
407 tree new_decls = NULL_TREE;
408
409 /* Remap its variables. */
410 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
411 {
412 tree new_var;
413
414 /* We cannot remap the local static declarations, as duplicating
415 them would break the one-decl rule. Instead, go ahead and link the
416 originals into cfun->unexpanded_var_list. */
417 if (!auto_var_in_fn_p (old_var, id->src_fn)
418 && !DECL_EXTERNAL (old_var))
419 {
420 cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
421 cfun->unexpanded_var_list);
422 continue;
423 }
424
425 /* Remap the variable. */
426 new_var = remap_decl (old_var, id);
427
428 /* If we didn't remap this variable, we can't mess with its
429 TREE_CHAIN. If we remapped this variable to the return slot, it's
430 already declared somewhere else, so don't declare it here. */
431 if (!new_var || new_var == id->retvar)
432 ;
433 else
434 {
435 gcc_assert (DECL_P (new_var));
436 TREE_CHAIN (new_var) = new_decls;
437 new_decls = new_var;
438 }
439 }
440
441 return nreverse (new_decls);
442 }
443
444 /* Copy the BLOCK to contain remapped versions of the variables
445 therein, and hook the new block into the block-tree. */
446
447 static void
448 remap_block (tree *block, copy_body_data *id)
449 {
450 tree old_block;
451 tree new_block;
452 tree fn;
453
454 /* Make the new block. */
455 old_block = *block;
456 new_block = make_node (BLOCK);
457 TREE_USED (new_block) = TREE_USED (old_block);
458 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
459 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
460 *block = new_block;
461
462 /* Remap its variables. */
463 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
464
465 fn = id->dst_fn;
466
467 if (id->transform_lang_insert_block)
468 lang_hooks.decls.insert_block (new_block);
469
470 /* Remember the remapped block. */
471 insert_decl_map (id, old_block, new_block);
472 }
473
474 /* Copy the whole block tree and root it in id->block. */
475 static tree
476 remap_blocks (tree block, copy_body_data *id)
477 {
478 tree t;
479 tree new = block;
480
481 if (!block)
482 return NULL;
483
484 remap_block (&new, id);
485 gcc_assert (new != block);
486 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
487 add_lexical_block (new, remap_blocks (t, id));
488 return new;
489 }
490
491 static void
492 copy_statement_list (tree *tp)
493 {
494 tree_stmt_iterator oi, ni;
495 tree new;
496
497 new = alloc_stmt_list ();
498 ni = tsi_start (new);
499 oi = tsi_start (*tp);
500 *tp = new;
501
502 for (; !tsi_end_p (oi); tsi_next (&oi))
503 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
504 }
505
506 static void
507 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
508 {
509 tree block = BIND_EXPR_BLOCK (*tp);
510 /* Copy (and replace) the statement. */
511 copy_tree_r (tp, walk_subtrees, NULL);
512 if (block)
513 {
514 remap_block (&block, id);
515 BIND_EXPR_BLOCK (*tp) = block;
516 }
517
518 if (BIND_EXPR_VARS (*tp))
519 /* This will remap a lot of the same decls again, but this should be
520 harmless. */
521 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
522 }
523
524 /* Called from copy_body and similar walkers via walk_tree. DATA is
525 really a `copy_body_data *'. */
526
527 tree
528 copy_body_r (tree *tp, int *walk_subtrees, void *data)
529 {
530 copy_body_data *id = (copy_body_data *) data;
531 tree fn = id->src_fn;
532 tree new_block;
533
534 /* Begin by recognizing trees that we'll completely rewrite for the
535 inlining context. Our output for these trees is completely
536 different from our input (e.g. RETURN_EXPR is deleted and morphs
537 into an edge). Further down, we'll handle trees that get
538 duplicated and/or tweaked. */
539
540 /* When requested, RETURN_EXPRs should be transformed to just the
541 contained GIMPLE_MODIFY_STMT. The branch semantics of the return will
542 be handled elsewhere by manipulating the CFG rather than a statement. */
543 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
544 {
545 tree assignment = TREE_OPERAND (*tp, 0);
546
547 /* If we're returning something, just turn that into an
548 assignment into the equivalent of the original RESULT_DECL.
549 If the "assignment" is just the result decl, the result
550 decl has already been set (e.g. a recent "foo (&result_decl,
551 ...)"); just toss the entire RETURN_EXPR. */
552 if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
553 {
554 /* Replace the RETURN_EXPR with (a copy of) the
555 GIMPLE_MODIFY_STMT hanging underneath. */
556 *tp = copy_node (assignment);
557 }
558 else /* Else the RETURN_EXPR returns no value. */
559 {
560 *tp = NULL;
561 return (tree) (void *)1;
562 }
563 }
564 else if (TREE_CODE (*tp) == SSA_NAME)
565 {
566 *tp = remap_ssa_name (*tp, id);
567 *walk_subtrees = 0;
568 return NULL;
569 }
570
571 /* Local variables and labels need to be replaced by equivalent
572 variables. We don't want to copy static variables; there's only
573 one of those, no matter how many times we inline the containing
574 function. Similarly for globals from an outer function. */
575 else if (auto_var_in_fn_p (*tp, fn))
576 {
577 tree new_decl;
578
579 /* Remap the declaration. */
580 new_decl = remap_decl (*tp, id);
581 gcc_assert (new_decl);
582 /* Replace this variable with the copy. */
583 STRIP_TYPE_NOPS (new_decl);
584 *tp = new_decl;
585 *walk_subtrees = 0;
586 }
587 else if (TREE_CODE (*tp) == STATEMENT_LIST)
588 copy_statement_list (tp);
589 else if (TREE_CODE (*tp) == SAVE_EXPR)
590 remap_save_expr (tp, id->decl_map, walk_subtrees);
591 else if (TREE_CODE (*tp) == LABEL_DECL
592 && (! DECL_CONTEXT (*tp)
593 || decl_function_context (*tp) == id->src_fn))
594 /* These may need to be remapped for EH handling. */
595 *tp = remap_decl (*tp, id);
596 else if (TREE_CODE (*tp) == BIND_EXPR)
597 copy_bind_expr (tp, walk_subtrees, id);
598 /* Types may need remapping as well. */
599 else if (TYPE_P (*tp))
600 *tp = remap_type (*tp, id);
601
602 /* If this is a constant, we have to copy the node iff the type will be
603 remapped. copy_tree_r will not copy a constant. */
604 else if (CONSTANT_CLASS_P (*tp))
605 {
606 tree new_type = remap_type (TREE_TYPE (*tp), id);
607
608 if (new_type == TREE_TYPE (*tp))
609 *walk_subtrees = 0;
610
611 else if (TREE_CODE (*tp) == INTEGER_CST)
612 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
613 TREE_INT_CST_HIGH (*tp));
614 else
615 {
616 *tp = copy_node (*tp);
617 TREE_TYPE (*tp) = new_type;
618 }
619 }
620
621 /* Otherwise, just copy the node. Note that copy_tree_r already
622 knows not to copy VAR_DECLs, etc., so this is safe. */
623 else
624 {
625 /* Here we handle trees that are not completely rewritten.
626 First we detect some inlining-induced bogosities for
627 discarding. */
628 if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
629 && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
630 && (auto_var_in_fn_p (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
631 {
632 /* Some assignments VAR = VAR; don't generate any rtl code
633 and thus don't count as variable modification. Avoid
634 keeping bogosities like 0 = 0. */
635 tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
636 tree *n;
637
638 n = (tree *) pointer_map_contains (id->decl_map, decl);
639 if (n)
640 {
641 value = *n;
642 STRIP_TYPE_NOPS (value);
643 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
644 {
645 *tp = build_empty_stmt ();
646 return copy_body_r (tp, walk_subtrees, data);
647 }
648 }
649 }
650 else if (TREE_CODE (*tp) == INDIRECT_REF)
651 {
652 /* Get rid of *& from inline substitutions that can happen when a
653 pointer argument is an ADDR_EXPR. */
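/* For instance (a sketch): inlining

     int deref (int *p) { return *p; }

   at the call deref (&v) maps p to &v, so *p momentarily becomes *&v;
   the code below folds that back to plain v. */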
654 tree decl = TREE_OPERAND (*tp, 0);
655 tree *n;
656
657 n = (tree *) pointer_map_contains (id->decl_map, decl);
658 if (n)
659 {
660 tree new;
661 tree old;
662 /* If we happen to get an ADDR_EXPR in *n, strip
663 it manually here, as we'll eventually get ADDR_EXPRs
664 that lie about the type they point to. In that case
665 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
666 but we absolutely rely on that happening. As fold_indirect_ref
667 does other useful transformations, try that first, though. */
668 tree type = TREE_TYPE (TREE_TYPE (*n));
669 new = unshare_expr (*n);
670 old = *tp;
671 *tp = fold_indirect_ref_1 (type, new);
672 if (! *tp)
673 {
674 if (TREE_CODE (new) == ADDR_EXPR)
675 *tp = TREE_OPERAND (new, 0);
676 else
677 {
678 *tp = build1 (INDIRECT_REF, type, new);
679 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
680 }
681 }
682 *walk_subtrees = 0;
683 return NULL;
684 }
685 }
686
687 /* Here is the "usual case". Copy this tree node, and then
688 tweak some special cases. */
689 copy_tree_r (tp, walk_subtrees, NULL);
690
691 /* Global variables we haven't seen yet need to go into referenced
692 vars. */
693 if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
694 add_referenced_var (*tp);
695
696 /* If EXPR has a block defined, map it to the newly constructed block.
697 When inlining, we want EXPRs without a block to appear in the
698 block of the function call. */
699 if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
700 {
701 new_block = id->block;
702 if (TREE_BLOCK (*tp))
703 {
704 tree *n;
705 n = (tree *) pointer_map_contains (id->decl_map,
706 TREE_BLOCK (*tp));
707 gcc_assert (n);
708 new_block = *n;
709 }
710 TREE_BLOCK (*tp) = new_block;
711 }
712
713 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
714 TREE_OPERAND (*tp, 0) =
715 build_int_cst
716 (NULL_TREE,
717 id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
718
719 if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
720 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
721
722 /* The copied TARGET_EXPR has never been expanded, even if the
723 original node was expanded already. */
724 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
725 {
726 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
727 TREE_OPERAND (*tp, 3) = NULL_TREE;
728 }
729
730 /* Variable substitution need not be simple. In particular, consider
731 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
732 and friends are up to date. */
733 else if (TREE_CODE (*tp) == ADDR_EXPR)
734 {
735 int invariant = TREE_INVARIANT (*tp);
736 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
737 /* Handle the case where we substituted an INDIRECT_REF
738 into the operand of the ADDR_EXPR. */
739 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
740 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
741 else
742 recompute_tree_invariant_for_addr_expr (*tp);
743 /* If this used to be invariant, but is not any longer,
744 then regimplification is probably needed. */
745 if (invariant && !TREE_INVARIANT (*tp))
746 id->regimplify = true;
747 *walk_subtrees = 0;
748 }
749 }
750
751 /* Keep iterating. */
752 return NULL_TREE;
753 }
754
755 /* Copy a basic block, scaling its profile accordingly. Edges will be
756 taken care of later. */
757
758 static basic_block
759 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
760 {
761 block_stmt_iterator bsi, copy_bsi;
762 basic_block copy_basic_block;
763
764 /* create_basic_block() will append every new block to
765 basic_block_info automatically. */
766 copy_basic_block = create_basic_block (NULL, (void *) 0,
767 (basic_block) bb->prev_bb->aux);
768 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
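
/* Worked example of the scaling above (REG_BR_PROB_BASE is 10000): if
   the call site's count is half of the callee's entry count, then
   count_scale is 5000, and a callee block with count 200 is copied
   with count 200 * 5000 / 10000 == 100. */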
769
770 /* We are going to rebuild frequencies from scratch. These values are
771 of only minor importance, serving just to drive canonicalize_loop_headers. */
772 copy_basic_block->frequency = ((gcov_type)bb->frequency
773 * frequency_scale / REG_BR_PROB_BASE);
774 if (copy_basic_block->frequency > BB_FREQ_MAX)
775 copy_basic_block->frequency = BB_FREQ_MAX;
776 copy_bsi = bsi_start (copy_basic_block);
777
778 for (bsi = bsi_start (bb);
779 !bsi_end_p (bsi); bsi_next (&bsi))
780 {
781 tree stmt = bsi_stmt (bsi);
782 tree orig_stmt = stmt;
783
784 id->regimplify = false;
785 walk_tree (&stmt, copy_body_r, id, NULL);
786
787 /* The RETURN_EXPR might have been removed;
788 this is signalled by the stmt pointer being made NULL. */
789 if (stmt)
790 {
791 tree call, decl;
792
793 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
794
795 /* With the return slot optimization we can end up with
796 non-gimple (foo *)&this->m; fix that here. */
797 if ((TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
798 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
799 && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
800 || id->regimplify)
801 gimplify_stmt (&stmt);
802
803 bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
804
805 /* Process the new statement. gimplify_stmt may have turned the statement
806 into multiple statements; we need to process all of them. */
807 while (!bsi_end_p (copy_bsi))
808 {
809 tree *stmtp = bsi_stmt_ptr (copy_bsi);
810 tree stmt = *stmtp;
811 call = get_call_expr_in (stmt);
812
813 if (call && CALL_EXPR_VA_ARG_PACK (call) && id->call_expr)
814 {
815 /* __builtin_va_arg_pack () should be replaced by
816 all arguments corresponding to ... in the caller. */
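/* User-level illustration of the extension (a sketch; the names are
   made up, but the pattern mirrors the documented usage):

     extern int logf_ (const char *fmt, ...);
     extern inline __attribute__ ((always_inline, gnu_inline)) int
     logv (const char *fmt, ...)
     {
       return logf_ (fmt, __builtin_va_arg_pack ());
     }

   A call logv ("%d %s", 1, "x") inlines to logf_ ("%d %s", 1, "x");
   the builtin stands for all the anonymous arguments of the caller. */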
817 tree p, *argarray, new_call, *call_ptr;
818 int nargs = call_expr_nargs (id->call_expr);
819
820 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
821 nargs--;
822
823 argarray = (tree *) alloca ((nargs + call_expr_nargs (call))
824 * sizeof (tree));
825
826 memcpy (argarray, CALL_EXPR_ARGP (call),
827 call_expr_nargs (call) * sizeof (*argarray));
828 memcpy (argarray + call_expr_nargs (call),
829 CALL_EXPR_ARGP (id->call_expr)
830 + (call_expr_nargs (id->call_expr) - nargs),
831 nargs * sizeof (*argarray));
832
833 new_call = build_call_array (TREE_TYPE (call),
834 CALL_EXPR_FN (call),
835 nargs + call_expr_nargs (call),
836 argarray);
837 /* Copy all CALL_EXPR flags, locus and block, except
838 CALL_EXPR_VA_ARG_PACK flag. */
839 CALL_EXPR_STATIC_CHAIN (new_call)
840 = CALL_EXPR_STATIC_CHAIN (call);
841 CALL_EXPR_TAILCALL (new_call) = CALL_EXPR_TAILCALL (call);
842 CALL_EXPR_RETURN_SLOT_OPT (new_call)
843 = CALL_EXPR_RETURN_SLOT_OPT (call);
844 CALL_FROM_THUNK_P (new_call) = CALL_FROM_THUNK_P (call);
845 CALL_CANNOT_INLINE_P (new_call)
846 = CALL_CANNOT_INLINE_P (call);
847 TREE_NOTHROW (new_call) = TREE_NOTHROW (call);
848 SET_EXPR_LOCUS (new_call, EXPR_LOCUS (call));
849 TREE_BLOCK (new_call) = TREE_BLOCK (call);
850
851 call_ptr = stmtp;
852 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
853 call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
854 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
855 call_ptr = &TREE_OPERAND (*call_ptr, 0);
856 gcc_assert (*call_ptr == call);
857 if (call_ptr == stmtp)
858 {
859 bsi_replace (&copy_bsi, new_call, true);
860 stmtp = bsi_stmt_ptr (copy_bsi);
861 stmt = *stmtp;
862 }
863 else
864 {
865 *call_ptr = new_call;
866 stmt = *stmtp;
867 update_stmt (stmt);
868 }
869 }
870 else if (call
871 && id->call_expr
872 && (decl = get_callee_fndecl (call))
873 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
874 && DECL_FUNCTION_CODE (decl)
875 == BUILT_IN_VA_ARG_PACK_LEN)
876 {
877 /* __builtin_va_arg_pack_len () should be replaced by
878 the number of anonymous arguments. */
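/* In the logv sketch above, __builtin_va_arg_pack_len () would inline
   to 2 for the call logv ("%d %s", 1, "x"): two arguments match the
   "..." of the inlined function. */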
879 int nargs = call_expr_nargs (id->call_expr);
880 tree count, *call_ptr, p;
881
882 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
883 nargs--;
884
885 count = build_int_cst (integer_type_node, nargs);
886 call_ptr = stmtp;
887 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
888 call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
889 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
890 call_ptr = &TREE_OPERAND (*call_ptr, 0);
891 gcc_assert (*call_ptr == call && call_ptr != stmtp);
892 *call_ptr = count;
893 stmt = *stmtp;
894 update_stmt (stmt);
895 call = NULL_TREE;
896 }
897
898 /* Statements produced by inlining can be unfolded, especially
899 when we have constant propagated some operands. We can't fold
900 them right now for two reasons:
901 1) folding requires SSA_NAME_DEF_STMTs to be correct
902 2) we can't change function calls to builtins.
903 So we just mark the statement for later folding. We mark
904 all new statements, instead of just the statements that changed
905 through some nontrivial substitution, so that even statements made
906 foldable indirectly are updated. If this turns out to be
907 expensive, copy_body can be told to watch for nontrivial
908 changes. */
909 if (id->statements_to_fold)
910 pointer_set_insert (id->statements_to_fold, stmt);
911 /* We're duplicating a CALL_EXPR. Find any corresponding
912 callgraph edges and update or duplicate them. */
913 if (call && (decl = get_callee_fndecl (call)))
914 {
915 struct cgraph_node *node;
916 struct cgraph_edge *edge;
917
918 switch (id->transform_call_graph_edges)
919 {
920 case CB_CGE_DUPLICATE:
921 edge = cgraph_edge (id->src_node, orig_stmt);
922 if (edge)
923 cgraph_clone_edge (edge, id->dst_node, stmt,
924 REG_BR_PROB_BASE, 1, edge->frequency, true);
925 break;
926
927 case CB_CGE_MOVE_CLONES:
928 for (node = id->dst_node->next_clone;
929 node;
930 node = node->next_clone)
931 {
932 edge = cgraph_edge (node, orig_stmt);
933 gcc_assert (edge);
934 cgraph_set_call_stmt (edge, stmt);
935 }
936 /* FALLTHRU */
937
938 case CB_CGE_MOVE:
939 edge = cgraph_edge (id->dst_node, orig_stmt);
940 if (edge)
941 cgraph_set_call_stmt (edge, stmt);
942 break;
943
944 default:
945 gcc_unreachable ();
946 }
947 }
948 /* If you think we can abort here, you are wrong.
949 There is no region 0 in tree land. */
950 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
951 != 0);
952
953 if (tree_could_throw_p (stmt)
954 /* When we are cloning for inlining, we are supposed to
955 construct a clone that calls precisely the same functions
956 as the original. However, IPA optimizers might earlier
957 have proved some function calls non-trapping, which can
958 render some basic blocks dead so that they become
959 unreachable.
960
961 We can't update SSA with unreachable blocks in the CFG, so
962 we prevent that scenario by preserving even the "dead" EH
963 edges until the point they are later removed by the
964 fixup_cfg pass. */
965 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
966 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
967 {
968 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
969 /* Add an entry for the copied tree in the EH hashtable.
970 When cloning or versioning, use the hashtable in
971 cfun, and just copy the EH number. When inlining, use the
972 hashtable in the caller, and adjust the region number. */
973 if (region > 0)
974 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
975
976 /* If this tree doesn't have a region associated with it,
977 and there is a "current region,"
978 then associate this tree with the current region
979 and add edges associated with this region. */
980 if ((lookup_stmt_eh_region_fn (id->src_cfun,
981 orig_stmt) <= 0
982 && id->eh_region > 0)
983 && tree_could_throw_p (stmt))
984 add_stmt_to_eh_region (stmt, id->eh_region);
985 }
986 if (gimple_in_ssa_p (cfun))
987 {
988 ssa_op_iter i;
989 tree def;
990
991 find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
992 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
993 if (TREE_CODE (def) == SSA_NAME)
994 SSA_NAME_DEF_STMT (def) = stmt;
995 }
996 bsi_next (&copy_bsi);
997 }
998 copy_bsi = bsi_last (copy_basic_block);
999 }
1000 }
1001 return copy_basic_block;
1002 }
1003
1004 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1005 SSA form is quite easy, since the dominator relationship for the old
1006 basic blocks does not change.
1007
1008 There is, however, an exception: inlining might change the dominator
1009 relation across EH edges going from basic blocks within the inlined
1010 function to landing pads in the function we inline into.
1011
1012 The function fills in the PHI arguments of such PHI nodes if they refer
1013 to gimple regs. Otherwise, it marks the PHI_RESULT of such
1014 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1015 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1016 set, and this means that there will be no overlapping live ranges
1017 for the underlying symbol.
1018
1019 This might change in the future if we allow redirecting of EH edges,
1020 and then we might want to change the way we build the CFG pre-inlining
1021 to include all the possible edges. */
1022 static void
1023 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1024 bool can_throw, bool nonlocal_goto)
1025 {
1026 edge e;
1027 edge_iterator ei;
1028
1029 FOR_EACH_EDGE (e, ei, bb->succs)
1030 if (!e->dest->aux
1031 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1032 {
1033 tree phi;
1034
1035 gcc_assert (e->flags & EDGE_ABNORMAL);
1036 if (!nonlocal_goto)
1037 gcc_assert (e->flags & EDGE_EH);
1038 if (!can_throw)
1039 gcc_assert (!(e->flags & EDGE_EH));
1040 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
1041 {
1042 edge re;
1043
1044 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1045 gcc_assert (!e->dest->aux);
1046
1047 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
1048 (PHI_RESULT (phi)));
1049
1050 if (!is_gimple_reg (PHI_RESULT (phi)))
1051 {
1052 mark_sym_for_renaming
1053 (SSA_NAME_VAR (PHI_RESULT (phi)));
1054 continue;
1055 }
1056
1057 re = find_edge (ret_bb, e->dest);
1058 gcc_assert (re);
1059 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1060 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1061
1062 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1063 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1064 }
1065 }
1066 }
1067
1068 /* Copy edges from BB into its copy constructed earlier, scaling the
1069 profile accordingly. Assume the AUX pointers point to the copies
1070 of each BB. */
1071 static void
1072 copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
1073 {
1074 basic_block new_bb = (basic_block) bb->aux;
1075 edge_iterator ei;
1076 edge old_edge;
1077 block_stmt_iterator bsi;
1078 int flags;
1079
1080 /* Use the indices from the original blocks to create edges for the
1081 new ones. */
1082 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1083 if (!(old_edge->flags & EDGE_EH))
1084 {
1085 edge new;
1086
1087 flags = old_edge->flags;
1088
1089 /* Return edges do get a FALLTHRU flag when they get inlined. */
1090 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1091 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1092 flags |= EDGE_FALLTHRU;
1093 new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1094 new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1095 new->probability = old_edge->probability;
1096 }
1097
1098 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1099 return;
1100
1101 for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
1102 {
1103 tree copy_stmt;
1104 bool can_throw, nonlocal_goto;
1105
1106 copy_stmt = bsi_stmt (bsi);
1107 update_stmt (copy_stmt);
1108 if (gimple_in_ssa_p (cfun))
1109 mark_symbols_for_renaming (copy_stmt);
1110 /* Do this before the possible split_block. */
1111 bsi_next (&bsi);
1112
1113 /* If this tree could throw an exception, there are two
1114 cases where we need to add abnormal edge(s): the
1115 tree wasn't in a region and there is a "current
1116 region" in the caller; or the original tree had
1117 EH edges. In both cases split the block after the tree,
1118 and add abnormal edge(s) as needed; we need both
1119 those from the callee and the caller.
1120 We check whether the copy can throw, because the const
1121 propagation can change an INDIRECT_REF which throws
1122 into a COMPONENT_REF which doesn't. If the copy
1123 can throw, the original could also throw. */
1124
1125 can_throw = tree_can_throw_internal (copy_stmt);
1126 nonlocal_goto = tree_can_make_abnormal_goto (copy_stmt);
1127
1128 if (can_throw || nonlocal_goto)
1129 {
1130 if (!bsi_end_p (bsi))
1131 /* Note that bb's predecessor edges aren't necessarily
1132 right at this point; split_block doesn't care. */
1133 {
1134 edge e = split_block (new_bb, copy_stmt);
1135
1136 new_bb = e->dest;
1137 new_bb->aux = e->src->aux;
1138 bsi = bsi_start (new_bb);
1139 }
1140 }
1141
1142 if (can_throw)
1143 make_eh_edges (copy_stmt);
1144
1145 if (nonlocal_goto)
1146 make_abnormal_goto_edges (bb_for_stmt (copy_stmt), true);
1147
1148 if ((can_throw || nonlocal_goto)
1149 && gimple_in_ssa_p (cfun))
1150 update_ssa_across_abnormal_edges (bb_for_stmt (copy_stmt), ret_bb,
1151 can_throw, nonlocal_goto);
1152 }
1153 }
1154
1155 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1156 were possibly split and new outgoing EH edges inserted.
1157 BB points to the block of the original function and AUX pointers link
1158 the original and newly copied blocks. */
1159
1160 static void
1161 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1162 {
1163 basic_block new_bb = bb->aux;
1164 edge_iterator ei;
1165 tree phi;
1166
1167 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1168 {
1169 tree res = PHI_RESULT (phi);
1170 tree new_res = res;
1171 tree new_phi;
1172 edge new_edge;
1173
1174 if (is_gimple_reg (res))
1175 {
1176 walk_tree (&new_res, copy_body_r, id, NULL);
1177 SSA_NAME_DEF_STMT (new_res)
1178 = new_phi = create_phi_node (new_res, new_bb);
1179 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1180 {
1181 edge old_edge = find_edge (new_edge->src->aux, bb);
1182 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1183 tree new_arg = arg;
1184
1185 walk_tree (&new_arg, copy_body_r, id, NULL);
1186 gcc_assert (new_arg);
1187 /* With the return slot optimization we can end up with
1188 non-gimple (foo *)&this->m; fix that here. */
1189 if (TREE_CODE (new_arg) != SSA_NAME
1190 && TREE_CODE (new_arg) != FUNCTION_DECL
1191 && !is_gimple_val (new_arg))
1192 {
1193 tree stmts = NULL_TREE;
1194 new_arg = force_gimple_operand (new_arg, &stmts,
1195 true, NULL);
1196 bsi_insert_on_edge_immediate (new_edge, stmts);
1197 }
1198 add_phi_arg (new_phi, new_arg, new_edge);
1199 }
1200 }
1201 }
1202 }
1203
1204 /* Wrapper for remap_decl so it can be used as a callback. */
1205 static tree
1206 remap_decl_1 (tree decl, void *data)
1207 {
1208 return remap_decl (decl, (copy_body_data *) data);
1209 }
1210
1211 /* Build struct function and associated datastructures for the new clone
1212 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1213
1214 static void
1215 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1216 int frequency)
1217 {
1218 struct function *new_cfun
1219 = (struct function *) ggc_alloc_cleared (sizeof (struct function));
1220 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1221 int count_scale, frequency_scale;
1222
1223 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1224 count_scale = (REG_BR_PROB_BASE * count
1225 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1226 else
1227 count_scale = 1;
1228
1229 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1230 frequency_scale = (REG_BR_PROB_BASE * frequency
1231 /
1232 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1233 else
1234 frequency_scale = count_scale;
1235
1236 /* Register specific tree functions. */
1237 tree_register_cfg_hooks ();
1238 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
1239 new_cfun->funcdef_no = get_next_funcdef_no ();
1240 VALUE_HISTOGRAMS (new_cfun) = NULL;
1241 new_cfun->unexpanded_var_list = NULL;
1242 new_cfun->cfg = NULL;
1243 new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
1244 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
1245 push_cfun (new_cfun);
1246 init_empty_tree_cfg ();
1247
1248 ENTRY_BLOCK_PTR->count =
1249 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1250 REG_BR_PROB_BASE);
1251 ENTRY_BLOCK_PTR->frequency =
1252 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1253 frequency_scale / REG_BR_PROB_BASE);
1254 EXIT_BLOCK_PTR->count =
1255 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1256 REG_BR_PROB_BASE);
1257 EXIT_BLOCK_PTR->frequency =
1258 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1259 frequency_scale / REG_BR_PROB_BASE);
1260 if (src_cfun->eh)
1261 init_eh_for_function ();
1262
1263 if (src_cfun->gimple_df)
1264 {
1265 init_tree_ssa ();
1266 cfun->gimple_df->in_ssa_p = true;
1267 init_ssa_operands ();
1268 }
1269 pop_cfun ();
1270 }
1271
1272 /* Make a copy of the body of FN so that it can be inserted inline in
1273 another function. Walks FN via its CFG and returns the new fndecl. */
1274
1275 static tree
1276 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1277 basic_block entry_block_map, basic_block exit_block_map)
1278 {
1279 tree callee_fndecl = id->src_fn;
1280 /* Original cfun for the callee, doesn't change. */
1281 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1282 struct function *cfun_to_copy;
1283 basic_block bb;
1284 tree new_fndecl = NULL;
1285 int count_scale, frequency_scale;
1286 int last;
1287
1288 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1289 count_scale = (REG_BR_PROB_BASE * count
1290 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1291 else
1292 count_scale = 1;
1293
1294 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1295 frequency_scale = (REG_BR_PROB_BASE * frequency
1296 /
1297 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1298 else
1299 frequency_scale = count_scale;
1300
1301 /* Register specific tree functions. */
1302 tree_register_cfg_hooks ();
1303
1304 /* Must have a CFG here at this point. */
1305 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1306 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1307
1308 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1309
1310
1311 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1312 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1313 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1314 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1315
1316 /* Duplicate any exception-handling regions. */
1317 if (cfun->eh)
1318 {
1319 id->eh_region_offset
1320 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1321 0, id->eh_region);
1322 }
1323 /* Use aux pointers to map the original blocks to their copies. */
1324 FOR_EACH_BB_FN (bb, cfun_to_copy)
1325 {
1326 basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
1327 bb->aux = new;
1328 new->aux = bb;
1329 }
1330
1331 last = last_basic_block;
1332 /* Now that we've duplicated the blocks, duplicate their edges. */
1333 FOR_ALL_BB_FN (bb, cfun_to_copy)
1334 copy_edges_for_bb (bb, count_scale, exit_block_map);
1335 if (gimple_in_ssa_p (cfun))
1336 FOR_ALL_BB_FN (bb, cfun_to_copy)
1337 copy_phis_for_bb (bb, id);
1338 FOR_ALL_BB_FN (bb, cfun_to_copy)
1339 {
1340 ((basic_block)bb->aux)->aux = NULL;
1341 bb->aux = NULL;
1342 }
1343 /* Zero out AUX fields of the blocks newly created during EH edge
1344 insertion. */
1345 for (; last < last_basic_block; last++)
1346 BASIC_BLOCK (last)->aux = NULL;
1347 entry_block_map->aux = NULL;
1348 exit_block_map->aux = NULL;
1349
1350 return new_fndecl;
1351 }
1352
1353 /* Make a copy of the body of FN so that it can be inserted inline in
1354 another function. */
1355
1356 static tree
1357 copy_generic_body (copy_body_data *id)
1358 {
1359 tree body;
1360 tree fndecl = id->src_fn;
1361
1362 body = DECL_SAVED_TREE (fndecl);
1363 walk_tree (&body, copy_body_r, id, NULL);
1364
1365 return body;
1366 }
1367
1368 static tree
1369 copy_body (copy_body_data *id, gcov_type count, int frequency,
1370 basic_block entry_block_map, basic_block exit_block_map)
1371 {
1372 tree fndecl = id->src_fn;
1373 tree body;
1374
1375 /* If this body has a CFG, walk CFG and copy. */
1376 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1377 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1378
1379 return body;
1380 }
1381
1382 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
1383 defined in function FN, or of a data member thereof. */
1384
1385 static bool
1386 self_inlining_addr_expr (tree value, tree fn)
1387 {
1388 tree var;
1389
1390 if (TREE_CODE (value) != ADDR_EXPR)
1391 return false;
1392
1393 var = get_base_address (TREE_OPERAND (value, 0));
1394
1395 return var && auto_var_in_fn_p (var, fn);
1396 }
1397
1398 static void
1399 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
1400 basic_block bb, tree *vars)
1401 {
1402 tree init_stmt;
1403 tree var;
1404 tree var_sub;
1405 tree rhs = value;
1406 tree def = (gimple_in_ssa_p (cfun)
1407 ? gimple_default_def (id->src_cfun, p) : NULL);
1408
1409 if (value
1410 && value != error_mark_node
1411 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
1412 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
1413
1414 /* If the parameter is never assigned to and has no SSA_NAMEs created,
1415 we may not need to create a new variable here at all. Instead, we may
1416 be able to just use the argument value. */
1417 if (TREE_READONLY (p)
1418 && !TREE_ADDRESSABLE (p)
1419 && value && !TREE_SIDE_EFFECTS (value)
1420 && !def)
1421 {
1422 /* We may produce non-gimple trees by adding NOPs or introduce
1423 invalid sharing when the operand is not really constant.
1424 It is no big deal to prohibit constant propagation here, as
1425 we will constant propagate in the DOM1 pass anyway. */
1426 if (is_gimple_min_invariant (value)
1427 && useless_type_conversion_p (TREE_TYPE (p),
1428 TREE_TYPE (value))
1429 /* We have to be very careful about ADDR_EXPR. Make sure
1430 the base variable isn't a local variable of the inlined
1431 function, e.g., when doing recursive inlining, direct or
1432 mutually-recursive or whatever, which is why we don't
1433 just test whether fn == current_function_decl. */
1434 && ! self_inlining_addr_expr (value, fn))
1435 {
1436 insert_decl_map (id, p, value);
1437 return;
1438 }
1439 }
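
/* Two illustrative cases (sketches). For

     int get (const int n) { return n + 1; }

   inlined at get (5), the invariant 5 can stand in for n directly and
   no temporary is needed. The ADDR_EXPR check matters for recursive
   inlining: in

     int f (int *p) { int v = 0; if (*p > 0) f (&v); return v; }

   the argument &v of the inner call names a local of the very function
   being inlined, so substituting it directly would bypass the remapping
   of v; self_inlining_addr_expr catches this and forces a real
   temporary. */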
1440
1441 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1442 here since the type of this decl must be visible to the calling
1443 function. */
1444 var = copy_decl_to_var (p, id);
1445 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
1446 {
1447 get_var_ann (var);
1448 add_referenced_var (var);
1449 }
1450
1451 /* See if the frontend wants to pass this by invisible reference. If
1452 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1453 replace uses of the PARM_DECL with dereferences. */
1454 if (TREE_TYPE (var) != TREE_TYPE (p)
1455 && POINTER_TYPE_P (TREE_TYPE (var))
1456 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1457 {
1458 insert_decl_map (id, var, var);
1459 var_sub = build_fold_indirect_ref (var);
1460 }
1461 else
1462 var_sub = var;
1463
1464 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1465 that way, when the PARM_DECL is encountered, it will be
1466 automatically replaced by the VAR_DECL. */
1467 insert_decl_map (id, p, var_sub);
1468
1469 /* Declare this new variable. */
1470 TREE_CHAIN (var) = *vars;
1471 *vars = var;
1472
1473 /* Make gimplifier happy about this variable. */
1474 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1475
1476 /* Even if P was TREE_READONLY, the new VAR should not be.
1477 In the original code, we would have constructed a
1478 temporary, and then the function body would have never
1479 changed the value of P. However, now, we will be
1480 constructing VAR directly. The constructor body may
1481 change its value multiple times as it is being
1482 constructed. Therefore, it must not be TREE_READONLY;
1483 the back-end assumes that a TREE_READONLY variable is
1484 assigned to only once. */
1485 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1486 TREE_READONLY (var) = 0;
1487
1488 /* If there is no setup required and we are in SSA, take the easy route
1489 of replacing all SSA names representing the function parameter by the
1490 SSA name passed to the function.
1491
1492 We need to construct a map for the variable anyway, as it might be
1493 used in different SSA names when the parameter is set in the function.
1494
1495 FIXME: This usually kills the last connection between the inlined
1496 function parameter and the actual value in debug info. Can we do
1497 better here? If we just inserted the statement, copy propagation
1498 would kill it anyway, as it always did in older versions of GCC.
1499
1500 We might want to introduce the notion that a single SSA_NAME can
1501 represent multiple variables for purposes of debugging. */
1502 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
1503 && (TREE_CODE (rhs) == SSA_NAME
1504 || is_gimple_min_invariant (rhs))
1505 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1506 {
1507 insert_decl_map (id, def, rhs);
1508 return;
1509 }
1510
1511 /* If the value of the argument is never used, don't bother initializing
1512 it. */
1513 if (gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
1514 {
1515 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
1516 return;
1517 }
1518
1519 /* Initialize this VAR_DECL from the equivalent argument. Convert
1520 the argument to the proper type in case it was promoted. */
1521 if (value)
1522 {
1523 block_stmt_iterator bsi = bsi_last (bb);
1524
1525 if (rhs == error_mark_node)
1526 {
1527 insert_decl_map (id, p, var_sub);
1528 return;
1529 }
1530
1531 STRIP_USELESS_TYPE_CONVERSION (rhs);
1532
1533 /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
1534 keep our trees in gimple form. */
1535 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
1536 {
1537 def = remap_ssa_name (def, id);
1538 init_stmt = build_gimple_modify_stmt (def, rhs);
1539 SSA_NAME_DEF_STMT (def) = init_stmt;
1540 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
1541 set_default_def (var, NULL);
1542 }
1543 else
1544 init_stmt = build_gimple_modify_stmt (var, rhs);
1545
1546 /* If we did not create a gimple value and we did not create a gimple
1547 cast of a gimple value, then we will need to gimplify INIT_STMT
1548 at the end. Note that is_gimple_cast only checks the outer
1549 tree code, not its operand. Thus the explicit check that its
1550 operand is a gimple value. */
1551 if ((!is_gimple_val (rhs)
1552 && (!is_gimple_cast (rhs)
1553 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1554 || !is_gimple_reg (var))
1555 {
1556 tree_stmt_iterator i;
1557
1558 push_gimplify_context ();
1559 gimplify_stmt (&init_stmt);
1560 if (gimple_in_ssa_p (cfun)
1561 && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
1562 {
1563 /* The replacement can expose previously unreferenced
1564 variables. */
1565 for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
1566 find_new_referenced_vars (tsi_stmt_ptr (i));
1567 }
1568 pop_gimplify_context (NULL);
1569 }
1570
1571 /* If VAR represents a zero-sized variable, it's possible that the
1572 assignment statement may result in no gimple statements. */
1573 if (init_stmt)
1574 bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1575 if (gimple_in_ssa_p (cfun))
1576 for (;!bsi_end_p (bsi); bsi_next (&bsi))
1577 mark_symbols_for_renaming (bsi_stmt (bsi));
1578 }
1579 }
1580
1581 /* Generate code to initialize the parameters of the function at the
1582 top of the stack in ID from the CALL_EXPR EXP. */
1583
1584 static void
1585 initialize_inlined_parameters (copy_body_data *id, tree exp,
1586 tree fn, basic_block bb)
1587 {
1588 tree parms;
1589 tree a;
1590 tree p;
1591 tree vars = NULL_TREE;
1592 call_expr_arg_iterator iter;
1593 tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
1594
1595 /* Figure out what the parameters are. */
1596 parms = DECL_ARGUMENTS (fn);
1597
1598 /* Loop through the parameter declarations, replacing each with an
1599 equivalent VAR_DECL, appropriately initialized. */
1600 for (p = parms, a = first_call_expr_arg (exp, &iter); p;
1601 a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
1602 setup_one_parameter (id, p, a, fn, bb, &vars);
1603
1604 /* Initialize the static chain. */
1605 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1606 gcc_assert (fn != current_function_decl);
1607 if (p)
1608 {
1609 /* No static chain? Seems like a bug in tree-nested.c. */
1610 gcc_assert (static_chain);
1611
1612 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1613 }
1614
1615 declare_inline_vars (id->block, vars);
1616 }
1617
1618 /* Declare a return variable to replace the RESULT_DECL for the
1619 function we are calling. An appropriate VAR_DECL is created and
1620 registered as the equivalent of the RESULT_DECL, so that uses of
1621 the RESULT_DECL map to it automatically.
1622
1623 RETURN_SLOT, if non-null, is the place where to store the result. It
1624 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1625 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
1626
1627 The return value is a (possibly null) value that is the result of the
1628 function as seen by the callee. *USE_P is a (possibly null) value that
1629 holds the result as seen by the caller. */
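
/* Source-level illustration of the return-slot case (a sketch):

     struct big { int a[32]; };
     extern struct big make (void);
     void g (void) { struct big b = make (); }

   With CALL_EXPR_RETURN_SLOT_OPT, the inlined body of make stores
   directly into b, so the callee's RESULT_DECL is mapped to b itself
   (or to *&b when the result is returned by invisible reference). */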
1630
1631 static tree
1632 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1633 tree *use_p)
1634 {
1635 tree callee = id->src_fn;
1636 tree caller = id->dst_fn;
1637 tree result = DECL_RESULT (callee);
1638 tree callee_type = TREE_TYPE (result);
1639 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1640 tree var, use;
1641
1642 /* We don't need to do anything for functions that don't return
1643 anything. */
1644 if (!result || VOID_TYPE_P (callee_type))
1645 {
1646 *use_p = NULL_TREE;
1647 return NULL_TREE;
1648 }
1649
1650 /* If there was a return slot, then the return value is the
1651 dereferenced address of that object. */
1652 if (return_slot)
1653 {
1654 /* The front end shouldn't have used both return_slot and
1655 a modify expression. */
1656 gcc_assert (!modify_dest);
1657 if (DECL_BY_REFERENCE (result))
1658 {
1659 tree return_slot_addr = build_fold_addr_expr (return_slot);
1660 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1661
1662 /* We are going to construct *&return_slot and we can't do that
1663 for variables believed not to be addressable.
1664
1665 FIXME: This check can possibly trigger, because values returned
1666 via the return slot optimization are not believed by alias
1667 analysis to have their address taken. */
1668 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1669 if (gimple_in_ssa_p (cfun))
1670 {
1671 HOST_WIDE_INT bitsize;
1672 HOST_WIDE_INT bitpos;
1673 tree offset;
1674 enum machine_mode mode;
1675 int unsignedp;
1676 int volatilep;
1677 tree base;
1678 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1679 &offset,
1680 &mode, &unsignedp, &volatilep,
1681 false);
1682 if (TREE_CODE (base) == INDIRECT_REF)
1683 base = TREE_OPERAND (base, 0);
1684 if (TREE_CODE (base) == SSA_NAME)
1685 base = SSA_NAME_VAR (base);
1686 mark_sym_for_renaming (base);
1687 }
1688 var = return_slot_addr;
1689 }
1690 else
1691 {
1692 var = return_slot;
1693 gcc_assert (TREE_CODE (var) != SSA_NAME);
1694 }
1695 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1696 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1697 && !DECL_GIMPLE_REG_P (result)
1698 && DECL_P (var))
1699 DECL_GIMPLE_REG_P (var) = 0;
1700 use = NULL;
1701 goto done;
1702 }
1703
1704 /* All types requiring non-trivial constructors should have been handled. */
1705 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1706
1707 /* Attempt to avoid creating a new temporary variable. */
1708 if (modify_dest
1709 && TREE_CODE (modify_dest) != SSA_NAME)
1710 {
1711 bool use_it = false;
1712
1713 /* We can't use MODIFY_DEST if there's type promotion involved. */
1714 if (!useless_type_conversion_p (callee_type, caller_type))
1715 use_it = false;
1716
1717 /* ??? If we're assigning to a variable sized type, then we must
1718 reuse the destination variable, because we've no good way to
1719 create variable sized temporaries at this point. */
1720 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1721 use_it = true;
1722
1723 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1724 reuse it as the result of the call directly. Don't do this if
1725 it would promote MODIFY_DEST to addressable. */
1726 else if (TREE_ADDRESSABLE (result))
1727 use_it = false;
1728 else
1729 {
1730 tree base_m = get_base_address (modify_dest);
1731
1732 /* If the base isn't a decl, then it's a pointer, and we don't
1733 know where that's going to go. */
1734 if (!DECL_P (base_m))
1735 use_it = false;
1736 else if (is_global_var (base_m))
1737 use_it = false;
1738 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1739 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1740 && !DECL_GIMPLE_REG_P (result)
1741 && DECL_GIMPLE_REG_P (base_m))
1742 use_it = false;
1743 else if (!TREE_ADDRESSABLE (base_m))
1744 use_it = true;
1745 }
1746
1747 if (use_it)
1748 {
1749 var = modify_dest;
1750 use = NULL;
1751 goto done;
1752 }
1753 }
1754
1755 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1756
1757 var = copy_result_decl_to_var (result, id);
1758 if (gimple_in_ssa_p (cfun))
1759 {
1760 get_var_ann (var);
1761 add_referenced_var (var);
1762 }
1763
1764 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1765 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1766 = tree_cons (NULL_TREE, var,
1767 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1768
1769 /* Do not have the rest of GCC warn about this variable as it should
1770 not be visible to the user. */
1771 TREE_NO_WARNING (var) = 1;
1772
1773 declare_inline_vars (id->block, var);
1774
1775 /* Build the use expr. If the return type of the function was
1776 promoted, convert it back to the expected type. */
1777 use = var;
1778 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
1779 use = fold_convert (caller_type, var);
1780
1781 STRIP_USELESS_TYPE_CONVERSION (use);
1782
1783 if (DECL_BY_REFERENCE (result))
1784 var = build_fold_addr_expr (var);
1785
1786 done:
1787 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1788 way, when the RESULT_DECL is encountered, it will be
1789 automatically replaced by the VAR_DECL. */
1790 insert_decl_map (id, result, var);
1791
1792 /* Remember this so we can ignore it in remap_decls. */
1793 id->retvar = var;
1794
1795 *use_p = use;
1796 return var;
1797 }
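
/* For illustration only (temporaries such as D.1234 are hypothetical):
   given a caller statement

     x = foo ();

   where foo's body ends in "return expr;", the variable declared here
   yields, once the body has been copied,

     D.1234 = expr;    the remapped RESULT_DECL
     x = D.1234;       *USE_P replaces the CALL_EXPR

   unless MODIFY_DEST ("x" above) can safely be reused, in which case
   the temporary is elided and the store goes directly into "x".  */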
1798
1799 /* Returns nonzero if a function can be inlined as a tree. */
1800
1801 bool
1802 tree_inlinable_function_p (tree fn)
1803 {
1804 return inlinable_function_p (fn);
1805 }
1806
1807 static const char *inline_forbidden_reason;
1808
1809 static tree
1810 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1811 void *fnp)
1812 {
1813 tree node = *nodep;
1814 tree fn = (tree) fnp;
1815 tree t;
1816
1817 switch (TREE_CODE (node))
1818 {
1819 case CALL_EXPR:
1820 /* Refuse to inline an alloca call unless the user explicitly forced
1821 it, as this may change the program's memory overhead drastically
1822 when the function using alloca is called in a loop. In the GCC
1823 included in SPEC2000, inlining into schedule_block caused it to
1824 require 2GB of RAM instead of 256MB. */
1825 if (alloca_call_p (node)
1826 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1827 {
1828 inline_forbidden_reason
1829 = G_("function %q+F can never be inlined because it uses "
1830 "alloca (override using the always_inline attribute)");
1831 return node;
1832 }
1833 t = get_callee_fndecl (node);
1834 if (! t)
1835 break;
1836
1837 /* We cannot inline functions that call setjmp. */
1838 if (setjmp_call_p (t))
1839 {
1840 inline_forbidden_reason
1841 = G_("function %q+F can never be inlined because it uses setjmp");
1842 return node;
1843 }
1844
1845 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1846 switch (DECL_FUNCTION_CODE (t))
1847 {
1848 /* We cannot inline functions that take a variable number of
1849 arguments. */
1850 case BUILT_IN_VA_START:
1851 case BUILT_IN_STDARG_START:
1852 case BUILT_IN_NEXT_ARG:
1853 case BUILT_IN_VA_END:
1854 inline_forbidden_reason
1855 = G_("function %q+F can never be inlined because it "
1856 "uses variable argument lists");
1857 return node;
1858
1859 case BUILT_IN_LONGJMP:
1860 /* We can't inline functions that call __builtin_longjmp at
1861 all. The non-local goto machinery really requires the
1862 destination be in a different function. If we allow the
1863 function calling __builtin_longjmp to be inlined into the
1864 function calling __builtin_setjmp, Things will Go Awry. */
1865 inline_forbidden_reason
1866 = G_("function %q+F can never be inlined because "
1867 "it uses setjmp-longjmp exception handling");
1868 return node;
1869
1870 case BUILT_IN_NONLOCAL_GOTO:
1871 /* Similarly. */
1872 inline_forbidden_reason
1873 = G_("function %q+F can never be inlined because "
1874 "it uses non-local goto");
1875 return node;
1876
1877 case BUILT_IN_RETURN:
1878 case BUILT_IN_APPLY_ARGS:
1879 /* If a __builtin_apply_args caller were inlined,
1880 it would be saving the arguments of the function it has
1881 been inlined into. Similarly __builtin_return would
1882 return from the function the callee has been inlined into. */
1883 inline_forbidden_reason
1884 = G_("function %q+F can never be inlined because "
1885 "it uses __builtin_return or __builtin_apply_args");
1886 return node;
1887
1888 default:
1889 break;
1890 }
1891 break;
1892
1893 case GOTO_EXPR:
1894 t = TREE_OPERAND (node, 0);
1895
1896 /* We will not inline a function which uses computed goto. The
1897 addresses of its local labels, which may be tucked into
1898 global storage, are of course not constant across
1899 instantiations, which causes unexpected behavior. */
1900 if (TREE_CODE (t) != LABEL_DECL)
1901 {
1902 inline_forbidden_reason
1903 = G_("function %q+F can never be inlined "
1904 "because it contains a computed goto");
1905 return node;
1906 }
1907 break;
1908
1909 case LABEL_EXPR:
1910 t = TREE_OPERAND (node, 0);
1911 if (DECL_NONLOCAL (t))
1912 {
1913 /* We cannot inline a function that receives a non-local goto
1914 because we cannot remap the destination label used in the
1915 function that is performing the non-local goto. */
1916 inline_forbidden_reason
1917 = G_("function %q+F can never be inlined "
1918 "because it receives a non-local goto");
1919 return node;
1920 }
1921 break;
1922
1923 case RECORD_TYPE:
1924 case UNION_TYPE:
1925 /* We cannot inline a function of the form
1926
1927 void F (int i) { struct S { int ar[i]; } s; }
1928
1929 Attempting to do so produces a catch-22.
1930 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1931 UNION_TYPE nodes, then it goes into infinite recursion on a
1932 structure containing a pointer to its own type. If it doesn't,
1933 then the type node for S doesn't get adjusted properly when
1934 F is inlined.
1935
1936 ??? This is likely no longer true, but it's too late in the 4.0
1937 cycle to try to find out. This should be checked for 4.1. */
1938 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1939 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1940 {
1941 inline_forbidden_reason
1942 = G_("function %q+F can never be inlined "
1943 "because it uses variable sized variables");
1944 return node;
1945 }
1946
1947 default:
1948 break;
1949 }
1950
1951 return NULL_TREE;
1952 }
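
/* For illustration only ("use" is a hypothetical callee): a function
   such as

     void f (int n)
     {
       char *buf = alloca (n);
       use (buf);
     }

   is rejected by the walker above unless f is marked always_inline;
   the same holds for functions calling setjmp, va_start,
   __builtin_longjmp or __builtin_return, and for functions containing
   computed gotos or receiving non-local gotos.  */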
1953
1954 /* Return a subexpression that makes FNDECL uninlinable (e.g. an alloca call), if any. */
1955 static tree
1956 inline_forbidden_p (tree fndecl)
1957 {
1958 location_t saved_loc = input_location;
1959 block_stmt_iterator bsi;
1960 basic_block bb;
1961 tree ret = NULL_TREE;
1962
1963 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1964 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1965 {
1966 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1967 inline_forbidden_p_1, fndecl);
1968 if (ret)
1969 goto egress;
1970 }
1971
1972 egress:
1973 input_location = saved_loc;
1974 return ret;
1975 }
1976
1977 /* Returns nonzero if FN is a function that does not have any
1978 fundamental inline blocking properties. */
1979
1980 static bool
1981 inlinable_function_p (tree fn)
1982 {
1983 bool inlinable = true;
1984 bool do_warning;
1985 tree always_inline;
1986
1987 /* If we've already decided this function shouldn't be inlined,
1988 there's no need to check again. */
1989 if (DECL_UNINLINABLE (fn))
1990 return false;
1991
1992 /* We only warn for functions declared `inline' by the user. */
1993 do_warning = (warn_inline
1994 && DECL_INLINE (fn)
1995 && DECL_DECLARED_INLINE_P (fn)
1996 && !DECL_IN_SYSTEM_HEADER (fn));
1997
1998 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
1999
2000 if (flag_really_no_inline
2001 && always_inline == NULL)
2002 {
2003 if (do_warning)
2004 warning (OPT_Winline, "function %q+F can never be inlined because it "
2005 "is suppressed using -fno-inline", fn);
2006 inlinable = false;
2007 }
2008
2009 /* Don't auto-inline anything that might not be bound within
2010 this unit of translation. */
2011 else if (!DECL_DECLARED_INLINE_P (fn)
2012 && DECL_REPLACEABLE_P (fn))
2013 inlinable = false;
2014
2015 else if (!function_attribute_inlinable_p (fn))
2016 {
2017 if (do_warning)
2018 warning (OPT_Winline, "function %q+F can never be inlined because it "
2019 "uses attributes conflicting with inlining", fn);
2020 inlinable = false;
2021 }
2022
2023 /* If we don't have the function body available, we can't inline it.
2024 However, this should not be recorded since we also get here for
2025 forward declared inline functions. Therefore, return at once. */
2026 if (!DECL_SAVED_TREE (fn))
2027 return false;
2028
2029 /* If we're not inlining at all, then we cannot inline this function. */
2030 else if (!flag_inline_trees)
2031 inlinable = false;
2032
2033 /* Only try to inline functions if DECL_INLINE is set. This should be
2034 true for all functions declared `inline', and for all other functions
2035 as well with -finline-functions.
2036
2037 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
2038 it's the front-end that must set DECL_INLINE in this case, because
2039 dwarf2out loses if a function that does not have DECL_INLINE set is
2040 inlined anyway. That is why we have both DECL_INLINE and
2041 DECL_DECLARED_INLINE_P. */
2042 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
2043 here should be redundant. */
2044 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
2045 inlinable = false;
2046
2047 else if (inline_forbidden_p (fn))
2048 {
2049 /* See if we should warn about uninlinable functions. Previously,
2050 some of these warnings would be issued while trying to expand
2051 the function inline, but that would cause multiple warnings
2052 about functions that would for example call alloca. But since
2053 this a property of the function, just one warning is enough.
2054 As a bonus we can now give more details about the reason why a
2055 function is not inlinable. */
2056 if (always_inline)
2057 sorry (inline_forbidden_reason, fn);
2058 else if (do_warning)
2059 warning (OPT_Winline, inline_forbidden_reason, fn);
2060
2061 inlinable = false;
2062 }
2063
2064 /* Squirrel away the result so that we don't have to check again. */
2065 DECL_UNINLINABLE (fn) = !inlinable;
2066
2067 return inlinable;
2068 }
2069
2070 /* Estimate the cost of a memory move. Use the machine-dependent
2071 word size and take a possible memcpy call into account. */
2072
2073 int
2074 estimate_move_cost (tree type)
2075 {
2076 HOST_WIDE_INT size;
2077
2078 size = int_size_in_bytes (type);
2079
2080 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
2081 /* Cost of a memcpy call, 3 arguments and the call. */
2082 return 4;
2083 else
2084 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
2085 }
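
/* A worked example (MOVE_MAX_PIECES and MOVE_RATIO are target-dependent;
   the values 8 and 4 below are purely illustrative): a 16-byte struct
   costs (16 + 8 - 1) / 8 == 2, whereas a 64-byte struct exceeds the
   8 * 4 == 32 byte threshold and is charged the flat memcpy cost of 4.  */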
2086
2087 /* Arguments for estimate_num_insns_1. */
2088
2089 struct eni_data
2090 {
2091 /* Used to return the number of insns. */
2092 int count;
2093
2094 /* Weights of various constructs. */
2095 eni_weights *weights;
2096 };
2097
2098 /* Used by estimate_num_insns. Estimate the number of instructions
2099 that will be generated for the given statement. */
2100
2101 static tree
2102 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
2103 {
2104 struct eni_data *d = data;
2105 tree x = *tp;
2106 unsigned cost;
2107
2108 if (IS_TYPE_OR_DECL_P (x))
2109 {
2110 *walk_subtrees = 0;
2111 return NULL;
2112 }
2113 /* Assume that constants and references count as nothing. Their cost
2114 should be dominated by the operations among them, which we count
2115 later, and they are a common target of CSE and similar optimizations. */
2116 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
2117 return NULL;
2118
2119 switch (TREE_CODE (x))
2120 {
2121 /* Containers have no cost. */
2122 case TREE_LIST:
2123 case TREE_VEC:
2124 case BLOCK:
2125 case COMPONENT_REF:
2126 case BIT_FIELD_REF:
2127 case INDIRECT_REF:
2128 case ALIGN_INDIRECT_REF:
2129 case MISALIGNED_INDIRECT_REF:
2130 case ARRAY_REF:
2131 case ARRAY_RANGE_REF:
2132 case OBJ_TYPE_REF:
2133 case EXC_PTR_EXPR: /* ??? */
2134 case FILTER_EXPR: /* ??? */
2135 case COMPOUND_EXPR:
2136 case BIND_EXPR:
2137 case WITH_CLEANUP_EXPR:
2138 case NOP_EXPR:
2139 case CONVERT_EXPR:
2140 case VIEW_CONVERT_EXPR:
2141 case SAVE_EXPR:
2142 case ADDR_EXPR:
2143 case COMPLEX_EXPR:
2144 case RANGE_EXPR:
2145 case CASE_LABEL_EXPR:
2146 case SSA_NAME:
2147 case CATCH_EXPR:
2148 case EH_FILTER_EXPR:
2149 case STATEMENT_LIST:
2150 case ERROR_MARK:
2151 case NON_LVALUE_EXPR:
2152 case FDESC_EXPR:
2153 case VA_ARG_EXPR:
2154 case TRY_CATCH_EXPR:
2155 case TRY_FINALLY_EXPR:
2156 case LABEL_EXPR:
2157 case GOTO_EXPR:
2158 case RETURN_EXPR:
2159 case EXIT_EXPR:
2160 case LOOP_EXPR:
2161 case PHI_NODE:
2162 case WITH_SIZE_EXPR:
2163 case OMP_CLAUSE:
2164 case OMP_RETURN:
2165 case OMP_CONTINUE:
2166 case OMP_SECTIONS_SWITCH:
2167 case OMP_ATOMIC_STORE:
2168 break;
2169
2170 /* We don't account for constants for now. Assume that their cost is
2171 amortized by the operations that use them. We may reconsider this
2172 decision once we are able to optimize the tree before estimating its
2173 size and break out static initializers. */
2174 case IDENTIFIER_NODE:
2175 case INTEGER_CST:
2176 case REAL_CST:
2177 case FIXED_CST:
2178 case COMPLEX_CST:
2179 case VECTOR_CST:
2180 case STRING_CST:
2181 *walk_subtrees = 0;
2182 return NULL;
2183
2184 /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
2185 case CHANGE_DYNAMIC_TYPE_EXPR:
2186 *walk_subtrees = 0;
2187 return NULL;
2188
2189 /* Try to estimate the cost of assignments. We have three cases to
2190 deal with:
2191 1) Simple assignments to registers;
2192 2) Stores to things that must live in memory. This includes
2193 "normal" stores to scalars, but also assignments of large
2194 structures, or constructors of big arrays;
2195 3) TARGET_EXPRs.
2196
2197 Let us look at the first two cases, assuming we have "a = b + C":
2198 <GIMPLE_MODIFY_STMT <var_decl "a">
2199 <plus_expr <var_decl "b"> <constant C>>
2200 If "a" is a GIMPLE register, the assignment to it is free on almost
2201 any target, because "a" usually ends up in a real register. Hence
2202 the only cost of this expression comes from the PLUS_EXPR, and we
2203 can ignore the GIMPLE_MODIFY_STMT.
2204 If "a" is not a GIMPLE register, the assignment to "a" will most
2205 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2206 of moving something into "a", which we compute using the function
2207 estimate_move_cost.
2208
2209 The third case deals with TARGET_EXPRs, for which the semantics are
2210 that a temporary is assigned, unless the TARGET_EXPR itself is being
2211 assigned to something else. In the latter case we do not need the
2212 temporary. E.g. in:
2213 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2214 GIMPLE_MODIFY_STMT is free. */
2215 case INIT_EXPR:
2216 case GIMPLE_MODIFY_STMT:
2217 /* Is the right-hand side a TARGET_EXPR? */
2218 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2219 break;
2220 /* ... fall through ... */
2221
2222 case TARGET_EXPR:
2223 x = GENERIC_TREE_OPERAND (x, 0);
2224 /* Is this an assignment to a register? */
2225 if (is_gimple_reg (x))
2226 break;
2227 /* Otherwise it's a store, so fall through to compute the move cost. */
2228
2229 case CONSTRUCTOR:
2230 d->count += estimate_move_cost (TREE_TYPE (x));
2231 break;
2232
2233 /* Assign cost of 1 to usual operations.
2234 ??? We may consider mapping RTL costs to this. */
2235 case COND_EXPR:
2236 case VEC_COND_EXPR:
2237
2238 case PLUS_EXPR:
2239 case POINTER_PLUS_EXPR:
2240 case MINUS_EXPR:
2241 case MULT_EXPR:
2242
2243 case FIXED_CONVERT_EXPR:
2244 case FIX_TRUNC_EXPR:
2245
2246 case NEGATE_EXPR:
2247 case FLOAT_EXPR:
2248 case MIN_EXPR:
2249 case MAX_EXPR:
2250 case ABS_EXPR:
2251
2252 case LSHIFT_EXPR:
2253 case RSHIFT_EXPR:
2254 case LROTATE_EXPR:
2255 case RROTATE_EXPR:
2256 case VEC_LSHIFT_EXPR:
2257 case VEC_RSHIFT_EXPR:
2258
2259 case BIT_IOR_EXPR:
2260 case BIT_XOR_EXPR:
2261 case BIT_AND_EXPR:
2262 case BIT_NOT_EXPR:
2263
2264 case TRUTH_ANDIF_EXPR:
2265 case TRUTH_ORIF_EXPR:
2266 case TRUTH_AND_EXPR:
2267 case TRUTH_OR_EXPR:
2268 case TRUTH_XOR_EXPR:
2269 case TRUTH_NOT_EXPR:
2270
2271 case LT_EXPR:
2272 case LE_EXPR:
2273 case GT_EXPR:
2274 case GE_EXPR:
2275 case EQ_EXPR:
2276 case NE_EXPR:
2277 case ORDERED_EXPR:
2278 case UNORDERED_EXPR:
2279
2280 case UNLT_EXPR:
2281 case UNLE_EXPR:
2282 case UNGT_EXPR:
2283 case UNGE_EXPR:
2284 case UNEQ_EXPR:
2285 case LTGT_EXPR:
2286
2287 case CONJ_EXPR:
2288
2289 case PREDECREMENT_EXPR:
2290 case PREINCREMENT_EXPR:
2291 case POSTDECREMENT_EXPR:
2292 case POSTINCREMENT_EXPR:
2293
2294 case ASM_EXPR:
2295
2296 case REALIGN_LOAD_EXPR:
2297
2298 case REDUC_MAX_EXPR:
2299 case REDUC_MIN_EXPR:
2300 case REDUC_PLUS_EXPR:
2301 case WIDEN_SUM_EXPR:
2302 case DOT_PROD_EXPR:
2303 case VEC_WIDEN_MULT_HI_EXPR:
2304 case VEC_WIDEN_MULT_LO_EXPR:
2305 case VEC_UNPACK_HI_EXPR:
2306 case VEC_UNPACK_LO_EXPR:
2307 case VEC_UNPACK_FLOAT_HI_EXPR:
2308 case VEC_UNPACK_FLOAT_LO_EXPR:
2309 case VEC_PACK_TRUNC_EXPR:
2310 case VEC_PACK_SAT_EXPR:
2311 case VEC_PACK_FIX_TRUNC_EXPR:
2312
2313 case WIDEN_MULT_EXPR:
2314
2315 case VEC_EXTRACT_EVEN_EXPR:
2316 case VEC_EXTRACT_ODD_EXPR:
2317 case VEC_INTERLEAVE_HIGH_EXPR:
2318 case VEC_INTERLEAVE_LOW_EXPR:
2319
2320 case RESX_EXPR:
2321 d->count += 1;
2322 break;
2323
2324 case SWITCH_EXPR:
2325 /* TODO: Cost of a switch should be derived from the number of
2326 branches. */
2327 d->count += d->weights->switch_cost;
2328 break;
2329
2330 /* A few special cases of expensive operations. This is useful
2331 to avoid inlining functions that have too many of these. */
2332 case TRUNC_DIV_EXPR:
2333 case CEIL_DIV_EXPR:
2334 case FLOOR_DIV_EXPR:
2335 case ROUND_DIV_EXPR:
2336 case EXACT_DIV_EXPR:
2337 case TRUNC_MOD_EXPR:
2338 case CEIL_MOD_EXPR:
2339 case FLOOR_MOD_EXPR:
2340 case ROUND_MOD_EXPR:
2341 case RDIV_EXPR:
2342 d->count += d->weights->div_mod_cost;
2343 break;
2344 case CALL_EXPR:
2345 {
2346 tree decl = get_callee_fndecl (x);
2347
2348 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
2349 cost = d->weights->target_builtin_call_cost;
2350 else
2351 cost = d->weights->call_cost;
2352
2353 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2354 switch (DECL_FUNCTION_CODE (decl))
2355 {
2356 case BUILT_IN_CONSTANT_P:
2357 *walk_subtrees = 0;
2358 return NULL_TREE;
2359 case BUILT_IN_EXPECT:
2360 return NULL_TREE;
2361 /* Prefetch instruction is not expensive. */
2362 case BUILT_IN_PREFETCH:
2363 cost = 1;
2364 break;
2365 default:
2366 break;
2367 }
2368
2369 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2370 which uses the function declaration to figure out the arguments. */
2371 if (!decl)
2372 {
2373 tree a;
2374 call_expr_arg_iterator iter;
2375 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2376 d->count += estimate_move_cost (TREE_TYPE (a));
2377 }
2378 else
2379 {
2380 tree arg;
2381 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2382 d->count += estimate_move_cost (TREE_TYPE (arg));
2383 }
2384
2385 d->count += cost;
2386 break;
2387 }
2388
2389 case OMP_PARALLEL:
2390 case OMP_FOR:
2391 case OMP_SECTIONS:
2392 case OMP_SINGLE:
2393 case OMP_SECTION:
2394 case OMP_MASTER:
2395 case OMP_ORDERED:
2396 case OMP_CRITICAL:
2397 case OMP_ATOMIC:
2398 case OMP_ATOMIC_LOAD:
2399 /* OpenMP directives are generally very expensive. */
2400 d->count += d->weights->omp_cost;
2401 break;
2402
2403 default:
2404 gcc_unreachable ();
2405 }
2406 return NULL;
2407 }
2408
2409 /* Estimate number of instructions that will be created by expanding EXPR.
2410 WEIGHTS contains weights attributed to various constructs. */
2411
2412 int
2413 estimate_num_insns (tree expr, eni_weights *weights)
2414 {
2415 struct pointer_set_t *visited_nodes;
2416 basic_block bb;
2417 block_stmt_iterator bsi;
2418 struct function *my_function;
2419 struct eni_data data;
2420
2421 data.count = 0;
2422 data.weights = weights;
2423
2424 /* If we're given an entire function, walk the CFG. */
2425 if (TREE_CODE (expr) == FUNCTION_DECL)
2426 {
2427 my_function = DECL_STRUCT_FUNCTION (expr);
2428 gcc_assert (my_function && my_function->cfg);
2429 visited_nodes = pointer_set_create ();
2430 FOR_EACH_BB_FN (bb, my_function)
2431 {
2432 for (bsi = bsi_start (bb);
2433 !bsi_end_p (bsi);
2434 bsi_next (&bsi))
2435 {
2436 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2437 &data, visited_nodes);
2438 }
2439 }
2440 pointer_set_destroy (visited_nodes);
2441 }
2442 else
2443 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2444
2445 return data.count;
2446 }
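
/* A minimal usage sketch, assuming FN is a FUNCTION_DECL with a CFG:

     int size = estimate_num_insns (fn, &eni_size_weights);
     int time = estimate_num_insns (fn, &eni_time_weights);

   Both calls walk the same statements but weigh them differently; see
   init_inline_once below for the weight values.  */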
2447
2448 /* Initializes weights used by estimate_num_insns. */
2449
2450 void
2451 init_inline_once (void)
2452 {
2453 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2454 eni_inlining_weights.target_builtin_call_cost = 1;
2455 eni_inlining_weights.div_mod_cost = 10;
2456 eni_inlining_weights.switch_cost = 1;
2457 eni_inlining_weights.omp_cost = 40;
2458
2459 eni_size_weights.call_cost = 1;
2460 eni_size_weights.target_builtin_call_cost = 1;
2461 eni_size_weights.div_mod_cost = 1;
2462 eni_size_weights.switch_cost = 10;
2463 eni_size_weights.omp_cost = 40;
2464
2465 /* Estimating the time for a call is difficult, since we have no idea
2466 what the called function does. In the current uses of eni_time_weights,
2467 underestimating the cost does less harm than overestimating it, so
2468 we choose a rather small value here. */
2469 eni_time_weights.call_cost = 10;
2470 eni_time_weights.target_builtin_call_cost = 10;
2471 eni_time_weights.div_mod_cost = 10;
2472 eni_time_weights.switch_cost = 4;
2473 eni_time_weights.omp_cost = 40;
2474 }
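
/* For example, with the weights above a single TRUNC_DIV_EXPR counts
   1 toward the size estimate but 10 toward the time estimate, while a
   SWITCH_EXPR is charged 10 for size but only 4 for time: a division
   is one compact but slow instruction, whereas a switch typically
   expands into a series of compares or a jump table.  */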
2475
2476 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2477 static void
2478 add_lexical_block (tree current_block, tree new_block)
2479 {
2480 tree *blk_p;
2481
2482 /* Walk to the last sub-block. */
2483 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2484 *blk_p;
2485 blk_p = &BLOCK_CHAIN (*blk_p))
2486 ;
2487 *blk_p = new_block;
2488 BLOCK_SUPERCONTEXT (new_block) = current_block;
2489 }
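
/* For illustration: after add_lexical_block (outer, inner), INNER is
   chained after any existing sub-blocks of OUTER,

     BLOCK_SUBBLOCKS (outer): b1 -> b2 -> ... -> inner

   and BLOCK_SUPERCONTEXT (inner) == outer.  */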
2490
2491 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2492
2493 static bool
2494 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2495 {
2496 copy_body_data *id;
2497 tree t;
2498 tree use_retvar;
2499 tree fn;
2500 struct pointer_map_t *st;
2501 tree return_slot;
2502 tree modify_dest;
2503 location_t saved_location;
2504 struct cgraph_edge *cg_edge;
2505 const char *reason;
2506 basic_block return_block;
2507 edge e;
2508 block_stmt_iterator bsi, stmt_bsi;
2509 bool successfully_inlined = FALSE;
2510 bool purge_dead_abnormal_edges;
2511 tree t_step;
2512 tree var;
2513
2514 /* See what we've got. */
2515 id = (copy_body_data *) data;
2516 t = *tp;
2517
2518 /* Set input_location here so we get the right instantiation context
2519 if we call instantiate_decl from inlinable_function_p. */
2520 saved_location = input_location;
2521 if (EXPR_HAS_LOCATION (t))
2522 input_location = EXPR_LOCATION (t);
2523
2524 /* From here on, we're only interested in CALL_EXPRs. */
2525 if (TREE_CODE (t) != CALL_EXPR)
2526 goto egress;
2527
2528 /* First, see if we can figure out what function is being called.
2529 If we cannot, then there is no hope of inlining the function. */
2530 fn = get_callee_fndecl (t);
2531 if (!fn)
2532 goto egress;
2533
2534 /* Turn forward declarations into real ones. */
2535 fn = cgraph_node (fn)->decl;
2536
2537 /* If fn is a declaration of a function in a nested scope that was
2538 globally declared inline, we don't set its DECL_INITIAL.
2539 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2540 C++ front-end uses it for cdtors to refer to their internal
2541 declarations, which are not real functions. Fortunately those
2542 don't have trees to be saved, so we can tell by checking their
2543 DECL_SAVED_TREE. */
2544 if (! DECL_INITIAL (fn)
2545 && DECL_ABSTRACT_ORIGIN (fn)
2546 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2547 fn = DECL_ABSTRACT_ORIGIN (fn);
2548
2549 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2550 Remove this check once that is fixed. */
2551 if (!id->dst_node->analyzed)
2552 goto egress;
2553
2554 cg_edge = cgraph_edge (id->dst_node, stmt);
2555
2556 /* Constant propagation on arguments done during previous inlining
2557 may create new direct calls. Produce an edge for such a call. */
2558 if (!cg_edge)
2559 {
2560 struct cgraph_node *dest = cgraph_node (fn);
2561
2562 /* We have a missing edge in the callgraph. This can happen when
2563 previous inlining turned an indirect call into a direct call by
2564 constant propagating arguments. In all other cases we hit a bug
2565 (incorrect node sharing is the most common reason for missing edges). */
2566 gcc_assert (dest->needed || !flag_unit_at_a_time);
2567 cgraph_create_edge (id->dst_node, dest, stmt,
2568 bb->count, CGRAPH_FREQ_BASE,
2569 bb->loop_depth)->inline_failed
2570 = N_("originally indirect function call not considered for inlining");
2571 if (dump_file)
2572 {
2573 fprintf (dump_file, "Created new direct edge to %s",
2574 cgraph_node_name (dest));
2575 }
2576 goto egress;
2577 }
2578
2579 /* Don't try to inline functions that are not well-suited to
2580 inlining. */
2581 if (!cgraph_inline_p (cg_edge, &reason))
2582 {
2583 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2584 /* Avoid warnings during early inline pass. */
2585 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2586 {
2587 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2588 sorry ("called from here");
2589 }
2590 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2591 && !DECL_IN_SYSTEM_HEADER (fn)
2592 && strlen (reason)
2593 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2594 /* Avoid warnings during early inline pass. */
2595 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2596 {
2597 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2598 fn, reason);
2599 warning (OPT_Winline, "called from here");
2600 }
2601 goto egress;
2602 }
2603 fn = cg_edge->callee->decl;
2604
2605 #ifdef ENABLE_CHECKING
2606 if (cg_edge->callee->decl != id->dst_node->decl)
2607 verify_cgraph_node (cg_edge->callee);
2608 #endif
2609
2610 /* We will be inlining this callee. */
2611 id->eh_region = lookup_stmt_eh_region (stmt);
2612
2613 /* Split the block holding the CALL_EXPR. */
2614 e = split_block (bb, stmt);
2615 bb = e->src;
2616 return_block = e->dest;
2617 remove_edge (e);
2618
2619 /* split_block splits after the statement; work around this by
2620 moving the call into the second block manually. Not pretty,
2621 but seems easier than doing the CFG manipulation by hand
2622 when the CALL_EXPR is in the last statement of BB. */
2623 stmt_bsi = bsi_last (bb);
2624 bsi_remove (&stmt_bsi, false);
2625
2626 /* If the CALL_EXPR was in the last statement of BB, it may have
2627 been the source of abnormal edges. In this case, schedule
2628 the removal of dead abnormal edges. */
2629 bsi = bsi_start (return_block);
2630 if (bsi_end_p (bsi))
2631 {
2632 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2633 purge_dead_abnormal_edges = true;
2634 }
2635 else
2636 {
2637 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2638 purge_dead_abnormal_edges = false;
2639 }
2640
2641 stmt_bsi = bsi_start (return_block);
2642
2643 /* Build a block containing code to initialize the arguments, the
2644 actual inline expansion of the body, and a label for the return
2645 statements within the function to jump to. The type of the
2646 statement expression is the return type of the function call. */
2647 id->block = make_node (BLOCK);
2648 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2649 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2650 add_lexical_block (TREE_BLOCK (stmt), id->block);
2651
2652 /* Local declarations will be replaced by their equivalents in this
2653 map. */
2654 st = id->decl_map;
2655 id->decl_map = pointer_map_create ();
2656
2657 /* Record the function we are about to inline. */
2658 id->src_fn = fn;
2659 id->src_node = cg_edge->callee;
2660 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2661 id->call_expr = t;
2662
2663 gcc_assert (!id->src_cfun->after_inlining);
2664
2665 initialize_inlined_parameters (id, t, fn, bb);
2666
2667 if (DECL_INITIAL (fn))
2668 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2669
2670 /* Return statements in the function body will be replaced by jumps
2671 to the RET_LABEL. */
2672
2673 gcc_assert (DECL_INITIAL (fn));
2674 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2675
2676 /* Find the lhs to which the result of this call is assigned. */
2677 return_slot = NULL;
2678 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2679 {
2680 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2681
2682 /* The function which we are inlining might not return a value,
2683 in which case we should issue a warning that the function
2684 does not return a value. In that case the optimizers will
2685 also see that the variable to which the value is assigned was
2686 never initialized; we do not want a second warning about that
2687 uninitialized variable. */
2688 if (DECL_P (modify_dest))
2689 TREE_NO_WARNING (modify_dest) = 1;
2690 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2691 {
2692 return_slot = modify_dest;
2693 modify_dest = NULL;
2694 }
2695 }
2696 else
2697 modify_dest = NULL;
2698
2699 /* Declare the return variable for the function. */
2700 declare_return_variable (id, return_slot,
2701 modify_dest, &use_retvar);
2702
2703 /* This is it. Duplicate the callee body. Assume callee is
2704 pre-gimplified. Note that we must not alter the caller
2705 function in any way before this point, as this CALL_EXPR may be
2706 a self-referential call; if we're calling ourselves, we need to
2707 duplicate our body before altering anything. */
2708 copy_body (id, bb->count, bb->frequency, bb, return_block);
2709
2710 /* Add local vars in this inlined callee to caller. */
2711 t_step = id->src_cfun->unexpanded_var_list;
2712 for (; t_step; t_step = TREE_CHAIN (t_step))
2713 {
2714 var = TREE_VALUE (t_step);
2715 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2716 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2717 cfun->unexpanded_var_list);
2718 else
2719 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2720 cfun->unexpanded_var_list);
2721 }
2722
2723 /* Clean up. */
2724 pointer_map_destroy (id->decl_map);
2725 id->decl_map = st;
2726
2727 /* If the inlined function returns a result that we care about,
2728 clobber the CALL_EXPR with a reference to the return variable. */
2729 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2730 {
2731 *tp = use_retvar;
2732 if (gimple_in_ssa_p (cfun))
2733 {
2734 update_stmt (stmt);
2735 mark_symbols_for_renaming (stmt);
2736 }
2737 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2738 }
2739 else
2740 /* We're modifying a BSI owned by gimple_expand_calls_inline ();
2741 bsi_remove () will leave the iterator in a sane state. */
2742 {
2743 /* Handle the case of inlining a function that is missing a return
2744 statement, so its return value becomes undefined. */
2745 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2746 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2747 {
2748 tree name = GIMPLE_STMT_OPERAND (stmt, 0);
2749 tree var = SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0));
2750 tree def = gimple_default_def (cfun, var);
2751
2752 /* If the variable already has a default (undefined) definition,
2753 make this name undefined by copying that definition. */
2754 if (def)
2755 {
2756 GIMPLE_STMT_OPERAND (stmt, 1) = def;
2757 update_stmt (stmt);
2758 }
2759 /* Otherwise make this variable undefined. */
2760 else
2761 {
2762 bsi_remove (&stmt_bsi, true);
2763 set_default_def (var, name);
2764 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2765 }
2766 }
2767 else
2768 bsi_remove (&stmt_bsi, true);
2769 }
2770
2771 if (purge_dead_abnormal_edges)
2772 tree_purge_dead_abnormal_call_edges (return_block);
2773
2774 /* If the value of the new expression is ignored, that's OK. We
2775 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2776 the equivalent inlined version either. */
2777 TREE_USED (*tp) = 1;
2778
2779 /* Output the inlining info for this abstract function, since it has been
2780 inlined. If we don't do this now, we can lose the information about the
2781 variables in the function when the blocks get blown away as soon as we
2782 remove the cgraph node. */
2783 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2784
2785 /* Update callgraph if needed. */
2786 cgraph_remove_node (cg_edge->callee);
2787
2788 id->block = NULL_TREE;
2789 successfully_inlined = TRUE;
2790
2791 egress:
2792 input_location = saved_location;
2793 return successfully_inlined;
2794 }
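
/* For illustration only (temporaries such as x.1 and D.1235 are
   hypothetical): inlining "y = f (a)" where f is

     int f (int x) { return x + 1; }

   roughly produces

     <bb holding the call>:
       x.1 = a;             from initialize_inlined_parameters
       D.1235 = x.1 + 1;    copied body; the RETURN_EXPR became an
                            assignment to the return variable
     <return_block>:
       y = D.1235;          the CALL_EXPR replaced by use_retvar  */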
2795
2796 /* Expand call statements reachable from STMT_P.
2797 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2798 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in ().
2799 Unfortunately we cannot use that function here because we need a
2800 pointer to the CALL_EXPR, not the tree itself. */
2801
2802 static bool
2803 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2804 {
2805 block_stmt_iterator bsi;
2806
2807 /* Register specific tree functions. */
2808 tree_register_cfg_hooks ();
2809 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2810 {
2811 tree *expr_p = bsi_stmt_ptr (bsi);
2812 tree stmt = *expr_p;
2813
2814 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2815 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2816 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2817 expr_p = &TREE_OPERAND (*expr_p, 0);
2818 if (TREE_CODE (*expr_p) == CALL_EXPR)
2819 if (expand_call_inline (bb, stmt, expr_p, id))
2820 return true;
2821 }
2822 return false;
2823 }
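
/* For illustration, the three statement shapes scanned above:

     foo (a);                            CALL_EXPR at the top level
     x = foo (a);                        nested in a GIMPLE_MODIFY_STMT
     x = WITH_SIZE_EXPR <foo (a), n>;    variable-sized result

   In each case we need the address of the CALL_EXPR operand itself,
   which is why get_call_expr_in () is not usable here.  */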
2824
2825 /* Walk all basic blocks created after FIRST and try to fold every statement
2826 in the STATEMENTS pointer set. */
2827 static void
2828 fold_marked_statements (int first, struct pointer_set_t *statements)
2829 {
2830 for (; first < n_basic_blocks; first++)
2831 if (BASIC_BLOCK (first))
2832 {
2833 block_stmt_iterator bsi;
2834 for (bsi = bsi_start (BASIC_BLOCK (first));
2835 !bsi_end_p (bsi); bsi_next (&bsi))
2836 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2837 {
2838 tree old_stmt = bsi_stmt (bsi);
2839 if (fold_stmt (bsi_stmt_ptr (bsi)))
2840 {
2841 update_stmt (bsi_stmt (bsi));
2842 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2843 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2844 }
2845 }
2846 }
2847 }
2848
2849 /* Return true if BB has at least one abnormal outgoing edge. */
2850
2851 static inline bool
2852 has_abnormal_outgoing_edge_p (basic_block bb)
2853 {
2854 edge e;
2855 edge_iterator ei;
2856
2857 FOR_EACH_EDGE (e, ei, bb->succs)
2858 if (e->flags & EDGE_ABNORMAL)
2859 return true;
2860
2861 return false;
2862 }
2863
2864 /* Expand calls to inline functions in the body of FN. */
2865
2866 unsigned int
2867 optimize_inline_calls (tree fn)
2868 {
2869 copy_body_data id;
2870 tree prev_fn;
2871 basic_block bb;
2872 int last = n_basic_blocks;
2873 /* There is no point in performing inlining if errors have already
2874 occurred -- and we might crash if we try to inline invalid
2875 code. */
2876 if (errorcount || sorrycount)
2877 return 0;
2878
2879 /* Clear out ID. */
2880 memset (&id, 0, sizeof (id));
2881
2882 id.src_node = id.dst_node = cgraph_node (fn);
2883 id.dst_fn = fn;
2884 /* Or any functions that aren't finished yet. */
2885 prev_fn = NULL_TREE;
2886 if (current_function_decl)
2887 {
2888 id.dst_fn = current_function_decl;
2889 prev_fn = current_function_decl;
2890 }
2891
2892 id.copy_decl = copy_decl_maybe_to_var;
2893 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2894 id.transform_new_cfg = false;
2895 id.transform_return_to_modify = true;
2896 id.transform_lang_insert_block = false;
2897 id.statements_to_fold = pointer_set_create ();
2898
2899 push_gimplify_context ();
2900
2901 /* We make no attempts to keep dominance info up-to-date. */
2902 free_dominance_info (CDI_DOMINATORS);
2903 free_dominance_info (CDI_POST_DOMINATORS);
2904
2905 /* Reach the trees by walking over the CFG, and note the
2906 enclosing basic-blocks in the call edges. */
2907 /* We walk the blocks going forward, because inlined function bodies
2908 will split id->current_basic_block, and the new blocks will
2909 follow it; we'll trudge through them, processing their CALL_EXPRs
2910 along the way. */
2911 FOR_EACH_BB (bb)
2912 gimple_expand_calls_inline (bb, &id);
2913
2914 pop_gimplify_context (NULL);
2915
2916 #ifdef ENABLE_CHECKING
2917 {
2918 struct cgraph_edge *e;
2919
2920 verify_cgraph_node (id.dst_node);
2921
2922 /* Double check that we inlined everything we are supposed to inline. */
2923 for (e = id.dst_node->callees; e; e = e->next_callee)
2924 gcc_assert (e->inline_failed);
2925 }
2926 #endif
2927
2928 /* Fold the statements before compacting/renumbering the basic blocks. */
2929 fold_marked_statements (last, id.statements_to_fold);
2930 pointer_set_destroy (id.statements_to_fold);
2931
2932 /* Renumber the (code) basic_blocks consecutively. */
2933 compact_blocks ();
2934 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2935 number_blocks (fn);
2936
2937 /* We are not going to keep the cgraph edges up to date.
2938 Remove them so they won't confuse us. */
2939 cgraph_node_remove_callees (id.dst_node);
2940
2941 fold_cond_expr_cond ();
2942 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2943 not possible yet - the IPA passes might make various functions not
2944 throw, and they don't proactively update local EH info. This is
2945 done later in the fixup_cfg pass, which also executes the verification. */
2946 return (TODO_update_ssa | TODO_cleanup_cfg
2947 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2948 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
2949 }
2950
2951 /* FN is a function that has a complete body, and CLONE is a function whose
2952 body is to be set to a copy of FN, mapping argument declarations according
2953 to the ARG_MAP pointer map. */
2954
2955 void
2956 clone_body (tree clone, tree fn, void *arg_map)
2957 {
2958 copy_body_data id;
2959
2960 /* Clone the body, as if we were making an inline call. But, remap the
2961 parameters in the callee to the parameters of the caller. */
2962 memset (&id, 0, sizeof (id));
2963 id.src_fn = fn;
2964 id.dst_fn = clone;
2965 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2966 id.decl_map = (struct pointer_map_t *)arg_map;
2967
2968 id.copy_decl = copy_decl_no_change;
2969 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2970 id.transform_new_cfg = true;
2971 id.transform_return_to_modify = false;
2972 id.transform_lang_insert_block = true;
2973
2974 /* We're not inside any EH region. */
2975 id.eh_region = -1;
2976
2977 /* Actually copy the body. */
2978 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2979 }
2980
2981 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2982
2983 tree
2984 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2985 {
2986 enum tree_code code = TREE_CODE (*tp);
2987 enum tree_code_class cl = TREE_CODE_CLASS (code);
2988
2989 /* We make copies of most nodes. */
2990 if (IS_EXPR_CODE_CLASS (cl)
2991 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2992 || code == TREE_LIST
2993 || code == TREE_VEC
2994 || code == TYPE_DECL
2995 || code == OMP_CLAUSE)
2996 {
2997 /* Because the chain gets clobbered when we make a copy, we save it
2998 here. */
2999 tree chain = NULL_TREE, new;
3000
3001 if (!GIMPLE_TUPLE_P (*tp))
3002 chain = TREE_CHAIN (*tp);
3003
3004 /* Copy the node. */
3005 new = copy_node (*tp);
3006
3007 /* Propagate mudflap marked-ness. */
3008 if (flag_mudflap && mf_marked_p (*tp))
3009 mf_mark (new);
3010
3011 *tp = new;
3012
3013 /* Now, restore the chain, if appropriate. That will cause
3014 walk_tree to walk into the chain as well. */
3015 if (code == PARM_DECL
3016 || code == TREE_LIST
3017 || code == OMP_CLAUSE)
3018 TREE_CHAIN (*tp) = chain;
3019
3020 /* For now, we don't update BLOCKs when we make copies. So, we
3021 have to nullify all BIND_EXPRs. */
3022 if (TREE_CODE (*tp) == BIND_EXPR)
3023 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3024 }
3025 else if (code == CONSTRUCTOR)
3026 {
3027 /* CONSTRUCTOR nodes need special handling because
3028 we need to duplicate the vector of elements. */
3029 tree new;
3030
3031 new = copy_node (*tp);
3032
3033 /* Propagate mudflap marked-ness. */
3034 if (flag_mudflap && mf_marked_p (*tp))
3035 mf_mark (new);
3036
3037 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
3038 CONSTRUCTOR_ELTS (*tp));
3039 *tp = new;
3040 }
3041 else if (TREE_CODE_CLASS (code) == tcc_type)
3042 *walk_subtrees = 0;
3043 else if (TREE_CODE_CLASS (code) == tcc_declaration)
3044 *walk_subtrees = 0;
3045 else if (TREE_CODE_CLASS (code) == tcc_constant)
3046 *walk_subtrees = 0;
3047 else
3048 gcc_assert (code != STATEMENT_LIST);
3049 return NULL_TREE;
3050 }
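
/* A minimal usage sketch: copy_tree_r is meant to be driven by
   walk_tree, e.g.

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Note that this copies nodes but does not remap declarations;
   unsave_expr_now below is the full copy-and-remap entry point.  */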
3051
3052 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
3053 information indicating to what new SAVE_EXPR this one should be mapped,
3054 use that one. Otherwise, create a new node and enter it in ST. */
3056
3057 static void
3058 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
3059 {
3060 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3061 tree *n;
3062 tree t;
3063
3064 /* See if we already encountered this SAVE_EXPR. */
3065 n = (tree *) pointer_map_contains (st, *tp);
3066
3067 /* If we didn't already remap this SAVE_EXPR, do so now. */
3068 if (!n)
3069 {
3070 t = copy_node (*tp);
3071
3072 /* Remember this SAVE_EXPR. */
3073 *pointer_map_insert (st, *tp) = t;
3074 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3075 *pointer_map_insert (st, t) = t;
3076 }
3077 else
3078 {
3079 /* We've already walked into this SAVE_EXPR; don't do it again. */
3080 *walk_subtrees = 0;
3081 t = *n;
3082 }
3083
3084 /* Replace this SAVE_EXPR with the copy. */
3085 *tp = t;
3086 }
3087
3088 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
3089 copies the declaration and enters it in the decl map in DATA (which is
3090 really a `copy_body_data *'). */
3091
3092 static tree
3093 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3094 void *data)
3095 {
3096 copy_body_data *id = (copy_body_data *) data;
3097
3098 /* Don't walk into types. */
3099 if (TYPE_P (*tp))
3100 *walk_subtrees = 0;
3101
3102 else if (TREE_CODE (*tp) == LABEL_EXPR)
3103 {
3104 tree decl = TREE_OPERAND (*tp, 0);
3105
3106 /* Copy the decl and remember the copy. */
3107 insert_decl_map (id, decl, id->copy_decl (decl, id));
3108 }
3109
3110 return NULL_TREE;
3111 }
3112
3113 /* Perform any modifications to EXPR required when it is unsaved. Does
3114 not recurse into EXPR's subtrees. */
3115
3116 static void
3117 unsave_expr_1 (tree expr)
3118 {
3119 switch (TREE_CODE (expr))
3120 {
3121 case TARGET_EXPR:
3122 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3123 It's OK for this to happen if it was part of a subtree that
3124 isn't immediately expanded, such as operand 2 of another
3125 TARGET_EXPR. */
3126 if (TREE_OPERAND (expr, 1))
3127 break;
3128
3129 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3130 TREE_OPERAND (expr, 3) = NULL_TREE;
3131 break;
3132
3133 default:
3134 break;
3135 }
3136 }
3137
3138 /* Called via walk_tree when an expression is unsaved. Using the
3139 decl map in DATA (which is really a `copy_body_data *'),
3140 remaps all local declarations to appropriate replacements. */
3141
3142 static tree
3143 unsave_r (tree *tp, int *walk_subtrees, void *data)
3144 {
3145 copy_body_data *id = (copy_body_data *) data;
3146 struct pointer_map_t *st = id->decl_map;
3147 tree *n;
3148
3149 /* Only a local declaration (variable or label). */
3150 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3151 || TREE_CODE (*tp) == LABEL_DECL)
3152 {
3153 /* Lookup the declaration. */
3154 n = (tree *) pointer_map_contains (st, *tp);
3155
3156 /* If it's there, remap it. */
3157 if (n)
3158 *tp = *n;
3159 }
3160
3161 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3162 copy_statement_list (tp);
3163 else if (TREE_CODE (*tp) == BIND_EXPR)
3164 copy_bind_expr (tp, walk_subtrees, id);
3165 else if (TREE_CODE (*tp) == SAVE_EXPR)
3166 remap_save_expr (tp, st, walk_subtrees);
3167 else
3168 {
3169 copy_tree_r (tp, walk_subtrees, NULL);
3170
3171 /* Do whatever unsaving is required. */
3172 unsave_expr_1 (*tp);
3173 }
3174
3175 /* Keep iterating. */
3176 return NULL_TREE;
3177 }
3178
3179 /* Copies everything in EXPR and replaces variables, labels
3180 and SAVE_EXPRs local to EXPR. */
3181
3182 tree
3183 unsave_expr_now (tree expr)
3184 {
3185 copy_body_data id;
3186
3187 /* There's nothing to do for NULL_TREE. */
3188 if (expr == 0)
3189 return expr;
3190
3191 /* Set up ID. */
3192 memset (&id, 0, sizeof (id));
3193 id.src_fn = current_function_decl;
3194 id.dst_fn = current_function_decl;
3195 id.decl_map = pointer_map_create ();
3196
3197 id.copy_decl = copy_decl_no_change;
3198 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3199 id.transform_new_cfg = false;
3200 id.transform_return_to_modify = false;
3201 id.transform_lang_insert_block = false;
3202
3203 /* Walk the tree once to find local labels. */
3204 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3205
3206 /* Walk the tree again, copying, remapping, and unsaving. */
3207 walk_tree (&expr, unsave_r, &id, NULL);
3208
3209 /* Clean up. */
3210 pointer_map_destroy (id.decl_map);
3211
3212 return expr;
3213 }
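
/* A usage sketch: a front end that needs to emit a tree more than once
   can call

     tree second_use = unsave_expr_now (expr);

   so that SAVE_EXPRs, local labels and BIND_EXPR variables are no
   longer shared between the two copies.  */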
3214
3215 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3216
3217 static tree
3218 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3219 {
3220 if (*tp == data)
3221 return (tree) data;
3222 else
3223 return NULL;
3224 }
3225
3226 bool
3227 debug_find_tree (tree top, tree search)
3228 {
3229 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3230 }
3231
3232
3233 /* Declare the variables created by the inliner. Add all the variables in
3234 VARS to BLOCK. */
3235
3236 static void
3237 declare_inline_vars (tree block, tree vars)
3238 {
3239 tree t;
3240 for (t = vars; t; t = TREE_CHAIN (t))
3241 {
3242 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3243 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3244 cfun->unexpanded_var_list =
3245 tree_cons (NULL_TREE, t,
3246 cfun->unexpanded_var_list);
3247 }
3248
3249 if (block)
3250 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3251 }
3252
3253
3254 /* Finish copying DECL, which originally was in ID->src_fn but now
3255 will be in ID->dst_fn. COPY is the newly created copy; fill in the
3256 bookkeeping fields shared by all copied declarations. */
3257
3258 static tree
3259 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3260 {
3261 /* Don't generate debug information for the copy if we wouldn't have
3262 generated it for the original either. */
3263 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3264 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3265
3266 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3267 declaration inspired this copy. */
3268 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3269
3270 /* The new variable/label has no RTL, yet. */
3271 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3272 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3273 SET_DECL_RTL (copy, NULL_RTX);
3274
3275 /* These args would always appear unused, if not for this. */
3276 TREE_USED (copy) = 1;
3277
3278 /* Set the context for the new declaration. */
3279 if (!DECL_CONTEXT (decl))
3280 /* Globals stay global. */
3281 ;
3282 else if (DECL_CONTEXT (decl) != id->src_fn)
3283 /* Things that weren't in the scope of the function we're inlining
3284 from aren't in the scope we're inlining to, either. */
3285 ;
3286 else if (TREE_STATIC (decl))
3287 /* Function-scoped static variables should stay in the original
3288 function. */
3289 ;
3290 else
3291 /* Ordinary automatic local variables are now in the scope of the
3292 new function. */
3293 DECL_CONTEXT (copy) = id->dst_fn;
3294
3295 return copy;
3296 }
3297
3298 static tree
3299 copy_decl_to_var (tree decl, copy_body_data *id)
3300 {
3301 tree copy, type;
3302
3303 gcc_assert (TREE_CODE (decl) == PARM_DECL
3304 || TREE_CODE (decl) == RESULT_DECL);
3305
3306 type = TREE_TYPE (decl);
3307
3308 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3309 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3310 TREE_READONLY (copy) = TREE_READONLY (decl);
3311 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3312 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3313 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3314
3315 return copy_decl_for_dup_finish (id, decl, copy);
3316 }
3317
3318 /* Like copy_decl_to_var, but create a return slot object instead of a
3319 pointer variable for return by invisible reference. */
3320
3321 static tree
3322 copy_result_decl_to_var (tree decl, copy_body_data *id)
3323 {
3324 tree copy, type;
3325
3326 gcc_assert (TREE_CODE (decl) == PARM_DECL
3327 || TREE_CODE (decl) == RESULT_DECL);
3328
3329 type = TREE_TYPE (decl);
3330 if (DECL_BY_REFERENCE (decl))
3331 type = TREE_TYPE (type);
3332
3333 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3334 TREE_READONLY (copy) = TREE_READONLY (decl);
3335 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3336 if (!DECL_BY_REFERENCE (decl))
3337 {
3338 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3339 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3340 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3341 }
3342
3343 return copy_decl_for_dup_finish (id, decl, copy);
3344 }
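
/* For example: for a callee declared "struct S f (void)" that returns
   by invisible reference, DECL_RESULT has type "struct S *" with
   DECL_BY_REFERENCE set; the variable created here gets the pointed-to
   type "struct S", i.e. it stands for the return slot itself rather
   than for the pointer to it.  */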
3345
3346
3347 static tree
3348 copy_decl_no_change (tree decl, copy_body_data *id)
3349 {
3350 tree copy;
3351
3352 copy = copy_node (decl);
3353
3354 /* The COPY is not abstract; it will be generated in DST_FN. */
3355 DECL_ABSTRACT (copy) = 0;
3356 lang_hooks.dup_lang_specific_decl (copy);
3357
3358 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3359 been taken; it's for internal bookkeeping in expand_goto_internal. */
3360 if (TREE_CODE (copy) == LABEL_DECL)
3361 {
3362 TREE_ADDRESSABLE (copy) = 0;
3363 LABEL_DECL_UID (copy) = -1;
3364 }
3365
3366 return copy_decl_for_dup_finish (id, decl, copy);
3367 }
3368
3369 static tree
3370 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3371 {
3372 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3373 return copy_decl_to_var (decl, id);
3374 else
3375 return copy_decl_no_change (decl, id);
3376 }
3377
3378 /* Return a copy of the function's argument tree. */
3379 static tree
3380 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3381 {
3382 tree *arg_copy, *parg;
3383
3384 arg_copy = &orig_parm;
3385 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3386 {
3387 tree new = remap_decl (*parg, id);
3388 lang_hooks.dup_lang_specific_decl (new);
3389 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3390 *parg = new;
3391 }
3392 return orig_parm;
3393 }
3394
3395 /* Return a copy of the function's static chain. */
3396 static tree
3397 copy_static_chain (tree static_chain, copy_body_data * id)
3398 {
3399 tree *chain_copy, *pvar;
3400
3401 chain_copy = &static_chain;
3402 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3403 {
3404 tree new = remap_decl (*pvar, id);
3405 lang_hooks.dup_lang_specific_decl (new);
3406 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3407 *pvar = new;
3408 }
3409 return static_chain;
3410 }
3411
3412 /* Return true if the function is allowed to be versioned.
3413 This is a guard for the versioning functionality. */
3414 bool
3415 tree_versionable_function_p (tree fndecl)
3416 {
3417 if (fndecl == NULL_TREE)
3418 return false;
3419 /* ??? There are cases where a function is
3420 uninlinable but can be versioned. */
3421 if (!tree_inlinable_function_p (fndecl))
3422 return false;
3423
3424 return true;
3425 }
3426
3427 /* Create a copy of a function's tree.
3428 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3429 of the original function and the new copied function
3430 respectively. In case we want to replace a DECL
3431 tree with another tree while duplicating the function's
3432 body, TREE_MAP represents the mapping between these
3433 trees. If UPDATE_CLONES is set, the call_stmt fields
3434 of edges of clones of the function will be updated. */
3435 void
3436 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3437 bool update_clones)
3438 {
3439 struct cgraph_node *old_version_node;
3440 struct cgraph_node *new_version_node;
3441 copy_body_data id;
3442 tree p;
3443 unsigned i;
3444 struct ipa_replace_map *replace_info;
3445 basic_block old_entry_block;
3446 tree t_step;
3447 tree old_current_function_decl = current_function_decl;
3448
3449 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3450 && TREE_CODE (new_decl) == FUNCTION_DECL);
3451 DECL_POSSIBLY_INLINED (old_decl) = 1;
3452
3453 old_version_node = cgraph_node (old_decl);
3454 new_version_node = cgraph_node (new_decl);
3455
3456 DECL_ARTIFICIAL (new_decl) = 1;
3457 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3458
3459 /* Prepare the data structures for the tree copy. */
3460 memset (&id, 0, sizeof (id));
3461
3462 /* Generate a new name for the new version. */
3463 if (!update_clones)
3464 {
3465 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3466 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3467 SET_DECL_RTL (new_decl, NULL_RTX);
3468 id.statements_to_fold = pointer_set_create ();
3469 }
3470
3471 id.decl_map = pointer_map_create ();
3472 id.src_fn = old_decl;
3473 id.dst_fn = new_decl;
3474 id.src_node = old_version_node;
3475 id.dst_node = new_version_node;
3476 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3477
3478 id.copy_decl = copy_decl_no_change;
3479 id.transform_call_graph_edges
3480 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3481 id.transform_new_cfg = true;
3482 id.transform_return_to_modify = false;
3483 id.transform_lang_insert_block = false;
3484
3485 current_function_decl = new_decl;
3486 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3487 (DECL_STRUCT_FUNCTION (old_decl));
3488 initialize_cfun (new_decl, old_decl,
3489 old_entry_block->count,
3490 old_entry_block->frequency);
3491 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3492
3493 /* Copy the function's static chain. */
3494 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3495 if (p)
3496 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3497 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3498 &id);
3499 /* Copy the function's arguments. */
3500 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3501 DECL_ARGUMENTS (new_decl) =
3502 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3503
3504 /* If there's a tree_map, prepare for substitution. */
3505 if (tree_map)
3506 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3507 {
3508 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3509 if (replace_info->replace_p)
3510 insert_decl_map (&id, replace_info->old_tree,
3511 replace_info->new_tree);
3512 }
3513
3514 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3515
3516 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3517 number_blocks (id.dst_fn);
3518
3519 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3520 /* Add local vars. */
3521 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3522 t_step; t_step = TREE_CHAIN (t_step))
3523 {
3524 tree var = TREE_VALUE (t_step);
3525 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3526 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3527 cfun->unexpanded_var_list);
3528 else
3529 cfun->unexpanded_var_list =
3530 tree_cons (NULL_TREE, remap_decl (var, &id),
3531 cfun->unexpanded_var_list);
3532 }
3533
3534 /* Copy the function's body. */
3535 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3536
3537 if (DECL_RESULT (old_decl) != NULL_TREE)
3538 {
3539 tree *res_decl = &DECL_RESULT (old_decl);
3540 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3541 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3542 }
3543
3544 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3545 number_blocks (new_decl);
3546
3547 /* Clean up. */
3548 pointer_map_destroy (id.decl_map);
3549 if (!update_clones)
3550 {
3551 fold_marked_statements (0, id.statements_to_fold);
3552 pointer_set_destroy (id.statements_to_fold);
3553 fold_cond_expr_cond ();
3554 }
3555 if (gimple_in_ssa_p (cfun))
3556 {
3557 free_dominance_info (CDI_DOMINATORS);
3558 free_dominance_info (CDI_POST_DOMINATORS);
3559 if (!update_clones)
3560 delete_unreachable_blocks ();
3561 update_ssa (TODO_update_ssa);
3562 if (!update_clones)
3563 {
3564 fold_cond_expr_cond ();
3565 if (need_ssa_update_p ())
3566 update_ssa (TODO_update_ssa);
3567 }
3568 }
3569 free_dominance_info (CDI_DOMINATORS);
3570 free_dominance_info (CDI_POST_DOMINATORS);
3571 pop_cfun ();
3572 current_function_decl = old_current_function_decl;
3573 gcc_assert (!current_function_decl
3574 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3575 return;
3576 }
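
/* For illustration only: IPA constant propagation can version

     int f (int x) { return x + 1; }

   with a TREE_MAP entry whose old_tree is the PARM_DECL "x" and whose
   new_tree is the constant 4; uses of "x" in the copied body are then
   remapped to 4, and folding reduces the clone's body to "return 5;".  */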
3577
3578 /* Duplicate a type, fields and all. */
3579
3580 tree
3581 build_duplicate_type (tree type)
3582 {
3583 struct copy_body_data id;
3584
3585 memset (&id, 0, sizeof (id));
3586 id.src_fn = current_function_decl;
3587 id.dst_fn = current_function_decl;
3588 id.src_cfun = cfun;
3589 id.decl_map = pointer_map_create ();
3590 id.copy_decl = copy_decl_no_change;
3591
3592 type = remap_type_1 (type, &id);
3593
3594 pointer_map_destroy (id.decl_map);
3595
3596 return type;
3597 }