gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
44 #include "ggc.h"
45 #include "tree-flow.h"
46 #include "diagnostic.h"
47 #include "except.h"
48 #include "debug.h"
49 #include "pointer-set.h"
50 #include "ipa-prop.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
53 #include "target.h"
54 #include "integrate.h"
55
56 /* I'm not real happy about this, but we need to handle gimple and
57 non-gimple trees. */
58 #include "gimple.h"
59
60 /* Inlining, Cloning, Versioning, Parallelization
61
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX_EXPRs is adjusted accordingly.
68
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
73
74    Versioning: a function body is duplicated and the result is a new
75    function, rather than being inserted into blocks of an existing
76    function as with inlining. Some parameters will become constants.
77
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
81
82    All of these will simultaneously look up any callgraph edges. If
83    we're going to inline the duplicated function body, and the given
84    function has some cloned callgraph nodes (one for each place this
85    function will be inlined), those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
89
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
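/* Illustrative sketch (an addition for exposition, not from the original
   sources): inlining
     int square (int x) { return x * x; }
   into a caller "y = square (a);" remaps the PARM_DECL 'x' to a new local
   VAR_DECL initialized from 'a', and the RETURN_EXPR becomes an assignment
   to a return variable that then replaces the call's LHS, roughly:
     x.1 = a;  retval.2 = x.1 * x.1;  y = retval.2;
   The exact temporaries and GIMPLE form differ in practice. */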
91
92 /* To Do:
93
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they
99 are not needed.
100
101 o Provide heuristics to clamp inlining of recursive template
102 calls? */
103
104
105 /* Weights that estimate_num_insns uses for heuristics in inlining. */
106
107 eni_weights eni_inlining_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the size of the
110 produced code. */
111
112 eni_weights eni_size_weights;
113
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
116
117 eni_weights eni_time_weights;
118
119 /* Prototypes. */
120
121 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static tree mark_local_for_remap_r (tree *, int *, void *);
125 static void unsave_expr_1 (tree);
126 static tree unsave_r (tree *, int *, void *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, void *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple remap_gimple_stmt (gimple, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
135
136 /* Insert a tree->tree mapping for ID. Although the name suggests
137    that the trees should be variables, it is used for more than that. */
138
139 void
140 insert_decl_map (copy_body_data *id, tree key, tree value)
141 {
142 *pointer_map_insert (id->decl_map, key) = value;
143
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
146 if (key != value)
147 *pointer_map_insert (id->decl_map, value) = value;
148 }
149
150 /* Construct new SSA name for old NAME. ID is the inline context. */
151
152 static tree
153 remap_ssa_name (tree name, copy_body_data *id)
154 {
155 tree new_tree;
156 tree *n;
157
158 gcc_assert (TREE_CODE (name) == SSA_NAME);
159
160 n = (tree *) pointer_map_contains (id->decl_map, name);
161 if (n)
162 return unshare_expr (*n);
163
164 /* Do not set DEF_STMT yet as the statement is not copied yet. We do
165    that in copy_bb. */
166 new_tree = remap_decl (SSA_NAME_VAR (name), id);
167
168 /* We might have substituted a constant or another SSA_NAME for
169    the variable.
170
171    Replace the SSA name representing the RESULT_DECL by the variable
172    during inlining: this saves us from the need to introduce a PHI node
173    in case the return value is only partly initialized. */
174 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
175 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
176 || !id->transform_return_to_modify))
177 {
178 new_tree = make_ssa_name (new_tree, NULL);
179 insert_decl_map (id, name, new_tree);
180 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
181 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
182 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
183 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
184 {
185 /* By inlining a function having an uninitialized variable, we might
186    extend its lifetime (the variable might get reused). This causes
187    an ICE when we end up extending the lifetime of an SSA name across
188    an abnormal edge, and it also increases register pressure.
189
190    We simply initialize all uninitialized vars with 0, except when
191    we are inlining into the very first BB. We can avoid this for all
192    BBs that are not inside strongly connected regions of the CFG,
193    but this is expensive to test. */
194 if (id->entry_bb
195 && is_gimple_reg (SSA_NAME_VAR (name))
196 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
197 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
198 || EDGE_COUNT (id->entry_bb->preds) != 1))
199 {
200 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
201 gimple init_stmt;
202
203 init_stmt = gimple_build_assign (new_tree,
204 fold_convert (TREE_TYPE (new_tree),
205 integer_zero_node));
206 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
207 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
208 }
209 else
210 {
211 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
212 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
213 == name)
214 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
215 }
216 }
217 }
218 else
219 insert_decl_map (id, name, new_tree);
220 return new_tree;
221 }
222
223 /* Remap DECL during the copying of the BLOCK tree for the function. */
224
225 tree
226 remap_decl (tree decl, copy_body_data *id)
227 {
228 tree *n;
229 tree fn;
230
231 /* We only remap local variables in the current function. */
232 fn = id->src_fn;
233
234 /* See if we have remapped this declaration. */
235
236 n = (tree *) pointer_map_contains (id->decl_map, decl);
237
238 /* If we didn't already have an equivalent for this declaration,
239 create one now. */
240 if (!n)
241 {
242 /* Make a copy of the variable or label. */
243 tree t = id->copy_decl (decl, id);
244
245 /* Remember it, so that if we encounter this local entity again
246 we can reuse this copy. Do this early because remap_type may
247 need this decl for TYPE_STUB_DECL. */
248 insert_decl_map (id, decl, t);
249
250 if (!DECL_P (t))
251 return t;
252
253 /* Remap types, if necessary. */
254 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
255 if (TREE_CODE (t) == TYPE_DECL)
256 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
257
258 /* Remap sizes as necessary. */
259 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
260 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
261
262 /* If this is a field, do likewise for its offset and qualifier. */
263 if (TREE_CODE (t) == FIELD_DECL)
264 {
265 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
266 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
267 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
268 }
269
270 if (cfun && gimple_in_ssa_p (cfun)
271 && (TREE_CODE (t) == VAR_DECL
272 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
273 {
274 tree def = gimple_default_def (id->src_cfun, decl);
275 get_var_ann (t);
276 if (TREE_CODE (decl) != PARM_DECL && def)
277 {
278 tree map = remap_ssa_name (def, id);
279 /* Watch out for RESULT_DECLs whose SSA names map directly
280    to them. */
281 if (TREE_CODE (map) == SSA_NAME
282 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
283 set_default_def (t, map);
284 }
285 add_referenced_var (t);
286 }
287 return t;
288 }
289
290 if (id->do_not_unshare)
291 return *n;
292 else
293 return unshare_expr (*n);
294 }
295
296 static tree
297 remap_type_1 (tree type, copy_body_data *id)
298 {
299 tree new_tree, t;
300
301 /* We do need a copy. Build and register it now. If this is a pointer or
302    reference type, remap the designated type and make a new pointer or
303    reference type. */
304 if (TREE_CODE (type) == POINTER_TYPE)
305 {
306 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
307 TYPE_MODE (type),
308 TYPE_REF_CAN_ALIAS_ALL (type));
309 insert_decl_map (id, type, new_tree);
310 return new_tree;
311 }
312 else if (TREE_CODE (type) == REFERENCE_TYPE)
313 {
314 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
315 TYPE_MODE (type),
316 TYPE_REF_CAN_ALIAS_ALL (type));
317 insert_decl_map (id, type, new_tree);
318 return new_tree;
319 }
320 else
321 new_tree = copy_node (type);
322
323 insert_decl_map (id, type, new_tree);
324
325 /* This is a new type, not a copy of an old type. Need to reassociate
326 variants. We can handle everything except the main variant lazily. */
327 t = TYPE_MAIN_VARIANT (type);
328 if (type != t)
329 {
330 t = remap_type (t, id);
331 TYPE_MAIN_VARIANT (new_tree) = t;
332 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
333 TYPE_NEXT_VARIANT (t) = new_tree;
334 }
335 else
336 {
337 TYPE_MAIN_VARIANT (new_tree) = new_tree;
338 TYPE_NEXT_VARIANT (new_tree) = NULL;
339 }
340
341 if (TYPE_STUB_DECL (type))
342 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
343
344 /* Lazily create pointer and reference types. */
345 TYPE_POINTER_TO (new_tree) = NULL;
346 TYPE_REFERENCE_TO (new_tree) = NULL;
347
348 switch (TREE_CODE (new_tree))
349 {
350 case INTEGER_TYPE:
351 case REAL_TYPE:
352 case FIXED_POINT_TYPE:
353 case ENUMERAL_TYPE:
354 case BOOLEAN_TYPE:
355 t = TYPE_MIN_VALUE (new_tree);
356 if (t && TREE_CODE (t) != INTEGER_CST)
357 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
358
359 t = TYPE_MAX_VALUE (new_tree);
360 if (t && TREE_CODE (t) != INTEGER_CST)
361 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
362 return new_tree;
363
364 case FUNCTION_TYPE:
365 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
366 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
367 return new_tree;
368
369 case ARRAY_TYPE:
370 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
371 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
372 break;
373
374 case RECORD_TYPE:
375 case UNION_TYPE:
376 case QUAL_UNION_TYPE:
377 {
378 tree f, nf = NULL;
379
380 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
381 {
382 t = remap_decl (f, id);
383 DECL_CONTEXT (t) = new_tree;
384 TREE_CHAIN (t) = nf;
385 nf = t;
386 }
387 TYPE_FIELDS (new_tree) = nreverse (nf);
388 }
389 break;
390
391 case OFFSET_TYPE:
392 default:
393 /* Shouldn't have been thought variable sized. */
394 gcc_unreachable ();
395 }
396
397 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
398 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
399
400 return new_tree;
401 }
402
403 tree
404 remap_type (tree type, copy_body_data *id)
405 {
406 tree *node;
407 tree tmp;
408
409 if (type == NULL)
410 return type;
411
412 /* See if we have remapped this type. */
413 node = (tree *) pointer_map_contains (id->decl_map, type);
414 if (node)
415 return *node;
416
417 /* The type only needs remapping if it's variably modified. */
418 if (! variably_modified_type_p (type, id->src_fn))
419 {
420 insert_decl_map (id, type, type);
421 return type;
422 }
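  /* Illustrative note (not from the original sources): a type such as
     "int[n]" for a local variable 'n' is variably modified -- its size
     refers to a DECL of the source function and must be remapped -- while
     a type like "int[10]" is not and is returned unchanged by the early
     exit above. */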
423
424 id->remapping_type_depth++;
425 tmp = remap_type_1 (type, id);
426 id->remapping_type_depth--;
427
428 return tmp;
429 }
430
431 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
432 is NULL or TYPE has not been remapped before. */
433
434 static tree
435 remapped_type (tree type, copy_body_data *id)
436 {
437 tree *node;
438
439 if (type == NULL)
440 return type;
441
442 /* See if we have remapped this type. */
443 node = (tree *) pointer_map_contains (id->decl_map, type);
444 if (node)
445 return *node;
446 else
447 return NULL;
448 }
449
450
451 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS. */
452
453 static bool
454 can_be_nonlocal (tree decl, copy_body_data *id)
455 {
456 /* We can not duplicate function decls. */
457 if (TREE_CODE (decl) == FUNCTION_DECL)
458 return true;
459
460 /* Local static vars must be non-local or we get multiple declaration
461 problems. */
462 if (TREE_CODE (decl) == VAR_DECL
463 && !auto_var_in_fn_p (decl, id->src_fn))
464 return true;
465
466 /* At the moment dwarf2out can handle only these types of nodes. We
467 can support more later. */
468 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
469 return false;
470
471 /* We must use global type. We call remapped_type instead of
472 remap_type since we don't want to remap this type here if it
473 hasn't been remapped before. */
474 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
475 return false;
476
477 /* Without SSA we can't tell if the variable is used. */
478 if (!gimple_in_ssa_p (cfun))
479 return false;
480
481 /* Live variables must be copied so we can attach DECL_RTL. */
482 if (var_ann (decl))
483 return false;
484
485 return true;
486 }
487
488 static tree
489 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
490 {
491 tree old_var;
492 tree new_decls = NULL_TREE;
493
494 /* Remap its variables. */
495 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
496 {
497 tree new_var;
498 tree origin_var = DECL_ORIGIN (old_var);
499
500 if (can_be_nonlocal (old_var, id))
501 {
502 if (TREE_CODE (old_var) == VAR_DECL
503 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
504 cfun->local_decls = tree_cons (NULL_TREE, old_var,
505 cfun->local_decls);
506 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
507 && !DECL_IGNORED_P (old_var)
508 && nonlocalized_list)
509 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
510 continue;
511 }
512
513 /* Remap the variable. */
514 new_var = remap_decl (old_var, id);
515
516 /* If we didn't remap this variable, we can't mess with its
517 TREE_CHAIN. If we remapped this variable to the return slot, it's
518 already declared somewhere else, so don't declare it here. */
519
520 if (new_var == id->retvar)
521 ;
522 else if (!new_var)
523 {
524 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
525 && !DECL_IGNORED_P (old_var)
526 && nonlocalized_list)
527 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
528 }
529 else
530 {
531 gcc_assert (DECL_P (new_var));
532 TREE_CHAIN (new_var) = new_decls;
533 new_decls = new_var;
534 }
535 }
536
537 return nreverse (new_decls);
538 }
539
540 /* Copy the BLOCK to contain remapped versions of the variables
541    therein, and hook the new block into the block tree. */
542
543 static void
544 remap_block (tree *block, copy_body_data *id)
545 {
546 tree old_block;
547 tree new_block;
548 tree fn;
549
550 /* Make the new block. */
551 old_block = *block;
552 new_block = make_node (BLOCK);
553 TREE_USED (new_block) = TREE_USED (old_block);
554 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
555 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
556 BLOCK_NONLOCALIZED_VARS (new_block)
557 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
558 *block = new_block;
559
560 /* Remap its variables. */
561 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
562 &BLOCK_NONLOCALIZED_VARS (new_block),
563 id);
564
565 fn = id->dst_fn;
566
567 if (id->transform_lang_insert_block)
568 id->transform_lang_insert_block (new_block);
569
570 /* Remember the remapped block. */
571 insert_decl_map (id, old_block, new_block);
572 }
573
574 /* Copy the whole block tree and root it in id->block. */
575 static tree
576 remap_blocks (tree block, copy_body_data *id)
577 {
578 tree t;
579 tree new_tree = block;
580
581 if (!block)
582 return NULL;
583
584 remap_block (&new_tree, id);
585 gcc_assert (new_tree != block);
586 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
587 prepend_lexical_block (new_tree, remap_blocks (t, id));
588 /* Blocks are in arbitrary order, but make things slightly prettier and do
589 not swap order when producing a copy. */
590 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
591 return new_tree;
592 }
593
594 static void
595 copy_statement_list (tree *tp)
596 {
597 tree_stmt_iterator oi, ni;
598 tree new_tree;
599
600 new_tree = alloc_stmt_list ();
601 ni = tsi_start (new_tree);
602 oi = tsi_start (*tp);
603 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
604 *tp = new_tree;
605
606 for (; !tsi_end_p (oi); tsi_next (&oi))
607 {
608 tree stmt = tsi_stmt (oi);
609 if (TREE_CODE (stmt) == STATEMENT_LIST)
610 copy_statement_list (&stmt);
611 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
612 }
613 }
614
615 static void
616 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
617 {
618 tree block = BIND_EXPR_BLOCK (*tp);
619 /* Copy (and replace) the statement. */
620 copy_tree_r (tp, walk_subtrees, NULL);
621 if (block)
622 {
623 remap_block (&block, id);
624 BIND_EXPR_BLOCK (*tp) = block;
625 }
626
627 if (BIND_EXPR_VARS (*tp))
628 /* This will remap a lot of the same decls again, but this should be
629 harmless. */
630 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
631 }
632
633
634 /* Create a new gimple_seq by remapping all the statements in BODY
635 using the inlining information in ID. */
636
637 gimple_seq
638 remap_gimple_seq (gimple_seq body, copy_body_data *id)
639 {
640 gimple_stmt_iterator si;
641 gimple_seq new_body = NULL;
642
643 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
644 {
645 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
646 gimple_seq_add_stmt (&new_body, new_stmt);
647 }
648
649 return new_body;
650 }
651
652
653 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
654 block using the mapping information in ID. */
655
656 static gimple
657 copy_gimple_bind (gimple stmt, copy_body_data *id)
658 {
659 gimple new_bind;
660 tree new_block, new_vars;
661 gimple_seq body, new_body;
662
663 /* Copy the statement. Note that we purposely don't use copy_stmt
664 here because we need to remap statements as we copy. */
665 body = gimple_bind_body (stmt);
666 new_body = remap_gimple_seq (body, id);
667
668 new_block = gimple_bind_block (stmt);
669 if (new_block)
670 remap_block (&new_block, id);
671
672 /* This will remap a lot of the same decls again, but this should be
673 harmless. */
674 new_vars = gimple_bind_vars (stmt);
675 if (new_vars)
676 new_vars = remap_decls (new_vars, NULL, id);
677
678 new_bind = gimple_build_bind (new_vars, new_body, new_block);
679
680 return new_bind;
681 }
682
683
684 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
685    'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
686    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
687    recursing into the child nodes of *TP. */
688
689 static tree
690 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
691 {
692 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
693 copy_body_data *id = (copy_body_data *) wi_p->info;
694 tree fn = id->src_fn;
695
696 if (TREE_CODE (*tp) == SSA_NAME)
697 {
698 *tp = remap_ssa_name (*tp, id);
699 *walk_subtrees = 0;
700 return NULL;
701 }
702 else if (auto_var_in_fn_p (*tp, fn))
703 {
704 /* Local variables and labels need to be replaced by equivalent
705 variables. We don't want to copy static variables; there's
706 only one of those, no matter how many times we inline the
707 containing function. Similarly for globals from an outer
708 function. */
709 tree new_decl;
710
711 /* Remap the declaration. */
712 new_decl = remap_decl (*tp, id);
713 gcc_assert (new_decl);
714 /* Replace this variable with the copy. */
715 STRIP_TYPE_NOPS (new_decl);
716 /* ??? The C++ frontend uses void * pointer zero to initialize
717 any other type. This confuses the middle-end type verification.
718 As cloned bodies do not go through gimplification again the fixup
719 there doesn't trigger. */
720 if (TREE_CODE (new_decl) == INTEGER_CST
721 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
722 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
723 *tp = new_decl;
724 *walk_subtrees = 0;
725 }
726 else if (TREE_CODE (*tp) == STATEMENT_LIST)
727 gcc_unreachable ();
728 else if (TREE_CODE (*tp) == SAVE_EXPR)
729 gcc_unreachable ();
730 else if (TREE_CODE (*tp) == LABEL_DECL
731 && (!DECL_CONTEXT (*tp)
732 || decl_function_context (*tp) == id->src_fn))
733 /* These may need to be remapped for EH handling. */
734 *tp = remap_decl (*tp, id);
735 else if (TYPE_P (*tp))
736 /* Types may need remapping as well. */
737 *tp = remap_type (*tp, id);
738 else if (CONSTANT_CLASS_P (*tp))
739 {
740 /* If this is a constant, we have to copy the node iff the type
741 will be remapped. copy_tree_r will not copy a constant. */
742 tree new_type = remap_type (TREE_TYPE (*tp), id);
743
744 if (new_type == TREE_TYPE (*tp))
745 *walk_subtrees = 0;
746
747 else if (TREE_CODE (*tp) == INTEGER_CST)
748 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
749 TREE_INT_CST_HIGH (*tp));
750 else
751 {
752 *tp = copy_node (*tp);
753 TREE_TYPE (*tp) = new_type;
754 }
755 }
756 else
757 {
758 /* Otherwise, just copy the node. Note that copy_tree_r already
759 knows not to copy VAR_DECLs, etc., so this is safe. */
760 if (TREE_CODE (*tp) == INDIRECT_REF)
761 {
762 /* Get rid of *& from inline substitutions that can happen when a
763 pointer argument is an ADDR_EXPR. */
764 tree decl = TREE_OPERAND (*tp, 0);
765 tree *n;
766
767 n = (tree *) pointer_map_contains (id->decl_map, decl);
768 if (n)
769 {
770 tree type, new_tree, old;
771
772 /* If we happen to get an ADDR_EXPR in n->value, strip
773 it manually here as we'll eventually get ADDR_EXPRs
774 which lie about their types pointed to. In this case
775 build_fold_indirect_ref wouldn't strip the
776 INDIRECT_REF, but we absolutely rely on that. As
777 fold_indirect_ref does other useful transformations,
778 try that first, though. */
779 type = TREE_TYPE (TREE_TYPE (*n));
780 new_tree = unshare_expr (*n);
781 old = *tp;
782 *tp = gimple_fold_indirect_ref (new_tree);
783 if (!*tp)
784 {
785 if (TREE_CODE (new_tree) == ADDR_EXPR)
786 {
787 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
788 type, new_tree);
789 /* ??? We should either assert here or build
790 a VIEW_CONVERT_EXPR instead of blindly leaking
791 incompatible types to our IL. */
792 if (! *tp)
793 *tp = TREE_OPERAND (new_tree, 0);
794 }
795 else
796 {
797 *tp = build1 (INDIRECT_REF, type, new_tree);
798 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
799 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
800 }
801 }
802 *walk_subtrees = 0;
803 return NULL;
804 }
805 }
806
807 /* Here is the "usual case". Copy this tree node, and then
808 tweak some special cases. */
809 copy_tree_r (tp, walk_subtrees, NULL);
810
811 /* Global variables we haven't seen yet need to go into referenced
812    vars, unless they are referenced from types only. */
813 if (gimple_in_ssa_p (cfun)
814 && TREE_CODE (*tp) == VAR_DECL
815 && id->remapping_type_depth == 0)
816 add_referenced_var (*tp);
817
818 /* We should never have TREE_BLOCK set on non-statements. */
819 if (EXPR_P (*tp))
820 gcc_assert (!TREE_BLOCK (*tp));
821
822 if (TREE_CODE (*tp) != OMP_CLAUSE)
823 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
824
825 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
826 {
827 /* The copied TARGET_EXPR has never been expanded, even if the
828 original node was expanded already. */
829 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
830 TREE_OPERAND (*tp, 3) = NULL_TREE;
831 }
832 else if (TREE_CODE (*tp) == ADDR_EXPR)
833 {
834 /* Variable substitution need not be simple. In particular,
835    consider the INDIRECT_REF substitution above. Make sure that
836    TREE_CONSTANT and friends are up-to-date, but make sure
837    not to improperly set TREE_BLOCK on any sub-expressions. */
838 int invariant = is_gimple_min_invariant (*tp);
839 tree block = id->block;
840 id->block = NULL_TREE;
841 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
842 id->block = block;
843
844 /* Handle the case where we substituted an INDIRECT_REF
845 into the operand of the ADDR_EXPR. */
846 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
847 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
848 else
849 recompute_tree_invariant_for_addr_expr (*tp);
850
851 /* If this used to be invariant, but is not any longer,
852 then regimplification is probably needed. */
853 if (invariant && !is_gimple_min_invariant (*tp))
854 id->regimplify = true;
855
856 *walk_subtrees = 0;
857 }
858 }
859
860 /* Keep iterating. */
861 return NULL_TREE;
862 }
863
864
865 /* Called from copy_tree_body via walk_tree. DATA is really a
866    `copy_body_data *'. */
867
868 tree
869 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
870 {
871 copy_body_data *id = (copy_body_data *) data;
872 tree fn = id->src_fn;
873 tree new_block;
874
875 /* Begin by recognizing trees that we'll completely rewrite for the
876 inlining context. Our output for these trees is completely
877 different from our input (e.g. RETURN_EXPR is deleted, and morphs
878 into an edge). Further down, we'll handle trees that get
879 duplicated and/or tweaked. */
880
881 /* When requested, RETURN_EXPRs should be transformed to just the
882 contained MODIFY_EXPR. The branch semantics of the return will
883 be handled elsewhere by manipulating the CFG rather than a statement. */
884 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
885 {
886 tree assignment = TREE_OPERAND (*tp, 0);
887
888 /* If we're returning something, just turn that into an
889 assignment into the equivalent of the original RESULT_DECL.
890 If the "assignment" is just the result decl, the result
891 decl has already been set (e.g. a recent "foo (&result_decl,
892 ...)"); just toss the entire RETURN_EXPR. */
893 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
894 {
895 /* Replace the RETURN_EXPR with (a copy of) the
896 MODIFY_EXPR hanging underneath. */
897 *tp = copy_node (assignment);
898 }
899 else /* Else the RETURN_EXPR returns no value. */
900 {
901 *tp = NULL;
902 return (tree) (void *)1;
903 }
904 }
905 else if (TREE_CODE (*tp) == SSA_NAME)
906 {
907 *tp = remap_ssa_name (*tp, id);
908 *walk_subtrees = 0;
909 return NULL;
910 }
911
912 /* Local variables and labels need to be replaced by equivalent
913 variables. We don't want to copy static variables; there's only
914 one of those, no matter how many times we inline the containing
915 function. Similarly for globals from an outer function. */
916 else if (auto_var_in_fn_p (*tp, fn))
917 {
918 tree new_decl;
919
920 /* Remap the declaration. */
921 new_decl = remap_decl (*tp, id);
922 gcc_assert (new_decl);
923 /* Replace this variable with the copy. */
924 STRIP_TYPE_NOPS (new_decl);
925 *tp = new_decl;
926 *walk_subtrees = 0;
927 }
928 else if (TREE_CODE (*tp) == STATEMENT_LIST)
929 copy_statement_list (tp);
930 else if (TREE_CODE (*tp) == SAVE_EXPR
931 || TREE_CODE (*tp) == TARGET_EXPR)
932 remap_save_expr (tp, id->decl_map, walk_subtrees);
933 else if (TREE_CODE (*tp) == LABEL_DECL
934 && (! DECL_CONTEXT (*tp)
935 || decl_function_context (*tp) == id->src_fn))
936 /* These may need to be remapped for EH handling. */
937 *tp = remap_decl (*tp, id);
938 else if (TREE_CODE (*tp) == BIND_EXPR)
939 copy_bind_expr (tp, walk_subtrees, id);
940 /* Types may need remapping as well. */
941 else if (TYPE_P (*tp))
942 *tp = remap_type (*tp, id);
943
944 /* If this is a constant, we have to copy the node iff the type will be
945 remapped. copy_tree_r will not copy a constant. */
946 else if (CONSTANT_CLASS_P (*tp))
947 {
948 tree new_type = remap_type (TREE_TYPE (*tp), id);
949
950 if (new_type == TREE_TYPE (*tp))
951 *walk_subtrees = 0;
952
953 else if (TREE_CODE (*tp) == INTEGER_CST)
954 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
955 TREE_INT_CST_HIGH (*tp));
956 else
957 {
958 *tp = copy_node (*tp);
959 TREE_TYPE (*tp) = new_type;
960 }
961 }
962
963 /* Otherwise, just copy the node. Note that copy_tree_r already
964 knows not to copy VAR_DECLs, etc., so this is safe. */
965 else
966 {
967 /* Here we handle trees that are not completely rewritten.
968 First we detect some inlining-induced bogosities for
969 discarding. */
970 if (TREE_CODE (*tp) == MODIFY_EXPR
971 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
972 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
973 {
974 /* Some assignments VAR = VAR; don't generate any rtl code
975 and thus don't count as variable modification. Avoid
976 keeping bogosities like 0 = 0. */
977 tree decl = TREE_OPERAND (*tp, 0), value;
978 tree *n;
979
980 n = (tree *) pointer_map_contains (id->decl_map, decl);
981 if (n)
982 {
983 value = *n;
984 STRIP_TYPE_NOPS (value);
985 if (TREE_CONSTANT (value) || TREE_READONLY (value))
986 {
987 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
988 return copy_tree_body_r (tp, walk_subtrees, data);
989 }
990 }
991 }
992 else if (TREE_CODE (*tp) == INDIRECT_REF)
993 {
994 /* Get rid of *& from inline substitutions that can happen when a
995 pointer argument is an ADDR_EXPR. */
996 tree decl = TREE_OPERAND (*tp, 0);
997 tree *n;
998
999 n = (tree *) pointer_map_contains (id->decl_map, decl);
1000 if (n)
1001 {
1002 tree new_tree;
1003 tree old;
1004 /* If we happen to get an ADDR_EXPR in n->value, strip
1005 it manually here as we'll eventually get ADDR_EXPRs
1006 which lie about their types pointed to. In this case
1007 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1008 but we absolutely rely on that. As fold_indirect_ref
1009 does other useful transformations, try that first, though. */
1010 tree type = TREE_TYPE (TREE_TYPE (*n));
1011 if (id->do_not_unshare)
1012 new_tree = *n;
1013 else
1014 new_tree = unshare_expr (*n);
1015 old = *tp;
1016 *tp = gimple_fold_indirect_ref (new_tree);
1017 if (! *tp)
1018 {
1019 if (TREE_CODE (new_tree) == ADDR_EXPR)
1020 {
1021 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1022 type, new_tree);
1023 /* ??? We should either assert here or build
1024 a VIEW_CONVERT_EXPR instead of blindly leaking
1025 incompatible types to our IL. */
1026 if (! *tp)
1027 *tp = TREE_OPERAND (new_tree, 0);
1028 }
1029 else
1030 {
1031 *tp = build1 (INDIRECT_REF, type, new_tree);
1032 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1033 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1034 }
1035 }
1036 *walk_subtrees = 0;
1037 return NULL;
1038 }
1039 }
1040
1041 /* Here is the "usual case". Copy this tree node, and then
1042 tweak some special cases. */
1043 copy_tree_r (tp, walk_subtrees, NULL);
1044
1045 /* Global variables we haven't seen yet need to go into referenced
1046    vars, unless they are referenced from types only. */
1047 if (gimple_in_ssa_p (cfun)
1048 && TREE_CODE (*tp) == VAR_DECL
1049 && id->remapping_type_depth == 0)
1050 add_referenced_var (*tp);
1051
1052 /* If EXPR has a block defined, map it to the newly constructed block.
1053    When inlining we want EXPRs without a block to appear in the block
1054    of the function call. */
1055 if (EXPR_P (*tp))
1056 {
1057 new_block = id->block;
1058 if (TREE_BLOCK (*tp))
1059 {
1060 tree *n;
1061 n = (tree *) pointer_map_contains (id->decl_map,
1062 TREE_BLOCK (*tp));
1063 gcc_assert (n);
1064 new_block = *n;
1065 }
1066 TREE_BLOCK (*tp) = new_block;
1067 }
1068
1069 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
1070 TREE_OPERAND (*tp, 0) =
1071 build_int_cst (NULL_TREE,
1072 id->eh_region_offset
1073 + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
1074
1075 if (TREE_CODE (*tp) != OMP_CLAUSE)
1076 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1077
1078 /* The copied TARGET_EXPR has never been expanded, even if the
1079 original node was expanded already. */
1080 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1081 {
1082 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1083 TREE_OPERAND (*tp, 3) = NULL_TREE;
1084 }
1085
1086 /* Variable substitution need not be simple. In particular, consider
1087    the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1088    and friends are up-to-date. */
1089 else if (TREE_CODE (*tp) == ADDR_EXPR)
1090 {
1091 int invariant = is_gimple_min_invariant (*tp);
1092 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1093
1094 /* Handle the case where we substituted an INDIRECT_REF
1095 into the operand of the ADDR_EXPR. */
1096 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1097 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1098 else
1099 recompute_tree_invariant_for_addr_expr (*tp);
1100
1101 /* If this used to be invariant, but is not any longer,
1102 then regimplification is probably needed. */
1103 if (invariant && !is_gimple_min_invariant (*tp))
1104 id->regimplify = true;
1105
1106 *walk_subtrees = 0;
1107 }
1108 }
1109
1110 /* Keep iterating. */
1111 return NULL_TREE;
1112 }
1113
1114
1115 /* Helper for copy_bb. Remap statement STMT using the inlining
1116 information in ID. Return the new statement copy. */
1117
1118 static gimple
1119 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1120 {
1121 gimple copy = NULL;
1122 struct walk_stmt_info wi;
1123 tree new_block;
1124 bool skip_first = false;
1125
1126 /* Begin by recognizing trees that we'll completely rewrite for the
1127 inlining context. Our output for these trees is completely
1128 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1129 into an edge). Further down, we'll handle trees that get
1130 duplicated and/or tweaked. */
1131
1132 /* When requested, GIMPLE_RETURNs should be transformed to just the
1133 contained GIMPLE_ASSIGN. The branch semantics of the return will
1134 be handled elsewhere by manipulating the CFG rather than the
1135 statement. */
1136 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1137 {
1138 tree retval = gimple_return_retval (stmt);
1139
1140 /* If we're returning something, just turn that into an
1141 assignment into the equivalent of the original RESULT_DECL.
1142 If RETVAL is just the result decl, the result decl has
1143 already been set (e.g. a recent "foo (&result_decl, ...)");
1144 just toss the entire GIMPLE_RETURN. */
1145 if (retval && TREE_CODE (retval) != RESULT_DECL)
1146 {
1147 copy = gimple_build_assign (id->retvar, retval);
1148 /* id->retvar is already substituted. Skip it on later remapping. */
1149 skip_first = true;
1150 }
1151 else
1152 return gimple_build_nop ();
1153 }
1154 else if (gimple_has_substatements (stmt))
1155 {
1156 gimple_seq s1, s2;
1157
1158 /* When cloning bodies from the C++ front end, we will be handed bodies
1159 in High GIMPLE form. Handle here all the High GIMPLE statements that
1160 have embedded statements. */
1161 switch (gimple_code (stmt))
1162 {
1163 case GIMPLE_BIND:
1164 copy = copy_gimple_bind (stmt, id);
1165 break;
1166
1167 case GIMPLE_CATCH:
1168 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1169 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1170 break;
1171
1172 case GIMPLE_EH_FILTER:
1173 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1174 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1175 break;
1176
1177 case GIMPLE_TRY:
1178 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1179 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1180 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1181 break;
1182
1183 case GIMPLE_WITH_CLEANUP_EXPR:
1184 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1185 copy = gimple_build_wce (s1);
1186 break;
1187
1188 case GIMPLE_OMP_PARALLEL:
1189 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1190 copy = gimple_build_omp_parallel
1191 (s1,
1192 gimple_omp_parallel_clauses (stmt),
1193 gimple_omp_parallel_child_fn (stmt),
1194 gimple_omp_parallel_data_arg (stmt));
1195 break;
1196
1197 case GIMPLE_OMP_TASK:
1198 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1199 copy = gimple_build_omp_task
1200 (s1,
1201 gimple_omp_task_clauses (stmt),
1202 gimple_omp_task_child_fn (stmt),
1203 gimple_omp_task_data_arg (stmt),
1204 gimple_omp_task_copy_fn (stmt),
1205 gimple_omp_task_arg_size (stmt),
1206 gimple_omp_task_arg_align (stmt));
1207 break;
1208
1209 case GIMPLE_OMP_FOR:
1210 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1211 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1212 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1213 gimple_omp_for_collapse (stmt), s2);
1214 {
1215 size_t i;
1216 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1217 {
1218 gimple_omp_for_set_index (copy, i,
1219 gimple_omp_for_index (stmt, i));
1220 gimple_omp_for_set_initial (copy, i,
1221 gimple_omp_for_initial (stmt, i));
1222 gimple_omp_for_set_final (copy, i,
1223 gimple_omp_for_final (stmt, i));
1224 gimple_omp_for_set_incr (copy, i,
1225 gimple_omp_for_incr (stmt, i));
1226 gimple_omp_for_set_cond (copy, i,
1227 gimple_omp_for_cond (stmt, i));
1228 }
1229 }
1230 break;
1231
1232 case GIMPLE_OMP_MASTER:
1233 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1234 copy = gimple_build_omp_master (s1);
1235 break;
1236
1237 case GIMPLE_OMP_ORDERED:
1238 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1239 copy = gimple_build_omp_ordered (s1);
1240 break;
1241
1242 case GIMPLE_OMP_SECTION:
1243 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1244 copy = gimple_build_omp_section (s1);
1245 break;
1246
1247 case GIMPLE_OMP_SECTIONS:
1248 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1249 copy = gimple_build_omp_sections
1250 (s1, gimple_omp_sections_clauses (stmt));
1251 break;
1252
1253 case GIMPLE_OMP_SINGLE:
1254 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1255 copy = gimple_build_omp_single
1256 (s1, gimple_omp_single_clauses (stmt));
1257 break;
1258
1259 case GIMPLE_OMP_CRITICAL:
1260 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1261 copy
1262 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1263 break;
1264
1265 default:
1266 gcc_unreachable ();
1267 }
1268 }
1269 else
1270 {
1271 if (gimple_assign_copy_p (stmt)
1272 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1273 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1274 {
1275 /* Here we handle statements that are not completely rewritten.
1276 First we detect some inlining-induced bogosities for
1277 discarding. */
1278
1279 /* Some assignments VAR = VAR; don't generate any rtl code
1280 and thus don't count as variable modification. Avoid
1281 keeping bogosities like 0 = 0. */
1282 tree decl = gimple_assign_lhs (stmt), value;
1283 tree *n;
1284
1285 n = (tree *) pointer_map_contains (id->decl_map, decl);
1286 if (n)
1287 {
1288 value = *n;
1289 STRIP_TYPE_NOPS (value);
1290 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1291 return gimple_build_nop ();
1292 }
1293 }
1294
1295 /* Create a new deep copy of the statement. */
1296 copy = gimple_copy (stmt);
1297 }
1298
1299 /* If STMT has a block defined, map it to the newly constructed
1300 block. When inlining we want statements without a block to
1301 appear in the block of the function call. */
1302 new_block = id->block;
1303 if (gimple_block (copy))
1304 {
1305 tree *n;
1306 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1307 gcc_assert (n);
1308 new_block = *n;
1309 }
1310
1311 gimple_set_block (copy, new_block);
1312
1313 /* Remap all the operands in COPY. */
1314 memset (&wi, 0, sizeof (wi));
1315 wi.info = id;
1316 if (skip_first)
1317 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1318 else
1319 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1320
1321 /* Clear the copied virtual operands. We are not remapping them here
1322 but are going to recreate them from scratch. */
1323 if (gimple_has_mem_ops (copy))
1324 {
1325 gimple_set_vdef (copy, NULL_TREE);
1326 gimple_set_vuse (copy, NULL_TREE);
1327 }
1328
1329 /* We have to handle EH region remapping of GIMPLE_RESX specially because
1330 the region number is not an operand. */
1331 if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
1332 {
1333 gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
1334 }
1335 return copy;
1336 }
1337
1338
1339 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1340 later */
1341
1342 static basic_block
1343 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1344 gcov_type count_scale)
1345 {
1346 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1347 basic_block copy_basic_block;
1348 tree decl;
1349
1350 /* create_basic_block() will append every new block to
1351 basic_block_info automatically. */
1352 copy_basic_block = create_basic_block (NULL, (void *) 0,
1353 (basic_block) bb->prev_bb->aux);
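  /* Explanatory note (not from the original sources): COUNT_SCALE is a
     fixed-point fraction scaled by REG_BR_PROB_BASE, so the assignment
     below rescales the profile count of the copied block relative to
     the original. */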
1354 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1355
1356 /* We are going to rebuild frequencies from scratch. These values
1357    are of only minor importance for driving canonicalize_loop_headers. */
1358 copy_basic_block->frequency = ((gcov_type)bb->frequency
1359 * frequency_scale / REG_BR_PROB_BASE);
1360
1361 if (copy_basic_block->frequency > BB_FREQ_MAX)
1362 copy_basic_block->frequency = BB_FREQ_MAX;
1363
1364 copy_gsi = gsi_start_bb (copy_basic_block);
1365
1366 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1367 {
1368 gimple stmt = gsi_stmt (gsi);
1369 gimple orig_stmt = stmt;
1370
1371 id->regimplify = false;
1372 stmt = remap_gimple_stmt (stmt, id);
1373 if (gimple_nop_p (stmt))
1374 continue;
1375
1376 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1377 seq_gsi = copy_gsi;
1378
1379 /* With return slot optimization we can end up with
1380 non-gimple (foo *)&this->m, fix that here. */
1381 if (is_gimple_assign (stmt)
1382 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1383 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1384 {
1385 tree new_rhs;
1386 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1387 gimple_assign_rhs1 (stmt),
1388 true, NULL, false, GSI_NEW_STMT);
1389 gimple_assign_set_rhs1 (stmt, new_rhs);
1390 id->regimplify = false;
1391 }
1392
1393 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1394
1395 if (id->regimplify)
1396 gimple_regimplify_operands (stmt, &seq_gsi);
1397
1398 /* If copy_basic_block has been empty at the start of this iteration,
1399 call gsi_start_bb again to get at the newly added statements. */
1400 if (gsi_end_p (copy_gsi))
1401 copy_gsi = gsi_start_bb (copy_basic_block);
1402 else
1403 gsi_next (&copy_gsi);
1404
1405 /* Process the new statement. The call to gimple_regimplify_operands
1406    possibly turned the statement into multiple statements; we
1407    need to process all of them. */
1408 do
1409 {
1410 tree fn;
1411
1412 stmt = gsi_stmt (copy_gsi);
1413 if (is_gimple_call (stmt)
1414 && gimple_call_va_arg_pack_p (stmt)
1415 && id->gimple_call)
1416 {
1417 /* __builtin_va_arg_pack () should be replaced by
1418 all arguments corresponding to ... in the caller. */
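  /* Illustrative note (not from the original sources): for an
     always_inline variadic wrapper such as
       int wrap (const char *fmt, ...)
       { return fprintf (stderr, fmt, __builtin_va_arg_pack ()); }
     inlined at a call "wrap ("%d", x)", the copied fprintf call here
     becomes "fprintf (stderr, "%d", x)". */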
1419 tree p;
1420 gimple new_call;
1421 VEC(tree, heap) *argarray;
1422 size_t nargs = gimple_call_num_args (id->gimple_call);
1423 size_t n;
1424
1425 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1426 nargs--;
1427
1428 /* Create the new array of arguments. */
1429 n = nargs + gimple_call_num_args (stmt);
1430 argarray = VEC_alloc (tree, heap, n);
1431 VEC_safe_grow (tree, heap, argarray, n);
1432
1433 /* Copy all the arguments before '...' */
1434 memcpy (VEC_address (tree, argarray),
1435 gimple_call_arg_ptr (stmt, 0),
1436 gimple_call_num_args (stmt) * sizeof (tree));
1437
1438 /* Append the arguments passed in '...' */
1439 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1440 gimple_call_arg_ptr (id->gimple_call, 0)
1441 + (gimple_call_num_args (id->gimple_call) - nargs),
1442 nargs * sizeof (tree));
1443
1444 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1445 argarray);
1446
1447 VEC_free (tree, heap, argarray);
1448
1449 /* Copy all GIMPLE_CALL flags, location and block, except
1450 GF_CALL_VA_ARG_PACK. */
1451 gimple_call_copy_flags (new_call, stmt);
1452 gimple_call_set_va_arg_pack (new_call, false);
1453 gimple_set_location (new_call, gimple_location (stmt));
1454 gimple_set_block (new_call, gimple_block (stmt));
1455 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1456
1457 gsi_replace (&copy_gsi, new_call, false);
1458 gimple_set_bb (stmt, NULL);
1459 stmt = new_call;
1460 }
1461 else if (is_gimple_call (stmt)
1462 && id->gimple_call
1463 && (decl = gimple_call_fndecl (stmt))
1464 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1465 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1466 {
1467 /* __builtin_va_arg_pack_len () should be replaced by
1468 the number of anonymous arguments. */
1469 size_t nargs = gimple_call_num_args (id->gimple_call);
1470 tree count, p;
1471 gimple new_stmt;
1472
1473 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1474 nargs--;
1475
1476 count = build_int_cst (integer_type_node, nargs);
1477 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1478 gsi_replace (&copy_gsi, new_stmt, false);
1479 stmt = new_stmt;
1480 }
1481
1482 /* Statements produced by inlining can be unfolded, especially
1483    when we have constant propagated some operands. We can't fold
1484    them right now for two reasons:
1485    1) folding requires SSA_NAME_DEF_STMTs to be correct
1486    2) we can't change function calls to builtins.
1487    So we just mark the statement for later folding. We mark
1488    all new statements, instead of just the statements that have
1489    changed by some nontrivial substitution, so that even statements
1490    made foldable indirectly are updated. If this turns out to be
1491    expensive, copy_body can be told to watch for nontrivial
1492    changes. */
1493 if (id->statements_to_fold)
1494 pointer_set_insert (id->statements_to_fold, stmt);
1495
1496 /* We're duplicating a CALL_EXPR. Find any corresponding
1497 callgraph edges and update or duplicate them. */
1498 if (is_gimple_call (stmt))
1499 {
1500 struct cgraph_edge *edge;
1501 int flags;
1502
1503 switch (id->transform_call_graph_edges)
1504 {
1505 case CB_CGE_DUPLICATE:
1506 edge = cgraph_edge (id->src_node, orig_stmt);
1507 if (edge)
1508 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1509 REG_BR_PROB_BASE, 1,
1510 edge->frequency, true);
1511 break;
1512
1513 case CB_CGE_MOVE_CLONES:
1514 cgraph_set_call_stmt_including_clones (id->dst_node,
1515 orig_stmt, stmt);
1516 edge = cgraph_edge (id->dst_node, stmt);
1517 break;
1518
1519 case CB_CGE_MOVE:
1520 edge = cgraph_edge (id->dst_node, orig_stmt);
1521 if (edge)
1522 cgraph_set_call_stmt (edge, stmt);
1523 break;
1524
1525 default:
1526 gcc_unreachable ();
1527 }
1528
1529 /* Constant propagation on arguments done during inlining
1530    may create a new direct call. Produce an edge for it. */
1531 if ((!edge
1532 || (edge->indirect_call
1533 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1534 && is_gimple_call (stmt)
1535 && (fn = gimple_call_fndecl (stmt)) != NULL)
1536 {
1537 struct cgraph_node *dest = cgraph_node (fn);
1538
1539 /* We have a missing edge in the callgraph. This can happen
1540 when previous inlining turned an indirect call into a
1541 direct call by constant propagating arguments. In all
1542 other cases we hit a bug (incorrect node sharing is the
1543 most common reason for missing edges). */
1544 gcc_assert (dest->needed || !dest->analyzed);
1545 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1546 cgraph_create_edge_including_clones
1547 (id->dst_node, dest, stmt, bb->count,
1548 compute_call_stmt_bb_frequency (id->dst_node->decl, bb),
1549 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1550 else
1551 cgraph_create_edge (id->dst_node, dest, stmt,
1552 bb->count, CGRAPH_FREQ_BASE,
1553 bb->loop_depth)->inline_failed
1554 = CIF_ORIGINALLY_INDIRECT_CALL;
1555 if (dump_file)
1556 {
1557 fprintf (dump_file, "Created new direct edge to %s",
1558 cgraph_node_name (dest));
1559 }
1560 }
1561
1562 flags = gimple_call_flags (stmt);
1563 if (flags & ECF_MAY_BE_ALLOCA)
1564 cfun->calls_alloca = true;
1565 if (flags & ECF_RETURNS_TWICE)
1566 cfun->calls_setjmp = true;
1567 }
1568
1569 /* If you think we can abort here, you are wrong.
1570 There is no region 0 in gimple. */
1571 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);
1572
1573 if (stmt_could_throw_p (stmt)
1574 /* When we are cloning for inlining, we are supposed to
1575    construct a clone that calls precisely the same functions
1576    as the original. However, IPA optimizers might have proved
1577    earlier that some function calls are non-trapping, which
1578    might render some basic blocks dead and therefore
1579    unreachable.
1580
1581    We can't update SSA with unreachable blocks in the CFG, and thus
1582    we prevent the scenario by preserving even the "dead" EH
1583    edges until the point where they are later removed by the
1584    fixup_cfg pass. */
1585 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
1586 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
1587 {
1588 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
1589
1590 /* Add an entry for the copied tree in the EH hashtable.
1591 When cloning or versioning, use the hashtable in
1592 cfun, and just copy the EH number. When inlining, use the
1593 hashtable in the caller, and adjust the region number. */
1594 if (region > 0)
1595 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
1596
1597 /* If this tree doesn't have a region associated with it,
1598 and there is a "current region,"
1599 then associate this tree with the current region
1600 and add edges associated with this region. */
1601 if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
1602 && id->eh_region > 0
1603 && stmt_could_throw_p (stmt))
1604 add_stmt_to_eh_region (stmt, id->eh_region);
1605 }
1606
1607 if (gimple_in_ssa_p (cfun))
1608 {
1609 ssa_op_iter i;
1610 tree def;
1611
1612 find_new_referenced_vars (gsi_stmt (copy_gsi));
1613 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1614 if (TREE_CODE (def) == SSA_NAME)
1615 SSA_NAME_DEF_STMT (def) = stmt;
1616 }
1617
1618 gsi_next (&copy_gsi);
1619 }
1620 while (!gsi_end_p (copy_gsi));
1621
1622 copy_gsi = gsi_last_bb (copy_basic_block);
1623 }
1624
1625 return copy_basic_block;
1626 }
1627
1628 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1629    SSA form is quite easy, since the dominator relationship for the old
1630    basic blocks does not change.
1631
1632    There is, however, an exception where inlining might change the dominator
1633    relation across EH edges from basic blocks within the inlined function
1634    to landing pads in the function we inline into.
1635
1636    The function fills in the PHI_RESULTs of such PHI nodes if they refer
1637    to gimple regs. Otherwise, it marks the PHI_RESULT of such
1638    PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1639    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1640    set, which means that there will be no overlapping live ranges
1641    for the underlying symbol.
1642
1643    This might change in the future if we allow redirecting of EH edges,
1644    and we might then want to change the way we build the CFG pre-inlining
1645    to include all the possible edges. */
1646 static void
1647 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1648 bool can_throw, bool nonlocal_goto)
1649 {
1650 edge e;
1651 edge_iterator ei;
1652
1653 FOR_EACH_EDGE (e, ei, bb->succs)
1654 if (!e->dest->aux
1655 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1656 {
1657 gimple phi;
1658 gimple_stmt_iterator si;
1659
1660 if (!nonlocal_goto)
1661 gcc_assert (e->flags & EDGE_EH);
1662
1663 if (!can_throw)
1664 gcc_assert (!(e->flags & EDGE_EH));
1665
1666 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1667 {
1668 edge re;
1669
1670 phi = gsi_stmt (si);
1671
1672 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1673 gcc_assert (!e->dest->aux);
1674
1675 gcc_assert ((e->flags & EDGE_EH)
1676 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1677
1678 if (!is_gimple_reg (PHI_RESULT (phi)))
1679 {
1680 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1681 continue;
1682 }
1683
1684 re = find_edge (ret_bb, e->dest);
1685 gcc_assert (re);
1686 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1687 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1688
1689 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1690 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1691 }
1692 }
1693 }
1694
1695
1696 /* Copy edges from BB into its copy constructed earlier, scale profile
1697    accordingly. Edges will be taken care of later. Assume that the aux
1698    pointers point to the copies of each BB. */
1699
1700 static void
1701 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1702 {
1703 basic_block new_bb = (basic_block) bb->aux;
1704 edge_iterator ei;
1705 edge old_edge;
1706 gimple_stmt_iterator si;
1707 int flags;
1708
1709 /* Use the indices from the original blocks to create edges for the
1710 new ones. */
1711 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1712 if (!(old_edge->flags & EDGE_EH))
1713 {
1714 edge new_edge;
1715
1716 flags = old_edge->flags;
1717
1718 /* Return edges do get a FALLTHRU flag when they get inlined. */
1719 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1720 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1721 flags |= EDGE_FALLTHRU;
1722 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1723 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1724 new_edge->probability = old_edge->probability;
1725 }
1726
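  /* The entry and exit blocks contain no statements, so the rest of this
     function does not apply to them (explanatory note, not from the
     original sources). */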
1727 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1728 return;
1729
1730 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1731 {
1732 gimple copy_stmt;
1733 bool can_throw, nonlocal_goto;
1734
1735 copy_stmt = gsi_stmt (si);
1736 update_stmt (copy_stmt);
1737 if (gimple_in_ssa_p (cfun))
1738 mark_symbols_for_renaming (copy_stmt);
1739
1740 /* Do this before the possible split_block. */
1741 gsi_next (&si);
1742
1743 /* If this tree could throw an exception, there are two
1744 cases where we need to add abnormal edge(s): the
1745 tree wasn't in a region and there is a "current
1746 region" in the caller; or the original tree had
1747 EH edges. In both cases split the block after the tree,
1748 and add abnormal edge(s) as needed; we need both
1749 those from the callee and the caller.
1750 We check whether the copy can throw, because the const
1751 propagation can change an INDIRECT_REF which throws
1752 into a COMPONENT_REF which doesn't. If the copy
1753 can throw, the original could also throw. */
1754 can_throw = stmt_can_throw_internal (copy_stmt);
1755 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1756
1757 if (can_throw || nonlocal_goto)
1758 {
1759 if (!gsi_end_p (si))
1760 /* Note that bb's predecessor edges aren't necessarily
1761 right at this point; split_block doesn't care. */
1762 {
1763 edge e = split_block (new_bb, copy_stmt);
1764
1765 new_bb = e->dest;
1766 new_bb->aux = e->src->aux;
1767 si = gsi_start_bb (new_bb);
1768 }
1769 }
1770
1771 if (can_throw)
1772 make_eh_edges (copy_stmt);
1773
1774 if (nonlocal_goto)
1775 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1776
1777 if ((can_throw || nonlocal_goto)
1778 && gimple_in_ssa_p (cfun))
1779 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1780 can_throw, nonlocal_goto);
1781 }
1782 }
1783
1784 /* Copy the PHIs. All blocks and edges are copied; some blocks
1785 were possibly split and new outgoing EH edges inserted.
1786 BB points to the block of the original function and AUX pointers link
1787 the original and newly copied blocks. */
1788
1789 static void
1790 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1791 {
1792 basic_block const new_bb = (basic_block) bb->aux;
1793 edge_iterator ei;
1794 gimple phi;
1795 gimple_stmt_iterator si;
1796
1797 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1798 {
1799 tree res, new_res;
1800 gimple new_phi;
1801 edge new_edge;
1802
1803 phi = gsi_stmt (si);
1804 res = PHI_RESULT (phi);
1805 new_res = res;
1806 if (is_gimple_reg (res))
1807 {
1808 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1809 SSA_NAME_DEF_STMT (new_res)
1810 = new_phi = create_phi_node (new_res, new_bb);
1811 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1812 {
1813 edge const old_edge
1814 = find_edge ((basic_block) new_edge->src->aux, bb);
1815 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1816 tree new_arg = arg;
1817 tree block = id->block;
1818 id->block = NULL_TREE;
1819 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1820 id->block = block;
1821 gcc_assert (new_arg);
1822 /* With the return slot optimization we can end up with a
1823 non-gimple (foo *)&this->m; fix that here. */
1824 if (TREE_CODE (new_arg) != SSA_NAME
1825 && TREE_CODE (new_arg) != FUNCTION_DECL
1826 && !is_gimple_val (new_arg))
1827 {
1828 gimple_seq stmts = NULL;
1829 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1830 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1831 }
1832 add_phi_arg (new_phi, new_arg, new_edge,
1833 gimple_phi_arg_location_from_edge (phi, old_edge));
1834 }
1835 }
1836 }
1837 }
1838
1839
1840 /* Wrapper for remap_decl so it can be used as a callback. */
1841
1842 static tree
1843 remap_decl_1 (tree decl, void *data)
1844 {
1845 return remap_decl (decl, (copy_body_data *) data);
1846 }
1847
1848 /* Build struct function and associated data structures for the new clone
1849 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1850
1851 static void
1852 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1853 int frequency)
1854 {
1855 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1856 gcov_type count_scale, frequency_scale;
1857
1858 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1859 count_scale = (REG_BR_PROB_BASE * count
1860 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1861 else
1862 count_scale = 1;
1863
1864 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1865 frequency_scale = (REG_BR_PROB_BASE * frequency
1866 /
1867 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1868 else
1869 frequency_scale = count_scale;
1870
1871 /* Register specific tree functions. */
1872 gimple_register_cfg_hooks ();
1873
1874 /* Get clean struct function. */
1875 push_struct_function (new_fndecl);
1876
1877 /* We will rebuild these, so just sanity check that they are empty. */
1878 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1879 gcc_assert (cfun->local_decls == NULL);
1880 gcc_assert (cfun->cfg == NULL);
1881 gcc_assert (cfun->decl == new_fndecl);
1882
1883 /* Copy items we preserve during cloning. */
1884 cfun->static_chain_decl = src_cfun->static_chain_decl;
1885 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1886 cfun->function_end_locus = src_cfun->function_end_locus;
1887 cfun->curr_properties = src_cfun->curr_properties;
1888 cfun->last_verified = src_cfun->last_verified;
1889 if (src_cfun->ipa_transforms_to_apply)
1890 cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
1891 src_cfun->ipa_transforms_to_apply);
1892 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
1893 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1894 cfun->function_frequency = src_cfun->function_frequency;
1895 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
1896 cfun->stdarg = src_cfun->stdarg;
1897 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
1898 cfun->after_inlining = src_cfun->after_inlining;
1899 cfun->returns_struct = src_cfun->returns_struct;
1900 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
1901 cfun->after_tree_profile = src_cfun->after_tree_profile;
1902
1903 init_empty_tree_cfg ();
1904
1905 ENTRY_BLOCK_PTR->count =
1906 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1907 REG_BR_PROB_BASE);
1908 ENTRY_BLOCK_PTR->frequency =
1909 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1910 frequency_scale / REG_BR_PROB_BASE);
1911 EXIT_BLOCK_PTR->count =
1912 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1913 REG_BR_PROB_BASE);
1914 EXIT_BLOCK_PTR->frequency =
1915 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1916 frequency_scale / REG_BR_PROB_BASE);
1917 if (src_cfun->eh)
1918 init_eh_for_function ();
1919
1920 if (src_cfun->gimple_df)
1921 {
1922 init_tree_ssa (cfun);
1923 cfun->gimple_df->in_ssa_p = true;
1924 init_ssa_operands ();
1925 }
1926 pop_cfun ();
1927 }
1928
1929 /* Make a copy of the body of FN so that it can be inserted inline in
1930 another function. Walks FN via its CFG and returns the new fndecl. */
1931
1932 static tree
1933 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1934 basic_block entry_block_map, basic_block exit_block_map)
1935 {
1936 tree callee_fndecl = id->src_fn;
1937 /* Original cfun for the callee, doesn't change. */
1938 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1939 struct function *cfun_to_copy;
1940 basic_block bb;
1941 tree new_fndecl = NULL;
1942 gcov_type count_scale, frequency_scale;
1943 int last;
1944
1945 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1946 count_scale = (REG_BR_PROB_BASE * count
1947 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1948 else
1949 count_scale = 1;
1950
1951 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1952 frequency_scale = (REG_BR_PROB_BASE * frequency
1953 /
1954 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1955 else
1956 frequency_scale = count_scale;
1957
1958 /* Register specific tree functions. */
1959 gimple_register_cfg_hooks ();
1960
1961 /* Must have a CFG here at this point. */
1962 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1963 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1964
1965 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1966
1967 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1968 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1969 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1970 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1971
1972 /* Duplicate any exception-handling regions. */
1973 if (cfun->eh)
1974 {
1975 id->eh_region_offset
1976 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1977 0, id->eh_region);
1978 }
1979
1980 /* Use aux pointers to map the original blocks to their copies. */
1981 FOR_EACH_BB_FN (bb, cfun_to_copy)
1982 {
1983 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
1984 bb->aux = new_bb;
1985 new_bb->aux = bb;
1986 }
1987
1988 last = last_basic_block;
1989
1990 /* Now that we've duplicated the blocks, duplicate their edges. */
1991 FOR_ALL_BB_FN (bb, cfun_to_copy)
1992 copy_edges_for_bb (bb, count_scale, exit_block_map);
1993
1994 if (gimple_in_ssa_p (cfun))
1995 FOR_ALL_BB_FN (bb, cfun_to_copy)
1996 copy_phis_for_bb (bb, id);
1997
1998 FOR_ALL_BB_FN (bb, cfun_to_copy)
1999 {
2000 ((basic_block)bb->aux)->aux = NULL;
2001 bb->aux = NULL;
2002 }
2003
2004 /* Zero out AUX fields of newly created blocks during EH edge
2005 insertion. */
2006 for (; last < last_basic_block; last++)
2007 BASIC_BLOCK (last)->aux = NULL;
2008 entry_block_map->aux = NULL;
2009 exit_block_map->aux = NULL;
2010
2011 return new_fndecl;
2012 }
2013
2014 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2015 another function. */
2016
2017 static tree
2018 copy_tree_body (copy_body_data *id)
2019 {
2020 tree fndecl = id->src_fn;
2021 tree body = DECL_SAVED_TREE (fndecl);
2022
2023 walk_tree (&body, copy_tree_body_r, id, NULL);
2024
2025 return body;
2026 }
2027
2028 static tree
2029 copy_body (copy_body_data *id, gcov_type count, int frequency,
2030 basic_block entry_block_map, basic_block exit_block_map)
2031 {
2032 tree fndecl = id->src_fn;
2033 tree body;
2034
2035 /* If this body has a CFG, walk CFG and copy. */
2036 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2037 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
2038
2039 return body;
2040 }
2041
2042 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2043 defined in function FN, or of a data member thereof. */
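/* Illustrative example (hypothetical code): when inlining a function F
   into itself (recursive inlining), an argument such as &buf, where buf
   is an automatic variable of F, must not be propagated as if it were a
   constant, because the inlined copy's buf is a different object.  This
   predicate is used by setup_one_parameter below to block that
   propagation.  */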
2044
2045 static bool
2046 self_inlining_addr_expr (tree value, tree fn)
2047 {
2048 tree var;
2049
2050 if (TREE_CODE (value) != ADDR_EXPR)
2051 return false;
2052
2053 var = get_base_address (TREE_OPERAND (value, 0));
2054
2055 return var && auto_var_in_fn_p (var, fn);
2056 }
2057
2058 static void
2059 insert_init_stmt (basic_block bb, gimple init_stmt)
2060 {
2061 /* If VAR represents a zero-sized variable, it's possible that the
2062 assignment statement may result in no gimple statements. */
2063 if (init_stmt)
2064 {
2065 gimple_stmt_iterator si = gsi_last_bb (bb);
2066
2067 /* We can end up with init statements that store to a non-register
2068 from a rhs with a conversion. Handle that here by forcing the
2069 rhs into a temporary. gimple_regimplify_operands is not
2070 prepared to do this for us. */
2071 if (!is_gimple_reg (gimple_assign_lhs (init_stmt))
2072 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2073 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2074 {
2075 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2076 gimple_expr_type (init_stmt),
2077 gimple_assign_rhs1 (init_stmt));
2078 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2079 GSI_NEW_STMT);
2080 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2081 gimple_assign_set_rhs1 (init_stmt, rhs);
2082 }
2083 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2084 gimple_regimplify_operands (init_stmt, &si);
2085 mark_symbols_for_renaming (init_stmt);
2086 }
2087 }
2088
2089 /* Initialize parameter P with VALUE. If needed, produce the init statement
2090 at the end of BB. When BB is NULL, we return the init statement to be
2091 output later. */
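/* Illustrative example (hypothetical code, not from the original
   sources): when inlining

     static int incr (int n) { return n + 1; }

   at a call site incr (7), the PARM_DECL n is remapped to a local
   VAR_DECL and an init statement of the form n.0 = 7 is appended to BB.
   If n is read-only, not addressable and the argument is an invariant,
   the uses of n may instead be mapped to 7 directly and no init
   statement is produced.  */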
2092 static gimple
2093 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2094 basic_block bb, tree *vars)
2095 {
2096 gimple init_stmt = NULL;
2097 tree var;
2098 tree rhs = value;
2099 tree def = (gimple_in_ssa_p (cfun)
2100 ? gimple_default_def (id->src_cfun, p) : NULL);
2101
2102 if (value
2103 && value != error_mark_node
2104 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2105 {
2106 if (fold_convertible_p (TREE_TYPE (p), value))
2107 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2108 else
2109 /* ??? For valid (GIMPLE) programs we should not end up here.
2110 Still, if something has gone wrong and we end up with truly
2111 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2112 to not leak invalid GIMPLE to the following passes. */
2113 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2114 }
2115
2116 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2117 we may not need to create a new variable here at all. Instead, we may
2118 be able to just use the argument value. */
2119 if (TREE_READONLY (p)
2120 && !TREE_ADDRESSABLE (p)
2121 && value && !TREE_SIDE_EFFECTS (value)
2122 && !def)
2123 {
2124 /* We may produce non-gimple trees by adding NOPs or introduce
2125 invalid sharing when the operand is not really constant.
2126 It is not a big deal to prohibit constant propagation here, as
2127 we will constant propagate in the DOM1 pass anyway. */
2128 if (is_gimple_min_invariant (value)
2129 && useless_type_conversion_p (TREE_TYPE (p),
2130 TREE_TYPE (value))
2131 /* We have to be very careful about ADDR_EXPR. Make sure
2132 the base variable isn't a local variable of the inlined
2133 function, e.g., when doing recursive inlining, direct or
2134 mutually-recursive or whatever, which is why we don't
2135 just test whether fn == current_function_decl. */
2136 && ! self_inlining_addr_expr (value, fn))
2137 {
2138 insert_decl_map (id, p, value);
2139 return NULL;
2140 }
2141 }
2142
2143 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2144 here since the type of this decl must be visible to the calling
2145 function. */
2146 var = copy_decl_to_var (p, id);
2147 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2148 {
2149 get_var_ann (var);
2150 add_referenced_var (var);
2151 }
2152
2153 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2154 that way, when the PARM_DECL is encountered, it will be
2155 automatically replaced by the VAR_DECL. */
2156 insert_decl_map (id, p, var);
2157
2158 /* Declare this new variable. */
2159 TREE_CHAIN (var) = *vars;
2160 *vars = var;
2161
2162 /* Make gimplifier happy about this variable. */
2163 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2164
2165 /* Even if P was TREE_READONLY, the new VAR should not be.
2166 In the original code, we would have constructed a
2167 temporary, and then the function body would have never
2168 changed the value of P. However, now, we will be
2169 constructing VAR directly. The constructor body may
2170 change its value multiple times as it is being
2171 constructed. Therefore, it must not be TREE_READONLY;
2172 the back-end assumes that a TREE_READONLY variable is
2173 assigned to only once. */
2174 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2175 TREE_READONLY (var) = 0;
2176
2177 /* If there is no setup required and we are in SSA, take the easy route
2178 of replacing all SSA names representing the function parameter by the
2179 SSA name passed to the function.
2180
2181 We need to construct the map for the variable anyway, as it might be
2182 used in different SSA names when the parameter is set in the function.
2183
2184 Do the replacement at -O0 for const arguments replaced by a constant.
2185 This is important for builtin_constant_p and other constructs requiring
2186 the constant argument to be visible in the inlined function body.
2187
2188 FIXME: This usually kills the last connection between the inlined
2189 function parameter and the actual value in debug info. Can we do
2190 better here? If we just inserted the statement, copy propagation
2191 would kill it anyway, as it always did in older versions of GCC.
2192
2193 We might want to introduce a notion that a single SSA_NAME might
2194 represent multiple variables for debugging purposes. */
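/* Illustrative example (hypothetical SSA names): for a call f (a_5)
   where the parameter p has the default definition p_1(D) in the callee,
   the test below maps p_1(D) directly to a_5 and emits no init
   statement, provided p is a gimple register, the right-hand side is an
   SSA name or an invariant, and the default definition does not occur in
   an abnormal PHI.  */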
2195 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2196 && (optimize
2197 || (TREE_READONLY (p)
2198 && is_gimple_min_invariant (rhs)))
2199 && (TREE_CODE (rhs) == SSA_NAME
2200 || is_gimple_min_invariant (rhs))
2201 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2202 {
2203 insert_decl_map (id, def, rhs);
2204 return NULL;
2205 }
2206
2207 /* If the value of the argument is never used, don't bother initializing
2208 it. */
2209 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2210 {
2211 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2212 return NULL;
2213 }
2214
2215 /* Initialize this VAR_DECL from the equivalent argument. Convert
2216 the argument to the proper type in case it was promoted. */
2217 if (value)
2218 {
2219 if (rhs == error_mark_node)
2220 {
2221 insert_decl_map (id, p, var);
2222 return NULL;
2223 }
2224
2225 STRIP_USELESS_TYPE_CONVERSION (rhs);
2226
2227 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2228 keep our trees in gimple form. */
2229 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2230 {
2231 def = remap_ssa_name (def, id);
2232 init_stmt = gimple_build_assign (def, rhs);
2233 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2234 set_default_def (var, NULL);
2235 }
2236 else
2237 init_stmt = gimple_build_assign (var, rhs);
2238
2239 if (bb && init_stmt)
2240 insert_init_stmt (bb, init_stmt);
2241 }
2242 return init_stmt;
2243 }
2244
2245 /* Generate code to initialize the parameters of the function at the
2246 top of the stack in ID from the GIMPLE_CALL STMT. */
2247
2248 static void
2249 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2250 tree fn, basic_block bb)
2251 {
2252 tree parms;
2253 size_t i;
2254 tree p;
2255 tree vars = NULL_TREE;
2256 tree static_chain = gimple_call_chain (stmt);
2257
2258 /* Figure out what the parameters are. */
2259 parms = DECL_ARGUMENTS (fn);
2260
2261 /* Loop through the parameter declarations, replacing each with an
2262 equivalent VAR_DECL, appropriately initialized. */
2263 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2264 {
2265 tree val;
2266 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2267 setup_one_parameter (id, p, val, fn, bb, &vars);
2268 }
2269
2270 /* Initialize the static chain. */
2271 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2272 gcc_assert (fn != current_function_decl);
2273 if (p)
2274 {
2275 /* No static chain? Seems like a bug in tree-nested.c. */
2276 gcc_assert (static_chain);
2277
2278 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2279 }
2280
2281 declare_inline_vars (id->block, vars);
2282 }
2283
2284
2285 /* Declare a return variable to replace the RESULT_DECL for the
2286 function we are calling. An appropriate DECL_STMT is returned.
2287 The USE_STMT is filled to contain a use of the declaration to
2288 indicate the return value of the function.
2289
2290 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
2291 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2292 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2293
2294 The return value is a (possibly null) value that is the result of the
2295 function as seen by the callee. *USE_P is a (possibly null) value that
2296 holds the result as seen by the caller. */
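/* Illustrative example (hypothetical names): for a call a = foo () where
   foo returns an int, the RESULT_DECL of foo is mapped either to a fresh
   VAR_DECL, say retval.0, or, when it is safe to do so, to the
   destination a itself.  Copied references to the RESULT_DECL then use
   that variable, and expand_call_inline later replaces the original call
   with a = retval.0.  */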
2297
2298 static tree
2299 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2300 tree *use_p)
2301 {
2302 tree callee = id->src_fn;
2303 tree caller = id->dst_fn;
2304 tree result = DECL_RESULT (callee);
2305 tree callee_type = TREE_TYPE (result);
2306 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2307 tree var, use;
2308
2309 /* We don't need to do anything for functions that don't return
2310 anything. */
2311 if (!result || VOID_TYPE_P (callee_type))
2312 {
2313 *use_p = NULL_TREE;
2314 return NULL_TREE;
2315 }
2316
2317 /* If there was a return slot, then the return value is the
2318 dereferenced address of that object. */
2319 if (return_slot)
2320 {
2321 /* The front end shouldn't have used both return_slot and
2322 a modify expression. */
2323 gcc_assert (!modify_dest);
2324 if (DECL_BY_REFERENCE (result))
2325 {
2326 tree return_slot_addr = build_fold_addr_expr (return_slot);
2327 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2328
2329 /* We are going to construct *&return_slot and we can't do that
2330 for variables believed not to be addressable.
2331
2332 FIXME: This check can possibly trigger, because values returned
2333 via the return slot optimization are not believed by alias
2334 analysis to have their address taken. */
2335 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2336 if (gimple_in_ssa_p (cfun))
2337 {
2338 HOST_WIDE_INT bitsize;
2339 HOST_WIDE_INT bitpos;
2340 tree offset;
2341 enum machine_mode mode;
2342 int unsignedp;
2343 int volatilep;
2344 tree base;
2345 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2346 &offset,
2347 &mode, &unsignedp, &volatilep,
2348 false);
2349 if (TREE_CODE (base) == INDIRECT_REF)
2350 base = TREE_OPERAND (base, 0);
2351 if (TREE_CODE (base) == SSA_NAME)
2352 base = SSA_NAME_VAR (base);
2353 mark_sym_for_renaming (base);
2354 }
2355 var = return_slot_addr;
2356 }
2357 else
2358 {
2359 var = return_slot;
2360 gcc_assert (TREE_CODE (var) != SSA_NAME);
2361 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2362 }
2363 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2364 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2365 && !DECL_GIMPLE_REG_P (result)
2366 && DECL_P (var))
2367 DECL_GIMPLE_REG_P (var) = 0;
2368 use = NULL;
2369 goto done;
2370 }
2371
2372 /* All types requiring non-trivial constructors should have been handled. */
2373 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2374
2375 /* Attempt to avoid creating a new temporary variable. */
2376 if (modify_dest
2377 && TREE_CODE (modify_dest) != SSA_NAME)
2378 {
2379 bool use_it = false;
2380
2381 /* We can't use MODIFY_DEST if there's type promotion involved. */
2382 if (!useless_type_conversion_p (callee_type, caller_type))
2383 use_it = false;
2384
2385 /* ??? If we're assigning to a variable sized type, then we must
2386 reuse the destination variable, because we've no good way to
2387 create variable sized temporaries at this point. */
2388 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2389 use_it = true;
2390
2391 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2392 reuse it as the result of the call directly. Don't do this if
2393 it would promote MODIFY_DEST to addressable. */
2394 else if (TREE_ADDRESSABLE (result))
2395 use_it = false;
2396 else
2397 {
2398 tree base_m = get_base_address (modify_dest);
2399
2400 /* If the base isn't a decl, then it's a pointer, and we don't
2401 know where that's going to go. */
2402 if (!DECL_P (base_m))
2403 use_it = false;
2404 else if (is_global_var (base_m))
2405 use_it = false;
2406 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2407 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2408 && !DECL_GIMPLE_REG_P (result)
2409 && DECL_GIMPLE_REG_P (base_m))
2410 use_it = false;
2411 else if (!TREE_ADDRESSABLE (base_m))
2412 use_it = true;
2413 }
2414
2415 if (use_it)
2416 {
2417 var = modify_dest;
2418 use = NULL;
2419 goto done;
2420 }
2421 }
2422
2423 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2424
2425 var = copy_result_decl_to_var (result, id);
2426 if (gimple_in_ssa_p (cfun))
2427 {
2428 get_var_ann (var);
2429 add_referenced_var (var);
2430 }
2431
2432 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2433 DECL_STRUCT_FUNCTION (caller)->local_decls
2434 = tree_cons (NULL_TREE, var,
2435 DECL_STRUCT_FUNCTION (caller)->local_decls);
2436
2437 /* Do not have the rest of GCC warn about this variable as it should
2438 not be visible to the user. */
2439 TREE_NO_WARNING (var) = 1;
2440
2441 declare_inline_vars (id->block, var);
2442
2443 /* Build the use expr. If the return type of the function was
2444 promoted, convert it back to the expected type. */
2445 use = var;
2446 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2447 use = fold_convert (caller_type, var);
2448
2449 STRIP_USELESS_TYPE_CONVERSION (use);
2450
2451 if (DECL_BY_REFERENCE (result))
2452 {
2453 TREE_ADDRESSABLE (var) = 1;
2454 var = build_fold_addr_expr (var);
2455 }
2456
2457 done:
2458 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2459 way, when the RESULT_DECL is encountered, it will be
2460 automatically replaced by the VAR_DECL. */
2461 insert_decl_map (id, result, var);
2462
2463 /* Remember this so we can ignore it in remap_decls. */
2464 id->retvar = var;
2465
2466 *use_p = use;
2467 return var;
2468 }
2469
2470 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2471 to a local label. */
2472
2473 static tree
2474 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2475 {
2476 tree node = *nodep;
2477 tree fn = (tree) fnp;
2478
2479 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2480 return node;
2481
2482 if (TYPE_P (node))
2483 *walk_subtrees = 0;
2484
2485 return NULL_TREE;
2486 }
2487
2488 /* Callback through walk_tree. Determine if we've got an aggregate
2489 type that we can't support; return non-null if so. */
2490
2491 static tree
2492 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2493 void *data ATTRIBUTE_UNUSED)
2494 {
2495 tree t, node = *nodep;
2496
2497 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2498 {
2499 /* We cannot inline a function of the form
2500
2501 void F (int i) { struct S { int ar[i]; } s; }
2502
2503 Attempting to do so produces a catch-22.
2504 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2505 UNION_TYPE nodes, then it goes into infinite recursion on a
2506 structure containing a pointer to its own type. If it doesn't,
2507 then the type node for S doesn't get adjusted properly when
2508 F is inlined.
2509
2510 ??? This is likely no longer true, but it's too late in the 4.0
2511 cycle to try to find out. This should be checked for 4.1. */
2512 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2513 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2514 return node;
2515 }
2516
2517 return NULL_TREE;
2518 }
2519
2520
2521 /* Determine if the function can be copied. If so, return NULL. If
2522 not, return a string describing the reason for failure. */
2523
2524 static const char *
2525 copy_forbidden (struct function *fun, tree fndecl)
2526 {
2527 const char *reason = fun->cannot_be_copied_reason;
2528 tree step;
2529
2530 /* Only examine the function once. */
2531 if (fun->cannot_be_copied_set)
2532 return reason;
2533
2534 /* We cannot copy a function that receives a non-local goto
2535 because we cannot remap the destination label used in the
2536 function that is performing the non-local goto. */
2537 /* ??? Actually, this should be possible, if we work at it.
2538 No doubt there's just a handful of places that simply
2539 assume it doesn't happen and don't substitute properly. */
2540 if (fun->has_nonlocal_label)
2541 {
2542 reason = G_("function %q+F can never be copied "
2543 "because it receives a non-local goto");
2544 goto fail;
2545 }
2546
2547 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2548 {
2549 tree decl = TREE_VALUE (step);
2550
2551 if (TREE_CODE (decl) == VAR_DECL
2552 && TREE_STATIC (decl)
2553 && !DECL_EXTERNAL (decl)
2554 && DECL_INITIAL (decl)
2555 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2556 has_label_address_in_static_1,
2557 fndecl))
2558 {
2559 reason = G_("function %q+F can never be copied because it saves "
2560 "address of local label in a static variable");
2561 goto fail;
2562 }
2563
2564 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2565 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2566 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2567 cannot_copy_type_1, NULL))
2568 {
2569 reason = G_("function %q+F can never be copied "
2570 "because it uses variable sized variables");
2571 goto fail;
2572 }
2573 }
2574
2575 fail:
2576 fun->cannot_be_copied_reason = reason;
2577 fun->cannot_be_copied_set = true;
2578 return reason;
2579 }
2580
2581
2582 static const char *inline_forbidden_reason;
2583
2584 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2585 iff a function cannot be inlined. Also sets the reason why. */
2586
2587 static tree
2588 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2589 struct walk_stmt_info *wip)
2590 {
2591 tree fn = (tree) wip->info;
2592 tree t;
2593 gimple stmt = gsi_stmt (*gsi);
2594
2595 switch (gimple_code (stmt))
2596 {
2597 case GIMPLE_CALL:
2598 /* Refuse to inline an alloca call unless the user explicitly forced it,
2599 as this may change the program's memory overhead drastically when the
2600 function using alloca is called in a loop. In the GCC present in
2601 SPEC2000, inlining into schedule_block caused it to require 2GB of
2602 RAM instead of 256MB. */
2603 if (gimple_alloca_call_p (stmt)
2604 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2605 {
2606 inline_forbidden_reason
2607 = G_("function %q+F can never be inlined because it uses "
2608 "alloca (override using the always_inline attribute)");
2609 *handled_ops_p = true;
2610 return fn;
2611 }
2612
2613 t = gimple_call_fndecl (stmt);
2614 if (t == NULL_TREE)
2615 break;
2616
2617 /* We cannot inline functions that call setjmp. */
2618 if (setjmp_call_p (t))
2619 {
2620 inline_forbidden_reason
2621 = G_("function %q+F can never be inlined because it uses setjmp");
2622 *handled_ops_p = true;
2623 return t;
2624 }
2625
2626 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2627 switch (DECL_FUNCTION_CODE (t))
2628 {
2629 /* We cannot inline functions that take a variable number of
2630 arguments. */
2631 case BUILT_IN_VA_START:
2632 case BUILT_IN_NEXT_ARG:
2633 case BUILT_IN_VA_END:
2634 inline_forbidden_reason
2635 = G_("function %q+F can never be inlined because it "
2636 "uses variable argument lists");
2637 *handled_ops_p = true;
2638 return t;
2639
2640 case BUILT_IN_LONGJMP:
2641 /* We can't inline functions that call __builtin_longjmp at
2642 all. The non-local goto machinery really requires the
2643 destination be in a different function. If we allow the
2644 function calling __builtin_longjmp to be inlined into the
2645 function calling __builtin_setjmp, Things will Go Awry. */
2646 inline_forbidden_reason
2647 = G_("function %q+F can never be inlined because "
2648 "it uses setjmp-longjmp exception handling");
2649 *handled_ops_p = true;
2650 return t;
2651
2652 case BUILT_IN_NONLOCAL_GOTO:
2653 /* Similarly. */
2654 inline_forbidden_reason
2655 = G_("function %q+F can never be inlined because "
2656 "it uses non-local goto");
2657 *handled_ops_p = true;
2658 return t;
2659
2660 case BUILT_IN_RETURN:
2661 case BUILT_IN_APPLY_ARGS:
2662 /* If a __builtin_apply_args caller would be inlined,
2663 it would be saving arguments of the function it has
2664 been inlined into. Similarly, __builtin_return would
2665 return from the function into which the call has been inlined. */
2666 inline_forbidden_reason
2667 = G_("function %q+F can never be inlined because "
2668 "it uses __builtin_return or __builtin_apply_args");
2669 *handled_ops_p = true;
2670 return t;
2671
2672 default:
2673 break;
2674 }
2675 break;
2676
2677 case GIMPLE_GOTO:
2678 t = gimple_goto_dest (stmt);
2679
2680 /* We will not inline a function which uses computed goto. The
2681 addresses of its local labels, which may be tucked into
2682 global storage, are of course not constant across
2683 instantiations, which causes unexpected behavior. */
2684 if (TREE_CODE (t) != LABEL_DECL)
2685 {
2686 inline_forbidden_reason
2687 = G_("function %q+F can never be inlined "
2688 "because it contains a computed goto");
2689 *handled_ops_p = true;
2690 return t;
2691 }
2692 break;
2693
2694 default:
2695 break;
2696 }
2697
2698 *handled_ops_p = false;
2699 return NULL_TREE;
2700 }
2701
2702 /* Return true if FNDECL is a function that cannot be inlined into
2703 another one. */
2704
2705 static bool
2706 inline_forbidden_p (tree fndecl)
2707 {
2708 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2709 struct walk_stmt_info wi;
2710 struct pointer_set_t *visited_nodes;
2711 basic_block bb;
2712 bool forbidden_p = false;
2713
2714 /* First check for shared reasons not to copy the code. */
2715 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2716 if (inline_forbidden_reason != NULL)
2717 return true;
2718
2719 /* Next, walk the statements of the function looking for
2720 constructs we can't handle or that are non-optimal for inlining. */
2721 visited_nodes = pointer_set_create ();
2722 memset (&wi, 0, sizeof (wi));
2723 wi.info = (void *) fndecl;
2724 wi.pset = visited_nodes;
2725
2726 FOR_EACH_BB_FN (bb, fun)
2727 {
2728 gimple ret;
2729 gimple_seq seq = bb_seq (bb);
2730 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2731 forbidden_p = (ret != NULL);
2732 if (forbidden_p)
2733 break;
2734 }
2735
2736 pointer_set_destroy (visited_nodes);
2737 return forbidden_p;
2738 }
2739
2740 /* Returns nonzero if FN is a function that does not have any
2741 fundamental inline blocking properties. */
2742
2743 bool
2744 tree_inlinable_function_p (tree fn)
2745 {
2746 bool inlinable = true;
2747 bool do_warning;
2748 tree always_inline;
2749
2750 /* If we've already decided this function shouldn't be inlined,
2751 there's no need to check again. */
2752 if (DECL_UNINLINABLE (fn))
2753 return false;
2754
2755 /* We only warn for functions declared `inline' by the user. */
2756 do_warning = (warn_inline
2757 && DECL_DECLARED_INLINE_P (fn)
2758 && !DECL_NO_INLINE_WARNING_P (fn)
2759 && !DECL_IN_SYSTEM_HEADER (fn));
2760
2761 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2762
2763 if (flag_no_inline
2764 && always_inline == NULL)
2765 {
2766 if (do_warning)
2767 warning (OPT_Winline, "function %q+F can never be inlined because it "
2768 "is suppressed using -fno-inline", fn);
2769 inlinable = false;
2770 }
2771
2772 /* Don't auto-inline anything that might not be bound within
2773 this unit of translation. */
2774 else if (!DECL_DECLARED_INLINE_P (fn)
2775 && DECL_REPLACEABLE_P (fn))
2776 inlinable = false;
2777
2778 else if (!function_attribute_inlinable_p (fn))
2779 {
2780 if (do_warning)
2781 warning (OPT_Winline, "function %q+F can never be inlined because it "
2782 "uses attributes conflicting with inlining", fn);
2783 inlinable = false;
2784 }
2785
2786 else if (inline_forbidden_p (fn))
2787 {
2788 /* See if we should warn about uninlinable functions. Previously,
2789 some of these warnings would be issued while trying to expand
2790 the function inline, but that would cause multiple warnings
2791 about functions that would for example call alloca. But since
2792 this is a property of the function, just one warning is enough.
2793 As a bonus we can now give more details about the reason why a
2794 function is not inlinable. */
2795 if (always_inline)
2796 sorry (inline_forbidden_reason, fn);
2797 else if (do_warning)
2798 warning (OPT_Winline, inline_forbidden_reason, fn);
2799
2800 inlinable = false;
2801 }
2802
2803 /* Squirrel away the result so that we don't have to check again. */
2804 DECL_UNINLINABLE (fn) = !inlinable;
2805
2806 return inlinable;
2807 }
2808
2809 /* Estimate the cost of a memory move. Use the machine-dependent
2810 word size and take a possible memcpy call into account. */
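/* Worked example (illustrative, target-dependent numbers): on a target
   where MOVE_MAX_PIECES is 8 and the MOVE_MAX_PIECES * MOVE_RATIO
   threshold is not exceeded, moving a 24-byte structure costs
   (24 + 8 - 1) / 8 == 3 units, while a structure big enough to exceed
   the threshold is assumed to be copied via memcpy and costs 4 (three
   arguments plus the call).  */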
2811
2812 int
2813 estimate_move_cost (tree type)
2814 {
2815 HOST_WIDE_INT size;
2816
2817 gcc_assert (!VOID_TYPE_P (type));
2818
2819 size = int_size_in_bytes (type);
2820
2821 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
2822 /* Cost of a memcpy call, 3 arguments and the call. */
2823 return 4;
2824 else
2825 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
2826 }
2827
2828 /* Returns the cost of operation CODE, according to WEIGHTS. */
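/* For example, as encoded in the switch below: conversions and
   PAREN_EXPR are free, a PLUS_EXPR or a comparison costs 1, and a
   division or modulus by a non-constant costs WEIGHTS->div_mod_cost.  */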
2829
2830 static int
2831 estimate_operator_cost (enum tree_code code, eni_weights *weights,
2832 tree op1 ATTRIBUTE_UNUSED, tree op2)
2833 {
2834 switch (code)
2835 {
2836 /* These are "free" conversions, or their presumed cost
2837 is folded into other operations. */
2838 case RANGE_EXPR:
2839 CASE_CONVERT:
2840 case COMPLEX_EXPR:
2841 case PAREN_EXPR:
2842 return 0;
2843
2844 /* Assign cost of 1 to usual operations.
2845 ??? We may consider mapping RTL costs to this. */
2846 case COND_EXPR:
2847 case VEC_COND_EXPR:
2848
2849 case PLUS_EXPR:
2850 case POINTER_PLUS_EXPR:
2851 case MINUS_EXPR:
2852 case MULT_EXPR:
2853
2854 case FIXED_CONVERT_EXPR:
2855 case FIX_TRUNC_EXPR:
2856
2857 case NEGATE_EXPR:
2858 case FLOAT_EXPR:
2859 case MIN_EXPR:
2860 case MAX_EXPR:
2861 case ABS_EXPR:
2862
2863 case LSHIFT_EXPR:
2864 case RSHIFT_EXPR:
2865 case LROTATE_EXPR:
2866 case RROTATE_EXPR:
2867 case VEC_LSHIFT_EXPR:
2868 case VEC_RSHIFT_EXPR:
2869
2870 case BIT_IOR_EXPR:
2871 case BIT_XOR_EXPR:
2872 case BIT_AND_EXPR:
2873 case BIT_NOT_EXPR:
2874
2875 case TRUTH_ANDIF_EXPR:
2876 case TRUTH_ORIF_EXPR:
2877 case TRUTH_AND_EXPR:
2878 case TRUTH_OR_EXPR:
2879 case TRUTH_XOR_EXPR:
2880 case TRUTH_NOT_EXPR:
2881
2882 case LT_EXPR:
2883 case LE_EXPR:
2884 case GT_EXPR:
2885 case GE_EXPR:
2886 case EQ_EXPR:
2887 case NE_EXPR:
2888 case ORDERED_EXPR:
2889 case UNORDERED_EXPR:
2890
2891 case UNLT_EXPR:
2892 case UNLE_EXPR:
2893 case UNGT_EXPR:
2894 case UNGE_EXPR:
2895 case UNEQ_EXPR:
2896 case LTGT_EXPR:
2897
2898 case CONJ_EXPR:
2899
2900 case PREDECREMENT_EXPR:
2901 case PREINCREMENT_EXPR:
2902 case POSTDECREMENT_EXPR:
2903 case POSTINCREMENT_EXPR:
2904
2905 case REALIGN_LOAD_EXPR:
2906
2907 case REDUC_MAX_EXPR:
2908 case REDUC_MIN_EXPR:
2909 case REDUC_PLUS_EXPR:
2910 case WIDEN_SUM_EXPR:
2911 case WIDEN_MULT_EXPR:
2912 case DOT_PROD_EXPR:
2913
2914 case VEC_WIDEN_MULT_HI_EXPR:
2915 case VEC_WIDEN_MULT_LO_EXPR:
2916 case VEC_UNPACK_HI_EXPR:
2917 case VEC_UNPACK_LO_EXPR:
2918 case VEC_UNPACK_FLOAT_HI_EXPR:
2919 case VEC_UNPACK_FLOAT_LO_EXPR:
2920 case VEC_PACK_TRUNC_EXPR:
2921 case VEC_PACK_SAT_EXPR:
2922 case VEC_PACK_FIX_TRUNC_EXPR:
2923 case VEC_EXTRACT_EVEN_EXPR:
2924 case VEC_EXTRACT_ODD_EXPR:
2925 case VEC_INTERLEAVE_HIGH_EXPR:
2926 case VEC_INTERLEAVE_LOW_EXPR:
2927
2928 return 1;
2929
2930 /* A few special cases of expensive operations. This is useful
2931 to avoid inlining functions having too many of these. */
2932 case TRUNC_DIV_EXPR:
2933 case CEIL_DIV_EXPR:
2934 case FLOOR_DIV_EXPR:
2935 case ROUND_DIV_EXPR:
2936 case EXACT_DIV_EXPR:
2937 case TRUNC_MOD_EXPR:
2938 case CEIL_MOD_EXPR:
2939 case FLOOR_MOD_EXPR:
2940 case ROUND_MOD_EXPR:
2941 case RDIV_EXPR:
2942 if (TREE_CODE (op2) != INTEGER_CST)
2943 return weights->div_mod_cost;
2944 return 1;
2945
2946 default:
2947 /* We expect a copy assignment with no operator. */
2948 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
2949 return 0;
2950 }
2951 }
2952
2953
2954 /* Estimate number of instructions that will be created by expanding
2955 the statements in the statement sequence STMTS.
2956 WEIGHTS contains weights attributed to various constructs. */
2957
2958 static
2959 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
2960 {
2961 int cost;
2962 gimple_stmt_iterator gsi;
2963
2964 cost = 0;
2965 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
2966 cost += estimate_num_insns (gsi_stmt (gsi), weights);
2967
2968 return cost;
2969 }
2970
2971
2972 /* Estimate number of instructions that will be created by expanding STMT.
2973 WEIGHTS contains weights attributed to various constructs. */
2974
2975 int
2976 estimate_num_insns (gimple stmt, eni_weights *weights)
2977 {
2978 unsigned cost, i;
2979 enum gimple_code code = gimple_code (stmt);
2980 tree lhs;
2981 tree rhs;
2982
2983 switch (code)
2984 {
2985 case GIMPLE_ASSIGN:
2986 /* Try to estimate the cost of assignments. We have two cases to
2987 deal with:
2988 1) Simple assignments to registers;
2989 2) Stores to things that must live in memory. This includes
2990 "normal" stores to scalars, but also assignments of large
2991 structures, or constructors of big arrays;
2992
2993 Let us look at these two cases, assuming we have "a = b + C":
2994 <GIMPLE_ASSIGN <var_decl "a">
2995 <plus_expr <var_decl "b"> <constant C>>
2996 If "a" is a GIMPLE register, the assignment to it is free on almost
2997 any target, because "a" usually ends up in a real register. Hence
2998 the only cost of this expression comes from the PLUS_EXPR, and we
2999 can ignore the GIMPLE_ASSIGN.
3000 If "a" is not a GIMPLE register, the assignment to "a" will most
3001 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3002 of moving something into "a", which we compute using the function
3003 estimate_move_cost. */
3004 lhs = gimple_assign_lhs (stmt);
3005 rhs = gimple_assign_rhs1 (stmt);
3006
3007 /* EH magic stuff is most probably going to be optimized out.
3008 We rarely really need to save EH info for unwinding
3009 nested exceptions. */
3010 if (TREE_CODE (lhs) == FILTER_EXPR
3011 || TREE_CODE (lhs) == EXC_PTR_EXPR
3012 || TREE_CODE (rhs) == FILTER_EXPR
3013 || TREE_CODE (rhs) == EXC_PTR_EXPR)
3014 return 0;
3015 if (is_gimple_reg (lhs))
3016 cost = 0;
3017 else
3018 cost = estimate_move_cost (TREE_TYPE (lhs));
3019
3020 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3021 cost += estimate_move_cost (TREE_TYPE (rhs));
3022
3023 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3024 gimple_assign_rhs1 (stmt),
3025 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3026 == GIMPLE_BINARY_RHS
3027 ? gimple_assign_rhs2 (stmt) : NULL);
3028 break;
3029
3030 case GIMPLE_COND:
3031 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3032 gimple_op (stmt, 0),
3033 gimple_op (stmt, 1));
3034 break;
3035
3036 case GIMPLE_SWITCH:
3037 /* Take into account the cost of the switch + guess 2 conditional jumps
3038 for each case label.
3039
3040 TODO: once the switch expansion logic is sufficiently separated, we can
3041 do a better job of estimating the cost of the switch. */
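/* For example, a switch with 8 labels is estimated at
   floor_log2 (8) * 2 == 6 when WEIGHTS->time_based is set (roughly the
   depth of a balanced decision tree), and at 8 * 2 == 16 otherwise.  */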
3042 if (weights->time_based)
3043 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3044 else
3045 cost = gimple_switch_num_labels (stmt) * 2;
3046 break;
3047
3048 case GIMPLE_CALL:
3049 {
3050 tree decl = gimple_call_fndecl (stmt);
3051 tree addr = gimple_call_fn (stmt);
3052 tree funtype = TREE_TYPE (addr);
3053
3054 if (POINTER_TYPE_P (funtype))
3055 funtype = TREE_TYPE (funtype);
3056
3057 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3058 cost = weights->target_builtin_call_cost;
3059 else
3060 cost = weights->call_cost;
3061
3062 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3063 switch (DECL_FUNCTION_CODE (decl))
3064 {
3065 case BUILT_IN_CONSTANT_P:
3066 return 0;
3067 case BUILT_IN_EXPECT:
3068 return 0;
3069
3070 /* The prefetch instruction is not expensive. */
3071 case BUILT_IN_PREFETCH:
3072 cost = weights->target_builtin_call_cost;
3073 break;
3074
3075 default:
3076 break;
3077 }
3078
3079 if (decl)
3080 funtype = TREE_TYPE (decl);
3081
3082 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3083 cost += estimate_move_cost (TREE_TYPE (funtype));
3084 /* Our cost must be kept in sync with
3085 cgraph_estimate_size_after_inlining, which uses the function
3086 declaration to figure out the arguments. */
3087 if (decl && DECL_ARGUMENTS (decl))
3088 {
3089 tree arg;
3090 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3091 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3092 cost += estimate_move_cost (TREE_TYPE (arg));
3093 }
3094 else if (funtype && prototype_p (funtype))
3095 {
3096 tree t;
3097 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3098 t = TREE_CHAIN (t))
3099 if (!VOID_TYPE_P (TREE_VALUE (t)))
3100 cost += estimate_move_cost (TREE_VALUE (t));
3101 }
3102 else
3103 {
3104 for (i = 0; i < gimple_call_num_args (stmt); i++)
3105 {
3106 tree arg = gimple_call_arg (stmt, i);
3107 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3108 cost += estimate_move_cost (TREE_TYPE (arg));
3109 }
3110 }
3111
3112 break;
3113 }
3114
3115 case GIMPLE_GOTO:
3116 case GIMPLE_LABEL:
3117 case GIMPLE_NOP:
3118 case GIMPLE_PHI:
3119 case GIMPLE_RETURN:
3120 case GIMPLE_PREDICT:
3121 return 0;
3122
3123 case GIMPLE_ASM:
3124 case GIMPLE_RESX:
3125 return 1;
3126
3127 case GIMPLE_BIND:
3128 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3129
3130 case GIMPLE_EH_FILTER:
3131 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3132
3133 case GIMPLE_CATCH:
3134 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3135
3136 case GIMPLE_TRY:
3137 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3138 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3139
3140 /* OpenMP directives are generally very expensive. */
3141
3142 case GIMPLE_OMP_RETURN:
3143 case GIMPLE_OMP_SECTIONS_SWITCH:
3144 case GIMPLE_OMP_ATOMIC_STORE:
3145 case GIMPLE_OMP_CONTINUE:
3146 /* ...except these, which are cheap. */
3147 return 0;
3148
3149 case GIMPLE_OMP_ATOMIC_LOAD:
3150 return weights->omp_cost;
3151
3152 case GIMPLE_OMP_FOR:
3153 return (weights->omp_cost
3154 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3155 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3156
3157 case GIMPLE_OMP_PARALLEL:
3158 case GIMPLE_OMP_TASK:
3159 case GIMPLE_OMP_CRITICAL:
3160 case GIMPLE_OMP_MASTER:
3161 case GIMPLE_OMP_ORDERED:
3162 case GIMPLE_OMP_SECTION:
3163 case GIMPLE_OMP_SECTIONS:
3164 case GIMPLE_OMP_SINGLE:
3165 return (weights->omp_cost
3166 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3167
3168 default:
3169 gcc_unreachable ();
3170 }
3171
3172 return cost;
3173 }
3174
3175 /* Estimate number of instructions that will be created by expanding
3176 function FNDECL. WEIGHTS contains weights attributed to various
3177 constructs. */
3178
3179 int
3180 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3181 {
3182 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3183 gimple_stmt_iterator bsi;
3184 basic_block bb;
3185 int n = 0;
3186
3187 gcc_assert (my_function && my_function->cfg);
3188 FOR_EACH_BB_FN (bb, my_function)
3189 {
3190 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3191 n += estimate_num_insns (gsi_stmt (bsi), weights);
3192 }
3193
3194 return n;
3195 }
3196
3197
3198 /* Initializes weights used by estimate_num_insns. */
3199
3200 void
3201 init_inline_once (void)
3202 {
3203 eni_size_weights.call_cost = 1;
3204 eni_size_weights.target_builtin_call_cost = 1;
3205 eni_size_weights.div_mod_cost = 1;
3206 eni_size_weights.omp_cost = 40;
3207 eni_size_weights.time_based = false;
3208
3209 /* Estimating the time for a call is difficult, since we have no idea what the
3210 called function does. In the current uses of eni_time_weights,
3211 underestimating the cost does less harm than overestimating it, so
3212 we choose a rather small value here. */
3213 eni_time_weights.call_cost = 10;
3214 eni_time_weights.target_builtin_call_cost = 10;
3215 eni_time_weights.div_mod_cost = 10;
3216 eni_time_weights.omp_cost = 40;
3217 eni_time_weights.time_based = true;
3218 }
3219
3220 /* Estimate the number of instructions in a gimple_seq. */
3221
3222 int
3223 count_insns_seq (gimple_seq seq, eni_weights *weights)
3224 {
3225 gimple_stmt_iterator gsi;
3226 int n = 0;
3227 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3228 n += estimate_num_insns (gsi_stmt (gsi), weights);
3229
3230 return n;
3231 }
3232
3233
3234 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3235
3236 static void
3237 prepend_lexical_block (tree current_block, tree new_block)
3238 {
3239 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3240 BLOCK_SUBBLOCKS (current_block) = new_block;
3241 BLOCK_SUPERCONTEXT (new_block) = current_block;
3242 }
3243
3244 /* Fetch the callee declaration from the call graph edge going from NODE
3245 and associated with the call statement STMT. Return NULL_TREE if not found. */
3246 static tree
3247 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3248 {
3249 struct cgraph_edge *cs;
3250
3251 cs = cgraph_edge (node, stmt);
3252 if (cs)
3253 return cs->callee->decl;
3254
3255 return NULL_TREE;
3256 }
3257
3258 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3259
3260 static bool
3261 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3262 {
3263 tree retvar, use_retvar;
3264 tree fn;
3265 struct pointer_map_t *st;
3266 tree return_slot;
3267 tree modify_dest;
3268 location_t saved_location;
3269 struct cgraph_edge *cg_edge;
3270 cgraph_inline_failed_t reason;
3271 basic_block return_block;
3272 edge e;
3273 gimple_stmt_iterator gsi, stmt_gsi;
3274 bool successfully_inlined = FALSE;
3275 bool purge_dead_abnormal_edges;
3276 tree t_step;
3277 tree var;
3278
3279 /* Set input_location here so we get the right instantiation context
3280 if we call instantiate_decl from inlinable_function_p. */
3281 saved_location = input_location;
3282 if (gimple_has_location (stmt))
3283 input_location = gimple_location (stmt);
3284
3285 /* From here on, we're only interested in CALL_EXPRs. */
3286 if (gimple_code (stmt) != GIMPLE_CALL)
3287 goto egress;
3288
3289 /* First, see if we can figure out what function is being called.
3290 If we cannot, then there is no hope of inlining the function. */
3291 fn = gimple_call_fndecl (stmt);
3292 if (!fn)
3293 {
3294 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3295 if (!fn)
3296 goto egress;
3297 }
3298
3299 /* Turn forward declarations into real ones. */
3300 fn = cgraph_node (fn)->decl;
3301
3302 /* If FN is a declaration of a function in a nested scope that was
3303 globally declared inline, we don't set its DECL_INITIAL.
3304 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3305 C++ front-end uses it for cdtors to refer to their internal
3306 declarations, which are not real functions. Fortunately those
3307 don't have trees to be saved, so we can tell by checking their
3308 gimple_body. */
3309 if (!DECL_INITIAL (fn)
3310 && DECL_ABSTRACT_ORIGIN (fn)
3311 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3312 fn = DECL_ABSTRACT_ORIGIN (fn);
3313
3314 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3315 Kill this check once this is fixed. */
3316 if (!id->dst_node->analyzed)
3317 goto egress;
3318
3319 cg_edge = cgraph_edge (id->dst_node, stmt);
3320
3321 /* Don't try to inline functions that are not well-suited to
3322 inlining. */
3323 if (!cgraph_inline_p (cg_edge, &reason))
3324 {
3325 /* If this call was originally indirect, we do not want to emit any
3326 inlining related warnings or sorry messages because there are no
3327 guarantees regarding those. */
3328 if (cg_edge->indirect_call)
3329 goto egress;
3330
3331 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3332 /* Avoid warnings during early inline pass. */
3333 && cgraph_global_info_ready)
3334 {
3335 sorry ("inlining failed in call to %q+F: %s", fn,
3336 cgraph_inline_failed_string (reason));
3337 sorry ("called from here");
3338 }
3339 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3340 && !DECL_IN_SYSTEM_HEADER (fn)
3341 && reason != CIF_UNSPECIFIED
3342 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3343 /* Avoid warnings during early inline pass. */
3344 && cgraph_global_info_ready)
3345 {
3346 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3347 fn, cgraph_inline_failed_string (reason));
3348 warning (OPT_Winline, "called from here");
3349 }
3350 goto egress;
3351 }
3352 fn = cg_edge->callee->decl;
3353
3354 #ifdef ENABLE_CHECKING
3355 if (cg_edge->callee->decl != id->dst_node->decl)
3356 verify_cgraph_node (cg_edge->callee);
3357 #endif
3358
3359 /* We will be inlining this callee. */
3360 id->eh_region = lookup_stmt_eh_region (stmt);
3361
3362 /* Split the block holding the GIMPLE_CALL. */
3363 e = split_block (bb, stmt);
3364 bb = e->src;
3365 return_block = e->dest;
3366 remove_edge (e);
3367
3368 /* split_block splits after the statement; work around this by
3369 moving the call into the second block manually. Not pretty,
3370 but seems easier than doing the CFG manipulation by hand
3371 when the GIMPLE_CALL is in the last statement of BB. */
3372 stmt_gsi = gsi_last_bb (bb);
3373 gsi_remove (&stmt_gsi, false);
3374
3375 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3376 been the source of abnormal edges. In this case, schedule
3377 the removal of dead abnormal edges. */
3378 gsi = gsi_start_bb (return_block);
3379 if (gsi_end_p (gsi))
3380 {
3381 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3382 purge_dead_abnormal_edges = true;
3383 }
3384 else
3385 {
3386 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3387 purge_dead_abnormal_edges = false;
3388 }
3389
3390 stmt_gsi = gsi_start_bb (return_block);
3391
3392 /* Build a block containing code to initialize the arguments, the
3393 actual inline expansion of the body, and a label for the return
3394 statements within the function to jump to. The type of the
3395 statement expression is the return type of the function call. */
3396 id->block = make_node (BLOCK);
3397 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3398 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3399 prepend_lexical_block (gimple_block (stmt), id->block);
3400
3401 /* Local declarations will be replaced by their equivalents in this
3402 map. */
3403 st = id->decl_map;
3404 id->decl_map = pointer_map_create ();
3405
3406 /* Record the function we are about to inline. */
3407 id->src_fn = fn;
3408 id->src_node = cg_edge->callee;
3409 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3410 id->gimple_call = stmt;
3411
3412 gcc_assert (!id->src_cfun->after_inlining);
3413
3414 id->entry_bb = bb;
3415 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3416 {
3417 gimple_stmt_iterator si = gsi_last_bb (bb);
3418 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3419 NOT_TAKEN),
3420 GSI_NEW_STMT);
3421 }
3422 initialize_inlined_parameters (id, stmt, fn, bb);
3423
3424 if (DECL_INITIAL (fn))
3425 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3426
3427 /* Return statements in the function body will be replaced by jumps
3428 to the RET_LABEL. */
3429 gcc_assert (DECL_INITIAL (fn));
3430 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3431
3432 /* Find the LHS to which the result of this call is assigned. */
3433 return_slot = NULL;
3434 if (gimple_call_lhs (stmt))
3435 {
3436 modify_dest = gimple_call_lhs (stmt);
3437
3438 /* The function which we are inlining might not return a value,
3439 in which case we should issue a warning that the function
3440 does not return a value. In that case the optimizers will
3441 see that the variable to which the value is assigned was not
3442 initialized. We do not want to issue a warning about that
3443 uninitialized variable. */
3444 if (DECL_P (modify_dest))
3445 TREE_NO_WARNING (modify_dest) = 1;
3446
3447 if (gimple_call_return_slot_opt_p (stmt))
3448 {
3449 return_slot = modify_dest;
3450 modify_dest = NULL;
3451 }
3452 }
3453 else
3454 modify_dest = NULL;
3455
3456 /* If we are inlining a call to the C++ operator new, we don't want
3457 to use type based alias analysis on the return value. Otherwise
3458 we may get confused if the compiler sees that the inlined new
3459 function returns a pointer which was just deleted. See bug
3460 33407. */
3461 if (DECL_IS_OPERATOR_NEW (fn))
3462 {
3463 return_slot = NULL;
3464 modify_dest = NULL;
3465 }
3466
3467 /* Declare the return variable for the function. */
3468 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
3469
3470 /* Add local vars in this inlined callee to caller. */
3471 t_step = id->src_cfun->local_decls;
3472 for (; t_step; t_step = TREE_CHAIN (t_step))
3473 {
3474 var = TREE_VALUE (t_step);
3475 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3476 {
3477 if (var_ann (var) && add_referenced_var (var))
3478 cfun->local_decls = tree_cons (NULL_TREE, var,
3479 cfun->local_decls);
3480 }
3481 else if (!can_be_nonlocal (var, id))
3482 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3483 cfun->local_decls);
3484 }
3485
3486 /* This is it. Duplicate the callee body. Assume callee is
3487 pre-gimplified. Note that we must not alter the caller
3488 function in any way before this point, as this CALL_EXPR may be
3489 a self-referential call; if we're calling ourselves, we need to
3490 duplicate our body before altering anything. */
3491 copy_body (id, bb->count, bb->frequency, bb, return_block);
3492
3493 /* Reset the escaped and callused solutions. */
3494 if (cfun->gimple_df)
3495 {
3496 pt_solution_reset (&cfun->gimple_df->escaped);
3497 pt_solution_reset (&cfun->gimple_df->callused);
3498 }
3499
3500 /* Clean up. */
3501 pointer_map_destroy (id->decl_map);
3502 id->decl_map = st;
3503
3504 /* Unlink the call's virtual operands before replacing it. */
3505 unlink_stmt_vdef (stmt);
3506
3507 /* If the inlined function returns a result that we care about,
3508 substitute the GIMPLE_CALL with an assignment of the return
3509 variable to the LHS of the call. That is, if STMT was
3510 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3511 if (use_retvar && gimple_call_lhs (stmt))
3512 {
3513 gimple old_stmt = stmt;
3514 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3515 gsi_replace (&stmt_gsi, stmt, false);
3516 if (gimple_in_ssa_p (cfun))
3517 mark_symbols_for_renaming (stmt);
3518 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3519 }
3520 else
3521 {
3522 /* Handle the case of inlining a function with no return
3523 statement, which causes the return value to become undefined. */
3524 if (gimple_call_lhs (stmt)
3525 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3526 {
3527 tree name = gimple_call_lhs (stmt);
3528 tree var = SSA_NAME_VAR (name);
3529 tree def = gimple_default_def (cfun, var);
3530
3531 if (def)
3532 {
3533 /* If the variable is used undefined, make this name
3534 undefined via a move. */
3535 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3536 gsi_replace (&stmt_gsi, stmt, true);
3537 }
3538 else
3539 {
3540 /* Otherwise make this variable undefined. */
3541 gsi_remove (&stmt_gsi, true);
3542 set_default_def (var, name);
3543 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3544 }
3545 }
3546 else
3547 gsi_remove (&stmt_gsi, true);
3548 }
3549
3550 if (purge_dead_abnormal_edges)
3551 gimple_purge_dead_abnormal_call_edges (return_block);
3552
3553 /* If the value of the new expression is ignored, that's OK. We
3554 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3555 the equivalent inlined version either. */
3556 if (is_gimple_assign (stmt))
3557 {
3558 gcc_assert (gimple_assign_single_p (stmt)
3559 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3560 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3561 }
3562
3563 /* Output the inlining info for this abstract function, since it has been
3564 inlined. If we don't do this now, we can lose the information about the
3565 variables in the function when the blocks get blown away as soon as we
3566 remove the cgraph node. */
3567 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3568
3569 /* Update callgraph if needed. */
3570 cgraph_remove_node (cg_edge->callee);
3571
3572 id->block = NULL_TREE;
3573 successfully_inlined = TRUE;
3574
3575 egress:
3576 input_location = saved_location;
3577 return successfully_inlined;
3578 }
3579
3580 /* Expand call statements reachable from basic block BB.
3581 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3582 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We can
3583 unfortunately not use that function here because we need a pointer
3584 to the CALL_EXPR, not the tree itself. */
3585
3586 static bool
3587 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3588 {
3589 gimple_stmt_iterator gsi;
3590
3591 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3592 {
3593 gimple stmt = gsi_stmt (gsi);
3594
3595 if (is_gimple_call (stmt)
3596 && expand_call_inline (bb, stmt, id))
3597 return true;
3598 }
3599
3600 return false;
3601 }
3602
3603
3604 /* Walk all basic blocks created after FIRST and try to fold every statement
3605 in the STATEMENTS pointer set. */
3606
3607 static void
3608 fold_marked_statements (int first, struct pointer_set_t *statements)
3609 {
3610 for (; first < n_basic_blocks; first++)
3611 if (BASIC_BLOCK (first))
3612 {
3613 gimple_stmt_iterator gsi;
3614
3615 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3616 !gsi_end_p (gsi);
3617 gsi_next (&gsi))
3618 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3619 {
3620 gimple old_stmt = gsi_stmt (gsi);
3621 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3622
3623 if (fold_stmt (&gsi))
3624 {
3625 /* Re-read the statement from GSI as fold_stmt() may
3626 have changed it. */
3627 gimple new_stmt = gsi_stmt (gsi);
3628 update_stmt (new_stmt);
3629
3630 if (is_gimple_call (old_stmt)
3631 || is_gimple_call (new_stmt))
3632 cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);
3633
3634 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3635 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3636 }
3637 }
3638 }
3639 }
3640
3641 /* Return true if BB has at least one abnormal outgoing edge. */
3642
3643 static inline bool
3644 has_abnormal_outgoing_edge_p (basic_block bb)
3645 {
3646 edge e;
3647 edge_iterator ei;
3648
3649 FOR_EACH_EDGE (e, ei, bb->succs)
3650 if (e->flags & EDGE_ABNORMAL)
3651 return true;
3652
3653 return false;
3654 }
3655
3656 /* Expand calls to inline functions in the body of FN. */
3657
3658 unsigned int
3659 optimize_inline_calls (tree fn)
3660 {
3661 copy_body_data id;
3662 tree prev_fn;
3663 basic_block bb;
3664 int last = n_basic_blocks;
3665 struct gimplify_ctx gctx;
3666
3667 /* There is no point in performing inlining if errors have already
3668 occurred -- and we might crash if we try to inline invalid
3669 code. */
3670 if (errorcount || sorrycount)
3671 return 0;
3672
3673 /* Clear out ID. */
3674 memset (&id, 0, sizeof (id));
3675
3676 id.src_node = id.dst_node = cgraph_node (fn);
3677 id.dst_fn = fn;
3678 /* Or any functions that aren't finished yet. */
3679 prev_fn = NULL_TREE;
3680 if (current_function_decl)
3681 {
3682 id.dst_fn = current_function_decl;
3683 prev_fn = current_function_decl;
3684 }
3685
3686 id.copy_decl = copy_decl_maybe_to_var;
3687 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3688 id.transform_new_cfg = false;
3689 id.transform_return_to_modify = true;
3690 id.transform_lang_insert_block = NULL;
3691 id.statements_to_fold = pointer_set_create ();
3692
3693 push_gimplify_context (&gctx);
3694
3695 /* We make no attempts to keep dominance info up-to-date. */
3696 free_dominance_info (CDI_DOMINATORS);
3697 free_dominance_info (CDI_POST_DOMINATORS);
3698
3699 /* Register specific gimple functions. */
3700 gimple_register_cfg_hooks ();
3701
3702 /* Reach the trees by walking over the CFG, and note the
3703 enclosing basic-blocks in the call edges. */
3704 /* We walk the blocks going forward, because inlined function bodies
3705 will split id->current_basic_block, and the new blocks will
3706 follow it; we'll trudge through them, processing their CALL_EXPRs
3707 along the way. */
3708 FOR_EACH_BB (bb)
3709 gimple_expand_calls_inline (bb, &id);
3710
3711 pop_gimplify_context (NULL);
3712
3713 #ifdef ENABLE_CHECKING
3714 {
3715 struct cgraph_edge *e;
3716
3717 verify_cgraph_node (id.dst_node);
3718
3719 /* Double check that we inlined everything we are supposed to inline. */
3720 for (e = id.dst_node->callees; e; e = e->next_callee)
3721 gcc_assert (e->inline_failed);
3722 }
3723 #endif
3724
3725 /* Fold the statements before compacting/renumbering the basic blocks. */
3726 fold_marked_statements (last, id.statements_to_fold);
3727 pointer_set_destroy (id.statements_to_fold);
3728
3729 /* Renumber the (code) basic_blocks consecutively. */
3730 compact_blocks ();
3731 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3732 number_blocks (fn);
3733
3734 fold_cond_expr_cond ();
3735 delete_unreachable_blocks_update_callgraph (&id);
3736 #ifdef ENABLE_CHECKING
3737 verify_cgraph_node (id.dst_node);
3738 #endif
3739
3740 /* It would be nice to check SSA/CFG/statement consistency here, but it is
3741 not possible yet - the IPA passes might make various functions not
3742 throw, and they don't care to proactively update local EH info. This is
3743 done later in the fixup_cfg pass, which also executes the verification. */
3744 return (TODO_update_ssa
3745 | TODO_cleanup_cfg
3746 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
3747 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
3748 }
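
/* Usage sketch (illustrative only, not part of this file): a GIMPLE pass
   that wants to expand inline calls in the current function can call
   optimize_inline_calls from its execute hook and hand the returned TODO
   flags back to the pass manager; the pass name below is hypothetical:

     static unsigned int
     execute_example_expand_inlines (void)
     {
       return optimize_inline_calls (current_function_decl);
     }

   Returning the TODO flags lets the pass manager perform the SSA update
   and CFG cleanup that this function deliberately defers.  */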
3749
3750 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
3751
3752 tree
3753 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3754 {
3755 enum tree_code code = TREE_CODE (*tp);
3756 enum tree_code_class cl = TREE_CODE_CLASS (code);
3757
3758 /* We make copies of most nodes. */
3759 if (IS_EXPR_CODE_CLASS (cl)
3760 || code == TREE_LIST
3761 || code == TREE_VEC
3762 || code == TYPE_DECL
3763 || code == OMP_CLAUSE)
3764 {
3765 /* Because the chain gets clobbered when we make a copy, we save it
3766 here. */
3767 tree chain = NULL_TREE, new_tree;
3768
3769 chain = TREE_CHAIN (*tp);
3770
3771 /* Copy the node. */
3772 new_tree = copy_node (*tp);
3773
3774 /* Propagate mudflap marked-ness. */
3775 if (flag_mudflap && mf_marked_p (*tp))
3776 mf_mark (new_tree);
3777
3778 *tp = new_tree;
3779
3780 /* Now, restore the chain, if appropriate. That will cause
3781 walk_tree to walk into the chain as well. */
3782 if (code == PARM_DECL
3783 || code == TREE_LIST
3784 || code == OMP_CLAUSE)
3785 TREE_CHAIN (*tp) = chain;
3786
3787 /* For now, we don't update BLOCKs when we make copies. So, we
3788 have to nullify all BIND_EXPRs. */
3789 if (TREE_CODE (*tp) == BIND_EXPR)
3790 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3791 }
3792 else if (code == CONSTRUCTOR)
3793 {
3794 /* CONSTRUCTOR nodes need special handling because
3795 we need to duplicate the vector of elements. */
3796 tree new_tree;
3797
3798 new_tree = copy_node (*tp);
3799
3800 /* Propagate mudflap marked-ness. */
3801 if (flag_mudflap && mf_marked_p (*tp))
3802 mf_mark (new_tree);
3803
3804 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
3805 CONSTRUCTOR_ELTS (*tp));
3806 *tp = new_tree;
3807 }
3808 else if (TREE_CODE_CLASS (code) == tcc_type)
3809 *walk_subtrees = 0;
3810 else if (TREE_CODE_CLASS (code) == tcc_declaration)
3811 *walk_subtrees = 0;
3812 else if (TREE_CODE_CLASS (code) == tcc_constant)
3813 *walk_subtrees = 0;
3814 else
3815 gcc_assert (code != STATEMENT_LIST);
3816 return NULL_TREE;
3817 }
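
/* Usage sketch (illustrative): copy_tree_r is intended to be used as a
   walk_tree callback for deep-copying an expression; EXPR here stands for
   any GENERIC expression owned by the caller:

     tree expr_copy = expr;
     walk_tree (&expr_copy, copy_tree_r, NULL, NULL);

   After the walk, EXPR_COPY shares no expression, TREE_LIST or CONSTRUCTOR
   nodes with EXPR, while types, constants and most declarations remain
   shared because the callback does not descend into them.  */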
3818
3819 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
3820 information indicating to what new SAVE_EXPR this one should be mapped,
3821 use that one. Otherwise, create a new node and enter it in ST. */
3823
3824 static void
3825 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
3826 {
3827 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3828 tree *n;
3829 tree t;
3830
3831 /* See if we already encountered this SAVE_EXPR. */
3832 n = (tree *) pointer_map_contains (st, *tp);
3833
3834 /* If we didn't already remap this SAVE_EXPR, do so now. */
3835 if (!n)
3836 {
3837 t = copy_node (*tp);
3838
3839 /* Remember this SAVE_EXPR. */
3840 *pointer_map_insert (st, *tp) = t;
3841 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3842 *pointer_map_insert (st, t) = t;
3843 }
3844 else
3845 {
3846 /* We've already walked into this SAVE_EXPR; don't do it again. */
3847 *walk_subtrees = 0;
3848 t = *n;
3849 }
3850
3851 /* Replace this SAVE_EXPR with the copy. */
3852 *tp = t;
3853 }
3854
3855 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
3856 copies the declaration and enters it in the decl map in DATA (which is
3857 really a `copy_body_data *'). */
3858
3859 static tree
3860 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3861 void *data)
3862 {
3863 copy_body_data *id = (copy_body_data *) data;
3864
3865 /* Don't walk into types. */
3866 if (TYPE_P (*tp))
3867 *walk_subtrees = 0;
3868
3869 else if (TREE_CODE (*tp) == LABEL_EXPR)
3870 {
3871 tree decl = TREE_OPERAND (*tp, 0);
3872
3873 /* Copy the decl and remember the copy. */
3874 insert_decl_map (id, decl, id->copy_decl (decl, id));
3875 }
3876
3877 return NULL_TREE;
3878 }
3879
3880 /* Perform any modifications to EXPR required when it is unsaved. Does
3881 not recurse into EXPR's subtrees. */
3882
3883 static void
3884 unsave_expr_1 (tree expr)
3885 {
3886 switch (TREE_CODE (expr))
3887 {
3888 case TARGET_EXPR:
3889 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3890 It's OK for this to happen if it was part of a subtree that
3891 isn't immediately expanded, such as operand 2 of another
3892 TARGET_EXPR. */
3893 if (TREE_OPERAND (expr, 1))
3894 break;
3895
3896 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3897 TREE_OPERAND (expr, 3) = NULL_TREE;
3898 break;
3899
3900 default:
3901 break;
3902 }
3903 }
3904
3905 /* Called via walk_tree when an expression is unsaved. Using the
3906 decl map pointed to by ST (which is really a `pointer_map_t *'),
3907 remaps all local declarations to appropriate replacements. */
3908
3909 static tree
3910 unsave_r (tree *tp, int *walk_subtrees, void *data)
3911 {
3912 copy_body_data *id = (copy_body_data *) data;
3913 struct pointer_map_t *st = id->decl_map;
3914 tree *n;
3915
3916 /* Only a local declaration (variable or label). */
3917 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3918 || TREE_CODE (*tp) == LABEL_DECL)
3919 {
3920 /* Lookup the declaration. */
3921 n = (tree *) pointer_map_contains (st, *tp);
3922
3923 /* If it's there, remap it. */
3924 if (n)
3925 *tp = *n;
3926 }
3927
3928 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3929 gcc_unreachable ();
3930 else if (TREE_CODE (*tp) == BIND_EXPR)
3931 copy_bind_expr (tp, walk_subtrees, id);
3932 else if (TREE_CODE (*tp) == SAVE_EXPR
3933 || TREE_CODE (*tp) == TARGET_EXPR)
3934 remap_save_expr (tp, st, walk_subtrees);
3935 else
3936 {
3937 copy_tree_r (tp, walk_subtrees, NULL);
3938
3939 /* Do whatever unsaving is required. */
3940 unsave_expr_1 (*tp);
3941 }
3942
3943 /* Keep iterating. */
3944 return NULL_TREE;
3945 }
3946
3947 /* Copies everything in EXPR and replaces variables, labels
3948 and SAVE_EXPRs local to EXPR. */
3949
3950 tree
3951 unsave_expr_now (tree expr)
3952 {
3953 copy_body_data id;
3954
3955 /* There's nothing to do for NULL_TREE. */
3956 if (expr == 0)
3957 return expr;
3958
3959 /* Set up ID. */
3960 memset (&id, 0, sizeof (id));
3961 id.src_fn = current_function_decl;
3962 id.dst_fn = current_function_decl;
3963 id.decl_map = pointer_map_create ();
3964
3965 id.copy_decl = copy_decl_no_change;
3966 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3967 id.transform_new_cfg = false;
3968 id.transform_return_to_modify = false;
3969 id.transform_lang_insert_block = NULL;
3970
3971 /* Walk the tree once to find local labels. */
3972 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3973
3974 /* Walk the tree again, copying, remapping, and unsaving. */
3975 walk_tree (&expr, unsave_r, &id, NULL);
3976
3977 /* Clean up. */
3978 pointer_map_destroy (id.decl_map);
3979
3980 return expr;
3981 }
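
/* Usage sketch (illustrative): a caller that needs to expand the same
   GENERIC expression more than once can take a private copy first; EXPR
   is any expression owned by current_function_decl:

     tree copy = unsave_expr_now (expr);

   COPY shares no SAVE_EXPRs, unexpanded TARGET_EXPRs or local labels with
   EXPR, so the two can be processed independently.  */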
3982
3983 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
3984 label, copies the declaration and enters it in the decl map in DATA (which
3985 is really a `copy_body_data *'). */
3986
3987 static tree
3988 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
3989 bool *handled_ops_p ATTRIBUTE_UNUSED,
3990 struct walk_stmt_info *wi)
3991 {
3992 copy_body_data *id = (copy_body_data *) wi->info;
3993 gimple stmt = gsi_stmt (*gsip);
3994
3995 if (gimple_code (stmt) == GIMPLE_LABEL)
3996 {
3997 tree decl = gimple_label_label (stmt);
3998
3999 /* Copy the decl and remember the copy. */
4000 insert_decl_map (id, decl, id->copy_decl (decl, id));
4001 }
4002
4003 return NULL_TREE;
4004 }
4005
4006
4007 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4008 Using the decl map pointed to by ST (which is really a `pointer_map_t *'),
4009 remaps all local declarations to appropriate replacements in gimple
4010 operands. */
4011
4012 static tree
4013 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4014 {
4015 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4016 copy_body_data *id = (copy_body_data *) wi->info;
4017 struct pointer_map_t *st = id->decl_map;
4018 tree *n;
4019 tree expr = *tp;
4020
4021 /* Only a local declaration (variable or label). */
4022 if ((TREE_CODE (expr) == VAR_DECL
4023 && !TREE_STATIC (expr))
4024 || TREE_CODE (expr) == LABEL_DECL)
4025 {
4026 /* Lookup the declaration. */
4027 n = (tree *) pointer_map_contains (st, expr);
4028
4029 /* If it's there, remap it. */
4030 if (n)
4031 *tp = *n;
4032 *walk_subtrees = 0;
4033 }
4034 else if (TREE_CODE (expr) == STATEMENT_LIST
4035 || TREE_CODE (expr) == BIND_EXPR
4036 || TREE_CODE (expr) == SAVE_EXPR)
4037 gcc_unreachable ();
4038 else if (TREE_CODE (expr) == TARGET_EXPR)
4039 {
4040 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4041 It's OK for this to happen if it was part of a subtree that
4042 isn't immediately expanded, such as operand 2 of another
4043 TARGET_EXPR. */
4044 if (!TREE_OPERAND (expr, 1))
4045 {
4046 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4047 TREE_OPERAND (expr, 3) = NULL_TREE;
4048 }
4049 }
4050
4051 /* Keep iterating. */
4052 return NULL_TREE;
4053 }
4054
4055
4056 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4057 Using the decl map in the copy_body_data pointed to by WI->info,
4058 remaps all local declarations to appropriate replacements in gimple
4059 statements. */
4060
4061 static tree
4062 replace_locals_stmt (gimple_stmt_iterator *gsip,
4063 bool *handled_ops_p ATTRIBUTE_UNUSED,
4064 struct walk_stmt_info *wi)
4065 {
4066 copy_body_data *id = (copy_body_data *) wi->info;
4067 gimple stmt = gsi_stmt (*gsip);
4068
4069 if (gimple_code (stmt) == GIMPLE_BIND)
4070 {
4071 tree block = gimple_bind_block (stmt);
4072
4073 if (block)
4074 {
4075 remap_block (&block, id);
4076 gimple_bind_set_block (stmt, block);
4077 }
4078
4079 /* This will remap a lot of the same decls again, but this should be
4080 harmless. */
4081 if (gimple_bind_vars (stmt))
4082 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4083 }
4084
4085 /* Keep iterating. */
4086 return NULL_TREE;
4087 }
4088
4089
4090 /* Copies everything in SEQ and replaces variables and labels local to
4091 current_function_decl. */
4092
4093 gimple_seq
4094 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4095 {
4096 copy_body_data id;
4097 struct walk_stmt_info wi;
4098 struct pointer_set_t *visited;
4099 gimple_seq copy;
4100
4101 /* There's nothing to do for an empty sequence. */
4102 if (seq == NULL)
4103 return seq;
4104
4105 /* Set up ID. */
4106 memset (&id, 0, sizeof (id));
4107 id.src_fn = current_function_decl;
4108 id.dst_fn = current_function_decl;
4109 id.decl_map = pointer_map_create ();
4110
4111 id.copy_decl = copy_decl_no_change;
4112 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4113 id.transform_new_cfg = false;
4114 id.transform_return_to_modify = false;
4115 id.transform_lang_insert_block = NULL;
4116
4117 /* Walk the tree once to find local labels. */
4118 memset (&wi, 0, sizeof (wi));
4119 visited = pointer_set_create ();
4120 wi.info = &id;
4121 wi.pset = visited;
4122 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4123 pointer_set_destroy (visited);
4124
4125 copy = gimple_seq_copy (seq);
4126
4127 /* Walk the copy, remapping decls. */
4128 memset (&wi, 0, sizeof (wi));
4129 wi.info = &id;
4130 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4131
4132 /* Clean up. */
4133 pointer_map_destroy (id.decl_map);
4134
4135 return copy;
4136 }
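
/* Usage sketch (illustrative): a pass that wants to emit a statement
   sequence twice can copy it first so the two instances do not share
   local state; SEQ is a gimple_seq owned by current_function_decl:

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   Labels and the variables declared by GIMPLE_BINDs inside the sequence
   are replaced by fresh declarations in COPY, so inserting both SEQ and
   COPY into the function does not create duplicate definitions.  */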
4137
4138
4139 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4140
4141 static tree
4142 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4143 {
4144 if (*tp == data)
4145 return (tree) data;
4146 else
4147 return NULL;
4148 }
4149
4150 bool
4151 debug_find_tree (tree top, tree search)
4152 {
4153 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4154 }
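
/* Usage sketch (for interactive debugging): this helper exists so that it
   can be called by hand from gdb while inspecting trees, e.g.:

     (gdb) call debug_find_tree (top, search)

   A nonzero result means SEARCH was encountered somewhere while walking
   TOP.  */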
4155
4156
4157 /* Declare the variables created by the inliner. Add all the variables in
4158 VARS to BLOCK. */
4159
4160 static void
4161 declare_inline_vars (tree block, tree vars)
4162 {
4163 tree t;
4164 for (t = vars; t; t = TREE_CHAIN (t))
4165 {
4166 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4167 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4168 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4169 }
4170
4171 if (block)
4172 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4173 }
4174
4175 /* Finish the copy COPY of DECL. The DECL originally was in ID->src_fn,
4176 but the copy will be placed in ID->dst_fn. */
4178
4179 static tree
4180 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4181 {
4182 /* Don't generate debug information for the copy if we wouldn't have
4183 generated it for the original either. */
4184 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4185 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4186
4187 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4188 declaration inspired this copy. */
4189 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4190
4191 /* The new variable/label has no RTL, yet. */
4192 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4193 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4194 SET_DECL_RTL (copy, NULL_RTX);
4195
4196 /* These args would always appear unused, if not for this. */
4197 TREE_USED (copy) = 1;
4198
4199 /* Set the context for the new declaration. */
4200 if (!DECL_CONTEXT (decl))
4201 /* Globals stay global. */
4202 ;
4203 else if (DECL_CONTEXT (decl) != id->src_fn)
4204 /* Things that weren't in the scope of the function we're inlining
4205 from aren't in the scope we're inlining to, either. */
4206 ;
4207 else if (TREE_STATIC (decl))
4208 /* Function-scoped static variables should stay in the original
4209 function. */
4210 ;
4211 else
4212 /* Ordinary automatic local variables are now in the scope of the
4213 new function. */
4214 DECL_CONTEXT (copy) = id->dst_fn;
4215
4216 return copy;
4217 }
4218
4219 static tree
4220 copy_decl_to_var (tree decl, copy_body_data *id)
4221 {
4222 tree copy, type;
4223
4224 gcc_assert (TREE_CODE (decl) == PARM_DECL
4225 || TREE_CODE (decl) == RESULT_DECL);
4226
4227 type = TREE_TYPE (decl);
4228
4229 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4230 VAR_DECL, DECL_NAME (decl), type);
4231 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4232 TREE_READONLY (copy) = TREE_READONLY (decl);
4233 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4234 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4235
4236 return copy_decl_for_dup_finish (id, decl, copy);
4237 }
4238
4239 /* Like copy_decl_to_var, but create a return slot object instead of a
4240 pointer variable for return by invisible reference. */
4241
4242 static tree
4243 copy_result_decl_to_var (tree decl, copy_body_data *id)
4244 {
4245 tree copy, type;
4246
4247 gcc_assert (TREE_CODE (decl) == PARM_DECL
4248 || TREE_CODE (decl) == RESULT_DECL);
4249
4250 type = TREE_TYPE (decl);
4251 if (DECL_BY_REFERENCE (decl))
4252 type = TREE_TYPE (type);
4253
4254 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4255 VAR_DECL, DECL_NAME (decl), type);
4256 TREE_READONLY (copy) = TREE_READONLY (decl);
4257 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4258 if (!DECL_BY_REFERENCE (decl))
4259 {
4260 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4261 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4262 }
4263
4264 return copy_decl_for_dup_finish (id, decl, copy);
4265 }
4266
4267 tree
4268 copy_decl_no_change (tree decl, copy_body_data *id)
4269 {
4270 tree copy;
4271
4272 copy = copy_node (decl);
4273
4274 /* The COPY is not abstract; it will be generated in DST_FN. */
4275 DECL_ABSTRACT (copy) = 0;
4276 lang_hooks.dup_lang_specific_decl (copy);
4277
4278 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4279 been taken; it's for internal bookkeeping in expand_goto_internal. */
4280 if (TREE_CODE (copy) == LABEL_DECL)
4281 {
4282 TREE_ADDRESSABLE (copy) = 0;
4283 LABEL_DECL_UID (copy) = -1;
4284 }
4285
4286 return copy_decl_for_dup_finish (id, decl, copy);
4287 }
4288
4289 static tree
4290 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4291 {
4292 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4293 return copy_decl_to_var (decl, id);
4294 else
4295 return copy_decl_no_change (decl, id);
4296 }
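
/* Usage sketch (illustrative): the copy_decl_* helpers above are not
   called directly by passes; they are installed as the copy_decl hook of a
   copy_body_data and invoked through it, as this file itself does:

     copy_body_data id;
     memset (&id, 0, sizeof (id));
     id.copy_decl = copy_decl_maybe_to_var;
     ...
     insert_decl_map (&id, decl, id.copy_decl (decl, &id));

   Inlining uses copy_decl_maybe_to_var so that PARM_DECLs and RESULT_DECLs
   become ordinary VAR_DECLs in the caller, while versioning and unsaving
   install copy_decl_no_change to keep each decl's TREE_CODE.  */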
4297
4298 /* Return a copy of the function's argument tree. */
4299 static tree
4300 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4301 bitmap args_to_skip, tree *vars)
4302 {
4303 tree arg, *parg;
4304 tree new_parm = NULL;
4305 int i = 0;
4306
4307 parg = &new_parm;
4308
4309 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4310 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4311 {
4312 tree new_tree = remap_decl (arg, id);
4313 lang_hooks.dup_lang_specific_decl (new_tree);
4314 *parg = new_tree;
4315 parg = &TREE_CHAIN (new_tree);
4316 }
4317 else if (!pointer_map_contains (id->decl_map, arg))
4318 {
4319 /* Make an equivalent VAR_DECL. If the argument was used
4320 as a temporary variable later in the function, the uses will be
4321 replaced by this local variable. */
4322 tree var = copy_decl_to_var (arg, id);
4323 get_var_ann (var);
4324 add_referenced_var (var);
4325 insert_decl_map (id, arg, var);
4326 /* Declare this new variable. */
4327 TREE_CHAIN (var) = *vars;
4328 *vars = var;
4329 }
4330 return new_parm;
4331 }
4332
4333 /* Return a copy of the function's static chain. */
4334 static tree
4335 copy_static_chain (tree static_chain, copy_body_data * id)
4336 {
4337 tree *chain_copy, *pvar;
4338
4339 chain_copy = &static_chain;
4340 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4341 {
4342 tree new_tree = remap_decl (*pvar, id);
4343 lang_hooks.dup_lang_specific_decl (new_tree);
4344 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4345 *pvar = new_tree;
4346 }
4347 return static_chain;
4348 }
4349
4350 /* Return true if the function is allowed to be versioned.
4351 This is a guard for the versioning functionality. */
4352
4353 bool
4354 tree_versionable_function_p (tree fndecl)
4355 {
4356 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4357 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4358 }
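
/* Usage sketch (illustrative): versioning machinery is expected to test
   this guard before creating a new version, roughly:

     if (!tree_versionable_function_p (old_decl))
       return NULL;

   so that functions carrying the "noclone" attribute, or forbidden from
   being copied for other reasons, are never versioned.  */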
4359
4360 /* Delete all unreachable basic blocks and update callgraph.
4361 Doing so is somewhat nontrivial because we need to update all clones and
4362 remove inline functions that become unreachable. */
4363
4364 static bool
4365 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4366 {
4367 bool changed = false;
4368 basic_block b, next_bb;
4369
4370 find_unreachable_blocks ();
4371
4372 /* Delete all unreachable basic blocks. */
4373
4374 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4375 {
4376 next_bb = b->next_bb;
4377
4378 if (!(b->flags & BB_REACHABLE))
4379 {
4380 gimple_stmt_iterator bsi;
4381
4382 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4383 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4384 {
4385 struct cgraph_edge *e;
4386 struct cgraph_node *node;
4387
4388 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4389 {
4390 if (!e->inline_failed)
4391 cgraph_remove_node_and_inline_clones (e->callee);
4392 else
4393 cgraph_remove_edge (e);
4394 }
4395 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4396 && id->dst_node->clones)
4397 for (node = id->dst_node->clones; node != id->dst_node;)
4398 {
4399 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4400 {
4401 if (!e->inline_failed)
4402 cgraph_remove_node_and_inline_clones (e->callee);
4403 else
4404 cgraph_remove_edge (e);
4405 }
4406
4407 if (node->clones)
4408 node = node->clones;
4409 else if (node->next_sibling_clone)
4410 node = node->next_sibling_clone;
4411 else
4412 {
4413 while (node != id->dst_node && !node->next_sibling_clone)
4414 node = node->clone_of;
4415 if (node != id->dst_node)
4416 node = node->next_sibling_clone;
4417 }
4418 }
4419 }
4420 delete_basic_block (b);
4421 changed = true;
4422 }
4423 }
4424
4425 if (changed)
4426 tidy_fallthru_edges ();
4427 #ifdef ENABLE_CHECKING
4428 verify_cgraph_node (id->dst_node);
4429 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4430 && id->dst_node->clones)
4431 {
4432 struct cgraph_node *node;
4433 for (node = id->dst_node->clones; node != id->dst_node;)
4434 {
4435 verify_cgraph_node (node);
4436
4437 if (node->clones)
4438 node = node->clones;
4439 else if (node->next_sibling_clone)
4440 node = node->next_sibling_clone;
4441 else
4442 {
4443 while (node != id->dst_node && !node->next_sibling_clone)
4444 node = node->clone_of;
4445 if (node != id->dst_node)
4446 node = node->next_sibling_clone;
4447 }
4448 }
4449 }
4450 #endif
4451 return changed;
4452 }
4453
4454 /* Update clone info after duplication. */
4455
4456 static void
4457 update_clone_info (copy_body_data * id)
4458 {
4459 struct cgraph_node *node;
4460 if (!id->dst_node->clones)
4461 return;
4462 for (node = id->dst_node->clones; node != id->dst_node;)
4463 {
4464 /* First update replace maps to match the new body. */
4465 if (node->clone.tree_map)
4466 {
4467 unsigned int i;
4468 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4469 {
4470 struct ipa_replace_map *replace_info;
4471 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4472 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4473 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4474 }
4475 }
4476 if (node->clones)
4477 node = node->clones;
4478 else if (node->next_sibling_clone)
4479 node = node->next_sibling_clone;
4480 else
4481 {
4482 while (node != id->dst_node && !node->next_sibling_clone)
4483 node = node->clone_of;
4484 if (node != id->dst_node)
4485 node = node->next_sibling_clone;
4486 }
4487 }
4488 }
4489
4490 /* Create a copy of a function's tree.
4491 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4492 of the original function and the new copied function
4493 respectively. In case we want to replace a DECL
4494 tree with another tree while duplicating the function's
4495 body, TREE_MAP represents the mapping between these
4496 trees. If UPDATE_CLONES is set, the call_stmt fields
4497 of edges of clones of the function will be updated. ARGS_TO_SKIP, if non-NULL, marks argument positions to omit from the new version. */
4498 void
4499 tree_function_versioning (tree old_decl, tree new_decl,
4500 VEC(ipa_replace_map_p,gc)* tree_map,
4501 bool update_clones, bitmap args_to_skip)
4502 {
4503 struct cgraph_node *old_version_node;
4504 struct cgraph_node *new_version_node;
4505 copy_body_data id;
4506 tree p;
4507 unsigned i;
4508 struct ipa_replace_map *replace_info;
4509 basic_block old_entry_block;
4510 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4511
4512 tree t_step;
4513 tree old_current_function_decl = current_function_decl;
4514 tree vars = NULL_TREE;
4515
4516 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4517 && TREE_CODE (new_decl) == FUNCTION_DECL);
4518 DECL_POSSIBLY_INLINED (old_decl) = 1;
4519
4520 old_version_node = cgraph_node (old_decl);
4521 new_version_node = cgraph_node (new_decl);
4522
4523 /* Output the inlining info for this abstract function, since it has been
4524 inlined. If we don't do this now, we can lose the information about the
4525 variables in the function when the blocks get blown away as soon as we
4526 remove the cgraph node. */
4527 (*debug_hooks->outlining_inline_function) (old_decl);
4528
4529 DECL_ARTIFICIAL (new_decl) = 1;
4530 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4531
4532 /* Prepare the data structures for the tree copy. */
4533 memset (&id, 0, sizeof (id));
4534
4535 /* Record statements to be folded once the body has been copied. */
4536 id.statements_to_fold = pointer_set_create ();
4537
4538 id.decl_map = pointer_map_create ();
4539 id.src_fn = old_decl;
4540 id.dst_fn = new_decl;
4541 id.src_node = old_version_node;
4542 id.dst_node = new_version_node;
4543 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4544
4545 id.copy_decl = copy_decl_no_change;
4546 id.transform_call_graph_edges
4547 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4548 id.transform_new_cfg = true;
4549 id.transform_return_to_modify = false;
4550 id.transform_lang_insert_block = NULL;
4551
4552 current_function_decl = new_decl;
4553 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4554 (DECL_STRUCT_FUNCTION (old_decl));
4555 initialize_cfun (new_decl, old_decl,
4556 old_entry_block->count,
4557 old_entry_block->frequency);
4558 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4559
4560 /* Copy the function's static chain. */
4561 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4562 if (p)
4563 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4564 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4565 &id);
4566
4567 /* If there's a tree_map, prepare for substitution. */
4568 if (tree_map)
4569 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4570 {
4571 gimple init;
4572 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4573 if (replace_info->replace_p)
4574 {
4575 tree op = replace_info->new_tree;
4576
4577 STRIP_NOPS (op);
4578
4579 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4580 op = TREE_OPERAND (op, 0);
4581
4582 if (TREE_CODE (op) == ADDR_EXPR)
4583 {
4584 op = TREE_OPERAND (op, 0);
4585 while (handled_component_p (op))
4586 op = TREE_OPERAND (op, 0);
4587 if (TREE_CODE (op) == VAR_DECL)
4588 add_referenced_var (op);
4589 }
4590 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4591 init = setup_one_parameter (&id, replace_info->old_tree,
4592 replace_info->new_tree, id.src_fn,
4593 NULL,
4594 &vars);
4595 if (init)
4596 VEC_safe_push (gimple, heap, init_stmts, init);
4597 }
4598 }
4599 /* Copy the function's arguments. */
4600 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4601 DECL_ARGUMENTS (new_decl) =
4602 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4603 args_to_skip, &vars);
4604
4605 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4606
4607 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4608 number_blocks (id.dst_fn);
4609
4610 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4611
4612 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4613 /* Add local vars. */
4614 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4615 t_step; t_step = TREE_CHAIN (t_step))
4616 {
4617 tree var = TREE_VALUE (t_step);
4618 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4619 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4620 else if (!can_be_nonlocal (var, &id))
4621 cfun->local_decls =
4622 tree_cons (NULL_TREE, remap_decl (var, &id),
4623 cfun->local_decls);
4624 }
4625
4626 /* Copy the function's body. */
4627 copy_body (&id, old_entry_block->count, old_entry_block->frequency,
4628 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4629
4630 if (DECL_RESULT (old_decl) != NULL_TREE)
4631 {
4632 tree *res_decl = &DECL_RESULT (old_decl);
4633 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4634 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4635 }
4636
4637 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4638 number_blocks (new_decl);
4639
4640 if (VEC_length (gimple, init_stmts))
4641 {
4642 basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4643 while (VEC_length (gimple, init_stmts))
4644 insert_init_stmt (bb, VEC_pop (gimple, init_stmts));
4645 }
4646 update_clone_info (&id);
4647
4648 /* Remap the nonlocal_goto_save_area, if any. */
4649 if (cfun->nonlocal_goto_save_area)
4650 {
4651 struct walk_stmt_info wi;
4652
4653 memset (&wi, 0, sizeof (wi));
4654 wi.info = &id;
4655 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4656 }
4657
4658 /* Clean up. */
4659 pointer_map_destroy (id.decl_map);
4660 free_dominance_info (CDI_DOMINATORS);
4661 free_dominance_info (CDI_POST_DOMINATORS);
4662
4663 fold_marked_statements (0, id.statements_to_fold);
4664 pointer_set_destroy (id.statements_to_fold);
4665 fold_cond_expr_cond ();
4666 delete_unreachable_blocks_update_callgraph (&id);
4667 update_ssa (TODO_update_ssa);
4668 free_dominance_info (CDI_DOMINATORS);
4669 free_dominance_info (CDI_POST_DOMINATORS);
4670
4671 VEC_free (gimple, heap, init_stmts);
4672 pop_cfun ();
4673 current_function_decl = old_current_function_decl;
4674 gcc_assert (!current_function_decl
4675 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4676 return;
4677 }
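
/* Usage sketch (a rough, hypothetical example): a caller that wants a
   version of OLD_DECL with its first parameter replaced by the constant 42
   could build a replace map along these lines, where FIRST_PARM is the
   callee's first PARM_DECL and NEW_DECL a FUNCTION_DECL prepared by the
   caller:

     VEC(ipa_replace_map_p,gc) *tree_map
       = VEC_alloc (ipa_replace_map_p, gc, 1);
     struct ipa_replace_map *map
       = (struct ipa_replace_map *) ggc_alloc (sizeof (struct ipa_replace_map));
     map->old_tree = first_parm;
     map->new_tree = build_int_cst (TREE_TYPE (first_parm), 42);
     map->replace_p = true;
     map->ref_p = false;
     VEC_safe_push (ipa_replace_map_p, gc, tree_map, map);
     tree_function_versioning (old_decl, new_decl, tree_map, false, NULL);

   In practice this is normally driven through the cgraph versioning code,
   which also creates NEW_DECL and the new callgraph node.  */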
4678
4679 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4680 the callee and return the inlined body on success. */
4681
4682 tree
4683 maybe_inline_call_in_expr (tree exp)
4684 {
4685 tree fn = get_callee_fndecl (exp);
4686
4687 /* We can only try to inline "const" functions. */
4688 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4689 {
4690 struct pointer_map_t *decl_map = pointer_map_create ();
4691 call_expr_arg_iterator iter;
4692 copy_body_data id;
4693 tree param, arg, t;
4694
4695 /* Remap the parameters. */
4696 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4697 param;
4698 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4699 *pointer_map_insert (decl_map, param) = arg;
4700
4701 memset (&id, 0, sizeof (id));
4702 id.src_fn = fn;
4703 id.dst_fn = current_function_decl;
4704 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4705 id.decl_map = decl_map;
4706
4707 id.copy_decl = copy_decl_no_change;
4708 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4709 id.transform_new_cfg = false;
4710 id.transform_return_to_modify = true;
4711 id.transform_lang_insert_block = false;
4712
4713 /* Make sure not to unshare trees behind the front-end's back
4714 since front-end specific mechanisms may rely on sharing. */
4715 id.regimplify = false;
4716 id.do_not_unshare = true;
4717
4718 /* We're not inside any EH region. */
4719 id.eh_region = -1;
4720
4721 t = copy_tree_body (&id);
4722 pointer_map_destroy (decl_map);
4723
4724 /* We can only return something suitable for use in a GENERIC
4725 expression tree. */
4726 if (TREE_CODE (t) == MODIFY_EXPR)
4727 return TREE_OPERAND (t, 1);
4728 }
4729
4730 return NULL_TREE;
4731 }
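
/* Usage sketch (illustrative): a front end or folder working on GENERIC can
   try this before giving up on a call; CALL is a CALL_EXPR it is looking at:

     tree inlined = maybe_inline_call_in_expr (call);
     if (inlined)
       call = inlined;

   Only calls to "const" functions with a saved body can succeed; on failure
   NULL_TREE is returned and the original CALL_EXPR is left untouched.  */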
4732
4733 /* Duplicate a type, fields and all. */
4734
4735 tree
4736 build_duplicate_type (tree type)
4737 {
4738 struct copy_body_data id;
4739
4740 memset (&id, 0, sizeof (id));
4741 id.src_fn = current_function_decl;
4742 id.dst_fn = current_function_decl;
4743 id.src_cfun = cfun;
4744 id.decl_map = pointer_map_create ();
4745 id.copy_decl = copy_decl_no_change;
4746
4747 type = remap_type_1 (type, &id);
4748
4749 pointer_map_destroy (id.decl_map);
4750
4751 TYPE_CANONICAL (type) = type;
4752
4753 return type;
4754 }
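
/* Usage sketch (illustrative): callers that need to edit a type's fields
   without disturbing the original can duplicate it first:

     tree dup = build_duplicate_type (orig_type);

   DUP is its own TYPE_CANONICAL, so canonical-type comparisons do not
   identify it with ORIG_TYPE.  */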
4755
4756 /* Return whether it is safe to inline a function: it may be unsafe if the
4757 callee uses incompatible target-specific options or the call site's actual
4758 argument types mismatch the parameter types. E is the call edge to be checked. */
4759 bool
4760 tree_can_inline_p (struct cgraph_edge *e)
4761 {
4762 #if 0
4763 /* This causes a regression in SPEC in that it prevents a cold function from
4764 inlining a hot function. Perhaps this should only apply to functions
4765 that the user declares hot/cold/optimize explicitly. */
4766
4767 /* Don't inline a function with a higher optimization level than the
4768 caller, or with different space constraints (hot/cold functions). */
4769 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
4770 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
4771
4772 if (caller_tree != callee_tree)
4773 {
4774 struct cl_optimization *caller_opt
4775 = TREE_OPTIMIZATION ((caller_tree)
4776 ? caller_tree
4777 : optimization_default_node);
4778
4779 struct cl_optimization *callee_opt
4780 = TREE_OPTIMIZATION ((callee_tree)
4781 ? callee_tree
4782 : optimization_default_node);
4783
4784 if ((caller_opt->optimize > callee_opt->optimize)
4785 || (caller_opt->optimize_size != callee_opt->optimize_size))
4786 return false;
4787 }
4788 #endif
4789 tree caller, callee;
4790
4791 caller = e->caller->decl;
4792 callee = e->callee->decl;
4793
4794 /* Allow the backend to decide if inlining is ok. */
4795 if (!targetm.target_option.can_inline_p (caller, callee))
4796 {
4797 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
4798 gimple_call_set_cannot_inline (e->call_stmt, true);
4799 return false;
4800 }
4801
4802 if (!gimple_check_call_args (e->call_stmt))
4803 {
4804 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
4805 gimple_call_set_cannot_inline (e->call_stmt, true);
4806 return false;
4807 }
4808
4809 return true;
4810 }
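
/* Usage sketch (illustrative): the inliner is expected to check each call
   edge with this predicate before committing to inline it, e.g.:

     if (!tree_can_inline_p (e))
       continue;

   A false result also records the reason in e->inline_failed and marks the
   call statement as not inlinable, so later passes will not retry it.  */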