gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
44 #include "tree-flow.h"
45 #include "diagnostic.h"
46 #include "except.h"
47 #include "debug.h"
48 #include "pointer-set.h"
49 #include "ipa-prop.h"
50 #include "value-prof.h"
51 #include "tree-pass.h"
52 #include "target.h"
53 #include "integrate.h"
54
55 /* I'm not real happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"
58
59 /* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements is adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
73 Versioning: a function body is duplicated and the result is a new
74 function, rather than being inserted into the blocks of an existing
75 function as with inlining. Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined) those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
90
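/* Illustrative sketch (not part of the original sources): what the inlining
   transformation described above does to a trivial call.  The temporaries
   and the C-like GIMPLE rendering below are hypothetical.

       int callee (int x) { return x + 1; }        y = callee (a);

   After the body is copied into the caller, the PARM_DECL X is remapped to
   a local VAR_DECL and the RETURN_EXPR becomes a MODIFY_EXPR of a dedicated
   return variable:

       x.1 = a;                  // parameter setup in the caller
       retval.2 = x.1 + 1;       // was: return x + 1;
       y = retval.2;             // caller reads the return variable

   Any EH regions of the callee are duplicated and appended to the caller's
   EH table at the same time.  */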
91 /* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104 /* Weights that estimate_num_insns uses to estimate the size of the
105 produced code. */
106
107 eni_weights eni_size_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the time necessary
110 to execute the produced code. */
111
112 eni_weights eni_time_weights;
113
114 /* Prototypes. */
115
116 static tree declare_return_variable (copy_body_data *, tree, tree);
117 static void remap_block (tree *, copy_body_data *);
118 static void copy_bind_expr (tree *, int *, copy_body_data *);
119 static tree mark_local_for_remap_r (tree *, int *, void *);
120 static void unsave_expr_1 (tree);
121 static tree unsave_r (tree *, int *, void *);
122 static void declare_inline_vars (tree, tree);
123 static void remap_save_expr (tree *, void *, int *);
124 static void prepend_lexical_block (tree current_block, tree new_block);
125 static tree copy_decl_to_var (tree, copy_body_data *);
126 static tree copy_result_decl_to_var (tree, copy_body_data *);
127 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
128 static gimple remap_gimple_stmt (gimple, copy_body_data *);
129 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
130
131 /* Insert a tree->tree mapping for ID. Although the name suggests
132 that the trees should be variables, this map is used for more than that. */
133
134 void
135 insert_decl_map (copy_body_data *id, tree key, tree value)
136 {
137 *pointer_map_insert (id->decl_map, key) = value;
138
139 /* Always insert an identity map as well. If we see this same new
140 node again, we won't want to duplicate it a second time. */
141 if (key != value)
142 *pointer_map_insert (id->decl_map, value) = value;
143 }
144
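/* Illustrative sketch (not part of the original file): a caller such as a
   cloning or versioning pass can seed the map before the body is copied, so
   that later remap_decl / remap_ssa_name lookups substitute a replacement
   for a PARM_DECL.  The function name below is hypothetical.

     static void
     example_seed_decl_map (copy_body_data *id, tree parm, tree repl)
     {
       insert_decl_map (id, parm, repl);
       // Lookups go through the same pointer map:
       gcc_assert (*(tree *) pointer_map_contains (id->decl_map, parm) == repl);
     }
*/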
145 /* Insert a tree->tree mapping for ID. This is only used for
146 variables. */
147
148 static void
149 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
150 {
151 if (!gimple_in_ssa_p (id->src_cfun))
152 return;
153
154 if (!MAY_HAVE_DEBUG_STMTS)
155 return;
156
157 if (!target_for_debug_bind (key))
158 return;
159
160 gcc_assert (TREE_CODE (key) == PARM_DECL);
161 gcc_assert (TREE_CODE (value) == VAR_DECL);
162
163 if (!id->debug_map)
164 id->debug_map = pointer_map_create ();
165
166 *pointer_map_insert (id->debug_map, key) = value;
167 }
168
169 /* If nonzero, we're remapping the contents of inlined debug
170 statements. If negative, an error has occurred, such as a
171 reference to a variable that isn't available in the inlined
172 context. */
173 static int processing_debug_stmt = 0;
174
175 /* Construct new SSA name for old NAME. ID is the inline context. */
176
177 static tree
178 remap_ssa_name (tree name, copy_body_data *id)
179 {
180 tree new_tree;
181 tree *n;
182
183 gcc_assert (TREE_CODE (name) == SSA_NAME);
184
185 n = (tree *) pointer_map_contains (id->decl_map, name);
186 if (n)
187 return unshare_expr (*n);
188
189 if (processing_debug_stmt)
190 {
191 processing_debug_stmt = -1;
192 return name;
193 }
194
195 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
196 in copy_bb. */
197 new_tree = remap_decl (SSA_NAME_VAR (name), id);
198
199 /* We might've substituted a constant or another SSA_NAME for
200 the variable.
201
202 Replace the SSA name representing RESULT_DECL by the variable during
203 inlining: this saves us from having to introduce a PHI node when the
204 return value is only partly initialized. */
205 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
206 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
207 || !id->transform_return_to_modify))
208 {
209 struct ptr_info_def *pi;
210 new_tree = make_ssa_name (new_tree, NULL);
211 insert_decl_map (id, name, new_tree);
212 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
213 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
214 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
215 /* At least IPA points-to info can be directly transferred. */
216 if (id->src_cfun->gimple_df
217 && id->src_cfun->gimple_df->ipa_pta
218 && (pi = SSA_NAME_PTR_INFO (name))
219 && !pi->pt.anything)
220 {
221 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
222 new_pi->pt = pi->pt;
223 }
224 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
225 {
226 /* By inlining a function that has an uninitialized variable, we might
227 extend its lifetime (the variable might get reused). This causes an
228 ICE if we end up extending the lifetime of an SSA name across an
229 abnormal edge, and it also increases register pressure.
230
231 We simply initialize all uninitialized vars to 0, except when we are
232 inlining into the very first BB. We could avoid this for all BBs
233 that are not inside strongly connected regions of the CFG, but that
234 is expensive to test. */
235 if (id->entry_bb
236 && is_gimple_reg (SSA_NAME_VAR (name))
237 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
238 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
239 || EDGE_COUNT (id->entry_bb->preds) != 1))
240 {
241 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
242 gimple init_stmt;
243
244 init_stmt = gimple_build_assign (new_tree,
245 fold_convert (TREE_TYPE (new_tree),
246 integer_zero_node));
247 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
248 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
249 }
250 else
251 {
252 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
253 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
254 == name)
255 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
256 }
257 }
258 }
259 else
260 insert_decl_map (id, name, new_tree);
261 return new_tree;
262 }
263
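/* Illustrative sketch (not from the original sources) of the zero
   initialization performed above for an uninitialized SSA name.  Given a
   callee use of an uninitialized local

       int tmp;          // never assigned on some path
       ... = tmp;        // use of the default definition tmp_1(D)

   remap_ssa_name creates a fresh name, say tmp.3_7, and when the inline
   destination is not the very first BB it emits

       tmp.3_7 = 0;

   at the end of id->entry_bb, so the copied use never extends the lifetime
   of an undefined value across an abnormal edge.  The names are made up.  */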
264 /* Remap DECL during the copying of the BLOCK tree for the function. */
265
266 tree
267 remap_decl (tree decl, copy_body_data *id)
268 {
269 tree *n;
270
271 /* We only remap local variables in the current function. */
272
273 /* See if we have remapped this declaration. */
274
275 n = (tree *) pointer_map_contains (id->decl_map, decl);
276
277 if (!n && processing_debug_stmt)
278 {
279 processing_debug_stmt = -1;
280 return decl;
281 }
282
283 /* If we didn't already have an equivalent for this declaration,
284 create one now. */
285 if (!n)
286 {
287 /* Make a copy of the variable or label. */
288 tree t = id->copy_decl (decl, id);
289
290 /* Remember it, so that if we encounter this local entity again
291 we can reuse this copy. Do this early because remap_type may
292 need this decl for TYPE_STUB_DECL. */
293 insert_decl_map (id, decl, t);
294
295 if (!DECL_P (t))
296 return t;
297
298 /* Remap types, if necessary. */
299 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
300 if (TREE_CODE (t) == TYPE_DECL)
301 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
302
303 /* Remap sizes as necessary. */
304 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
305 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
306
307 /* If fields, do likewise for offset and qualifier. */
308 if (TREE_CODE (t) == FIELD_DECL)
309 {
310 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
311 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
312 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
313 }
314
315 if (cfun && gimple_in_ssa_p (cfun)
316 && (TREE_CODE (t) == VAR_DECL
317 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
318 {
319 get_var_ann (t);
320 add_referenced_var (t);
321 }
322 return t;
323 }
324
325 if (id->do_not_unshare)
326 return *n;
327 else
328 return unshare_expr (*n);
329 }
330
331 static tree
332 remap_type_1 (tree type, copy_body_data *id)
333 {
334 tree new_tree, t;
335
336 /* We do need a copy. Build and register it now. If this is a pointer or
337 reference type, remap the designated type and make a new pointer or
338 reference type. */
339 if (TREE_CODE (type) == POINTER_TYPE)
340 {
341 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
342 TYPE_MODE (type),
343 TYPE_REF_CAN_ALIAS_ALL (type));
344 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
345 new_tree = build_type_attribute_qual_variant (new_tree,
346 TYPE_ATTRIBUTES (type),
347 TYPE_QUALS (type));
348 insert_decl_map (id, type, new_tree);
349 return new_tree;
350 }
351 else if (TREE_CODE (type) == REFERENCE_TYPE)
352 {
353 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
354 TYPE_MODE (type),
355 TYPE_REF_CAN_ALIAS_ALL (type));
356 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
357 new_tree = build_type_attribute_qual_variant (new_tree,
358 TYPE_ATTRIBUTES (type),
359 TYPE_QUALS (type));
360 insert_decl_map (id, type, new_tree);
361 return new_tree;
362 }
363 else
364 new_tree = copy_node (type);
365
366 insert_decl_map (id, type, new_tree);
367
368 /* This is a new type, not a copy of an old type. Need to reassociate
369 variants. We can handle everything except the main variant lazily. */
370 t = TYPE_MAIN_VARIANT (type);
371 if (type != t)
372 {
373 t = remap_type (t, id);
374 TYPE_MAIN_VARIANT (new_tree) = t;
375 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
376 TYPE_NEXT_VARIANT (t) = new_tree;
377 }
378 else
379 {
380 TYPE_MAIN_VARIANT (new_tree) = new_tree;
381 TYPE_NEXT_VARIANT (new_tree) = NULL;
382 }
383
384 if (TYPE_STUB_DECL (type))
385 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
386
387 /* Lazily create pointer and reference types. */
388 TYPE_POINTER_TO (new_tree) = NULL;
389 TYPE_REFERENCE_TO (new_tree) = NULL;
390
391 switch (TREE_CODE (new_tree))
392 {
393 case INTEGER_TYPE:
394 case REAL_TYPE:
395 case FIXED_POINT_TYPE:
396 case ENUMERAL_TYPE:
397 case BOOLEAN_TYPE:
398 t = TYPE_MIN_VALUE (new_tree);
399 if (t && TREE_CODE (t) != INTEGER_CST)
400 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
401
402 t = TYPE_MAX_VALUE (new_tree);
403 if (t && TREE_CODE (t) != INTEGER_CST)
404 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
405 return new_tree;
406
407 case FUNCTION_TYPE:
408 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
409 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
410 return new_tree;
411
412 case ARRAY_TYPE:
413 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
414 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
415 break;
416
417 case RECORD_TYPE:
418 case UNION_TYPE:
419 case QUAL_UNION_TYPE:
420 {
421 tree f, nf = NULL;
422
423 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
424 {
425 t = remap_decl (f, id);
426 DECL_CONTEXT (t) = new_tree;
427 TREE_CHAIN (t) = nf;
428 nf = t;
429 }
430 TYPE_FIELDS (new_tree) = nreverse (nf);
431 }
432 break;
433
434 case OFFSET_TYPE:
435 default:
436 /* Shouldn't have been thought variable sized. */
437 gcc_unreachable ();
438 }
439
440 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
441 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
442
443 return new_tree;
444 }
445
446 tree
447 remap_type (tree type, copy_body_data *id)
448 {
449 tree *node;
450 tree tmp;
451
452 if (type == NULL)
453 return type;
454
455 /* See if we have remapped this type. */
456 node = (tree *) pointer_map_contains (id->decl_map, type);
457 if (node)
458 return *node;
459
460 /* The type only needs remapping if it's variably modified. */
461 if (! variably_modified_type_p (type, id->src_fn))
462 {
463 insert_decl_map (id, type, type);
464 return type;
465 }
466
467 id->remapping_type_depth++;
468 tmp = remap_type_1 (type, id);
469 id->remapping_type_depth--;
470
471 return tmp;
472 }
473
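/* Illustrative sketch (not part of the original file): only variably
   modified types need remapping.  For a callee like

       void callee (int n) { int buf[n]; ... }

   the type of BUF has a size that refers to the PARM_DECL N, so when the
   body is copied the array type (and its TYPE_SIZE/TYPE_DOMAIN trees) must
   be rebuilt against the remapped copy of N.  A type such as plain `int'
   is not variably modified and is shared instead, via the identity mapping
   inserted above.  */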
474 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
475 is NULL or TYPE has not been remapped before. */
476
477 static tree
478 remapped_type (tree type, copy_body_data *id)
479 {
480 tree *node;
481
482 if (type == NULL)
483 return type;
484
485 /* See if we have remapped this type. */
486 node = (tree *) pointer_map_contains (id->decl_map, type);
487 if (node)
488 return *node;
489 else
490 return NULL;
491 }
492
494 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
495
496 static bool
497 can_be_nonlocal (tree decl, copy_body_data *id)
498 {
499 /* We can not duplicate function decls. */
500 if (TREE_CODE (decl) == FUNCTION_DECL)
501 return true;
502
503 /* Local static vars must be non-local or we get multiple declaration
504 problems. */
505 if (TREE_CODE (decl) == VAR_DECL
506 && !auto_var_in_fn_p (decl, id->src_fn))
507 return true;
508
509 /* At the moment dwarf2out can handle only these types of nodes. We
510 can support more later. */
511 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
512 return false;
513
514 /* We must use global type. We call remapped_type instead of
515 remap_type since we don't want to remap this type here if it
516 hasn't been remapped before. */
517 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
518 return false;
519
520 /* Without SSA we can't tell if a variable is used. */
521 if (!gimple_in_ssa_p (cfun))
522 return false;
523
524 /* Live variables must be copied so we can attach DECL_RTL. */
525 if (var_ann (decl))
526 return false;
527
528 return true;
529 }
530
531 static tree
532 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
533 {
534 tree old_var;
535 tree new_decls = NULL_TREE;
536
537 /* Remap its variables. */
538 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
539 {
540 tree new_var;
541
542 if (can_be_nonlocal (old_var, id))
543 {
544 if (TREE_CODE (old_var) == VAR_DECL
545 && ! DECL_EXTERNAL (old_var)
546 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
547 cfun->local_decls = tree_cons (NULL_TREE, old_var,
548 cfun->local_decls);
549 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
550 && !DECL_IGNORED_P (old_var)
551 && nonlocalized_list)
552 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
553 continue;
554 }
555
556 /* Remap the variable. */
557 new_var = remap_decl (old_var, id);
558
559 /* If we didn't remap this variable, we can't mess with its
560 TREE_CHAIN. If we remapped this variable to the return slot, it's
561 already declared somewhere else, so don't declare it here. */
562
563 if (new_var == id->retvar)
564 ;
565 else if (!new_var)
566 {
567 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
568 && !DECL_IGNORED_P (old_var)
569 && nonlocalized_list)
570 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
571 }
572 else
573 {
574 gcc_assert (DECL_P (new_var));
575 TREE_CHAIN (new_var) = new_decls;
576 new_decls = new_var;
577 }
578 }
579
580 return nreverse (new_decls);
581 }
582
583 /* Copy the BLOCK to contain remapped versions of the variables
584 therein. And hook the new block into the block-tree. */
585
586 static void
587 remap_block (tree *block, copy_body_data *id)
588 {
589 tree old_block;
590 tree new_block;
591
592 /* Make the new block. */
593 old_block = *block;
594 new_block = make_node (BLOCK);
595 TREE_USED (new_block) = TREE_USED (old_block);
596 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
597 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
598 BLOCK_NONLOCALIZED_VARS (new_block)
599 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
600 *block = new_block;
601
602 /* Remap its variables. */
603 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
604 &BLOCK_NONLOCALIZED_VARS (new_block),
605 id);
606
607 if (id->transform_lang_insert_block)
608 id->transform_lang_insert_block (new_block);
609
610 /* Remember the remapped block. */
611 insert_decl_map (id, old_block, new_block);
612 }
613
614 /* Copy the whole block tree and root it in id->block. */
615 static tree
616 remap_blocks (tree block, copy_body_data *id)
617 {
618 tree t;
619 tree new_tree = block;
620
621 if (!block)
622 return NULL;
623
624 remap_block (&new_tree, id);
625 gcc_assert (new_tree != block);
626 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
627 prepend_lexical_block (new_tree, remap_blocks (t, id));
628 /* Blocks are in arbitrary order, but to make things slightly prettier, do
629 not swap the order when producing a copy. */
630 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
631 return new_tree;
632 }
633
634 static void
635 copy_statement_list (tree *tp)
636 {
637 tree_stmt_iterator oi, ni;
638 tree new_tree;
639
640 new_tree = alloc_stmt_list ();
641 ni = tsi_start (new_tree);
642 oi = tsi_start (*tp);
643 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
644 *tp = new_tree;
645
646 for (; !tsi_end_p (oi); tsi_next (&oi))
647 {
648 tree stmt = tsi_stmt (oi);
649 if (TREE_CODE (stmt) == STATEMENT_LIST)
650 copy_statement_list (&stmt);
651 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
652 }
653 }
654
655 static void
656 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
657 {
658 tree block = BIND_EXPR_BLOCK (*tp);
659 /* Copy (and replace) the statement. */
660 copy_tree_r (tp, walk_subtrees, NULL);
661 if (block)
662 {
663 remap_block (&block, id);
664 BIND_EXPR_BLOCK (*tp) = block;
665 }
666
667 if (BIND_EXPR_VARS (*tp))
668 {
669 tree t;
670
671 /* This will remap a lot of the same decls again, but this should be
672 harmless. */
673 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
674
675 /* Also copy value-expressions. */
676 for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
677 if (TREE_CODE (t) == VAR_DECL
678 && DECL_HAS_VALUE_EXPR_P (t))
679 {
680 tree tem = DECL_VALUE_EXPR (t);
681 walk_tree (&tem, copy_tree_body_r, id, NULL);
682 SET_DECL_VALUE_EXPR (t, tem);
683 }
684 }
685 }
686
687
688 /* Create a new gimple_seq by remapping all the statements in BODY
689 using the inlining information in ID. */
690
691 static gimple_seq
692 remap_gimple_seq (gimple_seq body, copy_body_data *id)
693 {
694 gimple_stmt_iterator si;
695 gimple_seq new_body = NULL;
696
697 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
698 {
699 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
700 gimple_seq_add_stmt (&new_body, new_stmt);
701 }
702
703 return new_body;
704 }
705
706
707 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
708 block using the mapping information in ID. */
709
710 static gimple
711 copy_gimple_bind (gimple stmt, copy_body_data *id)
712 {
713 gimple new_bind;
714 tree new_block, new_vars;
715 gimple_seq body, new_body;
716
717 /* Copy the statement. Note that we purposely don't use copy_stmt
718 here because we need to remap statements as we copy. */
719 body = gimple_bind_body (stmt);
720 new_body = remap_gimple_seq (body, id);
721
722 new_block = gimple_bind_block (stmt);
723 if (new_block)
724 remap_block (&new_block, id);
725
726 /* This will remap a lot of the same decls again, but this should be
727 harmless. */
728 new_vars = gimple_bind_vars (stmt);
729 if (new_vars)
730 new_vars = remap_decls (new_vars, NULL, id);
731
732 new_bind = gimple_build_bind (new_vars, new_body, new_block);
733
734 return new_bind;
735 }
736
737
738 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
739 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
740 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
741 recursing into the child nodes of *TP. */
742
743 static tree
744 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
745 {
746 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
747 copy_body_data *id = (copy_body_data *) wi_p->info;
748 tree fn = id->src_fn;
749
750 if (TREE_CODE (*tp) == SSA_NAME)
751 {
752 *tp = remap_ssa_name (*tp, id);
753 *walk_subtrees = 0;
754 return NULL;
755 }
756 else if (auto_var_in_fn_p (*tp, fn))
757 {
758 /* Local variables and labels need to be replaced by equivalent
759 variables. We don't want to copy static variables; there's
760 only one of those, no matter how many times we inline the
761 containing function. Similarly for globals from an outer
762 function. */
763 tree new_decl;
764
765 /* Remap the declaration. */
766 new_decl = remap_decl (*tp, id);
767 gcc_assert (new_decl);
768 /* Replace this variable with the copy. */
769 STRIP_TYPE_NOPS (new_decl);
770 /* ??? The C++ frontend uses void * pointer zero to initialize
771 any other type. This confuses the middle-end type verification.
772 As cloned bodies do not go through gimplification again the fixup
773 there doesn't trigger. */
774 if (TREE_CODE (new_decl) == INTEGER_CST
775 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
776 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
777 *tp = new_decl;
778 *walk_subtrees = 0;
779 }
780 else if (TREE_CODE (*tp) == STATEMENT_LIST)
781 gcc_unreachable ();
782 else if (TREE_CODE (*tp) == SAVE_EXPR)
783 gcc_unreachable ();
784 else if (TREE_CODE (*tp) == LABEL_DECL
785 && (!DECL_CONTEXT (*tp)
786 || decl_function_context (*tp) == id->src_fn))
787 /* These may need to be remapped for EH handling. */
788 *tp = remap_decl (*tp, id);
789 else if (TYPE_P (*tp))
790 /* Types may need remapping as well. */
791 *tp = remap_type (*tp, id);
792 else if (CONSTANT_CLASS_P (*tp))
793 {
794 /* If this is a constant, we have to copy the node iff the type
795 will be remapped. copy_tree_r will not copy a constant. */
796 tree new_type = remap_type (TREE_TYPE (*tp), id);
797
798 if (new_type == TREE_TYPE (*tp))
799 *walk_subtrees = 0;
800
801 else if (TREE_CODE (*tp) == INTEGER_CST)
802 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
803 TREE_INT_CST_HIGH (*tp));
804 else
805 {
806 *tp = copy_node (*tp);
807 TREE_TYPE (*tp) = new_type;
808 }
809 }
810 else
811 {
812 /* Otherwise, just copy the node. Note that copy_tree_r already
813 knows not to copy VAR_DECLs, etc., so this is safe. */
814 if (TREE_CODE (*tp) == INDIRECT_REF)
815 {
816 /* Get rid of *& from inline substitutions that can happen when a
817 pointer argument is an ADDR_EXPR. */
818 tree decl = TREE_OPERAND (*tp, 0);
819 tree *n;
820
821 n = (tree *) pointer_map_contains (id->decl_map, decl);
822 if (n)
823 {
824 tree type, new_tree, old;
825
826 /* If we happen to get an ADDR_EXPR in n->value, strip
827 it manually here as we'll eventually get ADDR_EXPRs
828 which lie about their types pointed to. In this case
829 build_fold_indirect_ref wouldn't strip the
830 INDIRECT_REF, but we absolutely rely on that. As
831 fold_indirect_ref does other useful transformations,
832 try that first, though. */
833 type = TREE_TYPE (TREE_TYPE (*n));
834 new_tree = unshare_expr (*n);
835 old = *tp;
836 *tp = gimple_fold_indirect_ref (new_tree);
837 if (!*tp)
838 {
839 if (TREE_CODE (new_tree) == ADDR_EXPR)
840 {
841 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
842 type, new_tree);
843 /* ??? We should either assert here or build
844 a VIEW_CONVERT_EXPR instead of blindly leaking
845 incompatible types to our IL. */
846 if (! *tp)
847 *tp = TREE_OPERAND (new_tree, 0);
848 }
849 else
850 {
851 *tp = build1 (INDIRECT_REF, type, new_tree);
852 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
853 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
854 }
855 }
856 *walk_subtrees = 0;
857 return NULL;
858 }
859 }
860
861 /* Here is the "usual case". Copy this tree node, and then
862 tweak some special cases. */
863 copy_tree_r (tp, walk_subtrees, NULL);
864
865 /* Global variables we haven't seen yet need to go into referenced
866 vars, unless they are referenced from types only. */
867 if (gimple_in_ssa_p (cfun)
868 && TREE_CODE (*tp) == VAR_DECL
869 && id->remapping_type_depth == 0
870 && !processing_debug_stmt)
871 add_referenced_var (*tp);
872
873 /* We should never have TREE_BLOCK set on non-statements. */
874 if (EXPR_P (*tp))
875 gcc_assert (!TREE_BLOCK (*tp));
876
877 if (TREE_CODE (*tp) != OMP_CLAUSE)
878 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
879
880 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
881 {
882 /* The copied TARGET_EXPR has never been expanded, even if the
883 original node was expanded already. */
884 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
885 TREE_OPERAND (*tp, 3) = NULL_TREE;
886 }
887 else if (TREE_CODE (*tp) == ADDR_EXPR)
888 {
889 /* Variable substitution need not be simple. In particular,
890 the INDIRECT_REF substitution above. Make sure that
891 TREE_CONSTANT and friends are up-to-date. But make sure
892 to not improperly set TREE_BLOCK on some sub-expressions. */
893 int invariant = is_gimple_min_invariant (*tp);
894 tree block = id->block;
895 id->block = NULL_TREE;
896 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
897 id->block = block;
898
899 /* Handle the case where we substituted an INDIRECT_REF
900 into the operand of the ADDR_EXPR. */
901 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
902 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
903 else
904 recompute_tree_invariant_for_addr_expr (*tp);
905
906 /* If this used to be invariant, but is not any longer,
907 then regimplification is probably needed. */
908 if (invariant && !is_gimple_min_invariant (*tp))
909 id->regimplify = true;
910
911 *walk_subtrees = 0;
912 }
913 }
914
915 /* Keep iterating. */
916 return NULL_TREE;
917 }
918
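/* Illustrative sketch (not from the original sources) of the INDIRECT_REF
   cleanup performed above.  If the caller passes the address of a local,

       callee (&a);        with      void callee (int *p) { ... *p ... }

   then the decl map binds P to `&a', and the copied dereference `*p' would
   literally read `*&a'.  gimple_fold_indirect_ref (or the fallback paths
   above) collapses this to plain `a', so no spurious indirection survives
   in the inlined body.  The identifiers are made up.  */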
919
920 /* Called from copy_body_id via walk_tree. DATA is really a
921 `copy_body_data *'. */
922
923 tree
924 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
925 {
926 copy_body_data *id = (copy_body_data *) data;
927 tree fn = id->src_fn;
928 tree new_block;
929
930 /* Begin by recognizing trees that we'll completely rewrite for the
931 inlining context. Our output for these trees is completely
932 different from our input (e.g. RETURN_EXPR is deleted, and morphs
933 into an edge). Further down, we'll handle trees that get
934 duplicated and/or tweaked. */
935
936 /* When requested, RETURN_EXPRs should be transformed to just the
937 contained MODIFY_EXPR. The branch semantics of the return will
938 be handled elsewhere by manipulating the CFG rather than a statement. */
939 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
940 {
941 tree assignment = TREE_OPERAND (*tp, 0);
942
943 /* If we're returning something, just turn that into an
944 assignment into the equivalent of the original RESULT_DECL.
945 If the "assignment" is just the result decl, the result
946 decl has already been set (e.g. a recent "foo (&result_decl,
947 ...)"); just toss the entire RETURN_EXPR. */
948 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
949 {
950 /* Replace the RETURN_EXPR with (a copy of) the
951 MODIFY_EXPR hanging underneath. */
952 *tp = copy_node (assignment);
953 }
954 else /* Else the RETURN_EXPR returns no value. */
955 {
956 *tp = NULL;
957 return (tree) (void *)1;
958 }
959 }
960 else if (TREE_CODE (*tp) == SSA_NAME)
961 {
962 *tp = remap_ssa_name (*tp, id);
963 *walk_subtrees = 0;
964 return NULL;
965 }
966
967 /* Local variables and labels need to be replaced by equivalent
968 variables. We don't want to copy static variables; there's only
969 one of those, no matter how many times we inline the containing
970 function. Similarly for globals from an outer function. */
971 else if (auto_var_in_fn_p (*tp, fn))
972 {
973 tree new_decl;
974
975 /* Remap the declaration. */
976 new_decl = remap_decl (*tp, id);
977 gcc_assert (new_decl);
978 /* Replace this variable with the copy. */
979 STRIP_TYPE_NOPS (new_decl);
980 *tp = new_decl;
981 *walk_subtrees = 0;
982 }
983 else if (TREE_CODE (*tp) == STATEMENT_LIST)
984 copy_statement_list (tp);
985 else if (TREE_CODE (*tp) == SAVE_EXPR
986 || TREE_CODE (*tp) == TARGET_EXPR)
987 remap_save_expr (tp, id->decl_map, walk_subtrees);
988 else if (TREE_CODE (*tp) == LABEL_DECL
989 && (! DECL_CONTEXT (*tp)
990 || decl_function_context (*tp) == id->src_fn))
991 /* These may need to be remapped for EH handling. */
992 *tp = remap_decl (*tp, id);
993 else if (TREE_CODE (*tp) == BIND_EXPR)
994 copy_bind_expr (tp, walk_subtrees, id);
995 /* Types may need remapping as well. */
996 else if (TYPE_P (*tp))
997 *tp = remap_type (*tp, id);
998
999 /* If this is a constant, we have to copy the node iff the type will be
1000 remapped. copy_tree_r will not copy a constant. */
1001 else if (CONSTANT_CLASS_P (*tp))
1002 {
1003 tree new_type = remap_type (TREE_TYPE (*tp), id);
1004
1005 if (new_type == TREE_TYPE (*tp))
1006 *walk_subtrees = 0;
1007
1008 else if (TREE_CODE (*tp) == INTEGER_CST)
1009 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1010 TREE_INT_CST_HIGH (*tp));
1011 else
1012 {
1013 *tp = copy_node (*tp);
1014 TREE_TYPE (*tp) = new_type;
1015 }
1016 }
1017
1018 /* Otherwise, just copy the node. Note that copy_tree_r already
1019 knows not to copy VAR_DECLs, etc., so this is safe. */
1020 else
1021 {
1022 /* Here we handle trees that are not completely rewritten.
1023 First we detect some inlining-induced bogosities for
1024 discarding. */
1025 if (TREE_CODE (*tp) == MODIFY_EXPR
1026 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1027 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1028 {
1029 /* Some assignments VAR = VAR; don't generate any rtl code
1030 and thus don't count as variable modification. Avoid
1031 keeping bogosities like 0 = 0. */
1032 tree decl = TREE_OPERAND (*tp, 0), value;
1033 tree *n;
1034
1035 n = (tree *) pointer_map_contains (id->decl_map, decl);
1036 if (n)
1037 {
1038 value = *n;
1039 STRIP_TYPE_NOPS (value);
1040 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1041 {
1042 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1043 return copy_tree_body_r (tp, walk_subtrees, data);
1044 }
1045 }
1046 }
1047 else if (TREE_CODE (*tp) == INDIRECT_REF)
1048 {
1049 /* Get rid of *& from inline substitutions that can happen when a
1050 pointer argument is an ADDR_EXPR. */
1051 tree decl = TREE_OPERAND (*tp, 0);
1052 tree *n;
1053
1054 n = (tree *) pointer_map_contains (id->decl_map, decl);
1055 if (n)
1056 {
1057 tree new_tree;
1058 tree old;
1059 /* If we happen to get an ADDR_EXPR in n->value, strip
1060 it manually here as we'll eventually get ADDR_EXPRs
1061 which lie about their types pointed to. In this case
1062 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1063 but we absolutely rely on that. As fold_indirect_ref
1064 does other useful transformations, try that first, though. */
1065 tree type = TREE_TYPE (TREE_TYPE (*n));
1066 if (id->do_not_unshare)
1067 new_tree = *n;
1068 else
1069 new_tree = unshare_expr (*n);
1070 old = *tp;
1071 *tp = gimple_fold_indirect_ref (new_tree);
1072 if (! *tp)
1073 {
1074 if (TREE_CODE (new_tree) == ADDR_EXPR)
1075 {
1076 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1077 type, new_tree);
1078 /* ??? We should either assert here or build
1079 a VIEW_CONVERT_EXPR instead of blindly leaking
1080 incompatible types to our IL. */
1081 if (! *tp)
1082 *tp = TREE_OPERAND (new_tree, 0);
1083 }
1084 else
1085 {
1086 *tp = build1 (INDIRECT_REF, type, new_tree);
1087 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1088 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1089 }
1090 }
1091 *walk_subtrees = 0;
1092 return NULL;
1093 }
1094 }
1095
1096 /* Here is the "usual case". Copy this tree node, and then
1097 tweak some special cases. */
1098 copy_tree_r (tp, walk_subtrees, NULL);
1099
1100 /* Global variables we haven't seen yet need to go into referenced
1101 vars, unless they are referenced from types or debug stmts only. */
1102 if (gimple_in_ssa_p (cfun)
1103 && TREE_CODE (*tp) == VAR_DECL
1104 && id->remapping_type_depth == 0
1105 && !processing_debug_stmt)
1106 add_referenced_var (*tp);
1107
1108 /* If EXPR has a block defined, map it to the newly constructed block.
1109 When inlining we want EXPRs without a block to appear in the block
1110 of the function call, if we are not remapping a type. */
1111 if (EXPR_P (*tp))
1112 {
1113 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1114 if (TREE_BLOCK (*tp))
1115 {
1116 tree *n;
1117 n = (tree *) pointer_map_contains (id->decl_map,
1118 TREE_BLOCK (*tp));
1119 gcc_assert (n);
1120 new_block = *n;
1121 }
1122 TREE_BLOCK (*tp) = new_block;
1123 }
1124
1125 if (TREE_CODE (*tp) != OMP_CLAUSE)
1126 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1127
1128 /* The copied TARGET_EXPR has never been expanded, even if the
1129 original node was expanded already. */
1130 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1131 {
1132 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1133 TREE_OPERAND (*tp, 3) = NULL_TREE;
1134 }
1135
1136 /* Variable substitution need not be simple. In particular, the
1137 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1138 and friends are up-to-date. */
1139 else if (TREE_CODE (*tp) == ADDR_EXPR)
1140 {
1141 int invariant = is_gimple_min_invariant (*tp);
1142 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1143
1144 /* Handle the case where we substituted an INDIRECT_REF
1145 into the operand of the ADDR_EXPR. */
1146 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1147 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1148 else
1149 recompute_tree_invariant_for_addr_expr (*tp);
1150
1151 /* If this used to be invariant, but is not any longer,
1152 then regimplification is probably needed. */
1153 if (invariant && !is_gimple_min_invariant (*tp))
1154 id->regimplify = true;
1155
1156 *walk_subtrees = 0;
1157 }
1158 }
1159
1160 /* Keep iterating. */
1161 return NULL_TREE;
1162 }
1163
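/* Illustrative sketch (not part of the original file) of the self-assignment
   removal above.  If a cloning pass mapped the parameter P to the constant 5,
   a copied statement

       p = p;

   would otherwise come out as the nonsensical `5 = 5'; since the remapped
   value is constant, the statement is replaced by an empty statement
   instead.  The identifier is made up.  */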
1164 /* Helper for remap_gimple_stmt. Given an EH region number for the
1165 source function, map that to the duplicate EH region number in
1166 the destination function. */
1167
1168 static int
1169 remap_eh_region_nr (int old_nr, copy_body_data *id)
1170 {
1171 eh_region old_r, new_r;
1172 void **slot;
1173
1174 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1175 slot = pointer_map_contains (id->eh_map, old_r);
1176 new_r = (eh_region) *slot;
1177
1178 return new_r->index;
1179 }
1180
1181 /* Similar, but operate on INTEGER_CSTs. */
1182
1183 static tree
1184 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1185 {
1186 int old_nr, new_nr;
1187
1188 old_nr = tree_low_cst (old_t_nr, 0);
1189 new_nr = remap_eh_region_nr (old_nr, id);
1190
1191 return build_int_cst (NULL, new_nr);
1192 }
1193
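/* Illustrative sketch (not from the original sources): the EH region numbers
   above are per-function indices.  If the callee's region #2 was duplicated
   into the caller as region #7 (the numbers are made up), then a copied call
   such as

       __builtin_eh_pointer (2)

   must be rewritten to refer to region #7 in the caller; id->eh_map holds
   the old-region -> new-region pointers that make this translation.  */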
1194 /* Helper for copy_bb. Remap statement STMT using the inlining
1195 information in ID. Return the new statement copy. */
1196
1197 static gimple
1198 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1199 {
1200 gimple copy = NULL;
1201 struct walk_stmt_info wi;
1202 tree new_block;
1203 bool skip_first = false;
1204
1205 /* Begin by recognizing trees that we'll completely rewrite for the
1206 inlining context. Our output for these trees is completely
1207 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1208 into an edge). Further down, we'll handle trees that get
1209 duplicated and/or tweaked. */
1210
1211 /* When requested, GIMPLE_RETURNs should be transformed to just the
1212 contained GIMPLE_ASSIGN. The branch semantics of the return will
1213 be handled elsewhere by manipulating the CFG rather than the
1214 statement. */
1215 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1216 {
1217 tree retval = gimple_return_retval (stmt);
1218
1219 /* If we're returning something, just turn that into an
1220 assignment into the equivalent of the original RESULT_DECL.
1221 If RETVAL is just the result decl, the result decl has
1222 already been set (e.g. a recent "foo (&result_decl, ...)");
1223 just toss the entire GIMPLE_RETURN. */
1224 if (retval && TREE_CODE (retval) != RESULT_DECL)
1225 {
1226 copy = gimple_build_assign (id->retvar, retval);
1227 /* id->retvar is already substituted. Skip it on later remapping. */
1228 skip_first = true;
1229 }
1230 else
1231 return gimple_build_nop ();
1232 }
1233 else if (gimple_has_substatements (stmt))
1234 {
1235 gimple_seq s1, s2;
1236
1237 /* When cloning bodies from the C++ front end, we will be handed bodies
1238 in High GIMPLE form. Handle here all the High GIMPLE statements that
1239 have embedded statements. */
1240 switch (gimple_code (stmt))
1241 {
1242 case GIMPLE_BIND:
1243 copy = copy_gimple_bind (stmt, id);
1244 break;
1245
1246 case GIMPLE_CATCH:
1247 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1248 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1249 break;
1250
1251 case GIMPLE_EH_FILTER:
1252 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1253 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1254 break;
1255
1256 case GIMPLE_TRY:
1257 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1258 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1259 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1260 break;
1261
1262 case GIMPLE_WITH_CLEANUP_EXPR:
1263 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1264 copy = gimple_build_wce (s1);
1265 break;
1266
1267 case GIMPLE_OMP_PARALLEL:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_parallel
1270 (s1,
1271 gimple_omp_parallel_clauses (stmt),
1272 gimple_omp_parallel_child_fn (stmt),
1273 gimple_omp_parallel_data_arg (stmt));
1274 break;
1275
1276 case GIMPLE_OMP_TASK:
1277 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1278 copy = gimple_build_omp_task
1279 (s1,
1280 gimple_omp_task_clauses (stmt),
1281 gimple_omp_task_child_fn (stmt),
1282 gimple_omp_task_data_arg (stmt),
1283 gimple_omp_task_copy_fn (stmt),
1284 gimple_omp_task_arg_size (stmt),
1285 gimple_omp_task_arg_align (stmt));
1286 break;
1287
1288 case GIMPLE_OMP_FOR:
1289 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1290 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1291 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1292 gimple_omp_for_collapse (stmt), s2);
1293 {
1294 size_t i;
1295 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1296 {
1297 gimple_omp_for_set_index (copy, i,
1298 gimple_omp_for_index (stmt, i));
1299 gimple_omp_for_set_initial (copy, i,
1300 gimple_omp_for_initial (stmt, i));
1301 gimple_omp_for_set_final (copy, i,
1302 gimple_omp_for_final (stmt, i));
1303 gimple_omp_for_set_incr (copy, i,
1304 gimple_omp_for_incr (stmt, i));
1305 gimple_omp_for_set_cond (copy, i,
1306 gimple_omp_for_cond (stmt, i));
1307 }
1308 }
1309 break;
1310
1311 case GIMPLE_OMP_MASTER:
1312 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1313 copy = gimple_build_omp_master (s1);
1314 break;
1315
1316 case GIMPLE_OMP_ORDERED:
1317 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1318 copy = gimple_build_omp_ordered (s1);
1319 break;
1320
1321 case GIMPLE_OMP_SECTION:
1322 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1323 copy = gimple_build_omp_section (s1);
1324 break;
1325
1326 case GIMPLE_OMP_SECTIONS:
1327 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1328 copy = gimple_build_omp_sections
1329 (s1, gimple_omp_sections_clauses (stmt));
1330 break;
1331
1332 case GIMPLE_OMP_SINGLE:
1333 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1334 copy = gimple_build_omp_single
1335 (s1, gimple_omp_single_clauses (stmt));
1336 break;
1337
1338 case GIMPLE_OMP_CRITICAL:
1339 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1340 copy
1341 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1342 break;
1343
1344 default:
1345 gcc_unreachable ();
1346 }
1347 }
1348 else
1349 {
1350 if (gimple_assign_copy_p (stmt)
1351 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1352 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1353 {
1354 /* Here we handle statements that are not completely rewritten.
1355 First we detect some inlining-induced bogosities for
1356 discarding. */
1357
1358 /* Some assignments VAR = VAR; don't generate any rtl code
1359 and thus don't count as variable modification. Avoid
1360 keeping bogosities like 0 = 0. */
1361 tree decl = gimple_assign_lhs (stmt), value;
1362 tree *n;
1363
1364 n = (tree *) pointer_map_contains (id->decl_map, decl);
1365 if (n)
1366 {
1367 value = *n;
1368 STRIP_TYPE_NOPS (value);
1369 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1370 return gimple_build_nop ();
1371 }
1372 }
1373
1374 if (gimple_debug_bind_p (stmt))
1375 {
1376 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1377 gimple_debug_bind_get_value (stmt),
1378 stmt);
1379 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1380 return copy;
1381 }
1382
1383 /* Create a new deep copy of the statement. */
1384 copy = gimple_copy (stmt);
1385
1386 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1387 RESX and EH_DISPATCH. */
1388 if (id->eh_map)
1389 switch (gimple_code (copy))
1390 {
1391 case GIMPLE_CALL:
1392 {
1393 tree r, fndecl = gimple_call_fndecl (copy);
1394 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1395 switch (DECL_FUNCTION_CODE (fndecl))
1396 {
1397 case BUILT_IN_EH_COPY_VALUES:
1398 r = gimple_call_arg (copy, 1);
1399 r = remap_eh_region_tree_nr (r, id);
1400 gimple_call_set_arg (copy, 1, r);
1401 /* FALLTHRU */
1402
1403 case BUILT_IN_EH_POINTER:
1404 case BUILT_IN_EH_FILTER:
1405 r = gimple_call_arg (copy, 0);
1406 r = remap_eh_region_tree_nr (r, id);
1407 gimple_call_set_arg (copy, 0, r);
1408 break;
1409
1410 default:
1411 break;
1412 }
1413
1414 /* Reset alias info if we didn't apply measures to
1415 keep it valid over inlining by setting DECL_PT_UID. */
1416 if (!id->src_cfun->gimple_df
1417 || !id->src_cfun->gimple_df->ipa_pta)
1418 gimple_call_reset_alias_info (copy);
1419 }
1420 break;
1421
1422 case GIMPLE_RESX:
1423 {
1424 int r = gimple_resx_region (copy);
1425 r = remap_eh_region_nr (r, id);
1426 gimple_resx_set_region (copy, r);
1427 }
1428 break;
1429
1430 case GIMPLE_EH_DISPATCH:
1431 {
1432 int r = gimple_eh_dispatch_region (copy);
1433 r = remap_eh_region_nr (r, id);
1434 gimple_eh_dispatch_set_region (copy, r);
1435 }
1436 break;
1437
1438 default:
1439 break;
1440 }
1441 }
1442
1443 /* If STMT has a block defined, map it to the newly constructed
1444 block. When inlining we want statements without a block to
1445 appear in the block of the function call. */
1446 new_block = id->block;
1447 if (gimple_block (copy))
1448 {
1449 tree *n;
1450 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1451 gcc_assert (n);
1452 new_block = *n;
1453 }
1454
1455 gimple_set_block (copy, new_block);
1456
1457 if (gimple_debug_bind_p (copy))
1458 return copy;
1459
1460 /* Remap all the operands in COPY. */
1461 memset (&wi, 0, sizeof (wi));
1462 wi.info = id;
1463 if (skip_first)
1464 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1465 else
1466 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1467
1468 /* Clear the copied virtual operands. We are not remapping them here
1469 but are going to recreate them from scratch. */
1470 if (gimple_has_mem_ops (copy))
1471 {
1472 gimple_set_vdef (copy, NULL_TREE);
1473 gimple_set_vuse (copy, NULL_TREE);
1474 }
1475
1476 return copy;
1477 }
1478
1479
1480 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1481 later. */
1482
1483 static basic_block
1484 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1485 gcov_type count_scale)
1486 {
1487 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1488 basic_block copy_basic_block;
1489 tree decl;
1490 gcov_type freq;
1491
1492 /* create_basic_block() will append every new block to
1493 basic_block_info automatically. */
1494 copy_basic_block = create_basic_block (NULL, (void *) 0,
1495 (basic_block) bb->prev_bb->aux);
1496 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1497
1498 /* We are going to rebuild frequencies from scratch. These values
1499 are only of minor importance for driving canonicalize_loop_headers. */
1500 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1501
1502 /* We recompute frequencies after inlining, so this is quite safe. */
1503 if (freq > BB_FREQ_MAX)
1504 freq = BB_FREQ_MAX;
1505 copy_basic_block->frequency = freq;
1506
1507 copy_gsi = gsi_start_bb (copy_basic_block);
1508
1509 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1510 {
1511 gimple stmt = gsi_stmt (gsi);
1512 gimple orig_stmt = stmt;
1513
1514 id->regimplify = false;
1515 stmt = remap_gimple_stmt (stmt, id);
1516 if (gimple_nop_p (stmt))
1517 continue;
1518
1519 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1520 seq_gsi = copy_gsi;
1521
1522 /* With return slot optimization we can end up with
1523 non-gimple (foo *)&this->m, fix that here. */
1524 if (is_gimple_assign (stmt)
1525 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1526 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1527 {
1528 tree new_rhs;
1529 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1530 gimple_assign_rhs1 (stmt),
1531 true, NULL, false, GSI_NEW_STMT);
1532 gimple_assign_set_rhs1 (stmt, new_rhs);
1533 id->regimplify = false;
1534 }
1535
1536 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1537
1538 if (id->regimplify)
1539 gimple_regimplify_operands (stmt, &seq_gsi);
1540
1541 /* If copy_basic_block has been empty at the start of this iteration,
1542 call gsi_start_bb again to get at the newly added statements. */
1543 if (gsi_end_p (copy_gsi))
1544 copy_gsi = gsi_start_bb (copy_basic_block);
1545 else
1546 gsi_next (&copy_gsi);
1547
1548 /* Process the new statement. The call to gimple_regimplify_operands
1549 possibly turned the statement into multiple statements, we
1550 need to process all of them. */
1551 do
1552 {
1553 tree fn;
1554
1555 stmt = gsi_stmt (copy_gsi);
1556 if (is_gimple_call (stmt)
1557 && gimple_call_va_arg_pack_p (stmt)
1558 && id->gimple_call)
1559 {
1560 /* __builtin_va_arg_pack () should be replaced by
1561 all arguments corresponding to ... in the caller. */
1562 tree p;
1563 gimple new_call;
1564 VEC(tree, heap) *argarray;
1565 size_t nargs = gimple_call_num_args (id->gimple_call);
1566 size_t n;
1567
1568 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1569 nargs--;
1570
1571 /* Create the new array of arguments. */
1572 n = nargs + gimple_call_num_args (stmt);
1573 argarray = VEC_alloc (tree, heap, n);
1574 VEC_safe_grow (tree, heap, argarray, n);
1575
1576 /* Copy all the arguments before '...' */
1577 memcpy (VEC_address (tree, argarray),
1578 gimple_call_arg_ptr (stmt, 0),
1579 gimple_call_num_args (stmt) * sizeof (tree));
1580
1581 /* Append the arguments passed in '...' */
1582 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1583 gimple_call_arg_ptr (id->gimple_call, 0)
1584 + (gimple_call_num_args (id->gimple_call) - nargs),
1585 nargs * sizeof (tree));
1586
1587 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1588 argarray);
1589
1590 VEC_free (tree, heap, argarray);
1591
1592 /* Copy all GIMPLE_CALL flags, location and block, except
1593 GF_CALL_VA_ARG_PACK. */
1594 gimple_call_copy_flags (new_call, stmt);
1595 gimple_call_set_va_arg_pack (new_call, false);
1596 gimple_set_location (new_call, gimple_location (stmt));
1597 gimple_set_block (new_call, gimple_block (stmt));
1598 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1599
1600 gsi_replace (&copy_gsi, new_call, false);
1601 gimple_set_bb (stmt, NULL);
1602 stmt = new_call;
1603 }
1604 else if (is_gimple_call (stmt)
1605 && id->gimple_call
1606 && (decl = gimple_call_fndecl (stmt))
1607 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1608 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1609 {
1610 /* __builtin_va_arg_pack_len () should be replaced by
1611 the number of anonymous arguments. */
1612 size_t nargs = gimple_call_num_args (id->gimple_call);
1613 tree count, p;
1614 gimple new_stmt;
1615
1616 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1617 nargs--;
1618
1619 count = build_int_cst (integer_type_node, nargs);
1620 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1621 gsi_replace (&copy_gsi, new_stmt, false);
1622 stmt = new_stmt;
1623 }
1624
1625 /* Statements produced by inlining can be unfolded, especially
1626 when we constant propagated some operands. We can't fold
1627 them right now for two reasons:
1628 1) folding requires SSA_NAME_DEF_STMTs to be correct
1629 2) we can't change function calls to builtins.
1630 So we just mark the statement for later folding. We mark
1631 all new statements, instead of just the statements that have changed
1632 by some nontrivial substitution, so that even statements made
1633 foldable indirectly are updated. If this turns out to be
1634 expensive, copy_body can be told to watch for nontrivial
1635 changes. */
1636 if (id->statements_to_fold)
1637 pointer_set_insert (id->statements_to_fold, stmt);
1638
1639 /* We're duplicating a CALL_EXPR. Find any corresponding
1640 callgraph edges and update or duplicate them. */
1641 if (is_gimple_call (stmt))
1642 {
1643 struct cgraph_edge *edge;
1644 int flags;
1645
1646 switch (id->transform_call_graph_edges)
1647 {
1648 case CB_CGE_DUPLICATE:
1649 edge = cgraph_edge (id->src_node, orig_stmt);
1650 if (edge)
1651 {
1652 int edge_freq = edge->frequency;
1653 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1654 gimple_uid (stmt),
1655 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1656 edge->frequency, true);
1657 /* We could also just rescale the frequency, but
1658 doing so would introduce roundoff errors and make
1659 the verifier unhappy. */
1660 edge->frequency
1661 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1662 copy_basic_block);
1663 if (dump_file
1664 && profile_status_for_function (cfun) != PROFILE_ABSENT
1665 && (edge_freq > edge->frequency + 10
1666 || edge_freq < edge->frequency - 10))
1667 {
1668 fprintf (dump_file, "Edge frequency estimated by "
1669 "cgraph %i diverge from inliner's estimate %i\n",
1670 edge_freq,
1671 edge->frequency);
1672 fprintf (dump_file,
1673 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1674 bb->index,
1675 bb->frequency,
1676 copy_basic_block->frequency);
1677 }
1678 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1679 }
1680 break;
1681
1682 case CB_CGE_MOVE_CLONES:
1683 cgraph_set_call_stmt_including_clones (id->dst_node,
1684 orig_stmt, stmt);
1685 edge = cgraph_edge (id->dst_node, stmt);
1686 break;
1687
1688 case CB_CGE_MOVE:
1689 edge = cgraph_edge (id->dst_node, orig_stmt);
1690 if (edge)
1691 cgraph_set_call_stmt (edge, stmt);
1692 break;
1693
1694 default:
1695 gcc_unreachable ();
1696 }
1697
1698 /* Constant propagation on argument done during inlining
1699 may create new direct call. Produce an edge for it. */
1700 if ((!edge
1701 || (edge->indirect_inlining_edge
1702 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1703 && (fn = gimple_call_fndecl (stmt)) != NULL)
1704 {
1705 struct cgraph_node *dest = cgraph_node (fn);
1706
1707 /* We have a missing edge in the callgraph. This can happen
1708 when previous inlining turned an indirect call into a
1709 direct call by constant propagating arguments, or when we are
1710 producing a dead clone (for further cloning). In all
1711 other cases we hit a bug (incorrect node sharing is the
1712 most common reason for missing edges). */
1713 gcc_assert (dest->needed || !dest->analyzed
1714 || dest->address_taken
1715 || !id->src_node->analyzed);
1716 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1717 cgraph_create_edge_including_clones
1718 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1719 compute_call_stmt_bb_frequency (id->dst_node->decl,
1720 copy_basic_block),
1721 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1722 else
1723 cgraph_create_edge (id->dst_node, dest, stmt,
1724 bb->count,
1725 compute_call_stmt_bb_frequency
1726 (id->dst_node->decl, copy_basic_block),
1727 bb->loop_depth)->inline_failed
1728 = CIF_ORIGINALLY_INDIRECT_CALL;
1729 if (dump_file)
1730 {
1731 fprintf (dump_file, "Created new direct edge to %s",
1732 cgraph_node_name (dest));
1733 }
1734 }
1735
1736 flags = gimple_call_flags (stmt);
1737 if (flags & ECF_MAY_BE_ALLOCA)
1738 cfun->calls_alloca = true;
1739 if (flags & ECF_RETURNS_TWICE)
1740 cfun->calls_setjmp = true;
1741 }
1742
1743 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1744 id->eh_map, id->eh_lp_nr);
1745
1746 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1747 {
1748 ssa_op_iter i;
1749 tree def;
1750
1751 find_new_referenced_vars (gsi_stmt (copy_gsi));
1752 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1753 if (TREE_CODE (def) == SSA_NAME)
1754 SSA_NAME_DEF_STMT (def) = stmt;
1755 }
1756
1757 gsi_next (&copy_gsi);
1758 }
1759 while (!gsi_end_p (copy_gsi));
1760
1761 copy_gsi = gsi_last_bb (copy_basic_block);
1762 }
1763
1764 return copy_basic_block;
1765 }
1766
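/* Illustrative sketch (not part of the original file) of the
   __builtin_va_arg_pack expansion done in copy_bb.  With a callee like

       static inline int err (int code, ...)
       { return report (code, __builtin_va_arg_pack ()); }

   and a call `err (1, "x", 2)', the copied call to report() receives the
   named arguments it already had plus the caller's anonymous arguments,
   i.e. it becomes `report (1, "x", 2)'; __builtin_va_arg_pack_len () would
   likewise be replaced by the constant 2.  The identifiers are made up.  */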
1767 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1768 form is quite easy, since dominator relationship for old basic blocks does
1769 not change.
1770
1771 There is however exception where inlining might change dominator relation
1772 across EH edges from basic block within inlined functions destinating
1773 to landing pads in function we inline into.
1774
1775 The function fills in PHI_RESULTs of such PHI nodes if they refer
1776 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1777 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1778 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1779 set, and this means that there will be no overlapping live ranges
1780 for the underlying symbol.
1781
1782 This might change in the future if we allow redirecting of EH edges and
1783 we might want to change the way we build the CFG pre-inlining to include
1784 all the possible edges then. */
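/* As an illustrative scenario (a sketch, not an exhaustive description):
   suppose an inlined block B can throw to landing pad L in the caller, and
   L already has PHI nodes because the original call statement reached it on
   an EH edge.  For a PHI whose result is a gimple register we look up the
   edge from RET_BB to L and reuse its argument for the newly created edge
   from the copy of B; for other PHIs we simply queue the underlying symbol
   for renaming, as explained above.  */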
1785 static void
1786 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1787 bool can_throw, bool nonlocal_goto)
1788 {
1789 edge e;
1790 edge_iterator ei;
1791
1792 FOR_EACH_EDGE (e, ei, bb->succs)
1793 if (!e->dest->aux
1794 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1795 {
1796 gimple phi;
1797 gimple_stmt_iterator si;
1798
1799 if (!nonlocal_goto)
1800 gcc_assert (e->flags & EDGE_EH);
1801
1802 if (!can_throw)
1803 gcc_assert (!(e->flags & EDGE_EH));
1804
1805 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1806 {
1807 edge re;
1808
1809 phi = gsi_stmt (si);
1810
1811 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1812 gcc_assert (!e->dest->aux);
1813
1814 gcc_assert ((e->flags & EDGE_EH)
1815 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1816
1817 if (!is_gimple_reg (PHI_RESULT (phi)))
1818 {
1819 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1820 continue;
1821 }
1822
1823 re = find_edge (ret_bb, e->dest);
1824 gcc_assert (re);
1825 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1826 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1827
1828 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1829 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1830 }
1831 }
1832 }
1833
1834
1835 /* Copy edges from BB into its copy constructed earlier, scale profile
1836 accordingly. Edges will be taken care of later. Assume aux
1837 pointers point to the copies of each BB. */
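/* Illustrative arithmetic (example numbers only): with REG_BR_PROB_BASE of
   10000, if the callee's entry count is 400 and the call site contributes a
   count of 100, copy_cfg_body computes a count_scale of 10000 * 100 / 400
   == 2500, so each copied edge count below becomes
   old_edge->count * 2500 / 10000, i.e. one quarter of the original.  */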
1838
1839 static void
1840 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1841 {
1842 basic_block new_bb = (basic_block) bb->aux;
1843 edge_iterator ei;
1844 edge old_edge;
1845 gimple_stmt_iterator si;
1846 int flags;
1847
1848 /* Use the indices from the original blocks to create edges for the
1849 new ones. */
1850 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1851 if (!(old_edge->flags & EDGE_EH))
1852 {
1853 edge new_edge;
1854
1855 flags = old_edge->flags;
1856
1857 /* Return edges do get a FALLTHRU flag when they get inlined. */
1858 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1859 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1860 flags |= EDGE_FALLTHRU;
1861 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1862 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1863 new_edge->probability = old_edge->probability;
1864 }
1865
1866 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1867 return;
1868
1869 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1870 {
1871 gimple copy_stmt;
1872 bool can_throw, nonlocal_goto;
1873
1874 copy_stmt = gsi_stmt (si);
1875 if (!is_gimple_debug (copy_stmt))
1876 {
1877 update_stmt (copy_stmt);
1878 if (gimple_in_ssa_p (cfun))
1879 mark_symbols_for_renaming (copy_stmt);
1880 }
1881
1882 /* Do this before the possible split_block. */
1883 gsi_next (&si);
1884
1885 /* If this tree could throw an exception, there are two
1886 cases where we need to add abnormal edge(s): the
1887 tree wasn't in a region and there is a "current
1888 region" in the caller; or the original tree had
1889 EH edges. In both cases split the block after the tree,
1890 and add abnormal edge(s) as needed; we need both
1891 those from the callee and the caller.
1892 We check whether the copy can throw, because the const
1893 propagation can change an INDIRECT_REF which throws
1894 into a COMPONENT_REF which doesn't. If the copy
1895 can throw, the original could also throw. */
1896 can_throw = stmt_can_throw_internal (copy_stmt);
1897 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1898
1899 if (can_throw || nonlocal_goto)
1900 {
1901 if (!gsi_end_p (si))
1902 /* Note that bb's predecessor edges aren't necessarily
1903 right at this point; split_block doesn't care. */
1904 {
1905 edge e = split_block (new_bb, copy_stmt);
1906
1907 new_bb = e->dest;
1908 new_bb->aux = e->src->aux;
1909 si = gsi_start_bb (new_bb);
1910 }
1911 }
1912
1913 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1914 make_eh_dispatch_edges (copy_stmt);
1915 else if (can_throw)
1916 make_eh_edges (copy_stmt);
1917
1918 if (nonlocal_goto)
1919 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1920
1921 if ((can_throw || nonlocal_goto)
1922 && gimple_in_ssa_p (cfun))
1923 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1924 can_throw, nonlocal_goto);
1925 }
1926 }
1927
1928 /* Copy the PHIs. All blocks and edges have been copied, and some blocks
1929 may have been split with new outgoing EH edges inserted.
1930 BB points to the block of the original function and AUX pointers link
1931 the original and newly copied blocks. */
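/* Sketch of the steps below (illustrative, mirroring the code): for each
   PHI result that is a gimple register we create a fresh PHI node in the
   copied block and, for every incoming edge of the copy, fetch the argument
   from the corresponding edge of the original block (found through the aux
   pointers) and remap it with copy_tree_body_r.  Arguments that end up
   non-gimple, e.g. after return slot optimization, are forced into a
   temporary emitted on the edge.  */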
1932
1933 static void
1934 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1935 {
1936 basic_block const new_bb = (basic_block) bb->aux;
1937 edge_iterator ei;
1938 gimple phi;
1939 gimple_stmt_iterator si;
1940
1941 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1942 {
1943 tree res, new_res;
1944 gimple new_phi;
1945 edge new_edge;
1946
1947 phi = gsi_stmt (si);
1948 res = PHI_RESULT (phi);
1949 new_res = res;
1950 if (is_gimple_reg (res))
1951 {
1952 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1953 SSA_NAME_DEF_STMT (new_res)
1954 = new_phi = create_phi_node (new_res, new_bb);
1955 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1956 {
1957 edge const old_edge
1958 = find_edge ((basic_block) new_edge->src->aux, bb);
1959 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1960 tree new_arg = arg;
1961 tree block = id->block;
1962 id->block = NULL_TREE;
1963 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1964 id->block = block;
1965 gcc_assert (new_arg);
1966 /* With return slot optimization we can end up with
1967 non-gimple (foo *)&this->m, fix that here. */
1968 if (TREE_CODE (new_arg) != SSA_NAME
1969 && TREE_CODE (new_arg) != FUNCTION_DECL
1970 && !is_gimple_val (new_arg))
1971 {
1972 gimple_seq stmts = NULL;
1973 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1974 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1975 }
1976 add_phi_arg (new_phi, new_arg, new_edge,
1977 gimple_phi_arg_location_from_edge (phi, old_edge));
1978 }
1979 }
1980 }
1981 }
1982
1983
1984 /* Wrapper for remap_decl so it can be used as a callback. */
1985
1986 static tree
1987 remap_decl_1 (tree decl, void *data)
1988 {
1989 return remap_decl (decl, (copy_body_data *) data);
1990 }
1991
1992 /* Build the struct function and associated data structures for the new
1993 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1994
1995 static void
1996 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1997 {
1998 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1999 gcov_type count_scale;
2000
2001 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2002 count_scale = (REG_BR_PROB_BASE * count
2003 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2004 else
2005 count_scale = REG_BR_PROB_BASE;
2006
2007 /* Register specific tree functions. */
2008 gimple_register_cfg_hooks ();
2009
2010 /* Get clean struct function. */
2011 push_struct_function (new_fndecl);
2012
2013 /* We will rebuild these, so just sanity check that they are empty. */
2014 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2015 gcc_assert (cfun->local_decls == NULL);
2016 gcc_assert (cfun->cfg == NULL);
2017 gcc_assert (cfun->decl == new_fndecl);
2018
2019 /* Copy items we preserve during cloning. */
2020 cfun->static_chain_decl = src_cfun->static_chain_decl;
2021 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2022 cfun->function_end_locus = src_cfun->function_end_locus;
2023 cfun->curr_properties = src_cfun->curr_properties;
2024 cfun->last_verified = src_cfun->last_verified;
2025 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2026 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2027 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2028 cfun->stdarg = src_cfun->stdarg;
2029 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2030 cfun->after_inlining = src_cfun->after_inlining;
2031 cfun->returns_struct = src_cfun->returns_struct;
2032 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2033 cfun->after_tree_profile = src_cfun->after_tree_profile;
2034
2035 init_empty_tree_cfg ();
2036
2037 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2038 ENTRY_BLOCK_PTR->count =
2039 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2040 REG_BR_PROB_BASE);
2041 ENTRY_BLOCK_PTR->frequency
2042 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2043 EXIT_BLOCK_PTR->count =
2044 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2045 REG_BR_PROB_BASE);
2046 EXIT_BLOCK_PTR->frequency =
2047 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2048 if (src_cfun->eh)
2049 init_eh_for_function ();
2050
2051 if (src_cfun->gimple_df)
2052 {
2053 init_tree_ssa (cfun);
2054 cfun->gimple_df->in_ssa_p = true;
2055 init_ssa_operands ();
2056 }
2057 pop_cfun ();
2058 }
2059
2060 /* Make a copy of the body of FN so that it can be inserted inline in
2061 another function. Walks FN via CFG, returns new fndecl. */
2062
2063 static tree
2064 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2065 basic_block entry_block_map, basic_block exit_block_map)
2066 {
2067 tree callee_fndecl = id->src_fn;
2068 /* Original cfun for the callee, doesn't change. */
2069 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2070 struct function *cfun_to_copy;
2071 basic_block bb;
2072 tree new_fndecl = NULL;
2073 gcov_type count_scale;
2074 int last;
2075
2076 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2077 count_scale = (REG_BR_PROB_BASE * count
2078 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2079 else
2080 count_scale = REG_BR_PROB_BASE;
2081
2082 /* Register specific tree functions. */
2083 gimple_register_cfg_hooks ();
2084
2085 /* Must have a CFG here at this point. */
2086 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2087 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2088
2089 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2090
2091 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2092 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2093 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2094 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2095
2096 /* Duplicate any exception-handling regions. */
2097 if (cfun->eh)
2098 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2099 remap_decl_1, id);
2100
2101 /* Use aux pointers to map the original blocks to their copies. */
2102 FOR_EACH_BB_FN (bb, cfun_to_copy)
2103 {
2104 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2105 bb->aux = new_bb;
2106 new_bb->aux = bb;
2107 }
2108
2109 last = last_basic_block;
2110
2111 /* Now that we've duplicated the blocks, duplicate their edges. */
2112 FOR_ALL_BB_FN (bb, cfun_to_copy)
2113 copy_edges_for_bb (bb, count_scale, exit_block_map);
2114
2115 if (gimple_in_ssa_p (cfun))
2116 FOR_ALL_BB_FN (bb, cfun_to_copy)
2117 copy_phis_for_bb (bb, id);
2118
2119 FOR_ALL_BB_FN (bb, cfun_to_copy)
2120 {
2121 ((basic_block)bb->aux)->aux = NULL;
2122 bb->aux = NULL;
2123 }
2124
2125 /* Zero out AUX fields of the blocks newly created during EH edge
2126 insertion. */
2127 for (; last < last_basic_block; last++)
2128 BASIC_BLOCK (last)->aux = NULL;
2129 entry_block_map->aux = NULL;
2130 exit_block_map->aux = NULL;
2131
2132 if (id->eh_map)
2133 {
2134 pointer_map_destroy (id->eh_map);
2135 id->eh_map = NULL;
2136 }
2137
2138 return new_fndecl;
2139 }
2140
2141 /* Copy the debug STMT using ID. We deal with these statements in a
2142 special way: if any variable in their VALUE expression wasn't
2143 remapped yet, we won't remap it, because that would get decl uids
2144 out of sync, causing codegen differences between -g and -g0. If
2145 this arises, we drop the VALUE expression altogether. */
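/* Illustrative consequence (restating the rule above, not adding to it):
   if the VALUE expression of a debug bind mentions a callee variable that
   has no entry in the decl map yet, we do not create one on the fly;
   minting a new decl only because debug statements are present would
   perturb DECL_UID numbering between -g and -g0 builds.  The bind is kept
   but its VALUE is reset.  */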
2146
2147 static void
2148 copy_debug_stmt (gimple stmt, copy_body_data *id)
2149 {
2150 tree t, *n;
2151 struct walk_stmt_info wi;
2152
2153 t = id->block;
2154 if (gimple_block (stmt))
2155 {
2156 tree *n;
2157 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2158 if (n)
2159 t = *n;
2160 }
2161 gimple_set_block (stmt, t);
2162
2163 /* Remap all the operands in COPY. */
2164 memset (&wi, 0, sizeof (wi));
2165 wi.info = id;
2166
2167 processing_debug_stmt = 1;
2168
2169 t = gimple_debug_bind_get_var (stmt);
2170
2171 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2172 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2173 {
2174 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2175 t = *n;
2176 }
2177 else if (TREE_CODE (t) == VAR_DECL
2178 && !TREE_STATIC (t)
2179 && gimple_in_ssa_p (cfun)
2180 && !pointer_map_contains (id->decl_map, t)
2181 && !var_ann (t))
2182 /* T is a non-localized variable. */;
2183 else
2184 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2185
2186 gimple_debug_bind_set_var (stmt, t);
2187
2188 if (gimple_debug_bind_has_value_p (stmt))
2189 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2190 remap_gimple_op_r, &wi, NULL);
2191
2192 /* Punt if any decl couldn't be remapped. */
2193 if (processing_debug_stmt < 0)
2194 gimple_debug_bind_reset_value (stmt);
2195
2196 processing_debug_stmt = 0;
2197
2198 update_stmt (stmt);
2199 if (gimple_in_ssa_p (cfun))
2200 mark_symbols_for_renaming (stmt);
2201 }
2202
2203 /* Process deferred debug stmts. In order to give values better odds
2204 of being successfully remapped, we delay the processing of debug
2205 stmts until all other stmts that might require remapping are
2206 processed. */
2207
2208 static void
2209 copy_debug_stmts (copy_body_data *id)
2210 {
2211 size_t i;
2212 gimple stmt;
2213
2214 if (!id->debug_stmts)
2215 return;
2216
2217 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2218 copy_debug_stmt (stmt, id);
2219
2220 VEC_free (gimple, heap, id->debug_stmts);
2221 }
2222
2223 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2224 another function. */
2225
2226 static tree
2227 copy_tree_body (copy_body_data *id)
2228 {
2229 tree fndecl = id->src_fn;
2230 tree body = DECL_SAVED_TREE (fndecl);
2231
2232 walk_tree (&body, copy_tree_body_r, id, NULL);
2233
2234 return body;
2235 }
2236
2237 /* Make a copy of the body of FN so that it can be inserted inline in
2238 another function. */
2239
2240 static tree
2241 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2242 basic_block entry_block_map, basic_block exit_block_map)
2243 {
2244 tree fndecl = id->src_fn;
2245 tree body;
2246
2247 /* If this body has a CFG, walk CFG and copy. */
2248 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2249 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2250 copy_debug_stmts (id);
2251
2252 return body;
2253 }
2254
2255 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2256 defined in function FN, or of a data member thereof. */
2257
2258 static bool
2259 self_inlining_addr_expr (tree value, tree fn)
2260 {
2261 tree var;
2262
2263 if (TREE_CODE (value) != ADDR_EXPR)
2264 return false;
2265
2266 var = get_base_address (TREE_OPERAND (value, 0));
2267
2268 return var && auto_var_in_fn_p (var, fn);
2269 }
2270
2271 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2272 lexical block and line number information from base_stmt, if given,
2273 or from the last stmt of the block otherwise. */
2274
2275 static gimple
2276 insert_init_debug_bind (copy_body_data *id,
2277 basic_block bb, tree var, tree value,
2278 gimple base_stmt)
2279 {
2280 gimple note;
2281 gimple_stmt_iterator gsi;
2282 tree tracked_var;
2283
2284 if (!gimple_in_ssa_p (id->src_cfun))
2285 return NULL;
2286
2287 if (!MAY_HAVE_DEBUG_STMTS)
2288 return NULL;
2289
2290 tracked_var = target_for_debug_bind (var);
2291 if (!tracked_var)
2292 return NULL;
2293
2294 if (bb)
2295 {
2296 gsi = gsi_last_bb (bb);
2297 if (!base_stmt && !gsi_end_p (gsi))
2298 base_stmt = gsi_stmt (gsi);
2299 }
2300
2301 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2302
2303 if (bb)
2304 {
2305 if (!gsi_end_p (gsi))
2306 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2307 else
2308 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2309 }
2310
2311 return note;
2312 }
2313
2314 static void
2315 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2316 {
2317 /* If VAR represents a zero-sized variable, it's possible that the
2318 assignment statement may result in no gimple statements. */
2319 if (init_stmt)
2320 {
2321 gimple_stmt_iterator si = gsi_last_bb (bb);
2322
2323 /* We can end up with init statements that store to a non-register
2324 from a rhs with a conversion. Handle that here by forcing the
2325 rhs into a temporary. gimple_regimplify_operands is not
2326 prepared to do this for us. */
2327 if (!is_gimple_debug (init_stmt)
2328 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2329 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2330 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2331 {
2332 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2333 gimple_expr_type (init_stmt),
2334 gimple_assign_rhs1 (init_stmt));
2335 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2336 GSI_NEW_STMT);
2337 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2338 gimple_assign_set_rhs1 (init_stmt, rhs);
2339 }
2340 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2341 gimple_regimplify_operands (init_stmt, &si);
2342 mark_symbols_for_renaming (init_stmt);
2343
2344 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2345 {
2346 tree var, def = gimple_assign_lhs (init_stmt);
2347
2348 if (TREE_CODE (def) == SSA_NAME)
2349 var = SSA_NAME_VAR (def);
2350 else
2351 var = def;
2352
2353 insert_init_debug_bind (id, bb, var, def, init_stmt);
2354 }
2355 }
2356 }
2357
2358 /* Initialize parameter P with VALUE. If needed, produce the init statement
2359 at the end of BB. When BB is NULL, we return the init statement to be
2360 output later. */
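/* Worked example (illustrative source, not from any testcase): when
   inlining a call foo (5) where foo's parameter is "int p", we normally
   create a local VAR_DECL for P and emit an initialization of it to 5 at
   the end of BB.  If P is read-only, never written in the callee and the
   argument is a gimple invariant, we may instead record the mapping
   P -> 5 directly and emit no initialization at all; in SSA form the
   default definition of P can likewise be mapped straight to the actual
   argument.  */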
2361 static gimple
2362 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2363 basic_block bb, tree *vars)
2364 {
2365 gimple init_stmt = NULL;
2366 tree var;
2367 tree rhs = value;
2368 tree def = (gimple_in_ssa_p (cfun)
2369 ? gimple_default_def (id->src_cfun, p) : NULL);
2370
2371 if (value
2372 && value != error_mark_node
2373 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2374 {
2375 if (fold_convertible_p (TREE_TYPE (p), value))
2376 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2377 else
2378 /* ??? For valid (GIMPLE) programs we should not end up here.
2379 Still if something has gone wrong and we end up with truly
2380 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2381 to not leak invalid GIMPLE to the following passes. */
2382 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2383 }
2384
2385 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2386 here since the type of this decl must be visible to the calling
2387 function. */
2388 var = copy_decl_to_var (p, id);
2389
2390 /* We're actually using the newly-created var. */
2391 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2392 {
2393 get_var_ann (var);
2394 add_referenced_var (var);
2395 }
2396
2397 /* Declare this new variable. */
2398 TREE_CHAIN (var) = *vars;
2399 *vars = var;
2400
2401 /* Make gimplifier happy about this variable. */
2402 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2403
2404 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2405 we would not need to create a new variable here at all, if it
2406 weren't for debug info. Still, we can just use the argument
2407 value. */
2408 if (TREE_READONLY (p)
2409 && !TREE_ADDRESSABLE (p)
2410 && value && !TREE_SIDE_EFFECTS (value)
2411 && !def)
2412 {
2413 /* We may produce non-gimple trees by adding NOPs or introduce
2414 invalid sharing when the operand is not really constant.
2415 It is not a big deal to prohibit constant propagation here as
2416 we will constant propagate in the DOM1 pass anyway. */
2417 if (is_gimple_min_invariant (value)
2418 && useless_type_conversion_p (TREE_TYPE (p),
2419 TREE_TYPE (value))
2420 /* We have to be very careful about ADDR_EXPR. Make sure
2421 the base variable isn't a local variable of the inlined
2422 function, e.g., when doing recursive inlining, direct or
2423 mutually-recursive or whatever, which is why we don't
2424 just test whether fn == current_function_decl. */
2425 && ! self_inlining_addr_expr (value, fn))
2426 {
2427 insert_decl_map (id, p, value);
2428 insert_debug_decl_map (id, p, var);
2429 return insert_init_debug_bind (id, bb, var, value, NULL);
2430 }
2431 }
2432
2433 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2434 that way, when the PARM_DECL is encountered, it will be
2435 automatically replaced by the VAR_DECL. */
2436 insert_decl_map (id, p, var);
2437
2438 /* Even if P was TREE_READONLY, the new VAR should not be.
2439 In the original code, we would have constructed a
2440 temporary, and then the function body would have never
2441 changed the value of P. However, now, we will be
2442 constructing VAR directly. The constructor body may
2443 change its value multiple times as it is being
2444 constructed. Therefore, it must not be TREE_READONLY;
2445 the back-end assumes that TREE_READONLY variable is
2446 assigned to only once. */
2447 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2448 TREE_READONLY (var) = 0;
2449
2450 /* If there is no setup required and we are in SSA, take the easy route
2451 replacing all SSA names representing the function parameter by the
2452 SSA name passed to function.
2453
2454 We need to construct the map for the variable anyway as it might be
2455 used in different SSA names when the parameter is set in the function.
2456
2457 Do the replacement at -O0 too for const arguments replaced by constants.
2458 This is important for builtin_constant_p and other constructs requiring
2459 the constant argument to be visible in the inlined function body. */
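  /* For instance (illustrative SSA names): if the caller passes SSA name
     q_3 for parameter P, P's default definition p_1(D) in the callee is
     simply mapped to q_3 below, so uses of p_1(D) in the copied body read
     q_3 directly and no copy statement needs to be emitted.  */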
2460 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2461 && (optimize
2462 || (TREE_READONLY (p)
2463 && is_gimple_min_invariant (rhs)))
2464 && (TREE_CODE (rhs) == SSA_NAME
2465 || is_gimple_min_invariant (rhs))
2466 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2467 {
2468 insert_decl_map (id, def, rhs);
2469 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2470 }
2471
2472 /* If the value of the argument is never used, don't bother initializing
2473 it. */
2474 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2475 {
2476 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2477 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2478 }
2479
2480 /* Initialize this VAR_DECL from the equivalent argument. Convert
2481 the argument to the proper type in case it was promoted. */
2482 if (value)
2483 {
2484 if (rhs == error_mark_node)
2485 {
2486 insert_decl_map (id, p, var);
2487 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2488 }
2489
2490 STRIP_USELESS_TYPE_CONVERSION (rhs);
2491
2492 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2493 keep our trees in gimple form. */
2494 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2495 {
2496 def = remap_ssa_name (def, id);
2497 init_stmt = gimple_build_assign (def, rhs);
2498 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2499 set_default_def (var, NULL);
2500 }
2501 else
2502 init_stmt = gimple_build_assign (var, rhs);
2503
2504 if (bb && init_stmt)
2505 insert_init_stmt (id, bb, init_stmt);
2506 }
2507 return init_stmt;
2508 }
2509
2510 /* Generate code to initialize the parameters of the function at the
2511 top of the stack in ID from the GIMPLE_CALL STMT. */
2512
2513 static void
2514 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2515 tree fn, basic_block bb)
2516 {
2517 tree parms;
2518 size_t i;
2519 tree p;
2520 tree vars = NULL_TREE;
2521 tree static_chain = gimple_call_chain (stmt);
2522
2523 /* Figure out what the parameters are. */
2524 parms = DECL_ARGUMENTS (fn);
2525
2526 /* Loop through the parameter declarations, replacing each with an
2527 equivalent VAR_DECL, appropriately initialized. */
2528 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2529 {
2530 tree val;
2531 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2532 setup_one_parameter (id, p, val, fn, bb, &vars);
2533 }
2534
2535 /* Initialize the static chain. */
2536 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2537 gcc_assert (fn != current_function_decl);
2538 if (p)
2539 {
2540 /* No static chain? Seems like a bug in tree-nested.c. */
2541 gcc_assert (static_chain);
2542
2543 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2544 }
2545
2546 declare_inline_vars (id->block, vars);
2547 }
2548
2549
2550 /* Declare a return variable to replace the RESULT_DECL for the
2551 function we are calling. An appropriate DECL_STMT is returned.
2552 The USE_STMT is filled to contain a use of the declaration to
2553 indicate the return value of the function.
2554
2555 RETURN_SLOT, if non-null, is the place where to store the result. It
2556 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2557 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2558
2559 The return value is a (possibly null) value that holds the result
2560 as seen by the caller. */
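/* Illustrative cases (example code, not taken from the sources): for
   "x = foo ();" MODIFY_DEST is "x"; when the types match, "x" is a local
   non-addressable decl and the callee cannot otherwise modify it, we reuse
   "x" itself as the return variable instead of introducing a temporary.
   When the call fills a return slot (CALL_EXPR_RETURN_SLOT_OPT), e.g. for
   "struct S s = foo ();" with a large S, the result is constructed directly
   in the slot and the returned use is NULL.  */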
2561
2562 static tree
2563 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2564 {
2565 tree callee = id->src_fn;
2566 tree caller = id->dst_fn;
2567 tree result = DECL_RESULT (callee);
2568 tree callee_type = TREE_TYPE (result);
2569 tree caller_type;
2570 tree var, use;
2571
2572 /* Handle type-mismatches in the function declaration return type
2573 vs. the call expression. */
2574 if (modify_dest)
2575 caller_type = TREE_TYPE (modify_dest);
2576 else
2577 caller_type = TREE_TYPE (TREE_TYPE (callee));
2578
2579 /* We don't need to do anything for functions that don't return
2580 anything. */
2581 if (!result || VOID_TYPE_P (callee_type))
2582 return NULL_TREE;
2583
2584 /* If there was a return slot, then the return value is the
2585 dereferenced address of that object. */
2586 if (return_slot)
2587 {
2588 /* The front end shouldn't have used both return_slot and
2589 a modify expression. */
2590 gcc_assert (!modify_dest);
2591 if (DECL_BY_REFERENCE (result))
2592 {
2593 tree return_slot_addr = build_fold_addr_expr (return_slot);
2594 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2595
2596 /* We are going to construct *&return_slot and we can't do that
2597 for variables believed to be not addressable.
2598
2599 FIXME: This check possibly can match, because values returned
2600 via return slot optimization are not believed to have their address
2601 taken by alias analysis. */
2602 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2603 if (gimple_in_ssa_p (cfun))
2604 {
2605 HOST_WIDE_INT bitsize;
2606 HOST_WIDE_INT bitpos;
2607 tree offset;
2608 enum machine_mode mode;
2609 int unsignedp;
2610 int volatilep;
2611 tree base;
2612 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2613 &offset,
2614 &mode, &unsignedp, &volatilep,
2615 false);
2616 if (TREE_CODE (base) == INDIRECT_REF)
2617 base = TREE_OPERAND (base, 0);
2618 if (TREE_CODE (base) == SSA_NAME)
2619 base = SSA_NAME_VAR (base);
2620 mark_sym_for_renaming (base);
2621 }
2622 var = return_slot_addr;
2623 }
2624 else
2625 {
2626 var = return_slot;
2627 gcc_assert (TREE_CODE (var) != SSA_NAME);
2628 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2629 }
2630 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2631 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2632 && !DECL_GIMPLE_REG_P (result)
2633 && DECL_P (var))
2634 DECL_GIMPLE_REG_P (var) = 0;
2635 use = NULL;
2636 goto done;
2637 }
2638
2639 /* All types requiring non-trivial constructors should have been handled. */
2640 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2641
2642 /* Attempt to avoid creating a new temporary variable. */
2643 if (modify_dest
2644 && TREE_CODE (modify_dest) != SSA_NAME)
2645 {
2646 bool use_it = false;
2647
2648 /* We can't use MODIFY_DEST if there's type promotion involved. */
2649 if (!useless_type_conversion_p (callee_type, caller_type))
2650 use_it = false;
2651
2652 /* ??? If we're assigning to a variable sized type, then we must
2653 reuse the destination variable, because we've no good way to
2654 create variable sized temporaries at this point. */
2655 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2656 use_it = true;
2657
2658 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2659 reuse it as the result of the call directly. Don't do this if
2660 it would promote MODIFY_DEST to addressable. */
2661 else if (TREE_ADDRESSABLE (result))
2662 use_it = false;
2663 else
2664 {
2665 tree base_m = get_base_address (modify_dest);
2666
2667 /* If the base isn't a decl, then it's a pointer, and we don't
2668 know where that's going to go. */
2669 if (!DECL_P (base_m))
2670 use_it = false;
2671 else if (is_global_var (base_m))
2672 use_it = false;
2673 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2674 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2675 && !DECL_GIMPLE_REG_P (result)
2676 && DECL_GIMPLE_REG_P (base_m))
2677 use_it = false;
2678 else if (!TREE_ADDRESSABLE (base_m))
2679 use_it = true;
2680 }
2681
2682 if (use_it)
2683 {
2684 var = modify_dest;
2685 use = NULL;
2686 goto done;
2687 }
2688 }
2689
2690 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2691
2692 var = copy_result_decl_to_var (result, id);
2693 if (gimple_in_ssa_p (cfun))
2694 {
2695 get_var_ann (var);
2696 add_referenced_var (var);
2697 }
2698
2699 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2700 DECL_STRUCT_FUNCTION (caller)->local_decls
2701 = tree_cons (NULL_TREE, var,
2702 DECL_STRUCT_FUNCTION (caller)->local_decls);
2703
2704 /* Do not have the rest of GCC warn about this variable as it should
2705 not be visible to the user. */
2706 TREE_NO_WARNING (var) = 1;
2707
2708 declare_inline_vars (id->block, var);
2709
2710 /* Build the use expr. If the return type of the function was
2711 promoted, convert it back to the expected type. */
2712 use = var;
2713 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2714 use = fold_convert (caller_type, var);
2715
2716 STRIP_USELESS_TYPE_CONVERSION (use);
2717
2718 if (DECL_BY_REFERENCE (result))
2719 {
2720 TREE_ADDRESSABLE (var) = 1;
2721 var = build_fold_addr_expr (var);
2722 }
2723
2724 done:
2725 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2726 way, when the RESULT_DECL is encountered, it will be
2727 automatically replaced by the VAR_DECL. */
2728 insert_decl_map (id, result, var);
2729
2730 /* Remember this so we can ignore it in remap_decls. */
2731 id->retvar = var;
2732
2733 return use;
2734 }
2735
2736 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2737 to a local label. */
2738
2739 static tree
2740 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2741 {
2742 tree node = *nodep;
2743 tree fn = (tree) fnp;
2744
2745 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2746 return node;
2747
2748 if (TYPE_P (node))
2749 *walk_subtrees = 0;
2750
2751 return NULL_TREE;
2752 }
2753
2754 /* Determine if the function can be copied. If so return NULL. If
2755 not return a string describing the reason for failure. */
2756
2757 static const char *
2758 copy_forbidden (struct function *fun, tree fndecl)
2759 {
2760 const char *reason = fun->cannot_be_copied_reason;
2761 tree step;
2762
2763 /* Only examine the function once. */
2764 if (fun->cannot_be_copied_set)
2765 return reason;
2766
2767 /* We cannot copy a function that receives a non-local goto
2768 because we cannot remap the destination label used in the
2769 function that is performing the non-local goto. */
2770 /* ??? Actually, this should be possible, if we work at it.
2771 No doubt there's just a handful of places that simply
2772 assume it doesn't happen and don't substitute properly. */
2773 if (fun->has_nonlocal_label)
2774 {
2775 reason = G_("function %q+F can never be copied "
2776 "because it receives a non-local goto");
2777 goto fail;
2778 }
2779
2780 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2781 {
2782 tree decl = TREE_VALUE (step);
2783
2784 if (TREE_CODE (decl) == VAR_DECL
2785 && TREE_STATIC (decl)
2786 && !DECL_EXTERNAL (decl)
2787 && DECL_INITIAL (decl)
2788 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2789 has_label_address_in_static_1,
2790 fndecl))
2791 {
2792 reason = G_("function %q+F can never be copied because it saves "
2793 "address of local label in a static variable");
2794 goto fail;
2795 }
2796 }
2797
2798 fail:
2799 fun->cannot_be_copied_reason = reason;
2800 fun->cannot_be_copied_set = true;
2801 return reason;
2802 }
2803
2804
2805 static const char *inline_forbidden_reason;
2806
2807 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2808 iff a function cannot be inlined. Also sets the reason why. */
2809
2810 static tree
2811 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2812 struct walk_stmt_info *wip)
2813 {
2814 tree fn = (tree) wip->info;
2815 tree t;
2816 gimple stmt = gsi_stmt (*gsi);
2817
2818 switch (gimple_code (stmt))
2819 {
2820 case GIMPLE_CALL:
2821 /* Refuse to inline an alloca call unless the user explicitly forced it,
2822 as this may change the program's memory overhead drastically when the
2823 function using alloca is called in a loop. In the GCC present in
2824 SPEC2000, inlining into schedule_block caused it to require 2GB of
2825 RAM instead of 256MB. */
2826 if (gimple_alloca_call_p (stmt)
2827 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2828 {
2829 inline_forbidden_reason
2830 = G_("function %q+F can never be inlined because it uses "
2831 "alloca (override using the always_inline attribute)");
2832 *handled_ops_p = true;
2833 return fn;
2834 }
2835
2836 t = gimple_call_fndecl (stmt);
2837 if (t == NULL_TREE)
2838 break;
2839
2840 /* We cannot inline functions that call setjmp. */
2841 if (setjmp_call_p (t))
2842 {
2843 inline_forbidden_reason
2844 = G_("function %q+F can never be inlined because it uses setjmp");
2845 *handled_ops_p = true;
2846 return t;
2847 }
2848
2849 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2850 switch (DECL_FUNCTION_CODE (t))
2851 {
2852 /* We cannot inline functions that take a variable number of
2853 arguments. */
2854 case BUILT_IN_VA_START:
2855 case BUILT_IN_NEXT_ARG:
2856 case BUILT_IN_VA_END:
2857 inline_forbidden_reason
2858 = G_("function %q+F can never be inlined because it "
2859 "uses variable argument lists");
2860 *handled_ops_p = true;
2861 return t;
2862
2863 case BUILT_IN_LONGJMP:
2864 /* We can't inline functions that call __builtin_longjmp at
2865 all. The non-local goto machinery really requires the
2866 destination be in a different function. If we allow the
2867 function calling __builtin_longjmp to be inlined into the
2868 function calling __builtin_setjmp, Things will Go Awry. */
2869 inline_forbidden_reason
2870 = G_("function %q+F can never be inlined because "
2871 "it uses setjmp-longjmp exception handling");
2872 *handled_ops_p = true;
2873 return t;
2874
2875 case BUILT_IN_NONLOCAL_GOTO:
2876 /* Similarly. */
2877 inline_forbidden_reason
2878 = G_("function %q+F can never be inlined because "
2879 "it uses non-local goto");
2880 *handled_ops_p = true;
2881 return t;
2882
2883 case BUILT_IN_RETURN:
2884 case BUILT_IN_APPLY_ARGS:
2885 /* If a __builtin_apply_args caller would be inlined,
2886 it would be saving arguments of the function it has
2887 been inlined into. Similarly, __builtin_return would
2888 return from the function the call has been inlined into. */
2889 inline_forbidden_reason
2890 = G_("function %q+F can never be inlined because "
2891 "it uses __builtin_return or __builtin_apply_args");
2892 *handled_ops_p = true;
2893 return t;
2894
2895 default:
2896 break;
2897 }
2898 break;
2899
2900 case GIMPLE_GOTO:
2901 t = gimple_goto_dest (stmt);
2902
2903 /* We will not inline a function which uses computed goto. The
2904 addresses of its local labels, which may be tucked into
2905 global storage, are of course not constant across
2906 instantiations, which causes unexpected behavior. */
2907 if (TREE_CODE (t) != LABEL_DECL)
2908 {
2909 inline_forbidden_reason
2910 = G_("function %q+F can never be inlined "
2911 "because it contains a computed goto");
2912 *handled_ops_p = true;
2913 return t;
2914 }
2915 break;
2916
2917 default:
2918 break;
2919 }
2920
2921 *handled_ops_p = false;
2922 return NULL_TREE;
2923 }
2924
2925 /* Return true if FNDECL is a function that cannot be inlined into
2926 another one. */
2927
2928 static bool
2929 inline_forbidden_p (tree fndecl)
2930 {
2931 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2932 struct walk_stmt_info wi;
2933 struct pointer_set_t *visited_nodes;
2934 basic_block bb;
2935 bool forbidden_p = false;
2936
2937 /* First check for shared reasons not to copy the code. */
2938 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2939 if (inline_forbidden_reason != NULL)
2940 return true;
2941
2942 /* Next, walk the statements of the function looking for
2943 constructs we can't handle, or that are non-optimal for inlining. */
2944 visited_nodes = pointer_set_create ();
2945 memset (&wi, 0, sizeof (wi));
2946 wi.info = (void *) fndecl;
2947 wi.pset = visited_nodes;
2948
2949 FOR_EACH_BB_FN (bb, fun)
2950 {
2951 gimple ret;
2952 gimple_seq seq = bb_seq (bb);
2953 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2954 forbidden_p = (ret != NULL);
2955 if (forbidden_p)
2956 break;
2957 }
2958
2959 pointer_set_destroy (visited_nodes);
2960 return forbidden_p;
2961 }
2962
2963 /* Returns nonzero if FN is a function that does not have any
2964 fundamental inline blocking properties. */
2965
2966 bool
2967 tree_inlinable_function_p (tree fn)
2968 {
2969 bool inlinable = true;
2970 bool do_warning;
2971 tree always_inline;
2972
2973 /* If we've already decided this function shouldn't be inlined,
2974 there's no need to check again. */
2975 if (DECL_UNINLINABLE (fn))
2976 return false;
2977
2978 /* We only warn for functions declared `inline' by the user. */
2979 do_warning = (warn_inline
2980 && DECL_DECLARED_INLINE_P (fn)
2981 && !DECL_NO_INLINE_WARNING_P (fn)
2982 && !DECL_IN_SYSTEM_HEADER (fn));
2983
2984 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2985
2986 if (flag_no_inline
2987 && always_inline == NULL)
2988 {
2989 if (do_warning)
2990 warning (OPT_Winline, "function %q+F can never be inlined because it "
2991 "is suppressed using -fno-inline", fn);
2992 inlinable = false;
2993 }
2994
2995 /* Don't auto-inline anything that might not be bound within
2996 this unit of translation. */
2997 else if (!DECL_DECLARED_INLINE_P (fn)
2998 && DECL_REPLACEABLE_P (fn))
2999 inlinable = false;
3000
3001 else if (!function_attribute_inlinable_p (fn))
3002 {
3003 if (do_warning)
3004 warning (OPT_Winline, "function %q+F can never be inlined because it "
3005 "uses attributes conflicting with inlining", fn);
3006 inlinable = false;
3007 }
3008
3009 else if (inline_forbidden_p (fn))
3010 {
3011 /* See if we should warn about uninlinable functions. Previously,
3012 some of these warnings would be issued while trying to expand
3013 the function inline, but that would cause multiple warnings
3014 about functions that would for example call alloca. But since
3015 this is a property of the function, just one warning is enough.
3016 As a bonus we can now give more details about the reason why a
3017 function is not inlinable. */
3018 if (always_inline)
3019 sorry (inline_forbidden_reason, fn);
3020 else if (do_warning)
3021 warning (OPT_Winline, inline_forbidden_reason, fn);
3022
3023 inlinable = false;
3024 }
3025
3026 /* Squirrel away the result so that we don't have to check again. */
3027 DECL_UNINLINABLE (fn) = !inlinable;
3028
3029 return inlinable;
3030 }
3031
3032 /* Estimate the cost of a memory move. Use the machine-dependent
3033 word size and take a possible memcpy call into account. */
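/* Example (assuming MOVE_MAX_PIECES is 8 and MOVE_RATIO yields 4; both are
   target dependent): moving an 8-byte struct costs 1, a 24-byte struct
   costs 3, and anything larger than 32 bytes is charged 4, the assumed
   cost of a memcpy call with its 3 arguments.  */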
3034
3035 int
3036 estimate_move_cost (tree type)
3037 {
3038 HOST_WIDE_INT size;
3039
3040 gcc_assert (!VOID_TYPE_P (type));
3041
3042 size = int_size_in_bytes (type);
3043
3044 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3045 /* Cost of a memcpy call, 3 arguments and the call. */
3046 return 4;
3047 else
3048 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3049 }
3050
3051 /* Returns the cost of operation CODE, according to WEIGHTS. */
3052
3053 static int
3054 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3055 tree op1 ATTRIBUTE_UNUSED, tree op2)
3056 {
3057 switch (code)
3058 {
3059 /* These are "free" conversions, or their presumed cost
3060 is folded into other operations. */
3061 case RANGE_EXPR:
3062 CASE_CONVERT:
3063 case COMPLEX_EXPR:
3064 case PAREN_EXPR:
3065 return 0;
3066
3067 /* Assign cost of 1 to usual operations.
3068 ??? We may consider mapping RTL costs to this. */
3069 case COND_EXPR:
3070 case VEC_COND_EXPR:
3071
3072 case PLUS_EXPR:
3073 case POINTER_PLUS_EXPR:
3074 case MINUS_EXPR:
3075 case MULT_EXPR:
3076
3077 case ADDR_SPACE_CONVERT_EXPR:
3078 case FIXED_CONVERT_EXPR:
3079 case FIX_TRUNC_EXPR:
3080
3081 case NEGATE_EXPR:
3082 case FLOAT_EXPR:
3083 case MIN_EXPR:
3084 case MAX_EXPR:
3085 case ABS_EXPR:
3086
3087 case LSHIFT_EXPR:
3088 case RSHIFT_EXPR:
3089 case LROTATE_EXPR:
3090 case RROTATE_EXPR:
3091 case VEC_LSHIFT_EXPR:
3092 case VEC_RSHIFT_EXPR:
3093
3094 case BIT_IOR_EXPR:
3095 case BIT_XOR_EXPR:
3096 case BIT_AND_EXPR:
3097 case BIT_NOT_EXPR:
3098
3099 case TRUTH_ANDIF_EXPR:
3100 case TRUTH_ORIF_EXPR:
3101 case TRUTH_AND_EXPR:
3102 case TRUTH_OR_EXPR:
3103 case TRUTH_XOR_EXPR:
3104 case TRUTH_NOT_EXPR:
3105
3106 case LT_EXPR:
3107 case LE_EXPR:
3108 case GT_EXPR:
3109 case GE_EXPR:
3110 case EQ_EXPR:
3111 case NE_EXPR:
3112 case ORDERED_EXPR:
3113 case UNORDERED_EXPR:
3114
3115 case UNLT_EXPR:
3116 case UNLE_EXPR:
3117 case UNGT_EXPR:
3118 case UNGE_EXPR:
3119 case UNEQ_EXPR:
3120 case LTGT_EXPR:
3121
3122 case CONJ_EXPR:
3123
3124 case PREDECREMENT_EXPR:
3125 case PREINCREMENT_EXPR:
3126 case POSTDECREMENT_EXPR:
3127 case POSTINCREMENT_EXPR:
3128
3129 case REALIGN_LOAD_EXPR:
3130
3131 case REDUC_MAX_EXPR:
3132 case REDUC_MIN_EXPR:
3133 case REDUC_PLUS_EXPR:
3134 case WIDEN_SUM_EXPR:
3135 case WIDEN_MULT_EXPR:
3136 case DOT_PROD_EXPR:
3137
3138 case VEC_WIDEN_MULT_HI_EXPR:
3139 case VEC_WIDEN_MULT_LO_EXPR:
3140 case VEC_UNPACK_HI_EXPR:
3141 case VEC_UNPACK_LO_EXPR:
3142 case VEC_UNPACK_FLOAT_HI_EXPR:
3143 case VEC_UNPACK_FLOAT_LO_EXPR:
3144 case VEC_PACK_TRUNC_EXPR:
3145 case VEC_PACK_SAT_EXPR:
3146 case VEC_PACK_FIX_TRUNC_EXPR:
3147 case VEC_EXTRACT_EVEN_EXPR:
3148 case VEC_EXTRACT_ODD_EXPR:
3149 case VEC_INTERLEAVE_HIGH_EXPR:
3150 case VEC_INTERLEAVE_LOW_EXPR:
3151
3152 return 1;
3153
3154 /* A few special cases of expensive operations. This is useful
3155 to avoid inlining functions having too many of these. */
3156 case TRUNC_DIV_EXPR:
3157 case CEIL_DIV_EXPR:
3158 case FLOOR_DIV_EXPR:
3159 case ROUND_DIV_EXPR:
3160 case EXACT_DIV_EXPR:
3161 case TRUNC_MOD_EXPR:
3162 case CEIL_MOD_EXPR:
3163 case FLOOR_MOD_EXPR:
3164 case ROUND_MOD_EXPR:
3165 case RDIV_EXPR:
3166 if (TREE_CODE (op2) != INTEGER_CST)
3167 return weights->div_mod_cost;
3168 return 1;
3169
3170 default:
3171 /* We expect a copy assignment with no operator. */
3172 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3173 return 0;
3174 }
3175 }
3176
3177
3178 /* Estimate number of instructions that will be created by expanding
3179 the statements in the statement sequence STMTS.
3180 WEIGHTS contains weights attributed to various constructs. */
3181
3182 static
3183 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3184 {
3185 int cost;
3186 gimple_stmt_iterator gsi;
3187
3188 cost = 0;
3189 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3190 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3191
3192 return cost;
3193 }
3194
3195
3196 /* Estimate number of instructions that will be created by expanding STMT.
3197 WEIGHTS contains weights attributed to various constructs. */
3198
3199 int
3200 estimate_num_insns (gimple stmt, eni_weights *weights)
3201 {
3202 unsigned cost, i;
3203 enum gimple_code code = gimple_code (stmt);
3204 tree lhs;
3205 tree rhs;
3206
3207 switch (code)
3208 {
3209 case GIMPLE_ASSIGN:
3210 /* Try to estimate the cost of assignments. We have two cases to
3211 deal with:
3212 1) Simple assignments to registers;
3213 2) Stores to things that must live in memory. This includes
3214 "normal" stores to scalars, but also assignments of large
3215 structures, or constructors of big arrays;
3216
3217 Let us look at the first two cases, assuming we have "a = b + C":
3218 <GIMPLE_ASSIGN <var_decl "a">
3219 <plus_expr <var_decl "b"> <constant C>>
3220 If "a" is a GIMPLE register, the assignment to it is free on almost
3221 any target, because "a" usually ends up in a real register. Hence
3222 the only cost of this expression comes from the PLUS_EXPR, and we
3223 can ignore the GIMPLE_ASSIGN.
3224 If "a" is not a GIMPLE register, the assignment to "a" will most
3225 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3226 of moving something into "a", which we compute using the function
3227 estimate_move_cost. */
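      /* Quick illustration (example statements only): for "a_1 = b_2 + 1"
         where a_1 is a gimple register the total is just the cost of the
         PLUS_EXPR, i.e. 1; for "*p_3 = s" where "s" is an aggregate, the
         cost is estimate_move_cost of the stored type plus
         estimate_move_cost of the loaded aggregate, with no operator cost
         since a plain copy has class GIMPLE_SINGLE_RHS.  */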
3228 lhs = gimple_assign_lhs (stmt);
3229 rhs = gimple_assign_rhs1 (stmt);
3230
3231 if (is_gimple_reg (lhs))
3232 cost = 0;
3233 else
3234 cost = estimate_move_cost (TREE_TYPE (lhs));
3235
3236 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3237 cost += estimate_move_cost (TREE_TYPE (rhs));
3238
3239 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3240 gimple_assign_rhs1 (stmt),
3241 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3242 == GIMPLE_BINARY_RHS
3243 ? gimple_assign_rhs2 (stmt) : NULL);
3244 break;
3245
3246 case GIMPLE_COND:
3247 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3248 gimple_op (stmt, 0),
3249 gimple_op (stmt, 1));
3250 break;
3251
3252 case GIMPLE_SWITCH:
3253 /* Take into account the cost of the switch + guess 2 conditional jumps
3254 for each case label.
3255
3256 TODO: once the switch expansion logic is sufficiently separated, we can
3257 do a better job of estimating the cost of the switch. */
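      /* For example, a switch with 8 case labels is charged 8 * 2 == 16
         size units but only floor_log2 (8) * 2 == 6 time units, roughly
         modelling expansion into a balanced decision tree.  */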
3258 if (weights->time_based)
3259 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3260 else
3261 cost = gimple_switch_num_labels (stmt) * 2;
3262 break;
3263
3264 case GIMPLE_CALL:
3265 {
3266 tree decl = gimple_call_fndecl (stmt);
3267 tree addr = gimple_call_fn (stmt);
3268 tree funtype = TREE_TYPE (addr);
3269
3270 if (POINTER_TYPE_P (funtype))
3271 funtype = TREE_TYPE (funtype);
3272
3273 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3274 cost = weights->target_builtin_call_cost;
3275 else
3276 cost = weights->call_cost;
3277
3278 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3279 switch (DECL_FUNCTION_CODE (decl))
3280 {
3281 /* Builtins that expand to constants. */
3282 case BUILT_IN_CONSTANT_P:
3283 case BUILT_IN_EXPECT:
3284 case BUILT_IN_OBJECT_SIZE:
3285 case BUILT_IN_UNREACHABLE:
3286 /* Simple register moves or loads from stack. */
3287 case BUILT_IN_RETURN_ADDRESS:
3288 case BUILT_IN_EXTRACT_RETURN_ADDR:
3289 case BUILT_IN_FROB_RETURN_ADDR:
3290 case BUILT_IN_RETURN:
3291 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3292 case BUILT_IN_FRAME_ADDRESS:
3293 case BUILT_IN_VA_END:
3294 case BUILT_IN_STACK_SAVE:
3295 case BUILT_IN_STACK_RESTORE:
3296 /* Exception state returns or moves registers around. */
3297 case BUILT_IN_EH_FILTER:
3298 case BUILT_IN_EH_POINTER:
3299 case BUILT_IN_EH_COPY_VALUES:
3300 return 0;
3301
3302 /* Builtins that are not expensive (that is, they are most probably
3303 expanded inline into reasonably simple code). */
3304 case BUILT_IN_ABS:
3305 case BUILT_IN_ALLOCA:
3306 case BUILT_IN_BSWAP32:
3307 case BUILT_IN_BSWAP64:
3308 case BUILT_IN_CLZ:
3309 case BUILT_IN_CLZIMAX:
3310 case BUILT_IN_CLZL:
3311 case BUILT_IN_CLZLL:
3312 case BUILT_IN_CTZ:
3313 case BUILT_IN_CTZIMAX:
3314 case BUILT_IN_CTZL:
3315 case BUILT_IN_CTZLL:
3316 case BUILT_IN_FFS:
3317 case BUILT_IN_FFSIMAX:
3318 case BUILT_IN_FFSL:
3319 case BUILT_IN_FFSLL:
3320 case BUILT_IN_IMAXABS:
3321 case BUILT_IN_FINITE:
3322 case BUILT_IN_FINITEF:
3323 case BUILT_IN_FINITEL:
3324 case BUILT_IN_FINITED32:
3325 case BUILT_IN_FINITED64:
3326 case BUILT_IN_FINITED128:
3327 case BUILT_IN_FPCLASSIFY:
3328 case BUILT_IN_ISFINITE:
3329 case BUILT_IN_ISINF_SIGN:
3330 case BUILT_IN_ISINF:
3331 case BUILT_IN_ISINFF:
3332 case BUILT_IN_ISINFL:
3333 case BUILT_IN_ISINFD32:
3334 case BUILT_IN_ISINFD64:
3335 case BUILT_IN_ISINFD128:
3336 case BUILT_IN_ISNAN:
3337 case BUILT_IN_ISNANF:
3338 case BUILT_IN_ISNANL:
3339 case BUILT_IN_ISNAND32:
3340 case BUILT_IN_ISNAND64:
3341 case BUILT_IN_ISNAND128:
3342 case BUILT_IN_ISNORMAL:
3343 case BUILT_IN_ISGREATER:
3344 case BUILT_IN_ISGREATEREQUAL:
3345 case BUILT_IN_ISLESS:
3346 case BUILT_IN_ISLESSEQUAL:
3347 case BUILT_IN_ISLESSGREATER:
3348 case BUILT_IN_ISUNORDERED:
3349 case BUILT_IN_VA_ARG_PACK:
3350 case BUILT_IN_VA_ARG_PACK_LEN:
3351 case BUILT_IN_VA_COPY:
3352 case BUILT_IN_TRAP:
3353 case BUILT_IN_SAVEREGS:
3354 case BUILT_IN_POPCOUNTL:
3355 case BUILT_IN_POPCOUNTLL:
3356 case BUILT_IN_POPCOUNTIMAX:
3357 case BUILT_IN_POPCOUNT:
3358 case BUILT_IN_PARITYL:
3359 case BUILT_IN_PARITYLL:
3360 case BUILT_IN_PARITYIMAX:
3361 case BUILT_IN_PARITY:
3362 case BUILT_IN_LABS:
3363 case BUILT_IN_LLABS:
3364 case BUILT_IN_PREFETCH:
3365 cost = weights->target_builtin_call_cost;
3366 break;
3367
3368 default:
3369 break;
3370 }
3371
3372 if (decl)
3373 funtype = TREE_TYPE (decl);
3374
3375 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3376 cost += estimate_move_cost (TREE_TYPE (funtype));
3377 /* Our cost must be kept in sync with
3378 cgraph_estimate_size_after_inlining, which uses the function
3379 declaration to figure out the arguments. */
3380 if (decl && DECL_ARGUMENTS (decl))
3381 {
3382 tree arg;
3383 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3384 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3385 cost += estimate_move_cost (TREE_TYPE (arg));
3386 }
3387 else if (funtype && prototype_p (funtype))
3388 {
3389 tree t;
3390 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3391 t = TREE_CHAIN (t))
3392 if (!VOID_TYPE_P (TREE_VALUE (t)))
3393 cost += estimate_move_cost (TREE_VALUE (t));
3394 }
3395 else
3396 {
3397 for (i = 0; i < gimple_call_num_args (stmt); i++)
3398 {
3399 tree arg = gimple_call_arg (stmt, i);
3400 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3401 cost += estimate_move_cost (TREE_TYPE (arg));
3402 }
3403 }
3404
3405 break;
3406 }
3407
3408 case GIMPLE_GOTO:
3409 case GIMPLE_LABEL:
3410 case GIMPLE_NOP:
3411 case GIMPLE_PHI:
3412 case GIMPLE_RETURN:
3413 case GIMPLE_PREDICT:
3414 case GIMPLE_DEBUG:
3415 return 0;
3416
3417 case GIMPLE_ASM:
3418 return asm_str_count (gimple_asm_string (stmt));
3419
3420 case GIMPLE_RESX:
3421 /* This is either going to be an external function call with one
3422 argument, or two register copy statements plus a goto. */
3423 return 2;
3424
3425 case GIMPLE_EH_DISPATCH:
3426 /* ??? This is going to turn into a switch statement. Ideally
3427 we'd have a look at the eh region and estimate the number of
3428 edges involved. */
3429 return 10;
3430
3431 case GIMPLE_BIND:
3432 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3433
3434 case GIMPLE_EH_FILTER:
3435 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3436
3437 case GIMPLE_CATCH:
3438 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3439
3440 case GIMPLE_TRY:
3441 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3442 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3443
3444 /* OpenMP directives are generally very expensive. */
3445
3446 case GIMPLE_OMP_RETURN:
3447 case GIMPLE_OMP_SECTIONS_SWITCH:
3448 case GIMPLE_OMP_ATOMIC_STORE:
3449 case GIMPLE_OMP_CONTINUE:
3450 /* ...except these, which are cheap. */
3451 return 0;
3452
3453 case GIMPLE_OMP_ATOMIC_LOAD:
3454 return weights->omp_cost;
3455
3456 case GIMPLE_OMP_FOR:
3457 return (weights->omp_cost
3458 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3459 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3460
3461 case GIMPLE_OMP_PARALLEL:
3462 case GIMPLE_OMP_TASK:
3463 case GIMPLE_OMP_CRITICAL:
3464 case GIMPLE_OMP_MASTER:
3465 case GIMPLE_OMP_ORDERED:
3466 case GIMPLE_OMP_SECTION:
3467 case GIMPLE_OMP_SECTIONS:
3468 case GIMPLE_OMP_SINGLE:
3469 return (weights->omp_cost
3470 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3471
3472 default:
3473 gcc_unreachable ();
3474 }
3475
3476 return cost;
3477 }
3478
3479 /* Estimate number of instructions that will be created by expanding
3480 function FNDECL. WEIGHTS contains weights attributed to various
3481 constructs. */
3482
3483 int
3484 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3485 {
3486 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3487 gimple_stmt_iterator bsi;
3488 basic_block bb;
3489 int n = 0;
3490
3491 gcc_assert (my_function && my_function->cfg);
3492 FOR_EACH_BB_FN (bb, my_function)
3493 {
3494 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3495 n += estimate_num_insns (gsi_stmt (bsi), weights);
3496 }
3497
3498 return n;
3499 }
3500
3501
3502 /* Initializes weights used by estimate_num_insns. */
3503
3504 void
3505 init_inline_once (void)
3506 {
3507 eni_size_weights.call_cost = 1;
3508 eni_size_weights.target_builtin_call_cost = 1;
3509 eni_size_weights.div_mod_cost = 1;
3510 eni_size_weights.omp_cost = 40;
3511 eni_size_weights.time_based = false;
3512
3513 /* Estimating time for call is difficult, since we have no idea what the
3514 called function does. In the current uses of eni_time_weights,
3515 underestimating the cost does less harm than overestimating it, so
3516 we choose a rather small value here. */
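  /* With the size weights above and the time weights below, for example, a
     call to a void function taking no arguments counts as 1 size unit but
     10 time units, and a division by a non-constant value likewise counts
     1 unit of size and 10 of time.  */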
3517 eni_time_weights.call_cost = 10;
3518 eni_time_weights.target_builtin_call_cost = 10;
3519 eni_time_weights.div_mod_cost = 10;
3520 eni_time_weights.omp_cost = 40;
3521 eni_time_weights.time_based = true;
3522 }
3523
3524 /* Estimate the number of instructions in a gimple_seq. */
3525
3526 int
3527 count_insns_seq (gimple_seq seq, eni_weights *weights)
3528 {
3529 gimple_stmt_iterator gsi;
3530 int n = 0;
3531 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3532 n += estimate_num_insns (gsi_stmt (gsi), weights);
3533
3534 return n;
3535 }
3536
3537
3538 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3539
3540 static void
3541 prepend_lexical_block (tree current_block, tree new_block)
3542 {
3543 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3544 BLOCK_SUBBLOCKS (current_block) = new_block;
3545 BLOCK_SUPERCONTEXT (new_block) = current_block;
3546 }
3547
3548 /* Fetch callee declaration from the call graph edge going from NODE and
3549 associated with the STMT call statement. Return NULL_TREE if not found. */
3550 static tree
3551 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3552 {
3553 struct cgraph_edge *cs;
3554
3555 cs = cgraph_edge (node, stmt);
3556 if (cs && !cs->indirect_unknown_callee)
3557 return cs->callee->decl;
3558
3559 return NULL_TREE;
3560 }
3561
3562 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3563
3564 static bool
3565 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3566 {
3567 tree use_retvar;
3568 tree fn;
3569 struct pointer_map_t *st, *dst;
3570 tree return_slot;
3571 tree modify_dest;
3572 location_t saved_location;
3573 struct cgraph_edge *cg_edge;
3574 cgraph_inline_failed_t reason;
3575 basic_block return_block;
3576 edge e;
3577 gimple_stmt_iterator gsi, stmt_gsi;
3578 bool successfully_inlined = FALSE;
3579 bool purge_dead_abnormal_edges;
3580 tree t_step;
3581 tree var;
3582
3583 /* Set input_location here so we get the right instantiation context
3584 if we call instantiate_decl from inlinable_function_p. */
3585 saved_location = input_location;
3586 if (gimple_has_location (stmt))
3587 input_location = gimple_location (stmt);
3588
3589 /* From here on, we're only interested in CALL_EXPRs. */
3590 if (gimple_code (stmt) != GIMPLE_CALL)
3591 goto egress;
3592
3593 /* First, see if we can figure out what function is being called.
3594 If we cannot, then there is no hope of inlining the function. */
3595 fn = gimple_call_fndecl (stmt);
3596 if (!fn)
3597 {
3598 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3599 if (!fn)
3600 goto egress;
3601 }
3602
3603 /* Turn forward declarations into real ones. */
3604 fn = cgraph_node (fn)->decl;
3605
3606 /* If FN is a declaration of a function in a nested scope that was
3607 globally declared inline, we don't set its DECL_INITIAL.
3608 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3609 C++ front-end uses it for cdtors to refer to their internal
3610 declarations, which are not real functions. Fortunately those
3611 don't have trees to be saved, so we can tell by checking their
3612 gimple_body. */
3613 if (!DECL_INITIAL (fn)
3614 && DECL_ABSTRACT_ORIGIN (fn)
3615 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3616 fn = DECL_ABSTRACT_ORIGIN (fn);
3617
3618 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3619 Remove this check once that is fixed. */
3620 if (!id->dst_node->analyzed)
3621 goto egress;
3622
3623 cg_edge = cgraph_edge (id->dst_node, stmt);
3624
3625 /* Don't inline functions with different EH personalities. */
3626 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3627 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3628 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3629 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3630 goto egress;
3631
3632 /* Don't try to inline functions that are not well-suited to
3633 inlining. */
3634 if (!cgraph_inline_p (cg_edge, &reason))
3635 {
3636 /* If this call was originally indirect, we do not want to emit any
3637 inlining related warnings or sorry messages because there are no
3638 guarantees regarding those. */
3639 if (cg_edge->indirect_inlining_edge)
3640 goto egress;
3641
3642 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3643 /* Avoid warnings during early inline pass. */
3644 && cgraph_global_info_ready)
3645 {
3646 sorry ("inlining failed in call to %q+F: %s", fn,
3647 cgraph_inline_failed_string (reason));
3648 sorry ("called from here");
3649 }
3650 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3651 && !DECL_IN_SYSTEM_HEADER (fn)
3652 && reason != CIF_UNSPECIFIED
3653 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3654 /* Avoid warnings during early inline pass. */
3655 && cgraph_global_info_ready)
3656 {
3657 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3658 fn, cgraph_inline_failed_string (reason));
3659 warning (OPT_Winline, "called from here");
3660 }
3661 goto egress;
3662 }
3663 fn = cg_edge->callee->decl;
3664
3665 #ifdef ENABLE_CHECKING
3666 if (cg_edge->callee->decl != id->dst_node->decl)
3667 verify_cgraph_node (cg_edge->callee);
3668 #endif
3669
3670 /* We will be inlining this callee. */
3671 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3672
3673 /* Update the caller's EH personality. */
3674 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3675 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3676 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3677
3678 /* Split the block holding the GIMPLE_CALL. */
3679 e = split_block (bb, stmt);
3680 bb = e->src;
3681 return_block = e->dest;
3682 remove_edge (e);
3683
3684 /* split_block splits after the statement; work around this by
3685 moving the call into the second block manually. Not pretty,
3686 but seems easier than doing the CFG manipulation by hand
3687 when the GIMPLE_CALL is the last statement of BB. */
3688 stmt_gsi = gsi_last_bb (bb);
3689 gsi_remove (&stmt_gsi, false);
3690
3691 /* If the GIMPLE_CALL was the last statement of BB, it may have
3692 been the source of abnormal edges. In this case, schedule
3693 the removal of dead abnormal edges. */
3694 gsi = gsi_start_bb (return_block);
3695 if (gsi_end_p (gsi))
3696 {
3697 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3698 purge_dead_abnormal_edges = true;
3699 }
3700 else
3701 {
3702 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3703 purge_dead_abnormal_edges = false;
3704 }
3705
3706 stmt_gsi = gsi_start_bb (return_block);
3707
3708 /* Build a block containing code to initialize the arguments, the
3709 actual inline expansion of the body, and a label for the return
3710 statements within the function to jump to. The type of the
3711 statement expression is the return type of the function call. */
3712 id->block = make_node (BLOCK);
3713 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3714 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3715 prepend_lexical_block (gimple_block (stmt), id->block);
3716
3717 /* Local declarations will be replaced by their equivalents in this
3718 map. */
3719 st = id->decl_map;
3720 id->decl_map = pointer_map_create ();
3721 dst = id->debug_map;
3722 id->debug_map = NULL;
3723
3724 /* Record the function we are about to inline. */
3725 id->src_fn = fn;
3726 id->src_node = cg_edge->callee;
3727 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3728 id->gimple_call = stmt;
3729
3730 gcc_assert (!id->src_cfun->after_inlining);
3731
3732 id->entry_bb = bb;
3733 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3734 {
3735 gimple_stmt_iterator si = gsi_last_bb (bb);
3736 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3737 NOT_TAKEN),
3738 GSI_NEW_STMT);
3739 }
3740 initialize_inlined_parameters (id, stmt, fn, bb);
3741
3742 if (DECL_INITIAL (fn))
3743 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3744
3745 /* Return statements in the function body will be replaced by jumps
3746 to the RET_LABEL. */
3747 gcc_assert (DECL_INITIAL (fn));
3748 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3749
3750 /* Find the LHS to which the result of this call is assigned. */
3751 return_slot = NULL;
3752 if (gimple_call_lhs (stmt))
3753 {
3754 modify_dest = gimple_call_lhs (stmt);
3755
3756 /* The function which we are inlining might not return a value,
3757 in which case we should issue a warning that the function
3758 does not return a value. In that case the optimizers will
3759 see that the variable to which the value is assigned was not
3760 initialized. We do not want to issue a warning about that
3761 uninitialized variable. */
3762 if (DECL_P (modify_dest))
3763 TREE_NO_WARNING (modify_dest) = 1;
3764
3765 if (gimple_call_return_slot_opt_p (stmt))
3766 {
3767 return_slot = modify_dest;
3768 modify_dest = NULL;
3769 }
3770 }
3771 else
3772 modify_dest = NULL;
3773
3774 /* If we are inlining a call to the C++ operator new, we don't want
3775 to use type based alias analysis on the return value. Otherwise
3776 we may get confused if the compiler sees that the inlined new
3777 function returns a pointer which was just deleted. See bug
3778 33407. */
3779 if (DECL_IS_OPERATOR_NEW (fn))
3780 {
3781 return_slot = NULL;
3782 modify_dest = NULL;
3783 }
3784
3785 /* Declare the return variable for the function. */
3786 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3787
3788 /* Add local vars in this inlined callee to caller. */
3789 t_step = id->src_cfun->local_decls;
3790 for (; t_step; t_step = TREE_CHAIN (t_step))
3791 {
3792 var = TREE_VALUE (t_step);
3793 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3794 {
3795 if (var_ann (var) && add_referenced_var (var))
3796 cfun->local_decls = tree_cons (NULL_TREE, var,
3797 cfun->local_decls);
3798 }
3799 else if (!can_be_nonlocal (var, id))
3800 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3801 cfun->local_decls);
3802 }
3803
3804 if (dump_file && (dump_flags & TDF_DETAILS))
3805 {
3806 fprintf (dump_file, "Inlining ");
3807 print_generic_expr (dump_file, id->src_fn, 0);
3808 fprintf (dump_file, " to ");
3809 print_generic_expr (dump_file, id->dst_fn, 0);
3810 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3811 }
3812
3813 /* This is it. Duplicate the callee body. Assume callee is
3814 pre-gimplified. Note that we must not alter the caller
3815 function in any way before this point, as this CALL_EXPR may be
3816 a self-referential call; if we're calling ourselves, we need to
3817 duplicate our body before altering anything. */
3818 copy_body (id, bb->count,
3819 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3820 bb, return_block);
3821
3822 /* Reset the escaped solution. */
3823 if (cfun->gimple_df)
3824 pt_solution_reset (&cfun->gimple_df->escaped);
3825
3826 /* Clean up. */
3827 if (id->debug_map)
3828 {
3829 pointer_map_destroy (id->debug_map);
3830 id->debug_map = dst;
3831 }
3832 pointer_map_destroy (id->decl_map);
3833 id->decl_map = st;
3834
3835 /* Unlink the call's virtual operands before replacing it. */
3836 unlink_stmt_vdef (stmt);
3837
3838 /* If the inlined function returns a result that we care about,
3839 substitute the GIMPLE_CALL with an assignment of the return
3840 variable to the LHS of the call. That is, if STMT was
3841 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3842 if (use_retvar && gimple_call_lhs (stmt))
3843 {
3844 gimple old_stmt = stmt;
3845 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3846 gsi_replace (&stmt_gsi, stmt, false);
3847 if (gimple_in_ssa_p (cfun))
3848 mark_symbols_for_renaming (stmt);
3849 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3850 }
3851 else
3852 {
3853 /* Handle the case of inlining a function with no return
3854 statement, which causes the return value to become undefined. */
3855 if (gimple_call_lhs (stmt)
3856 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3857 {
3858 tree name = gimple_call_lhs (stmt);
3859 tree var = SSA_NAME_VAR (name);
3860 tree def = gimple_default_def (cfun, var);
3861
3862 if (def)
3863 {
3864 /* If the variable is used undefined, make this name
3865 undefined via a move. */
3866 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3867 gsi_replace (&stmt_gsi, stmt, true);
3868 }
3869 else
3870 {
3871 /* Otherwise make this variable undefined. */
3872 gsi_remove (&stmt_gsi, true);
3873 set_default_def (var, name);
3874 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3875 }
3876 }
3877 else
3878 gsi_remove (&stmt_gsi, true);
3879 }
3880
3881 if (purge_dead_abnormal_edges)
3882 gimple_purge_dead_abnormal_call_edges (return_block);
3883
3884 /* If the value of the new expression is ignored, that's OK. We
3885 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3886 the equivalent inlined version either. */
3887 if (is_gimple_assign (stmt))
3888 {
3889 gcc_assert (gimple_assign_single_p (stmt)
3890 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3891 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3892 }
3893
3894 /* Output the inlining info for this abstract function, since it has been
3895 inlined. If we don't do this now, we can lose the information about the
3896 variables in the function when the blocks get blown away as soon as we
3897 remove the cgraph node. */
3898 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3899
3900 /* Update callgraph if needed. */
3901 cgraph_remove_node (cg_edge->callee);
3902
3903 id->block = NULL_TREE;
3904 successfully_inlined = TRUE;
3905
3906 egress:
3907 input_location = saved_location;
3908 return successfully_inlined;
3909 }
3910
3911 /* Walk the statements in basic block BB and try to inline every
3912 GIMPLE_CALL found there. Return true as soon as one call has been
3913 successfully inlined, false if no call in BB could be inlined. */
3916
3917 static bool
3918 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3919 {
3920 gimple_stmt_iterator gsi;
3921
3922 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3923 {
3924 gimple stmt = gsi_stmt (gsi);
3925
3926 if (is_gimple_call (stmt)
3927 && expand_call_inline (bb, stmt, id))
3928 return true;
3929 }
3930
3931 return false;
3932 }
3933
3934
3935 /* Walk all basic blocks created after FIRST and try to fold every statement
3936 in the STATEMENTS pointer set. */
3937
3938 static void
3939 fold_marked_statements (int first, struct pointer_set_t *statements)
3940 {
3941 for (; first < n_basic_blocks; first++)
3942 if (BASIC_BLOCK (first))
3943 {
3944 gimple_stmt_iterator gsi;
3945
3946 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3947 !gsi_end_p (gsi);
3948 gsi_next (&gsi))
3949 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3950 {
3951 gimple old_stmt = gsi_stmt (gsi);
3952 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3953
3954 if (old_decl && DECL_BUILT_IN (old_decl))
3955 {
3956 /* Folding builtins can create multiple instructions;
3957 we need to look at all of them. */
3958 gimple_stmt_iterator i2 = gsi;
3959 gsi_prev (&i2);
3960 if (fold_stmt (&gsi))
3961 {
3962 gimple new_stmt;
3963 if (gsi_end_p (i2))
3964 i2 = gsi_start_bb (BASIC_BLOCK (first));
3965 else
3966 gsi_next (&i2);
3967 while (1)
3968 {
3969 new_stmt = gsi_stmt (i2);
3970 update_stmt (new_stmt);
3971 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3972 new_stmt);
3973
3974 if (new_stmt == gsi_stmt (gsi))
3975 {
3976 /* It is okay to check only the very last of
3977 these statements. If it is a throwing
3978 statement, nothing will change. If it isn't,
3979 this can remove EH edges. The only way this
3980 could be wrong is if some intermediate
3981 statement could throw while the last one
3982 cannot; then we would have to split the
3983 block, which we cannot do here and would
3984 lose anyway. And as builtins probably never
3985 throw, this is all moot anyway. */
3986 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3987 new_stmt))
3988 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3989 break;
3990 }
3991 gsi_next (&i2);
3992 }
3993 }
3994 }
3995 else if (fold_stmt (&gsi))
3996 {
3997 /* Re-read the statement from GSI as fold_stmt() may
3998 have changed it. */
3999 gimple new_stmt = gsi_stmt (gsi);
4000 update_stmt (new_stmt);
4001
4002 if (is_gimple_call (old_stmt)
4003 || is_gimple_call (new_stmt))
4004 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4005 new_stmt);
4006
4007 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4008 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4009 }
4010 }
4011 }
4012 }
4013
4014 /* Return true if BB has at least one abnormal outgoing edge. */
4015
4016 static inline bool
4017 has_abnormal_outgoing_edge_p (basic_block bb)
4018 {
4019 edge e;
4020 edge_iterator ei;
4021
4022 FOR_EACH_EDGE (e, ei, bb->succs)
4023 if (e->flags & EDGE_ABNORMAL)
4024 return true;
4025
4026 return false;
4027 }
4028
4029 /* Expand calls to inline functions in the body of FN. */
4030
4031 unsigned int
4032 optimize_inline_calls (tree fn)
4033 {
4034 copy_body_data id;
4035 basic_block bb;
4036 int last = n_basic_blocks;
4037 struct gimplify_ctx gctx;
4038
4039 /* There is no point in performing inlining if errors have already
4040 occurred -- and we might crash if we try to inline invalid
4041 code. */
4042 if (errorcount || sorrycount)
4043 return 0;
4044
4045 /* Clear out ID. */
4046 memset (&id, 0, sizeof (id));
4047
4048 id.src_node = id.dst_node = cgraph_node (fn);
4049 id.dst_fn = fn;
4050 /* Or any functions that aren't finished yet. */
4051 if (current_function_decl)
4052 id.dst_fn = current_function_decl;
4053
4054 id.copy_decl = copy_decl_maybe_to_var;
4055 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4056 id.transform_new_cfg = false;
4057 id.transform_return_to_modify = true;
4058 id.transform_lang_insert_block = NULL;
4059 id.statements_to_fold = pointer_set_create ();
4060
4061 push_gimplify_context (&gctx);
4062
4063 /* We make no attempts to keep dominance info up-to-date. */
4064 free_dominance_info (CDI_DOMINATORS);
4065 free_dominance_info (CDI_POST_DOMINATORS);
4066
4067 /* Register specific gimple functions. */
4068 gimple_register_cfg_hooks ();
4069
4070 /* Reach the trees by walking over the CFG, and note the
4071 enclosing basic-blocks in the call edges. */
4072 /* We walk the blocks going forward, because inlined function bodies
4073 will split id->current_basic_block, and the new blocks will
4074 follow it; we'll trudge through them, processing their CALL_EXPRs
4075 along the way. */
4076 FOR_EACH_BB (bb)
4077 gimple_expand_calls_inline (bb, &id);
4078
4079 pop_gimplify_context (NULL);
4080
4081 #ifdef ENABLE_CHECKING
4082 {
4083 struct cgraph_edge *e;
4084
4085 verify_cgraph_node (id.dst_node);
4086
4087 /* Double check that we inlined everything we are supposed to inline. */
4088 for (e = id.dst_node->callees; e; e = e->next_callee)
4089 gcc_assert (e->inline_failed);
4090 }
4091 #endif
4092
4093 /* Fold the statements before compacting/renumbering the basic blocks. */
4094 fold_marked_statements (last, id.statements_to_fold);
4095 pointer_set_destroy (id.statements_to_fold);
4096
4097 gcc_assert (!id.debug_stmts);
4098
4099 /* Renumber the (code) basic_blocks consecutively. */
4100 compact_blocks ();
4101 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4102 number_blocks (fn);
4103
4104 fold_cond_expr_cond ();
4105 delete_unreachable_blocks_update_callgraph (&id);
4106 #ifdef ENABLE_CHECKING
4107 verify_cgraph_node (id.dst_node);
4108 #endif
4109
4110 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4111 not possible yet - the IPA passes might make various functions non-throwing
4112 without bothering to proactively update the local EH info. This is done
4113 later in the fixup_cfg pass, which also executes the verification. */
4114 return (TODO_update_ssa
4115 | TODO_cleanup_cfg
4116 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4117 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4118 }
4119
4120 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4121
4122 tree
4123 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4124 {
4125 enum tree_code code = TREE_CODE (*tp);
4126 enum tree_code_class cl = TREE_CODE_CLASS (code);
4127
4128 /* We make copies of most nodes. */
4129 if (IS_EXPR_CODE_CLASS (cl)
4130 || code == TREE_LIST
4131 || code == TREE_VEC
4132 || code == TYPE_DECL
4133 || code == OMP_CLAUSE)
4134 {
4135 /* Because the chain gets clobbered when we make a copy, we save it
4136 here. */
4137 tree chain = NULL_TREE, new_tree;
4138
4139 chain = TREE_CHAIN (*tp);
4140
4141 /* Copy the node. */
4142 new_tree = copy_node (*tp);
4143
4144 /* Propagate mudflap marked-ness. */
4145 if (flag_mudflap && mf_marked_p (*tp))
4146 mf_mark (new_tree);
4147
4148 *tp = new_tree;
4149
4150 /* Now, restore the chain, if appropriate. That will cause
4151 walk_tree to walk into the chain as well. */
4152 if (code == PARM_DECL
4153 || code == TREE_LIST
4154 || code == OMP_CLAUSE)
4155 TREE_CHAIN (*tp) = chain;
4156
4157 /* For now, we don't update BLOCKs when we make copies. So, we
4158 have to nullify all BIND_EXPRs. */
4159 if (TREE_CODE (*tp) == BIND_EXPR)
4160 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4161 }
4162 else if (code == CONSTRUCTOR)
4163 {
4164 /* CONSTRUCTOR nodes need special handling because
4165 we need to duplicate the vector of elements. */
4166 tree new_tree;
4167
4168 new_tree = copy_node (*tp);
4169
4170 /* Propagate mudflap marked-ness. */
4171 if (flag_mudflap && mf_marked_p (*tp))
4172 mf_mark (new_tree);
4173
4174 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4175 CONSTRUCTOR_ELTS (*tp));
4176 *tp = new_tree;
4177 }
4178 else if (TREE_CODE_CLASS (code) == tcc_type)
4179 *walk_subtrees = 0;
4180 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4181 *walk_subtrees = 0;
4182 else if (TREE_CODE_CLASS (code) == tcc_constant)
4183 *walk_subtrees = 0;
4184 else
4185 gcc_assert (code != STATEMENT_LIST);
4186 return NULL_TREE;
4187 }
4188
4189 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4190 information indicating to what new SAVE_EXPR this one should be mapped,
4191 use that one. Otherwise, create a new node and enter it in ST. FN is
4192 the function into which the copy will be placed. */
4193
4194 static void
4195 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4196 {
4197 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4198 tree *n;
4199 tree t;
4200
4201 /* See if we already encountered this SAVE_EXPR. */
4202 n = (tree *) pointer_map_contains (st, *tp);
4203
4204 /* If we didn't already remap this SAVE_EXPR, do so now. */
4205 if (!n)
4206 {
4207 t = copy_node (*tp);
4208
4209 /* Remember this SAVE_EXPR. */
4210 *pointer_map_insert (st, *tp) = t;
4211 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4212 *pointer_map_insert (st, t) = t;
4213 }
4214 else
4215 {
4216 /* We've already walked into this SAVE_EXPR; don't do it again. */
4217 *walk_subtrees = 0;
4218 t = *n;
4219 }
4220
4221 /* Replace this SAVE_EXPR with the copy. */
4222 *tp = t;
4223 }
4224
4225 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4226 copies the label declaration and enters it in the decl map of DATA (which is
4227 really a `copy_body_data *'). */
4228
4229 static tree
4230 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4231 void *data)
4232 {
4233 copy_body_data *id = (copy_body_data *) data;
4234
4235 /* Don't walk into types. */
4236 if (TYPE_P (*tp))
4237 *walk_subtrees = 0;
4238
4239 else if (TREE_CODE (*tp) == LABEL_EXPR)
4240 {
4241 tree decl = TREE_OPERAND (*tp, 0);
4242
4243 /* Copy the decl and remember the copy. */
4244 insert_decl_map (id, decl, id->copy_decl (decl, id));
4245 }
4246
4247 return NULL_TREE;
4248 }
4249
4250 /* Perform any modifications to EXPR required when it is unsaved. Does
4251 not recurse into EXPR's subtrees. */
4252
4253 static void
4254 unsave_expr_1 (tree expr)
4255 {
4256 switch (TREE_CODE (expr))
4257 {
4258 case TARGET_EXPR:
4259 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4260 It's OK for this to happen if it was part of a subtree that
4261 isn't immediately expanded, such as operand 2 of another
4262 TARGET_EXPR. */
4263 if (TREE_OPERAND (expr, 1))
4264 break;
4265
4266 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4267 TREE_OPERAND (expr, 3) = NULL_TREE;
4268 break;
4269
4270 default:
4271 break;
4272 }
4273 }
4274
4275 /* Called via walk_tree when an expression is unsaved. Using the
4276 map pointed to by ST (which is really a `struct pointer_map_t *'),
4277 remaps all local declarations to appropriate replacements. */
4278
4279 static tree
4280 unsave_r (tree *tp, int *walk_subtrees, void *data)
4281 {
4282 copy_body_data *id = (copy_body_data *) data;
4283 struct pointer_map_t *st = id->decl_map;
4284 tree *n;
4285
4286 /* Only a local declaration (variable or label). */
4287 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4288 || TREE_CODE (*tp) == LABEL_DECL)
4289 {
4290 /* Lookup the declaration. */
4291 n = (tree *) pointer_map_contains (st, *tp);
4292
4293 /* If it's there, remap it. */
4294 if (n)
4295 *tp = *n;
4296 }
4297
4298 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4299 gcc_unreachable ();
4300 else if (TREE_CODE (*tp) == BIND_EXPR)
4301 copy_bind_expr (tp, walk_subtrees, id);
4302 else if (TREE_CODE (*tp) == SAVE_EXPR
4303 || TREE_CODE (*tp) == TARGET_EXPR)
4304 remap_save_expr (tp, st, walk_subtrees);
4305 else
4306 {
4307 copy_tree_r (tp, walk_subtrees, NULL);
4308
4309 /* Do whatever unsaving is required. */
4310 unsave_expr_1 (*tp);
4311 }
4312
4313 /* Keep iterating. */
4314 return NULL_TREE;
4315 }
4316
4317 /* Copies everything in EXPR and replaces variables, labels
4318 and SAVE_EXPRs local to EXPR. */
4319
4320 tree
4321 unsave_expr_now (tree expr)
4322 {
4323 copy_body_data id;
4324
4325 /* There's nothing to do for NULL_TREE. */
4326 if (expr == 0)
4327 return expr;
4328
4329 /* Set up ID. */
4330 memset (&id, 0, sizeof (id));
4331 id.src_fn = current_function_decl;
4332 id.dst_fn = current_function_decl;
4333 id.decl_map = pointer_map_create ();
4334 id.debug_map = NULL;
4335
4336 id.copy_decl = copy_decl_no_change;
4337 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4338 id.transform_new_cfg = false;
4339 id.transform_return_to_modify = false;
4340 id.transform_lang_insert_block = NULL;
4341
4342 /* Walk the tree once to find local labels. */
4343 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4344
4345 /* Walk the tree again, copying, remapping, and unsaving. */
4346 walk_tree (&expr, unsave_r, &id, NULL);
4347
4348 /* Clean up. */
4349 pointer_map_destroy (id.decl_map);
4350 if (id.debug_map)
4351 pointer_map_destroy (id.debug_map);
4352
4353 return expr;
4354 }
4355
4356 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4357 label, copies the declaration and enters it in the decl map of DATA (which
4358 is really a `copy_body_data *'). */
4359
4360 static tree
4361 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4362 bool *handled_ops_p ATTRIBUTE_UNUSED,
4363 struct walk_stmt_info *wi)
4364 {
4365 copy_body_data *id = (copy_body_data *) wi->info;
4366 gimple stmt = gsi_stmt (*gsip);
4367
4368 if (gimple_code (stmt) == GIMPLE_LABEL)
4369 {
4370 tree decl = gimple_label_label (stmt);
4371
4372 /* Copy the decl and remember the copy. */
4373 insert_decl_map (id, decl, id->copy_decl (decl, id));
4374 }
4375
4376 return NULL_TREE;
4377 }
4378
4379
4380 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4381 Using the map pointed to by ST (which is really a `struct pointer_map_t *'),
4382 remaps all local declarations to appropriate replacements in gimple
4383 operands. */
4384
4385 static tree
4386 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4387 {
4388 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4389 copy_body_data *id = (copy_body_data *) wi->info;
4390 struct pointer_map_t *st = id->decl_map;
4391 tree *n;
4392 tree expr = *tp;
4393
4394 /* Only a local declaration (variable or label). */
4395 if ((TREE_CODE (expr) == VAR_DECL
4396 && !TREE_STATIC (expr))
4397 || TREE_CODE (expr) == LABEL_DECL)
4398 {
4399 /* Lookup the declaration. */
4400 n = (tree *) pointer_map_contains (st, expr);
4401
4402 /* If it's there, remap it. */
4403 if (n)
4404 *tp = *n;
4405 *walk_subtrees = 0;
4406 }
4407 else if (TREE_CODE (expr) == STATEMENT_LIST
4408 || TREE_CODE (expr) == BIND_EXPR
4409 || TREE_CODE (expr) == SAVE_EXPR)
4410 gcc_unreachable ();
4411 else if (TREE_CODE (expr) == TARGET_EXPR)
4412 {
4413 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4414 It's OK for this to happen if it was part of a subtree that
4415 isn't immediately expanded, such as operand 2 of another
4416 TARGET_EXPR. */
4417 if (!TREE_OPERAND (expr, 1))
4418 {
4419 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4420 TREE_OPERAND (expr, 3) = NULL_TREE;
4421 }
4422 }
4423
4424 /* Keep iterating. */
4425 return NULL_TREE;
4426 }
4427
4428
4429 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4430 Using the decl map in the `copy_body_data' pointed to by WI->info,
4431 remaps all local declarations to appropriate replacements in gimple
4432 statements. */
4433
4434 static tree
4435 replace_locals_stmt (gimple_stmt_iterator *gsip,
4436 bool *handled_ops_p ATTRIBUTE_UNUSED,
4437 struct walk_stmt_info *wi)
4438 {
4439 copy_body_data *id = (copy_body_data *) wi->info;
4440 gimple stmt = gsi_stmt (*gsip);
4441
4442 if (gimple_code (stmt) == GIMPLE_BIND)
4443 {
4444 tree block = gimple_bind_block (stmt);
4445
4446 if (block)
4447 {
4448 remap_block (&block, id);
4449 gimple_bind_set_block (stmt, block);
4450 }
4451
4452 /* This will remap a lot of the same decls again, but this should be
4453 harmless. */
4454 if (gimple_bind_vars (stmt))
4455 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4456 }
4457
4458 /* Keep iterating. */
4459 return NULL_TREE;
4460 }
4461
4462
4463 /* Copies everything in SEQ and replaces variables and labels local to
4464 current_function_decl. */
4465
4466 gimple_seq
4467 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4468 {
4469 copy_body_data id;
4470 struct walk_stmt_info wi;
4471 struct pointer_set_t *visited;
4472 gimple_seq copy;
4473
4474 /* There's nothing to do for a NULL sequence. */
4475 if (seq == NULL)
4476 return seq;
4477
4478 /* Set up ID. */
4479 memset (&id, 0, sizeof (id));
4480 id.src_fn = current_function_decl;
4481 id.dst_fn = current_function_decl;
4482 id.decl_map = pointer_map_create ();
4483 id.debug_map = NULL;
4484
4485 id.copy_decl = copy_decl_no_change;
4486 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4487 id.transform_new_cfg = false;
4488 id.transform_return_to_modify = false;
4489 id.transform_lang_insert_block = NULL;
4490
4491 /* Walk the tree once to find local labels. */
4492 memset (&wi, 0, sizeof (wi));
4493 visited = pointer_set_create ();
4494 wi.info = &id;
4495 wi.pset = visited;
4496 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4497 pointer_set_destroy (visited);
4498
4499 copy = gimple_seq_copy (seq);
4500
4501 /* Walk the copy, remapping decls. */
4502 memset (&wi, 0, sizeof (wi));
4503 wi.info = &id;
4504 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4505
4506 /* Clean up. */
4507 pointer_map_destroy (id.decl_map);
4508 if (id.debug_map)
4509 pointer_map_destroy (id.debug_map);
4510
4511 return copy;
4512 }
4513
4514
4515 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4516
4517 static tree
4518 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4519 {
4520 if (*tp == data)
4521 return (tree) data;
4522 else
4523 return NULL;
4524 }
4525
4526 bool
4527 debug_find_tree (tree top, tree search)
4528 {
4529 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4530 }
4531
4532
4533 /* Declare the variables created by the inliner. Add all the variables in
4534 VARS to BLOCK. */
4535
4536 static void
4537 declare_inline_vars (tree block, tree vars)
4538 {
4539 tree t;
4540 for (t = vars; t; t = TREE_CHAIN (t))
4541 {
4542 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4543 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4544 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4545 }
4546
4547 if (block)
4548 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4549 }
4550
4551 /* Finish up the copy COPY of declaration DECL: set its abstract origin,
4552 context and related flags so that it can live in ID->dst_fn rather than
4553 in ID->src_fn, and return it. */
4554
4555 static tree
4556 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4557 {
4558 /* Don't generate debug information for the copy if we wouldn't have
4559 generated it for the original either. */
4560 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4561 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4562
4563 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4564 declaration inspired this copy. */
4565 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4566
4567 /* The new variable/label has no RTL, yet. */
4568 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4569 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4570 SET_DECL_RTL (copy, NULL_RTX);
4571
4572 /* These args would always appear unused, if not for this. */
4573 TREE_USED (copy) = 1;
4574
4575 /* Set the context for the new declaration. */
4576 if (!DECL_CONTEXT (decl))
4577 /* Globals stay global. */
4578 ;
4579 else if (DECL_CONTEXT (decl) != id->src_fn)
4580 /* Things that weren't in the scope of the function we're inlining
4581 from aren't in the scope we're inlining to, either. */
4582 ;
4583 else if (TREE_STATIC (decl))
4584 /* Function-scoped static variables should stay in the original
4585 function. */
4586 ;
4587 else
4588 /* Ordinary automatic local variables are now in the scope of the
4589 new function. */
4590 DECL_CONTEXT (copy) = id->dst_fn;
4591
4592 return copy;
4593 }
4594
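/* Create a VAR_DECL copy of the PARM_DECL or RESULT_DECL DECL, for use as a
   local variable in ID->dst_fn. */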
4595 static tree
4596 copy_decl_to_var (tree decl, copy_body_data *id)
4597 {
4598 tree copy, type;
4599
4600 gcc_assert (TREE_CODE (decl) == PARM_DECL
4601 || TREE_CODE (decl) == RESULT_DECL);
4602
4603 type = TREE_TYPE (decl);
4604
4605 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4606 VAR_DECL, DECL_NAME (decl), type);
4607 if (DECL_PT_UID_SET_P (decl))
4608 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4609 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4610 TREE_READONLY (copy) = TREE_READONLY (decl);
4611 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4612 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4613
4614 return copy_decl_for_dup_finish (id, decl, copy);
4615 }
4616
4617 /* Like copy_decl_to_var, but create a return slot object instead of a
4618 pointer variable for return by invisible reference. */
4619
4620 static tree
4621 copy_result_decl_to_var (tree decl, copy_body_data *id)
4622 {
4623 tree copy, type;
4624
4625 gcc_assert (TREE_CODE (decl) == PARM_DECL
4626 || TREE_CODE (decl) == RESULT_DECL);
4627
4628 type = TREE_TYPE (decl);
4629 if (DECL_BY_REFERENCE (decl))
4630 type = TREE_TYPE (type);
4631
4632 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4633 VAR_DECL, DECL_NAME (decl), type);
4634 if (DECL_PT_UID_SET_P (decl))
4635 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4636 TREE_READONLY (copy) = TREE_READONLY (decl);
4637 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4638 if (!DECL_BY_REFERENCE (decl))
4639 {
4640 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4641 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4642 }
4643
4644 return copy_decl_for_dup_finish (id, decl, copy);
4645 }
4646
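/* Copy the declaration DECL for use in ID->dst_fn, keeping its TREE_CODE
   unchanged. */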
4647 tree
4648 copy_decl_no_change (tree decl, copy_body_data *id)
4649 {
4650 tree copy;
4651
4652 copy = copy_node (decl);
4653
4654 /* The COPY is not abstract; it will be generated in DST_FN. */
4655 DECL_ABSTRACT (copy) = 0;
4656 lang_hooks.dup_lang_specific_decl (copy);
4657
4658 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4659 been taken; it's for internal bookkeeping in expand_goto_internal. */
4660 if (TREE_CODE (copy) == LABEL_DECL)
4661 {
4662 TREE_ADDRESSABLE (copy) = 0;
4663 LABEL_DECL_UID (copy) = -1;
4664 }
4665
4666 return copy_decl_for_dup_finish (id, decl, copy);
4667 }
4668
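/* Copy DECL for use in ID->dst_fn: PARM_DECLs and RESULT_DECLs are turned
   into VAR_DECLs, everything else is copied unchanged. */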
4669 static tree
4670 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4671 {
4672 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4673 return copy_decl_to_var (decl, id);
4674 else
4675 return copy_decl_no_change (decl, id);
4676 }
4677
4678 /* Return a copy of the function's argument tree. */
4679 static tree
4680 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4681 bitmap args_to_skip, tree *vars)
4682 {
4683 tree arg, *parg;
4684 tree new_parm = NULL;
4685 int i = 0;
4686
4687 parg = &new_parm;
4688
4689 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4690 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4691 {
4692 tree new_tree = remap_decl (arg, id);
4693 lang_hooks.dup_lang_specific_decl (new_tree);
4694 *parg = new_tree;
4695 parg = &TREE_CHAIN (new_tree);
4696 }
4697 else if (!pointer_map_contains (id->decl_map, arg))
4698 {
4699 /* Make an equivalent VAR_DECL. If the argument was used
4700 as a temporary variable later in the function, the uses will be
4701 replaced by the local variable. */
4702 tree var = copy_decl_to_var (arg, id);
4703 get_var_ann (var);
4704 add_referenced_var (var);
4705 insert_decl_map (id, arg, var);
4706 /* Declare this new variable. */
4707 TREE_CHAIN (var) = *vars;
4708 *vars = var;
4709 }
4710 return new_parm;
4711 }
4712
4713 /* Return a copy of the function's static chain. */
4714 static tree
4715 copy_static_chain (tree static_chain, copy_body_data * id)
4716 {
4717 tree *chain_copy, *pvar;
4718
4719 chain_copy = &static_chain;
4720 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4721 {
4722 tree new_tree = remap_decl (*pvar, id);
4723 lang_hooks.dup_lang_specific_decl (new_tree);
4724 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4725 *pvar = new_tree;
4726 }
4727 return static_chain;
4728 }
4729
4730 /* Return true if the function is allowed to be versioned.
4731 This is a guard for the versioning functionality. */
4732
4733 bool
4734 tree_versionable_function_p (tree fndecl)
4735 {
4736 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4737 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4738 }
4739
4740 /* Delete all unreachable basic blocks and update callgraph.
4741 Doing so is somewhat nontrivial because we need to update all clones and
4742 remove inline functions that become unreachable. */
4743
4744 static bool
4745 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4746 {
4747 bool changed = false;
4748 basic_block b, next_bb;
4749
4750 find_unreachable_blocks ();
4751
4752 /* Delete all unreachable basic blocks. */
4753
4754 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4755 {
4756 next_bb = b->next_bb;
4757
4758 if (!(b->flags & BB_REACHABLE))
4759 {
4760 gimple_stmt_iterator bsi;
4761
4762 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4763 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4764 {
4765 struct cgraph_edge *e;
4766 struct cgraph_node *node;
4767
4768 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4769 {
4770 if (!e->inline_failed)
4771 cgraph_remove_node_and_inline_clones (e->callee);
4772 else
4773 cgraph_remove_edge (e);
4774 }
4775 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4776 && id->dst_node->clones)
4777 for (node = id->dst_node->clones; node != id->dst_node;)
4778 {
4779 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4780 {
4781 if (!e->inline_failed)
4782 cgraph_remove_node_and_inline_clones (e->callee);
4783 else
4784 cgraph_remove_edge (e);
4785 }
4786
4787 if (node->clones)
4788 node = node->clones;
4789 else if (node->next_sibling_clone)
4790 node = node->next_sibling_clone;
4791 else
4792 {
4793 while (node != id->dst_node && !node->next_sibling_clone)
4794 node = node->clone_of;
4795 if (node != id->dst_node)
4796 node = node->next_sibling_clone;
4797 }
4798 }
4799 }
4800 delete_basic_block (b);
4801 changed = true;
4802 }
4803 }
4804
4805 if (changed)
4806 tidy_fallthru_edges ();
4807 return changed;
4808 }
4809
4810 /* Update clone info after duplication. */
4811
4812 static void
4813 update_clone_info (copy_body_data * id)
4814 {
4815 struct cgraph_node *node;
4816 if (!id->dst_node->clones)
4817 return;
4818 for (node = id->dst_node->clones; node != id->dst_node;)
4819 {
4820 /* First update replace maps to match the new body. */
4821 if (node->clone.tree_map)
4822 {
4823 unsigned int i;
4824 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4825 {
4826 struct ipa_replace_map *replace_info;
4827 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4828 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4829 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4830 }
4831 }
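/* Step to the next node in a pre-order walk of the clone tree rooted
   at id->dst_node. */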
4832 if (node->clones)
4833 node = node->clones;
4834 else if (node->next_sibling_clone)
4835 node = node->next_sibling_clone;
4836 else
4837 {
4838 while (node != id->dst_node && !node->next_sibling_clone)
4839 node = node->clone_of;
4840 if (node != id->dst_node)
4841 node = node->next_sibling_clone;
4842 }
4843 }
4844 }
4845
4846 /* Create a copy of a function's tree.
4847 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4848 of the original function and the new copied function
4849 respectively. In case we want to replace a DECL
4850 tree with another tree while duplicating the function's
4851 body, TREE_MAP represents the mapping between these
4852 trees. If UPDATE_CLONES is set, the call_stmt fields
4853 of edges of clones of the function will be updated. */
4854 void
4855 tree_function_versioning (tree old_decl, tree new_decl,
4856 VEC(ipa_replace_map_p,gc)* tree_map,
4857 bool update_clones, bitmap args_to_skip)
4858 {
4859 struct cgraph_node *old_version_node;
4860 struct cgraph_node *new_version_node;
4861 copy_body_data id;
4862 tree p;
4863 unsigned i;
4864 struct ipa_replace_map *replace_info;
4865 basic_block old_entry_block, bb;
4866 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4867
4868 tree t_step;
4869 tree old_current_function_decl = current_function_decl;
4870 tree vars = NULL_TREE;
4871
4872 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4873 && TREE_CODE (new_decl) == FUNCTION_DECL);
4874 DECL_POSSIBLY_INLINED (old_decl) = 1;
4875
4876 old_version_node = cgraph_node (old_decl);
4877 new_version_node = cgraph_node (new_decl);
4878
4879 /* Output the inlining info for this abstract function, since it has been
4880 inlined. If we don't do this now, we can lose the information about the
4881 variables in the function when the blocks get blown away as soon as we
4882 remove the cgraph node. */
4883 (*debug_hooks->outlining_inline_function) (old_decl);
4884
4885 DECL_ARTIFICIAL (new_decl) = 1;
4886 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4887 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4888
4889 /* Prepare the data structures for the tree copy. */
4890 memset (&id, 0, sizeof (id));
4891
4892 /* Generate a new name for the new version. */
4893 id.statements_to_fold = pointer_set_create ();
4894
4895 id.decl_map = pointer_map_create ();
4896 id.debug_map = NULL;
4897 id.src_fn = old_decl;
4898 id.dst_fn = new_decl;
4899 id.src_node = old_version_node;
4900 id.dst_node = new_version_node;
4901 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4902 if (id.src_node->ipa_transforms_to_apply)
4903 {
4904 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4905 unsigned int i;
4906
4907 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4908 id.src_node->ipa_transforms_to_apply);
4909 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4910 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4911 VEC_index (ipa_opt_pass,
4912 old_transforms_to_apply,
4913 i));
4914 }
4915
4916 id.copy_decl = copy_decl_no_change;
4917 id.transform_call_graph_edges
4918 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4919 id.transform_new_cfg = true;
4920 id.transform_return_to_modify = false;
4921 id.transform_lang_insert_block = NULL;
4922
4923 current_function_decl = new_decl;
4924 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4925 (DECL_STRUCT_FUNCTION (old_decl));
4926 initialize_cfun (new_decl, old_decl,
4927 old_entry_block->count);
4928 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
4929 = id.src_cfun->gimple_df->ipa_pta;
4930 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4931
4932 /* Copy the function's static chain. */
4933 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4934 if (p)
4935 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4936 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4937 &id);
4938
4939 /* If there's a tree_map, prepare for substitution. */
4940 if (tree_map)
4941 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4942 {
4943 gimple init;
4944 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4945 if (replace_info->replace_p)
4946 {
4947 tree op = replace_info->new_tree;
4948 if (!replace_info->old_tree)
4949 {
4950 int i = replace_info->parm_num;
4951 tree parm;
4952 for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
4953 i --;
4954 replace_info->old_tree = parm;
4955 }
4956
4957
4958 STRIP_NOPS (op);
4959
4960 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4961 op = TREE_OPERAND (op, 0);
4962
4963 if (TREE_CODE (op) == ADDR_EXPR)
4964 {
4965 op = TREE_OPERAND (op, 0);
4966 while (handled_component_p (op))
4967 op = TREE_OPERAND (op, 0);
4968 if (TREE_CODE (op) == VAR_DECL)
4969 add_referenced_var (op);
4970 }
4971 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4972 init = setup_one_parameter (&id, replace_info->old_tree,
4973 replace_info->new_tree, id.src_fn,
4974 NULL,
4975 &vars);
4976 if (init)
4977 VEC_safe_push (gimple, heap, init_stmts, init);
4978 }
4979 }
4980 /* Copy the function's arguments. */
4981 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4982 DECL_ARGUMENTS (new_decl) =
4983 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4984 args_to_skip, &vars);
4985
4986 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4987
4988 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4989 number_blocks (id.dst_fn);
4990
4991 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4992
4993 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4994 /* Add local vars. */
4995 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4996 t_step; t_step = TREE_CHAIN (t_step))
4997 {
4998 tree var = TREE_VALUE (t_step);
4999 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
5000 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
5001 else if (!can_be_nonlocal (var, &id))
5002 cfun->local_decls =
5003 tree_cons (NULL_TREE, remap_decl (var, &id),
5004 cfun->local_decls);
5005 }
5006
5007 /* Copy the Function's body. */
5008 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5009 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
5010
5011 if (DECL_RESULT (old_decl) != NULL_TREE)
5012 {
5013 tree *res_decl = &DECL_RESULT (old_decl);
5014 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5015 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5016 }
5017
5018 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5019 number_blocks (new_decl);
5020
5021 /* We want to create the BB unconditionally, so that the addition of
5022 debug stmts doesn't affect BB count, which may in the end cause
5023 codegen differences. */
5024 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5025 while (VEC_length (gimple, init_stmts))
5026 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5027 update_clone_info (&id);
5028
5029 /* Remap the nonlocal_goto_save_area, if any. */
5030 if (cfun->nonlocal_goto_save_area)
5031 {
5032 struct walk_stmt_info wi;
5033
5034 memset (&wi, 0, sizeof (wi));
5035 wi.info = &id;
5036 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5037 }
5038
5039 /* Clean up. */
5040 pointer_map_destroy (id.decl_map);
5041 if (id.debug_map)
5042 pointer_map_destroy (id.debug_map);
5043 free_dominance_info (CDI_DOMINATORS);
5044 free_dominance_info (CDI_POST_DOMINATORS);
5045
5046 fold_marked_statements (0, id.statements_to_fold);
5047 pointer_set_destroy (id.statements_to_fold);
5048 fold_cond_expr_cond ();
5049 delete_unreachable_blocks_update_callgraph (&id);
5050 if (id.dst_node->analyzed)
5051 cgraph_rebuild_references ();
5052 update_ssa (TODO_update_ssa);
5053 free_dominance_info (CDI_DOMINATORS);
5054 free_dominance_info (CDI_POST_DOMINATORS);
5055
5056 gcc_assert (!id.debug_stmts);
5057 VEC_free (gimple, heap, init_stmts);
5058 pop_cfun ();
5059 current_function_decl = old_current_function_decl;
5060 gcc_assert (!current_function_decl
5061 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5062 return;
5063 }
5064
5065 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5066 the callee and return the inlined body on success. */
5067
5068 tree
5069 maybe_inline_call_in_expr (tree exp)
5070 {
5071 tree fn = get_callee_fndecl (exp);
5072
5073 /* We can only try to inline "const" functions. */
5074 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5075 {
5076 struct pointer_map_t *decl_map = pointer_map_create ();
5077 call_expr_arg_iterator iter;
5078 copy_body_data id;
5079 tree param, arg, t;
5080
5081 /* Remap the parameters. */
5082 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5083 param;
5084 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5085 *pointer_map_insert (decl_map, param) = arg;
5086
5087 memset (&id, 0, sizeof (id));
5088 id.src_fn = fn;
5089 id.dst_fn = current_function_decl;
5090 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5091 id.decl_map = decl_map;
5092
5093 id.copy_decl = copy_decl_no_change;
5094 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5095 id.transform_new_cfg = false;
5096 id.transform_return_to_modify = true;
5097 id.transform_lang_insert_block = false;
5098
5099 /* Make sure not to unshare trees behind the front-end's back
5100 since front-end specific mechanisms may rely on sharing. */
5101 id.regimplify = false;
5102 id.do_not_unshare = true;
5103
5104 /* We're not inside any EH region. */
5105 id.eh_lp_nr = 0;
5106
5107 t = copy_tree_body (&id);
5108 pointer_map_destroy (decl_map);
5109
5110 /* We can only return something suitable for use in a GENERIC
5111 expression tree. */
5112 if (TREE_CODE (t) == MODIFY_EXPR)
5113 return TREE_OPERAND (t, 1);
5114 }
5115
5116 return NULL_TREE;
5117 }
5118
5119 /* Duplicate a type, fields and all. */
5120
5121 tree
5122 build_duplicate_type (tree type)
5123 {
5124 struct copy_body_data id;
5125
5126 memset (&id, 0, sizeof (id));
5127 id.src_fn = current_function_decl;
5128 id.dst_fn = current_function_decl;
5129 id.src_cfun = cfun;
5130 id.decl_map = pointer_map_create ();
5131 id.debug_map = NULL;
5132 id.copy_decl = copy_decl_no_change;
5133
5134 type = remap_type_1 (type, &id);
5135
5136 pointer_map_destroy (id.decl_map);
5137 if (id.debug_map)
5138 pointer_map_destroy (id.debug_map);
5139
5140 TYPE_CANONICAL (type) = type;
5141
5142 return type;
5143 }
5144
5145 /* Return true if the call represented by edge E may be inlined. Return false
5146 if the caller and callee use different EH personalities or incompatible
5147 target-specific options, or if the call site's actual argument or return
5148 types cannot be matched to the callee's parameter types. */
5148 bool
5149 tree_can_inline_p (struct cgraph_edge *e)
5150 {
5151 #if 0
5152 /* This causes a regression in SPEC in that it prevents a cold function from
5153 inlining a hot function. Perhaps this should only apply to functions
5154 that the user declares hot/cold/optimize explicitly. */
5155
5156 /* Don't inline a function with a higher optimization level than the
5157 caller, or with different space constraints (hot/cold functions). */
5158 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5159 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5160
5161 if (caller_tree != callee_tree)
5162 {
5163 struct cl_optimization *caller_opt
5164 = TREE_OPTIMIZATION ((caller_tree)
5165 ? caller_tree
5166 : optimization_default_node);
5167
5168 struct cl_optimization *callee_opt
5169 = TREE_OPTIMIZATION ((callee_tree)
5170 ? callee_tree
5171 : optimization_default_node);
5172
5173 if ((caller_opt->optimize > callee_opt->optimize)
5174 || (caller_opt->optimize_size != callee_opt->optimize_size))
5175 return false;
5176 }
5177 #endif
5178 tree caller, callee, lhs;
5179
5180 caller = e->caller->decl;
5181 callee = e->callee->decl;
5182
5183 /* We cannot inline a function that uses a different EH personality
5184 than the caller. */
5185 if (DECL_FUNCTION_PERSONALITY (caller)
5186 && DECL_FUNCTION_PERSONALITY (callee)
5187 && (DECL_FUNCTION_PERSONALITY (caller)
5188 != DECL_FUNCTION_PERSONALITY (callee)))
5189 {
5190 e->inline_failed = CIF_UNSPECIFIED;
5191 gimple_call_set_cannot_inline (e->call_stmt, true);
5192 return false;
5193 }
5194
5195 /* Allow the backend to decide if inlining is ok. */
5196 if (!targetm.target_option.can_inline_p (caller, callee))
5197 {
5198 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5199 gimple_call_set_cannot_inline (e->call_stmt, true);
5200 e->call_stmt_cannot_inline_p = true;
5201 return false;
5202 }
5203
5204 /* Do not inline calls where we cannot trivially work around mismatches
5205 in argument or return types. */
5206 if (e->call_stmt
5207 && ((DECL_RESULT (callee)
5208 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5209 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5210 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5211 TREE_TYPE (lhs))
5212 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5213 || !gimple_check_call_args (e->call_stmt)))
5214 {
5215 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5216 gimple_call_set_cannot_inline (e->call_stmt, true);
5217 e->call_stmt_cannot_inline_p = true;
5218 return false;
5219 }
5220
5221 return true;
5222 }