gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
45 #include "diagnostic.h"
46 #include "except.h"
47 #include "debug.h"
48 #include "pointer-set.h"
49 #include "ipa-prop.h"
50 #include "value-prof.h"
51 #include "tree-pass.h"
52 #include "target.h"
53 #include "integrate.h"
54
55 /* I'm not real happy about this, but we need to handle gimple and
56 non-gimple trees. */
57 #include "gimple.h"
58
59 /* Inlining, Cloning, Versioning, Parallelization
60
61 Inlining: a function body is duplicated, but the PARM_DECLs are
62 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
63 MODIFY_EXPRs that store to a dedicated returned-value variable.
64 The duplicated eh_region info of the copy will later be appended
65 to the info for the caller; the eh_region info in copied throwing
66 statements and RESX statements is adjusted accordingly.
67
68 Cloning: (only in C++) We have one body for a con/de/structor, and
69 multiple function decls, each with a unique parameter list.
70 Duplicate the body, using the given splay tree; some parameters
71 will become constants (like 0 or 1).
72
73 Versioning: a function body is duplicated and the result is a new
74 function, rather than being inserted into the blocks of an existing
75 function as with inlining. Some parameters will become constants.
76
77 Parallelization: a region of a function is duplicated resulting in
78 a new function. Variables may be replaced with complex expressions
79 to enable shared variable semantics.
80
81 All of these will simultaneously look up any callgraph edges. If
82 we're going to inline the duplicated function body, and the given
83 function has some cloned callgraph nodes (one for each place this
84 function will be inlined) those callgraph edges will be duplicated.
85 If we're cloning the body, those callgraph edges will be
86 updated to point into the new body. (Note that the original
87 callgraph node and edge list will not be altered.)
88
89 See the CALL_EXPR handling case in copy_tree_body_r (). */
90
91 /* To Do:
92
93 o In order to make inlining-on-trees work, we pessimized
94 function-local static constants. In particular, they are now
95 always output, even when not addressed. Fix this by treating
96 function-local static constants just like global static
97 constants; the back-end already knows not to output them if they
98 are not needed.
99
100 o Provide heuristics to clamp inlining of recursive template
101 calls? */
102
103
104 /* Weights that estimate_num_insns uses for heuristics in inlining. */
105
106 eni_weights eni_inlining_weights;
107
108 /* Weights that estimate_num_insns uses to estimate the size of the
109 produced code. */
110
111 eni_weights eni_size_weights;
112
113 /* Weights that estimate_num_insns uses to estimate the time necessary
114 to execute the produced code. */
115
116 eni_weights eni_time_weights;
117
118 /* Prototypes. */
119
120 static tree declare_return_variable (copy_body_data *, tree, tree);
121 static void remap_block (tree *, copy_body_data *);
122 static void copy_bind_expr (tree *, int *, copy_body_data *);
123 static tree mark_local_for_remap_r (tree *, int *, void *);
124 static void unsave_expr_1 (tree);
125 static tree unsave_r (tree *, int *, void *);
126 static void declare_inline_vars (tree, tree);
127 static void remap_save_expr (tree *, void *, int *);
128 static void prepend_lexical_block (tree current_block, tree new_block);
129 static tree copy_decl_to_var (tree, copy_body_data *);
130 static tree copy_result_decl_to_var (tree, copy_body_data *);
131 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
132 static gimple remap_gimple_stmt (gimple, copy_body_data *);
133 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
134
135 /* Insert a tree->tree mapping for ID. Although the name suggests
136 that the trees should be variables, it is used for more than that. */
137
138 void
139 insert_decl_map (copy_body_data *id, tree key, tree value)
140 {
141 *pointer_map_insert (id->decl_map, key) = value;
142
143 /* Always insert an identity map as well. If we see this same new
144 node again, we won't want to duplicate it a second time. */
145 if (key != value)
146 *pointer_map_insert (id->decl_map, value) = value;
147 }
148
149 /* Insert a tree->tree mapping for ID. This is only used for
150 variables. */
151
152 static void
153 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
154 {
155 if (!gimple_in_ssa_p (id->src_cfun))
156 return;
157
158 if (!MAY_HAVE_DEBUG_STMTS)
159 return;
160
161 if (!target_for_debug_bind (key))
162 return;
163
164 gcc_assert (TREE_CODE (key) == PARM_DECL);
165 gcc_assert (TREE_CODE (value) == VAR_DECL);
166
167 if (!id->debug_map)
168 id->debug_map = pointer_map_create ();
169
170 *pointer_map_insert (id->debug_map, key) = value;
171 }
172
173 /* If nonzero, we're remapping the contents of inlined debug
174 statements. If negative, an error has occurred, such as a
175 reference to a variable that isn't available in the inlined
176 context. */
177 static int processing_debug_stmt = 0;
178
179 /* Construct new SSA name for old NAME. ID is the inline context. */
180
181 static tree
182 remap_ssa_name (tree name, copy_body_data *id)
183 {
184 tree new_tree;
185 tree *n;
186
187 gcc_assert (TREE_CODE (name) == SSA_NAME);
188
189 n = (tree *) pointer_map_contains (id->decl_map, name);
190 if (n)
191 return unshare_expr (*n);
192
193 if (processing_debug_stmt)
194 {
195 processing_debug_stmt = -1;
196 return name;
197 }
198
199 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
200 in copy_bb. */
201 new_tree = remap_decl (SSA_NAME_VAR (name), id);
202
203 /* We might've substituted a constant or another SSA_NAME for
204 the variable.
205
206 Replace the SSA name representing RESULT_DECL by the variable during
207 inlining: this saves us from the need to introduce a PHI node in case
208 the return value is only partly initialized. */
209 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
210 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
211 || !id->transform_return_to_modify))
212 {
213 struct ptr_info_def *pi;
214 new_tree = make_ssa_name (new_tree, NULL);
215 insert_decl_map (id, name, new_tree);
216 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
217 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
218 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
219 /* At least IPA points-to info can be directly transferred. */
220 if (id->src_cfun->gimple_df
221 && id->src_cfun->gimple_df->ipa_pta
222 && (pi = SSA_NAME_PTR_INFO (name))
223 && !pi->pt.anything)
224 {
225 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
226 new_pi->pt = pi->pt;
227 }
228 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
229 {
230 /* By inlining a function that has an uninitialized variable, we might
231 extend its lifetime (the variable might get reused). This causes an
232 ICE if we end up extending the lifetime of an SSA name across an
233 abnormal edge, and it also increases register pressure.
234
235 We simply initialize all uninitialized vars to 0, except when we are
236 inlining into the very first BB. We could avoid this for all BBs that
237 are not inside strongly connected regions of the CFG, but this is
238 expensive to test. */
239 if (id->entry_bb
240 && is_gimple_reg (SSA_NAME_VAR (name))
241 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
242 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
243 || EDGE_COUNT (id->entry_bb->preds) != 1))
244 {
245 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
246 gimple init_stmt;
247
248 init_stmt = gimple_build_assign (new_tree,
249 fold_convert (TREE_TYPE (new_tree),
250 integer_zero_node));
251 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
252 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
253 }
254 else
255 {
256 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
257 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
258 == name)
259 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
260 }
261 }
262 }
263 else
264 insert_decl_map (id, name, new_tree);
265 return new_tree;
266 }
267
268 /* Remap DECL during the copying of the BLOCK tree for the function. */
269
270 tree
271 remap_decl (tree decl, copy_body_data *id)
272 {
273 tree *n;
274
275 /* We only remap local variables in the current function. */
276
277 /* See if we have remapped this declaration. */
278
279 n = (tree *) pointer_map_contains (id->decl_map, decl);
280
281 if (!n && processing_debug_stmt)
282 {
283 processing_debug_stmt = -1;
284 return decl;
285 }
286
287 /* If we didn't already have an equivalent for this declaration,
288 create one now. */
289 if (!n)
290 {
291 /* Make a copy of the variable or label. */
292 tree t = id->copy_decl (decl, id);
293
294 /* Remember it, so that if we encounter this local entity again
295 we can reuse this copy. Do this early because remap_type may
296 need this decl for TYPE_STUB_DECL. */
297 insert_decl_map (id, decl, t);
298
299 if (!DECL_P (t))
300 return t;
301
302 /* Remap types, if necessary. */
303 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
304 if (TREE_CODE (t) == TYPE_DECL)
305 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
306
307 /* Remap sizes as necessary. */
308 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
309 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
310
311 /* If fields, do likewise for offset and qualifier. */
312 if (TREE_CODE (t) == FIELD_DECL)
313 {
314 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
315 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
316 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
317 }
318
319 if (cfun && gimple_in_ssa_p (cfun)
320 && (TREE_CODE (t) == VAR_DECL
321 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
322 {
323 get_var_ann (t);
324 add_referenced_var (t);
325 }
326 return t;
327 }
328
329 if (id->do_not_unshare)
330 return *n;
331 else
332 return unshare_expr (*n);
333 }
334
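/* Subroutine of remap_type.  Build a remapped copy of TYPE for the
   destination function and register it in ID's decl map, remapping
   pointed-to types, variants, fields, domains and sizes as needed.  */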
335 static tree
336 remap_type_1 (tree type, copy_body_data *id)
337 {
338 tree new_tree, t;
339
340 /* We do need a copy. Build and register it now. If this is a pointer or
341 reference type, remap the designated type and make a new pointer or
342 reference type. */
343 if (TREE_CODE (type) == POINTER_TYPE)
344 {
345 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
346 TYPE_MODE (type),
347 TYPE_REF_CAN_ALIAS_ALL (type));
348 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
349 new_tree = build_type_attribute_qual_variant (new_tree,
350 TYPE_ATTRIBUTES (type),
351 TYPE_QUALS (type));
352 insert_decl_map (id, type, new_tree);
353 return new_tree;
354 }
355 else if (TREE_CODE (type) == REFERENCE_TYPE)
356 {
357 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
358 TYPE_MODE (type),
359 TYPE_REF_CAN_ALIAS_ALL (type));
360 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
361 new_tree = build_type_attribute_qual_variant (new_tree,
362 TYPE_ATTRIBUTES (type),
363 TYPE_QUALS (type));
364 insert_decl_map (id, type, new_tree);
365 return new_tree;
366 }
367 else
368 new_tree = copy_node (type);
369
370 insert_decl_map (id, type, new_tree);
371
372 /* This is a new type, not a copy of an old type. Need to reassociate
373 variants. We can handle everything except the main variant lazily. */
374 t = TYPE_MAIN_VARIANT (type);
375 if (type != t)
376 {
377 t = remap_type (t, id);
378 TYPE_MAIN_VARIANT (new_tree) = t;
379 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
380 TYPE_NEXT_VARIANT (t) = new_tree;
381 }
382 else
383 {
384 TYPE_MAIN_VARIANT (new_tree) = new_tree;
385 TYPE_NEXT_VARIANT (new_tree) = NULL;
386 }
387
388 if (TYPE_STUB_DECL (type))
389 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
390
391 /* Lazily create pointer and reference types. */
392 TYPE_POINTER_TO (new_tree) = NULL;
393 TYPE_REFERENCE_TO (new_tree) = NULL;
394
395 switch (TREE_CODE (new_tree))
396 {
397 case INTEGER_TYPE:
398 case REAL_TYPE:
399 case FIXED_POINT_TYPE:
400 case ENUMERAL_TYPE:
401 case BOOLEAN_TYPE:
402 t = TYPE_MIN_VALUE (new_tree);
403 if (t && TREE_CODE (t) != INTEGER_CST)
404 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
405
406 t = TYPE_MAX_VALUE (new_tree);
407 if (t && TREE_CODE (t) != INTEGER_CST)
408 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
409 return new_tree;
410
411 case FUNCTION_TYPE:
412 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
413 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
414 return new_tree;
415
416 case ARRAY_TYPE:
417 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
418 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
419 break;
420
421 case RECORD_TYPE:
422 case UNION_TYPE:
423 case QUAL_UNION_TYPE:
424 {
425 tree f, nf = NULL;
426
427 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
428 {
429 t = remap_decl (f, id);
430 DECL_CONTEXT (t) = new_tree;
431 TREE_CHAIN (t) = nf;
432 nf = t;
433 }
434 TYPE_FIELDS (new_tree) = nreverse (nf);
435 }
436 break;
437
438 case OFFSET_TYPE:
439 default:
440 /* Shouldn't have been thought variable sized. */
441 gcc_unreachable ();
442 }
443
444 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
445 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
446
447 return new_tree;
448 }
449
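/* Remap TYPE in the context of ID.  Reuse an existing mapping when one is
   present; types that are not variably modified are mapped to themselves.  */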
450 tree
451 remap_type (tree type, copy_body_data *id)
452 {
453 tree *node;
454 tree tmp;
455
456 if (type == NULL)
457 return type;
458
459 /* See if we have remapped this type. */
460 node = (tree *) pointer_map_contains (id->decl_map, type);
461 if (node)
462 return *node;
463
464 /* The type only needs remapping if it's variably modified. */
465 if (! variably_modified_type_p (type, id->src_fn))
466 {
467 insert_decl_map (id, type, type);
468 return type;
469 }
470
471 id->remapping_type_depth++;
472 tmp = remap_type_1 (type, id);
473 id->remapping_type_depth--;
474
475 return tmp;
476 }
477
478 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
479 is NULL or TYPE has not been remapped before. */
480
481 static tree
482 remapped_type (tree type, copy_body_data *id)
483 {
484 tree *node;
485
486 if (type == NULL)
487 return type;
488
489 /* See if we have remapped this type. */
490 node = (tree *) pointer_map_contains (id->decl_map, type);
491 if (node)
492 return *node;
493 else
494 return NULL;
495 }
496
498 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
499
500 static bool
501 can_be_nonlocal (tree decl, copy_body_data *id)
502 {
503 /* We cannot duplicate function decls. */
504 if (TREE_CODE (decl) == FUNCTION_DECL)
505 return true;
506
507 /* Local static vars must be non-local or we get multiple declaration
508 problems. */
509 if (TREE_CODE (decl) == VAR_DECL
510 && !auto_var_in_fn_p (decl, id->src_fn))
511 return true;
512
513 /* At the moment dwarf2out can handle only these types of nodes. We
514 can support more later. */
515 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
516 return false;
517
518 /* We must use the global type. We call remapped_type instead of
519 remap_type since we don't want to remap this type here if it
520 hasn't been remapped before. */
521 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
522 return false;
523
524 /* Without SSA we can't tell if the variable is used. */
525 if (!gimple_in_ssa_p (cfun))
526 return false;
527
528 /* Live variables must be copied so we can attach DECL_RTL. */
529 if (var_ann (decl))
530 return false;
531
532 return true;
533 }
534
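/* Remap the declaration chain DECLS using ID.  Declarations that can be left
   non-local are not copied; where useful for debug info they are recorded in
   NONLOCALIZED_LIST instead.  Return the chain of remapped declarations.  */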
535 static tree
536 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
537 {
538 tree old_var;
539 tree new_decls = NULL_TREE;
540
541 /* Remap its variables. */
542 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
543 {
544 tree new_var;
545
546 if (can_be_nonlocal (old_var, id))
547 {
548 if (TREE_CODE (old_var) == VAR_DECL
549 && ! DECL_EXTERNAL (old_var)
550 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
551 cfun->local_decls = tree_cons (NULL_TREE, old_var,
552 cfun->local_decls);
553 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
554 && !DECL_IGNORED_P (old_var)
555 && nonlocalized_list)
556 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
557 continue;
558 }
559
560 /* Remap the variable. */
561 new_var = remap_decl (old_var, id);
562
563 /* If we didn't remap this variable, we can't mess with its
564 TREE_CHAIN. If we remapped this variable to the return slot, it's
565 already declared somewhere else, so don't declare it here. */
566
567 if (new_var == id->retvar)
568 ;
569 else if (!new_var)
570 {
571 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
572 && !DECL_IGNORED_P (old_var)
573 && nonlocalized_list)
574 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
575 }
576 else
577 {
578 gcc_assert (DECL_P (new_var));
579 TREE_CHAIN (new_var) = new_decls;
580 new_decls = new_var;
581 }
582 }
583
584 return nreverse (new_decls);
585 }
586
587 /* Copy the BLOCK to contain remapped versions of the variables
588 therein. And hook the new block into the block-tree. */
589
590 static void
591 remap_block (tree *block, copy_body_data *id)
592 {
593 tree old_block;
594 tree new_block;
595
596 /* Make the new block. */
597 old_block = *block;
598 new_block = make_node (BLOCK);
599 TREE_USED (new_block) = TREE_USED (old_block);
600 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
601 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
602 BLOCK_NONLOCALIZED_VARS (new_block)
603 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
604 *block = new_block;
605
606 /* Remap its variables. */
607 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
608 &BLOCK_NONLOCALIZED_VARS (new_block),
609 id);
610
611 if (id->transform_lang_insert_block)
612 id->transform_lang_insert_block (new_block);
613
614 /* Remember the remapped block. */
615 insert_decl_map (id, old_block, new_block);
616 }
617
618 /* Copy the whole block tree and root it in id->block. */
619 static tree
620 remap_blocks (tree block, copy_body_data *id)
621 {
622 tree t;
623 tree new_tree = block;
624
625 if (!block)
626 return NULL;
627
628 remap_block (&new_tree, id);
629 gcc_assert (new_tree != block);
630 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
631 prepend_lexical_block (new_tree, remap_blocks (t, id));
632 /* Blocks are in arbitrary order, but make things slightly prettier and do
633 not swap order when producing a copy. */
634 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
635 return new_tree;
636 }
637
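/* Replace the STATEMENT_LIST at *TP with a deep copy, recursing into any
   nested statement lists.  */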
638 static void
639 copy_statement_list (tree *tp)
640 {
641 tree_stmt_iterator oi, ni;
642 tree new_tree;
643
644 new_tree = alloc_stmt_list ();
645 ni = tsi_start (new_tree);
646 oi = tsi_start (*tp);
647 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
648 *tp = new_tree;
649
650 for (; !tsi_end_p (oi); tsi_next (&oi))
651 {
652 tree stmt = tsi_stmt (oi);
653 if (TREE_CODE (stmt) == STATEMENT_LIST)
654 copy_statement_list (&stmt);
655 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
656 }
657 }
658
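/* Copy the BIND_EXPR at *TP, remapping its block and the variables it binds
   using ID.  */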
659 static void
660 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
661 {
662 tree block = BIND_EXPR_BLOCK (*tp);
663 /* Copy (and replace) the statement. */
664 copy_tree_r (tp, walk_subtrees, NULL);
665 if (block)
666 {
667 remap_block (&block, id);
668 BIND_EXPR_BLOCK (*tp) = block;
669 }
670
671 if (BIND_EXPR_VARS (*tp))
672 /* This will remap a lot of the same decls again, but this should be
673 harmless. */
674 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
675 }
676
677
678 /* Create a new gimple_seq by remapping all the statements in BODY
679 using the inlining information in ID. */
680
681 gimple_seq
682 remap_gimple_seq (gimple_seq body, copy_body_data *id)
683 {
684 gimple_stmt_iterator si;
685 gimple_seq new_body = NULL;
686
687 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
688 {
689 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
690 gimple_seq_add_stmt (&new_body, new_stmt);
691 }
692
693 return new_body;
694 }
695
696
697 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
698 block using the mapping information in ID. */
699
700 static gimple
701 copy_gimple_bind (gimple stmt, copy_body_data *id)
702 {
703 gimple new_bind;
704 tree new_block, new_vars;
705 gimple_seq body, new_body;
706
707 /* Copy the statement. Note that we purposely don't use copy_stmt
708 here because we need to remap statements as we copy. */
709 body = gimple_bind_body (stmt);
710 new_body = remap_gimple_seq (body, id);
711
712 new_block = gimple_bind_block (stmt);
713 if (new_block)
714 remap_block (&new_block, id);
715
716 /* This will remap a lot of the same decls again, but this should be
717 harmless. */
718 new_vars = gimple_bind_vars (stmt);
719 if (new_vars)
720 new_vars = remap_decls (new_vars, NULL, id);
721
722 new_bind = gimple_build_bind (new_vars, new_body, new_block);
723
724 return new_bind;
725 }
726
727
728 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
729 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
730 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
731 recursing into the children nodes of *TP. */
732
733 static tree
734 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
735 {
736 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
737 copy_body_data *id = (copy_body_data *) wi_p->info;
738 tree fn = id->src_fn;
739
740 if (TREE_CODE (*tp) == SSA_NAME)
741 {
742 *tp = remap_ssa_name (*tp, id);
743 *walk_subtrees = 0;
744 return NULL;
745 }
746 else if (auto_var_in_fn_p (*tp, fn))
747 {
748 /* Local variables and labels need to be replaced by equivalent
749 variables. We don't want to copy static variables; there's
750 only one of those, no matter how many times we inline the
751 containing function. Similarly for globals from an outer
752 function. */
753 tree new_decl;
754
755 /* Remap the declaration. */
756 new_decl = remap_decl (*tp, id);
757 gcc_assert (new_decl);
758 /* Replace this variable with the copy. */
759 STRIP_TYPE_NOPS (new_decl);
760 /* ??? The C++ frontend uses void * pointer zero to initialize
761 any other type. This confuses the middle-end type verification.
762 As cloned bodies do not go through gimplification again the fixup
763 there doesn't trigger. */
764 if (TREE_CODE (new_decl) == INTEGER_CST
765 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
766 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
767 *tp = new_decl;
768 *walk_subtrees = 0;
769 }
770 else if (TREE_CODE (*tp) == STATEMENT_LIST)
771 gcc_unreachable ();
772 else if (TREE_CODE (*tp) == SAVE_EXPR)
773 gcc_unreachable ();
774 else if (TREE_CODE (*tp) == LABEL_DECL
775 && (!DECL_CONTEXT (*tp)
776 || decl_function_context (*tp) == id->src_fn))
777 /* These may need to be remapped for EH handling. */
778 *tp = remap_decl (*tp, id);
779 else if (TYPE_P (*tp))
780 /* Types may need remapping as well. */
781 *tp = remap_type (*tp, id);
782 else if (CONSTANT_CLASS_P (*tp))
783 {
784 /* If this is a constant, we have to copy the node iff the type
785 will be remapped. copy_tree_r will not copy a constant. */
786 tree new_type = remap_type (TREE_TYPE (*tp), id);
787
788 if (new_type == TREE_TYPE (*tp))
789 *walk_subtrees = 0;
790
791 else if (TREE_CODE (*tp) == INTEGER_CST)
792 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
793 TREE_INT_CST_HIGH (*tp));
794 else
795 {
796 *tp = copy_node (*tp);
797 TREE_TYPE (*tp) = new_type;
798 }
799 }
800 else
801 {
802 /* Otherwise, just copy the node. Note that copy_tree_r already
803 knows not to copy VAR_DECLs, etc., so this is safe. */
804 if (TREE_CODE (*tp) == INDIRECT_REF)
805 {
806 /* Get rid of *& from inline substitutions that can happen when a
807 pointer argument is an ADDR_EXPR. */
808 tree decl = TREE_OPERAND (*tp, 0);
809 tree *n;
810
811 n = (tree *) pointer_map_contains (id->decl_map, decl);
812 if (n)
813 {
814 tree type, new_tree, old;
815
816 /* If we happen to get an ADDR_EXPR in n->value, strip
817 it manually here as we'll eventually get ADDR_EXPRs
818 which lie about their types pointed to. In this case
819 build_fold_indirect_ref wouldn't strip the
820 INDIRECT_REF, but we absolutely rely on that. As
821 fold_indirect_ref does other useful transformations,
822 try that first, though. */
823 type = TREE_TYPE (TREE_TYPE (*n));
824 new_tree = unshare_expr (*n);
825 old = *tp;
826 *tp = gimple_fold_indirect_ref (new_tree);
827 if (!*tp)
828 {
829 if (TREE_CODE (new_tree) == ADDR_EXPR)
830 {
831 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
832 type, new_tree);
833 /* ??? We should either assert here or build
834 a VIEW_CONVERT_EXPR instead of blindly leaking
835 incompatible types to our IL. */
836 if (! *tp)
837 *tp = TREE_OPERAND (new_tree, 0);
838 }
839 else
840 {
841 *tp = build1 (INDIRECT_REF, type, new_tree);
842 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
843 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
844 }
845 }
846 *walk_subtrees = 0;
847 return NULL;
848 }
849 }
850
851 /* Here is the "usual case". Copy this tree node, and then
852 tweak some special cases. */
853 copy_tree_r (tp, walk_subtrees, NULL);
854
855 /* Global variables we haven't seen yet need to go into referenced
856 vars, unless they are only referenced from types. */
857 if (gimple_in_ssa_p (cfun)
858 && TREE_CODE (*tp) == VAR_DECL
859 && id->remapping_type_depth == 0
860 && !processing_debug_stmt)
861 add_referenced_var (*tp);
862
863 /* We should never have TREE_BLOCK set on non-statements. */
864 if (EXPR_P (*tp))
865 gcc_assert (!TREE_BLOCK (*tp));
866
867 if (TREE_CODE (*tp) != OMP_CLAUSE)
868 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
869
870 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
871 {
872 /* The copied TARGET_EXPR has never been expanded, even if the
873 original node was expanded already. */
874 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
875 TREE_OPERAND (*tp, 3) = NULL_TREE;
876 }
877 else if (TREE_CODE (*tp) == ADDR_EXPR)
878 {
879 /* Variable substitution need not be simple. In particular,
880 the INDIRECT_REF substitution above. Make sure that
881 TREE_CONSTANT and friends are up-to-date. But make sure
882 to not improperly set TREE_BLOCK on some sub-expressions. */
883 int invariant = is_gimple_min_invariant (*tp);
884 tree block = id->block;
885 id->block = NULL_TREE;
886 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
887 id->block = block;
888
889 /* Handle the case where we substituted an INDIRECT_REF
890 into the operand of the ADDR_EXPR. */
891 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
892 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
893 else
894 recompute_tree_invariant_for_addr_expr (*tp);
895
896 /* If this used to be invariant, but is not any longer,
897 then regimplification is probably needed. */
898 if (invariant && !is_gimple_min_invariant (*tp))
899 id->regimplify = true;
900
901 *walk_subtrees = 0;
902 }
903 }
904
905 /* Keep iterating. */
906 return NULL_TREE;
907 }
908
909
910 /* Called from copy_tree_body via walk_tree. DATA is really a
911 `copy_body_data *'. */
912
913 tree
914 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
915 {
916 copy_body_data *id = (copy_body_data *) data;
917 tree fn = id->src_fn;
918 tree new_block;
919
920 /* Begin by recognizing trees that we'll completely rewrite for the
921 inlining context. Our output for these trees is completely
922 different from our input (e.g. RETURN_EXPR is deleted, and morphs
923 into an edge). Further down, we'll handle trees that get
924 duplicated and/or tweaked. */
925
926 /* When requested, RETURN_EXPRs should be transformed to just the
927 contained MODIFY_EXPR. The branch semantics of the return will
928 be handled elsewhere by manipulating the CFG rather than a statement. */
929 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
930 {
931 tree assignment = TREE_OPERAND (*tp, 0);
932
933 /* If we're returning something, just turn that into an
934 assignment into the equivalent of the original RESULT_DECL.
935 If the "assignment" is just the result decl, the result
936 decl has already been set (e.g. a recent "foo (&result_decl,
937 ...)"); just toss the entire RETURN_EXPR. */
938 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
939 {
940 /* Replace the RETURN_EXPR with (a copy of) the
941 MODIFY_EXPR hanging underneath. */
942 *tp = copy_node (assignment);
943 }
944 else /* Else the RETURN_EXPR returns no value. */
945 {
946 *tp = NULL;
947 return (tree) (void *)1;
948 }
949 }
950 else if (TREE_CODE (*tp) == SSA_NAME)
951 {
952 *tp = remap_ssa_name (*tp, id);
953 *walk_subtrees = 0;
954 return NULL;
955 }
956
957 /* Local variables and labels need to be replaced by equivalent
958 variables. We don't want to copy static variables; there's only
959 one of those, no matter how many times we inline the containing
960 function. Similarly for globals from an outer function. */
961 else if (auto_var_in_fn_p (*tp, fn))
962 {
963 tree new_decl;
964
965 /* Remap the declaration. */
966 new_decl = remap_decl (*tp, id);
967 gcc_assert (new_decl);
968 /* Replace this variable with the copy. */
969 STRIP_TYPE_NOPS (new_decl);
970 *tp = new_decl;
971 *walk_subtrees = 0;
972 }
973 else if (TREE_CODE (*tp) == STATEMENT_LIST)
974 copy_statement_list (tp);
975 else if (TREE_CODE (*tp) == SAVE_EXPR
976 || TREE_CODE (*tp) == TARGET_EXPR)
977 remap_save_expr (tp, id->decl_map, walk_subtrees);
978 else if (TREE_CODE (*tp) == LABEL_DECL
979 && (! DECL_CONTEXT (*tp)
980 || decl_function_context (*tp) == id->src_fn))
981 /* These may need to be remapped for EH handling. */
982 *tp = remap_decl (*tp, id);
983 else if (TREE_CODE (*tp) == BIND_EXPR)
984 copy_bind_expr (tp, walk_subtrees, id);
985 /* Types may need remapping as well. */
986 else if (TYPE_P (*tp))
987 *tp = remap_type (*tp, id);
988
989 /* If this is a constant, we have to copy the node iff the type will be
990 remapped. copy_tree_r will not copy a constant. */
991 else if (CONSTANT_CLASS_P (*tp))
992 {
993 tree new_type = remap_type (TREE_TYPE (*tp), id);
994
995 if (new_type == TREE_TYPE (*tp))
996 *walk_subtrees = 0;
997
998 else if (TREE_CODE (*tp) == INTEGER_CST)
999 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1000 TREE_INT_CST_HIGH (*tp));
1001 else
1002 {
1003 *tp = copy_node (*tp);
1004 TREE_TYPE (*tp) = new_type;
1005 }
1006 }
1007
1008 /* Otherwise, just copy the node. Note that copy_tree_r already
1009 knows not to copy VAR_DECLs, etc., so this is safe. */
1010 else
1011 {
1012 /* Here we handle trees that are not completely rewritten.
1013 First we detect some inlining-induced bogosities for
1014 discarding. */
1015 if (TREE_CODE (*tp) == MODIFY_EXPR
1016 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1017 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1018 {
1019 /* Some assignments VAR = VAR; don't generate any rtl code
1020 and thus don't count as variable modification. Avoid
1021 keeping bogosities like 0 = 0. */
1022 tree decl = TREE_OPERAND (*tp, 0), value;
1023 tree *n;
1024
1025 n = (tree *) pointer_map_contains (id->decl_map, decl);
1026 if (n)
1027 {
1028 value = *n;
1029 STRIP_TYPE_NOPS (value);
1030 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1031 {
1032 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1033 return copy_tree_body_r (tp, walk_subtrees, data);
1034 }
1035 }
1036 }
1037 else if (TREE_CODE (*tp) == INDIRECT_REF)
1038 {
1039 /* Get rid of *& from inline substitutions that can happen when a
1040 pointer argument is an ADDR_EXPR. */
1041 tree decl = TREE_OPERAND (*tp, 0);
1042 tree *n;
1043
1044 n = (tree *) pointer_map_contains (id->decl_map, decl);
1045 if (n)
1046 {
1047 tree new_tree;
1048 tree old;
1049 /* If we happen to get an ADDR_EXPR in n->value, strip
1050 it manually here as we'll eventually get ADDR_EXPRs
1051 which lie about their types pointed to. In this case
1052 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1053 but we absolutely rely on that. As fold_indirect_ref
1054 does other useful transformations, try that first, though. */
1055 tree type = TREE_TYPE (TREE_TYPE (*n));
1056 if (id->do_not_unshare)
1057 new_tree = *n;
1058 else
1059 new_tree = unshare_expr (*n);
1060 old = *tp;
1061 *tp = gimple_fold_indirect_ref (new_tree);
1062 if (! *tp)
1063 {
1064 if (TREE_CODE (new_tree) == ADDR_EXPR)
1065 {
1066 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1067 type, new_tree);
1068 /* ??? We should either assert here or build
1069 a VIEW_CONVERT_EXPR instead of blindly leaking
1070 incompatible types to our IL. */
1071 if (! *tp)
1072 *tp = TREE_OPERAND (new_tree, 0);
1073 }
1074 else
1075 {
1076 *tp = build1 (INDIRECT_REF, type, new_tree);
1077 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1078 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1079 }
1080 }
1081 *walk_subtrees = 0;
1082 return NULL;
1083 }
1084 }
1085
1086 /* Here is the "usual case". Copy this tree node, and then
1087 tweak some special cases. */
1088 copy_tree_r (tp, walk_subtrees, NULL);
1089
1090 /* Global variables we haven't seen yet need to go into referenced
1091 vars, unless they are only referenced from types or debug stmts. */
1092 if (gimple_in_ssa_p (cfun)
1093 && TREE_CODE (*tp) == VAR_DECL
1094 && id->remapping_type_depth == 0
1095 && !processing_debug_stmt)
1096 add_referenced_var (*tp);
1097
1098 /* If EXPR has a block defined, map it to the newly constructed block.
1099 When inlining we want EXPRs without a block to appear in the block
1100 of the function call if we are not remapping a type. */
1101 if (EXPR_P (*tp))
1102 {
1103 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1104 if (TREE_BLOCK (*tp))
1105 {
1106 tree *n;
1107 n = (tree *) pointer_map_contains (id->decl_map,
1108 TREE_BLOCK (*tp));
1109 gcc_assert (n);
1110 new_block = *n;
1111 }
1112 TREE_BLOCK (*tp) = new_block;
1113 }
1114
1115 if (TREE_CODE (*tp) != OMP_CLAUSE)
1116 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1117
1118 /* The copied TARGET_EXPR has never been expanded, even if the
1119 original node was expanded already. */
1120 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1121 {
1122 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1123 TREE_OPERAND (*tp, 3) = NULL_TREE;
1124 }
1125
1126 /* Variable substitution need not be simple. In particular, the
1127 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1128 and friends are up-to-date. */
1129 else if (TREE_CODE (*tp) == ADDR_EXPR)
1130 {
1131 int invariant = is_gimple_min_invariant (*tp);
1132 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1133
1134 /* Handle the case where we substituted an INDIRECT_REF
1135 into the operand of the ADDR_EXPR. */
1136 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1137 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1138 else
1139 recompute_tree_invariant_for_addr_expr (*tp);
1140
1141 /* If this used to be invariant, but is not any longer,
1142 then regimplification is probably needed. */
1143 if (invariant && !is_gimple_min_invariant (*tp))
1144 id->regimplify = true;
1145
1146 *walk_subtrees = 0;
1147 }
1148 }
1149
1150 /* Keep iterating. */
1151 return NULL_TREE;
1152 }
1153
1154 /* Helper for remap_gimple_stmt. Given an EH region number for the
1155 source function, map that to the duplicate EH region number in
1156 the destination function. */
1157
1158 static int
1159 remap_eh_region_nr (int old_nr, copy_body_data *id)
1160 {
1161 eh_region old_r, new_r;
1162 void **slot;
1163
1164 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1165 slot = pointer_map_contains (id->eh_map, old_r);
1166 new_r = (eh_region) *slot;
1167
1168 return new_r->index;
1169 }
1170
1171 /* Similar, but operate on INTEGER_CSTs. */
1172
1173 static tree
1174 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1175 {
1176 int old_nr, new_nr;
1177
1178 old_nr = tree_low_cst (old_t_nr, 0);
1179 new_nr = remap_eh_region_nr (old_nr, id);
1180
1181 return build_int_cst (NULL, new_nr);
1182 }
1183
1184 /* Helper for copy_bb. Remap statement STMT using the inlining
1185 information in ID. Return the new statement copy. */
1186
1187 static gimple
1188 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1189 {
1190 gimple copy = NULL;
1191 struct walk_stmt_info wi;
1192 tree new_block;
1193 bool skip_first = false;
1194
1195 /* Begin by recognizing trees that we'll completely rewrite for the
1196 inlining context. Our output for these trees is completely
1197 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1198 into an edge). Further down, we'll handle trees that get
1199 duplicated and/or tweaked. */
1200
1201 /* When requested, GIMPLE_RETURNs should be transformed to just the
1202 contained GIMPLE_ASSIGN. The branch semantics of the return will
1203 be handled elsewhere by manipulating the CFG rather than the
1204 statement. */
1205 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1206 {
1207 tree retval = gimple_return_retval (stmt);
1208
1209 /* If we're returning something, just turn that into an
1210 assignment into the equivalent of the original RESULT_DECL.
1211 If RETVAL is just the result decl, the result decl has
1212 already been set (e.g. a recent "foo (&result_decl, ...)");
1213 just toss the entire GIMPLE_RETURN. */
1214 if (retval && TREE_CODE (retval) != RESULT_DECL)
1215 {
1216 copy = gimple_build_assign (id->retvar, retval);
1217 /* id->retvar is already substituted. Skip it on later remapping. */
1218 skip_first = true;
1219 }
1220 else
1221 return gimple_build_nop ();
1222 }
1223 else if (gimple_has_substatements (stmt))
1224 {
1225 gimple_seq s1, s2;
1226
1227 /* When cloning bodies from the C++ front end, we will be handed bodies
1228 in High GIMPLE form. Handle here all the High GIMPLE statements that
1229 have embedded statements. */
1230 switch (gimple_code (stmt))
1231 {
1232 case GIMPLE_BIND:
1233 copy = copy_gimple_bind (stmt, id);
1234 break;
1235
1236 case GIMPLE_CATCH:
1237 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1238 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1239 break;
1240
1241 case GIMPLE_EH_FILTER:
1242 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1243 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1244 break;
1245
1246 case GIMPLE_TRY:
1247 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1248 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1249 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1250 break;
1251
1252 case GIMPLE_WITH_CLEANUP_EXPR:
1253 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1254 copy = gimple_build_wce (s1);
1255 break;
1256
1257 case GIMPLE_OMP_PARALLEL:
1258 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1259 copy = gimple_build_omp_parallel
1260 (s1,
1261 gimple_omp_parallel_clauses (stmt),
1262 gimple_omp_parallel_child_fn (stmt),
1263 gimple_omp_parallel_data_arg (stmt));
1264 break;
1265
1266 case GIMPLE_OMP_TASK:
1267 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1268 copy = gimple_build_omp_task
1269 (s1,
1270 gimple_omp_task_clauses (stmt),
1271 gimple_omp_task_child_fn (stmt),
1272 gimple_omp_task_data_arg (stmt),
1273 gimple_omp_task_copy_fn (stmt),
1274 gimple_omp_task_arg_size (stmt),
1275 gimple_omp_task_arg_align (stmt));
1276 break;
1277
1278 case GIMPLE_OMP_FOR:
1279 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1280 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1281 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1282 gimple_omp_for_collapse (stmt), s2);
1283 {
1284 size_t i;
1285 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1286 {
1287 gimple_omp_for_set_index (copy, i,
1288 gimple_omp_for_index (stmt, i));
1289 gimple_omp_for_set_initial (copy, i,
1290 gimple_omp_for_initial (stmt, i));
1291 gimple_omp_for_set_final (copy, i,
1292 gimple_omp_for_final (stmt, i));
1293 gimple_omp_for_set_incr (copy, i,
1294 gimple_omp_for_incr (stmt, i));
1295 gimple_omp_for_set_cond (copy, i,
1296 gimple_omp_for_cond (stmt, i));
1297 }
1298 }
1299 break;
1300
1301 case GIMPLE_OMP_MASTER:
1302 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1303 copy = gimple_build_omp_master (s1);
1304 break;
1305
1306 case GIMPLE_OMP_ORDERED:
1307 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1308 copy = gimple_build_omp_ordered (s1);
1309 break;
1310
1311 case GIMPLE_OMP_SECTION:
1312 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1313 copy = gimple_build_omp_section (s1);
1314 break;
1315
1316 case GIMPLE_OMP_SECTIONS:
1317 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1318 copy = gimple_build_omp_sections
1319 (s1, gimple_omp_sections_clauses (stmt));
1320 break;
1321
1322 case GIMPLE_OMP_SINGLE:
1323 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1324 copy = gimple_build_omp_single
1325 (s1, gimple_omp_single_clauses (stmt));
1326 break;
1327
1328 case GIMPLE_OMP_CRITICAL:
1329 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1330 copy
1331 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1332 break;
1333
1334 default:
1335 gcc_unreachable ();
1336 }
1337 }
1338 else
1339 {
1340 if (gimple_assign_copy_p (stmt)
1341 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1342 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1343 {
1344 /* Here we handle statements that are not completely rewritten.
1345 First we detect some inlining-induced bogosities for
1346 discarding. */
1347
1348 /* Some assignments VAR = VAR; don't generate any rtl code
1349 and thus don't count as variable modification. Avoid
1350 keeping bogosities like 0 = 0. */
1351 tree decl = gimple_assign_lhs (stmt), value;
1352 tree *n;
1353
1354 n = (tree *) pointer_map_contains (id->decl_map, decl);
1355 if (n)
1356 {
1357 value = *n;
1358 STRIP_TYPE_NOPS (value);
1359 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1360 return gimple_build_nop ();
1361 }
1362 }
1363
1364 if (gimple_debug_bind_p (stmt))
1365 {
1366 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1367 gimple_debug_bind_get_value (stmt),
1368 stmt);
1369 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1370 return copy;
1371 }
1372
1373 /* Create a new deep copy of the statement. */
1374 copy = gimple_copy (stmt);
1375
1376 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1377 RESX and EH_DISPATCH. */
1378 if (id->eh_map)
1379 switch (gimple_code (copy))
1380 {
1381 case GIMPLE_CALL:
1382 {
1383 tree r, fndecl = gimple_call_fndecl (copy);
1384 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1385 switch (DECL_FUNCTION_CODE (fndecl))
1386 {
1387 case BUILT_IN_EH_COPY_VALUES:
1388 r = gimple_call_arg (copy, 1);
1389 r = remap_eh_region_tree_nr (r, id);
1390 gimple_call_set_arg (copy, 1, r);
1391 /* FALLTHRU */
1392
1393 case BUILT_IN_EH_POINTER:
1394 case BUILT_IN_EH_FILTER:
1395 r = gimple_call_arg (copy, 0);
1396 r = remap_eh_region_tree_nr (r, id);
1397 gimple_call_set_arg (copy, 0, r);
1398 break;
1399
1400 default:
1401 break;
1402 }
1403
1404 /* Reset alias info if we didn't apply measures to
1405 keep it valid over inlining by setting DECL_PT_UID. */
1406 if (!id->src_cfun->gimple_df
1407 || !id->src_cfun->gimple_df->ipa_pta)
1408 gimple_call_reset_alias_info (copy);
1409 }
1410 break;
1411
1412 case GIMPLE_RESX:
1413 {
1414 int r = gimple_resx_region (copy);
1415 r = remap_eh_region_nr (r, id);
1416 gimple_resx_set_region (copy, r);
1417 }
1418 break;
1419
1420 case GIMPLE_EH_DISPATCH:
1421 {
1422 int r = gimple_eh_dispatch_region (copy);
1423 r = remap_eh_region_nr (r, id);
1424 gimple_eh_dispatch_set_region (copy, r);
1425 }
1426 break;
1427
1428 default:
1429 break;
1430 }
1431 }
1432
1433 /* If STMT has a block defined, map it to the newly constructed
1434 block. When inlining we want statements without a block to
1435 appear in the block of the function call. */
1436 new_block = id->block;
1437 if (gimple_block (copy))
1438 {
1439 tree *n;
1440 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1441 gcc_assert (n);
1442 new_block = *n;
1443 }
1444
1445 gimple_set_block (copy, new_block);
1446
1447 if (gimple_debug_bind_p (copy))
1448 return copy;
1449
1450 /* Remap all the operands in COPY. */
1451 memset (&wi, 0, sizeof (wi));
1452 wi.info = id;
1453 if (skip_first)
1454 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1455 else
1456 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1457
1458 /* Clear the copied virtual operands. We are not remapping them here
1459 but are going to recreate them from scratch. */
1460 if (gimple_has_mem_ops (copy))
1461 {
1462 gimple_set_vdef (copy, NULL_TREE);
1463 gimple_set_vuse (copy, NULL_TREE);
1464 }
1465
1466 return copy;
1467 }
1468
1469
1470 /* Copy basic block, scaling the profile accordingly. Edges will be taken
1471 care of later. */
1472
1473 static basic_block
1474 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1475 gcov_type count_scale)
1476 {
1477 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1478 basic_block copy_basic_block;
1479 tree decl;
1480 gcov_type freq;
1481
1482 /* create_basic_block() will append every new block to
1483 basic_block_info automatically. */
1484 copy_basic_block = create_basic_block (NULL, (void *) 0,
1485 (basic_block) bb->prev_bb->aux);
1486 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1487
1488 /* We are going to rebuild frequencies from scratch. These values
1489 matter only a little for driving canonicalize_loop_headers. */
1490 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1491
1492 /* We recompute frequencies after inlining, so this is quite safe. */
1493 if (freq > BB_FREQ_MAX)
1494 freq = BB_FREQ_MAX;
1495 copy_basic_block->frequency = freq;
1496
1497 copy_gsi = gsi_start_bb (copy_basic_block);
1498
1499 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1500 {
1501 gimple stmt = gsi_stmt (gsi);
1502 gimple orig_stmt = stmt;
1503
1504 id->regimplify = false;
1505 stmt = remap_gimple_stmt (stmt, id);
1506 if (gimple_nop_p (stmt))
1507 continue;
1508
1509 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1510 seq_gsi = copy_gsi;
1511
1512 /* With return slot optimization we can end up with
1513 non-gimple (foo *)&this->m, fix that here. */
1514 if (is_gimple_assign (stmt)
1515 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1516 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1517 {
1518 tree new_rhs;
1519 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1520 gimple_assign_rhs1 (stmt),
1521 true, NULL, false, GSI_NEW_STMT);
1522 gimple_assign_set_rhs1 (stmt, new_rhs);
1523 id->regimplify = false;
1524 }
1525
1526 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1527
1528 if (id->regimplify)
1529 gimple_regimplify_operands (stmt, &seq_gsi);
1530
1531 /* If copy_basic_block has been empty at the start of this iteration,
1532 call gsi_start_bb again to get at the newly added statements. */
1533 if (gsi_end_p (copy_gsi))
1534 copy_gsi = gsi_start_bb (copy_basic_block);
1535 else
1536 gsi_next (&copy_gsi);
1537
1538 /* Process the new statement. The call to gimple_regimplify_operands
1539 possibly turned the statement into multiple statements, we
1540 need to process all of them. */
1541 do
1542 {
1543 tree fn;
1544
1545 stmt = gsi_stmt (copy_gsi);
1546 if (is_gimple_call (stmt)
1547 && gimple_call_va_arg_pack_p (stmt)
1548 && id->gimple_call)
1549 {
1550 /* __builtin_va_arg_pack () should be replaced by
1551 all arguments corresponding to ... in the caller. */
1552 tree p;
1553 gimple new_call;
1554 VEC(tree, heap) *argarray;
1555 size_t nargs = gimple_call_num_args (id->gimple_call);
1556 size_t n;
1557
1558 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1559 nargs--;
1560
1561 /* Create the new array of arguments. */
1562 n = nargs + gimple_call_num_args (stmt);
1563 argarray = VEC_alloc (tree, heap, n);
1564 VEC_safe_grow (tree, heap, argarray, n);
1565
1566 /* Copy all the arguments before '...' */
1567 memcpy (VEC_address (tree, argarray),
1568 gimple_call_arg_ptr (stmt, 0),
1569 gimple_call_num_args (stmt) * sizeof (tree));
1570
1571 /* Append the arguments passed in '...' */
1572 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1573 gimple_call_arg_ptr (id->gimple_call, 0)
1574 + (gimple_call_num_args (id->gimple_call) - nargs),
1575 nargs * sizeof (tree));
1576
1577 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1578 argarray);
1579
1580 VEC_free (tree, heap, argarray);
1581
1582 /* Copy all GIMPLE_CALL flags, location and block, except
1583 GF_CALL_VA_ARG_PACK. */
1584 gimple_call_copy_flags (new_call, stmt);
1585 gimple_call_set_va_arg_pack (new_call, false);
1586 gimple_set_location (new_call, gimple_location (stmt));
1587 gimple_set_block (new_call, gimple_block (stmt));
1588 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1589
1590 gsi_replace (&copy_gsi, new_call, false);
1591 gimple_set_bb (stmt, NULL);
1592 stmt = new_call;
1593 }
1594 else if (is_gimple_call (stmt)
1595 && id->gimple_call
1596 && (decl = gimple_call_fndecl (stmt))
1597 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1598 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1599 {
1600 /* __builtin_va_arg_pack_len () should be replaced by
1601 the number of anonymous arguments. */
1602 size_t nargs = gimple_call_num_args (id->gimple_call);
1603 tree count, p;
1604 gimple new_stmt;
1605
1606 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1607 nargs--;
1608
1609 count = build_int_cst (integer_type_node, nargs);
1610 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1611 gsi_replace (&copy_gsi, new_stmt, false);
1612 stmt = new_stmt;
1613 }
1614
1615 /* Statements produced by inlining can be unfolded, especially
1616 when we constant propagated some operands. We can't fold
1617 them right now for two reasons:
1618 1) folding requires SSA_NAME_DEF_STMTs to be correct
1619 2) we can't change function calls to builtins.
1620 So we just mark the statement for later folding. We mark
1621 all new statements, instead of just the statements changed
1622 by some nontrivial substitution, so that even statements made
1623 foldable indirectly are updated. If this turns out to be
1624 expensive, copy_body can be told to watch for nontrivial
1625 changes. */
1626 if (id->statements_to_fold)
1627 pointer_set_insert (id->statements_to_fold, stmt);
1628
1629 /* We're duplicating a CALL_EXPR. Find any corresponding
1630 callgraph edges and update or duplicate them. */
1631 if (is_gimple_call (stmt))
1632 {
1633 struct cgraph_edge *edge;
1634 int flags;
1635
1636 switch (id->transform_call_graph_edges)
1637 {
1638 case CB_CGE_DUPLICATE:
1639 edge = cgraph_edge (id->src_node, orig_stmt);
1640 if (edge)
1641 {
1642 int edge_freq = edge->frequency;
1643 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1644 gimple_uid (stmt),
1645 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1646 edge->frequency, true);
1647 /* We could also just rescale the frequency, but
1648 doing so would introduce roundoff errors and make
1649 verifier unhappy. */
1650 edge->frequency
1651 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1652 copy_basic_block);
1653 if (dump_file
1654 && profile_status_for_function (cfun) != PROFILE_ABSENT
1655 && (edge_freq > edge->frequency + 10
1656 || edge_freq < edge->frequency - 10))
1657 {
1658 fprintf (dump_file, "Edge frequency estimated by "
1659 "cgraph %i diverge from inliner's estimate %i\n",
1660 edge_freq,
1661 edge->frequency);
1662 fprintf (dump_file,
1663 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1664 bb->index,
1665 bb->frequency,
1666 copy_basic_block->frequency);
1667 }
1668 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1669 }
1670 break;
1671
1672 case CB_CGE_MOVE_CLONES:
1673 cgraph_set_call_stmt_including_clones (id->dst_node,
1674 orig_stmt, stmt);
1675 edge = cgraph_edge (id->dst_node, stmt);
1676 break;
1677
1678 case CB_CGE_MOVE:
1679 edge = cgraph_edge (id->dst_node, orig_stmt);
1680 if (edge)
1681 cgraph_set_call_stmt (edge, stmt);
1682 break;
1683
1684 default:
1685 gcc_unreachable ();
1686 }
1687
1688 /* Constant propagation on arguments done during inlining
1689 may create new direct calls. Produce an edge for them. */
1690 if ((!edge
1691 || (edge->indirect_call
1692 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1693 && is_gimple_call (stmt)
1694 && (fn = gimple_call_fndecl (stmt)) != NULL)
1695 {
1696 struct cgraph_node *dest = cgraph_node (fn);
1697
1698 /* We have a missing edge in the callgraph. This can happen
1699 when a previous inlining turned an indirect call into a
1700 direct call by constant propagating arguments, or when we are
1701 producing a dead clone (for further cloning). In all
1702 other cases we hit a bug (incorrect node sharing is the
1703 most common reason for missing edges). */
1704 gcc_assert (dest->needed || !dest->analyzed
1705 || !id->src_node->analyzed);
1706 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1707 cgraph_create_edge_including_clones
1708 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1709 compute_call_stmt_bb_frequency (id->dst_node->decl,
1710 copy_basic_block),
1711 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1712 else
1713 cgraph_create_edge (id->dst_node, dest, stmt,
1714 bb->count,
1715 compute_call_stmt_bb_frequency
1716 (id->dst_node->decl, copy_basic_block),
1717 bb->loop_depth)->inline_failed
1718 = CIF_ORIGINALLY_INDIRECT_CALL;
1719 if (dump_file)
1720 {
1721 fprintf (dump_file, "Created new direct edge to %s",
1722 cgraph_node_name (dest));
1723 }
1724 }
1725
1726 flags = gimple_call_flags (stmt);
1727 if (flags & ECF_MAY_BE_ALLOCA)
1728 cfun->calls_alloca = true;
1729 if (flags & ECF_RETURNS_TWICE)
1730 cfun->calls_setjmp = true;
1731 }
1732
1733 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1734 id->eh_map, id->eh_lp_nr);
1735
1736 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1737 {
1738 ssa_op_iter i;
1739 tree def;
1740
1741 find_new_referenced_vars (gsi_stmt (copy_gsi));
1742 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1743 if (TREE_CODE (def) == SSA_NAME)
1744 SSA_NAME_DEF_STMT (def) = stmt;
1745 }
1746
1747 gsi_next (&copy_gsi);
1748 }
1749 while (!gsi_end_p (copy_gsi));
1750
1751 copy_gsi = gsi_last_bb (copy_basic_block);
1752 }
1753
1754 return copy_basic_block;
1755 }
1756
1757 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
1758 SSA form is quite easy, since the dominator relationship for the old
1759 basic blocks does not change.
1760
1761 There is, however, an exception: inlining may change the dominator
1762 relation across EH edges from basic blocks within the inlined function
1763 to landing pads in the function we inline into.
1764
1765 The function fills in PHI_RESULTs of such PHI nodes if they refer
1766 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1767 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1768 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1769 set, which means that there will be no overlapping live ranges
1770 for the underlying symbol.
1771
1772 This might change in the future if we allow redirection of EH edges;
1773 we might then want to change the way we build the CFG pre-inlining
1774 to include all the possible edges. */
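/* A rough illustration of the case handled below: a copied block BB
   ends in a throwing statement with an EH edge E into a landing pad
   of the function we inline into, and that landing pad already has a
   PHI fed by the pre-existing edge RE from RET_BB.  For a PHI whose
   result is a gimple register we reuse the argument flowing in along
   RE as the argument for E; otherwise we only mark the underlying
   symbol for renaming and let the SSA updater recompute it.  */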
1775 static void
1776 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1777 bool can_throw, bool nonlocal_goto)
1778 {
1779 edge e;
1780 edge_iterator ei;
1781
1782 FOR_EACH_EDGE (e, ei, bb->succs)
1783 if (!e->dest->aux
1784 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1785 {
1786 gimple phi;
1787 gimple_stmt_iterator si;
1788
1789 if (!nonlocal_goto)
1790 gcc_assert (e->flags & EDGE_EH);
1791
1792 if (!can_throw)
1793 gcc_assert (!(e->flags & EDGE_EH));
1794
1795 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1796 {
1797 edge re;
1798
1799 phi = gsi_stmt (si);
1800
1801 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1802 gcc_assert (!e->dest->aux);
1803
1804 gcc_assert ((e->flags & EDGE_EH)
1805 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1806
1807 if (!is_gimple_reg (PHI_RESULT (phi)))
1808 {
1809 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1810 continue;
1811 }
1812
1813 re = find_edge (ret_bb, e->dest);
1814 gcc_assert (re);
1815 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1816 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1817
1818 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1819 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1820 }
1821 }
1822 }
1823
1824
1825 /* Copy edges from BB into its copy constructed earlier, scaling the
1826 profile accordingly. EH edges will be taken care of later. Assume
1827 the aux pointers point to the copies of each BB. */
1828
1829 static void
1830 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1831 {
1832 basic_block new_bb = (basic_block) bb->aux;
1833 edge_iterator ei;
1834 edge old_edge;
1835 gimple_stmt_iterator si;
1836 int flags;
1837
1838 /* Use the indices from the original blocks to create edges for the
1839 new ones. */
1840 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1841 if (!(old_edge->flags & EDGE_EH))
1842 {
1843 edge new_edge;
1844
1845 flags = old_edge->flags;
1846
1847 /* Return edges do get a FALLTHRU flag when they get inlined. */
1848 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1849 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1850 flags |= EDGE_FALLTHRU;
1851 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1852 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1853 new_edge->probability = old_edge->probability;
1854 }
1855
1856 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1857 return;
1858
1859 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1860 {
1861 gimple copy_stmt;
1862 bool can_throw, nonlocal_goto;
1863
1864 copy_stmt = gsi_stmt (si);
1865 if (!is_gimple_debug (copy_stmt))
1866 {
1867 update_stmt (copy_stmt);
1868 if (gimple_in_ssa_p (cfun))
1869 mark_symbols_for_renaming (copy_stmt);
1870 }
1871
1872 /* Do this before the possible split_block. */
1873 gsi_next (&si);
1874
1875 /* If this tree could throw an exception, there are two
1876 cases where we need to add abnormal edge(s): the
1877 tree wasn't in a region and there is a "current
1878 region" in the caller; or the original tree had
1879 EH edges. In both cases split the block after the tree,
1880 and add abnormal edge(s) as needed; we need both
1881 those from the callee and the caller.
1882 We check whether the copy can throw, because the const
1883 propagation can change an INDIRECT_REF which throws
1884 into a COMPONENT_REF which doesn't. If the copy
1885 can throw, the original could also throw. */
1886 can_throw = stmt_can_throw_internal (copy_stmt);
1887 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1888
1889 if (can_throw || nonlocal_goto)
1890 {
1891 if (!gsi_end_p (si))
1892 /* Note that bb's predecessor edges aren't necessarily
1893 right at this point; split_block doesn't care. */
1894 {
1895 edge e = split_block (new_bb, copy_stmt);
1896
1897 new_bb = e->dest;
1898 new_bb->aux = e->src->aux;
1899 si = gsi_start_bb (new_bb);
1900 }
1901 }
1902
1903 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1904 make_eh_dispatch_edges (copy_stmt);
1905 else if (can_throw)
1906 make_eh_edges (copy_stmt);
1907
1908 if (nonlocal_goto)
1909 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1910
1911 if ((can_throw || nonlocal_goto)
1912 && gimple_in_ssa_p (cfun))
1913 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1914 can_throw, nonlocal_goto);
1915 }
1916 }
1917
1918 /* Copy the PHIs. All blocks and edges are copied, some blocks
1919 were possibly split and new outgoing EH edges inserted.
1920 BB points to the block of the original function and the AUX
1921 pointers link the original and newly copied blocks. */
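/* In short: for each PHI whose result is a gimple register we build a
   matching PHI in the copied block, find the original edge that
   corresponds to each incoming edge of the copy via the AUX pointers,
   and remap that edge's argument; PHIs whose result is not a gimple
   register are simply skipped here.  */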
1922
1923 static void
1924 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1925 {
1926 basic_block const new_bb = (basic_block) bb->aux;
1927 edge_iterator ei;
1928 gimple phi;
1929 gimple_stmt_iterator si;
1930
1931 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1932 {
1933 tree res, new_res;
1934 gimple new_phi;
1935 edge new_edge;
1936
1937 phi = gsi_stmt (si);
1938 res = PHI_RESULT (phi);
1939 new_res = res;
1940 if (is_gimple_reg (res))
1941 {
1942 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1943 SSA_NAME_DEF_STMT (new_res)
1944 = new_phi = create_phi_node (new_res, new_bb);
1945 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1946 {
1947 edge const old_edge
1948 = find_edge ((basic_block) new_edge->src->aux, bb);
1949 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1950 tree new_arg = arg;
1951 tree block = id->block;
1952 id->block = NULL_TREE;
1953 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1954 id->block = block;
1955 gcc_assert (new_arg);
1956 /* With return slot optimization we can end up with
1957 non-gimple (foo *)&this->m, fix that here. */
1958 if (TREE_CODE (new_arg) != SSA_NAME
1959 && TREE_CODE (new_arg) != FUNCTION_DECL
1960 && !is_gimple_val (new_arg))
1961 {
1962 gimple_seq stmts = NULL;
1963 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1964 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1965 }
1966 add_phi_arg (new_phi, new_arg, new_edge,
1967 gimple_phi_arg_location_from_edge (phi, old_edge));
1968 }
1969 }
1970 }
1971 }
1972
1973
1974 /* Wrapper for remap_decl so it can be used as a callback. */
1975
1976 static tree
1977 remap_decl_1 (tree decl, void *data)
1978 {
1979 return remap_decl (decl, (copy_body_data *) data);
1980 }
1981
1982 /* Build the struct function and associated data structures for the new
1983 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1984
1985 static void
1986 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1987 {
1988 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1989 gcov_type count_scale;
1990
1991 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1992 count_scale = (REG_BR_PROB_BASE * count
1993 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1994 else
1995 count_scale = REG_BR_PROB_BASE;
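  /* Worked example with made-up numbers: if the source entry block
     count is 1000 and the new clone is expected to be entered
     COUNT = 250 times, count_scale becomes REG_BR_PROB_BASE / 4, so
     the entry/exit counts copied below end up scaled to a quarter of
     the originals.  */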
1996
1997 /* Register specific tree functions. */
1998 gimple_register_cfg_hooks ();
1999
2000 /* Get clean struct function. */
2001 push_struct_function (new_fndecl);
2002
2003 /* We will rebuild these, so just sanity check that they are empty. */
2004 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2005 gcc_assert (cfun->local_decls == NULL);
2006 gcc_assert (cfun->cfg == NULL);
2007 gcc_assert (cfun->decl == new_fndecl);
2008
2009 /* Copy items we preserve during cloning. */
2010 cfun->static_chain_decl = src_cfun->static_chain_decl;
2011 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2012 cfun->function_end_locus = src_cfun->function_end_locus;
2013 cfun->curr_properties = src_cfun->curr_properties;
2014 cfun->last_verified = src_cfun->last_verified;
2015 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2016 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2017 cfun->function_frequency = src_cfun->function_frequency;
2018 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2019 cfun->stdarg = src_cfun->stdarg;
2020 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2021 cfun->after_inlining = src_cfun->after_inlining;
2022 cfun->returns_struct = src_cfun->returns_struct;
2023 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2024 cfun->after_tree_profile = src_cfun->after_tree_profile;
2025
2026 init_empty_tree_cfg ();
2027
2028 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2029 ENTRY_BLOCK_PTR->count =
2030 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2031 REG_BR_PROB_BASE);
2032 ENTRY_BLOCK_PTR->frequency
2033 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2034 EXIT_BLOCK_PTR->count =
2035 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2036 REG_BR_PROB_BASE);
2037 EXIT_BLOCK_PTR->frequency =
2038 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2039 if (src_cfun->eh)
2040 init_eh_for_function ();
2041
2042 if (src_cfun->gimple_df)
2043 {
2044 init_tree_ssa (cfun);
2045 cfun->gimple_df->in_ssa_p = true;
2046 init_ssa_operands ();
2047 }
2048 pop_cfun ();
2049 }
2050
2051 /* Make a copy of the body of FN so that it can be inserted inline in
2052 another function. Walks FN via CFG, returns new fndecl. */
2053
2054 static tree
2055 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2056 basic_block entry_block_map, basic_block exit_block_map)
2057 {
2058 tree callee_fndecl = id->src_fn;
2059 /* Original cfun for the callee, doesn't change. */
2060 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2061 struct function *cfun_to_copy;
2062 basic_block bb;
2063 tree new_fndecl = NULL;
2064 gcov_type count_scale;
2065 int last;
2066
2067 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2068 count_scale = (REG_BR_PROB_BASE * count
2069 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2070 else
2071 count_scale = REG_BR_PROB_BASE;
2072
2073 /* Register specific tree functions. */
2074 gimple_register_cfg_hooks ();
2075
2076 /* Must have a CFG at this point. */
2077 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2078 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2079
2080 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2081
2082 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2083 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2084 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2085 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2086
2087 /* Duplicate any exception-handling regions. */
2088 if (cfun->eh)
2089 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2090 remap_decl_1, id);
2091
2092 /* Use aux pointers to map the original blocks to their copies. */
2093 FOR_EACH_BB_FN (bb, cfun_to_copy)
2094 {
2095 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2096 bb->aux = new_bb;
2097 new_bb->aux = bb;
2098 }
2099
2100 last = last_basic_block;
2101
2102 /* Now that we've duplicated the blocks, duplicate their edges. */
2103 FOR_ALL_BB_FN (bb, cfun_to_copy)
2104 copy_edges_for_bb (bb, count_scale, exit_block_map);
2105
2106 if (gimple_in_ssa_p (cfun))
2107 FOR_ALL_BB_FN (bb, cfun_to_copy)
2108 copy_phis_for_bb (bb, id);
2109
2110 FOR_ALL_BB_FN (bb, cfun_to_copy)
2111 {
2112 ((basic_block)bb->aux)->aux = NULL;
2113 bb->aux = NULL;
2114 }
2115
2116 /* Zero out AUX fields of newly created blocks during EH edge
2117 insertion. */
2118 for (; last < last_basic_block; last++)
2119 BASIC_BLOCK (last)->aux = NULL;
2120 entry_block_map->aux = NULL;
2121 exit_block_map->aux = NULL;
2122
2123 if (id->eh_map)
2124 {
2125 pointer_map_destroy (id->eh_map);
2126 id->eh_map = NULL;
2127 }
2128
2129 return new_fndecl;
2130 }
2131
2132 /* Copy the debug STMT using ID. We deal with these statements in a
2133 special way: if any variable in their VALUE expression wasn't
2134 remapped yet, we won't remap it, because that would get decl uids
2135 out of sync, causing codegen differences between -g and -g0. If
2136 this arises, we drop the VALUE expression altogether. */
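/* For example (roughly as it appears in GIMPLE dumps): a bind such as
   "# DEBUG x => y + 1" keeps its value only if "y" has already been
   remapped for this copy; otherwise the value is reset and the bind
   effectively becomes "# DEBUG x => NULL".  */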
2137
2138 static void
2139 copy_debug_stmt (gimple stmt, copy_body_data *id)
2140 {
2141 tree t, *n;
2142 struct walk_stmt_info wi;
2143
2144 t = id->block;
2145 if (gimple_block (stmt))
2146 {
2147 tree *n;
2148 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2149 if (n)
2150 t = *n;
2151 }
2152 gimple_set_block (stmt, t);
2153
2154 /* Remap all the operands in STMT. */
2155 memset (&wi, 0, sizeof (wi));
2156 wi.info = id;
2157
2158 processing_debug_stmt = 1;
2159
2160 t = gimple_debug_bind_get_var (stmt);
2161
2162 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2163 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2164 {
2165 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2166 t = *n;
2167 }
2168 else if (TREE_CODE (t) == VAR_DECL
2169 && !TREE_STATIC (t)
2170 && gimple_in_ssa_p (cfun)
2171 && !pointer_map_contains (id->decl_map, t)
2172 && !var_ann (t))
2173 /* T is a non-localized variable. */;
2174 else
2175 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2176
2177 gimple_debug_bind_set_var (stmt, t);
2178
2179 if (gimple_debug_bind_has_value_p (stmt))
2180 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2181 remap_gimple_op_r, &wi, NULL);
2182
2183 /* Punt if any decl couldn't be remapped. */
2184 if (processing_debug_stmt < 0)
2185 gimple_debug_bind_reset_value (stmt);
2186
2187 processing_debug_stmt = 0;
2188
2189 update_stmt (stmt);
2190 if (gimple_in_ssa_p (cfun))
2191 mark_symbols_for_renaming (stmt);
2192 }
2193
2194 /* Process deferred debug stmts. In order to give values better odds
2195 of being successfully remapped, we delay the processing of debug
2196 stmts until all other stmts that might require remapping are
2197 processed. */
2198
2199 static void
2200 copy_debug_stmts (copy_body_data *id)
2201 {
2202 size_t i;
2203 gimple stmt;
2204
2205 if (!id->debug_stmts)
2206 return;
2207
2208 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2209 copy_debug_stmt (stmt, id);
2210
2211 VEC_free (gimple, heap, id->debug_stmts);
2212 }
2213
2214 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2215 another function. */
2216
2217 static tree
2218 copy_tree_body (copy_body_data *id)
2219 {
2220 tree fndecl = id->src_fn;
2221 tree body = DECL_SAVED_TREE (fndecl);
2222
2223 walk_tree (&body, copy_tree_body_r, id, NULL);
2224
2225 return body;
2226 }
2227
2228 /* Make a copy of the body of FN so that it can be inserted inline in
2229 another function. */
2230
2231 static tree
2232 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2233 basic_block entry_block_map, basic_block exit_block_map)
2234 {
2235 tree fndecl = id->src_fn;
2236 tree body;
2237
2238 /* If this body has a CFG, walk CFG and copy. */
2239 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2240 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2241 copy_debug_stmts (id);
2242
2243 return body;
2244 }
2245
2246 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2247 defined in function FN, or of a data member thereof. */
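/* A hypothetical example: when foo calls itself recursively as
   "foo (&local)", the ADDR_EXPR "&local" refers to an automatic
   variable of the function being inlined, so the caller of this
   predicate must not propagate it as if it were an invariant.  */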
2248
2249 static bool
2250 self_inlining_addr_expr (tree value, tree fn)
2251 {
2252 tree var;
2253
2254 if (TREE_CODE (value) != ADDR_EXPR)
2255 return false;
2256
2257 var = get_base_address (TREE_OPERAND (value, 0));
2258
2259 return var && auto_var_in_fn_p (var, fn);
2260 }
2261
2262 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2263 lexical block and line number information from BASE_STMT, if given,
2264 or from the last stmt of the block otherwise. */
2265
2266 static gimple
2267 insert_init_debug_bind (copy_body_data *id,
2268 basic_block bb, tree var, tree value,
2269 gimple base_stmt)
2270 {
2271 gimple note;
2272 gimple_stmt_iterator gsi;
2273 tree tracked_var;
2274
2275 if (!gimple_in_ssa_p (id->src_cfun))
2276 return NULL;
2277
2278 if (!MAY_HAVE_DEBUG_STMTS)
2279 return NULL;
2280
2281 tracked_var = target_for_debug_bind (var);
2282 if (!tracked_var)
2283 return NULL;
2284
2285 if (bb)
2286 {
2287 gsi = gsi_last_bb (bb);
2288 if (!base_stmt && !gsi_end_p (gsi))
2289 base_stmt = gsi_stmt (gsi);
2290 }
2291
2292 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2293
2294 if (bb)
2295 {
2296 if (!gsi_end_p (gsi))
2297 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2298 else
2299 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2300 }
2301
2302 return note;
2303 }
2304
2305 static void
2306 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2307 {
2308 /* If VAR represents a zero-sized variable, it's possible that the
2309 assignment statement may result in no gimple statements. */
2310 if (init_stmt)
2311 {
2312 gimple_stmt_iterator si = gsi_last_bb (bb);
2313
2314 /* We can end up with init statements that store to a non-register
2315 from a rhs with a conversion. Handle that here by forcing the
2316 rhs into a temporary. gimple_regimplify_operands is not
2317 prepared to do this for us. */
2318 if (!is_gimple_debug (init_stmt)
2319 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2320 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2321 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2322 {
2323 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2324 gimple_expr_type (init_stmt),
2325 gimple_assign_rhs1 (init_stmt));
2326 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2327 GSI_NEW_STMT);
2328 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2329 gimple_assign_set_rhs1 (init_stmt, rhs);
2330 }
2331 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2332 gimple_regimplify_operands (init_stmt, &si);
2333 mark_symbols_for_renaming (init_stmt);
2334
2335 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2336 {
2337 tree var, def = gimple_assign_lhs (init_stmt);
2338
2339 if (TREE_CODE (def) == SSA_NAME)
2340 var = SSA_NAME_VAR (def);
2341 else
2342 var = def;
2343
2344 insert_init_debug_bind (id, bb, var, def, init_stmt);
2345 }
2346 }
2347 }
2348
2349 /* Initialize parameter P with VALUE. If needed, produce an init statement
2350 at the end of BB. When BB is NULL, we return the init statement to be
2351 output later. */
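/* A minimal sketch of the transformation (hypothetical names): when
   inlining "foo (a + 1)" with parameter "p", we make a VAR_DECL
   copied from "p" and, unless the value can simply be propagated,
   emit an init statement along the lines of "p = a + 1" at the end
   of BB, ahead of the inlined body.  */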
2352 static gimple
2353 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2354 basic_block bb, tree *vars)
2355 {
2356 gimple init_stmt = NULL;
2357 tree var;
2358 tree rhs = value;
2359 tree def = (gimple_in_ssa_p (cfun)
2360 ? gimple_default_def (id->src_cfun, p) : NULL);
2361
2362 if (value
2363 && value != error_mark_node
2364 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2365 {
2366 if (fold_convertible_p (TREE_TYPE (p), value))
2367 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2368 else
2369 /* ??? For valid (GIMPLE) programs we should not end up here.
2370 Still if something has gone wrong and we end up with truly
2371 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2372 to not leak invalid GIMPLE to the following passes. */
2373 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2374 }
2375
2376 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2377 here since the type of this decl must be visible to the calling
2378 function. */
2379 var = copy_decl_to_var (p, id);
2380
2381 /* We're actually using the newly-created var. */
2382 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2383 {
2384 get_var_ann (var);
2385 add_referenced_var (var);
2386 }
2387
2388 /* Declare this new variable. */
2389 TREE_CHAIN (var) = *vars;
2390 *vars = var;
2391
2392 /* Make gimplifier happy about this variable. */
2393 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2394
2395 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2396 we would not need to create a new variable here at all, if it
2397 weren't for debug info. Still, we can just use the argument
2398 value. */
2399 if (TREE_READONLY (p)
2400 && !TREE_ADDRESSABLE (p)
2401 && value && !TREE_SIDE_EFFECTS (value)
2402 && !def)
2403 {
2404 /* We may produce non-gimple trees by adding NOPs or introduce
2405 invalid sharing when the operand is not really constant.
2406 It is not a big deal to prohibit constant propagation here, as
2407 we will constant propagate in the DOM1 pass anyway. */
2408 if (is_gimple_min_invariant (value)
2409 && useless_type_conversion_p (TREE_TYPE (p),
2410 TREE_TYPE (value))
2411 /* We have to be very careful about ADDR_EXPR. Make sure
2412 the base variable isn't a local variable of the inlined
2413 function, e.g., when doing recursive inlining, direct or
2414 mutually-recursive or whatever, which is why we don't
2415 just test whether fn == current_function_decl. */
2416 && ! self_inlining_addr_expr (value, fn))
2417 {
2418 insert_decl_map (id, p, value);
2419 insert_debug_decl_map (id, p, var);
2420 return insert_init_debug_bind (id, bb, var, value, NULL);
2421 }
2422 }
2423
2424 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2425 that way, when the PARM_DECL is encountered, it will be
2426 automatically replaced by the VAR_DECL. */
2427 insert_decl_map (id, p, var);
2428
2429 /* Even if P was TREE_READONLY, the new VAR should not be.
2430 In the original code, we would have constructed a
2431 temporary, and then the function body would have never
2432 changed the value of P. However, now, we will be
2433 constructing VAR directly. The constructor body may
2434 change its value multiple times as it is being
2435 constructed. Therefore, it must not be TREE_READONLY;
2436 the back-end assumes that a TREE_READONLY variable is
2437 assigned to only once. */
2438 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2439 TREE_READONLY (var) = 0;
2440
2441 /* If there is no setup required and we are in SSA, take the easy route
2442 replacing all SSA names representing the function parameter by the
2443 SSA name passed to the function.
2444
2445 We need to construct a map for the variable anyway, as it might be used
2446 in different SSA names when the parameter is set in the function.
2447
2448 Do the replacement at -O0 for const arguments replaced by constants.
2449 This is important for builtin_constant_p and other constructs requiring
2450 a constant argument to be visible in the inlined function body. */
2451 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2452 && (optimize
2453 || (TREE_READONLY (p)
2454 && is_gimple_min_invariant (rhs)))
2455 && (TREE_CODE (rhs) == SSA_NAME
2456 || is_gimple_min_invariant (rhs))
2457 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2458 {
2459 insert_decl_map (id, def, rhs);
2460 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2461 }
2462
2463 /* If the value of the argument is never used, don't bother initializing
2464 it. */
2465 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2466 {
2467 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2468 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2469 }
2470
2471 /* Initialize this VAR_DECL from the equivalent argument. Convert
2472 the argument to the proper type in case it was promoted. */
2473 if (value)
2474 {
2475 if (rhs == error_mark_node)
2476 {
2477 insert_decl_map (id, p, var);
2478 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2479 }
2480
2481 STRIP_USELESS_TYPE_CONVERSION (rhs);
2482
2483 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2484 keep our trees in gimple form. */
2485 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2486 {
2487 def = remap_ssa_name (def, id);
2488 init_stmt = gimple_build_assign (def, rhs);
2489 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2490 set_default_def (var, NULL);
2491 }
2492 else
2493 init_stmt = gimple_build_assign (var, rhs);
2494
2495 if (bb && init_stmt)
2496 insert_init_stmt (id, bb, init_stmt);
2497 }
2498 return init_stmt;
2499 }
2500
2501 /* Generate code to initialize the parameters of the function at the
2502 top of the stack in ID from the GIMPLE_CALL STMT. */
2503
2504 static void
2505 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2506 tree fn, basic_block bb)
2507 {
2508 tree parms;
2509 size_t i;
2510 tree p;
2511 tree vars = NULL_TREE;
2512 tree static_chain = gimple_call_chain (stmt);
2513
2514 /* Figure out what the parameters are. */
2515 parms = DECL_ARGUMENTS (fn);
2516
2517 /* Loop through the parameter declarations, replacing each with an
2518 equivalent VAR_DECL, appropriately initialized. */
2519 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2520 {
2521 tree val;
2522 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2523 setup_one_parameter (id, p, val, fn, bb, &vars);
2524 }
2525
2526 /* Initialize the static chain. */
2527 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2528 gcc_assert (fn != current_function_decl);
2529 if (p)
2530 {
2531 /* No static chain? Seems like a bug in tree-nested.c. */
2532 gcc_assert (static_chain);
2533
2534 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2535 }
2536
2537 declare_inline_vars (id->block, vars);
2538 }
2539
2540
2541 /* Declare a return variable to replace the RESULT_DECL for the
2542 function we are calling. An appropriate DECL_STMT is returned.
2543 The USE_STMT is filled to contain a use of the declaration to
2544 indicate the return value of the function.
2545
2546 RETURN_SLOT, if non-null, is the place where to store the result. It
2547 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2548 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2549
2550 The return value is a (possibly null) value that holds the result
2551 as seen by the caller. */
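/* A rough example (hypothetical names): for "x = foo ()", MODIFY_DEST
   is "x"; when it is safe we reuse "x" itself as the return variable,
   otherwise we declare a fresh local, map the callee's RESULT_DECL to
   it, and the USE expression returned here is what later replaces the
   original call.  */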
2552
2553 static tree
2554 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2555 {
2556 tree callee = id->src_fn;
2557 tree caller = id->dst_fn;
2558 tree result = DECL_RESULT (callee);
2559 tree callee_type = TREE_TYPE (result);
2560 tree caller_type;
2561 tree var, use;
2562
2563 /* Handle type-mismatches in the function declaration return type
2564 vs. the call expression. */
2565 if (modify_dest)
2566 caller_type = TREE_TYPE (modify_dest);
2567 else
2568 caller_type = TREE_TYPE (TREE_TYPE (callee));
2569
2570 /* We don't need to do anything for functions that don't return
2571 anything. */
2572 if (!result || VOID_TYPE_P (callee_type))
2573 return NULL_TREE;
2574
2575 /* If there was a return slot, then the return value is the
2576 dereferenced address of that object. */
2577 if (return_slot)
2578 {
2579 /* The front end shouldn't have used both return_slot and
2580 a modify expression. */
2581 gcc_assert (!modify_dest);
2582 if (DECL_BY_REFERENCE (result))
2583 {
2584 tree return_slot_addr = build_fold_addr_expr (return_slot);
2585 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2586
2587 /* We are going to construct *&return_slot and we can't do that
2588 for variables believed not to be addressable.
2589
2590 FIXME: This check can possibly trigger, because values returned
2591 via the return slot optimization are not believed to have their
2592 address taken by alias analysis. */
2593 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2594 if (gimple_in_ssa_p (cfun))
2595 {
2596 HOST_WIDE_INT bitsize;
2597 HOST_WIDE_INT bitpos;
2598 tree offset;
2599 enum machine_mode mode;
2600 int unsignedp;
2601 int volatilep;
2602 tree base;
2603 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2604 &offset,
2605 &mode, &unsignedp, &volatilep,
2606 false);
2607 if (TREE_CODE (base) == INDIRECT_REF)
2608 base = TREE_OPERAND (base, 0);
2609 if (TREE_CODE (base) == SSA_NAME)
2610 base = SSA_NAME_VAR (base);
2611 mark_sym_for_renaming (base);
2612 }
2613 var = return_slot_addr;
2614 }
2615 else
2616 {
2617 var = return_slot;
2618 gcc_assert (TREE_CODE (var) != SSA_NAME);
2619 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2620 }
2621 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2622 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2623 && !DECL_GIMPLE_REG_P (result)
2624 && DECL_P (var))
2625 DECL_GIMPLE_REG_P (var) = 0;
2626 use = NULL;
2627 goto done;
2628 }
2629
2630 /* All types requiring non-trivial constructors should have been handled. */
2631 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2632
2633 /* Attempt to avoid creating a new temporary variable. */
2634 if (modify_dest
2635 && TREE_CODE (modify_dest) != SSA_NAME)
2636 {
2637 bool use_it = false;
2638
2639 /* We can't use MODIFY_DEST if there's type promotion involved. */
2640 if (!useless_type_conversion_p (callee_type, caller_type))
2641 use_it = false;
2642
2643 /* ??? If we're assigning to a variable sized type, then we must
2644 reuse the destination variable, because we've no good way to
2645 create variable sized temporaries at this point. */
2646 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2647 use_it = true;
2648
2649 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2650 reuse it as the result of the call directly. Don't do this if
2651 it would promote MODIFY_DEST to addressable. */
2652 else if (TREE_ADDRESSABLE (result))
2653 use_it = false;
2654 else
2655 {
2656 tree base_m = get_base_address (modify_dest);
2657
2658 /* If the base isn't a decl, then it's a pointer, and we don't
2659 know where that's going to go. */
2660 if (!DECL_P (base_m))
2661 use_it = false;
2662 else if (is_global_var (base_m))
2663 use_it = false;
2664 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2665 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2666 && !DECL_GIMPLE_REG_P (result)
2667 && DECL_GIMPLE_REG_P (base_m))
2668 use_it = false;
2669 else if (!TREE_ADDRESSABLE (base_m))
2670 use_it = true;
2671 }
2672
2673 if (use_it)
2674 {
2675 var = modify_dest;
2676 use = NULL;
2677 goto done;
2678 }
2679 }
2680
2681 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2682
2683 var = copy_result_decl_to_var (result, id);
2684 if (gimple_in_ssa_p (cfun))
2685 {
2686 get_var_ann (var);
2687 add_referenced_var (var);
2688 }
2689
2690 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2691 DECL_STRUCT_FUNCTION (caller)->local_decls
2692 = tree_cons (NULL_TREE, var,
2693 DECL_STRUCT_FUNCTION (caller)->local_decls);
2694
2695 /* Do not have the rest of GCC warn about this variable as it should
2696 not be visible to the user. */
2697 TREE_NO_WARNING (var) = 1;
2698
2699 declare_inline_vars (id->block, var);
2700
2701 /* Build the use expr. If the return type of the function was
2702 promoted, convert it back to the expected type. */
2703 use = var;
2704 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2705 use = fold_convert (caller_type, var);
2706
2707 STRIP_USELESS_TYPE_CONVERSION (use);
2708
2709 if (DECL_BY_REFERENCE (result))
2710 {
2711 TREE_ADDRESSABLE (var) = 1;
2712 var = build_fold_addr_expr (var);
2713 }
2714
2715 done:
2716 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2717 way, when the RESULT_DECL is encountered, it will be
2718 automatically replaced by the VAR_DECL. */
2719 insert_decl_map (id, result, var);
2720
2721 /* Remember this so we can ignore it in remap_decls. */
2722 id->retvar = var;
2723
2724 return use;
2725 }
2726
2727 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2728 to a local label. */
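/* What this looks for is the GNU C label-as-value extension, e.g.
   (sketch): "static void *targets[] = { &&lab1, &&lab2 };" in the
   body of FN; copying such a function would leave the stored
   addresses pointing at the labels of the original copy.  */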
2729
2730 static tree
2731 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2732 {
2733 tree node = *nodep;
2734 tree fn = (tree) fnp;
2735
2736 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2737 return node;
2738
2739 if (TYPE_P (node))
2740 *walk_subtrees = 0;
2741
2742 return NULL_TREE;
2743 }
2744
2745 /* Determine if the function can be copied. If so return NULL. If
2746 not return a string describing the reason for failure. */
2747
2748 static const char *
2749 copy_forbidden (struct function *fun, tree fndecl)
2750 {
2751 const char *reason = fun->cannot_be_copied_reason;
2752 tree step;
2753
2754 /* Only examine the function once. */
2755 if (fun->cannot_be_copied_set)
2756 return reason;
2757
2758 /* We cannot copy a function that receives a non-local goto
2759 because we cannot remap the destination label used in the
2760 function that is performing the non-local goto. */
2761 /* ??? Actually, this should be possible, if we work at it.
2762 No doubt there's just a handful of places that simply
2763 assume it doesn't happen and don't substitute properly. */
2764 if (fun->has_nonlocal_label)
2765 {
2766 reason = G_("function %q+F can never be copied "
2767 "because it receives a non-local goto");
2768 goto fail;
2769 }
2770
2771 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2772 {
2773 tree decl = TREE_VALUE (step);
2774
2775 if (TREE_CODE (decl) == VAR_DECL
2776 && TREE_STATIC (decl)
2777 && !DECL_EXTERNAL (decl)
2778 && DECL_INITIAL (decl)
2779 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2780 has_label_address_in_static_1,
2781 fndecl))
2782 {
2783 reason = G_("function %q+F can never be copied because it saves "
2784 "address of local label in a static variable");
2785 goto fail;
2786 }
2787 }
2788
2789 fail:
2790 fun->cannot_be_copied_reason = reason;
2791 fun->cannot_be_copied_set = true;
2792 return reason;
2793 }
2794
2795
2796 static const char *inline_forbidden_reason;
2797
2798 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2799 iff a function cannot be inlined. Also sets the reason why. */
2800
2801 static tree
2802 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2803 struct walk_stmt_info *wip)
2804 {
2805 tree fn = (tree) wip->info;
2806 tree t;
2807 gimple stmt = gsi_stmt (*gsi);
2808
2809 switch (gimple_code (stmt))
2810 {
2811 case GIMPLE_CALL:
2812 /* Refuse to inline an alloca call unless the user explicitly forced it,
2813 as this may change the program's memory overhead drastically when the
2814 function using alloca is called in a loop. In the GCC present in
2815 SPEC2000, inlining into schedule_block caused it to require 2GB of
2816 RAM instead of 256MB. */
2817 if (gimple_alloca_call_p (stmt)
2818 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2819 {
2820 inline_forbidden_reason
2821 = G_("function %q+F can never be inlined because it uses "
2822 "alloca (override using the always_inline attribute)");
2823 *handled_ops_p = true;
2824 return fn;
2825 }
2826
2827 t = gimple_call_fndecl (stmt);
2828 if (t == NULL_TREE)
2829 break;
2830
2831 /* We cannot inline functions that call setjmp. */
2832 if (setjmp_call_p (t))
2833 {
2834 inline_forbidden_reason
2835 = G_("function %q+F can never be inlined because it uses setjmp");
2836 *handled_ops_p = true;
2837 return t;
2838 }
2839
2840 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2841 switch (DECL_FUNCTION_CODE (t))
2842 {
2843 /* We cannot inline functions that take a variable number of
2844 arguments. */
2845 case BUILT_IN_VA_START:
2846 case BUILT_IN_NEXT_ARG:
2847 case BUILT_IN_VA_END:
2848 inline_forbidden_reason
2849 = G_("function %q+F can never be inlined because it "
2850 "uses variable argument lists");
2851 *handled_ops_p = true;
2852 return t;
2853
2854 case BUILT_IN_LONGJMP:
2855 /* We can't inline functions that call __builtin_longjmp at
2856 all. The non-local goto machinery really requires the
2857 destination be in a different function. If we allow the
2858 function calling __builtin_longjmp to be inlined into the
2859 function calling __builtin_setjmp, Things will Go Awry. */
2860 inline_forbidden_reason
2861 = G_("function %q+F can never be inlined because "
2862 "it uses setjmp-longjmp exception handling");
2863 *handled_ops_p = true;
2864 return t;
2865
2866 case BUILT_IN_NONLOCAL_GOTO:
2867 /* Similarly. */
2868 inline_forbidden_reason
2869 = G_("function %q+F can never be inlined because "
2870 "it uses non-local goto");
2871 *handled_ops_p = true;
2872 return t;
2873
2874 case BUILT_IN_RETURN:
2875 case BUILT_IN_APPLY_ARGS:
2876 /* If a __builtin_apply_args caller would be inlined,
2877 it would be saving arguments of the function it has
2878 been inlined into. Similarly __builtin_return would
2879 return from the function the inline has been inlined into. */
2880 inline_forbidden_reason
2881 = G_("function %q+F can never be inlined because "
2882 "it uses __builtin_return or __builtin_apply_args");
2883 *handled_ops_p = true;
2884 return t;
2885
2886 default:
2887 break;
2888 }
2889 break;
2890
2891 case GIMPLE_GOTO:
2892 t = gimple_goto_dest (stmt);
2893
2894 /* We will not inline a function which uses computed goto. The
2895 addresses of its local labels, which may be tucked into
2896 global storage, are of course not constant across
2897 instantiations, which causes unexpected behavior. */
2898 if (TREE_CODE (t) != LABEL_DECL)
2899 {
2900 inline_forbidden_reason
2901 = G_("function %q+F can never be inlined "
2902 "because it contains a computed goto");
2903 *handled_ops_p = true;
2904 return t;
2905 }
2906 break;
2907
2908 default:
2909 break;
2910 }
2911
2912 *handled_ops_p = false;
2913 return NULL_TREE;
2914 }
2915
2916 /* Return true if FNDECL is a function that cannot be inlined into
2917 another one. */
2918
2919 static bool
2920 inline_forbidden_p (tree fndecl)
2921 {
2922 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2923 struct walk_stmt_info wi;
2924 struct pointer_set_t *visited_nodes;
2925 basic_block bb;
2926 bool forbidden_p = false;
2927
2928 /* First check for shared reasons not to copy the code. */
2929 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2930 if (inline_forbidden_reason != NULL)
2931 return true;
2932
2933 /* Next, walk the statements of the function looking for
2934 constructs we can't handle or that are non-optimal for inlining. */
2935 visited_nodes = pointer_set_create ();
2936 memset (&wi, 0, sizeof (wi));
2937 wi.info = (void *) fndecl;
2938 wi.pset = visited_nodes;
2939
2940 FOR_EACH_BB_FN (bb, fun)
2941 {
2942 gimple ret;
2943 gimple_seq seq = bb_seq (bb);
2944 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2945 forbidden_p = (ret != NULL);
2946 if (forbidden_p)
2947 break;
2948 }
2949
2950 pointer_set_destroy (visited_nodes);
2951 return forbidden_p;
2952 }
2953
2954 /* Returns nonzero if FN is a function that does not have any
2955 fundamental inline blocking properties. */
2956
2957 bool
2958 tree_inlinable_function_p (tree fn)
2959 {
2960 bool inlinable = true;
2961 bool do_warning;
2962 tree always_inline;
2963
2964 /* If we've already decided this function shouldn't be inlined,
2965 there's no need to check again. */
2966 if (DECL_UNINLINABLE (fn))
2967 return false;
2968
2969 /* We only warn for functions declared `inline' by the user. */
2970 do_warning = (warn_inline
2971 && DECL_DECLARED_INLINE_P (fn)
2972 && !DECL_NO_INLINE_WARNING_P (fn)
2973 && !DECL_IN_SYSTEM_HEADER (fn));
2974
2975 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2976
2977 if (flag_no_inline
2978 && always_inline == NULL)
2979 {
2980 if (do_warning)
2981 warning (OPT_Winline, "function %q+F can never be inlined because it "
2982 "is suppressed using -fno-inline", fn);
2983 inlinable = false;
2984 }
2985
2986 /* Don't auto-inline anything that might not be bound within
2987 this unit of translation. */
2988 else if (!DECL_DECLARED_INLINE_P (fn)
2989 && DECL_REPLACEABLE_P (fn))
2990 inlinable = false;
2991
2992 else if (!function_attribute_inlinable_p (fn))
2993 {
2994 if (do_warning)
2995 warning (OPT_Winline, "function %q+F can never be inlined because it "
2996 "uses attributes conflicting with inlining", fn);
2997 inlinable = false;
2998 }
2999
3000 else if (inline_forbidden_p (fn))
3001 {
3002 /* See if we should warn about uninlinable functions. Previously,
3003 some of these warnings would be issued while trying to expand
3004 the function inline, but that would cause multiple warnings
3005 about functions that would for example call alloca. But since
3006 this is a property of the function, just one warning is enough.
3007 As a bonus we can now give more details about the reason why a
3008 function is not inlinable. */
3009 if (always_inline)
3010 sorry (inline_forbidden_reason, fn);
3011 else if (do_warning)
3012 warning (OPT_Winline, inline_forbidden_reason, fn);
3013
3014 inlinable = false;
3015 }
3016
3017 /* Squirrel away the result so that we don't have to check again. */
3018 DECL_UNINLINABLE (fn) = !inlinable;
3019
3020 return inlinable;
3021 }
3022
3023 /* Estimate the cost of a memory move. Use the machine-dependent
3024 word size and take a possible memcpy call into account. */
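/* For instance, assuming MOVE_MAX_PIECES is 8 on the target: a
   24-byte structure is charged (24 + 7) / 8 = 3, while anything big
   enough to be expanded as a memcpy call is charged a flat 4 (three
   arguments plus the call itself).  */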
3025
3026 int
3027 estimate_move_cost (tree type)
3028 {
3029 HOST_WIDE_INT size;
3030
3031 gcc_assert (!VOID_TYPE_P (type));
3032
3033 size = int_size_in_bytes (type);
3034
3035 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3036 /* Cost of a memcpy call, 3 arguments and the call. */
3037 return 4;
3038 else
3039 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3040 }
3041
3042 /* Returns the cost of operation CODE, according to WEIGHTS. */
3043
3044 static int
3045 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3046 tree op1 ATTRIBUTE_UNUSED, tree op2)
3047 {
3048 switch (code)
3049 {
3050 /* These are "free" conversions, or their presumed cost
3051 is folded into other operations. */
3052 case RANGE_EXPR:
3053 CASE_CONVERT:
3054 case COMPLEX_EXPR:
3055 case PAREN_EXPR:
3056 return 0;
3057
3058 /* Assign cost of 1 to usual operations.
3059 ??? We may consider mapping RTL costs to this. */
3060 case COND_EXPR:
3061 case VEC_COND_EXPR:
3062
3063 case PLUS_EXPR:
3064 case POINTER_PLUS_EXPR:
3065 case MINUS_EXPR:
3066 case MULT_EXPR:
3067
3068 case ADDR_SPACE_CONVERT_EXPR:
3069 case FIXED_CONVERT_EXPR:
3070 case FIX_TRUNC_EXPR:
3071
3072 case NEGATE_EXPR:
3073 case FLOAT_EXPR:
3074 case MIN_EXPR:
3075 case MAX_EXPR:
3076 case ABS_EXPR:
3077
3078 case LSHIFT_EXPR:
3079 case RSHIFT_EXPR:
3080 case LROTATE_EXPR:
3081 case RROTATE_EXPR:
3082 case VEC_LSHIFT_EXPR:
3083 case VEC_RSHIFT_EXPR:
3084
3085 case BIT_IOR_EXPR:
3086 case BIT_XOR_EXPR:
3087 case BIT_AND_EXPR:
3088 case BIT_NOT_EXPR:
3089
3090 case TRUTH_ANDIF_EXPR:
3091 case TRUTH_ORIF_EXPR:
3092 case TRUTH_AND_EXPR:
3093 case TRUTH_OR_EXPR:
3094 case TRUTH_XOR_EXPR:
3095 case TRUTH_NOT_EXPR:
3096
3097 case LT_EXPR:
3098 case LE_EXPR:
3099 case GT_EXPR:
3100 case GE_EXPR:
3101 case EQ_EXPR:
3102 case NE_EXPR:
3103 case ORDERED_EXPR:
3104 case UNORDERED_EXPR:
3105
3106 case UNLT_EXPR:
3107 case UNLE_EXPR:
3108 case UNGT_EXPR:
3109 case UNGE_EXPR:
3110 case UNEQ_EXPR:
3111 case LTGT_EXPR:
3112
3113 case CONJ_EXPR:
3114
3115 case PREDECREMENT_EXPR:
3116 case PREINCREMENT_EXPR:
3117 case POSTDECREMENT_EXPR:
3118 case POSTINCREMENT_EXPR:
3119
3120 case REALIGN_LOAD_EXPR:
3121
3122 case REDUC_MAX_EXPR:
3123 case REDUC_MIN_EXPR:
3124 case REDUC_PLUS_EXPR:
3125 case WIDEN_SUM_EXPR:
3126 case WIDEN_MULT_EXPR:
3127 case DOT_PROD_EXPR:
3128
3129 case VEC_WIDEN_MULT_HI_EXPR:
3130 case VEC_WIDEN_MULT_LO_EXPR:
3131 case VEC_UNPACK_HI_EXPR:
3132 case VEC_UNPACK_LO_EXPR:
3133 case VEC_UNPACK_FLOAT_HI_EXPR:
3134 case VEC_UNPACK_FLOAT_LO_EXPR:
3135 case VEC_PACK_TRUNC_EXPR:
3136 case VEC_PACK_SAT_EXPR:
3137 case VEC_PACK_FIX_TRUNC_EXPR:
3138 case VEC_EXTRACT_EVEN_EXPR:
3139 case VEC_EXTRACT_ODD_EXPR:
3140 case VEC_INTERLEAVE_HIGH_EXPR:
3141 case VEC_INTERLEAVE_LOW_EXPR:
3142
3143 return 1;
3144
3145 /* A few special cases of expensive operations. This is useful
3146 to avoid inlining functions having too many of these. */
3147 case TRUNC_DIV_EXPR:
3148 case CEIL_DIV_EXPR:
3149 case FLOOR_DIV_EXPR:
3150 case ROUND_DIV_EXPR:
3151 case EXACT_DIV_EXPR:
3152 case TRUNC_MOD_EXPR:
3153 case CEIL_MOD_EXPR:
3154 case FLOOR_MOD_EXPR:
3155 case ROUND_MOD_EXPR:
3156 case RDIV_EXPR:
3157 if (TREE_CODE (op2) != INTEGER_CST)
3158 return weights->div_mod_cost;
3159 return 1;
3160
3161 default:
3162 /* We expect a copy assignment with no operator. */
3163 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3164 return 0;
3165 }
3166 }
3167
3168
3169 /* Estimate number of instructions that will be created by expanding
3170 the statements in the statement sequence STMTS.
3171 WEIGHTS contains weights attributed to various constructs. */
3172
3173 static
3174 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3175 {
3176 int cost;
3177 gimple_stmt_iterator gsi;
3178
3179 cost = 0;
3180 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3181 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3182
3183 return cost;
3184 }
3185
3186
3187 /* Estimate number of instructions that will be created by expanding STMT.
3188 WEIGHTS contains weights attributed to various constructs. */
3189
3190 int
3191 estimate_num_insns (gimple stmt, eni_weights *weights)
3192 {
3193 unsigned cost, i;
3194 enum gimple_code code = gimple_code (stmt);
3195 tree lhs;
3196 tree rhs;
3197
3198 switch (code)
3199 {
3200 case GIMPLE_ASSIGN:
3201 /* Try to estimate the cost of assignments. We have two cases to
3202 deal with:
3203 1) Simple assignments to registers;
3204 2) Stores to things that must live in memory. This includes
3205 "normal" stores to scalars, but also assignments of large
3206 structures, or constructors of big arrays;
3207
3208 Let us look at the first two cases, assuming we have "a = b + C":
3209 <GIMPLE_ASSIGN <var_decl "a">
3210 <plus_expr <var_decl "b"> <constant C>>
3211 If "a" is a GIMPLE register, the assignment to it is free on almost
3212 any target, because "a" usually ends up in a real register. Hence
3213 the only cost of this expression comes from the PLUS_EXPR, and we
3214 can ignore the GIMPLE_ASSIGN.
3215 If "a" is not a GIMPLE register, the assignment to "a" will most
3216 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3217 of moving something into "a", which we compute using the function
3218 estimate_move_cost. */
3219 lhs = gimple_assign_lhs (stmt);
3220 rhs = gimple_assign_rhs1 (stmt);
3221
3222 if (is_gimple_reg (lhs))
3223 cost = 0;
3224 else
3225 cost = estimate_move_cost (TREE_TYPE (lhs));
3226
3227 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3228 cost += estimate_move_cost (TREE_TYPE (rhs));
3229
3230 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3231 gimple_assign_rhs1 (stmt),
3232 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3233 == GIMPLE_BINARY_RHS
3234 ? gimple_assign_rhs2 (stmt) : NULL);
3235 break;
3236
3237 case GIMPLE_COND:
3238 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3239 gimple_op (stmt, 0),
3240 gimple_op (stmt, 1));
3241 break;
3242
3243 case GIMPLE_SWITCH:
3244 /* Take into account cost of the switch + guess 2 conditional jumps for
3245 each case label.
3246
3247 TODO: once the switch expansion logic is sufficiently separated, we can
3248 do a better job of estimating the cost of the switch. */
3249 if (weights->time_based)
3250 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3251 else
3252 cost = gimple_switch_num_labels (stmt) * 2;
3253 break;
3254
3255 case GIMPLE_CALL:
3256 {
3257 tree decl = gimple_call_fndecl (stmt);
3258 tree addr = gimple_call_fn (stmt);
3259 tree funtype = TREE_TYPE (addr);
3260
3261 if (POINTER_TYPE_P (funtype))
3262 funtype = TREE_TYPE (funtype);
3263
3264 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3265 cost = weights->target_builtin_call_cost;
3266 else
3267 cost = weights->call_cost;
3268
3269 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3270 switch (DECL_FUNCTION_CODE (decl))
3271 {
3272 case BUILT_IN_CONSTANT_P:
3273 return 0;
3274 case BUILT_IN_EXPECT:
3275 return 0;
3276
3277 /* Prefetch instruction is not expensive. */
3278 case BUILT_IN_PREFETCH:
3279 cost = weights->target_builtin_call_cost;
3280 break;
3281
3282 /* Exception state returns or moves registers around. */
3283 case BUILT_IN_EH_FILTER:
3284 case BUILT_IN_EH_POINTER:
3285 case BUILT_IN_EH_COPY_VALUES:
3286 return 0;
3287
3288 default:
3289 break;
3290 }
3291
3292 if (decl)
3293 funtype = TREE_TYPE (decl);
3294
3295 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3296 cost += estimate_move_cost (TREE_TYPE (funtype));
3297 /* Our cost must be kept in sync with
3298 cgraph_estimate_size_after_inlining, which uses the function
3299 declaration to figure out the arguments. */
3300 if (decl && DECL_ARGUMENTS (decl))
3301 {
3302 tree arg;
3303 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3304 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3305 cost += estimate_move_cost (TREE_TYPE (arg));
3306 }
3307 else if (funtype && prototype_p (funtype))
3308 {
3309 tree t;
3310 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3311 t = TREE_CHAIN (t))
3312 if (!VOID_TYPE_P (TREE_VALUE (t)))
3313 cost += estimate_move_cost (TREE_VALUE (t));
3314 }
3315 else
3316 {
3317 for (i = 0; i < gimple_call_num_args (stmt); i++)
3318 {
3319 tree arg = gimple_call_arg (stmt, i);
3320 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3321 cost += estimate_move_cost (TREE_TYPE (arg));
3322 }
3323 }
3324
3325 break;
3326 }
3327
3328 case GIMPLE_GOTO:
3329 case GIMPLE_LABEL:
3330 case GIMPLE_NOP:
3331 case GIMPLE_PHI:
3332 case GIMPLE_RETURN:
3333 case GIMPLE_PREDICT:
3334 case GIMPLE_DEBUG:
3335 return 0;
3336
3337 case GIMPLE_ASM:
3338 return asm_str_count (gimple_asm_string (stmt));
3339
3340 case GIMPLE_RESX:
3341 /* This is either going to be an external function call with one
3342 argument, or two register copy statements plus a goto. */
3343 return 2;
3344
3345 case GIMPLE_EH_DISPATCH:
3346 /* ??? This is going to turn into a switch statement. Ideally
3347 we'd have a look at the eh region and estimate the number of
3348 edges involved. */
3349 return 10;
3350
3351 case GIMPLE_BIND:
3352 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3353
3354 case GIMPLE_EH_FILTER:
3355 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3356
3357 case GIMPLE_CATCH:
3358 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3359
3360 case GIMPLE_TRY:
3361 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3362 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3363
3364 /* OpenMP directives are generally very expensive. */
3365
3366 case GIMPLE_OMP_RETURN:
3367 case GIMPLE_OMP_SECTIONS_SWITCH:
3368 case GIMPLE_OMP_ATOMIC_STORE:
3369 case GIMPLE_OMP_CONTINUE:
3370 /* ...except these, which are cheap. */
3371 return 0;
3372
3373 case GIMPLE_OMP_ATOMIC_LOAD:
3374 return weights->omp_cost;
3375
3376 case GIMPLE_OMP_FOR:
3377 return (weights->omp_cost
3378 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3379 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3380
3381 case GIMPLE_OMP_PARALLEL:
3382 case GIMPLE_OMP_TASK:
3383 case GIMPLE_OMP_CRITICAL:
3384 case GIMPLE_OMP_MASTER:
3385 case GIMPLE_OMP_ORDERED:
3386 case GIMPLE_OMP_SECTION:
3387 case GIMPLE_OMP_SECTIONS:
3388 case GIMPLE_OMP_SINGLE:
3389 return (weights->omp_cost
3390 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3391
3392 default:
3393 gcc_unreachable ();
3394 }
3395
3396 return cost;
3397 }
3398
3399 /* Estimate number of instructions that will be created by expanding
3400 function FNDECL. WEIGHTS contains weights attributed to various
3401 constructs. */
3402
3403 int
3404 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3405 {
3406 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3407 gimple_stmt_iterator bsi;
3408 basic_block bb;
3409 int n = 0;
3410
3411 gcc_assert (my_function && my_function->cfg);
3412 FOR_EACH_BB_FN (bb, my_function)
3413 {
3414 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3415 n += estimate_num_insns (gsi_stmt (bsi), weights);
3416 }
3417
3418 return n;
3419 }
3420
3421
3422 /* Initializes weights used by estimate_num_insns. */
3423
3424 void
3425 init_inline_once (void)
3426 {
3427 eni_size_weights.call_cost = 1;
3428 eni_size_weights.target_builtin_call_cost = 1;
3429 eni_size_weights.div_mod_cost = 1;
3430 eni_size_weights.omp_cost = 40;
3431 eni_size_weights.time_based = false;
3432
3433 /* Estimating the time for a call is difficult, since we have no idea what the
3434 called function does. In the current uses of eni_time_weights,
3435 underestimating the cost does less harm than overestimating it, so
3436 we choose a rather small value here. */
3437 eni_time_weights.call_cost = 10;
3438 eni_time_weights.target_builtin_call_cost = 10;
3439 eni_time_weights.div_mod_cost = 10;
3440 eni_time_weights.omp_cost = 40;
3441 eni_time_weights.time_based = true;
3442 }
3443
3444 /* Estimate the number of instructions in a gimple_seq. */
3445
3446 int
3447 count_insns_seq (gimple_seq seq, eni_weights *weights)
3448 {
3449 gimple_stmt_iterator gsi;
3450 int n = 0;
3451 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3452 n += estimate_num_insns (gsi_stmt (gsi), weights);
3453
3454 return n;
3455 }
3456
3457
3458 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3459
3460 static void
3461 prepend_lexical_block (tree current_block, tree new_block)
3462 {
3463 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3464 BLOCK_SUBBLOCKS (current_block) = new_block;
3465 BLOCK_SUPERCONTEXT (new_block) = current_block;
3466 }
3467
3468 /* Fetch callee declaration from the call graph edge going from NODE and
3469 associated with the STMT call statement. Return NULL_TREE if not found. */
3470 static tree
3471 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3472 {
3473 struct cgraph_edge *cs;
3474
3475 cs = cgraph_edge (node, stmt);
3476 if (cs)
3477 return cs->callee->decl;
3478
3479 return NULL_TREE;
3480 }
3481
3482 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3483
3484 static bool
3485 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3486 {
3487 tree use_retvar;
3488 tree fn;
3489 struct pointer_map_t *st, *dst;
3490 tree return_slot;
3491 tree modify_dest;
3492 location_t saved_location;
3493 struct cgraph_edge *cg_edge;
3494 cgraph_inline_failed_t reason;
3495 basic_block return_block;
3496 edge e;
3497 gimple_stmt_iterator gsi, stmt_gsi;
3498 bool successfully_inlined = FALSE;
3499 bool purge_dead_abnormal_edges;
3500 tree t_step;
3501 tree var;
3502
3503 /* Set input_location here so we get the right instantiation context
3504 if we call instantiate_decl from inlinable_function_p. */
3505 saved_location = input_location;
3506 if (gimple_has_location (stmt))
3507 input_location = gimple_location (stmt);
3508
3509 /* From here on, we're only interested in CALL_EXPRs. */
3510 if (gimple_code (stmt) != GIMPLE_CALL)
3511 goto egress;
3512
3513 /* First, see if we can figure out what function is being called.
3514 If we cannot, then there is no hope of inlining the function. */
3515 fn = gimple_call_fndecl (stmt);
3516 if (!fn)
3517 {
3518 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3519 if (!fn)
3520 goto egress;
3521 }
3522
3523 /* Turn forward declarations into real ones. */
3524 fn = cgraph_node (fn)->decl;
3525
3526 /* If FN is a declaration of a function in a nested scope that was
3527 globally declared inline, we don't set its DECL_INITIAL.
3528 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3529 C++ front-end uses it for cdtors to refer to their internal
3530 declarations, which are not real functions. Fortunately those
3531 don't have trees to be saved, so we can tell by checking their
3532 gimple_body. */
3533 if (!DECL_INITIAL (fn)
3534 && DECL_ABSTRACT_ORIGIN (fn)
3535 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3536 fn = DECL_ABSTRACT_ORIGIN (fn);
3537
3538 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3539 Kill this check once this is fixed. */
3540 if (!id->dst_node->analyzed)
3541 goto egress;
3542
3543 cg_edge = cgraph_edge (id->dst_node, stmt);
3544
3545 /* Don't inline functions with different EH personalities. */
3546 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3547 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3548 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3549 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3550 goto egress;
3551
3552 /* Don't try to inline functions that are not well-suited to
3553 inlining. */
3554 if (!cgraph_inline_p (cg_edge, &reason))
3555 {
3556 /* If this call was originally indirect, we do not want to emit any
3557 inlining related warnings or sorry messages because there are no
3558 guarantees regarding those. */
3559 if (cg_edge->indirect_call)
3560 goto egress;
3561
3562 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3563 /* Avoid warnings during early inline pass. */
3564 && cgraph_global_info_ready)
3565 {
3566 sorry ("inlining failed in call to %q+F: %s", fn,
3567 cgraph_inline_failed_string (reason));
3568 sorry ("called from here");
3569 }
3570 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3571 && !DECL_IN_SYSTEM_HEADER (fn)
3572 && reason != CIF_UNSPECIFIED
3573 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3574 /* Avoid warnings during early inline pass. */
3575 && cgraph_global_info_ready)
3576 {
3577 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3578 fn, cgraph_inline_failed_string (reason));
3579 warning (OPT_Winline, "called from here");
3580 }
3581 goto egress;
3582 }
3583 fn = cg_edge->callee->decl;
3584
3585 #ifdef ENABLE_CHECKING
3586 if (cg_edge->callee->decl != id->dst_node->decl)
3587 verify_cgraph_node (cg_edge->callee);
3588 #endif
3589
3590 /* We will be inlining this callee. */
3591 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3592
3593 /* Update the caller's EH personality. */
3594 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3595 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3596 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3597
3598 /* Split the block holding the GIMPLE_CALL. */
3599 e = split_block (bb, stmt);
3600 bb = e->src;
3601 return_block = e->dest;
3602 remove_edge (e);
3603
3604 /* split_block splits after the statement; work around this by
3605 moving the call into the second block manually. Not pretty,
3606 but seems easier than doing the CFG manipulation by hand
3607 when the GIMPLE_CALL is in the last statement of BB. */
3608 stmt_gsi = gsi_last_bb (bb);
3609 gsi_remove (&stmt_gsi, false);
3610
3611 /* If the GIMPLE_CALL was the last statement of BB, it may have
3612 been the source of abnormal edges. In this case, schedule
3613 the removal of dead abnormal edges. */
3614 gsi = gsi_start_bb (return_block);
3615 if (gsi_end_p (gsi))
3616 {
3617 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3618 purge_dead_abnormal_edges = true;
3619 }
3620 else
3621 {
3622 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3623 purge_dead_abnormal_edges = false;
3624 }
3625
3626 stmt_gsi = gsi_start_bb (return_block);
3627
3628 /* Build a block containing code to initialize the arguments, the
3629 actual inline expansion of the body, and a label for the return
3630 statements within the function to jump to. The type of the
3631 statement expression is the return type of the function call. */
3632 id->block = make_node (BLOCK);
3633 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3634 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3635 prepend_lexical_block (gimple_block (stmt), id->block);
3636
3637 /* Local declarations will be replaced by their equivalents in this
3638 map. */
3639 st = id->decl_map;
3640 id->decl_map = pointer_map_create ();
3641 dst = id->debug_map;
3642 id->debug_map = NULL;
3643
3644 /* Record the function we are about to inline. */
3645 id->src_fn = fn;
3646 id->src_node = cg_edge->callee;
3647 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3648 id->gimple_call = stmt;
3649
3650 gcc_assert (!id->src_cfun->after_inlining);
3651
3652 id->entry_bb = bb;
3653 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3654 {
3655 gimple_stmt_iterator si = gsi_last_bb (bb);
3656 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3657 NOT_TAKEN),
3658 GSI_NEW_STMT);
3659 }
3660 initialize_inlined_parameters (id, stmt, fn, bb);
3661
3662 if (DECL_INITIAL (fn))
3663 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3664
3665 /* Return statements in the function body will be replaced by jumps
3666 to the RET_LABEL. */
3667 gcc_assert (DECL_INITIAL (fn));
3668 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3669
3670 /* Find the LHS to which the result of this call is assigned. */
3671 return_slot = NULL;
3672 if (gimple_call_lhs (stmt))
3673 {
3674 modify_dest = gimple_call_lhs (stmt);
3675
3676 /* The function which we are inlining might not return a value;
3677 in that case the appropriate diagnostic is the warning that the
3678 function does not return a value, which is issued elsewhere. The
3679 optimizers would additionally see that the variable to which the
3680 value is assigned was not initialized; we do not want to issue a
3681 warning about that uninitialized variable. */
3682 if (DECL_P (modify_dest))
3683 TREE_NO_WARNING (modify_dest) = 1;
3684
3685 if (gimple_call_return_slot_opt_p (stmt))
3686 {
3687 return_slot = modify_dest;
3688 modify_dest = NULL;
3689 }
3690 }
3691 else
3692 modify_dest = NULL;
3693
3694 /* If we are inlining a call to the C++ operator new, we don't want
3695 to use type based alias analysis on the return value. Otherwise
3696 we may get confused if the compiler sees that the inlined new
3697 function returns a pointer which was just deleted. See bug
3698 33407. */
3699 if (DECL_IS_OPERATOR_NEW (fn))
3700 {
3701 return_slot = NULL;
3702 modify_dest = NULL;
3703 }
3704
3705 /* Declare the return variable for the function. */
3706 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3707
3708 /* Add local vars in this inlined callee to caller. */
3709 t_step = id->src_cfun->local_decls;
3710 for (; t_step; t_step = TREE_CHAIN (t_step))
3711 {
3712 var = TREE_VALUE (t_step);
3713 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3714 {
3715 if (var_ann (var) && add_referenced_var (var))
3716 cfun->local_decls = tree_cons (NULL_TREE, var,
3717 cfun->local_decls);
3718 }
3719 else if (!can_be_nonlocal (var, id))
3720 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3721 cfun->local_decls);
3722 }
3723
3724 if (dump_file && (dump_flags & TDF_DETAILS))
3725 {
3726 fprintf (dump_file, "Inlining ");
3727 print_generic_expr (dump_file, id->src_fn, 0);
3728 fprintf (dump_file, " to ");
3729 print_generic_expr (dump_file, id->dst_fn, 0);
3730 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3731 }
3732
3733 /* This is it. Duplicate the callee body. Assume callee is
3734 pre-gimplified. Note that we must not alter the caller
3735 function in any way before this point, as this CALL_EXPR may be
3736 a self-referential call; if we're calling ourselves, we need to
3737 duplicate our body before altering anything. */
3738 copy_body (id, bb->count,
3739 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3740 bb, return_block);
3741
3742 /* Reset the escaped solution. */
3743 if (cfun->gimple_df)
3744 pt_solution_reset (&cfun->gimple_df->escaped);
3745
3746 /* Clean up. */
3747 if (id->debug_map)
3748 {
3749 pointer_map_destroy (id->debug_map);
3750 id->debug_map = dst;
3751 }
3752 pointer_map_destroy (id->decl_map);
3753 id->decl_map = st;
3754
3755 /* Unlink the call's virtual operands before replacing it. */
3756 unlink_stmt_vdef (stmt);
3757
3758 /* If the inlined function returns a result that we care about,
3759 substitute the GIMPLE_CALL with an assignment of the return
3760 variable to the LHS of the call. That is, if STMT was
3761 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3762 if (use_retvar && gimple_call_lhs (stmt))
3763 {
3764 gimple old_stmt = stmt;
3765 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3766 gsi_replace (&stmt_gsi, stmt, false);
3767 if (gimple_in_ssa_p (cfun))
3768 mark_symbols_for_renaming (stmt);
3769 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3770 }
3771 else
3772 {
3773 /* Handle the case of inlining a function with no return
3774 statement, which causes the return value to become undefined. */
3775 if (gimple_call_lhs (stmt)
3776 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3777 {
3778 tree name = gimple_call_lhs (stmt);
3779 tree var = SSA_NAME_VAR (name);
3780 tree def = gimple_default_def (cfun, var);
3781
3782 if (def)
3783 {
3784 /* If the variable is used undefined, make this name
3785 undefined via a move. */
3786 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3787 gsi_replace (&stmt_gsi, stmt, true);
3788 }
3789 else
3790 {
3791 /* Otherwise make this variable undefined. */
3792 gsi_remove (&stmt_gsi, true);
3793 set_default_def (var, name);
3794 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3795 }
3796 }
3797 else
3798 gsi_remove (&stmt_gsi, true);
3799 }
3800
3801 if (purge_dead_abnormal_edges)
3802 gimple_purge_dead_abnormal_call_edges (return_block);
3803
3804 /* If the value of the new expression is ignored, that's OK. We
3805 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3806 the equivalent inlined version either. */
3807 if (is_gimple_assign (stmt))
3808 {
3809 gcc_assert (gimple_assign_single_p (stmt)
3810 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3811 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3812 }
3813
3814 /* Output the inlining info for this abstract function, since it has been
3815 inlined. If we don't do this now, we can lose the information about the
3816 variables in the function when the blocks get blown away as soon as we
3817 remove the cgraph node. */
3818 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3819
3820 /* Update callgraph if needed. */
3821 cgraph_remove_node (cg_edge->callee);
3822
3823 id->block = NULL_TREE;
3824 successfully_inlined = TRUE;
3825
3826 egress:
3827 input_location = saved_location;
3828 return successfully_inlined;
3829 }
3830
3831 /* Expand any inlinable call statements in basic block BB. Return true
3832 if one of them was inlined; in that case the statements that followed
3833 the call now live in the new return block created by the inliner,
3834 which will be visited in a later iteration of the caller's forward
3835 walk over the CFG. */
3836
3837 static bool
3838 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3839 {
3840 gimple_stmt_iterator gsi;
3841
3842 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3843 {
3844 gimple stmt = gsi_stmt (gsi);
3845
3846 if (is_gimple_call (stmt)
3847 && expand_call_inline (bb, stmt, id))
3848 return true;
3849 }
3850
3851 return false;
3852 }
3853
3854
3855 /* Walk all basic blocks created after FIRST and try to fold every statement
3856 in the STATEMENTS pointer set. */
3857
3858 static void
3859 fold_marked_statements (int first, struct pointer_set_t *statements)
3860 {
3861 for (; first < n_basic_blocks; first++)
3862 if (BASIC_BLOCK (first))
3863 {
3864 gimple_stmt_iterator gsi;
3865
3866 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3867 !gsi_end_p (gsi);
3868 gsi_next (&gsi))
3869 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3870 {
3871 gimple old_stmt = gsi_stmt (gsi);
3872 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3873
3874 if (old_decl && DECL_BUILT_IN (old_decl))
3875 {
3876 /* Folding builtins can create multiple instructions,
3877 we need to look at all of them. */
3878 gimple_stmt_iterator i2 = gsi;
3879 gsi_prev (&i2);
3880 if (fold_stmt (&gsi))
3881 {
3882 gimple new_stmt;
3883 if (gsi_end_p (i2))
3884 i2 = gsi_start_bb (BASIC_BLOCK (first));
3885 else
3886 gsi_next (&i2);
3887 while (1)
3888 {
3889 new_stmt = gsi_stmt (i2);
3890 update_stmt (new_stmt);
3891 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3892 new_stmt);
3893
3894 if (new_stmt == gsi_stmt (gsi))
3895 {
3896 /* It is okay to check only for the very last
3897 of these statements. If it is a throwing
3898 statement nothing will change. If it isn't
3899 this can remove EH edges. The only case where
3900 that would be wrong is if some intermediate
3901 stmts could throw while the last one cannot;
3902 then we would have to split the block, which
3903 we can't do here and we'd lose anyway. And as
3904 builtins probably never throw, this all
3905 is moot anyway. */
3906 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3907 new_stmt))
3908 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3909 break;
3910 }
3911 gsi_next (&i2);
3912 }
3913 }
3914 }
3915 else if (fold_stmt (&gsi))
3916 {
3917 /* Re-read the statement from GSI as fold_stmt() may
3918 have changed it. */
3919 gimple new_stmt = gsi_stmt (gsi);
3920 update_stmt (new_stmt);
3921
3922 if (is_gimple_call (old_stmt)
3923 || is_gimple_call (new_stmt))
3924 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3925 new_stmt);
3926
3927 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3928 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3929 }
3930 }
3931 }
3932 }
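
/* Note: the STATEMENTS set is filled while the callee body is copied;
   roughly (a sketch of what the copying code does elsewhere in this
   file, not a new API):

     pointer_set_insert (id->statements_to_fold, stmt);

   The queued statements are folded here only once all new blocks exist,
   so FIRST can simply be the basic-block count taken before inlining.  */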
3933
3934 /* Return true if BB has at least one abnormal outgoing edge. */
3935
3936 static inline bool
3937 has_abnormal_outgoing_edge_p (basic_block bb)
3938 {
3939 edge e;
3940 edge_iterator ei;
3941
3942 FOR_EACH_EDGE (e, ei, bb->succs)
3943 if (e->flags & EDGE_ABNORMAL)
3944 return true;
3945
3946 return false;
3947 }
3948
3949 /* Expand calls to inline functions in the body of FN. */
3950
3951 unsigned int
3952 optimize_inline_calls (tree fn)
3953 {
3954 copy_body_data id;
3955 basic_block bb;
3956 int last = n_basic_blocks;
3957 struct gimplify_ctx gctx;
3958
3959 /* There is no point in performing inlining if errors have already
3960 occurred -- and we might crash if we try to inline invalid
3961 code. */
3962 if (errorcount || sorrycount)
3963 return 0;
3964
3965 /* Clear out ID. */
3966 memset (&id, 0, sizeof (id));
3967
3968 id.src_node = id.dst_node = cgraph_node (fn);
3969 id.dst_fn = fn;
3970 /* Or any functions that aren't finished yet. */
3971 if (current_function_decl)
3972 id.dst_fn = current_function_decl;
3973
3974 id.copy_decl = copy_decl_maybe_to_var;
3975 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3976 id.transform_new_cfg = false;
3977 id.transform_return_to_modify = true;
3978 id.transform_lang_insert_block = NULL;
3979 id.statements_to_fold = pointer_set_create ();
3980
3981 push_gimplify_context (&gctx);
3982
3983 /* We make no attempts to keep dominance info up-to-date. */
3984 free_dominance_info (CDI_DOMINATORS);
3985 free_dominance_info (CDI_POST_DOMINATORS);
3986
3987 /* Register specific gimple functions. */
3988 gimple_register_cfg_hooks ();
3989
3990 /* Reach the trees by walking over the CFG, and note the
3991 enclosing basic-blocks in the call edges. */
3992 /* We walk the blocks going forward, because inlined function bodies
3993 will split id->current_basic_block, and the new blocks will
3994 follow it; we'll trudge through them, processing their CALL_EXPRs
3995 along the way. */
3996 FOR_EACH_BB (bb)
3997 gimple_expand_calls_inline (bb, &id);
3998
3999 pop_gimplify_context (NULL);
4000
4001 #ifdef ENABLE_CHECKING
4002 {
4003 struct cgraph_edge *e;
4004
4005 verify_cgraph_node (id.dst_node);
4006
4007 /* Double check that we inlined everything we are supposed to inline. */
4008 for (e = id.dst_node->callees; e; e = e->next_callee)
4009 gcc_assert (e->inline_failed);
4010 }
4011 #endif
4012
4013 /* Fold the statements before compacting/renumbering the basic blocks. */
4014 fold_marked_statements (last, id.statements_to_fold);
4015 pointer_set_destroy (id.statements_to_fold);
4016
4017 gcc_assert (!id.debug_stmts);
4018
4019 /* Renumber the (code) basic_blocks consecutively. */
4020 compact_blocks ();
4021 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4022 number_blocks (fn);
4023
4024 fold_cond_expr_cond ();
4025 delete_unreachable_blocks_update_callgraph (&id);
4026 #ifdef ENABLE_CHECKING
4027 verify_cgraph_node (id.dst_node);
4028 #endif
4029
4030 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4031 not possible yet - the IPA passes might make various functions not
4032 throw, and they don't care to proactively update local EH info. This is
4033 done later in the fixup_cfg pass, which also executes the verification. */
4034 return (TODO_update_ssa
4035 | TODO_cleanup_cfg
4036 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4037 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4038 }
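
/* A minimal sketch of how a caller could consume the returned TODO flags
   (hypothetical glue; in the compiler proper the pass manager interprets
   them):

     unsigned int todo = optimize_inline_calls (current_function_decl);
     if (todo & TODO_update_ssa)
       update_ssa (TODO_update_ssa);

   is the general shape; the flags returned above are only meaningful to
   the pass machinery.  */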
4039
4040 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4041
4042 tree
4043 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4044 {
4045 enum tree_code code = TREE_CODE (*tp);
4046 enum tree_code_class cl = TREE_CODE_CLASS (code);
4047
4048 /* We make copies of most nodes. */
4049 if (IS_EXPR_CODE_CLASS (cl)
4050 || code == TREE_LIST
4051 || code == TREE_VEC
4052 || code == TYPE_DECL
4053 || code == OMP_CLAUSE)
4054 {
4055 /* Because the chain gets clobbered when we make a copy, we save it
4056 here. */
4057 tree chain = NULL_TREE, new_tree;
4058
4059 chain = TREE_CHAIN (*tp);
4060
4061 /* Copy the node. */
4062 new_tree = copy_node (*tp);
4063
4064 /* Propagate mudflap marked-ness. */
4065 if (flag_mudflap && mf_marked_p (*tp))
4066 mf_mark (new_tree);
4067
4068 *tp = new_tree;
4069
4070 /* Now, restore the chain, if appropriate. That will cause
4071 walk_tree to walk into the chain as well. */
4072 if (code == PARM_DECL
4073 || code == TREE_LIST
4074 || code == OMP_CLAUSE)
4075 TREE_CHAIN (*tp) = chain;
4076
4077 /* For now, we don't update BLOCKs when we make copies. So, we
4078 have to nullify all BIND_EXPRs. */
4079 if (TREE_CODE (*tp) == BIND_EXPR)
4080 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4081 }
4082 else if (code == CONSTRUCTOR)
4083 {
4084 /* CONSTRUCTOR nodes need special handling because
4085 we need to duplicate the vector of elements. */
4086 tree new_tree;
4087
4088 new_tree = copy_node (*tp);
4089
4090 /* Propagate mudflap marked-ness. */
4091 if (flag_mudflap && mf_marked_p (*tp))
4092 mf_mark (new_tree);
4093
4094 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4095 CONSTRUCTOR_ELTS (*tp));
4096 *tp = new_tree;
4097 }
4098 else if (TREE_CODE_CLASS (code) == tcc_type)
4099 *walk_subtrees = 0;
4100 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4101 *walk_subtrees = 0;
4102 else if (TREE_CODE_CLASS (code) == tcc_constant)
4103 *walk_subtrees = 0;
4104 else
4105 gcc_assert (code != STATEMENT_LIST);
4106 return NULL_TREE;
4107 }
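
/* Typical use of the callback above (a sketch): duplicating a small
   GENERIC expression in place by handing it to walk_tree with no extra
   data, e.g.

     tree t = expr;
     walk_tree (&t, copy_tree_r, NULL, NULL);

   Note that SAVE_EXPRs and local declarations are not remapped by this
   walk; that is what the unsave machinery below is for.  */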
4108
4109 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4110 information indicating to what new SAVE_EXPR this one should be mapped,
4111 use that one. Otherwise, create a new node and enter it in ST. FN is
4112 the function into which the copy will be placed. */
4113
4114 static void
4115 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4116 {
4117 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4118 tree *n;
4119 tree t;
4120
4121 /* See if we already encountered this SAVE_EXPR. */
4122 n = (tree *) pointer_map_contains (st, *tp);
4123
4124 /* If we didn't already remap this SAVE_EXPR, do so now. */
4125 if (!n)
4126 {
4127 t = copy_node (*tp);
4128
4129 /* Remember this SAVE_EXPR. */
4130 *pointer_map_insert (st, *tp) = t;
4131 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4132 *pointer_map_insert (st, t) = t;
4133 }
4134 else
4135 {
4136 /* We've already walked into this SAVE_EXPR; don't do it again. */
4137 *walk_subtrees = 0;
4138 t = *n;
4139 }
4140
4141 /* Replace this SAVE_EXPR with the copy. */
4142 *tp = t;
4143 }
4144
4145 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4146 copies the label's declaration and enters it in the decl map in DATA
4147 (which is really a `copy_body_data *'). */
4148
4149 static tree
4150 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4151 void *data)
4152 {
4153 copy_body_data *id = (copy_body_data *) data;
4154
4155 /* Don't walk into types. */
4156 if (TYPE_P (*tp))
4157 *walk_subtrees = 0;
4158
4159 else if (TREE_CODE (*tp) == LABEL_EXPR)
4160 {
4161 tree decl = TREE_OPERAND (*tp, 0);
4162
4163 /* Copy the decl and remember the copy. */
4164 insert_decl_map (id, decl, id->copy_decl (decl, id));
4165 }
4166
4167 return NULL_TREE;
4168 }
4169
4170 /* Perform any modifications to EXPR required when it is unsaved. Does
4171 not recurse into EXPR's subtrees. */
4172
4173 static void
4174 unsave_expr_1 (tree expr)
4175 {
4176 switch (TREE_CODE (expr))
4177 {
4178 case TARGET_EXPR:
4179 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4180 It's OK for this to happen if it was part of a subtree that
4181 isn't immediately expanded, such as operand 2 of another
4182 TARGET_EXPR. */
4183 if (TREE_OPERAND (expr, 1))
4184 break;
4185
4186 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4187 TREE_OPERAND (expr, 3) = NULL_TREE;
4188 break;
4189
4190 default:
4191 break;
4192 }
4193 }
4194
4195 /* Called via walk_tree when an expression is unsaved. Using the
4196 decl map of the `copy_body_data' passed in DATA, remaps all local
4197 declarations to appropriate replacements. */
4198
4199 static tree
4200 unsave_r (tree *tp, int *walk_subtrees, void *data)
4201 {
4202 copy_body_data *id = (copy_body_data *) data;
4203 struct pointer_map_t *st = id->decl_map;
4204 tree *n;
4205
4206 /* Only a local declaration (variable or label). */
4207 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4208 || TREE_CODE (*tp) == LABEL_DECL)
4209 {
4210 /* Lookup the declaration. */
4211 n = (tree *) pointer_map_contains (st, *tp);
4212
4213 /* If it's there, remap it. */
4214 if (n)
4215 *tp = *n;
4216 }
4217
4218 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4219 gcc_unreachable ();
4220 else if (TREE_CODE (*tp) == BIND_EXPR)
4221 copy_bind_expr (tp, walk_subtrees, id);
4222 else if (TREE_CODE (*tp) == SAVE_EXPR
4223 || TREE_CODE (*tp) == TARGET_EXPR)
4224 remap_save_expr (tp, st, walk_subtrees);
4225 else
4226 {
4227 copy_tree_r (tp, walk_subtrees, NULL);
4228
4229 /* Do whatever unsaving is required. */
4230 unsave_expr_1 (*tp);
4231 }
4232
4233 /* Keep iterating. */
4234 return NULL_TREE;
4235 }
4236
4237 /* Copies everything in EXPR and replaces variables, labels
4238 and SAVE_EXPRs local to EXPR. */
4239
4240 tree
4241 unsave_expr_now (tree expr)
4242 {
4243 copy_body_data id;
4244
4245 /* There's nothing to do for NULL_TREE. */
4246 if (expr == 0)
4247 return expr;
4248
4249 /* Set up ID. */
4250 memset (&id, 0, sizeof (id));
4251 id.src_fn = current_function_decl;
4252 id.dst_fn = current_function_decl;
4253 id.decl_map = pointer_map_create ();
4254 id.debug_map = NULL;
4255
4256 id.copy_decl = copy_decl_no_change;
4257 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4258 id.transform_new_cfg = false;
4259 id.transform_return_to_modify = false;
4260 id.transform_lang_insert_block = NULL;
4261
4262 /* Walk the tree once to find local labels. */
4263 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4264
4265 /* Walk the tree again, copying, remapping, and unsaving. */
4266 walk_tree (&expr, unsave_r, &id, NULL);
4267
4268 /* Clean up. */
4269 pointer_map_destroy (id.decl_map);
4270 if (id.debug_map)
4271 pointer_map_destroy (id.debug_map);
4272
4273 return expr;
4274 }
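
/* Sketch of intended use: a caller that needs a private, fully unshared
   copy of a GENERIC expression can simply do

     tree copy = unsave_expr_now (expr);

   and may then modify COPY without affecting EXPR's local variables,
   labels or SAVE_EXPRs.  */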
4275
4276 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4277 label, copies the declaration and enters it in the decl map in DATA (which
4278 is really a `copy_body_data *'). */
4279
4280 static tree
4281 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4282 bool *handled_ops_p ATTRIBUTE_UNUSED,
4283 struct walk_stmt_info *wi)
4284 {
4285 copy_body_data *id = (copy_body_data *) wi->info;
4286 gimple stmt = gsi_stmt (*gsip);
4287
4288 if (gimple_code (stmt) == GIMPLE_LABEL)
4289 {
4290 tree decl = gimple_label_label (stmt);
4291
4292 /* Copy the decl and remember the copy. */
4293 insert_decl_map (id, decl, id->copy_decl (decl, id));
4294 }
4295
4296 return NULL_TREE;
4297 }
4298
4299
4300 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4301 Using the decl map of the `copy_body_data' in WI->info, remaps all
4302 local declarations to appropriate replacements in gimple
4303 operands. */
4304
4305 static tree
4306 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4307 {
4308 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4309 copy_body_data *id = (copy_body_data *) wi->info;
4310 struct pointer_map_t *st = id->decl_map;
4311 tree *n;
4312 tree expr = *tp;
4313
4314 /* Only a local declaration (variable or label). */
4315 if ((TREE_CODE (expr) == VAR_DECL
4316 && !TREE_STATIC (expr))
4317 || TREE_CODE (expr) == LABEL_DECL)
4318 {
4319 /* Lookup the declaration. */
4320 n = (tree *) pointer_map_contains (st, expr);
4321
4322 /* If it's there, remap it. */
4323 if (n)
4324 *tp = *n;
4325 *walk_subtrees = 0;
4326 }
4327 else if (TREE_CODE (expr) == STATEMENT_LIST
4328 || TREE_CODE (expr) == BIND_EXPR
4329 || TREE_CODE (expr) == SAVE_EXPR)
4330 gcc_unreachable ();
4331 else if (TREE_CODE (expr) == TARGET_EXPR)
4332 {
4333 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4334 It's OK for this to happen if it was part of a subtree that
4335 isn't immediately expanded, such as operand 2 of another
4336 TARGET_EXPR. */
4337 if (!TREE_OPERAND (expr, 1))
4338 {
4339 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4340 TREE_OPERAND (expr, 3) = NULL_TREE;
4341 }
4342 }
4343
4344 /* Keep iterating. */
4345 return NULL_TREE;
4346 }
4347
4348
4349 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4350 Using the decl map of the `copy_body_data' in WI->info, remaps the
4351 blocks and local declarations bound by GIMPLE_BIND statements to
4352 appropriate replacements. */
4353
4354 static tree
4355 replace_locals_stmt (gimple_stmt_iterator *gsip,
4356 bool *handled_ops_p ATTRIBUTE_UNUSED,
4357 struct walk_stmt_info *wi)
4358 {
4359 copy_body_data *id = (copy_body_data *) wi->info;
4360 gimple stmt = gsi_stmt (*gsip);
4361
4362 if (gimple_code (stmt) == GIMPLE_BIND)
4363 {
4364 tree block = gimple_bind_block (stmt);
4365
4366 if (block)
4367 {
4368 remap_block (&block, id);
4369 gimple_bind_set_block (stmt, block);
4370 }
4371
4372 /* This will remap a lot of the same decls again, but this should be
4373 harmless. */
4374 if (gimple_bind_vars (stmt))
4375 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4376 }
4377
4378 /* Keep iterating. */
4379 return NULL_TREE;
4380 }
4381
4382
4383 /* Copies everything in SEQ and replaces variables and labels local to
4384 current_function_decl. */
4385
4386 gimple_seq
4387 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4388 {
4389 copy_body_data id;
4390 struct walk_stmt_info wi;
4391 struct pointer_set_t *visited;
4392 gimple_seq copy;
4393
4394 /* There's nothing to do for NULL_TREE. */
4395 if (seq == NULL)
4396 return seq;
4397
4398 /* Set up ID. */
4399 memset (&id, 0, sizeof (id));
4400 id.src_fn = current_function_decl;
4401 id.dst_fn = current_function_decl;
4402 id.decl_map = pointer_map_create ();
4403 id.debug_map = NULL;
4404
4405 id.copy_decl = copy_decl_no_change;
4406 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4407 id.transform_new_cfg = false;
4408 id.transform_return_to_modify = false;
4409 id.transform_lang_insert_block = NULL;
4410
4411 /* Walk the tree once to find local labels. */
4412 memset (&wi, 0, sizeof (wi));
4413 visited = pointer_set_create ();
4414 wi.info = &id;
4415 wi.pset = visited;
4416 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4417 pointer_set_destroy (visited);
4418
4419 copy = gimple_seq_copy (seq);
4420
4421 /* Walk the copy, remapping decls. */
4422 memset (&wi, 0, sizeof (wi));
4423 wi.info = &id;
4424 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4425
4426 /* Clean up. */
4427 pointer_map_destroy (id.decl_map);
4428 if (id.debug_map)
4429 pointer_map_destroy (id.debug_map);
4430
4431 return copy;
4432 }
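
/* Sketch of use (hypothetical caller): a lowering pass that needs a
   private copy of a statement sequence can do

     gimple_seq body_copy = copy_gimple_seq_and_replace_locals (body);

   The copy then binds its own labels and GIMPLE_BIND locals instead of
   sharing them with the original sequence.  */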
4433
4434
4435 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4436
4437 static tree
4438 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4439 {
4440 if (*tp == data)
4441 return (tree) data;
4442 else
4443 return NULL;
4444 }
4445
4446 bool
4447 debug_find_tree (tree top, tree search)
4448 {
4449 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4450 }
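
/* Example from a debugging session (illustrative):

     (gdb) call debug_find_tree (top, search)

   returns true exactly when the node SEARCH occurs somewhere inside TOP.  */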
4451
4452
4453 /* Declare the variables created by the inliner. Add all the variables in
4454 VARS to BLOCK. */
4455
4456 static void
4457 declare_inline_vars (tree block, tree vars)
4458 {
4459 tree t;
4460 for (t = vars; t; t = TREE_CHAIN (t))
4461 {
4462 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4463 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4464 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4465 }
4466
4467 if (block)
4468 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4469 }
4470
4471 /* Finish up the bookkeeping for COPY, a copy of DECL made by one of the
4472 copy_decl_* routines below. DECL originally lived in ID->src_fn; the
4473 copy will live in ID->dst_fn. */
4474
4475 static tree
4476 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4477 {
4478 /* Don't generate debug information for the copy if we wouldn't have
4479 generated it for the original either. */
4480 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4481 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4482
4483 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4484 declaration inspired this copy. */
4485 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4486
4487 /* The new variable/label has no RTL, yet. */
4488 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4489 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4490 SET_DECL_RTL (copy, NULL_RTX);
4491
4492 /* These args would always appear unused, if not for this. */
4493 TREE_USED (copy) = 1;
4494
4495 /* Set the context for the new declaration. */
4496 if (!DECL_CONTEXT (decl))
4497 /* Globals stay global. */
4498 ;
4499 else if (DECL_CONTEXT (decl) != id->src_fn)
4500 /* Things that weren't in the scope of the function we're inlining
4501 from aren't in the scope we're inlining to, either. */
4502 ;
4503 else if (TREE_STATIC (decl))
4504 /* Function-scoped static variables should stay in the original
4505 function. */
4506 ;
4507 else
4508 /* Ordinary automatic local variables are now in the scope of the
4509 new function. */
4510 DECL_CONTEXT (copy) = id->dst_fn;
4511
4512 return copy;
4513 }
4514
4515 static tree
4516 copy_decl_to_var (tree decl, copy_body_data *id)
4517 {
4518 tree copy, type;
4519
4520 gcc_assert (TREE_CODE (decl) == PARM_DECL
4521 || TREE_CODE (decl) == RESULT_DECL);
4522
4523 type = TREE_TYPE (decl);
4524
4525 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4526 VAR_DECL, DECL_NAME (decl), type);
4527 if (DECL_PT_UID_SET_P (decl))
4528 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4529 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4530 TREE_READONLY (copy) = TREE_READONLY (decl);
4531 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4532 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4533
4534 return copy_decl_for_dup_finish (id, decl, copy);
4535 }
4536
4537 /* Like copy_decl_to_var, but create a return slot object instead of a
4538 pointer variable for return by invisible reference. */
4539
4540 static tree
4541 copy_result_decl_to_var (tree decl, copy_body_data *id)
4542 {
4543 tree copy, type;
4544
4545 gcc_assert (TREE_CODE (decl) == PARM_DECL
4546 || TREE_CODE (decl) == RESULT_DECL);
4547
4548 type = TREE_TYPE (decl);
4549 if (DECL_BY_REFERENCE (decl))
4550 type = TREE_TYPE (type);
4551
4552 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4553 VAR_DECL, DECL_NAME (decl), type);
4554 if (DECL_PT_UID_SET_P (decl))
4555 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4556 TREE_READONLY (copy) = TREE_READONLY (decl);
4557 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4558 if (!DECL_BY_REFERENCE (decl))
4559 {
4560 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4561 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4562 }
4563
4564 return copy_decl_for_dup_finish (id, decl, copy);
4565 }
4566
4567 tree
4568 copy_decl_no_change (tree decl, copy_body_data *id)
4569 {
4570 tree copy;
4571
4572 copy = copy_node (decl);
4573
4574 /* The COPY is not abstract; it will be generated in DST_FN. */
4575 DECL_ABSTRACT (copy) = 0;
4576 lang_hooks.dup_lang_specific_decl (copy);
4577
4578 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4579 been taken; it's for internal bookkeeping in expand_goto_internal. */
4580 if (TREE_CODE (copy) == LABEL_DECL)
4581 {
4582 TREE_ADDRESSABLE (copy) = 0;
4583 LABEL_DECL_UID (copy) = -1;
4584 }
4585
4586 return copy_decl_for_dup_finish (id, decl, copy);
4587 }
4588
4589 static tree
4590 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4591 {
4592 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4593 return copy_decl_to_var (decl, id);
4594 else
4595 return copy_decl_no_change (decl, id);
4596 }
4597
4598 /* Return a copy of the function's argument tree. */
4599 static tree
4600 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4601 bitmap args_to_skip, tree *vars)
4602 {
4603 tree arg, *parg;
4604 tree new_parm = NULL;
4605 int i = 0;
4606
4607 parg = &new_parm;
4608
4609 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4610 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4611 {
4612 tree new_tree = remap_decl (arg, id);
4613 lang_hooks.dup_lang_specific_decl (new_tree);
4614 *parg = new_tree;
4615 parg = &TREE_CHAIN (new_tree);
4616 }
4617 else if (!pointer_map_contains (id->decl_map, arg))
4618 {
4619 /* Make an equivalent VAR_DECL. If the argument is used
4620 as a temporary variable later in the function, the uses will be
4621 replaced by this local variable. */
4622 tree var = copy_decl_to_var (arg, id);
4623 get_var_ann (var);
4624 add_referenced_var (var);
4625 insert_decl_map (id, arg, var);
4626 /* Declare this new variable. */
4627 TREE_CHAIN (var) = *vars;
4628 *vars = var;
4629 }
4630 return new_parm;
4631 }
4632
4633 /* Return a copy of the function's static chain. */
4634 static tree
4635 copy_static_chain (tree static_chain, copy_body_data * id)
4636 {
4637 tree *chain_copy, *pvar;
4638
4639 chain_copy = &static_chain;
4640 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4641 {
4642 tree new_tree = remap_decl (*pvar, id);
4643 lang_hooks.dup_lang_specific_decl (new_tree);
4644 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4645 *pvar = new_tree;
4646 }
4647 return static_chain;
4648 }
4649
4650 /* Return true if the function is allowed to be versioned.
4651 This is a guard for the versioning functionality. */
4652
4653 bool
4654 tree_versionable_function_p (tree fndecl)
4655 {
4656 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4657 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4658 }
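
/* Sketch of the expected guard in cloning code (hypothetical caller):

     if (tree_versionable_function_p (node->decl))
       ... create the new version and call tree_function_versioning ...

   so that "noclone" and the copy_forbidden reasons are respected.  */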
4659
4660 /* Delete all unreachable basic blocks and update the callgraph.
4661 Doing so is somewhat nontrivial because we need to update all clones and
4662 remove inlined functions that become unreachable. */
4663
4664 static bool
4665 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4666 {
4667 bool changed = false;
4668 basic_block b, next_bb;
4669
4670 find_unreachable_blocks ();
4671
4672 /* Delete all unreachable basic blocks. */
4673
4674 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4675 {
4676 next_bb = b->next_bb;
4677
4678 if (!(b->flags & BB_REACHABLE))
4679 {
4680 gimple_stmt_iterator bsi;
4681
4682 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4683 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4684 {
4685 struct cgraph_edge *e;
4686 struct cgraph_node *node;
4687
4688 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4689 {
4690 if (!e->inline_failed)
4691 cgraph_remove_node_and_inline_clones (e->callee);
4692 else
4693 cgraph_remove_edge (e);
4694 }
4695 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4696 && id->dst_node->clones)
4697 for (node = id->dst_node->clones; node != id->dst_node;)
4698 {
4699 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4700 {
4701 if (!e->inline_failed)
4702 cgraph_remove_node_and_inline_clones (e->callee);
4703 else
4704 cgraph_remove_edge (e);
4705 }
4706
4707 if (node->clones)
4708 node = node->clones;
4709 else if (node->next_sibling_clone)
4710 node = node->next_sibling_clone;
4711 else
4712 {
4713 while (node != id->dst_node && !node->next_sibling_clone)
4714 node = node->clone_of;
4715 if (node != id->dst_node)
4716 node = node->next_sibling_clone;
4717 }
4718 }
4719 }
4720 delete_basic_block (b);
4721 changed = true;
4722 }
4723 }
4724
4725 if (changed)
4726 tidy_fallthru_edges ();
4727 return changed;
4728 }
4729
4730 /* Update clone info after duplication. */
4731
4732 static void
4733 update_clone_info (copy_body_data * id)
4734 {
4735 struct cgraph_node *node;
4736 if (!id->dst_node->clones)
4737 return;
4738 for (node = id->dst_node->clones; node != id->dst_node;)
4739 {
4740 /* First update replace maps to match the new body. */
4741 if (node->clone.tree_map)
4742 {
4743 unsigned int i;
4744 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4745 {
4746 struct ipa_replace_map *replace_info;
4747 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4748 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4749 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4750 }
4751 }
4752 if (node->clones)
4753 node = node->clones;
4754 else if (node->next_sibling_clone)
4755 node = node->next_sibling_clone;
4756 else
4757 {
4758 while (node != id->dst_node && !node->next_sibling_clone)
4759 node = node->clone_of;
4760 if (node != id->dst_node)
4761 node = node->next_sibling_clone;
4762 }
4763 }
4764 }
4765
4766 /* Create a copy of a function's tree.
4767 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4768 of the original function and the new copied function
4769 respectively. In case we want to replace a DECL
4770 tree with another tree while duplicating the function's
4771 body, TREE_MAP represents the mapping between these
4772 trees. If UPDATE_CLONES is set, the call_stmt fields of edges of clones
4773 will be updated; bits set in ARGS_TO_SKIP index arguments that are dropped. */
4774 void
4775 tree_function_versioning (tree old_decl, tree new_decl,
4776 VEC(ipa_replace_map_p,gc)* tree_map,
4777 bool update_clones, bitmap args_to_skip)
4778 {
4779 struct cgraph_node *old_version_node;
4780 struct cgraph_node *new_version_node;
4781 copy_body_data id;
4782 tree p;
4783 unsigned i;
4784 struct ipa_replace_map *replace_info;
4785 basic_block old_entry_block, bb;
4786 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4787
4788 tree t_step;
4789 tree old_current_function_decl = current_function_decl;
4790 tree vars = NULL_TREE;
4791
4792 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4793 && TREE_CODE (new_decl) == FUNCTION_DECL);
4794 DECL_POSSIBLY_INLINED (old_decl) = 1;
4795
4796 old_version_node = cgraph_node (old_decl);
4797 new_version_node = cgraph_node (new_decl);
4798
4799 /* Output the inlining info for this abstract function, since it has been
4800 inlined. If we don't do this now, we can lose the information about the
4801 variables in the function when the blocks get blown away as soon as we
4802 remove the cgraph node. */
4803 (*debug_hooks->outlining_inline_function) (old_decl);
4804
4805 DECL_ARTIFICIAL (new_decl) = 1;
4806 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4807 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4808
4809 /* Prepare the data structures for the tree copy. */
4810 memset (&id, 0, sizeof (id));
4811
4812 /* Generate a new name for the new version. */
4813 id.statements_to_fold = pointer_set_create ();
4814
4815 id.decl_map = pointer_map_create ();
4816 id.debug_map = NULL;
4817 id.src_fn = old_decl;
4818 id.dst_fn = new_decl;
4819 id.src_node = old_version_node;
4820 id.dst_node = new_version_node;
4821 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4822 if (id.src_node->ipa_transforms_to_apply)
4823 {
4824 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4825 unsigned int i;
4826
4827 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4828 id.src_node->ipa_transforms_to_apply);
4829 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4830 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4831 VEC_index (ipa_opt_pass,
4832 old_transforms_to_apply,
4833 i));
4834 }
4835
4836 id.copy_decl = copy_decl_no_change;
4837 id.transform_call_graph_edges
4838 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4839 id.transform_new_cfg = true;
4840 id.transform_return_to_modify = false;
4841 id.transform_lang_insert_block = NULL;
4842
4843 current_function_decl = new_decl;
4844 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4845 (DECL_STRUCT_FUNCTION (old_decl));
4846 initialize_cfun (new_decl, old_decl,
4847 old_entry_block->count);
4848 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4849
4850 /* Copy the function's static chain. */
4851 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4852 if (p)
4853 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4854 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4855 &id);
4856
4857 /* If there's a tree_map, prepare for substitution. */
4858 if (tree_map)
4859 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4860 {
4861 gimple init;
4862 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4863 if (replace_info->replace_p)
4864 {
4865 tree op = replace_info->new_tree;
4866
4867 STRIP_NOPS (op);
4868
4869 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4870 op = TREE_OPERAND (op, 0);
4871
4872 if (TREE_CODE (op) == ADDR_EXPR)
4873 {
4874 op = TREE_OPERAND (op, 0);
4875 while (handled_component_p (op))
4876 op = TREE_OPERAND (op, 0);
4877 if (TREE_CODE (op) == VAR_DECL)
4878 add_referenced_var (op);
4879 }
4880 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4881 init = setup_one_parameter (&id, replace_info->old_tree,
4882 replace_info->new_tree, id.src_fn,
4883 NULL,
4884 &vars);
4885 if (init)
4886 VEC_safe_push (gimple, heap, init_stmts, init);
4887 }
4888 }
4889 /* Copy the function's arguments. */
4890 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4891 DECL_ARGUMENTS (new_decl) =
4892 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4893 args_to_skip, &vars);
4894
4895 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4896
4897 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4898 number_blocks (id.dst_fn);
4899
4900 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4901
4902 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4903 /* Add local vars. */
4904 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4905 t_step; t_step = TREE_CHAIN (t_step))
4906 {
4907 tree var = TREE_VALUE (t_step);
4908 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4909 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4910 else if (!can_be_nonlocal (var, &id))
4911 cfun->local_decls =
4912 tree_cons (NULL_TREE, remap_decl (var, &id),
4913 cfun->local_decls);
4914 }
4915
4916 /* Copy the Function's body. */
4917 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4918 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4919
4920 if (DECL_RESULT (old_decl) != NULL_TREE)
4921 {
4922 tree *res_decl = &DECL_RESULT (old_decl);
4923 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4924 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4925 }
4926
4927 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4928 number_blocks (new_decl);
4929
4930 /* We want to create the BB unconditionally, so that the addition of
4931 debug stmts doesn't affect BB count, which may in the end cause
4932 codegen differences. */
4933 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4934 while (VEC_length (gimple, init_stmts))
4935 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4936 update_clone_info (&id);
4937
4938 /* Remap the nonlocal_goto_save_area, if any. */
4939 if (cfun->nonlocal_goto_save_area)
4940 {
4941 struct walk_stmt_info wi;
4942
4943 memset (&wi, 0, sizeof (wi));
4944 wi.info = &id;
4945 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4946 }
4947
4948 /* Clean up. */
4949 pointer_map_destroy (id.decl_map);
4950 if (id.debug_map)
4951 pointer_map_destroy (id.debug_map);
4952 free_dominance_info (CDI_DOMINATORS);
4953 free_dominance_info (CDI_POST_DOMINATORS);
4954
4955 fold_marked_statements (0, id.statements_to_fold);
4956 pointer_set_destroy (id.statements_to_fold);
4957 fold_cond_expr_cond ();
4958 delete_unreachable_blocks_update_callgraph (&id);
4959 update_ssa (TODO_update_ssa);
4960 free_dominance_info (CDI_DOMINATORS);
4961 free_dominance_info (CDI_POST_DOMINATORS);
4962
4963 gcc_assert (!id.debug_stmts);
4964 VEC_free (gimple, heap, init_stmts);
4965 pop_cfun ();
4966 current_function_decl = old_current_function_decl;
4967 gcc_assert (!current_function_decl
4968 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4969 return;
4970 }
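
/* A minimal sketch of how cloning code drives the routine above (the
   variable names are illustrative, not from this file):

     if (tree_versionable_function_p (old_decl))
       tree_function_versioning (old_decl, new_decl, tree_map,
                                 /*update_clones=*/false, args_to_skip);

   where NEW_DECL is a fresh FUNCTION_DECL provided by the caller and
   TREE_MAP maps PARM_DECLs of OLD_DECL to replacement trees.  */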
4971
4972 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
4973 the callee and return the inlined body on success. */
4974
4975 tree
4976 maybe_inline_call_in_expr (tree exp)
4977 {
4978 tree fn = get_callee_fndecl (exp);
4979
4980 /* We can only try to inline "const" functions. */
4981 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4982 {
4983 struct pointer_map_t *decl_map = pointer_map_create ();
4984 call_expr_arg_iterator iter;
4985 copy_body_data id;
4986 tree param, arg, t;
4987
4988 /* Remap the parameters. */
4989 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4990 param;
4991 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4992 *pointer_map_insert (decl_map, param) = arg;
4993
4994 memset (&id, 0, sizeof (id));
4995 id.src_fn = fn;
4996 id.dst_fn = current_function_decl;
4997 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4998 id.decl_map = decl_map;
4999
5000 id.copy_decl = copy_decl_no_change;
5001 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5002 id.transform_new_cfg = false;
5003 id.transform_return_to_modify = true;
5004 id.transform_lang_insert_block = NULL;
5005
5006 /* Make sure not to unshare trees behind the front-end's back
5007 since front-end specific mechanisms may rely on sharing. */
5008 id.regimplify = false;
5009 id.do_not_unshare = true;
5010
5011 /* We're not inside any EH region. */
5012 id.eh_lp_nr = 0;
5013
5014 t = copy_tree_body (&id);
5015 pointer_map_destroy (decl_map);
5016
5017 /* We can only return something suitable for use in a GENERIC
5018 expression tree. */
5019 if (TREE_CODE (t) == MODIFY_EXPR)
5020 return TREE_OPERAND (t, 1);
5021 }
5022
5023 return NULL_TREE;
5024 }
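
/* Sketch of use from GENERIC-level folding (hypothetical caller):

     tree folded = maybe_inline_call_in_expr (call);
     if (folded)
       ... use FOLDED in place of CALL ...

   Only "const" callees with a saved GENERIC body are candidates, as
   checked above.  */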
5025
5026 /* Duplicate a type, fields and all. */
5027
5028 tree
5029 build_duplicate_type (tree type)
5030 {
5031 struct copy_body_data id;
5032
5033 memset (&id, 0, sizeof (id));
5034 id.src_fn = current_function_decl;
5035 id.dst_fn = current_function_decl;
5036 id.src_cfun = cfun;
5037 id.decl_map = pointer_map_create ();
5038 id.debug_map = NULL;
5039 id.copy_decl = copy_decl_no_change;
5040
5041 type = remap_type_1 (type, &id);
5042
5043 pointer_map_destroy (id.decl_map);
5044 if (id.debug_map)
5045 pointer_map_destroy (id.debug_map);
5046
5047 TYPE_CANONICAL (type) = type;
5048
5049 return type;
5050 }
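
/* Sketch of use (hypothetical caller): a front end that needs a copy of
   a type not shared with the original can do

     tree dup = build_duplicate_type (orig_type);

   and gets back a deep copy that is its own canonical type.  */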
5051
5052 /* Return whether it is safe to inline the callee of call edge E. Inlining
5053 is unsafe if caller and callee use incompatible target-specific options
5054 or if the call-site actual argument types mismatch the parameter types. */
5055 bool
5056 tree_can_inline_p (struct cgraph_edge *e)
5057 {
5058 #if 0
5059 /* This causes a regression in SPEC in that it prevents a cold function from
5060 inlining a hot function. Perhaps this should only apply to functions
5061 that the user declares hot/cold/optimize explicitly. */
5062
5063 /* Don't inline a function with a higher optimization level than the
5064 caller, or with different space constraints (hot/cold functions). */
5065 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5066 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5067
5068 if (caller_tree != callee_tree)
5069 {
5070 struct cl_optimization *caller_opt
5071 = TREE_OPTIMIZATION ((caller_tree)
5072 ? caller_tree
5073 : optimization_default_node);
5074
5075 struct cl_optimization *callee_opt
5076 = TREE_OPTIMIZATION ((callee_tree)
5077 ? callee_tree
5078 : optimization_default_node);
5079
5080 if ((caller_opt->optimize > callee_opt->optimize)
5081 || (caller_opt->optimize_size != callee_opt->optimize_size))
5082 return false;
5083 }
5084 #endif
5085 tree caller, callee, lhs;
5086
5087 caller = e->caller->decl;
5088 callee = e->callee->decl;
5089
5090 /* We cannot inline a function that uses a different EH personality
5091 than the caller. */
5092 if (DECL_FUNCTION_PERSONALITY (caller)
5093 && DECL_FUNCTION_PERSONALITY (callee)
5094 && (DECL_FUNCTION_PERSONALITY (caller)
5095 != DECL_FUNCTION_PERSONALITY (callee)))
5096 {
5097 e->inline_failed = CIF_UNSPECIFIED;
5098 gimple_call_set_cannot_inline (e->call_stmt, true);
5099 return false;
5100 }
5101
5102 /* Allow the backend to decide if inlining is ok. */
5103 if (!targetm.target_option.can_inline_p (caller, callee))
5104 {
5105 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5106 gimple_call_set_cannot_inline (e->call_stmt, true);
5107 e->call_stmt_cannot_inline_p = true;
5108 return false;
5109 }
5110
5111 /* Do not inline calls where we cannot trivially work around mismatches
5112 in argument or return types. */
5113 if (e->call_stmt
5114 && ((DECL_RESULT (callee)
5115 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5116 && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5117 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5118 TREE_TYPE (lhs))
5119 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5120 || !gimple_check_call_args (e->call_stmt)))
5121 {
5122 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5123 gimple_call_set_cannot_inline (e->call_stmt, true);
5124 e->call_stmt_cannot_inline_p = true;
5125 return false;
5126 }
5127
5128 return true;
5129 }