convert many uses of pointer_map to hash_map
[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "intl.h"
39 #include "pointer-set.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "tree-cfg.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-into-ssa.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "function.h"
62 #include "tree-pretty-print.h"
63 #include "except.h"
64 #include "debug.h"
65 #include "ipa-prop.h"
66 #include "value-prof.h"
67 #include "tree-pass.h"
68 #include "target.h"
69 #include "cfgloop.h"
70 #include "builtins.h"
71
72 #include "rtl.h" /* FIXME: For asm_str_count. */
73
 74 /* I'm not really happy about this, but we need to handle gimple and
 75 non-gimple trees. */
76
77 /* Inlining, Cloning, Versioning, Parallelization
78
79 Inlining: a function body is duplicated, but the PARM_DECLs are
80 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
81 MODIFY_EXPRs that store to a dedicated returned-value variable.
82 The duplicated eh_region info of the copy will later be appended
 83 to the info for the caller; the eh_region info in copied throwing
 84 statements and RESX statements is adjusted accordingly.
85
86 Cloning: (only in C++) We have one body for a con/de/structor, and
87 multiple function decls, each with a unique parameter list.
88 Duplicate the body, using the given splay tree; some parameters
89 will become constants (like 0 or 1).
90
 91 Versioning: a function body is duplicated and the result is a new
 92 function, rather than being inserted into blocks of an existing
 93 function as with inlining. Some parameters will become constants.
94
95 Parallelization: a region of a function is duplicated resulting in
96 a new function. Variables may be replaced with complex expressions
97 to enable shared variable semantics.
98
 99 All of these will simultaneously look up any callgraph edges. If
 100 we're going to inline the duplicated function body, and the given
 101 function has some cloned callgraph nodes (one for each place this
 102 function will be inlined), those callgraph edges will be duplicated.
103 If we're cloning the body, those callgraph edges will be
104 updated to point into the new body. (Note that the original
105 callgraph node and edge list will not be altered.)
106
107 See the CALL_EXPR handling case in copy_tree_body_r (). */
108
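 /* As a rough illustration of the inlining transform described above,
    consider a hypothetical callee

        int sq (int x) { return x * x; }

    inlined at a call site "r = sq (a);". The PARM_DECL 'x' is remapped to
    a fresh local VAR_DECL initialized from 'a', and the non-void
    RETURN_EXPR becomes a MODIFY_EXPR into a returned-value variable,
    conceptually:

        x.1 = a;
        retval.2 = x.1 * x.1;
        r = retval.2;

    The names 'x.1' and 'retval.2' are only illustrative; the actual copies
    are created by remap_decl and declare_return_variable below. */
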
109 /* To Do:
110
111 o In order to make inlining-on-trees work, we pessimized
112 function-local static constants. In particular, they are now
113 always output, even when not addressed. Fix this by treating
114 function-local static constants just like global static
115 constants; the back-end already knows not to output them if they
116 are not needed.
117
118 o Provide heuristics to clamp inlining of recursive template
119 calls? */
120
121
122 /* Weights that estimate_num_insns uses to estimate the size of the
123 produced code. */
124
125 eni_weights eni_size_weights;
126
127 /* Weights that estimate_num_insns uses to estimate the time necessary
128 to execute the produced code. */
129
130 eni_weights eni_time_weights;
131
132 /* Prototypes. */
133
134 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
135 static void remap_block (tree *, copy_body_data *);
136 static void copy_bind_expr (tree *, int *, copy_body_data *);
137 static void declare_inline_vars (tree, tree);
138 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
139 static void prepend_lexical_block (tree current_block, tree new_block);
140 static tree copy_decl_to_var (tree, copy_body_data *);
141 static tree copy_result_decl_to_var (tree, copy_body_data *);
142 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
143 static gimple remap_gimple_stmt (gimple, copy_body_data *);
144 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
145
 146 /* Insert a tree->tree mapping for ID. Although the name suggests
 147 that the trees should be variables, it is used for more than that. */
148
149 void
150 insert_decl_map (copy_body_data *id, tree key, tree value)
151 {
152 id->decl_map->put (key, value);
153
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
156 if (key != value)
157 id->decl_map->put (value, value);
158 }
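
 /* For instance (a hypothetical mapping): after
      insert_decl_map (id, old_var, new_var);
    the map holds both old_var -> new_var and new_var -> new_var, so if
    new_var shows up again while walking trees that were already remapped,
    the lookup yields the identity entry instead of making a second copy. */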
159
160 /* Insert a tree->tree mapping for ID. This is only used for
161 variables. */
162
163 static void
164 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
165 {
166 if (!gimple_in_ssa_p (id->src_cfun))
167 return;
168
169 if (!MAY_HAVE_DEBUG_STMTS)
170 return;
171
172 if (!target_for_debug_bind (key))
173 return;
174
175 gcc_assert (TREE_CODE (key) == PARM_DECL);
176 gcc_assert (TREE_CODE (value) == VAR_DECL);
177
178 if (!id->debug_map)
179 id->debug_map = new hash_map<tree, tree>;
180
181 id->debug_map->put (key, value);
182 }
183
184 /* If nonzero, we're remapping the contents of inlined debug
185 statements. If negative, an error has occurred, such as a
186 reference to a variable that isn't available in the inlined
187 context. */
188 static int processing_debug_stmt = 0;
189
190 /* Construct new SSA name for old NAME. ID is the inline context. */
191
192 static tree
193 remap_ssa_name (tree name, copy_body_data *id)
194 {
195 tree new_tree, var;
196 tree *n;
197
198 gcc_assert (TREE_CODE (name) == SSA_NAME);
199
200 n = id->decl_map->get (name);
201 if (n)
202 return unshare_expr (*n);
203
204 if (processing_debug_stmt)
205 {
206 if (SSA_NAME_IS_DEFAULT_DEF (name)
207 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
208 && id->entry_bb == NULL
209 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
210 {
211 tree vexpr = make_node (DEBUG_EXPR_DECL);
212 gimple def_temp;
213 gimple_stmt_iterator gsi;
214 tree val = SSA_NAME_VAR (name);
215
216 n = id->decl_map->get (val);
217 if (n != NULL)
218 val = *n;
219 if (TREE_CODE (val) != PARM_DECL)
220 {
221 processing_debug_stmt = -1;
222 return name;
223 }
224 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
225 DECL_ARTIFICIAL (vexpr) = 1;
226 TREE_TYPE (vexpr) = TREE_TYPE (name);
227 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
228 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
229 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
230 return vexpr;
231 }
232
233 processing_debug_stmt = -1;
234 return name;
235 }
236
237 /* Remap anonymous SSA names or SSA names of anonymous decls. */
238 var = SSA_NAME_VAR (name);
239 if (!var
240 || (!SSA_NAME_IS_DEFAULT_DEF (name)
241 && TREE_CODE (var) == VAR_DECL
242 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
243 && DECL_ARTIFICIAL (var)
244 && DECL_IGNORED_P (var)
245 && !DECL_NAME (var)))
246 {
247 struct ptr_info_def *pi;
248 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
249 if (!var && SSA_NAME_IDENTIFIER (name))
250 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
251 insert_decl_map (id, name, new_tree);
252 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
253 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
254 /* At least IPA points-to info can be directly transferred. */
255 if (id->src_cfun->gimple_df
256 && id->src_cfun->gimple_df->ipa_pta
257 && (pi = SSA_NAME_PTR_INFO (name))
258 && !pi->pt.anything)
259 {
260 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
261 new_pi->pt = pi->pt;
262 }
263 return new_tree;
264 }
265
 266 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
267 in copy_bb. */
268 new_tree = remap_decl (var, id);
269
 270 /* We might've substituted a constant or another SSA_NAME for
 271 the variable.
 272
 273 Replace the SSA name representing the RESULT_DECL with a variable during
 274 inlining: this saves us from the need to introduce a PHI node in case the
 275 return value is only partly initialized. */
276 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
277 && (!SSA_NAME_VAR (name)
278 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
279 || !id->transform_return_to_modify))
280 {
281 struct ptr_info_def *pi;
282 new_tree = make_ssa_name (new_tree, NULL);
283 insert_decl_map (id, name, new_tree);
284 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286 /* At least IPA points-to info can be directly transferred. */
287 if (id->src_cfun->gimple_df
288 && id->src_cfun->gimple_df->ipa_pta
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 {
 297 /* By inlining a function having an uninitialized variable, we might
 298 extend its lifetime (the variable might get reused). This causes an
 299 ICE in the case we end up extending the lifetime of an SSA name across
 300 an abnormal edge, and it also increases register pressure.
 301
 302 We simply initialize all uninitialized vars to 0, except
 303 when we are inlining into the very first BB. We could avoid
 304 this for all BBs that are not inside strongly connected
 305 regions of the CFG, but that is expensive to test. */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 {
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 }
322 else
323 {
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 }
327 }
328 }
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
332 }
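
 /* Illustrative sketch with made-up SSA names: if the callee reads an
    uninitialized local whose default definition u_1(D) appears in an
    abnormal PHI, the code above creates a fresh name (say u.2_3) and, when
    inlining past the very first BB, appends

        u.2_3 = 0;

    to id->entry_bb rather than keeping it a default definition, so no SSA
    name without a real defining statement is carried across an abnormal
    edge in the copy. */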
333
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
335
336 tree
337 remap_decl (tree decl, copy_body_data *id)
338 {
339 tree *n;
340
341 /* We only remap local variables in the current function. */
342
343 /* See if we have remapped this declaration. */
344
345 n = id->decl_map->get (decl);
346
347 if (!n && processing_debug_stmt)
348 {
349 processing_debug_stmt = -1;
350 return decl;
351 }
352
353 /* If we didn't already have an equivalent for this declaration,
354 create one now. */
355 if (!n)
356 {
357 /* Make a copy of the variable or label. */
358 tree t = id->copy_decl (decl, id);
359
360 /* Remember it, so that if we encounter this local entity again
361 we can reuse this copy. Do this early because remap_type may
362 need this decl for TYPE_STUB_DECL. */
363 insert_decl_map (id, decl, t);
364
365 if (!DECL_P (t))
366 return t;
367
368 /* Remap types, if necessary. */
369 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
370 if (TREE_CODE (t) == TYPE_DECL)
371 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
372
373 /* Remap sizes as necessary. */
374 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
375 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
376
 377 /* For fields, do likewise for the offset and qualifier. */
378 if (TREE_CODE (t) == FIELD_DECL)
379 {
380 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
381 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
382 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
383 }
384
385 return t;
386 }
387
388 if (id->do_not_unshare)
389 return *n;
390 else
391 return unshare_expr (*n);
392 }
393
394 static tree
395 remap_type_1 (tree type, copy_body_data *id)
396 {
397 tree new_tree, t;
398
 399 /* We do need a copy. Build and register it now. If this is a pointer or
400 reference type, remap the designated type and make a new pointer or
401 reference type. */
402 if (TREE_CODE (type) == POINTER_TYPE)
403 {
404 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
405 TYPE_MODE (type),
406 TYPE_REF_CAN_ALIAS_ALL (type));
407 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
408 new_tree = build_type_attribute_qual_variant (new_tree,
409 TYPE_ATTRIBUTES (type),
410 TYPE_QUALS (type));
411 insert_decl_map (id, type, new_tree);
412 return new_tree;
413 }
414 else if (TREE_CODE (type) == REFERENCE_TYPE)
415 {
416 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
417 TYPE_MODE (type),
418 TYPE_REF_CAN_ALIAS_ALL (type));
419 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
420 new_tree = build_type_attribute_qual_variant (new_tree,
421 TYPE_ATTRIBUTES (type),
422 TYPE_QUALS (type));
423 insert_decl_map (id, type, new_tree);
424 return new_tree;
425 }
426 else
427 new_tree = copy_node (type);
428
429 insert_decl_map (id, type, new_tree);
430
431 /* This is a new type, not a copy of an old type. Need to reassociate
432 variants. We can handle everything except the main variant lazily. */
433 t = TYPE_MAIN_VARIANT (type);
434 if (type != t)
435 {
436 t = remap_type (t, id);
437 TYPE_MAIN_VARIANT (new_tree) = t;
438 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
439 TYPE_NEXT_VARIANT (t) = new_tree;
440 }
441 else
442 {
443 TYPE_MAIN_VARIANT (new_tree) = new_tree;
444 TYPE_NEXT_VARIANT (new_tree) = NULL;
445 }
446
447 if (TYPE_STUB_DECL (type))
448 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
449
450 /* Lazily create pointer and reference types. */
451 TYPE_POINTER_TO (new_tree) = NULL;
452 TYPE_REFERENCE_TO (new_tree) = NULL;
453
454 /* Copy all types that may contain references to local variables; be sure to
 455 preserve sharing between the type and its main variant when possible. */
456 switch (TREE_CODE (new_tree))
457 {
458 case INTEGER_TYPE:
459 case REAL_TYPE:
460 case FIXED_POINT_TYPE:
461 case ENUMERAL_TYPE:
462 case BOOLEAN_TYPE:
463 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
464 {
465 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
466 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
467
468 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
469 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
470 }
471 else
472 {
473 t = TYPE_MIN_VALUE (new_tree);
474 if (t && TREE_CODE (t) != INTEGER_CST)
475 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
476
477 t = TYPE_MAX_VALUE (new_tree);
478 if (t && TREE_CODE (t) != INTEGER_CST)
479 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
480 }
481 return new_tree;
482
483 case FUNCTION_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
485 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
486 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
487 else
488 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
489 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
490 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
491 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
492 else
493 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
494 return new_tree;
495
496 case ARRAY_TYPE:
497 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
498 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
499 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
500
501 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
502 {
503 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
504 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
505 }
506 else
507 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
508 break;
509
510 case RECORD_TYPE:
511 case UNION_TYPE:
512 case QUAL_UNION_TYPE:
513 if (TYPE_MAIN_VARIANT (type) != type
514 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
515 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
516 else
517 {
518 tree f, nf = NULL;
519
520 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
521 {
522 t = remap_decl (f, id);
523 DECL_CONTEXT (t) = new_tree;
524 DECL_CHAIN (t) = nf;
525 nf = t;
526 }
527 TYPE_FIELDS (new_tree) = nreverse (nf);
528 }
529 break;
530
531 case OFFSET_TYPE:
532 default:
533 /* Shouldn't have been thought variable sized. */
534 gcc_unreachable ();
535 }
536
 537 /* All variants of the type share the same size, so use the already remapped data. */
538 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
539 {
540 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
541 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
542
543 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
544 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
545 }
546 else
547 {
548 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
549 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
550 }
551
552 return new_tree;
553 }
554
555 tree
556 remap_type (tree type, copy_body_data *id)
557 {
558 tree *node;
559 tree tmp;
560
561 if (type == NULL)
562 return type;
563
564 /* See if we have remapped this type. */
565 node = id->decl_map->get (type);
566 if (node)
567 return *node;
568
569 /* The type only needs remapping if it's variably modified. */
570 if (! variably_modified_type_p (type, id->src_fn))
571 {
572 insert_decl_map (id, type, type);
573 return type;
574 }
575
576 id->remapping_type_depth++;
577 tmp = remap_type_1 (type, id);
578 id->remapping_type_depth--;
579
580 return tmp;
581 }
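
 /* A small assumed example: for a callee such as

        void f (int n) { char buf[n]; ... }

    the type char[n] is variably modified by 'n', so remap_type_1 above
    builds a fresh ARRAY_TYPE whose domain and size refer to the remapped
    copy of 'n', whereas a plain type like 'int' is not variably modified
    and is simply mapped to itself by the early-out in remap_type. */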
582
583 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
584
585 static bool
586 can_be_nonlocal (tree decl, copy_body_data *id)
587 {
 588 /* We cannot duplicate function decls. */
589 if (TREE_CODE (decl) == FUNCTION_DECL)
590 return true;
591
592 /* Local static vars must be non-local or we get multiple declaration
593 problems. */
594 if (TREE_CODE (decl) == VAR_DECL
595 && !auto_var_in_fn_p (decl, id->src_fn))
596 return true;
597
598 return false;
599 }
600
601 static tree
602 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
603 copy_body_data *id)
604 {
605 tree old_var;
606 tree new_decls = NULL_TREE;
607
608 /* Remap its variables. */
609 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
610 {
611 tree new_var;
612
613 if (can_be_nonlocal (old_var, id))
614 {
615 /* We need to add this variable to the local decls as otherwise
616 nothing else will do so. */
617 if (TREE_CODE (old_var) == VAR_DECL
618 && ! DECL_EXTERNAL (old_var))
619 add_local_decl (cfun, old_var);
620 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
621 && !DECL_IGNORED_P (old_var)
622 && nonlocalized_list)
623 vec_safe_push (*nonlocalized_list, old_var);
624 continue;
625 }
626
627 /* Remap the variable. */
628 new_var = remap_decl (old_var, id);
629
630 /* If we didn't remap this variable, we can't mess with its
631 TREE_CHAIN. If we remapped this variable to the return slot, it's
632 already declared somewhere else, so don't declare it here. */
633
634 if (new_var == id->retvar)
635 ;
636 else if (!new_var)
637 {
638 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
639 && !DECL_IGNORED_P (old_var)
640 && nonlocalized_list)
641 vec_safe_push (*nonlocalized_list, old_var);
642 }
643 else
644 {
645 gcc_assert (DECL_P (new_var));
646 DECL_CHAIN (new_var) = new_decls;
647 new_decls = new_var;
648
649 /* Also copy value-expressions. */
650 if (TREE_CODE (new_var) == VAR_DECL
651 && DECL_HAS_VALUE_EXPR_P (new_var))
652 {
653 tree tem = DECL_VALUE_EXPR (new_var);
654 bool old_regimplify = id->regimplify;
655 id->remapping_type_depth++;
656 walk_tree (&tem, copy_tree_body_r, id, NULL);
657 id->remapping_type_depth--;
658 id->regimplify = old_regimplify;
659 SET_DECL_VALUE_EXPR (new_var, tem);
660 }
661 }
662 }
663
664 return nreverse (new_decls);
665 }
666
667 /* Copy the BLOCK to contain remapped versions of the variables
668 therein. And hook the new block into the block-tree. */
669
670 static void
671 remap_block (tree *block, copy_body_data *id)
672 {
673 tree old_block;
674 tree new_block;
675
676 /* Make the new block. */
677 old_block = *block;
678 new_block = make_node (BLOCK);
679 TREE_USED (new_block) = TREE_USED (old_block);
680 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
681 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
682 BLOCK_NONLOCALIZED_VARS (new_block)
683 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
684 *block = new_block;
685
686 /* Remap its variables. */
687 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
688 &BLOCK_NONLOCALIZED_VARS (new_block),
689 id);
690
691 if (id->transform_lang_insert_block)
692 id->transform_lang_insert_block (new_block);
693
694 /* Remember the remapped block. */
695 insert_decl_map (id, old_block, new_block);
696 }
697
698 /* Copy the whole block tree and root it in id->block. */
699 static tree
700 remap_blocks (tree block, copy_body_data *id)
701 {
702 tree t;
703 tree new_tree = block;
704
705 if (!block)
706 return NULL;
707
708 remap_block (&new_tree, id);
709 gcc_assert (new_tree != block);
710 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
711 prepend_lexical_block (new_tree, remap_blocks (t, id));
712 /* Blocks are in arbitrary order, but make things slightly prettier and do
713 not swap order when producing a copy. */
714 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
715 return new_tree;
716 }
717
718 /* Remap the block tree rooted at BLOCK to nothing. */
719 static void
720 remap_blocks_to_null (tree block, copy_body_data *id)
721 {
722 tree t;
723 insert_decl_map (id, block, NULL_TREE);
724 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
725 remap_blocks_to_null (t, id);
726 }
727
728 static void
729 copy_statement_list (tree *tp)
730 {
731 tree_stmt_iterator oi, ni;
732 tree new_tree;
733
734 new_tree = alloc_stmt_list ();
735 ni = tsi_start (new_tree);
736 oi = tsi_start (*tp);
737 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
738 *tp = new_tree;
739
740 for (; !tsi_end_p (oi); tsi_next (&oi))
741 {
742 tree stmt = tsi_stmt (oi);
743 if (TREE_CODE (stmt) == STATEMENT_LIST)
744 /* This copy is not redundant; tsi_link_after will smash this
745 STATEMENT_LIST into the end of the one we're building, and we
746 don't want to do that with the original. */
747 copy_statement_list (&stmt);
748 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
749 }
750 }
751
752 static void
753 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
754 {
755 tree block = BIND_EXPR_BLOCK (*tp);
756 /* Copy (and replace) the statement. */
757 copy_tree_r (tp, walk_subtrees, NULL);
758 if (block)
759 {
760 remap_block (&block, id);
761 BIND_EXPR_BLOCK (*tp) = block;
762 }
763
764 if (BIND_EXPR_VARS (*tp))
765 /* This will remap a lot of the same decls again, but this should be
766 harmless. */
767 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
768 }
769
770
771 /* Create a new gimple_seq by remapping all the statements in BODY
772 using the inlining information in ID. */
773
774 static gimple_seq
775 remap_gimple_seq (gimple_seq body, copy_body_data *id)
776 {
777 gimple_stmt_iterator si;
778 gimple_seq new_body = NULL;
779
780 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
781 {
782 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
783 gimple_seq_add_stmt (&new_body, new_stmt);
784 }
785
786 return new_body;
787 }
788
789
790 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
791 block using the mapping information in ID. */
792
793 static gimple
794 copy_gimple_bind (gimple stmt, copy_body_data *id)
795 {
796 gimple new_bind;
797 tree new_block, new_vars;
798 gimple_seq body, new_body;
799
800 /* Copy the statement. Note that we purposely don't use copy_stmt
801 here because we need to remap statements as we copy. */
802 body = gimple_bind_body (stmt);
803 new_body = remap_gimple_seq (body, id);
804
805 new_block = gimple_bind_block (stmt);
806 if (new_block)
807 remap_block (&new_block, id);
808
809 /* This will remap a lot of the same decls again, but this should be
810 harmless. */
811 new_vars = gimple_bind_vars (stmt);
812 if (new_vars)
813 new_vars = remap_decls (new_vars, NULL, id);
814
815 new_bind = gimple_build_bind (new_vars, new_body, new_block);
816
817 return new_bind;
818 }
819
 820 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
821
822 static bool
823 is_parm (tree decl)
824 {
825 if (TREE_CODE (decl) == SSA_NAME)
826 {
827 decl = SSA_NAME_VAR (decl);
828 if (!decl)
829 return false;
830 }
831
832 return (TREE_CODE (decl) == PARM_DECL);
833 }
834
835 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
836 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
 837 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
 838 recursing into the child nodes of *TP. */
839
840 static tree
841 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
842 {
843 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
844 copy_body_data *id = (copy_body_data *) wi_p->info;
845 tree fn = id->src_fn;
846
847 if (TREE_CODE (*tp) == SSA_NAME)
848 {
849 *tp = remap_ssa_name (*tp, id);
850 *walk_subtrees = 0;
851 return NULL;
852 }
853 else if (auto_var_in_fn_p (*tp, fn))
854 {
855 /* Local variables and labels need to be replaced by equivalent
856 variables. We don't want to copy static variables; there's
857 only one of those, no matter how many times we inline the
858 containing function. Similarly for globals from an outer
859 function. */
860 tree new_decl;
861
862 /* Remap the declaration. */
863 new_decl = remap_decl (*tp, id);
864 gcc_assert (new_decl);
865 /* Replace this variable with the copy. */
866 STRIP_TYPE_NOPS (new_decl);
 867 /* ??? The C++ frontend uses a zero pointer of type void * to initialize
 868 any other type. This confuses the middle-end type verification.
 869 As cloned bodies do not go through gimplification again, the fixup
 870 there doesn't trigger. */
871 if (TREE_CODE (new_decl) == INTEGER_CST
872 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
873 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
874 *tp = new_decl;
875 *walk_subtrees = 0;
876 }
877 else if (TREE_CODE (*tp) == STATEMENT_LIST)
878 gcc_unreachable ();
879 else if (TREE_CODE (*tp) == SAVE_EXPR)
880 gcc_unreachable ();
881 else if (TREE_CODE (*tp) == LABEL_DECL
882 && (!DECL_CONTEXT (*tp)
883 || decl_function_context (*tp) == id->src_fn))
884 /* These may need to be remapped for EH handling. */
885 *tp = remap_decl (*tp, id);
886 else if (TREE_CODE (*tp) == FIELD_DECL)
887 {
888 /* If the enclosing record type is variably_modified_type_p, the field
889 has already been remapped. Otherwise, it need not be. */
890 tree *n = id->decl_map->get (*tp);
891 if (n)
892 *tp = *n;
893 *walk_subtrees = 0;
894 }
895 else if (TYPE_P (*tp))
896 /* Types may need remapping as well. */
897 *tp = remap_type (*tp, id);
898 else if (CONSTANT_CLASS_P (*tp))
899 {
900 /* If this is a constant, we have to copy the node iff the type
901 will be remapped. copy_tree_r will not copy a constant. */
902 tree new_type = remap_type (TREE_TYPE (*tp), id);
903
904 if (new_type == TREE_TYPE (*tp))
905 *walk_subtrees = 0;
906
907 else if (TREE_CODE (*tp) == INTEGER_CST)
908 *tp = wide_int_to_tree (new_type, *tp);
909 else
910 {
911 *tp = copy_node (*tp);
912 TREE_TYPE (*tp) = new_type;
913 }
914 }
915 else
916 {
917 /* Otherwise, just copy the node. Note that copy_tree_r already
918 knows not to copy VAR_DECLs, etc., so this is safe. */
919
920 if (TREE_CODE (*tp) == MEM_REF)
921 {
922 /* We need to re-canonicalize MEM_REFs from inline substitutions
923 that can happen when a pointer argument is an ADDR_EXPR.
924 Recurse here manually to allow that. */
925 tree ptr = TREE_OPERAND (*tp, 0);
926 tree type = remap_type (TREE_TYPE (*tp), id);
927 tree old = *tp;
928 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
929 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
930 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
931 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
932 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
933 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
934 remapped a parameter as the property might be valid only
935 for the parameter itself. */
936 if (TREE_THIS_NOTRAP (old)
937 && (!is_parm (TREE_OPERAND (old, 0))
938 || (!id->transform_parameter && is_parm (ptr))))
939 TREE_THIS_NOTRAP (*tp) = 1;
940 *walk_subtrees = 0;
941 return NULL;
942 }
943
944 /* Here is the "usual case". Copy this tree node, and then
945 tweak some special cases. */
946 copy_tree_r (tp, walk_subtrees, NULL);
947
948 if (TREE_CODE (*tp) != OMP_CLAUSE)
949 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
950
951 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
952 {
953 /* The copied TARGET_EXPR has never been expanded, even if the
954 original node was expanded already. */
955 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
956 TREE_OPERAND (*tp, 3) = NULL_TREE;
957 }
958 else if (TREE_CODE (*tp) == ADDR_EXPR)
959 {
960 /* Variable substitution need not be simple. In particular,
961 the MEM_REF substitution above. Make sure that
962 TREE_CONSTANT and friends are up-to-date. */
963 int invariant = is_gimple_min_invariant (*tp);
964 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
965 recompute_tree_invariant_for_addr_expr (*tp);
966
967 /* If this used to be invariant, but is not any longer,
968 then regimplification is probably needed. */
969 if (invariant && !is_gimple_min_invariant (*tp))
970 id->regimplify = true;
971
972 *walk_subtrees = 0;
973 }
974 }
975
976 /* Update the TREE_BLOCK for the cloned expr. */
977 if (EXPR_P (*tp))
978 {
979 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
980 tree old_block = TREE_BLOCK (*tp);
981 if (old_block)
982 {
983 tree *n;
984 n = id->decl_map->get (TREE_BLOCK (*tp));
985 if (n)
986 new_block = *n;
987 }
988 TREE_SET_BLOCK (*tp, new_block);
989 }
990
991 /* Keep iterating. */
992 return NULL_TREE;
993 }
994
995
996 /* Called from copy_body_id via walk_tree. DATA is really a
997 `copy_body_data *'. */
998
999 tree
1000 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1001 {
1002 copy_body_data *id = (copy_body_data *) data;
1003 tree fn = id->src_fn;
1004 tree new_block;
1005
1006 /* Begin by recognizing trees that we'll completely rewrite for the
1007 inlining context. Our output for these trees is completely
 1008 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1009 into an edge). Further down, we'll handle trees that get
1010 duplicated and/or tweaked. */
1011
1012 /* When requested, RETURN_EXPRs should be transformed to just the
1013 contained MODIFY_EXPR. The branch semantics of the return will
1014 be handled elsewhere by manipulating the CFG rather than a statement. */
1015 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1016 {
1017 tree assignment = TREE_OPERAND (*tp, 0);
1018
1019 /* If we're returning something, just turn that into an
1020 assignment into the equivalent of the original RESULT_DECL.
1021 If the "assignment" is just the result decl, the result
1022 decl has already been set (e.g. a recent "foo (&result_decl,
1023 ...)"); just toss the entire RETURN_EXPR. */
1024 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1025 {
1026 /* Replace the RETURN_EXPR with (a copy of) the
1027 MODIFY_EXPR hanging underneath. */
1028 *tp = copy_node (assignment);
1029 }
1030 else /* Else the RETURN_EXPR returns no value. */
1031 {
1032 *tp = NULL;
1033 return (tree) (void *)1;
1034 }
1035 }
1036 else if (TREE_CODE (*tp) == SSA_NAME)
1037 {
1038 *tp = remap_ssa_name (*tp, id);
1039 *walk_subtrees = 0;
1040 return NULL;
1041 }
1042
1043 /* Local variables and labels need to be replaced by equivalent
1044 variables. We don't want to copy static variables; there's only
1045 one of those, no matter how many times we inline the containing
1046 function. Similarly for globals from an outer function. */
1047 else if (auto_var_in_fn_p (*tp, fn))
1048 {
1049 tree new_decl;
1050
1051 /* Remap the declaration. */
1052 new_decl = remap_decl (*tp, id);
1053 gcc_assert (new_decl);
1054 /* Replace this variable with the copy. */
1055 STRIP_TYPE_NOPS (new_decl);
1056 *tp = new_decl;
1057 *walk_subtrees = 0;
1058 }
1059 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1060 copy_statement_list (tp);
1061 else if (TREE_CODE (*tp) == SAVE_EXPR
1062 || TREE_CODE (*tp) == TARGET_EXPR)
1063 remap_save_expr (tp, id->decl_map, walk_subtrees);
1064 else if (TREE_CODE (*tp) == LABEL_DECL
1065 && (! DECL_CONTEXT (*tp)
1066 || decl_function_context (*tp) == id->src_fn))
1067 /* These may need to be remapped for EH handling. */
1068 *tp = remap_decl (*tp, id);
1069 else if (TREE_CODE (*tp) == BIND_EXPR)
1070 copy_bind_expr (tp, walk_subtrees, id);
1071 /* Types may need remapping as well. */
1072 else if (TYPE_P (*tp))
1073 *tp = remap_type (*tp, id);
1074
1075 /* If this is a constant, we have to copy the node iff the type will be
1076 remapped. copy_tree_r will not copy a constant. */
1077 else if (CONSTANT_CLASS_P (*tp))
1078 {
1079 tree new_type = remap_type (TREE_TYPE (*tp), id);
1080
1081 if (new_type == TREE_TYPE (*tp))
1082 *walk_subtrees = 0;
1083
1084 else if (TREE_CODE (*tp) == INTEGER_CST)
1085 *tp = wide_int_to_tree (new_type, *tp);
1086 else
1087 {
1088 *tp = copy_node (*tp);
1089 TREE_TYPE (*tp) = new_type;
1090 }
1091 }
1092
1093 /* Otherwise, just copy the node. Note that copy_tree_r already
1094 knows not to copy VAR_DECLs, etc., so this is safe. */
1095 else
1096 {
1097 /* Here we handle trees that are not completely rewritten.
1098 First we detect some inlining-induced bogosities for
1099 discarding. */
1100 if (TREE_CODE (*tp) == MODIFY_EXPR
1101 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1102 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1103 {
1104 /* Some assignments VAR = VAR; don't generate any rtl code
1105 and thus don't count as variable modification. Avoid
1106 keeping bogosities like 0 = 0. */
1107 tree decl = TREE_OPERAND (*tp, 0), value;
1108 tree *n;
1109
1110 n = id->decl_map->get (decl);
1111 if (n)
1112 {
1113 value = *n;
1114 STRIP_TYPE_NOPS (value);
1115 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1116 {
1117 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1118 return copy_tree_body_r (tp, walk_subtrees, data);
1119 }
1120 }
1121 }
1122 else if (TREE_CODE (*tp) == INDIRECT_REF)
1123 {
1124 /* Get rid of *& from inline substitutions that can happen when a
1125 pointer argument is an ADDR_EXPR. */
1126 tree decl = TREE_OPERAND (*tp, 0);
1127 tree *n = id->decl_map->get (decl);
1128 if (n)
1129 {
 1130 /* If we happen to get an ADDR_EXPR in *n, strip
1131 it manually here as we'll eventually get ADDR_EXPRs
1132 which lie about their types pointed to. In this case
1133 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1134 but we absolutely rely on that. As fold_indirect_ref
1135 does other useful transformations, try that first, though. */
1136 tree type = TREE_TYPE (*tp);
1137 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1138 tree old = *tp;
1139 *tp = gimple_fold_indirect_ref (ptr);
1140 if (! *tp)
1141 {
1142 if (TREE_CODE (ptr) == ADDR_EXPR)
1143 {
1144 *tp
1145 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1146 /* ??? We should either assert here or build
1147 a VIEW_CONVERT_EXPR instead of blindly leaking
1148 incompatible types to our IL. */
1149 if (! *tp)
1150 *tp = TREE_OPERAND (ptr, 0);
1151 }
1152 else
1153 {
1154 *tp = build1 (INDIRECT_REF, type, ptr);
1155 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1156 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1157 TREE_READONLY (*tp) = TREE_READONLY (old);
1158 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1159 have remapped a parameter as the property might be
1160 valid only for the parameter itself. */
1161 if (TREE_THIS_NOTRAP (old)
1162 && (!is_parm (TREE_OPERAND (old, 0))
1163 || (!id->transform_parameter && is_parm (ptr))))
1164 TREE_THIS_NOTRAP (*tp) = 1;
1165 }
1166 }
1167 *walk_subtrees = 0;
1168 return NULL;
1169 }
1170 }
1171 else if (TREE_CODE (*tp) == MEM_REF)
1172 {
1173 /* We need to re-canonicalize MEM_REFs from inline substitutions
1174 that can happen when a pointer argument is an ADDR_EXPR.
1175 Recurse here manually to allow that. */
1176 tree ptr = TREE_OPERAND (*tp, 0);
1177 tree type = remap_type (TREE_TYPE (*tp), id);
1178 tree old = *tp;
1179 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1180 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1181 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1182 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1183 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1184 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1185 remapped a parameter as the property might be valid only
1186 for the parameter itself. */
1187 if (TREE_THIS_NOTRAP (old)
1188 && (!is_parm (TREE_OPERAND (old, 0))
1189 || (!id->transform_parameter && is_parm (ptr))))
1190 TREE_THIS_NOTRAP (*tp) = 1;
1191 *walk_subtrees = 0;
1192 return NULL;
1193 }
1194
1195 /* Here is the "usual case". Copy this tree node, and then
1196 tweak some special cases. */
1197 copy_tree_r (tp, walk_subtrees, NULL);
1198
 1199 /* If EXPR has a block defined, map it to the newly constructed block.
 1200 When inlining we want EXPRs without a block to appear in the block
 1201 of the function call if we are not remapping a type. */
1202 if (EXPR_P (*tp))
1203 {
1204 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1205 if (TREE_BLOCK (*tp))
1206 {
1207 tree *n;
1208 n = id->decl_map->get (TREE_BLOCK (*tp));
1209 if (n)
1210 new_block = *n;
1211 }
1212 TREE_SET_BLOCK (*tp, new_block);
1213 }
1214
1215 if (TREE_CODE (*tp) != OMP_CLAUSE)
1216 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1217
1218 /* The copied TARGET_EXPR has never been expanded, even if the
1219 original node was expanded already. */
1220 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1221 {
1222 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1223 TREE_OPERAND (*tp, 3) = NULL_TREE;
1224 }
1225
1226 /* Variable substitution need not be simple. In particular, the
1227 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1228 and friends are up-to-date. */
1229 else if (TREE_CODE (*tp) == ADDR_EXPR)
1230 {
1231 int invariant = is_gimple_min_invariant (*tp);
1232 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1233
1234 /* Handle the case where we substituted an INDIRECT_REF
1235 into the operand of the ADDR_EXPR. */
1236 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1237 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1238 else
1239 recompute_tree_invariant_for_addr_expr (*tp);
1240
1241 /* If this used to be invariant, but is not any longer,
1242 then regimplification is probably needed. */
1243 if (invariant && !is_gimple_min_invariant (*tp))
1244 id->regimplify = true;
1245
1246 *walk_subtrees = 0;
1247 }
1248 }
1249
1250 /* Keep iterating. */
1251 return NULL_TREE;
1252 }
1253
1254 /* Helper for remap_gimple_stmt. Given an EH region number for the
1255 source function, map that to the duplicate EH region number in
1256 the destination function. */
1257
1258 static int
1259 remap_eh_region_nr (int old_nr, copy_body_data *id)
1260 {
1261 eh_region old_r, new_r;
1262
1263 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1264 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1265
1266 return new_r->index;
1267 }
1268
1269 /* Similar, but operate on INTEGER_CSTs. */
1270
1271 static tree
1272 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1273 {
1274 int old_nr, new_nr;
1275
1276 old_nr = tree_to_shwi (old_t_nr);
1277 new_nr = remap_eh_region_nr (old_nr, id);
1278
1279 return build_int_cst (integer_type_node, new_nr);
1280 }
1281
1282 /* Helper for copy_bb. Remap statement STMT using the inlining
1283 information in ID. Return the new statement copy. */
1284
1285 static gimple
1286 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1287 {
1288 gimple copy = NULL;
1289 struct walk_stmt_info wi;
1290 bool skip_first = false;
1291
1292 /* Begin by recognizing trees that we'll completely rewrite for the
1293 inlining context. Our output for these trees is completely
 1294 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1295 into an edge). Further down, we'll handle trees that get
1296 duplicated and/or tweaked. */
1297
1298 /* When requested, GIMPLE_RETURNs should be transformed to just the
1299 contained GIMPLE_ASSIGN. The branch semantics of the return will
1300 be handled elsewhere by manipulating the CFG rather than the
1301 statement. */
1302 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1303 {
1304 tree retval = gimple_return_retval (stmt);
1305
1306 /* If we're returning something, just turn that into an
1307 assignment into the equivalent of the original RESULT_DECL.
1308 If RETVAL is just the result decl, the result decl has
1309 already been set (e.g. a recent "foo (&result_decl, ...)");
1310 just toss the entire GIMPLE_RETURN. */
1311 if (retval
1312 && (TREE_CODE (retval) != RESULT_DECL
1313 && (TREE_CODE (retval) != SSA_NAME
1314 || ! SSA_NAME_VAR (retval)
1315 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1316 {
1317 copy = gimple_build_assign (id->do_not_unshare
1318 ? id->retvar : unshare_expr (id->retvar),
1319 retval);
1320 /* id->retvar is already substituted. Skip it on later remapping. */
1321 skip_first = true;
1322 }
1323 else
1324 return gimple_build_nop ();
1325 }
1326 else if (gimple_has_substatements (stmt))
1327 {
1328 gimple_seq s1, s2;
1329
1330 /* When cloning bodies from the C++ front end, we will be handed bodies
1331 in High GIMPLE form. Handle here all the High GIMPLE statements that
1332 have embedded statements. */
1333 switch (gimple_code (stmt))
1334 {
1335 case GIMPLE_BIND:
1336 copy = copy_gimple_bind (stmt, id);
1337 break;
1338
1339 case GIMPLE_CATCH:
1340 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1341 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1342 break;
1343
1344 case GIMPLE_EH_FILTER:
1345 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1346 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1347 break;
1348
1349 case GIMPLE_TRY:
1350 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1351 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1352 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1353 break;
1354
1355 case GIMPLE_WITH_CLEANUP_EXPR:
1356 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1357 copy = gimple_build_wce (s1);
1358 break;
1359
1360 case GIMPLE_OMP_PARALLEL:
1361 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1362 copy = gimple_build_omp_parallel
1363 (s1,
1364 gimple_omp_parallel_clauses (stmt),
1365 gimple_omp_parallel_child_fn (stmt),
1366 gimple_omp_parallel_data_arg (stmt));
1367 break;
1368
1369 case GIMPLE_OMP_TASK:
1370 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1371 copy = gimple_build_omp_task
1372 (s1,
1373 gimple_omp_task_clauses (stmt),
1374 gimple_omp_task_child_fn (stmt),
1375 gimple_omp_task_data_arg (stmt),
1376 gimple_omp_task_copy_fn (stmt),
1377 gimple_omp_task_arg_size (stmt),
1378 gimple_omp_task_arg_align (stmt));
1379 break;
1380
1381 case GIMPLE_OMP_FOR:
1382 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1383 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1384 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1385 gimple_omp_for_clauses (stmt),
1386 gimple_omp_for_collapse (stmt), s2);
1387 {
1388 size_t i;
1389 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1390 {
1391 gimple_omp_for_set_index (copy, i,
1392 gimple_omp_for_index (stmt, i));
1393 gimple_omp_for_set_initial (copy, i,
1394 gimple_omp_for_initial (stmt, i));
1395 gimple_omp_for_set_final (copy, i,
1396 gimple_omp_for_final (stmt, i));
1397 gimple_omp_for_set_incr (copy, i,
1398 gimple_omp_for_incr (stmt, i));
1399 gimple_omp_for_set_cond (copy, i,
1400 gimple_omp_for_cond (stmt, i));
1401 }
1402 }
1403 break;
1404
1405 case GIMPLE_OMP_MASTER:
1406 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1407 copy = gimple_build_omp_master (s1);
1408 break;
1409
1410 case GIMPLE_OMP_TASKGROUP:
1411 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1412 copy = gimple_build_omp_taskgroup (s1);
1413 break;
1414
1415 case GIMPLE_OMP_ORDERED:
1416 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1417 copy = gimple_build_omp_ordered (s1);
1418 break;
1419
1420 case GIMPLE_OMP_SECTION:
1421 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1422 copy = gimple_build_omp_section (s1);
1423 break;
1424
1425 case GIMPLE_OMP_SECTIONS:
1426 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1427 copy = gimple_build_omp_sections
1428 (s1, gimple_omp_sections_clauses (stmt));
1429 break;
1430
1431 case GIMPLE_OMP_SINGLE:
1432 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1433 copy = gimple_build_omp_single
1434 (s1, gimple_omp_single_clauses (stmt));
1435 break;
1436
1437 case GIMPLE_OMP_TARGET:
1438 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1439 copy = gimple_build_omp_target
1440 (s1, gimple_omp_target_kind (stmt),
1441 gimple_omp_target_clauses (stmt));
1442 break;
1443
1444 case GIMPLE_OMP_TEAMS:
1445 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1446 copy = gimple_build_omp_teams
1447 (s1, gimple_omp_teams_clauses (stmt));
1448 break;
1449
1450 case GIMPLE_OMP_CRITICAL:
1451 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1452 copy
1453 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1454 break;
1455
1456 case GIMPLE_TRANSACTION:
1457 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1458 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1459 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1460 break;
1461
1462 default:
1463 gcc_unreachable ();
1464 }
1465 }
1466 else
1467 {
1468 if (gimple_assign_copy_p (stmt)
1469 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1470 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1471 {
1472 /* Here we handle statements that are not completely rewritten.
1473 First we detect some inlining-induced bogosities for
1474 discarding. */
1475
1476 /* Some assignments VAR = VAR; don't generate any rtl code
1477 and thus don't count as variable modification. Avoid
1478 keeping bogosities like 0 = 0. */
1479 tree decl = gimple_assign_lhs (stmt), value;
1480 tree *n;
1481
1482 n = id->decl_map->get (decl);
1483 if (n)
1484 {
1485 value = *n;
1486 STRIP_TYPE_NOPS (value);
1487 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1488 return gimple_build_nop ();
1489 }
1490 }
1491
 1492 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1493 in a block that we aren't copying during tree_function_versioning,
1494 just drop the clobber stmt. */
1495 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1496 {
1497 tree lhs = gimple_assign_lhs (stmt);
1498 if (TREE_CODE (lhs) == MEM_REF
1499 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1500 {
1501 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1502 if (gimple_bb (def_stmt)
1503 && !bitmap_bit_p (id->blocks_to_copy,
1504 gimple_bb (def_stmt)->index))
1505 return gimple_build_nop ();
1506 }
1507 }
1508
1509 if (gimple_debug_bind_p (stmt))
1510 {
1511 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1512 gimple_debug_bind_get_value (stmt),
1513 stmt);
1514 id->debug_stmts.safe_push (copy);
1515 return copy;
1516 }
1517 if (gimple_debug_source_bind_p (stmt))
1518 {
1519 copy = gimple_build_debug_source_bind
1520 (gimple_debug_source_bind_get_var (stmt),
1521 gimple_debug_source_bind_get_value (stmt), stmt);
1522 id->debug_stmts.safe_push (copy);
1523 return copy;
1524 }
1525
1526 /* Create a new deep copy of the statement. */
1527 copy = gimple_copy (stmt);
1528
1529 /* Clear flags that need revisiting. */
1530 if (is_gimple_call (copy)
1531 && gimple_call_tail_p (copy))
1532 gimple_call_set_tail (copy, false);
1533
1534 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1535 RESX and EH_DISPATCH. */
1536 if (id->eh_map)
1537 switch (gimple_code (copy))
1538 {
1539 case GIMPLE_CALL:
1540 {
1541 tree r, fndecl = gimple_call_fndecl (copy);
1542 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1543 switch (DECL_FUNCTION_CODE (fndecl))
1544 {
1545 case BUILT_IN_EH_COPY_VALUES:
1546 r = gimple_call_arg (copy, 1);
1547 r = remap_eh_region_tree_nr (r, id);
1548 gimple_call_set_arg (copy, 1, r);
1549 /* FALLTHRU */
1550
1551 case BUILT_IN_EH_POINTER:
1552 case BUILT_IN_EH_FILTER:
1553 r = gimple_call_arg (copy, 0);
1554 r = remap_eh_region_tree_nr (r, id);
1555 gimple_call_set_arg (copy, 0, r);
1556 break;
1557
1558 default:
1559 break;
1560 }
1561
1562 /* Reset alias info if we didn't apply measures to
1563 keep it valid over inlining by setting DECL_PT_UID. */
1564 if (!id->src_cfun->gimple_df
1565 || !id->src_cfun->gimple_df->ipa_pta)
1566 gimple_call_reset_alias_info (copy);
1567 }
1568 break;
1569
1570 case GIMPLE_RESX:
1571 {
1572 int r = gimple_resx_region (copy);
1573 r = remap_eh_region_nr (r, id);
1574 gimple_resx_set_region (copy, r);
1575 }
1576 break;
1577
1578 case GIMPLE_EH_DISPATCH:
1579 {
1580 int r = gimple_eh_dispatch_region (copy);
1581 r = remap_eh_region_nr (r, id);
1582 gimple_eh_dispatch_set_region (copy, r);
1583 }
1584 break;
1585
1586 default:
1587 break;
1588 }
1589 }
1590
1591 /* If STMT has a block defined, map it to the newly constructed
1592 block. */
1593 if (gimple_block (copy))
1594 {
1595 tree *n;
1596 n = id->decl_map->get (gimple_block (copy));
1597 gcc_assert (n);
1598 gimple_set_block (copy, *n);
1599 }
1600
1601 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1602 return copy;
1603
1604 /* Remap all the operands in COPY. */
1605 memset (&wi, 0, sizeof (wi));
1606 wi.info = id;
1607 if (skip_first)
1608 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1609 else
1610 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1611
1612 /* Clear the copied virtual operands. We are not remapping them here
1613 but are going to recreate them from scratch. */
1614 if (gimple_has_mem_ops (copy))
1615 {
1616 gimple_set_vdef (copy, NULL_TREE);
1617 gimple_set_vuse (copy, NULL_TREE);
1618 }
1619
1620 return copy;
1621 }
1622
1623
 1624 /* Copy a basic block, scaling the profile accordingly. Edges will be taken
 1625 care of later. */
1626
1627 static basic_block
1628 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1629 gcov_type count_scale)
1630 {
1631 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1632 basic_block copy_basic_block;
1633 tree decl;
1634 gcov_type freq;
1635 basic_block prev;
1636
1637 /* Search for previous copied basic block. */
1638 prev = bb->prev_bb;
1639 while (!prev->aux)
1640 prev = prev->prev_bb;
1641
1642 /* create_basic_block() will append every new block to
1643 basic_block_info automatically. */
1644 copy_basic_block = create_basic_block (NULL, (void *) 0,
1645 (basic_block) prev->aux);
1646 copy_basic_block->count = apply_scale (bb->count, count_scale);
1647
 1648 /* We are going to rebuild frequencies from scratch. These values
 1649 matter only a little for driving canonicalize_loop_headers. */
1650 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1651
1652 /* We recompute frequencies after inlining, so this is quite safe. */
1653 if (freq > BB_FREQ_MAX)
1654 freq = BB_FREQ_MAX;
1655 copy_basic_block->frequency = freq;
1656
1657 copy_gsi = gsi_start_bb (copy_basic_block);
1658
1659 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1660 {
1661 gimple stmt = gsi_stmt (gsi);
1662 gimple orig_stmt = stmt;
1663
1664 id->regimplify = false;
1665 stmt = remap_gimple_stmt (stmt, id);
1666 if (gimple_nop_p (stmt))
1667 continue;
1668
1669 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1670 seq_gsi = copy_gsi;
1671
 1672 /* With return slot optimization we can end up with
 1673 non-gimple code like (foo *)&this->m; fix that here. */
1674 if (is_gimple_assign (stmt)
1675 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1676 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1677 {
1678 tree new_rhs;
1679 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1680 gimple_assign_rhs1 (stmt),
1681 true, NULL, false,
1682 GSI_CONTINUE_LINKING);
1683 gimple_assign_set_rhs1 (stmt, new_rhs);
1684 id->regimplify = false;
1685 }
1686
1687 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1688
1689 if (id->regimplify)
1690 gimple_regimplify_operands (stmt, &seq_gsi);
1691
 1692 /* If copy_basic_block was empty at the start of this iteration,
 1693 call gsi_start_bb again to get at the newly added statements. */
1694 if (gsi_end_p (copy_gsi))
1695 copy_gsi = gsi_start_bb (copy_basic_block);
1696 else
1697 gsi_next (&copy_gsi);
1698
 1699 /* Process the new statement. The call to gimple_regimplify_operands
 1700 possibly turned the statement into multiple statements; we
 1701 need to process all of them. */
1702 do
1703 {
1704 tree fn;
1705
1706 stmt = gsi_stmt (copy_gsi);
1707 if (is_gimple_call (stmt)
1708 && gimple_call_va_arg_pack_p (stmt)
1709 && id->gimple_call)
1710 {
1711 /* __builtin_va_arg_pack () should be replaced by
1712 all arguments corresponding to ... in the caller. */
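 /* Hypothetical example: if the inlined callee contains
      bar (1, __builtin_va_arg_pack ());
    and the call being inlined is foo (x, y, z), where only 'x' binds to a
    named parameter of foo, the rebuilt call becomes
      bar (1, y, z);
    with GF_CALL_VA_ARG_PACK cleared on the new call below. */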
1713 tree p;
1714 gimple new_call;
1715 vec<tree> argarray;
1716 size_t nargs = gimple_call_num_args (id->gimple_call);
1717 size_t n;
1718
1719 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1720 nargs--;
1721
1722 /* Create the new array of arguments. */
1723 n = nargs + gimple_call_num_args (stmt);
1724 argarray.create (n);
1725 argarray.safe_grow_cleared (n);
1726
1727 /* Copy all the arguments before '...' */
1728 memcpy (argarray.address (),
1729 gimple_call_arg_ptr (stmt, 0),
1730 gimple_call_num_args (stmt) * sizeof (tree));
1731
1732 /* Append the arguments passed in '...' */
1733 memcpy (argarray.address () + gimple_call_num_args (stmt),
1734 gimple_call_arg_ptr (id->gimple_call, 0)
1735 + (gimple_call_num_args (id->gimple_call) - nargs),
1736 nargs * sizeof (tree));
1737
1738 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1739 argarray);
1740
1741 argarray.release ();
1742
1743 /* Copy all GIMPLE_CALL flags, location and block, except
1744 GF_CALL_VA_ARG_PACK. */
1745 gimple_call_copy_flags (new_call, stmt);
1746 gimple_call_set_va_arg_pack (new_call, false);
1747 gimple_set_location (new_call, gimple_location (stmt));
1748 gimple_set_block (new_call, gimple_block (stmt));
1749 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1750
1751 gsi_replace (&copy_gsi, new_call, false);
1752 stmt = new_call;
1753 }
1754 else if (is_gimple_call (stmt)
1755 && id->gimple_call
1756 && (decl = gimple_call_fndecl (stmt))
1757 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1758 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1759 {
1760 /* __builtin_va_arg_pack_len () should be replaced by
1761 the number of anonymous arguments. */
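/* An illustrative case (hypothetical caller, not from this file): if the
callee is declared as f (int x, ...) and is inlined at the call
f (1, 2, 3), the builtin folds to the constant 2, the number of
arguments passed for the ellipsis. */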
1762 size_t nargs = gimple_call_num_args (id->gimple_call);
1763 tree count, p;
1764 gimple new_stmt;
1765
1766 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1767 nargs--;
1768
1769 count = build_int_cst (integer_type_node, nargs);
1770 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1771 gsi_replace (&copy_gsi, new_stmt, false);
1772 stmt = new_stmt;
1773 }
1774
1775 /* Statements produced by inlining can be unfolded, especially
1776 when we have constant propagated some operands. We can't fold
1777 them right now for two reasons:
1778 1) folding requires SSA_NAME_DEF_STMTs to be correct,
1779 2) we can't change function calls to builtins.
1780 So we just mark the statement for later folding. We mark
1781 all new statements, instead of just the statements that have changed
1782 by some nontrivial substitution, so that even statements made
1783 foldable indirectly are updated. If this turns out to be
1784 expensive, copy_body can be told to watch for nontrivial
1785 changes. */
1786 if (id->statements_to_fold)
1787 id->statements_to_fold->add (stmt);
1788
1789 /* We're duplicating a CALL_EXPR. Find any corresponding
1790 callgraph edges and update or duplicate them. */
1791 if (is_gimple_call (stmt))
1792 {
1793 struct cgraph_edge *edge;
1794
1795 switch (id->transform_call_graph_edges)
1796 {
1797 case CB_CGE_DUPLICATE:
1798 edge = id->src_node->get_edge (orig_stmt);
1799 if (edge)
1800 {
1801 int edge_freq = edge->frequency;
1802 int new_freq;
1803 struct cgraph_edge *old_edge = edge;
1804 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1805 gimple_uid (stmt),
1806 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1807 true);
1808 /* We could also just rescale the frequency, but
1809 doing so would introduce roundoff errors and make
1810 the verifier unhappy. */
1811 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1812 copy_basic_block);
1813
1814 /* Speculative calls consist of two edges - direct and indirect.
1815 Duplicate the whole thing and distribute frequencies accordingly. */
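/* A rough sketch of the arithmetic below: each of the two clones receives
new_freq * its_old_frequency / (old_direct_frequency + old_indirect_frequency),
capped at CGRAPH_FREQ_MAX, so the direct and indirect clones split
NEW_FREQ in the same proportion the original speculative pair had. */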
1816 if (edge->speculative)
1817 {
1818 struct cgraph_edge *direct, *indirect;
1819 struct ipa_ref *ref;
1820
1821 gcc_assert (!edge->indirect_unknown_callee);
1822 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1823 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1824 gimple_uid (stmt),
1825 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1826 true);
1827 if (old_edge->frequency + indirect->frequency)
1828 {
1829 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1830 (old_edge->frequency + indirect->frequency)),
1831 CGRAPH_FREQ_MAX);
1832 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1833 (old_edge->frequency + indirect->frequency)),
1834 CGRAPH_FREQ_MAX);
1835 }
1836 id->dst_node->clone_reference (ref, stmt);
1837 }
1838 else
1839 {
1840 edge->frequency = new_freq;
1841 if (dump_file
1842 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1843 && (edge_freq > edge->frequency + 10
1844 || edge_freq < edge->frequency - 10))
1845 {
1846 fprintf (dump_file, "Edge frequency estimated by "
1847 "cgraph %i diverge from inliner's estimate %i\n",
1848 edge_freq,
1849 edge->frequency);
1850 fprintf (dump_file,
1851 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1852 bb->index,
1853 bb->frequency,
1854 copy_basic_block->frequency);
1855 }
1856 }
1857 }
1858 break;
1859
1860 case CB_CGE_MOVE_CLONES:
1861 id->dst_node->set_call_stmt_including_clones (orig_stmt,
1862 stmt);
1863 edge = id->dst_node->get_edge (stmt);
1864 break;
1865
1866 case CB_CGE_MOVE:
1867 edge = id->dst_node->get_edge (orig_stmt);
1868 if (edge)
1869 cgraph_set_call_stmt (edge, stmt);
1870 break;
1871
1872 default:
1873 gcc_unreachable ();
1874 }
1875
1876 /* Constant propagation on arguments done during inlining
1877 may create a new direct call. Produce an edge for it. */
1878 if ((!edge
1879 || (edge->indirect_inlining_edge
1880 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1881 && id->dst_node->definition
1882 && (fn = gimple_call_fndecl (stmt)) != NULL)
1883 {
1884 struct cgraph_node *dest = cgraph_node::get (fn);
1885
1886 /* We have a missing edge in the callgraph. This can happen
1887 when previous inlining turned an indirect call into a
1888 direct call by constant propagating arguments, or when we are
1889 producing a dead clone (for further cloning). In all
1890 other cases we hit a bug (incorrect node sharing is the
1891 most common reason for missing edges). */
1892 gcc_assert (!dest->definition
1893 || dest->address_taken
1894 || !id->src_node->definition
1895 || !id->dst_node->definition);
1896 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1897 id->dst_node->create_edge_including_clones
1898 (dest, orig_stmt, stmt, bb->count,
1899 compute_call_stmt_bb_frequency (id->dst_node->decl,
1900 copy_basic_block),
1901 CIF_ORIGINALLY_INDIRECT_CALL);
1902 else
1903 id->dst_node->create_edge (dest, stmt,
1904 bb->count,
1905 compute_call_stmt_bb_frequency
1906 (id->dst_node->decl,
1907 copy_basic_block))->inline_failed
1908 = CIF_ORIGINALLY_INDIRECT_CALL;
1909 if (dump_file)
1910 {
1911 fprintf (dump_file, "Created new direct edge to %s\n",
1912 dest->name ());
1913 }
1914 }
1915
1916 notice_special_calls (stmt);
1917 }
1918
1919 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1920 id->eh_map, id->eh_lp_nr);
1921
1922 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1923 {
1924 ssa_op_iter i;
1925 tree def;
1926
1927 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1928 if (TREE_CODE (def) == SSA_NAME)
1929 SSA_NAME_DEF_STMT (def) = stmt;
1930 }
1931
1932 gsi_next (&copy_gsi);
1933 }
1934 while (!gsi_end_p (copy_gsi));
1935
1936 copy_gsi = gsi_last_bb (copy_basic_block);
1937 }
1938
1939 return copy_basic_block;
1940 }
1941
1942 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1943 form is quite easy, since the dominator relationship for the old basic blocks
1944 does not change.
1945 
1946 There is however an exception where inlining might change the dominator
1947 relation across EH edges from basic blocks within the inlined function
1948 to landing pads in the function we inline into.
1949 
1950 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1951 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1952 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1953 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1954 set, and this means that there will be no overlapping live ranges
1955 for the underlying symbol.
1956 
1957 This might change in the future if we allow redirecting of EH edges;
1958 we might then want to change the way the CFG is built pre-inlining to
1959 include all the possible edges. */
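/* A simplified picture of the case handled here: a statement copied from
the inlined body throws into a landing pad of the caller that already
has PHI nodes. The loop below gives the PHI argument on the new
abnormal/EH edge the same value the PHI already has on the pre-existing
edge coming from RET_BB. */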
1960 static void
1961 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1962 bool can_throw, bool nonlocal_goto)
1963 {
1964 edge e;
1965 edge_iterator ei;
1966
1967 FOR_EACH_EDGE (e, ei, bb->succs)
1968 if (!e->dest->aux
1969 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1970 {
1971 gimple phi;
1972 gimple_stmt_iterator si;
1973
1974 if (!nonlocal_goto)
1975 gcc_assert (e->flags & EDGE_EH);
1976
1977 if (!can_throw)
1978 gcc_assert (!(e->flags & EDGE_EH));
1979
1980 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1981 {
1982 edge re;
1983
1984 phi = gsi_stmt (si);
1985
1986 /* For abnormal goto/call edges the receiver can be the
1987 ENTRY_BLOCK. Do not assert this cannot happen. */
1988
1989 gcc_assert ((e->flags & EDGE_EH)
1990 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1991
1992 re = find_edge (ret_bb, e->dest);
1993 gcc_checking_assert (re);
1994 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1995 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1996
1997 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1998 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1999 }
2000 }
2001 }
2002
2003
2004 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2005 accordingly. Edges will be taken care of later. Assume aux
2006 pointers point to the copies of each BB. Return true if any
2007 debug stmts are left after a statement that must end the basic block. */
2008
2009 static bool
2010 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2011 basic_block abnormal_goto_dest)
2012 {
2013 basic_block new_bb = (basic_block) bb->aux;
2014 edge_iterator ei;
2015 edge old_edge;
2016 gimple_stmt_iterator si;
2017 int flags;
2018 bool need_debug_cleanup = false;
2019
2020 /* Use the indices from the original blocks to create edges for the
2021 new ones. */
2022 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2023 if (!(old_edge->flags & EDGE_EH))
2024 {
2025 edge new_edge;
2026
2027 flags = old_edge->flags;
2028
2029 /* Return edges do get a FALLTHRU flag when they get inlined. */
2030 if (old_edge->dest->index == EXIT_BLOCK
2031 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2032 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2033 flags |= EDGE_FALLTHRU;
2034 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2035 new_edge->count = apply_scale (old_edge->count, count_scale);
2036 new_edge->probability = old_edge->probability;
2037 }
2038
2039 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2040 return false;
2041
2042 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2043 {
2044 gimple copy_stmt;
2045 bool can_throw, nonlocal_goto;
2046
2047 copy_stmt = gsi_stmt (si);
2048 if (!is_gimple_debug (copy_stmt))
2049 update_stmt (copy_stmt);
2050
2051 /* Do this before the possible split_block. */
2052 gsi_next (&si);
2053
2054 /* If this tree could throw an exception, there are two
2055 cases where we need to add abnormal edge(s): the
2056 tree wasn't in a region and there is a "current
2057 region" in the caller; or the original tree had
2058 EH edges. In both cases split the block after the tree,
2059 and add abnormal edge(s) as needed; we need both
2060 those from the callee and the caller.
2061 We check whether the copy can throw, because the const
2062 propagation can change an INDIRECT_REF which throws
2063 into a COMPONENT_REF which doesn't. If the copy
2064 can throw, the original could also throw. */
2065 can_throw = stmt_can_throw_internal (copy_stmt);
2066 nonlocal_goto
2067 = (stmt_can_make_abnormal_goto (copy_stmt)
2068 && !computed_goto_p (copy_stmt));
2069
2070 if (can_throw || nonlocal_goto)
2071 {
2072 if (!gsi_end_p (si))
2073 {
2074 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2075 gsi_next (&si);
2076 if (gsi_end_p (si))
2077 need_debug_cleanup = true;
2078 }
2079 if (!gsi_end_p (si))
2080 /* Note that bb's predecessor edges aren't necessarily
2081 right at this point; split_block doesn't care. */
2082 {
2083 edge e = split_block (new_bb, copy_stmt);
2084
2085 new_bb = e->dest;
2086 new_bb->aux = e->src->aux;
2087 si = gsi_start_bb (new_bb);
2088 }
2089 }
2090
2091 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2092 make_eh_dispatch_edges (copy_stmt);
2093 else if (can_throw)
2094 make_eh_edges (copy_stmt);
2095
2096 /* If the call we inline cannot make an abnormal goto, do not add
2097 additional abnormal edges but only retain those already present
2098 in the original function body. */
2099 if (abnormal_goto_dest == NULL)
2100 nonlocal_goto = false;
2101 if (nonlocal_goto)
2102 {
2103 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2104
2105 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2106 nonlocal_goto = false;
2107 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2108 in OpenMP regions which aren't allowed to be left abnormally.
2109 So, no need to add abnormal edge in that case. */
2110 else if (is_gimple_call (copy_stmt)
2111 && gimple_call_internal_p (copy_stmt)
2112 && (gimple_call_internal_fn (copy_stmt)
2113 == IFN_ABNORMAL_DISPATCHER)
2114 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2115 nonlocal_goto = false;
2116 else
2117 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2118 }
2119
2120 if ((can_throw || nonlocal_goto)
2121 && gimple_in_ssa_p (cfun))
2122 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2123 can_throw, nonlocal_goto);
2124 }
2125 return need_debug_cleanup;
2126 }
2127
2128 /* Copy the PHIs. All blocks and edges are copied, some blocks
2129 were possibly split and new outgoing EH edges inserted.
2130 BB points to the block of the original function and AUX pointers link
2131 the original and newly copied blocks. */
2132
2133 static void
2134 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2135 {
2136 basic_block const new_bb = (basic_block) bb->aux;
2137 edge_iterator ei;
2138 gimple phi;
2139 gimple_stmt_iterator si;
2140 edge new_edge;
2141 bool inserted = false;
2142
2143 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2144 {
2145 tree res, new_res;
2146 gimple new_phi;
2147
2148 phi = gsi_stmt (si);
2149 res = PHI_RESULT (phi);
2150 new_res = res;
2151 if (!virtual_operand_p (res))
2152 {
2153 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2154 new_phi = create_phi_node (new_res, new_bb);
2155 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2156 {
2157 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2158 tree arg;
2159 tree new_arg;
2160 edge_iterator ei2;
2161 location_t locus;
2162
2163 /* When doing partial cloning, we allow PHIs on the entry block
2164 as long as all the arguments are the same. Find any input
2165 edge to get the argument to copy. */
2166 if (!old_edge)
2167 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2168 if (!old_edge->src->aux)
2169 break;
2170
2171 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2172 new_arg = arg;
2173 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2174 gcc_assert (new_arg);
2175 /* With return slot optimization we can end up with
2176 non-gimple (foo *)&this->m, fix that here. */
2177 if (TREE_CODE (new_arg) != SSA_NAME
2178 && TREE_CODE (new_arg) != FUNCTION_DECL
2179 && !is_gimple_val (new_arg))
2180 {
2181 gimple_seq stmts = NULL;
2182 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2183 gsi_insert_seq_on_edge (new_edge, stmts);
2184 inserted = true;
2185 }
2186 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2187 if (LOCATION_BLOCK (locus))
2188 {
2189 tree *n;
2190 n = id->decl_map->get (LOCATION_BLOCK (locus));
2191 gcc_assert (n);
2192 if (*n)
2193 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2194 else
2195 locus = LOCATION_LOCUS (locus);
2196 }
2197 else
2198 locus = LOCATION_LOCUS (locus);
2199
2200 add_phi_arg (new_phi, new_arg, new_edge, locus);
2201 }
2202 }
2203 }
2204
2205 /* Commit the delayed edge insertions. */
2206 if (inserted)
2207 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2208 gsi_commit_one_edge_insert (new_edge, NULL);
2209 }
2210
2211
2212 /* Wrapper for remap_decl so it can be used as a callback. */
2213
2214 static tree
2215 remap_decl_1 (tree decl, void *data)
2216 {
2217 return remap_decl (decl, (copy_body_data *) data);
2218 }
2219
2220 /* Build the struct function and associated data structures for the new clone
2221 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2222 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2223
2224 static void
2225 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2226 {
2227 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2228 gcov_type count_scale;
2229
2230 if (!DECL_ARGUMENTS (new_fndecl))
2231 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2232 if (!DECL_RESULT (new_fndecl))
2233 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2234
2235 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2236 count_scale
2237 = GCOV_COMPUTE_SCALE (count,
2238 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2239 else
2240 count_scale = REG_BR_PROB_BASE;
2241
2242 /* Register specific tree functions. */
2243 gimple_register_cfg_hooks ();
2244
2245 /* Get clean struct function. */
2246 push_struct_function (new_fndecl);
2247
2248 /* We will rebuild these, so just sanity check that they are empty. */
2249 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2250 gcc_assert (cfun->local_decls == NULL);
2251 gcc_assert (cfun->cfg == NULL);
2252 gcc_assert (cfun->decl == new_fndecl);
2253
2254 /* Copy items we preserve during cloning. */
2255 cfun->static_chain_decl = src_cfun->static_chain_decl;
2256 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2257 cfun->function_end_locus = src_cfun->function_end_locus;
2258 cfun->curr_properties = src_cfun->curr_properties;
2259 cfun->last_verified = src_cfun->last_verified;
2260 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2261 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2262 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2263 cfun->stdarg = src_cfun->stdarg;
2264 cfun->after_inlining = src_cfun->after_inlining;
2265 cfun->can_throw_non_call_exceptions
2266 = src_cfun->can_throw_non_call_exceptions;
2267 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2268 cfun->returns_struct = src_cfun->returns_struct;
2269 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2270
2271 init_empty_tree_cfg ();
2272
2273 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2274 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2275 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2276 REG_BR_PROB_BASE);
2277 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2278 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2279 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2280 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2281 REG_BR_PROB_BASE);
2282 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2283 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2284 if (src_cfun->eh)
2285 init_eh_for_function ();
2286
2287 if (src_cfun->gimple_df)
2288 {
2289 init_tree_ssa (cfun);
2290 cfun->gimple_df->in_ssa_p = true;
2291 init_ssa_operands (cfun);
2292 }
2293 }
2294
2295 /* Helper function for copy_cfg_body. Move debug stmts from the end
2296 of NEW_BB to the beginning of successor basic blocks when needed. If the
2297 successor has multiple predecessors, reset them, otherwise keep
2298 their value. */
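/* Informally, the situation being repaired: when the last real statement
of NEW_BB can throw or make an abnormal goto, the block gets split after
it and any debug binds following that statement would be stranded; they
are copied (or, for the last edge, moved) to the start of each
successor, with their values reset whenever a successor has several
predecessors and the bound value is no longer unambiguous. */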
2299
2300 static void
2301 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2302 {
2303 edge e;
2304 edge_iterator ei;
2305 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2306
2307 if (gsi_end_p (si)
2308 || gsi_one_before_end_p (si)
2309 || !(stmt_can_throw_internal (gsi_stmt (si))
2310 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2311 return;
2312
2313 FOR_EACH_EDGE (e, ei, new_bb->succs)
2314 {
2315 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2316 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2317 while (is_gimple_debug (gsi_stmt (ssi)))
2318 {
2319 gimple stmt = gsi_stmt (ssi), new_stmt;
2320 tree var;
2321 tree value;
2322
2323 /* For the last edge move the debug stmts instead of copying
2324 them. */
2325 if (ei_one_before_end_p (ei))
2326 {
2327 si = ssi;
2328 gsi_prev (&ssi);
2329 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2330 gimple_debug_bind_reset_value (stmt);
2331 gsi_remove (&si, false);
2332 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2333 continue;
2334 }
2335
2336 if (gimple_debug_bind_p (stmt))
2337 {
2338 var = gimple_debug_bind_get_var (stmt);
2339 if (single_pred_p (e->dest))
2340 {
2341 value = gimple_debug_bind_get_value (stmt);
2342 value = unshare_expr (value);
2343 }
2344 else
2345 value = NULL_TREE;
2346 new_stmt = gimple_build_debug_bind (var, value, stmt);
2347 }
2348 else if (gimple_debug_source_bind_p (stmt))
2349 {
2350 var = gimple_debug_source_bind_get_var (stmt);
2351 value = gimple_debug_source_bind_get_value (stmt);
2352 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2353 }
2354 else
2355 gcc_unreachable ();
2356 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2357 id->debug_stmts.safe_push (new_stmt);
2358 gsi_prev (&ssi);
2359 }
2360 }
2361 }
2362
2363 /* Make a copy of the sub-loops of SRC_PARENT and place them
2364 as sub-loops of DEST_PARENT. */
2365
2366 static void
2367 copy_loops (copy_body_data *id,
2368 struct loop *dest_parent, struct loop *src_parent)
2369 {
2370 struct loop *src_loop = src_parent->inner;
2371 while (src_loop)
2372 {
2373 if (!id->blocks_to_copy
2374 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2375 {
2376 struct loop *dest_loop = alloc_loop ();
2377
2378 /* Assign the new loop its header and latch and associate
2379 those with the new loop. */
2380 if (src_loop->header != NULL)
2381 {
2382 dest_loop->header = (basic_block)src_loop->header->aux;
2383 dest_loop->header->loop_father = dest_loop;
2384 }
2385 if (src_loop->latch != NULL)
2386 {
2387 dest_loop->latch = (basic_block)src_loop->latch->aux;
2388 dest_loop->latch->loop_father = dest_loop;
2389 }
2390
2391 /* Copy loop meta-data. */
2392 copy_loop_info (src_loop, dest_loop);
2393
2394 /* Finally place it into the loop array and the loop tree. */
2395 place_new_loop (cfun, dest_loop);
2396 flow_loop_tree_node_add (dest_parent, dest_loop);
2397
2398 dest_loop->safelen = src_loop->safelen;
2399 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2400 if (src_loop->force_vectorize)
2401 {
2402 dest_loop->force_vectorize = true;
2403 cfun->has_force_vectorize_loops = true;
2404 }
2405 if (src_loop->simduid)
2406 {
2407 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2408 cfun->has_simduid_loops = true;
2409 }
2410
2411 /* Recurse. */
2412 copy_loops (id, dest_loop, src_loop);
2413 }
2414 src_loop = src_loop->next;
2415 }
2416 }
2417
2418 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2419
2420 void
2421 redirect_all_calls (copy_body_data * id, basic_block bb)
2422 {
2423 gimple_stmt_iterator si;
2424 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2425 {
2426 if (is_gimple_call (gsi_stmt (si)))
2427 {
2428 struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
2429 if (edge)
2430 cgraph_redirect_edge_call_stmt_to_callee (edge);
2431 }
2432 }
2433 }
2434
2435 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2436 with each bb's frequency. Used when NODE has a 0-weight entry
2437 but we are about to inline it into a non-zero count call bb.
2438 See the comments for handle_missing_profiles() in predict.c for
2439 when this can happen for COMDATs. */
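/* A small worked example with made-up numbers: with COUNT == 1000 and a
block whose frequency is BB_FREQ_MAX / 4, the block receives a count of
roughly 250, and each outgoing edge then gets that count scaled by its
probability. */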
2440
2441 void
2442 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2443 {
2444 basic_block bb;
2445 edge_iterator ei;
2446 edge e;
2447 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2448
2449 FOR_ALL_BB_FN(bb, fn)
2450 {
2451 bb->count = apply_scale (count,
2452 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2453 FOR_EACH_EDGE (e, ei, bb->succs)
2454 e->count = apply_probability (e->src->count, e->probability);
2455 }
2456 }
2457
2458 /* Make a copy of the body of FN so that it can be inserted inline in
2459 another function. Walks FN via CFG, returns new fndecl. */
2460
2461 static tree
2462 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2463 basic_block entry_block_map, basic_block exit_block_map,
2464 basic_block new_entry)
2465 {
2466 tree callee_fndecl = id->src_fn;
2467 /* Original cfun for the callee, doesn't change. */
2468 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2469 struct function *cfun_to_copy;
2470 basic_block bb;
2471 tree new_fndecl = NULL;
2472 bool need_debug_cleanup = false;
2473 gcov_type count_scale;
2474 int last;
2475 int incoming_frequency = 0;
2476 gcov_type incoming_count = 0;
2477
2478 /* This can happen for COMDAT routines that end up with 0 counts
2479 despite being called (see the comments for handle_missing_profiles()
2480 in predict.c as to why). Apply counts to the blocks in the callee
2481 before inlining, using the guessed edge frequencies, so that we don't
2482 end up with a 0-count inline body which can confuse downstream
2483 optimizations such as function splitting. */
2484 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2485 {
2486 /* Apply the larger of the call bb count and the total incoming
2487 call edge count to the callee. */
2488 gcov_type in_count = 0;
2489 struct cgraph_edge *in_edge;
2490 for (in_edge = id->src_node->callers; in_edge;
2491 in_edge = in_edge->next_caller)
2492 in_count += in_edge->count;
2493 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2494 }
2495
2496 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2497 count_scale
2498 = GCOV_COMPUTE_SCALE (count,
2499 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2500 else
2501 count_scale = REG_BR_PROB_BASE;
2502
2503 /* Register specific tree functions. */
2504 gimple_register_cfg_hooks ();
2505
2506 /* If we are inlining just a region of the function, make sure to connect
2507 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2508 part of a loop, we must compute the frequency and probability of
2509 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2510 probabilities of edges incoming from the non-duplicated region. */
2511 if (new_entry)
2512 {
2513 edge e;
2514 edge_iterator ei;
2515
2516 FOR_EACH_EDGE (e, ei, new_entry->preds)
2517 if (!e->src->aux)
2518 {
2519 incoming_frequency += EDGE_FREQUENCY (e);
2520 incoming_count += e->count;
2521 }
2522 incoming_count = apply_scale (incoming_count, count_scale);
2523 incoming_frequency
2524 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2525 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2526 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2527 }
2528
2529 /* Must have a CFG here at this point. */
2530 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2531 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2532
2533 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2534
2535 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2536 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2537 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2538 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2539
2540 /* Duplicate any exception-handling regions. */
2541 if (cfun->eh)
2542 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2543 remap_decl_1, id);
2544
2545 /* Use aux pointers to map the original blocks to their copies. */
2546 FOR_EACH_BB_FN (bb, cfun_to_copy)
2547 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2548 {
2549 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2550 bb->aux = new_bb;
2551 new_bb->aux = bb;
2552 new_bb->loop_father = entry_block_map->loop_father;
2553 }
2554
2555 last = last_basic_block_for_fn (cfun);
2556
2557 /* Now that we've duplicated the blocks, duplicate their edges. */
2558 basic_block abnormal_goto_dest = NULL;
2559 if (id->gimple_call
2560 && stmt_can_make_abnormal_goto (id->gimple_call))
2561 {
2562 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2563
2564 bb = gimple_bb (id->gimple_call);
2565 gsi_next (&gsi);
2566 if (gsi_end_p (gsi))
2567 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2568 }
2569 FOR_ALL_BB_FN (bb, cfun_to_copy)
2570 if (!id->blocks_to_copy
2571 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2572 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2573 abnormal_goto_dest);
2574
2575 if (new_entry)
2576 {
2577 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2578 e->probability = REG_BR_PROB_BASE;
2579 e->count = incoming_count;
2580 }
2581
2582 /* Duplicate the loop tree, if available and wanted. */
2583 if (loops_for_fn (src_cfun) != NULL
2584 && current_loops != NULL)
2585 {
2586 copy_loops (id, entry_block_map->loop_father,
2587 get_loop (src_cfun, 0));
2588 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2589 loops_state_set (LOOPS_NEED_FIXUP);
2590 }
2591
2592 /* If the loop tree in the source function needed fixup, mark the
2593 destination loop tree for fixup, too. */
2594 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2595 loops_state_set (LOOPS_NEED_FIXUP);
2596
2597 if (gimple_in_ssa_p (cfun))
2598 FOR_ALL_BB_FN (bb, cfun_to_copy)
2599 if (!id->blocks_to_copy
2600 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2601 copy_phis_for_bb (bb, id);
2602
2603 FOR_ALL_BB_FN (bb, cfun_to_copy)
2604 if (bb->aux)
2605 {
2606 if (need_debug_cleanup
2607 && bb->index != ENTRY_BLOCK
2608 && bb->index != EXIT_BLOCK)
2609 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2610 /* Update call edge destinations. This cannot be done before loop
2611 info is updated, because we may split basic blocks. */
2612 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2613 redirect_all_calls (id, (basic_block)bb->aux);
2614 ((basic_block)bb->aux)->aux = NULL;
2615 bb->aux = NULL;
2616 }
2617
2618 /* Zero out the AUX fields of blocks newly created during EH edge
2619 insertion. */
2620 for (; last < last_basic_block_for_fn (cfun); last++)
2621 {
2622 if (need_debug_cleanup)
2623 maybe_move_debug_stmts_to_successors (id,
2624 BASIC_BLOCK_FOR_FN (cfun, last));
2625 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2626 /* Update call edge destinations. This cannot be done before loop
2627 info is updated, because we may split basic blocks. */
2628 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2629 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2630 }
2631 entry_block_map->aux = NULL;
2632 exit_block_map->aux = NULL;
2633
2634 if (id->eh_map)
2635 {
2636 delete id->eh_map;
2637 id->eh_map = NULL;
2638 }
2639
2640 return new_fndecl;
2641 }
2642
2643 /* Copy the debug STMT using ID. We deal with these statements in a
2644 special way: if any variable in their VALUE expression wasn't
2645 remapped yet, we won't remap it, because that would get decl uids
2646 out of sync, causing codegen differences between -g and -g0. If
2647 this arises, we drop the VALUE expression altogether. */
2648
2649 static void
2650 copy_debug_stmt (gimple stmt, copy_body_data *id)
2651 {
2652 tree t, *n;
2653 struct walk_stmt_info wi;
2654
2655 if (gimple_block (stmt))
2656 {
2657 n = id->decl_map->get (gimple_block (stmt));
2658 gimple_set_block (stmt, n ? *n : id->block);
2659 }
2660
2661 /* Remap all the operands in COPY. */
2662 memset (&wi, 0, sizeof (wi));
2663 wi.info = id;
2664
2665 processing_debug_stmt = 1;
2666
2667 if (gimple_debug_source_bind_p (stmt))
2668 t = gimple_debug_source_bind_get_var (stmt);
2669 else
2670 t = gimple_debug_bind_get_var (stmt);
2671
2672 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2673 && (n = id->debug_map->get (t)))
2674 {
2675 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2676 t = *n;
2677 }
2678 else if (TREE_CODE (t) == VAR_DECL
2679 && !is_global_var (t)
2680 && !id->decl_map->get (t))
2681 /* T is a non-localized variable. */;
2682 else
2683 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2684
2685 if (gimple_debug_bind_p (stmt))
2686 {
2687 gimple_debug_bind_set_var (stmt, t);
2688
2689 if (gimple_debug_bind_has_value_p (stmt))
2690 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2691 remap_gimple_op_r, &wi, NULL);
2692
2693 /* Punt if any decl couldn't be remapped. */
2694 if (processing_debug_stmt < 0)
2695 gimple_debug_bind_reset_value (stmt);
2696 }
2697 else if (gimple_debug_source_bind_p (stmt))
2698 {
2699 gimple_debug_source_bind_set_var (stmt, t);
2700 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2701 remap_gimple_op_r, &wi, NULL);
2702 /* When inlining and source bind refers to one of the optimized
2703 away parameters, change the source bind into normal debug bind
2704 referring to the corresponding DEBUG_EXPR_DECL that should have
2705 been bound before the call stmt. */
2706 t = gimple_debug_source_bind_get_value (stmt);
2707 if (t != NULL_TREE
2708 && TREE_CODE (t) == PARM_DECL
2709 && id->gimple_call)
2710 {
2711 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2712 unsigned int i;
2713 if (debug_args != NULL)
2714 {
2715 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2716 if ((**debug_args)[i] == DECL_ORIGIN (t)
2717 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2718 {
2719 t = (**debug_args)[i + 1];
2720 stmt->subcode = GIMPLE_DEBUG_BIND;
2721 gimple_debug_bind_set_value (stmt, t);
2722 break;
2723 }
2724 }
2725 }
2726 }
2727
2728 processing_debug_stmt = 0;
2729
2730 update_stmt (stmt);
2731 }
2732
2733 /* Process deferred debug stmts. In order to give values better odds
2734 of being successfully remapped, we delay the processing of debug
2735 stmts until all other stmts that might require remapping are
2736 processed. */
2737
2738 static void
2739 copy_debug_stmts (copy_body_data *id)
2740 {
2741 size_t i;
2742 gimple stmt;
2743
2744 if (!id->debug_stmts.exists ())
2745 return;
2746
2747 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2748 copy_debug_stmt (stmt, id);
2749
2750 id->debug_stmts.release ();
2751 }
2752
2753 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2754 another function. */
2755
2756 static tree
2757 copy_tree_body (copy_body_data *id)
2758 {
2759 tree fndecl = id->src_fn;
2760 tree body = DECL_SAVED_TREE (fndecl);
2761
2762 walk_tree (&body, copy_tree_body_r, id, NULL);
2763
2764 return body;
2765 }
2766
2767 /* Make a copy of the body of FN so that it can be inserted inline in
2768 another function. */
2769
2770 static tree
2771 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2772 basic_block entry_block_map, basic_block exit_block_map,
2773 basic_block new_entry)
2774 {
2775 tree fndecl = id->src_fn;
2776 tree body;
2777
2778 /* If this body has a CFG, walk CFG and copy. */
2779 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2780 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2781 new_entry);
2782 copy_debug_stmts (id);
2783
2784 return body;
2785 }
2786
2787 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2788 defined in function FN, or of a data member thereof. */
2789
2790 static bool
2791 self_inlining_addr_expr (tree value, tree fn)
2792 {
2793 tree var;
2794
2795 if (TREE_CODE (value) != ADDR_EXPR)
2796 return false;
2797
2798 var = get_base_address (TREE_OPERAND (value, 0));
2799
2800 return var && auto_var_in_fn_p (var, fn);
2801 }
2802
2803 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2804 lexical block and line number information from base_stmt, if given,
2805 or from the last stmt of the block otherwise. */
2806
2807 static gimple
2808 insert_init_debug_bind (copy_body_data *id,
2809 basic_block bb, tree var, tree value,
2810 gimple base_stmt)
2811 {
2812 gimple note;
2813 gimple_stmt_iterator gsi;
2814 tree tracked_var;
2815
2816 if (!gimple_in_ssa_p (id->src_cfun))
2817 return NULL;
2818
2819 if (!MAY_HAVE_DEBUG_STMTS)
2820 return NULL;
2821
2822 tracked_var = target_for_debug_bind (var);
2823 if (!tracked_var)
2824 return NULL;
2825
2826 if (bb)
2827 {
2828 gsi = gsi_last_bb (bb);
2829 if (!base_stmt && !gsi_end_p (gsi))
2830 base_stmt = gsi_stmt (gsi);
2831 }
2832
2833 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2834
2835 if (bb)
2836 {
2837 if (!gsi_end_p (gsi))
2838 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2839 else
2840 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2841 }
2842
2843 return note;
2844 }
2845
2846 static void
2847 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2848 {
2849 /* If VAR represents a zero-sized variable, it's possible that the
2850 assignment statement may result in no gimple statements. */
2851 if (init_stmt)
2852 {
2853 gimple_stmt_iterator si = gsi_last_bb (bb);
2854
2855 /* We can end up with init statements that store to a non-register
2856 from a rhs with a conversion. Handle that here by forcing the
2857 rhs into a temporary. gimple_regimplify_operands is not
2858 prepared to do this for us. */
2859 if (!is_gimple_debug (init_stmt)
2860 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2861 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2862 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2863 {
2864 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2865 gimple_expr_type (init_stmt),
2866 gimple_assign_rhs1 (init_stmt));
2867 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2868 GSI_NEW_STMT);
2869 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2870 gimple_assign_set_rhs1 (init_stmt, rhs);
2871 }
2872 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2873 gimple_regimplify_operands (init_stmt, &si);
2874
2875 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2876 {
2877 tree def = gimple_assign_lhs (init_stmt);
2878 insert_init_debug_bind (id, bb, def, def, init_stmt);
2879 }
2880 }
2881 }
2882
2883 /* Initialize parameter P with VALUE. If needed, produce an init statement
2884 at the end of BB. When BB is NULL, we return the init statement to be
2885 output later. */
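/* A hypothetical example for orientation (not taken from any source): when
inlining
static int sq (int x) { return x * x; }
at the call sq (i + 1), P is the PARM_DECL for x, VALUE is the tree for
i + 1 and, unless one of the shortcuts below applies, the init statement
emitted at the end of BB is roughly x.N = i + 1, where x.N stands for
the VAR_DECL copy created by copy_decl_to_var. */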
2886 static gimple
2887 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2888 basic_block bb, tree *vars)
2889 {
2890 gimple init_stmt = NULL;
2891 tree var;
2892 tree rhs = value;
2893 tree def = (gimple_in_ssa_p (cfun)
2894 ? ssa_default_def (id->src_cfun, p) : NULL);
2895
2896 if (value
2897 && value != error_mark_node
2898 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2899 {
2900 /* If we can match up types by promotion/demotion do so. */
2901 if (fold_convertible_p (TREE_TYPE (p), value))
2902 rhs = fold_convert (TREE_TYPE (p), value);
2903 else
2904 {
2905 /* ??? For valid programs we should not end up here.
2906 Still if we end up with truly mismatched types here, fall back
2907 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2908 GIMPLE to the following passes. */
2909 if (!is_gimple_reg_type (TREE_TYPE (value))
2910 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2911 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2912 else
2913 rhs = build_zero_cst (TREE_TYPE (p));
2914 }
2915 }
2916
2917 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2918 here since the type of this decl must be visible to the calling
2919 function. */
2920 var = copy_decl_to_var (p, id);
2921
2922 /* Declare this new variable. */
2923 DECL_CHAIN (var) = *vars;
2924 *vars = var;
2925
2926 /* Make gimplifier happy about this variable. */
2927 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2928
2929 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2930 we would not need to create a new variable here at all, if it
2931 weren't for debug info. Still, we can just use the argument
2932 value. */
2933 if (TREE_READONLY (p)
2934 && !TREE_ADDRESSABLE (p)
2935 && value && !TREE_SIDE_EFFECTS (value)
2936 && !def)
2937 {
2938 /* We may produce non-gimple trees by adding NOPs or introduce
2939 invalid sharing when the operand is not really constant.
2940 It is not a big deal to prohibit constant propagation here as
2941 we will constant propagate in the DOM1 pass anyway. */
2942 if (is_gimple_min_invariant (value)
2943 && useless_type_conversion_p (TREE_TYPE (p),
2944 TREE_TYPE (value))
2945 /* We have to be very careful about ADDR_EXPR. Make sure
2946 the base variable isn't a local variable of the inlined
2947 function, e.g., when doing recursive inlining, direct or
2948 mutually-recursive or whatever, which is why we don't
2949 just test whether fn == current_function_decl. */
2950 && ! self_inlining_addr_expr (value, fn))
2951 {
2952 insert_decl_map (id, p, value);
2953 insert_debug_decl_map (id, p, var);
2954 return insert_init_debug_bind (id, bb, var, value, NULL);
2955 }
2956 }
2957
2958 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2959 that way, when the PARM_DECL is encountered, it will be
2960 automatically replaced by the VAR_DECL. */
2961 insert_decl_map (id, p, var);
2962
2963 /* Even if P was TREE_READONLY, the new VAR should not be.
2964 In the original code, we would have constructed a
2965 temporary, and then the function body would have never
2966 changed the value of P. However, now, we will be
2967 constructing VAR directly. The constructor body may
2968 change its value multiple times as it is being
2969 constructed. Therefore, it must not be TREE_READONLY;
2970 the back-end assumes that TREE_READONLY variable is
2971 assigned to only once. */
2972 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2973 TREE_READONLY (var) = 0;
2974
2975 /* If there is no setup required and we are in SSA, take the easy route
2976 replacing all SSA names representing the function parameter by the
2977 SSA name passed to the function.
2978 
2979 We need to construct a map for the variable anyway as it might be used
2980 in different SSA names when the parameter is set in the function.
2981 
2982 Do the replacement at -O0 for const arguments replaced by constants.
2983 This is important for builtin_constant_p and other constructs requiring
2984 a constant argument to be visible in the inlined function body. */
2985 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2986 && (optimize
2987 || (TREE_READONLY (p)
2988 && is_gimple_min_invariant (rhs)))
2989 && (TREE_CODE (rhs) == SSA_NAME
2990 || is_gimple_min_invariant (rhs))
2991 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2992 {
2993 insert_decl_map (id, def, rhs);
2994 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2995 }
2996
2997 /* If the value of the argument is never used, don't bother initializing
2998 it. */
2999 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3000 {
3001 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3002 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3003 }
3004
3005 /* Initialize this VAR_DECL from the equivalent argument. Convert
3006 the argument to the proper type in case it was promoted. */
3007 if (value)
3008 {
3009 if (rhs == error_mark_node)
3010 {
3011 insert_decl_map (id, p, var);
3012 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3013 }
3014
3015 STRIP_USELESS_TYPE_CONVERSION (rhs);
3016
3017 /* If we are in SSA form properly remap the default definition
3018 or assign to a dummy SSA name if the parameter is unused and
3019 we are not optimizing. */
3020 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3021 {
3022 if (def)
3023 {
3024 def = remap_ssa_name (def, id);
3025 init_stmt = gimple_build_assign (def, rhs);
3026 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3027 set_ssa_default_def (cfun, var, NULL);
3028 }
3029 else if (!optimize)
3030 {
3031 def = make_ssa_name (var, NULL);
3032 init_stmt = gimple_build_assign (def, rhs);
3033 }
3034 }
3035 else
3036 init_stmt = gimple_build_assign (var, rhs);
3037
3038 if (bb && init_stmt)
3039 insert_init_stmt (id, bb, init_stmt);
3040 }
3041 return init_stmt;
3042 }
3043
3044 /* Generate code to initialize the parameters of the function at the
3045 top of the stack in ID from the GIMPLE_CALL STMT. */
3046
3047 static void
3048 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3049 tree fn, basic_block bb)
3050 {
3051 tree parms;
3052 size_t i;
3053 tree p;
3054 tree vars = NULL_TREE;
3055 tree static_chain = gimple_call_chain (stmt);
3056
3057 /* Figure out what the parameters are. */
3058 parms = DECL_ARGUMENTS (fn);
3059
3060 /* Loop through the parameter declarations, replacing each with an
3061 equivalent VAR_DECL, appropriately initialized. */
3062 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3063 {
3064 tree val;
3065 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3066 setup_one_parameter (id, p, val, fn, bb, &vars);
3067 }
3068 /* After remapping parameters remap their types. This has to be done
3069 in a second loop over all parameters to appropriately remap
3070 variable sized arrays when the size is specified in a
3071 parameter following the array. */
3072 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3073 {
3074 tree *varp = id->decl_map->get (p);
3075 if (varp
3076 && TREE_CODE (*varp) == VAR_DECL)
3077 {
3078 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3079 ? ssa_default_def (id->src_cfun, p) : NULL);
3080 tree var = *varp;
3081 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3082 /* Also remap the default definition if it was remapped
3083 to the default definition of the parameter replacement
3084 by the parameter setup. */
3085 if (def)
3086 {
3087 tree *defp = id->decl_map->get (def);
3088 if (defp
3089 && TREE_CODE (*defp) == SSA_NAME
3090 && SSA_NAME_VAR (*defp) == var)
3091 TREE_TYPE (*defp) = TREE_TYPE (var);
3092 }
3093 }
3094 }
3095
3096 /* Initialize the static chain. */
3097 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3098 gcc_assert (fn != current_function_decl);
3099 if (p)
3100 {
3101 /* No static chain? Seems like a bug in tree-nested.c. */
3102 gcc_assert (static_chain);
3103
3104 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3105 }
3106
3107 declare_inline_vars (id->block, vars);
3108 }
3109
3110
3111 /* Declare a return variable to replace the RESULT_DECL for the
3112 function we are calling. An appropriate DECL_STMT is returned.
3113 The USE_STMT is filled to contain a use of the declaration to
3114 indicate the return value of the function.
3115
3116 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
3117 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3118 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3119
3120 The return value is a (possibly null) value that holds the result
3121 as seen by the caller. */
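/* An illustrative case (simplified, not from the sources): for a call
struct S v = f ();
MODIFY_DEST is v, and when the callee cannot clobber it we may reuse v
directly as the return variable; if the front end used the return slot
optimization instead, RETURN_SLOT names the destination object and
MODIFY_DEST is NULL. */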
3122
3123 static tree
3124 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3125 basic_block entry_bb)
3126 {
3127 tree callee = id->src_fn;
3128 tree result = DECL_RESULT (callee);
3129 tree callee_type = TREE_TYPE (result);
3130 tree caller_type;
3131 tree var, use;
3132
3133 /* Handle type-mismatches in the function declaration return type
3134 vs. the call expression. */
3135 if (modify_dest)
3136 caller_type = TREE_TYPE (modify_dest);
3137 else
3138 caller_type = TREE_TYPE (TREE_TYPE (callee));
3139
3140 /* We don't need to do anything for functions that don't return anything. */
3141 if (VOID_TYPE_P (callee_type))
3142 return NULL_TREE;
3143
3144 /* If there was a return slot, then the return value is the
3145 dereferenced address of that object. */
3146 if (return_slot)
3147 {
3148 /* The front end shouldn't have used both return_slot and
3149 a modify expression. */
3150 gcc_assert (!modify_dest);
3151 if (DECL_BY_REFERENCE (result))
3152 {
3153 tree return_slot_addr = build_fold_addr_expr (return_slot);
3154 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3155
3156 /* We are going to construct *&return_slot and we can't do that
3157 for variables not believed to be addressable.
3158 
3159 FIXME: This check can possibly trigger, because values returned
3160 via the return slot optimization are not believed to have their address
3161 taken by alias analysis. */
3162 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3163 var = return_slot_addr;
3164 }
3165 else
3166 {
3167 var = return_slot;
3168 gcc_assert (TREE_CODE (var) != SSA_NAME);
3169 if (TREE_ADDRESSABLE (result))
3170 mark_addressable (var);
3171 }
3172 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3173 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3174 && !DECL_GIMPLE_REG_P (result)
3175 && DECL_P (var))
3176 DECL_GIMPLE_REG_P (var) = 0;
3177 use = NULL;
3178 goto done;
3179 }
3180
3181 /* All types requiring non-trivial constructors should have been handled. */
3182 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3183
3184 /* Attempt to avoid creating a new temporary variable. */
3185 if (modify_dest
3186 && TREE_CODE (modify_dest) != SSA_NAME)
3187 {
3188 bool use_it = false;
3189
3190 /* We can't use MODIFY_DEST if there's type promotion involved. */
3191 if (!useless_type_conversion_p (callee_type, caller_type))
3192 use_it = false;
3193
3194 /* ??? If we're assigning to a variable sized type, then we must
3195 reuse the destination variable, because we've no good way to
3196 create variable sized temporaries at this point. */
3197 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3198 use_it = true;
3199
3200 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3201 reuse it as the result of the call directly. Don't do this if
3202 it would promote MODIFY_DEST to addressable. */
3203 else if (TREE_ADDRESSABLE (result))
3204 use_it = false;
3205 else
3206 {
3207 tree base_m = get_base_address (modify_dest);
3208
3209 /* If the base isn't a decl, then it's a pointer, and we don't
3210 know where that's going to go. */
3211 if (!DECL_P (base_m))
3212 use_it = false;
3213 else if (is_global_var (base_m))
3214 use_it = false;
3215 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3216 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3217 && !DECL_GIMPLE_REG_P (result)
3218 && DECL_GIMPLE_REG_P (base_m))
3219 use_it = false;
3220 else if (!TREE_ADDRESSABLE (base_m))
3221 use_it = true;
3222 }
3223
3224 if (use_it)
3225 {
3226 var = modify_dest;
3227 use = NULL;
3228 goto done;
3229 }
3230 }
3231
3232 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3233
3234 var = copy_result_decl_to_var (result, id);
3235 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3236
3237 /* Do not have the rest of GCC warn about this variable as it should
3238 not be visible to the user. */
3239 TREE_NO_WARNING (var) = 1;
3240
3241 declare_inline_vars (id->block, var);
3242
3243 /* Build the use expr. If the return type of the function was
3244 promoted, convert it back to the expected type. */
3245 use = var;
3246 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3247 {
3248 /* If we can match up types by promotion/demotion do so. */
3249 if (fold_convertible_p (caller_type, var))
3250 use = fold_convert (caller_type, var);
3251 else
3252 {
3253 /* ??? For valid programs we should not end up here.
3254 Still if we end up with truly mismatched types here, fall back
3255 to using a MEM_REF to not leak invalid GIMPLE to the following
3256 passes. */
3257 /* Prevent var from being written into SSA form. */
3258 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3259 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3260 DECL_GIMPLE_REG_P (var) = false;
3261 else if (is_gimple_reg_type (TREE_TYPE (var)))
3262 TREE_ADDRESSABLE (var) = true;
3263 use = fold_build2 (MEM_REF, caller_type,
3264 build_fold_addr_expr (var),
3265 build_int_cst (ptr_type_node, 0));
3266 }
3267 }
3268
3269 STRIP_USELESS_TYPE_CONVERSION (use);
3270
3271 if (DECL_BY_REFERENCE (result))
3272 {
3273 TREE_ADDRESSABLE (var) = 1;
3274 var = build_fold_addr_expr (var);
3275 }
3276
3277 done:
3278 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3279 way, when the RESULT_DECL is encountered, it will be
3280 automatically replaced by the VAR_DECL.
3281
3282 When returning by reference, ensure that RESULT_DECL remaps to
3283 gimple_val. */
3284 if (DECL_BY_REFERENCE (result)
3285 && !is_gimple_val (var))
3286 {
3287 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3288 insert_decl_map (id, result, temp);
3289 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3290 its default_def SSA_NAME. */
3291 if (gimple_in_ssa_p (id->src_cfun)
3292 && is_gimple_reg (result))
3293 {
3294 temp = make_ssa_name (temp, NULL);
3295 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3296 }
3297 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3298 }
3299 else
3300 insert_decl_map (id, result, var);
3301
3302 /* Remember this so we can ignore it in remap_decls. */
3303 id->retvar = var;
3304
3305 return use;
3306 }
3307
3308 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3309 to a local label. */
3310
3311 static tree
3312 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3313 {
3314 tree node = *nodep;
3315 tree fn = (tree) fnp;
3316
3317 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3318 return node;
3319
3320 if (TYPE_P (node))
3321 *walk_subtrees = 0;
3322
3323 return NULL_TREE;
3324 }
3325
3326 /* Determine if the function can be copied. If so, return NULL. If
3327 not, return a string describing the reason for failure. */
3328
3329 static const char *
3330 copy_forbidden (struct function *fun, tree fndecl)
3331 {
3332 const char *reason = fun->cannot_be_copied_reason;
3333 tree decl;
3334 unsigned ix;
3335
3336 /* Only examine the function once. */
3337 if (fun->cannot_be_copied_set)
3338 return reason;
3339
3340 /* We cannot copy a function that receives a non-local goto
3341 because we cannot remap the destination label used in the
3342 function that is performing the non-local goto. */
3343 /* ??? Actually, this should be possible, if we work at it.
3344 No doubt there's just a handful of places that simply
3345 assume it doesn't happen and don't substitute properly. */
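/* The construct in question is the GNU nested-function extension, roughly
(hypothetical user code):
void outer (void)
{
void helper (void) { goto out; }
...
out:;
}
Copying outer would need a fresh "out" label while helper still jumps
to the original one, which we cannot remap. */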
3346 if (fun->has_nonlocal_label)
3347 {
3348 reason = G_("function %q+F can never be copied "
3349 "because it receives a non-local goto");
3350 goto fail;
3351 }
3352
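/* The loop below rejects the case where a local label's address escapes
into static storage, e.g. (illustration only)
static void *resume = &&restart;
inside the function body: the saved address would keep pointing into
the original body after a copy. */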
3353 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3354 if (TREE_CODE (decl) == VAR_DECL
3355 && TREE_STATIC (decl)
3356 && !DECL_EXTERNAL (decl)
3357 && DECL_INITIAL (decl)
3358 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3359 has_label_address_in_static_1,
3360 fndecl))
3361 {
3362 reason = G_("function %q+F can never be copied because it saves "
3363 "address of local label in a static variable");
3364 goto fail;
3365 }
3366
3367 fail:
3368 fun->cannot_be_copied_reason = reason;
3369 fun->cannot_be_copied_set = true;
3370 return reason;
3371 }
3372
3373
3374 static const char *inline_forbidden_reason;
3375
3376 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3377 iff a function cannot be inlined. Also sets the reason why. */
3378
3379 static tree
3380 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3381 struct walk_stmt_info *wip)
3382 {
3383 tree fn = (tree) wip->info;
3384 tree t;
3385 gimple stmt = gsi_stmt (*gsi);
3386
3387 switch (gimple_code (stmt))
3388 {
3389 case GIMPLE_CALL:
3390 /* Refuse to inline an alloca call unless the user explicitly forced it,
3391 as this may change the program's memory overhead drastically when the
3392 function using alloca is called in a loop. In the GCC present in
3393 SPEC2000, inlining into schedule_block caused it to require 2GB of
3394 RAM instead of 256MB. Don't do so for alloca calls emitted for
3395 VLA objects, as those can't cause unbounded growth (they're always
3396 wrapped inside stack_save/stack_restore regions). */
3397 if (gimple_alloca_call_p (stmt)
3398 && !gimple_call_alloca_for_var_p (stmt)
3399 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3400 {
3401 inline_forbidden_reason
3402 = G_("function %q+F can never be inlined because it uses "
3403 "alloca (override using the always_inline attribute)");
3404 *handled_ops_p = true;
3405 return fn;
3406 }
3407
3408 t = gimple_call_fndecl (stmt);
3409 if (t == NULL_TREE)
3410 break;
3411
3412 /* We cannot inline functions that call setjmp. */
3413 if (setjmp_call_p (t))
3414 {
3415 inline_forbidden_reason
3416 = G_("function %q+F can never be inlined because it uses setjmp");
3417 *handled_ops_p = true;
3418 return t;
3419 }
3420
3421 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3422 switch (DECL_FUNCTION_CODE (t))
3423 {
3424 /* We cannot inline functions that take a variable number of
3425 arguments. */
3426 case BUILT_IN_VA_START:
3427 case BUILT_IN_NEXT_ARG:
3428 case BUILT_IN_VA_END:
3429 inline_forbidden_reason
3430 = G_("function %q+F can never be inlined because it "
3431 "uses variable argument lists");
3432 *handled_ops_p = true;
3433 return t;
3434
3435 case BUILT_IN_LONGJMP:
3436 /* We can't inline functions that call __builtin_longjmp at
3437 all. The non-local goto machinery really requires the
3438 destination be in a different function. If we allow the
3439 function calling __builtin_longjmp to be inlined into the
3440 function calling __builtin_setjmp, Things will Go Awry. */
3441 inline_forbidden_reason
3442 = G_("function %q+F can never be inlined because "
3443 "it uses setjmp-longjmp exception handling");
3444 *handled_ops_p = true;
3445 return t;
3446
3447 case BUILT_IN_NONLOCAL_GOTO:
3448 /* Similarly. */
3449 inline_forbidden_reason
3450 = G_("function %q+F can never be inlined because "
3451 "it uses non-local goto");
3452 *handled_ops_p = true;
3453 return t;
3454
3455 case BUILT_IN_RETURN:
3456 case BUILT_IN_APPLY_ARGS:
3457 /* If a __builtin_apply_args caller would be inlined,
3458 it would be saving arguments of the function it has
3459 been inlined into. Similarly, __builtin_return would
3460 return from the function the call has been inlined into. */
3461 inline_forbidden_reason
3462 = G_("function %q+F can never be inlined because "
3463 "it uses __builtin_return or __builtin_apply_args");
3464 *handled_ops_p = true;
3465 return t;
3466
3467 default:
3468 break;
3469 }
3470 break;
3471
3472 case GIMPLE_GOTO:
3473 t = gimple_goto_dest (stmt);
3474
3475 /* We will not inline a function which uses computed goto. The
3476 addresses of its local labels, which may be tucked into
3477 global storage, are of course not constant across
3478 instantiations, which causes unexpected behavior. */
3479 if (TREE_CODE (t) != LABEL_DECL)
3480 {
3481 inline_forbidden_reason
3482 = G_("function %q+F can never be inlined "
3483 "because it contains a computed goto");
3484 *handled_ops_p = true;
3485 return t;
3486 }
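/* Illustrative sketch (hedged, not part of the original sources):

     int
     interp (int op)
     {
       static void *tbl[] = { &&op_add, &&op_sub };
       goto *tbl[op];              <-- GIMPLE_GOTO whose destination is not
                                       a LABEL_DECL
      op_add:
       return 1;
      op_sub:
       return -1;
     }

   The label addresses stored in tbl refer to the original body, so an
   inlined copy could not reuse them.  */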
3487 break;
3488
3489 default:
3490 break;
3491 }
3492
3493 *handled_ops_p = false;
3494 return NULL_TREE;
3495 }
3496
3497 /* Return true if FNDECL is a function that cannot be inlined into
3498 another one. */
3499
3500 static bool
3501 inline_forbidden_p (tree fndecl)
3502 {
3503 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3504 struct walk_stmt_info wi;
3505 basic_block bb;
3506 bool forbidden_p = false;
3507
3508 /* First check for shared reasons not to copy the code. */
3509 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3510 if (inline_forbidden_reason != NULL)
3511 return true;
3512
3513 /* Next, walk the statements of the function looking for
3514 constructs we can't handle, or that are non-optimal for inlining. */
3515 hash_set<tree> visited_nodes;
3516 memset (&wi, 0, sizeof (wi));
3517 wi.info = (void *) fndecl;
3518 wi.pset = &visited_nodes;
3519
3520 FOR_EACH_BB_FN (bb, fun)
3521 {
3522 gimple ret;
3523 gimple_seq seq = bb_seq (bb);
3524 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3525 forbidden_p = (ret != NULL);
3526 if (forbidden_p)
3527 break;
3528 }
3529
3530 return forbidden_p;
3531 }
3532 \f
3533 /* Return false if the function FNDECL cannot be inlined on account of its
3534 attributes, true otherwise. */
3535 static bool
3536 function_attribute_inlinable_p (const_tree fndecl)
3537 {
3538 if (targetm.attribute_table)
3539 {
3540 const_tree a;
3541
3542 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3543 {
3544 const_tree name = TREE_PURPOSE (a);
3545 int i;
3546
3547 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3548 if (is_attribute_p (targetm.attribute_table[i].name, name))
3549 return targetm.function_attribute_inlinable_p (fndecl);
3550 }
3551 }
3552
3553 return true;
3554 }
3555
3556 /* Returns nonzero if FN is a function that does not have any
3557 fundamental inline blocking properties. */
3558
3559 bool
3560 tree_inlinable_function_p (tree fn)
3561 {
3562 bool inlinable = true;
3563 bool do_warning;
3564 tree always_inline;
3565
3566 /* If we've already decided this function shouldn't be inlined,
3567 there's no need to check again. */
3568 if (DECL_UNINLINABLE (fn))
3569 return false;
3570
3571 /* We only warn for functions declared `inline' by the user. */
3572 do_warning = (warn_inline
3573 && DECL_DECLARED_INLINE_P (fn)
3574 && !DECL_NO_INLINE_WARNING_P (fn)
3575 && !DECL_IN_SYSTEM_HEADER (fn));
3576
3577 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3578
3579 if (flag_no_inline
3580 && always_inline == NULL)
3581 {
3582 if (do_warning)
3583 warning (OPT_Winline, "function %q+F can never be inlined because it "
3584 "is suppressed using -fno-inline", fn);
3585 inlinable = false;
3586 }
3587
3588 else if (!function_attribute_inlinable_p (fn))
3589 {
3590 if (do_warning)
3591 warning (OPT_Winline, "function %q+F can never be inlined because it "
3592 "uses attributes conflicting with inlining", fn);
3593 inlinable = false;
3594 }
3595
3596 else if (inline_forbidden_p (fn))
3597 {
3598 /* See if we should warn about uninlinable functions. Previously,
3599 some of these warnings would be issued while trying to expand
3600 the function inline, but that would cause multiple warnings
3601 about functions that would for example call alloca. But since
3602 this is a property of the function, just one warning is enough.
3603 As a bonus we can now give more details about the reason why a
3604 function is not inlinable. */
3605 if (always_inline)
3606 error (inline_forbidden_reason, fn);
3607 else if (do_warning)
3608 warning (OPT_Winline, inline_forbidden_reason, fn);
3609
3610 inlinable = false;
3611 }
3612
3613 /* Squirrel away the result so that we don't have to check again. */
3614 DECL_UNINLINABLE (fn) = !inlinable;
3615
3616 return inlinable;
3617 }
3618
3619 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3620 word size, take a possible memcpy call into account, and return the
3621 cost based on whether we optimize for size or speed according to SPEED_P. */
3622
3623 int
3624 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3625 {
3626 HOST_WIDE_INT size;
3627
3628 gcc_assert (!VOID_TYPE_P (type));
3629
3630 if (TREE_CODE (type) == VECTOR_TYPE)
3631 {
3632 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3633 enum machine_mode simd
3634 = targetm.vectorize.preferred_simd_mode (inner);
3635 int simd_mode_size = GET_MODE_SIZE (simd);
3636 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3637 / simd_mode_size);
3638 }
3639
3640 size = int_size_in_bytes (type);
3641
3642 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3643 /* Cost of a memcpy call, 3 arguments and the call. */
3644 return 4;
3645 else
3646 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3647 }
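/* Worked example (hedged; MOVE_MAX_PIECES and MOVE_RATIO are target
   dependent, the numbers below are only assumptions): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4, moving a 24-byte
   struct costs (24 + 8 - 1) / 8 == 3, while a 256-byte struct exceeds the
   8 * 4 == 32 byte piecewise limit and is costed as 4, the assumed price
   of a memcpy call (3 arguments plus the call itself).  */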
3648
3649 /* Returns cost of operation CODE, according to WEIGHTS. */
3650
3651 static int
3652 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3653 tree op1 ATTRIBUTE_UNUSED, tree op2)
3654 {
3655 switch (code)
3656 {
3657 /* These are "free" conversions, or their presumed cost
3658 is folded into other operations. */
3659 case RANGE_EXPR:
3660 CASE_CONVERT:
3661 case COMPLEX_EXPR:
3662 case PAREN_EXPR:
3663 case VIEW_CONVERT_EXPR:
3664 return 0;
3665
3666 /* Assign cost of 1 to usual operations.
3667 ??? We may consider mapping RTL costs to this. */
3668 case COND_EXPR:
3669 case VEC_COND_EXPR:
3670 case VEC_PERM_EXPR:
3671
3672 case PLUS_EXPR:
3673 case POINTER_PLUS_EXPR:
3674 case MINUS_EXPR:
3675 case MULT_EXPR:
3676 case MULT_HIGHPART_EXPR:
3677 case FMA_EXPR:
3678
3679 case ADDR_SPACE_CONVERT_EXPR:
3680 case FIXED_CONVERT_EXPR:
3681 case FIX_TRUNC_EXPR:
3682
3683 case NEGATE_EXPR:
3684 case FLOAT_EXPR:
3685 case MIN_EXPR:
3686 case MAX_EXPR:
3687 case ABS_EXPR:
3688
3689 case LSHIFT_EXPR:
3690 case RSHIFT_EXPR:
3691 case LROTATE_EXPR:
3692 case RROTATE_EXPR:
3693 case VEC_LSHIFT_EXPR:
3694 case VEC_RSHIFT_EXPR:
3695
3696 case BIT_IOR_EXPR:
3697 case BIT_XOR_EXPR:
3698 case BIT_AND_EXPR:
3699 case BIT_NOT_EXPR:
3700
3701 case TRUTH_ANDIF_EXPR:
3702 case TRUTH_ORIF_EXPR:
3703 case TRUTH_AND_EXPR:
3704 case TRUTH_OR_EXPR:
3705 case TRUTH_XOR_EXPR:
3706 case TRUTH_NOT_EXPR:
3707
3708 case LT_EXPR:
3709 case LE_EXPR:
3710 case GT_EXPR:
3711 case GE_EXPR:
3712 case EQ_EXPR:
3713 case NE_EXPR:
3714 case ORDERED_EXPR:
3715 case UNORDERED_EXPR:
3716
3717 case UNLT_EXPR:
3718 case UNLE_EXPR:
3719 case UNGT_EXPR:
3720 case UNGE_EXPR:
3721 case UNEQ_EXPR:
3722 case LTGT_EXPR:
3723
3724 case CONJ_EXPR:
3725
3726 case PREDECREMENT_EXPR:
3727 case PREINCREMENT_EXPR:
3728 case POSTDECREMENT_EXPR:
3729 case POSTINCREMENT_EXPR:
3730
3731 case REALIGN_LOAD_EXPR:
3732
3733 case REDUC_MAX_EXPR:
3734 case REDUC_MIN_EXPR:
3735 case REDUC_PLUS_EXPR:
3736 case WIDEN_SUM_EXPR:
3737 case WIDEN_MULT_EXPR:
3738 case DOT_PROD_EXPR:
3739 case SAD_EXPR:
3740 case WIDEN_MULT_PLUS_EXPR:
3741 case WIDEN_MULT_MINUS_EXPR:
3742 case WIDEN_LSHIFT_EXPR:
3743
3744 case VEC_WIDEN_MULT_HI_EXPR:
3745 case VEC_WIDEN_MULT_LO_EXPR:
3746 case VEC_WIDEN_MULT_EVEN_EXPR:
3747 case VEC_WIDEN_MULT_ODD_EXPR:
3748 case VEC_UNPACK_HI_EXPR:
3749 case VEC_UNPACK_LO_EXPR:
3750 case VEC_UNPACK_FLOAT_HI_EXPR:
3751 case VEC_UNPACK_FLOAT_LO_EXPR:
3752 case VEC_PACK_TRUNC_EXPR:
3753 case VEC_PACK_SAT_EXPR:
3754 case VEC_PACK_FIX_TRUNC_EXPR:
3755 case VEC_WIDEN_LSHIFT_HI_EXPR:
3756 case VEC_WIDEN_LSHIFT_LO_EXPR:
3757
3758 return 1;
3759
3760 /* A few special cases of expensive operations. This is useful
3761 for avoiding inlining of functions having too many of these. */
3762 case TRUNC_DIV_EXPR:
3763 case CEIL_DIV_EXPR:
3764 case FLOOR_DIV_EXPR:
3765 case ROUND_DIV_EXPR:
3766 case EXACT_DIV_EXPR:
3767 case TRUNC_MOD_EXPR:
3768 case CEIL_MOD_EXPR:
3769 case FLOOR_MOD_EXPR:
3770 case ROUND_MOD_EXPR:
3771 case RDIV_EXPR:
3772 if (TREE_CODE (op2) != INTEGER_CST)
3773 return weights->div_mod_cost;
3774 return 1;
3775
3776 default:
3777 /* We expect a copy assignment with no operator. */
3778 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3779 return 0;
3780 }
3781 }
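/* Worked example (hedged): "x / y" with a non-constant divisor is costed
   at weights->div_mod_cost (10 for eni_time_weights as set up in
   init_inline_once below, 1 for eni_size_weights), whereas "x / 16" is
   costed like any other simple operator, i.e. 1.  */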
3782
3783
3784 /* Estimate number of instructions that will be created by expanding
3785 the statements in the statement sequence STMTS.
3786 WEIGHTS contains weights attributed to various constructs. */
3787
3788 static
3789 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3790 {
3791 int cost;
3792 gimple_stmt_iterator gsi;
3793
3794 cost = 0;
3795 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3796 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3797
3798 return cost;
3799 }
3800
3801
3802 /* Estimate number of instructions that will be created by expanding STMT.
3803 WEIGHTS contains weights attributed to various constructs. */
3804
3805 int
3806 estimate_num_insns (gimple stmt, eni_weights *weights)
3807 {
3808 unsigned cost, i;
3809 enum gimple_code code = gimple_code (stmt);
3810 tree lhs;
3811 tree rhs;
3812
3813 switch (code)
3814 {
3815 case GIMPLE_ASSIGN:
3816 /* Try to estimate the cost of assignments. We have two cases to
3817 deal with:
3818 1) Simple assignments to registers;
3819 2) Stores to things that must live in memory. This includes
3820 "normal" stores to scalars, but also assignments of large
3821 structures, or constructors of big arrays;
3822
3823 Let us look at both cases, assuming we have "a = b + C":
3824 <GIMPLE_ASSIGN <var_decl "a">
3825 <plus_expr <var_decl "b"> <constant C>>
3826 If "a" is a GIMPLE register, the assignment to it is free on almost
3827 any target, because "a" usually ends up in a real register. Hence
3828 the only cost of this expression comes from the PLUS_EXPR, and we
3829 can ignore the GIMPLE_ASSIGN.
3830 If "a" is not a GIMPLE register, the assignment to "a" will most
3831 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3832 of moving something into "a", which we compute using the function
3833 estimate_move_cost. */
3834 if (gimple_clobber_p (stmt))
3835 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3836
3837 lhs = gimple_assign_lhs (stmt);
3838 rhs = gimple_assign_rhs1 (stmt);
3839
3840 cost = 0;
3841
3842 /* Account for the cost of moving to / from memory. */
3843 if (gimple_store_p (stmt))
3844 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3845 if (gimple_assign_load_p (stmt))
3846 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3847
3848 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3849 gimple_assign_rhs1 (stmt),
3850 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3851 == GIMPLE_BINARY_RHS
3852 ? gimple_assign_rhs2 (stmt) : NULL);
3853 break;
3854
3855 case GIMPLE_COND:
3856 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3857 gimple_op (stmt, 0),
3858 gimple_op (stmt, 1));
3859 break;
3860
3861 case GIMPLE_SWITCH:
3862 /* Take into account the cost of the switch + guess 2 conditional jumps for
3863 each case label.
3864
3865 TODO: once the switch expansion logic is sufficiently separated, we can
3866 do a better job of estimating the cost of the switch. */
3867 if (weights->time_based)
3868 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3869 else
3870 cost = gimple_switch_num_labels (stmt) * 2;
3871 break;
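/* Worked example (hedged): a switch with 10 labels (as counted by
   gimple_switch_num_labels) is costed as floor_log2 (10) * 2 == 6 when
   estimating time, modelling a balanced decision tree, but as
   10 * 2 == 20 when estimating size.  */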
3872
3873 case GIMPLE_CALL:
3874 {
3875 tree decl;
3876
3877 if (gimple_call_internal_p (stmt))
3878 return 0;
3879 else if ((decl = gimple_call_fndecl (stmt))
3880 && DECL_BUILT_IN (decl))
3881 {
3882 /* Do not special case builtins where we see the body.
3883 This just confuses the inliner. */
3884 struct cgraph_node *node;
3885 if (!(node = cgraph_node::get (decl))
3886 || node->definition)
3887 ;
3888 /* For builtins that are likely expanded to nothing or
3889 inlined, do not account operand costs. */
3890 else if (is_simple_builtin (decl))
3891 return 0;
3892 else if (is_inexpensive_builtin (decl))
3893 return weights->target_builtin_call_cost;
3894 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3895 {
3896 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3897 specialize the cheap expansion we do here.
3898 ??? This asks for a more general solution. */
3899 switch (DECL_FUNCTION_CODE (decl))
3900 {
3901 case BUILT_IN_POW:
3902 case BUILT_IN_POWF:
3903 case BUILT_IN_POWL:
3904 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3905 && REAL_VALUES_EQUAL
3906 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3907 return estimate_operator_cost
3908 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
3909 gimple_call_arg (stmt, 0));
3910 break;
3911
3912 default:
3913 break;
3914 }
3915 }
3916 }
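/* Illustrative note (hedged): with -ffast-math the middle end may
   canonicalize "x * x" into "pow (x, 2.0)"; the BUILT_IN_POW case above
   makes sure such a call is costed like the multiplication it will expand
   back into rather than like a full call to pow.  */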
3917
3918 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3919 if (gimple_call_lhs (stmt))
3920 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
3921 weights->time_based);
3922 for (i = 0; i < gimple_call_num_args (stmt); i++)
3923 {
3924 tree arg = gimple_call_arg (stmt, i);
3925 cost += estimate_move_cost (TREE_TYPE (arg),
3926 weights->time_based);
3927 }
3928 break;
3929 }
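/* Worked example (hedged; per-type move costs are target dependent):
   for "x = f (a, b)" with scalar int operands the cost is
   weights->call_cost plus one move for the stored result and one per
   argument, i.e. call_cost + 3 on a typical target where
   estimate_move_cost of an int is 1.  */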
3930
3931 case GIMPLE_RETURN:
3932 return weights->return_cost;
3933
3934 case GIMPLE_GOTO:
3935 case GIMPLE_LABEL:
3936 case GIMPLE_NOP:
3937 case GIMPLE_PHI:
3938 case GIMPLE_PREDICT:
3939 case GIMPLE_DEBUG:
3940 return 0;
3941
3942 case GIMPLE_ASM:
3943 {
3944 int count = asm_str_count (gimple_asm_string (stmt));
3945 /* 1000 means infinity. This avoids overflows later
3946 with very long asm statements. */
3947 if (count > 1000)
3948 count = 1000;
3949 return count;
3950 }
3951
3952 case GIMPLE_RESX:
3953 /* This is either going to be an external function call with one
3954 argument, or two register copy statements plus a goto. */
3955 return 2;
3956
3957 case GIMPLE_EH_DISPATCH:
3958 /* ??? This is going to turn into a switch statement. Ideally
3959 we'd have a look at the eh region and estimate the number of
3960 edges involved. */
3961 return 10;
3962
3963 case GIMPLE_BIND:
3964 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3965
3966 case GIMPLE_EH_FILTER:
3967 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3968
3969 case GIMPLE_CATCH:
3970 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3971
3972 case GIMPLE_TRY:
3973 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3974 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3975
3976 /* OpenMP directives are generally very expensive. */
3977
3978 case GIMPLE_OMP_RETURN:
3979 case GIMPLE_OMP_SECTIONS_SWITCH:
3980 case GIMPLE_OMP_ATOMIC_STORE:
3981 case GIMPLE_OMP_CONTINUE:
3982 /* ...except these, which are cheap. */
3983 return 0;
3984
3985 case GIMPLE_OMP_ATOMIC_LOAD:
3986 return weights->omp_cost;
3987
3988 case GIMPLE_OMP_FOR:
3989 return (weights->omp_cost
3990 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3991 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3992
3993 case GIMPLE_OMP_PARALLEL:
3994 case GIMPLE_OMP_TASK:
3995 case GIMPLE_OMP_CRITICAL:
3996 case GIMPLE_OMP_MASTER:
3997 case GIMPLE_OMP_TASKGROUP:
3998 case GIMPLE_OMP_ORDERED:
3999 case GIMPLE_OMP_SECTION:
4000 case GIMPLE_OMP_SECTIONS:
4001 case GIMPLE_OMP_SINGLE:
4002 case GIMPLE_OMP_TARGET:
4003 case GIMPLE_OMP_TEAMS:
4004 return (weights->omp_cost
4005 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4006
4007 case GIMPLE_TRANSACTION:
4008 return (weights->tm_cost
4009 + estimate_num_insns_seq (gimple_transaction_body (stmt),
4010 weights));
4011
4012 default:
4013 gcc_unreachable ();
4014 }
4015
4016 return cost;
4017 }
4018
4019 /* Estimate number of instructions that will be created by expanding
4020 function FNDECL. WEIGHTS contains weights attributed to various
4021 constructs. */
4022
4023 int
4024 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4025 {
4026 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4027 gimple_stmt_iterator bsi;
4028 basic_block bb;
4029 int n = 0;
4030
4031 gcc_assert (my_function && my_function->cfg);
4032 FOR_EACH_BB_FN (bb, my_function)
4033 {
4034 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4035 n += estimate_num_insns (gsi_stmt (bsi), weights);
4036 }
4037
4038 return n;
4039 }
4040
4041
4042 /* Initializes weights used by estimate_num_insns. */
4043
4044 void
4045 init_inline_once (void)
4046 {
4047 eni_size_weights.call_cost = 1;
4048 eni_size_weights.indirect_call_cost = 3;
4049 eni_size_weights.target_builtin_call_cost = 1;
4050 eni_size_weights.div_mod_cost = 1;
4051 eni_size_weights.omp_cost = 40;
4052 eni_size_weights.tm_cost = 10;
4053 eni_size_weights.time_based = false;
4054 eni_size_weights.return_cost = 1;
4055
4056 /* Estimating time for call is difficult, since we have no idea what the
4057 called function does. In the current uses of eni_time_weights,
4058 underestimating the cost does less harm than overestimating it, so
4059 we choose a rather small value here. */
4060 eni_time_weights.call_cost = 10;
4061 eni_time_weights.indirect_call_cost = 15;
4062 eni_time_weights.target_builtin_call_cost = 1;
4063 eni_time_weights.div_mod_cost = 10;
4064 eni_time_weights.omp_cost = 40;
4065 eni_time_weights.tm_cost = 40;
4066 eni_time_weights.time_based = true;
4067 eni_time_weights.return_cost = 2;
4068 }
4069
4070 /* Estimate the number of instructions in a gimple_seq. */
4071
4072 int
4073 count_insns_seq (gimple_seq seq, eni_weights *weights)
4074 {
4075 gimple_stmt_iterator gsi;
4076 int n = 0;
4077 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4078 n += estimate_num_insns (gsi_stmt (gsi), weights);
4079
4080 return n;
4081 }
4082
4083
4084 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4085
4086 static void
4087 prepend_lexical_block (tree current_block, tree new_block)
4088 {
4089 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4090 BLOCK_SUBBLOCKS (current_block) = new_block;
4091 BLOCK_SUPERCONTEXT (new_block) = current_block;
4092 }
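/* Illustrative note (hedged): if CURRENT_BLOCK previously had the
   subblock chain B1 -> B2, then after
     prepend_lexical_block (current_block, new_block);
   the chain reads NEW_BLOCK -> B1 -> B2 and
   BLOCK_SUPERCONTEXT (NEW_BLOCK) == CURRENT_BLOCK.  */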
4093
4094 /* Add local variables from CALLEE to CALLER. */
4095
4096 static inline void
4097 add_local_variables (struct function *callee, struct function *caller,
4098 copy_body_data *id)
4099 {
4100 tree var;
4101 unsigned ix;
4102
4103 FOR_EACH_LOCAL_DECL (callee, ix, var)
4104 if (!can_be_nonlocal (var, id))
4105 {
4106 tree new_var = remap_decl (var, id);
4107
4108 /* Remap debug-expressions. */
4109 if (TREE_CODE (new_var) == VAR_DECL
4110 && DECL_HAS_DEBUG_EXPR_P (var)
4111 && new_var != var)
4112 {
4113 tree tem = DECL_DEBUG_EXPR (var);
4114 bool old_regimplify = id->regimplify;
4115 id->remapping_type_depth++;
4116 walk_tree (&tem, copy_tree_body_r, id, NULL);
4117 id->remapping_type_depth--;
4118 id->regimplify = old_regimplify;
4119 SET_DECL_DEBUG_EXPR (new_var, tem);
4120 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4121 }
4122 add_local_decl (caller, new_var);
4123 }
4124 }
4125
4126 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4127
4128 static bool
4129 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4130 {
4131 tree use_retvar;
4132 tree fn;
4133 hash_map<tree, tree> *dst;
4134 hash_map<tree, tree> *st = NULL;
4135 tree return_slot;
4136 tree modify_dest;
4137 location_t saved_location;
4138 struct cgraph_edge *cg_edge;
4139 cgraph_inline_failed_t reason;
4140 basic_block return_block;
4141 edge e;
4142 gimple_stmt_iterator gsi, stmt_gsi;
4143 bool successfully_inlined = FALSE;
4144 bool purge_dead_abnormal_edges;
4145
4146 /* Set input_location here so we get the right instantiation context
4147 if we call instantiate_decl from inlinable_function_p. */
4148 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4149 saved_location = input_location;
4150 input_location = gimple_location (stmt);
4151
4152 /* From here on, we're only interested in CALL_EXPRs. */
4153 if (gimple_code (stmt) != GIMPLE_CALL)
4154 goto egress;
4155
4156 cg_edge = id->dst_node->get_edge (stmt);
4157 gcc_checking_assert (cg_edge);
4158 /* First, see if we can figure out what function is being called.
4159 If we cannot, then there is no hope of inlining the function. */
4160 if (cg_edge->indirect_unknown_callee)
4161 goto egress;
4162 fn = cg_edge->callee->decl;
4163 gcc_checking_assert (fn);
4164
4165 /* If FN is a declaration of a function in a nested scope that was
4166 globally declared inline, we don't set its DECL_INITIAL.
4167 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4168 C++ front-end uses it for cdtors to refer to their internal
4169 declarations, which are not real functions. Fortunately those
4170 don't have trees to be saved, so we can tell by checking their
4171 gimple_body. */
4172 if (!DECL_INITIAL (fn)
4173 && DECL_ABSTRACT_ORIGIN (fn)
4174 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4175 fn = DECL_ABSTRACT_ORIGIN (fn);
4176
4177 /* Don't try to inline functions that are not well-suited to inlining. */
4178 if (cg_edge->inline_failed)
4179 {
4180 reason = cg_edge->inline_failed;
4181 /* If this call was originally indirect, we do not want to emit any
4182 inlining related warnings or sorry messages because there are no
4183 guarantees regarding those. */
4184 if (cg_edge->indirect_inlining_edge)
4185 goto egress;
4186
4187 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4188 /* For extern inline functions that get redefined we always
4189 silently ignore the always_inline flag. Better behavior would
4190 be to keep both bodies and use the extern inline body
4191 for inlining, but we can't do that because frontends overwrite
4192 the body. */
4193 && !cg_edge->callee->local.redefined_extern_inline
4194 /* During early inline pass, report only when optimization is
4195 not turned on. */
4196 && (cgraph_global_info_ready
4197 || !optimize
4198 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4199 /* PR 20090218-1_0.c. Body can be provided by another module. */
4200 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4201 {
4202 error ("inlining failed in call to always_inline %q+F: %s", fn,
4203 cgraph_inline_failed_string (reason));
4204 error ("called from here");
4205 }
4206 else if (warn_inline
4207 && DECL_DECLARED_INLINE_P (fn)
4208 && !DECL_NO_INLINE_WARNING_P (fn)
4209 && !DECL_IN_SYSTEM_HEADER (fn)
4210 && reason != CIF_UNSPECIFIED
4211 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4212 /* Do not warn about not inlined recursive calls. */
4213 && !cgraph_edge_recursive_p (cg_edge)
4214 /* Avoid warnings during early inline pass. */
4215 && cgraph_global_info_ready)
4216 {
4217 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4218 fn, _(cgraph_inline_failed_string (reason)));
4219 warning (OPT_Winline, "called from here");
4220 }
4221 goto egress;
4222 }
4223 fn = cg_edge->callee->decl;
4224 cg_edge->callee->get_body ();
4225
4226 #ifdef ENABLE_CHECKING
4227 if (cg_edge->callee->decl != id->dst_node->decl)
4228 cg_edge->callee->verify ();
4229 #endif
4230
4231 /* We will be inlining this callee. */
4232 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4233
4234 /* Update the caller's EH personality. */
4235 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4236 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4237 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4238
4239 /* Split the block holding the GIMPLE_CALL. */
4240 e = split_block (bb, stmt);
4241 bb = e->src;
4242 return_block = e->dest;
4243 remove_edge (e);
4244
4245 /* split_block splits after the statement; work around this by
4246 moving the call into the second block manually. Not pretty,
4247 but seems easier than doing the CFG manipulation by hand
4248 when the GIMPLE_CALL is in the last statement of BB. */
4249 stmt_gsi = gsi_last_bb (bb);
4250 gsi_remove (&stmt_gsi, false);
4251
4252 /* If the GIMPLE_CALL was the last statement of BB, it may have
4253 been the source of abnormal edges. In this case, schedule
4254 the removal of dead abnormal edges. */
4255 gsi = gsi_start_bb (return_block);
4256 if (gsi_end_p (gsi))
4257 {
4258 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4259 purge_dead_abnormal_edges = true;
4260 }
4261 else
4262 {
4263 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4264 purge_dead_abnormal_edges = false;
4265 }
4266
4267 stmt_gsi = gsi_start_bb (return_block);
4268
4269 /* Build a block containing code to initialize the arguments, the
4270 actual inline expansion of the body, and a label for the return
4271 statements within the function to jump to. The type of the
4272 statement expression is the return type of the function call.
4273 ??? If the call does not have an associated block then we will
4274 remap all callee blocks to NULL, effectively dropping most of
4275 its debug information. This should only happen for calls to
4276 artificial decls inserted by the compiler itself. We need to
4277 either link the inlined blocks into the caller block tree or
4278 not refer to them in any way to not break GC for locations. */
4279 if (gimple_block (stmt))
4280 {
4281 id->block = make_node (BLOCK);
4282 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4283 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4284 prepend_lexical_block (gimple_block (stmt), id->block);
4285 }
4286
4287 /* Local declarations will be replaced by their equivalents in this
4288 map. */
4289 st = id->decl_map;
4290 id->decl_map = new hash_map<tree, tree>;
4291 dst = id->debug_map;
4292 id->debug_map = NULL;
4293
4294 /* Record the function we are about to inline. */
4295 id->src_fn = fn;
4296 id->src_node = cg_edge->callee;
4297 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4298 id->gimple_call = stmt;
4299
4300 gcc_assert (!id->src_cfun->after_inlining);
4301
4302 id->entry_bb = bb;
4303 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4304 {
4305 gimple_stmt_iterator si = gsi_last_bb (bb);
4306 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4307 NOT_TAKEN),
4308 GSI_NEW_STMT);
4309 }
4310 initialize_inlined_parameters (id, stmt, fn, bb);
4311
4312 if (DECL_INITIAL (fn))
4313 {
4314 if (gimple_block (stmt))
4315 {
4316 tree *var;
4317
4318 prepend_lexical_block (id->block,
4319 remap_blocks (DECL_INITIAL (fn), id));
4320 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4321 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4322 == NULL_TREE));
4323 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4324 otherwise for DWARF the DW_TAG_formal_parameter DIEs will not be children of
4325 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4326 under it. The parameters can then be evaluated in the debugger,
4327 but don't show up in backtraces. */
4328 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4329 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4330 {
4331 tree v = *var;
4332 *var = TREE_CHAIN (v);
4333 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4334 BLOCK_VARS (id->block) = v;
4335 }
4336 else
4337 var = &TREE_CHAIN (*var);
4338 }
4339 else
4340 remap_blocks_to_null (DECL_INITIAL (fn), id);
4341 }
4342
4343 /* Return statements in the function body will be replaced by jumps
4344 to the RET_LABEL. */
4345 gcc_assert (DECL_INITIAL (fn));
4346 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4347
4348 /* Find the LHS to which the result of this call is assigned. */
4349 return_slot = NULL;
4350 if (gimple_call_lhs (stmt))
4351 {
4352 modify_dest = gimple_call_lhs (stmt);
4353
4354 /* The function which we are inlining might not return a value,
4355 in which case we should issue a warning that the function
4356 does not return a value. In that case the optimizers will
4357 see that the variable to which the value is assigned was not
4358 initialized. We do not want to issue a warning about that
4359 uninitialized variable. */
4360 if (DECL_P (modify_dest))
4361 TREE_NO_WARNING (modify_dest) = 1;
4362
4363 if (gimple_call_return_slot_opt_p (stmt))
4364 {
4365 return_slot = modify_dest;
4366 modify_dest = NULL;
4367 }
4368 }
4369 else
4370 modify_dest = NULL;
4371
4372 /* If we are inlining a call to the C++ operator new, we don't want
4373 to use type based alias analysis on the return value. Otherwise
4374 we may get confused if the compiler sees that the inlined new
4375 function returns a pointer which was just deleted. See bug
4376 33407. */
4377 if (DECL_IS_OPERATOR_NEW (fn))
4378 {
4379 return_slot = NULL;
4380 modify_dest = NULL;
4381 }
4382
4383 /* Declare the return variable for the function. */
4384 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4385
4386 /* Add local vars in this inlined callee to caller. */
4387 add_local_variables (id->src_cfun, cfun, id);
4388
4389 if (dump_file && (dump_flags & TDF_DETAILS))
4390 {
4391 fprintf (dump_file, "Inlining ");
4392 print_generic_expr (dump_file, id->src_fn, 0);
4393 fprintf (dump_file, " to ");
4394 print_generic_expr (dump_file, id->dst_fn, 0);
4395 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4396 }
4397
4398 /* This is it. Duplicate the callee body. Assume callee is
4399 pre-gimplified. Note that we must not alter the caller
4400 function in any way before this point, as this CALL_EXPR may be
4401 a self-referential call; if we're calling ourselves, we need to
4402 duplicate our body before altering anything. */
4403 copy_body (id, cg_edge->callee->count,
4404 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4405 bb, return_block, NULL);
4406
4407 /* Reset the escaped solution. */
4408 if (cfun->gimple_df)
4409 pt_solution_reset (&cfun->gimple_df->escaped);
4410
4411 /* Clean up. */
4412 if (id->debug_map)
4413 {
4414 delete id->debug_map;
4415 id->debug_map = dst;
4416 }
4417 delete id->decl_map;
4418 id->decl_map = st;
4419
4420 /* Unlink the call's virtual operands before replacing it. */
4421 unlink_stmt_vdef (stmt);
4422 if (gimple_vdef (stmt)
4423 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4424 release_ssa_name (gimple_vdef (stmt));
4425
4426 /* If the inlined function returns a result that we care about,
4427 substitute the GIMPLE_CALL with an assignment of the return
4428 variable to the LHS of the call. That is, if STMT was
4429 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4430 if (use_retvar && gimple_call_lhs (stmt))
4431 {
4432 gimple old_stmt = stmt;
4433 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4434 gsi_replace (&stmt_gsi, stmt, false);
4435 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4436 }
4437 else
4438 {
4439 /* Handle the case of inlining a function with no return
4440 statement, which causes the return value to become undefined. */
4441 if (gimple_call_lhs (stmt)
4442 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4443 {
4444 tree name = gimple_call_lhs (stmt);
4445 tree var = SSA_NAME_VAR (name);
4446 tree def = ssa_default_def (cfun, var);
4447
4448 if (def)
4449 {
4450 /* If the variable is used undefined, make this name
4451 undefined via a move. */
4452 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4453 gsi_replace (&stmt_gsi, stmt, true);
4454 }
4455 else
4456 {
4457 /* Otherwise make this variable undefined. */
4458 gsi_remove (&stmt_gsi, true);
4459 set_ssa_default_def (cfun, var, name);
4460 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4461 }
4462 }
4463 else
4464 gsi_remove (&stmt_gsi, true);
4465 }
4466
4467 if (purge_dead_abnormal_edges)
4468 {
4469 gimple_purge_dead_eh_edges (return_block);
4470 gimple_purge_dead_abnormal_call_edges (return_block);
4471 }
4472
4473 /* If the value of the new expression is ignored, that's OK. We
4474 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4475 the equivalent inlined version either. */
4476 if (is_gimple_assign (stmt))
4477 {
4478 gcc_assert (gimple_assign_single_p (stmt)
4479 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4480 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4481 }
4482
4483 /* Output the inlining info for this abstract function, since it has been
4484 inlined. If we don't do this now, we can lose the information about the
4485 variables in the function when the blocks get blown away as soon as we
4486 remove the cgraph node. */
4487 if (gimple_block (stmt))
4488 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4489
4490 /* Update callgraph if needed. */
4491 cg_edge->callee->remove ();
4492
4493 id->block = NULL_TREE;
4494 successfully_inlined = TRUE;
4495
4496 egress:
4497 input_location = saved_location;
4498 return successfully_inlined;
4499 }
4500
4501 /* Expand call statements in basic block BB.
4502 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4503 in a MODIFY_EXPR. */
4504
4505 static bool
4506 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4507 {
4508 gimple_stmt_iterator gsi;
4509
4510 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4511 {
4512 gimple stmt = gsi_stmt (gsi);
4513
4514 if (is_gimple_call (stmt)
4515 && !gimple_call_internal_p (stmt)
4516 && expand_call_inline (bb, stmt, id))
4517 return true;
4518 }
4519
4520 return false;
4521 }
4522
4523
4524 /* Walk all basic blocks created after FIRST and try to fold every statement
4525 in the STATEMENTS hash set. */
4526
4527 static void
4528 fold_marked_statements (int first, hash_set<gimple> *statements)
4529 {
4530 for (; first < n_basic_blocks_for_fn (cfun); first++)
4531 if (BASIC_BLOCK_FOR_FN (cfun, first))
4532 {
4533 gimple_stmt_iterator gsi;
4534
4535 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4536 !gsi_end_p (gsi);
4537 gsi_next (&gsi))
4538 if (statements->contains (gsi_stmt (gsi)))
4539 {
4540 gimple old_stmt = gsi_stmt (gsi);
4541 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4542
4543 if (old_decl && DECL_BUILT_IN (old_decl))
4544 {
4545 /* Folding builtins can create multiple instructions;
4546 we need to look at all of them. */
4547 gimple_stmt_iterator i2 = gsi;
4548 gsi_prev (&i2);
4549 if (fold_stmt (&gsi))
4550 {
4551 gimple new_stmt;
4552 /* If a builtin at the end of a bb folded into nothing,
4553 the following loop won't work. */
4554 if (gsi_end_p (gsi))
4555 {
4556 cgraph_update_edges_for_call_stmt (old_stmt,
4557 old_decl, NULL);
4558 break;
4559 }
4560 if (gsi_end_p (i2))
4561 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4562 else
4563 gsi_next (&i2);
4564 while (1)
4565 {
4566 new_stmt = gsi_stmt (i2);
4567 update_stmt (new_stmt);
4568 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4569 new_stmt);
4570
4571 if (new_stmt == gsi_stmt (gsi))
4572 {
4573 /* It is okay to check only for the very last
4574 of these statements. If it is a throwing
4575 statement nothing will change. If it isn't,
4576 this can remove EH edges. The only problematic
4577 case would be some intermediate statement throwing
4578 while the last one does not; that would mean
4579 we'd have to split the block, which we can't do
4580 here and we'd lose anyway. And as builtins
4581 probably never throw, this is all
4582 moot anyway. */
4583 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4584 new_stmt))
4585 gimple_purge_dead_eh_edges (
4586 BASIC_BLOCK_FOR_FN (cfun, first));
4587 break;
4588 }
4589 gsi_next (&i2);
4590 }
4591 }
4592 }
4593 else if (fold_stmt (&gsi))
4594 {
4595 /* Re-read the statement from GSI as fold_stmt() may
4596 have changed it. */
4597 gimple new_stmt = gsi_stmt (gsi);
4598 update_stmt (new_stmt);
4599
4600 if (is_gimple_call (old_stmt)
4601 || is_gimple_call (new_stmt))
4602 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4603 new_stmt);
4604
4605 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4606 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4607 first));
4608 }
4609 }
4610 }
4611 }
4612
4613 /* Expand calls to inline functions in the body of FN. */
4614
4615 unsigned int
4616 optimize_inline_calls (tree fn)
4617 {
4618 copy_body_data id;
4619 basic_block bb;
4620 int last = n_basic_blocks_for_fn (cfun);
4621 bool inlined_p = false;
4622
4623 /* Clear out ID. */
4624 memset (&id, 0, sizeof (id));
4625
4626 id.src_node = id.dst_node = cgraph_node::get (fn);
4627 gcc_assert (id.dst_node->definition);
4628 id.dst_fn = fn;
4629 /* Or any functions that aren't finished yet. */
4630 if (current_function_decl)
4631 id.dst_fn = current_function_decl;
4632
4633 id.copy_decl = copy_decl_maybe_to_var;
4634 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4635 id.transform_new_cfg = false;
4636 id.transform_return_to_modify = true;
4637 id.transform_parameter = true;
4638 id.transform_lang_insert_block = NULL;
4639 id.statements_to_fold = new hash_set<gimple>;
4640
4641 push_gimplify_context ();
4642
4643 /* We make no attempts to keep dominance info up-to-date. */
4644 free_dominance_info (CDI_DOMINATORS);
4645 free_dominance_info (CDI_POST_DOMINATORS);
4646
4647 /* Register specific gimple functions. */
4648 gimple_register_cfg_hooks ();
4649
4650 /* Reach the trees by walking over the CFG, and note the
4651 enclosing basic-blocks in the call edges. */
4652 /* We walk the blocks going forward, because inlined function bodies
4653 will split id->current_basic_block, and the new blocks will
4654 follow it; we'll trudge through them, processing their CALL_EXPRs
4655 along the way. */
4656 FOR_EACH_BB_FN (bb, cfun)
4657 inlined_p |= gimple_expand_calls_inline (bb, &id);
4658
4659 pop_gimplify_context (NULL);
4660
4661 #ifdef ENABLE_CHECKING
4662 {
4663 struct cgraph_edge *e;
4664
4665 id.dst_node->verify ();
4666
4667 /* Double check that we inlined everything we are supposed to inline. */
4668 for (e = id.dst_node->callees; e; e = e->next_callee)
4669 gcc_assert (e->inline_failed);
4670 }
4671 #endif
4672
4673 /* Fold queued statements. */
4674 fold_marked_statements (last, id.statements_to_fold);
4675 delete id.statements_to_fold;
4676
4677 gcc_assert (!id.debug_stmts.exists ());
4678
4679 /* If we didn't inline into the function there is nothing to do. */
4680 if (!inlined_p)
4681 return 0;
4682
4683 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4684 number_blocks (fn);
4685
4686 delete_unreachable_blocks_update_callgraph (&id);
4687 #ifdef ENABLE_CHECKING
4688 id.dst_node->verify ();
4689 #endif
4690
4691 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4692 not possible yet - the IPA passes might make various functions not
4693 throw and they don't care to proactively update local EH info. This is
4694 done later in the fixup_cfg pass, which also executes the verification. */
4695 return (TODO_update_ssa
4696 | TODO_cleanup_cfg
4697 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4698 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4699 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4700 ? TODO_rebuild_frequencies : 0));
4701 }
4702
4703 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4704
4705 tree
4706 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4707 {
4708 enum tree_code code = TREE_CODE (*tp);
4709 enum tree_code_class cl = TREE_CODE_CLASS (code);
4710
4711 /* We make copies of most nodes. */
4712 if (IS_EXPR_CODE_CLASS (cl)
4713 || code == TREE_LIST
4714 || code == TREE_VEC
4715 || code == TYPE_DECL
4716 || code == OMP_CLAUSE)
4717 {
4718 /* Because the chain gets clobbered when we make a copy, we save it
4719 here. */
4720 tree chain = NULL_TREE, new_tree;
4721
4722 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4723 chain = TREE_CHAIN (*tp);
4724
4725 /* Copy the node. */
4726 new_tree = copy_node (*tp);
4727
4728 *tp = new_tree;
4729
4730 /* Now, restore the chain, if appropriate. That will cause
4731 walk_tree to walk into the chain as well. */
4732 if (code == PARM_DECL
4733 || code == TREE_LIST
4734 || code == OMP_CLAUSE)
4735 TREE_CHAIN (*tp) = chain;
4736
4737 /* For now, we don't update BLOCKs when we make copies. So, we
4738 have to nullify all BIND_EXPRs. */
4739 if (TREE_CODE (*tp) == BIND_EXPR)
4740 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4741 }
4742 else if (code == CONSTRUCTOR)
4743 {
4744 /* CONSTRUCTOR nodes need special handling because
4745 we need to duplicate the vector of elements. */
4746 tree new_tree;
4747
4748 new_tree = copy_node (*tp);
4749 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4750 *tp = new_tree;
4751 }
4752 else if (code == STATEMENT_LIST)
4753 /* We used to just abort on STATEMENT_LIST, but we can run into them
4754 with statement-expressions (c++/40975). */
4755 copy_statement_list (tp);
4756 else if (TREE_CODE_CLASS (code) == tcc_type)
4757 *walk_subtrees = 0;
4758 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4759 *walk_subtrees = 0;
4760 else if (TREE_CODE_CLASS (code) == tcc_constant)
4761 *walk_subtrees = 0;
4762 return NULL_TREE;
4763 }
4764
4765 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4766 information indicating to what new SAVE_EXPR this one should be mapped,
4767 use that one. Otherwise, create a new node and enter it in ST. FN is
4768 the function into which the copy will be placed. */
4769
4770 static void
4771 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
4772 {
4773 tree *n;
4774 tree t;
4775
4776 /* See if we already encountered this SAVE_EXPR. */
4777 n = st->get (*tp);
4778
4779 /* If we didn't already remap this SAVE_EXPR, do so now. */
4780 if (!n)
4781 {
4782 t = copy_node (*tp);
4783
4784 /* Remember this SAVE_EXPR. */
4785 st->put (*tp, t);
4786 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4787 st->put (t, t);
4788 }
4789 else
4790 {
4791 /* We've already walked into this SAVE_EXPR; don't do it again. */
4792 *walk_subtrees = 0;
4793 t = *n;
4794 }
4795
4796 /* Replace this SAVE_EXPR with the copy. */
4797 *tp = t;
4798 }
4799
4800 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4801 label, copies the declaration and enters it in the decl map in DATA (which
4802 is really a 'copy_body_data *'). */
4803
4804 static tree
4805 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4806 bool *handled_ops_p ATTRIBUTE_UNUSED,
4807 struct walk_stmt_info *wi)
4808 {
4809 copy_body_data *id = (copy_body_data *) wi->info;
4810 gimple stmt = gsi_stmt (*gsip);
4811
4812 if (gimple_code (stmt) == GIMPLE_LABEL)
4813 {
4814 tree decl = gimple_label_label (stmt);
4815
4816 /* Copy the decl and remember the copy. */
4817 insert_decl_map (id, decl, id->copy_decl (decl, id));
4818 }
4819
4820 return NULL_TREE;
4821 }
4822
4823
4824 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4825 Using the hash map pointed to by ST (the copy_body_data's decl_map),
4826 remaps all local declarations to appropriate replacements in gimple
4827 operands. */
4828
4829 static tree
4830 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4831 {
4832 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4833 copy_body_data *id = (copy_body_data *) wi->info;
4834 hash_map<tree, tree> *st = id->decl_map;
4835 tree *n;
4836 tree expr = *tp;
4837
4838 /* Only a local declaration (variable or label). */
4839 if ((TREE_CODE (expr) == VAR_DECL
4840 && !TREE_STATIC (expr))
4841 || TREE_CODE (expr) == LABEL_DECL)
4842 {
4843 /* Lookup the declaration. */
4844 n = st->get (expr);
4845
4846 /* If it's there, remap it. */
4847 if (n)
4848 *tp = *n;
4849 *walk_subtrees = 0;
4850 }
4851 else if (TREE_CODE (expr) == STATEMENT_LIST
4852 || TREE_CODE (expr) == BIND_EXPR
4853 || TREE_CODE (expr) == SAVE_EXPR)
4854 gcc_unreachable ();
4855 else if (TREE_CODE (expr) == TARGET_EXPR)
4856 {
4857 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4858 It's OK for this to happen if it was part of a subtree that
4859 isn't immediately expanded, such as operand 2 of another
4860 TARGET_EXPR. */
4861 if (!TREE_OPERAND (expr, 1))
4862 {
4863 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4864 TREE_OPERAND (expr, 3) = NULL_TREE;
4865 }
4866 }
4867
4868 /* Keep iterating. */
4869 return NULL_TREE;
4870 }
4871
4872
4873 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4874 Using the decl map recorded in the copy_body_data pointed to by WI->INFO,
4875 remaps all local declarations to appropriate replacements in gimple
4876 statements. */
4877
4878 static tree
4879 replace_locals_stmt (gimple_stmt_iterator *gsip,
4880 bool *handled_ops_p ATTRIBUTE_UNUSED,
4881 struct walk_stmt_info *wi)
4882 {
4883 copy_body_data *id = (copy_body_data *) wi->info;
4884 gimple stmt = gsi_stmt (*gsip);
4885
4886 if (gimple_code (stmt) == GIMPLE_BIND)
4887 {
4888 tree block = gimple_bind_block (stmt);
4889
4890 if (block)
4891 {
4892 remap_block (&block, id);
4893 gimple_bind_set_block (stmt, block);
4894 }
4895
4896 /* This will remap a lot of the same decls again, but this should be
4897 harmless. */
4898 if (gimple_bind_vars (stmt))
4899 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4900 NULL, id));
4901 }
4902
4903 /* Keep iterating. */
4904 return NULL_TREE;
4905 }
4906
4907
4908 /* Copies everything in SEQ and replaces variables and labels local to
4909 current_function_decl. */
4910
4911 gimple_seq
4912 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4913 {
4914 copy_body_data id;
4915 struct walk_stmt_info wi;
4916 gimple_seq copy;
4917
4918 /* There's nothing to do for NULL_TREE. */
4919 if (seq == NULL)
4920 return seq;
4921
4922 /* Set up ID. */
4923 memset (&id, 0, sizeof (id));
4924 id.src_fn = current_function_decl;
4925 id.dst_fn = current_function_decl;
4926 id.decl_map = new hash_map<tree, tree>;
4927 id.debug_map = NULL;
4928
4929 id.copy_decl = copy_decl_no_change;
4930 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4931 id.transform_new_cfg = false;
4932 id.transform_return_to_modify = false;
4933 id.transform_parameter = false;
4934 id.transform_lang_insert_block = NULL;
4935
4936 /* Walk the tree once to find local labels. */
4937 memset (&wi, 0, sizeof (wi));
4938 hash_set<tree> visited;
4939 wi.info = &id;
4940 wi.pset = &visited;
4941 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4942
4943 copy = gimple_seq_copy (seq);
4944
4945 /* Walk the copy, remapping decls. */
4946 memset (&wi, 0, sizeof (wi));
4947 wi.info = &id;
4948 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4949
4950 /* Clean up. */
4951 delete id.decl_map;
4952 if (id.debug_map)
4953 delete id.debug_map;
4954
4955 return copy;
4956 }
4957
4958
4959 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4960
4961 static tree
4962 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4963 {
4964 if (*tp == data)
4965 return (tree) data;
4966 else
4967 return NULL;
4968 }
4969
4970 DEBUG_FUNCTION bool
4971 debug_find_tree (tree top, tree search)
4972 {
4973 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4974 }
4975
4976
4977 /* Declare the variables created by the inliner. Add all the variables in
4978 VARS to BLOCK. */
4979
4980 static void
4981 declare_inline_vars (tree block, tree vars)
4982 {
4983 tree t;
4984 for (t = vars; t; t = DECL_CHAIN (t))
4985 {
4986 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4987 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4988 add_local_decl (cfun, t);
4989 }
4990
4991 if (block)
4992 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4993 }
4994
4995 /* Finish copying a DECL: COPY is the copy of DECL. The DECL originally
4996 was in ID->src_fn, but the copy will live in ID->dst_fn; fix up the
4997 bits that must differ between the two. */
4998
4999 static tree
5000 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5001 {
5002 /* Don't generate debug information for the copy if we wouldn't have
5003 generated it for the original. */
5004 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5005 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5006
5007 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5008 declaration inspired this copy. */
5009 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5010
5011 /* The new variable/label has no RTL, yet. */
5012 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5013 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5014 SET_DECL_RTL (copy, 0);
5015
5016 /* These args would always appear unused, if not for this. */
5017 TREE_USED (copy) = 1;
5018
5019 /* Set the context for the new declaration. */
5020 if (!DECL_CONTEXT (decl))
5021 /* Globals stay global. */
5022 ;
5023 else if (DECL_CONTEXT (decl) != id->src_fn)
5024 /* Things that weren't in the scope of the function we're inlining
5025 from aren't in the scope we're inlining to, either. */
5026 ;
5027 else if (TREE_STATIC (decl))
5028 /* Function-scoped static variables should stay in the original
5029 function. */
5030 ;
5031 else
5032 /* Ordinary automatic local variables are now in the scope of the
5033 new function. */
5034 DECL_CONTEXT (copy) = id->dst_fn;
5035
5036 return copy;
5037 }
5038
5039 static tree
5040 copy_decl_to_var (tree decl, copy_body_data *id)
5041 {
5042 tree copy, type;
5043
5044 gcc_assert (TREE_CODE (decl) == PARM_DECL
5045 || TREE_CODE (decl) == RESULT_DECL);
5046
5047 type = TREE_TYPE (decl);
5048
5049 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5050 VAR_DECL, DECL_NAME (decl), type);
5051 if (DECL_PT_UID_SET_P (decl))
5052 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5053 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5054 TREE_READONLY (copy) = TREE_READONLY (decl);
5055 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5056 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5057
5058 return copy_decl_for_dup_finish (id, decl, copy);
5059 }
5060
5061 /* Like copy_decl_to_var, but create a return slot object instead of a
5062 pointer variable for return by invisible reference. */
5063
5064 static tree
5065 copy_result_decl_to_var (tree decl, copy_body_data *id)
5066 {
5067 tree copy, type;
5068
5069 gcc_assert (TREE_CODE (decl) == PARM_DECL
5070 || TREE_CODE (decl) == RESULT_DECL);
5071
5072 type = TREE_TYPE (decl);
5073 if (DECL_BY_REFERENCE (decl))
5074 type = TREE_TYPE (type);
5075
5076 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5077 VAR_DECL, DECL_NAME (decl), type);
5078 if (DECL_PT_UID_SET_P (decl))
5079 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5080 TREE_READONLY (copy) = TREE_READONLY (decl);
5081 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5082 if (!DECL_BY_REFERENCE (decl))
5083 {
5084 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5085 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5086 }
5087
5088 return copy_decl_for_dup_finish (id, decl, copy);
5089 }
5090
5091 tree
5092 copy_decl_no_change (tree decl, copy_body_data *id)
5093 {
5094 tree copy;
5095
5096 copy = copy_node (decl);
5097
5098 /* The COPY is not abstract; it will be generated in DST_FN. */
5099 DECL_ABSTRACT (copy) = 0;
5100 lang_hooks.dup_lang_specific_decl (copy);
5101
5102 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5103 been taken; it's for internal bookkeeping in expand_goto_internal. */
5104 if (TREE_CODE (copy) == LABEL_DECL)
5105 {
5106 TREE_ADDRESSABLE (copy) = 0;
5107 LABEL_DECL_UID (copy) = -1;
5108 }
5109
5110 return copy_decl_for_dup_finish (id, decl, copy);
5111 }
5112
5113 static tree
5114 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5115 {
5116 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5117 return copy_decl_to_var (decl, id);
5118 else
5119 return copy_decl_no_change (decl, id);
5120 }
5121
5122 /* Return a copy of the function's argument tree. */
5123 static tree
5124 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5125 bitmap args_to_skip, tree *vars)
5126 {
5127 tree arg, *parg;
5128 tree new_parm = NULL;
5129 int i = 0;
5130
5131 parg = &new_parm;
5132
5133 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5134 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5135 {
5136 tree new_tree = remap_decl (arg, id);
5137 if (TREE_CODE (new_tree) != PARM_DECL)
5138 new_tree = id->copy_decl (arg, id);
5139 lang_hooks.dup_lang_specific_decl (new_tree);
5140 *parg = new_tree;
5141 parg = &DECL_CHAIN (new_tree);
5142 }
5143 else if (!id->decl_map->get (arg))
5144 {
5145 /* Make an equivalent VAR_DECL. If the argument was used
5146 as a temporary variable later in the function, the uses will be
5147 replaced by a local variable. */
5148 tree var = copy_decl_to_var (arg, id);
5149 insert_decl_map (id, arg, var);
5150 /* Declare this new variable. */
5151 DECL_CHAIN (var) = *vars;
5152 *vars = var;
5153 }
5154 return new_parm;
5155 }
5156
5157 /* Return a copy of the function's static chain. */
5158 static tree
5159 copy_static_chain (tree static_chain, copy_body_data * id)
5160 {
5161 tree *chain_copy, *pvar;
5162
5163 chain_copy = &static_chain;
5164 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5165 {
5166 tree new_tree = remap_decl (*pvar, id);
5167 lang_hooks.dup_lang_specific_decl (new_tree);
5168 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5169 *pvar = new_tree;
5170 }
5171 return static_chain;
5172 }
5173
5174 /* Return true if the function is allowed to be versioned.
5175 This is a guard for the versioning functionality. */
5176
5177 bool
5178 tree_versionable_function_p (tree fndecl)
5179 {
5180 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5181 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5182 }
5183
5184 /* Delete all unreachable basic blocks and update callgraph.
5185 Doing so is somewhat nontrivial because we need to update all clones and
5186 remove inline functions that become unreachable. */
5187
5188 static bool
5189 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5190 {
5191 bool changed = false;
5192 basic_block b, next_bb;
5193
5194 find_unreachable_blocks ();
5195
5196 /* Delete all unreachable basic blocks. */
5197
5198 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5199 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5200 {
5201 next_bb = b->next_bb;
5202
5203 if (!(b->flags & BB_REACHABLE))
5204 {
5205 gimple_stmt_iterator bsi;
5206
5207 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5208 {
5209 struct cgraph_edge *e;
5210 struct cgraph_node *node;
5211
5212 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5213
5214 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5215 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5216 {
5217 if (!e->inline_failed)
5218 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5219 else
5220 cgraph_remove_edge (e);
5221 }
5222 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5223 && id->dst_node->clones)
5224 for (node = id->dst_node->clones; node != id->dst_node;)
5225 {
5226 node->remove_stmt_references (gsi_stmt (bsi));
5227 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5228 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5229 {
5230 if (!e->inline_failed)
5231 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5232 else
5233 cgraph_remove_edge (e);
5234 }
5235
5236 if (node->clones)
5237 node = node->clones;
5238 else if (node->next_sibling_clone)
5239 node = node->next_sibling_clone;
5240 else
5241 {
5242 while (node != id->dst_node && !node->next_sibling_clone)
5243 node = node->clone_of;
5244 if (node != id->dst_node)
5245 node = node->next_sibling_clone;
5246 }
5247 }
5248 }
5249 delete_basic_block (b);
5250 changed = true;
5251 }
5252 }
5253
5254 return changed;
5255 }
5256
5257 /* Update clone info after duplication. */
5258
5259 static void
5260 update_clone_info (copy_body_data * id)
5261 {
5262 struct cgraph_node *node;
5263 if (!id->dst_node->clones)
5264 return;
5265 for (node = id->dst_node->clones; node != id->dst_node;)
5266 {
5267 /* First update replace maps to match the new body. */
5268 if (node->clone.tree_map)
5269 {
5270 unsigned int i;
5271 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5272 {
5273 struct ipa_replace_map *replace_info;
5274 replace_info = (*node->clone.tree_map)[i];
5275 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5276 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5277 }
5278 }
5279 if (node->clones)
5280 node = node->clones;
5281 else if (node->next_sibling_clone)
5282 node = node->next_sibling_clone;
5283 else
5284 {
5285 while (node != id->dst_node && !node->next_sibling_clone)
5286 node = node->clone_of;
5287 if (node != id->dst_node)
5288 node = node->next_sibling_clone;
5289 }
5290 }
5291 }
5292
5293 /* Create a copy of a function's tree.
5294 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5295 of the original function and the new copied function
5296 respectively. In case we want to replace a DECL
5297 tree with another tree while duplicating the function's
5298 body, TREE_MAP represents the mapping between these
5299 trees. If UPDATE_CLONES is set, the call_stmt fields
5300 of edges of clones of the function will be updated.
5301
5302 If non-NULL, ARGS_TO_SKIP determines which function parameters to
5303 remove from the new version.
5304 If SKIP_RETURN is true, the new version will return void.
5305 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5306 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5307 */
5308 void
5309 tree_function_versioning (tree old_decl, tree new_decl,
5310 vec<ipa_replace_map *, va_gc> *tree_map,
5311 bool update_clones, bitmap args_to_skip,
5312 bool skip_return, bitmap blocks_to_copy,
5313 basic_block new_entry)
5314 {
5315 struct cgraph_node *old_version_node;
5316 struct cgraph_node *new_version_node;
5317 copy_body_data id;
5318 tree p;
5319 unsigned i;
5320 struct ipa_replace_map *replace_info;
5321 basic_block old_entry_block, bb;
5322 auto_vec<gimple, 10> init_stmts;
5323 tree vars = NULL_TREE;
5324
5325 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5326 && TREE_CODE (new_decl) == FUNCTION_DECL);
5327 DECL_POSSIBLY_INLINED (old_decl) = 1;
5328
5329 old_version_node = cgraph_node::get (old_decl);
5330 gcc_checking_assert (old_version_node);
5331 new_version_node = cgraph_node::get (new_decl);
5332 gcc_checking_assert (new_version_node);
5333
5334 /* Copy over debug args. */
5335 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5336 {
5337 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5338 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5339 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5340 old_debug_args = decl_debug_args_lookup (old_decl);
5341 if (old_debug_args)
5342 {
5343 new_debug_args = decl_debug_args_insert (new_decl);
5344 *new_debug_args = vec_safe_copy (*old_debug_args);
5345 }
5346 }
5347
5348 /* Output the inlining info for this abstract function, since it has been
5349 inlined. If we don't do this now, we can lose the information about the
5350 variables in the function when the blocks get blown away as soon as we
5351 remove the cgraph node. */
5352 (*debug_hooks->outlining_inline_function) (old_decl);
5353
5354 DECL_ARTIFICIAL (new_decl) = 1;
5355 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5356 if (DECL_ORIGIN (old_decl) == old_decl)
5357 old_version_node->used_as_abstract_origin = true;
5358 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5359
5360 /* Prepare the data structures for the tree copy. */
5361 memset (&id, 0, sizeof (id));
5362
5363 /* Prepare the set of statements that will need folding after the copy.  */
5364 id.statements_to_fold = new hash_set<gimple>;
5365
5366 id.decl_map = new hash_map<tree, tree>;
5367 id.debug_map = NULL;
5368 id.src_fn = old_decl;
5369 id.dst_fn = new_decl;
5370 id.src_node = old_version_node;
5371 id.dst_node = new_version_node;
5372 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5373 id.blocks_to_copy = blocks_to_copy;
5374
5375 id.copy_decl = copy_decl_no_change;
5376 id.transform_call_graph_edges
5377 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5378 id.transform_new_cfg = true;
5379 id.transform_return_to_modify = false;
5380 id.transform_parameter = false;
5381 id.transform_lang_insert_block = NULL;
5382
5383 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5384 (DECL_STRUCT_FUNCTION (old_decl));
5385 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5386 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5387 initialize_cfun (new_decl, old_decl,
5388 old_entry_block->count);
5389 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5390 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5391 = id.src_cfun->gimple_df->ipa_pta;
5392
5393 /* Copy the function's static chain. */
5394 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5395 if (p)
5396 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5397 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5398 &id);
5399
5400 /* If there's a tree_map, prepare for substitution. */
5401 if (tree_map)
5402 for (i = 0; i < tree_map->length (); i++)
5403 {
5404 gimple init;
5405 replace_info = (*tree_map)[i];
5406 if (replace_info->replace_p)
5407 {
5408 if (!replace_info->old_tree)
5409 {
5410 int i = replace_info->parm_num;
5411 tree parm;
5412 tree req_type;
5413
5414 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5415 i--;
5416 replace_info->old_tree = parm;
5417 req_type = TREE_TYPE (parm);
5418 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5419 {
5420 if (fold_convertible_p (req_type, replace_info->new_tree))
5421 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5422 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5423 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5424 else
5425 {
5426 if (dump_file)
5427 {
5428 fprintf (dump_file, " const ");
5429 print_generic_expr (dump_file, replace_info->new_tree, 0);
5430 fprintf (dump_file, " can't be converted to param ");
5431 print_generic_expr (dump_file, parm, 0);
5432 fprintf (dump_file, "\n");
5433 }
5434 replace_info->old_tree = NULL;
5435 }
5436 }
5437 }
5438 else
5439 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5440 if (replace_info->old_tree)
5441 {
5442 init = setup_one_parameter (&id, replace_info->old_tree,
5443 replace_info->new_tree, id.src_fn,
5444 NULL,
5445 &vars);
5446 if (init)
5447 init_stmts.safe_push (init);
5448 }
5449 }
5450 }
5451 /* Copy the function's arguments. */
5452 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5453 DECL_ARGUMENTS (new_decl) =
5454 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5455 args_to_skip, &vars);
5456
5457 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5458 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5459
5460 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5461
5462 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5463 /* Add local vars. */
5464 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5465
5466 if (DECL_RESULT (old_decl) == NULL_TREE)
5467 ;
5468 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5469 {
5470 DECL_RESULT (new_decl)
5471 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5472 RESULT_DECL, NULL_TREE, void_type_node);
5473 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5474 cfun->returns_struct = 0;
5475 cfun->returns_pcc_struct = 0;
5476 }
5477 else
5478 {
5479 tree old_name;
5480 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5481 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5482 if (gimple_in_ssa_p (id.src_cfun)
5483 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5484 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5485 {
5486 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5487 insert_decl_map (&id, old_name, new_name);
5488 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5489 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5490 }
5491 }
5492
5493 /* Set up the destination function's loop tree.  */
5494 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5495 {
5496 cfun->curr_properties &= ~PROP_loops;
5497 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5498 cfun->curr_properties |= PROP_loops;
5499 }
5500
5501 /* Copy the function's body.  */
5502 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5503 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5504 new_entry);
5505
5506 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5507 number_blocks (new_decl);
5508
5509 /* We want to create the BB unconditionally, so that the addition of
5510 debug stmts doesn't affect BB count, which may in the end cause
5511 codegen differences. */
5512 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5513 while (init_stmts.length ())
5514 insert_init_stmt (&id, bb, init_stmts.pop ());
5515 update_clone_info (&id);
5516
5517 /* Remap the nonlocal_goto_save_area, if any. */
5518 if (cfun->nonlocal_goto_save_area)
5519 {
5520 struct walk_stmt_info wi;
5521
5522 memset (&wi, 0, sizeof (wi));
5523 wi.info = &id;
5524 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5525 }
5526
5527 /* Clean up. */
5528 delete id.decl_map;
5529 if (id.debug_map)
5530 delete id.debug_map;
5531 free_dominance_info (CDI_DOMINATORS);
5532 free_dominance_info (CDI_POST_DOMINATORS);
5533
5534 fold_marked_statements (0, id.statements_to_fold);
5535 delete id.statements_to_fold;
5536 fold_cond_expr_cond ();
5537 delete_unreachable_blocks_update_callgraph (&id);
5538 if (id.dst_node->definition)
5539 cgraph_rebuild_references ();
5540 update_ssa (TODO_update_ssa);
5541
5542 /* After partial cloning we need to rescale frequencies so that they
5543 are within the proper range in the cloned function.  */
5544 if (new_entry)
5545 {
5546 struct cgraph_edge *e;
5547 rebuild_frequencies ();
5548
5549 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5550 for (e = new_version_node->callees; e; e = e->next_callee)
5551 {
5552 basic_block bb = gimple_bb (e->call_stmt);
5553 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5554 bb);
5555 e->count = bb->count;
5556 }
5557 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5558 {
5559 basic_block bb = gimple_bb (e->call_stmt);
5560 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5561 bb);
5562 e->count = bb->count;
5563 }
5564 }
5565
5566 free_dominance_info (CDI_DOMINATORS);
5567 free_dominance_info (CDI_POST_DOMINATORS);
5568
5569 gcc_assert (!id.debug_stmts.exists ());
5570 pop_cfun ();
5571 return;
5572 }
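
/* A hedged sketch of how a caller might populate one TREE_MAP entry for
   the substitution loop in tree_function_versioning above.  The field
   names are exactly the ones read there; the allocation call and the
   constant being substituted are assumptions about a hypothetical caller:

     struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;
     map->parm_num = 0;
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;
     vec_safe_push (tree_map, map);

   With such an entry, the clone produced by tree_function_versioning has
   the constant 42 substituted for its first parameter via
   setup_one_parameter.  */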
5573
5574 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5575 the callee and return the inlined body on success.  */
5576
5577 tree
5578 maybe_inline_call_in_expr (tree exp)
5579 {
5580 tree fn = get_callee_fndecl (exp);
5581
5582 /* We can only try to inline "const" functions. */
5583 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5584 {
5585 call_expr_arg_iterator iter;
5586 copy_body_data id;
5587 tree param, arg, t;
5588 hash_map<tree, tree> decl_map;
5589
5590 /* Remap the parameters. */
5591 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5592 param;
5593 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5594 decl_map.put (param, arg);
5595
5596 memset (&id, 0, sizeof (id));
5597 id.src_fn = fn;
5598 id.dst_fn = current_function_decl;
5599 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5600 id.decl_map = &decl_map;
5601
5602 id.copy_decl = copy_decl_no_change;
5603 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5604 id.transform_new_cfg = false;
5605 id.transform_return_to_modify = true;
5606 id.transform_parameter = true;
5607 id.transform_lang_insert_block = NULL;
5608
5609 /* Make sure not to unshare trees behind the front-end's back
5610 since front-end specific mechanisms may rely on sharing. */
5611 id.regimplify = false;
5612 id.do_not_unshare = true;
5613
5614 /* We're not inside any EH region. */
5615 id.eh_lp_nr = 0;
5616
5617 t = copy_tree_body (&id);
5618
5619 /* We can only return something suitable for use in a GENERIC
5620 expression tree. */
5621 if (TREE_CODE (t) == MODIFY_EXPR)
5622 return TREE_OPERAND (t, 1);
5623 }
5624
5625 return NULL_TREE;
5626 }
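
/* Illustrative only: for a callee the front end already knows to be
   "const", e.g.

     static int add1 (int x) __attribute__ ((const));
     static int add1 (int x) { return x + 1; }

   a GENERIC CALL_EXPR such as add1 (7) can be replaced by the routine
   above with the right-hand side of the copied return assignment, here
   7 + 1, the PARM_DECL X having been remapped to the argument 7 through
   the local decl_map.  */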
5627
5628 /* Duplicate a type, fields and all. */
5629
5630 tree
5631 build_duplicate_type (tree type)
5632 {
5633 struct copy_body_data id;
5634
5635 memset (&id, 0, sizeof (id));
5636 id.src_fn = current_function_decl;
5637 id.dst_fn = current_function_decl;
5638 id.src_cfun = cfun;
5639 id.decl_map = new hash_map<tree, tree>;
5640 id.debug_map = NULL;
5641 id.copy_decl = copy_decl_no_change;
5642
5643 type = remap_type_1 (type, &id);
5644
5645 delete id.decl_map;
5646 if (id.debug_map)
5647 delete id.debug_map;
5648
5649 TYPE_CANONICAL (type) = type;
5650
5651 return type;
5652 }
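
/* A small illustration of what build_duplicate_type produces, stated as an
   expectation rather than a reference to a particular caller: duplicating

     struct S { int a; };

   yields a new RECORD_TYPE with its own copy of the FIELD_DECL A, and with
   TYPE_CANONICAL pointing at the duplicate itself, so the copy is never
   considered canonically the same type as the original.  */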