gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "predict.h"
37 #include "vec.h"
38 #include "hash-set.h"
39 #include "machmode.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "dominance.h"
43 #include "cfg.h"
44 #include "cfganal.h"
45 #include "basic-block.h"
46 #include "tree-iterator.h"
47 #include "intl.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-fold.h"
51 #include "tree-eh.h"
52 #include "gimple-expr.h"
53 #include "is-a.h"
54 #include "gimple.h"
55 #include "gimplify.h"
56 #include "gimple-iterator.h"
57 #include "gimplify-me.h"
58 #include "gimple-walk.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-into-ssa.h"
66 #include "expr.h"
67 #include "tree-dfa.h"
68 #include "tree-ssa.h"
69 #include "tree-pretty-print.h"
70 #include "except.h"
71 #include "debug.h"
72 #include "ipa-prop.h"
73 #include "value-prof.h"
74 #include "tree-pass.h"
75 #include "target.h"
76 #include "cfgloop.h"
77 #include "builtins.h"
78
79 #include "rtl.h" /* FIXME: For asm_str_count. */
80
81 /* I'm not really happy about this, but we need to handle gimple and
82 non-gimple trees. */
83
84 /* Inlining, Cloning, Versioning, Parallelization
85
86 Inlining: a function body is duplicated, but the PARM_DECLs are
87 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
88 MODIFY_EXPRs that store to a dedicated returned-value variable.
89 The duplicated eh_region info of the copy will later be appended
90 to the info for the caller; the eh_region info in copied throwing
91 statements and RESX statements is adjusted accordingly.
92
93 Cloning: (only in C++) We have one body for a con/de/structor, and
94 multiple function decls, each with a unique parameter list.
95 Duplicate the body, using the given splay tree; some parameters
96 will become constants (like 0 or 1).
97
98 Versioning: a function body is duplicated and the result is a new
99 function, rather than being inserted into the blocks of an existing
100 function as with inlining. Some parameters will become constants.
101
102 Parallelization: a region of a function is duplicated, resulting in
103 a new function. Variables may be replaced with complex expressions
104 to enable shared variable semantics.
105
106 All of these will simultaneously look up any callgraph edges. If
107 we're going to inline the duplicated function body, and the given
108 function has some cloned callgraph nodes (one for each place this
109 function will be inlined), those callgraph edges will be duplicated.
110 If we're cloning the body, those callgraph edges will be
111 updated to point into the new body. (Note that the original
112 callgraph node and edge list will not be altered.)
113
114 See the CALL_EXPR handling case in copy_tree_body_r (). */
115
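/* Editor's illustration (a rough sketch, not part of the original
   sources): given approximately

     int callee (int x) { return x + 1; }
     int caller (void)  { return callee (2); }

   inlining callee into caller conceptually yields something like

     int caller (void)
     {
       int x.1 = 2;             <- PARM_DECL remapped to a VAR_DECL
       int retval.2 = x.1 + 1;  <- RETURN_EXPR became a MODIFY_EXPR
       return retval.2;         <- the caller reads the return variable
     }

   The exact temporaries and form depend on the optimization level.  */
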
116 /* To Do:
117
118 o In order to make inlining-on-trees work, we pessimized
119 function-local static constants. In particular, they are now
120 always output, even when not addressed. Fix this by treating
121 function-local static constants just like global static
122 constants; the back-end already knows not to output them if they
123 are not needed.
124
125 o Provide heuristics to clamp inlining of recursive template
126 calls? */
127
128
129 /* Weights that estimate_num_insns uses to estimate the size of the
130 produced code. */
131
132 eni_weights eni_size_weights;
133
134 /* Weights that estimate_num_insns uses to estimate the time necessary
135 to execute the produced code. */
136
137 eni_weights eni_time_weights;
138
139 /* Prototypes. */
140
141 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
142 static void remap_block (tree *, copy_body_data *);
143 static void copy_bind_expr (tree *, int *, copy_body_data *);
144 static void declare_inline_vars (tree, tree);
145 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
146 static void prepend_lexical_block (tree current_block, tree new_block);
147 static tree copy_decl_to_var (tree, copy_body_data *);
148 static tree copy_result_decl_to_var (tree, copy_body_data *);
149 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
150 static gimple remap_gimple_stmt (gimple, copy_body_data *);
151 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
152
153 /* Insert a tree->tree mapping for ID. Although the name suggests
154 that the trees should be variables, it is used for more than that. */
155
156 void
157 insert_decl_map (copy_body_data *id, tree key, tree value)
158 {
159 id->decl_map->put (key, value);
160
161 /* Always insert an identity map as well. If we see this same new
162 node again, we won't want to duplicate it a second time. */
163 if (key != value)
164 id->decl_map->put (value, value);
165 }
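
/* Editor's illustration (not part of the original sources): remapping a
   callee parameter P to a fresh local copy P.3 via
   insert_decl_map (id, P, P.3) records two entries,

     P   -> P.3
     P.3 -> P.3

   so that a later lookup of the already-remapped P.3 harmlessly yields
   P.3 again instead of producing yet another copy.  */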
166
167 /* Insert a tree->tree mapping for ID. This is only used for
168 variables. */
169
170 static void
171 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
172 {
173 if (!gimple_in_ssa_p (id->src_cfun))
174 return;
175
176 if (!MAY_HAVE_DEBUG_STMTS)
177 return;
178
179 if (!target_for_debug_bind (key))
180 return;
181
182 gcc_assert (TREE_CODE (key) == PARM_DECL);
183 gcc_assert (TREE_CODE (value) == VAR_DECL);
184
185 if (!id->debug_map)
186 id->debug_map = new hash_map<tree, tree>;
187
188 id->debug_map->put (key, value);
189 }
190
191 /* If nonzero, we're remapping the contents of inlined debug
192 statements. If negative, an error has occurred, such as a
193 reference to a variable that isn't available in the inlined
194 context. */
195 static int processing_debug_stmt = 0;
196
197 /* Construct new SSA name for old NAME. ID is the inline context. */
198
199 static tree
200 remap_ssa_name (tree name, copy_body_data *id)
201 {
202 tree new_tree, var;
203 tree *n;
204
205 gcc_assert (TREE_CODE (name) == SSA_NAME);
206
207 n = id->decl_map->get (name);
208 if (n)
209 return unshare_expr (*n);
210
211 if (processing_debug_stmt)
212 {
213 if (SSA_NAME_IS_DEFAULT_DEF (name)
214 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
215 && id->entry_bb == NULL
216 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
217 {
218 tree vexpr = make_node (DEBUG_EXPR_DECL);
219 gimple def_temp;
220 gimple_stmt_iterator gsi;
221 tree val = SSA_NAME_VAR (name);
222
223 n = id->decl_map->get (val);
224 if (n != NULL)
225 val = *n;
226 if (TREE_CODE (val) != PARM_DECL)
227 {
228 processing_debug_stmt = -1;
229 return name;
230 }
231 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
232 DECL_ARTIFICIAL (vexpr) = 1;
233 TREE_TYPE (vexpr) = TREE_TYPE (name);
234 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
235 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
236 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
237 return vexpr;
238 }
239
240 processing_debug_stmt = -1;
241 return name;
242 }
243
244 /* Remap anonymous SSA names or SSA names of anonymous decls. */
245 var = SSA_NAME_VAR (name);
246 if (!var
247 || (!SSA_NAME_IS_DEFAULT_DEF (name)
248 && TREE_CODE (var) == VAR_DECL
249 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
250 && DECL_ARTIFICIAL (var)
251 && DECL_IGNORED_P (var)
252 && !DECL_NAME (var)))
253 {
254 struct ptr_info_def *pi;
255 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
256 if (!var && SSA_NAME_IDENTIFIER (name))
257 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
258 insert_decl_map (id, name, new_tree);
259 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
260 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
261 /* At least IPA points-to info can be directly transferred. */
262 if (id->src_cfun->gimple_df
263 && id->src_cfun->gimple_df->ipa_pta
264 && (pi = SSA_NAME_PTR_INFO (name))
265 && !pi->pt.anything)
266 {
267 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
268 new_pi->pt = pi->pt;
269 }
270 return new_tree;
271 }
272
273 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
274 that in copy_bb. */
275 new_tree = remap_decl (var, id);
276
277 /* We might've substituted a constant or another SSA_NAME for
278 the variable.
279
280 Replace the SSA name representing the RESULT_DECL by the variable
281 during inlining: this saves us from the need to introduce a PHI node
282 in case the return value is only partly initialized. */
283 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
284 && (!SSA_NAME_VAR (name)
285 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
286 || !id->transform_return_to_modify))
287 {
288 struct ptr_info_def *pi;
289 new_tree = make_ssa_name (new_tree, NULL);
290 insert_decl_map (id, name, new_tree);
291 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
292 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
293 /* At least IPA points-to info can be directly transferred. */
294 if (id->src_cfun->gimple_df
295 && id->src_cfun->gimple_df->ipa_pta
296 && (pi = SSA_NAME_PTR_INFO (name))
297 && !pi->pt.anything)
298 {
299 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
300 new_pi->pt = pi->pt;
301 }
302 if (SSA_NAME_IS_DEFAULT_DEF (name))
303 {
304 /* By inlining a function having an uninitialized variable, we might
305 extend its lifetime (the variable might get reused). This causes an
306 ICE if we end up extending the lifetime of an SSA name across an
307 abnormal edge, and it also increases register pressure.
308
309 We simply initialize all uninitialized vars with 0, except for the
310 case we are inlining into the very first BB. We could avoid this
311 for all BBs that are not inside strongly connected regions of the
312 CFG, but that is expensive to test. */
313 if (id->entry_bb
314 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
315 && (!SSA_NAME_VAR (name)
316 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
317 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
318 0)->dest
319 || EDGE_COUNT (id->entry_bb->preds) != 1))
320 {
321 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
322 gimple init_stmt;
323 tree zero = build_zero_cst (TREE_TYPE (new_tree));
324
325 init_stmt = gimple_build_assign (new_tree, zero);
326 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
327 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
328 }
329 else
330 {
331 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
332 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
333 }
334 }
335 }
336 else
337 insert_decl_map (id, name, new_tree);
338 return new_tree;
339 }
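
/* Editor's illustration (a rough sketch, not part of the original
   sources): inlining a callee that uses an uninitialized local, e.g.

     int callee (void) { int u; return u; }

   remaps the default-definition SSA name u_1(D) to a new SSA name of
   the copied variable.  If that name additionally occurs in an abnormal
   PHI and the body is not being inlined into the caller's very first
   block, the code above emits an explicit "u.4_7 = 0;" at the end of
   id->entry_bb rather than let a default definition live across an
   abnormal edge.  */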
340
341 /* Remap DECL during the copying of the BLOCK tree for the function. */
342
343 tree
344 remap_decl (tree decl, copy_body_data *id)
345 {
346 tree *n;
347
348 /* We only remap local variables in the current function. */
349
350 /* See if we have remapped this declaration. */
351
352 n = id->decl_map->get (decl);
353
354 if (!n && processing_debug_stmt)
355 {
356 processing_debug_stmt = -1;
357 return decl;
358 }
359
360 /* If we didn't already have an equivalent for this declaration,
361 create one now. */
362 if (!n)
363 {
364 /* Make a copy of the variable or label. */
365 tree t = id->copy_decl (decl, id);
366
367 /* Remember it, so that if we encounter this local entity again
368 we can reuse this copy. Do this early because remap_type may
369 need this decl for TYPE_STUB_DECL. */
370 insert_decl_map (id, decl, t);
371
372 if (!DECL_P (t))
373 return t;
374
375 /* Remap types, if necessary. */
376 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
377 if (TREE_CODE (t) == TYPE_DECL)
378 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
379
380 /* Remap sizes as necessary. */
381 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
382 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
383
384 /* If fields, do likewise for offset and qualifier. */
385 if (TREE_CODE (t) == FIELD_DECL)
386 {
387 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
388 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
389 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
390 }
391
392 return t;
393 }
394
395 if (id->do_not_unshare)
396 return *n;
397 else
398 return unshare_expr (*n);
399 }
400
401 static tree
402 remap_type_1 (tree type, copy_body_data *id)
403 {
404 tree new_tree, t;
405
406 /* We do need a copy. Build and register it now. If this is a pointer or
407 reference type, remap the designated type and make a new pointer or
408 reference type. */
409 if (TREE_CODE (type) == POINTER_TYPE)
410 {
411 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
412 TYPE_MODE (type),
413 TYPE_REF_CAN_ALIAS_ALL (type));
414 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
415 new_tree = build_type_attribute_qual_variant (new_tree,
416 TYPE_ATTRIBUTES (type),
417 TYPE_QUALS (type));
418 insert_decl_map (id, type, new_tree);
419 return new_tree;
420 }
421 else if (TREE_CODE (type) == REFERENCE_TYPE)
422 {
423 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
424 TYPE_MODE (type),
425 TYPE_REF_CAN_ALIAS_ALL (type));
426 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
427 new_tree = build_type_attribute_qual_variant (new_tree,
428 TYPE_ATTRIBUTES (type),
429 TYPE_QUALS (type));
430 insert_decl_map (id, type, new_tree);
431 return new_tree;
432 }
433 else
434 new_tree = copy_node (type);
435
436 insert_decl_map (id, type, new_tree);
437
438 /* This is a new type, not a copy of an old type. Need to reassociate
439 variants. We can handle everything except the main variant lazily. */
440 t = TYPE_MAIN_VARIANT (type);
441 if (type != t)
442 {
443 t = remap_type (t, id);
444 TYPE_MAIN_VARIANT (new_tree) = t;
445 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
446 TYPE_NEXT_VARIANT (t) = new_tree;
447 }
448 else
449 {
450 TYPE_MAIN_VARIANT (new_tree) = new_tree;
451 TYPE_NEXT_VARIANT (new_tree) = NULL;
452 }
453
454 if (TYPE_STUB_DECL (type))
455 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
456
457 /* Lazily create pointer and reference types. */
458 TYPE_POINTER_TO (new_tree) = NULL;
459 TYPE_REFERENCE_TO (new_tree) = NULL;
460
461 /* Copy all types that may contain references to local variables; be sure to
462 preserve sharing between the type and its main variant when possible. */
463 switch (TREE_CODE (new_tree))
464 {
465 case INTEGER_TYPE:
466 case REAL_TYPE:
467 case FIXED_POINT_TYPE:
468 case ENUMERAL_TYPE:
469 case BOOLEAN_TYPE:
470 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
471 {
472 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
473 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
474
475 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
476 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
477 }
478 else
479 {
480 t = TYPE_MIN_VALUE (new_tree);
481 if (t && TREE_CODE (t) != INTEGER_CST)
482 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
483
484 t = TYPE_MAX_VALUE (new_tree);
485 if (t && TREE_CODE (t) != INTEGER_CST)
486 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
487 }
488 return new_tree;
489
490 case FUNCTION_TYPE:
491 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
492 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
493 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
494 else
495 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
496 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
497 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
498 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
499 else
500 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
501 return new_tree;
502
503 case ARRAY_TYPE:
504 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
505 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
506 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
507 else
508 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
509
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
511 {
512 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
513 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
514 }
515 else
516 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
517 break;
518
519 case RECORD_TYPE:
520 case UNION_TYPE:
521 case QUAL_UNION_TYPE:
522 if (TYPE_MAIN_VARIANT (type) != type
523 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
524 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
525 else
526 {
527 tree f, nf = NULL;
528
529 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
530 {
531 t = remap_decl (f, id);
532 DECL_CONTEXT (t) = new_tree;
533 DECL_CHAIN (t) = nf;
534 nf = t;
535 }
536 TYPE_FIELDS (new_tree) = nreverse (nf);
537 }
538 break;
539
540 case OFFSET_TYPE:
541 default:
542 /* Shouldn't have been thought variable sized. */
543 gcc_unreachable ();
544 }
545
546 /* All variants of the type share the same size, so use the already remapped data. */
547 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
548 {
549 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
550 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
551
552 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
553 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
554 }
555 else
556 {
557 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
558 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
559 }
560
561 return new_tree;
562 }
563
564 tree
565 remap_type (tree type, copy_body_data *id)
566 {
567 tree *node;
568 tree tmp;
569
570 if (type == NULL)
571 return type;
572
573 /* See if we have remapped this type. */
574 node = id->decl_map->get (type);
575 if (node)
576 return *node;
577
578 /* The type only needs remapping if it's variably modified. */
579 if (! variably_modified_type_p (type, id->src_fn))
580 {
581 insert_decl_map (id, type, type);
582 return type;
583 }
584
585 id->remapping_type_depth++;
586 tmp = remap_type_1 (type, id);
587 id->remapping_type_depth--;
588
589 return tmp;
590 }
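
/* Editor's illustration (not part of the original sources): for a
   variable-length array in the callee, e.g.

     void callee (int n) { char buf[n]; ... }

   the array type's domain refers to N (through a SAVE_EXPR or SSA
   name), so variably_modified_type_p is true and the type must be
   remapped together with the declarations it references.  A plain type
   such as "int" is not variably modified and is mapped to itself.  */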
591
592 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
593
594 static bool
595 can_be_nonlocal (tree decl, copy_body_data *id)
596 {
597 /* We cannot duplicate function decls. */
598 if (TREE_CODE (decl) == FUNCTION_DECL)
599 return true;
600
601 /* Local static vars must be non-local or we get multiple declaration
602 problems. */
603 if (TREE_CODE (decl) == VAR_DECL
604 && !auto_var_in_fn_p (decl, id->src_fn))
605 return true;
606
607 return false;
608 }
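
/* Editor's illustration (not part of the original sources): a
   function-local static such as

     int counter (void) { static int n; return ++n; }

   is not an auto variable of the source function, so can_be_nonlocal
   returns true; remap_decls below then keeps referring to the single
   original DECL (recording it in BLOCK_NONLOCALIZED_VARS) instead of
   duplicating it for every inlined copy.  */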
609
610 static tree
611 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
612 copy_body_data *id)
613 {
614 tree old_var;
615 tree new_decls = NULL_TREE;
616
617 /* Remap its variables. */
618 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
619 {
620 tree new_var;
621
622 if (can_be_nonlocal (old_var, id))
623 {
624 /* We need to add this variable to the local decls as otherwise
625 nothing else will do so. */
626 if (TREE_CODE (old_var) == VAR_DECL
627 && ! DECL_EXTERNAL (old_var))
628 add_local_decl (cfun, old_var);
629 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
630 && !DECL_IGNORED_P (old_var)
631 && nonlocalized_list)
632 vec_safe_push (*nonlocalized_list, old_var);
633 continue;
634 }
635
636 /* Remap the variable. */
637 new_var = remap_decl (old_var, id);
638
639 /* If we didn't remap this variable, we can't mess with its
640 TREE_CHAIN. If we remapped this variable to the return slot, it's
641 already declared somewhere else, so don't declare it here. */
642
643 if (new_var == id->retvar)
644 ;
645 else if (!new_var)
646 {
647 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
648 && !DECL_IGNORED_P (old_var)
649 && nonlocalized_list)
650 vec_safe_push (*nonlocalized_list, old_var);
651 }
652 else
653 {
654 gcc_assert (DECL_P (new_var));
655 DECL_CHAIN (new_var) = new_decls;
656 new_decls = new_var;
657
658 /* Also copy value-expressions. */
659 if (TREE_CODE (new_var) == VAR_DECL
660 && DECL_HAS_VALUE_EXPR_P (new_var))
661 {
662 tree tem = DECL_VALUE_EXPR (new_var);
663 bool old_regimplify = id->regimplify;
664 id->remapping_type_depth++;
665 walk_tree (&tem, copy_tree_body_r, id, NULL);
666 id->remapping_type_depth--;
667 id->regimplify = old_regimplify;
668 SET_DECL_VALUE_EXPR (new_var, tem);
669 }
670 }
671 }
672
673 return nreverse (new_decls);
674 }
675
676 /* Copy the BLOCK to contain remapped versions of the variables
677 therein. And hook the new block into the block-tree. */
678
679 static void
680 remap_block (tree *block, copy_body_data *id)
681 {
682 tree old_block;
683 tree new_block;
684
685 /* Make the new block. */
686 old_block = *block;
687 new_block = make_node (BLOCK);
688 TREE_USED (new_block) = TREE_USED (old_block);
689 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
690 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
691 BLOCK_NONLOCALIZED_VARS (new_block)
692 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
693 *block = new_block;
694
695 /* Remap its variables. */
696 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
697 &BLOCK_NONLOCALIZED_VARS (new_block),
698 id);
699
700 if (id->transform_lang_insert_block)
701 id->transform_lang_insert_block (new_block);
702
703 /* Remember the remapped block. */
704 insert_decl_map (id, old_block, new_block);
705 }
706
707 /* Copy the whole block tree and root it in id->block. */
708 static tree
709 remap_blocks (tree block, copy_body_data *id)
710 {
711 tree t;
712 tree new_tree = block;
713
714 if (!block)
715 return NULL;
716
717 remap_block (&new_tree, id);
718 gcc_assert (new_tree != block);
719 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
720 prepend_lexical_block (new_tree, remap_blocks (t, id));
721 /* Blocks are in arbitrary order, but make things slightly prettier and do
722 not swap order when producing a copy. */
723 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
724 return new_tree;
725 }
726
727 /* Remap the block tree rooted at BLOCK to nothing. */
728 static void
729 remap_blocks_to_null (tree block, copy_body_data *id)
730 {
731 tree t;
732 insert_decl_map (id, block, NULL_TREE);
733 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
734 remap_blocks_to_null (t, id);
735 }
736
737 static void
738 copy_statement_list (tree *tp)
739 {
740 tree_stmt_iterator oi, ni;
741 tree new_tree;
742
743 new_tree = alloc_stmt_list ();
744 ni = tsi_start (new_tree);
745 oi = tsi_start (*tp);
746 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
747 *tp = new_tree;
748
749 for (; !tsi_end_p (oi); tsi_next (&oi))
750 {
751 tree stmt = tsi_stmt (oi);
752 if (TREE_CODE (stmt) == STATEMENT_LIST)
753 /* This copy is not redundant; tsi_link_after will smash this
754 STATEMENT_LIST into the end of the one we're building, and we
755 don't want to do that with the original. */
756 copy_statement_list (&stmt);
757 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
758 }
759 }
760
761 static void
762 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
763 {
764 tree block = BIND_EXPR_BLOCK (*tp);
765 /* Copy (and replace) the statement. */
766 copy_tree_r (tp, walk_subtrees, NULL);
767 if (block)
768 {
769 remap_block (&block, id);
770 BIND_EXPR_BLOCK (*tp) = block;
771 }
772
773 if (BIND_EXPR_VARS (*tp))
774 /* This will remap a lot of the same decls again, but this should be
775 harmless. */
776 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
777 }
778
779
780 /* Create a new gimple_seq by remapping all the statements in BODY
781 using the inlining information in ID. */
782
783 static gimple_seq
784 remap_gimple_seq (gimple_seq body, copy_body_data *id)
785 {
786 gimple_stmt_iterator si;
787 gimple_seq new_body = NULL;
788
789 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
790 {
791 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
792 gimple_seq_add_stmt (&new_body, new_stmt);
793 }
794
795 return new_body;
796 }
797
798
799 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
800 block using the mapping information in ID. */
801
802 static gimple
803 copy_gimple_bind (gimple stmt, copy_body_data *id)
804 {
805 gimple new_bind;
806 tree new_block, new_vars;
807 gimple_seq body, new_body;
808
809 /* Copy the statement. Note that we purposely don't use copy_stmt
810 here because we need to remap statements as we copy. */
811 body = gimple_bind_body (stmt);
812 new_body = remap_gimple_seq (body, id);
813
814 new_block = gimple_bind_block (stmt);
815 if (new_block)
816 remap_block (&new_block, id);
817
818 /* This will remap a lot of the same decls again, but this should be
819 harmless. */
820 new_vars = gimple_bind_vars (stmt);
821 if (new_vars)
822 new_vars = remap_decls (new_vars, NULL, id);
823
824 new_bind = gimple_build_bind (new_vars, new_body, new_block);
825
826 return new_bind;
827 }
828
829 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
830
831 static bool
832 is_parm (tree decl)
833 {
834 if (TREE_CODE (decl) == SSA_NAME)
835 {
836 decl = SSA_NAME_VAR (decl);
837 if (!decl)
838 return false;
839 }
840
841 return (TREE_CODE (decl) == PARM_DECL);
842 }
843
844 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
845 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
846 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
847 recursing into the child nodes of *TP. */
848
849 static tree
850 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
851 {
852 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
853 copy_body_data *id = (copy_body_data *) wi_p->info;
854 tree fn = id->src_fn;
855
856 if (TREE_CODE (*tp) == SSA_NAME)
857 {
858 *tp = remap_ssa_name (*tp, id);
859 *walk_subtrees = 0;
860 return NULL;
861 }
862 else if (auto_var_in_fn_p (*tp, fn))
863 {
864 /* Local variables and labels need to be replaced by equivalent
865 variables. We don't want to copy static variables; there's
866 only one of those, no matter how many times we inline the
867 containing function. Similarly for globals from an outer
868 function. */
869 tree new_decl;
870
871 /* Remap the declaration. */
872 new_decl = remap_decl (*tp, id);
873 gcc_assert (new_decl);
874 /* Replace this variable with the copy. */
875 STRIP_TYPE_NOPS (new_decl);
876 /* ??? The C++ frontend uses void * pointer zero to initialize
877 any other type. This confuses the middle-end type verification.
878 As cloned bodies do not go through gimplification again the fixup
879 there doesn't trigger. */
880 if (TREE_CODE (new_decl) == INTEGER_CST
881 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
882 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
883 *tp = new_decl;
884 *walk_subtrees = 0;
885 }
886 else if (TREE_CODE (*tp) == STATEMENT_LIST)
887 gcc_unreachable ();
888 else if (TREE_CODE (*tp) == SAVE_EXPR)
889 gcc_unreachable ();
890 else if (TREE_CODE (*tp) == LABEL_DECL
891 && (!DECL_CONTEXT (*tp)
892 || decl_function_context (*tp) == id->src_fn))
893 /* These may need to be remapped for EH handling. */
894 *tp = remap_decl (*tp, id);
895 else if (TREE_CODE (*tp) == FIELD_DECL)
896 {
897 /* If the enclosing record type is variably_modified_type_p, the field
898 has already been remapped. Otherwise, it need not be. */
899 tree *n = id->decl_map->get (*tp);
900 if (n)
901 *tp = *n;
902 *walk_subtrees = 0;
903 }
904 else if (TYPE_P (*tp))
905 /* Types may need remapping as well. */
906 *tp = remap_type (*tp, id);
907 else if (CONSTANT_CLASS_P (*tp))
908 {
909 /* If this is a constant, we have to copy the node iff the type
910 will be remapped. copy_tree_r will not copy a constant. */
911 tree new_type = remap_type (TREE_TYPE (*tp), id);
912
913 if (new_type == TREE_TYPE (*tp))
914 *walk_subtrees = 0;
915
916 else if (TREE_CODE (*tp) == INTEGER_CST)
917 *tp = wide_int_to_tree (new_type, *tp);
918 else
919 {
920 *tp = copy_node (*tp);
921 TREE_TYPE (*tp) = new_type;
922 }
923 }
924 else
925 {
926 /* Otherwise, just copy the node. Note that copy_tree_r already
927 knows not to copy VAR_DECLs, etc., so this is safe. */
928
929 if (TREE_CODE (*tp) == MEM_REF)
930 {
931 /* We need to re-canonicalize MEM_REFs from inline substitutions
932 that can happen when a pointer argument is an ADDR_EXPR.
933 Recurse here manually to allow that. */
934 tree ptr = TREE_OPERAND (*tp, 0);
935 tree type = remap_type (TREE_TYPE (*tp), id);
936 tree old = *tp;
937 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
938 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
939 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
940 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
941 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
942 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
943 remapped a parameter as the property might be valid only
944 for the parameter itself. */
945 if (TREE_THIS_NOTRAP (old)
946 && (!is_parm (TREE_OPERAND (old, 0))
947 || (!id->transform_parameter && is_parm (ptr))))
948 TREE_THIS_NOTRAP (*tp) = 1;
949 *walk_subtrees = 0;
950 return NULL;
951 }
952
953 /* Here is the "usual case". Copy this tree node, and then
954 tweak some special cases. */
955 copy_tree_r (tp, walk_subtrees, NULL);
956
957 if (TREE_CODE (*tp) != OMP_CLAUSE)
958 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
959
960 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
961 {
962 /* The copied TARGET_EXPR has never been expanded, even if the
963 original node was expanded already. */
964 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
965 TREE_OPERAND (*tp, 3) = NULL_TREE;
966 }
967 else if (TREE_CODE (*tp) == ADDR_EXPR)
968 {
969 /* Variable substitution need not be simple. In particular,
970 the MEM_REF substitution above. Make sure that
971 TREE_CONSTANT and friends are up-to-date. */
972 int invariant = is_gimple_min_invariant (*tp);
973 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
974 recompute_tree_invariant_for_addr_expr (*tp);
975
976 /* If this used to be invariant, but is not any longer,
977 then regimplification is probably needed. */
978 if (invariant && !is_gimple_min_invariant (*tp))
979 id->regimplify = true;
980
981 *walk_subtrees = 0;
982 }
983 }
984
985 /* Update the TREE_BLOCK for the cloned expr. */
986 if (EXPR_P (*tp))
987 {
988 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
989 tree old_block = TREE_BLOCK (*tp);
990 if (old_block)
991 {
992 tree *n;
993 n = id->decl_map->get (TREE_BLOCK (*tp));
994 if (n)
995 new_block = *n;
996 }
997 TREE_SET_BLOCK (*tp, new_block);
998 }
999
1000 /* Keep iterating. */
1001 return NULL_TREE;
1002 }
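
/* Editor's illustration (a rough sketch, not part of the original
   sources): if the caller passes the address of a local, say
   callee (&x), and the pointer parameter was substituted directly by
   that address, a dereference MEM[(int *)p_1] in the callee body
   becomes MEM[&x] after remapping; the fold_build2 call above then
   re-canonicalizes it into a plain reference to x instead of keeping a
   pointless indirection through &x.  */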
1003
1004
1005 /* Called from copy_body_id via walk_tree. DATA is really a
1006 `copy_body_data *'. */
1007
1008 tree
1009 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1010 {
1011 copy_body_data *id = (copy_body_data *) data;
1012 tree fn = id->src_fn;
1013 tree new_block;
1014
1015 /* Begin by recognizing trees that we'll completely rewrite for the
1016 inlining context. Our output for these trees is completely
1017 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1018 into an edge). Further down, we'll handle trees that get
1019 duplicated and/or tweaked. */
1020
1021 /* When requested, RETURN_EXPRs should be transformed to just the
1022 contained MODIFY_EXPR. The branch semantics of the return will
1023 be handled elsewhere by manipulating the CFG rather than a statement. */
1024 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1025 {
1026 tree assignment = TREE_OPERAND (*tp, 0);
1027
1028 /* If we're returning something, just turn that into an
1029 assignment into the equivalent of the original RESULT_DECL.
1030 If the "assignment" is just the result decl, the result
1031 decl has already been set (e.g. a recent "foo (&result_decl,
1032 ...)"); just toss the entire RETURN_EXPR. */
1033 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1034 {
1035 /* Replace the RETURN_EXPR with (a copy of) the
1036 MODIFY_EXPR hanging underneath. */
1037 *tp = copy_node (assignment);
1038 }
1039 else /* Else the RETURN_EXPR returns no value. */
1040 {
1041 *tp = NULL;
1042 return (tree) (void *)1;
1043 }
1044 }
1045 else if (TREE_CODE (*tp) == SSA_NAME)
1046 {
1047 *tp = remap_ssa_name (*tp, id);
1048 *walk_subtrees = 0;
1049 return NULL;
1050 }
1051
1052 /* Local variables and labels need to be replaced by equivalent
1053 variables. We don't want to copy static variables; there's only
1054 one of those, no matter how many times we inline the containing
1055 function. Similarly for globals from an outer function. */
1056 else if (auto_var_in_fn_p (*tp, fn))
1057 {
1058 tree new_decl;
1059
1060 /* Remap the declaration. */
1061 new_decl = remap_decl (*tp, id);
1062 gcc_assert (new_decl);
1063 /* Replace this variable with the copy. */
1064 STRIP_TYPE_NOPS (new_decl);
1065 *tp = new_decl;
1066 *walk_subtrees = 0;
1067 }
1068 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1069 copy_statement_list (tp);
1070 else if (TREE_CODE (*tp) == SAVE_EXPR
1071 || TREE_CODE (*tp) == TARGET_EXPR)
1072 remap_save_expr (tp, id->decl_map, walk_subtrees);
1073 else if (TREE_CODE (*tp) == LABEL_DECL
1074 && (! DECL_CONTEXT (*tp)
1075 || decl_function_context (*tp) == id->src_fn))
1076 /* These may need to be remapped for EH handling. */
1077 *tp = remap_decl (*tp, id);
1078 else if (TREE_CODE (*tp) == BIND_EXPR)
1079 copy_bind_expr (tp, walk_subtrees, id);
1080 /* Types may need remapping as well. */
1081 else if (TYPE_P (*tp))
1082 *tp = remap_type (*tp, id);
1083
1084 /* If this is a constant, we have to copy the node iff the type will be
1085 remapped. copy_tree_r will not copy a constant. */
1086 else if (CONSTANT_CLASS_P (*tp))
1087 {
1088 tree new_type = remap_type (TREE_TYPE (*tp), id);
1089
1090 if (new_type == TREE_TYPE (*tp))
1091 *walk_subtrees = 0;
1092
1093 else if (TREE_CODE (*tp) == INTEGER_CST)
1094 *tp = wide_int_to_tree (new_type, *tp);
1095 else
1096 {
1097 *tp = copy_node (*tp);
1098 TREE_TYPE (*tp) = new_type;
1099 }
1100 }
1101
1102 /* Otherwise, just copy the node. Note that copy_tree_r already
1103 knows not to copy VAR_DECLs, etc., so this is safe. */
1104 else
1105 {
1106 /* Here we handle trees that are not completely rewritten.
1107 First we detect some inlining-induced bogosities for
1108 discarding. */
1109 if (TREE_CODE (*tp) == MODIFY_EXPR
1110 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1111 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1112 {
1113 /* Some assignments VAR = VAR; don't generate any rtl code
1114 and thus don't count as variable modification. Avoid
1115 keeping bogosities like 0 = 0. */
1116 tree decl = TREE_OPERAND (*tp, 0), value;
1117 tree *n;
1118
1119 n = id->decl_map->get (decl);
1120 if (n)
1121 {
1122 value = *n;
1123 STRIP_TYPE_NOPS (value);
1124 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1125 {
1126 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1127 return copy_tree_body_r (tp, walk_subtrees, data);
1128 }
1129 }
1130 }
1131 else if (TREE_CODE (*tp) == INDIRECT_REF)
1132 {
1133 /* Get rid of *& from inline substitutions that can happen when a
1134 pointer argument is an ADDR_EXPR. */
1135 tree decl = TREE_OPERAND (*tp, 0);
1136 tree *n = id->decl_map->get (decl);
1137 if (n)
1138 {
1139 /* If we happen to get an ADDR_EXPR in n->value, strip
1140 it manually here as we'll eventually get ADDR_EXPRs
1141 which lie about their types pointed to. In this case
1142 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1143 but we absolutely rely on that. As fold_indirect_ref
1144 does other useful transformations, try that first, though. */
1145 tree type = TREE_TYPE (*tp);
1146 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1147 tree old = *tp;
1148 *tp = gimple_fold_indirect_ref (ptr);
1149 if (! *tp)
1150 {
1151 if (TREE_CODE (ptr) == ADDR_EXPR)
1152 {
1153 *tp
1154 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1155 /* ??? We should either assert here or build
1156 a VIEW_CONVERT_EXPR instead of blindly leaking
1157 incompatible types to our IL. */
1158 if (! *tp)
1159 *tp = TREE_OPERAND (ptr, 0);
1160 }
1161 else
1162 {
1163 *tp = build1 (INDIRECT_REF, type, ptr);
1164 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1165 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1166 TREE_READONLY (*tp) = TREE_READONLY (old);
1167 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1168 have remapped a parameter as the property might be
1169 valid only for the parameter itself. */
1170 if (TREE_THIS_NOTRAP (old)
1171 && (!is_parm (TREE_OPERAND (old, 0))
1172 || (!id->transform_parameter && is_parm (ptr))))
1173 TREE_THIS_NOTRAP (*tp) = 1;
1174 }
1175 }
1176 *walk_subtrees = 0;
1177 return NULL;
1178 }
1179 }
1180 else if (TREE_CODE (*tp) == MEM_REF)
1181 {
1182 /* We need to re-canonicalize MEM_REFs from inline substitutions
1183 that can happen when a pointer argument is an ADDR_EXPR.
1184 Recurse here manually to allow that. */
1185 tree ptr = TREE_OPERAND (*tp, 0);
1186 tree type = remap_type (TREE_TYPE (*tp), id);
1187 tree old = *tp;
1188 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1189 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1190 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1191 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1192 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1193 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1194 remapped a parameter as the property might be valid only
1195 for the parameter itself. */
1196 if (TREE_THIS_NOTRAP (old)
1197 && (!is_parm (TREE_OPERAND (old, 0))
1198 || (!id->transform_parameter && is_parm (ptr))))
1199 TREE_THIS_NOTRAP (*tp) = 1;
1200 *walk_subtrees = 0;
1201 return NULL;
1202 }
1203
1204 /* Here is the "usual case". Copy this tree node, and then
1205 tweak some special cases. */
1206 copy_tree_r (tp, walk_subtrees, NULL);
1207
1208 /* If EXPR has a block defined, map it to the newly constructed block.
1209 When inlining, we want EXPRs without a block to appear in the block
1210 of the function call if we are not remapping a type. */
1211 if (EXPR_P (*tp))
1212 {
1213 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1214 if (TREE_BLOCK (*tp))
1215 {
1216 tree *n;
1217 n = id->decl_map->get (TREE_BLOCK (*tp));
1218 if (n)
1219 new_block = *n;
1220 }
1221 TREE_SET_BLOCK (*tp, new_block);
1222 }
1223
1224 if (TREE_CODE (*tp) != OMP_CLAUSE)
1225 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1226
1227 /* The copied TARGET_EXPR has never been expanded, even if the
1228 original node was expanded already. */
1229 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1230 {
1231 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1232 TREE_OPERAND (*tp, 3) = NULL_TREE;
1233 }
1234
1235 /* Variable substitution need not be simple. In particular, the
1236 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1237 and friends are up-to-date. */
1238 else if (TREE_CODE (*tp) == ADDR_EXPR)
1239 {
1240 int invariant = is_gimple_min_invariant (*tp);
1241 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1242
1243 /* Handle the case where we substituted an INDIRECT_REF
1244 into the operand of the ADDR_EXPR. */
1245 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1246 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1247 else
1248 recompute_tree_invariant_for_addr_expr (*tp);
1249
1250 /* If this used to be invariant, but is not any longer,
1251 then regimplification is probably needed. */
1252 if (invariant && !is_gimple_min_invariant (*tp))
1253 id->regimplify = true;
1254
1255 *walk_subtrees = 0;
1256 }
1257 }
1258
1259 /* Keep iterating. */
1260 return NULL_TREE;
1261 }
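
/* Editor's illustration (a rough sketch, not part of the original
   sources): when a GENERIC body is copied with a pointer parameter P
   mapped to an ADDR_EXPR such as &y, the INDIRECT_REF handling above
   turns the substituted "*&y" back into plain "y" (via
   gimple_fold_indirect_ref or fold_indirect_ref_1) rather than leave a
   needless *& pair in the copied tree.  */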
1262
1263 /* Helper for remap_gimple_stmt. Given an EH region number for the
1264 source function, map that to the duplicate EH region number in
1265 the destination function. */
1266
1267 static int
1268 remap_eh_region_nr (int old_nr, copy_body_data *id)
1269 {
1270 eh_region old_r, new_r;
1271
1272 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1273 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1274
1275 return new_r->index;
1276 }
1277
1278 /* Similar, but operate on INTEGER_CSTs. */
1279
1280 static tree
1281 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1282 {
1283 int old_nr, new_nr;
1284
1285 old_nr = tree_to_shwi (old_t_nr);
1286 new_nr = remap_eh_region_nr (old_nr, id);
1287
1288 return build_int_cst (integer_type_node, new_nr);
1289 }
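
/* Editor's illustration (not part of the original sources): if the
   callee's EH region 1 was duplicated into the caller as region 5,
   then a call __builtin_eh_pointer (1) in the copied body is rewritten
   to __builtin_eh_pointer (5) via remap_eh_region_tree_nr, and the
   regions of GIMPLE_RESX / GIMPLE_EH_DISPATCH statements are
   renumbered the same way in remap_gimple_stmt below.  */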
1290
1291 /* Helper for copy_bb. Remap statement STMT using the inlining
1292 information in ID. Return the new statement copy. */
1293
1294 static gimple
1295 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1296 {
1297 gimple copy = NULL;
1298 struct walk_stmt_info wi;
1299 bool skip_first = false;
1300
1301 /* Begin by recognizing trees that we'll completely rewrite for the
1302 inlining context. Our output for these trees is completely
1303 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1304 into an edge). Further down, we'll handle trees that get
1305 duplicated and/or tweaked. */
1306
1307 /* When requested, GIMPLE_RETURNs should be transformed to just the
1308 contained GIMPLE_ASSIGN. The branch semantics of the return will
1309 be handled elsewhere by manipulating the CFG rather than the
1310 statement. */
1311 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1312 {
1313 tree retval = gimple_return_retval (stmt);
1314
1315 /* If we're returning something, just turn that into an
1316 assignment into the equivalent of the original RESULT_DECL.
1317 If RETVAL is just the result decl, the result decl has
1318 already been set (e.g. a recent "foo (&result_decl, ...)");
1319 just toss the entire GIMPLE_RETURN. */
1320 if (retval
1321 && (TREE_CODE (retval) != RESULT_DECL
1322 && (TREE_CODE (retval) != SSA_NAME
1323 || ! SSA_NAME_VAR (retval)
1324 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1325 {
1326 copy = gimple_build_assign (id->do_not_unshare
1327 ? id->retvar : unshare_expr (id->retvar),
1328 retval);
1329 /* id->retvar is already substituted. Skip it on later remapping. */
1330 skip_first = true;
1331 }
1332 else
1333 return gimple_build_nop ();
1334 }
1335 else if (gimple_has_substatements (stmt))
1336 {
1337 gimple_seq s1, s2;
1338
1339 /* When cloning bodies from the C++ front end, we will be handed bodies
1340 in High GIMPLE form. Handle here all the High GIMPLE statements that
1341 have embedded statements. */
1342 switch (gimple_code (stmt))
1343 {
1344 case GIMPLE_BIND:
1345 copy = copy_gimple_bind (stmt, id);
1346 break;
1347
1348 case GIMPLE_CATCH:
1349 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1350 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1351 break;
1352
1353 case GIMPLE_EH_FILTER:
1354 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1355 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1356 break;
1357
1358 case GIMPLE_TRY:
1359 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1360 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1361 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1362 break;
1363
1364 case GIMPLE_WITH_CLEANUP_EXPR:
1365 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1366 copy = gimple_build_wce (s1);
1367 break;
1368
1369 case GIMPLE_OMP_PARALLEL:
1370 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1371 copy = gimple_build_omp_parallel
1372 (s1,
1373 gimple_omp_parallel_clauses (stmt),
1374 gimple_omp_parallel_child_fn (stmt),
1375 gimple_omp_parallel_data_arg (stmt));
1376 break;
1377
1378 case GIMPLE_OMP_TASK:
1379 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1380 copy = gimple_build_omp_task
1381 (s1,
1382 gimple_omp_task_clauses (stmt),
1383 gimple_omp_task_child_fn (stmt),
1384 gimple_omp_task_data_arg (stmt),
1385 gimple_omp_task_copy_fn (stmt),
1386 gimple_omp_task_arg_size (stmt),
1387 gimple_omp_task_arg_align (stmt));
1388 break;
1389
1390 case GIMPLE_OMP_FOR:
1391 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1392 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1393 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1394 gimple_omp_for_clauses (stmt),
1395 gimple_omp_for_collapse (stmt), s2);
1396 {
1397 size_t i;
1398 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1399 {
1400 gimple_omp_for_set_index (copy, i,
1401 gimple_omp_for_index (stmt, i));
1402 gimple_omp_for_set_initial (copy, i,
1403 gimple_omp_for_initial (stmt, i));
1404 gimple_omp_for_set_final (copy, i,
1405 gimple_omp_for_final (stmt, i));
1406 gimple_omp_for_set_incr (copy, i,
1407 gimple_omp_for_incr (stmt, i));
1408 gimple_omp_for_set_cond (copy, i,
1409 gimple_omp_for_cond (stmt, i));
1410 }
1411 }
1412 break;
1413
1414 case GIMPLE_OMP_MASTER:
1415 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1416 copy = gimple_build_omp_master (s1);
1417 break;
1418
1419 case GIMPLE_OMP_TASKGROUP:
1420 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1421 copy = gimple_build_omp_taskgroup (s1);
1422 break;
1423
1424 case GIMPLE_OMP_ORDERED:
1425 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1426 copy = gimple_build_omp_ordered (s1);
1427 break;
1428
1429 case GIMPLE_OMP_SECTION:
1430 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1431 copy = gimple_build_omp_section (s1);
1432 break;
1433
1434 case GIMPLE_OMP_SECTIONS:
1435 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1436 copy = gimple_build_omp_sections
1437 (s1, gimple_omp_sections_clauses (stmt));
1438 break;
1439
1440 case GIMPLE_OMP_SINGLE:
1441 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1442 copy = gimple_build_omp_single
1443 (s1, gimple_omp_single_clauses (stmt));
1444 break;
1445
1446 case GIMPLE_OMP_TARGET:
1447 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1448 copy = gimple_build_omp_target
1449 (s1, gimple_omp_target_kind (stmt),
1450 gimple_omp_target_clauses (stmt));
1451 break;
1452
1453 case GIMPLE_OMP_TEAMS:
1454 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1455 copy = gimple_build_omp_teams
1456 (s1, gimple_omp_teams_clauses (stmt));
1457 break;
1458
1459 case GIMPLE_OMP_CRITICAL:
1460 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1461 copy
1462 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1463 break;
1464
1465 case GIMPLE_TRANSACTION:
1466 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1467 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1468 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1469 break;
1470
1471 default:
1472 gcc_unreachable ();
1473 }
1474 }
1475 else
1476 {
1477 if (gimple_assign_copy_p (stmt)
1478 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1479 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1480 {
1481 /* Here we handle statements that are not completely rewritten.
1482 First we detect some inlining-induced bogosities for
1483 discarding. */
1484
1485 /* Some assignments VAR = VAR; don't generate any rtl code
1486 and thus don't count as variable modification. Avoid
1487 keeping bogosities like 0 = 0. */
1488 tree decl = gimple_assign_lhs (stmt), value;
1489 tree *n;
1490
1491 n = id->decl_map->get (decl);
1492 if (n)
1493 {
1494 value = *n;
1495 STRIP_TYPE_NOPS (value);
1496 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1497 return gimple_build_nop ();
1498 }
1499 }
1500
1501 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1502 in a block that we aren't copying during tree_function_versioning,
1503 just drop the clobber stmt. */
1504 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1505 {
1506 tree lhs = gimple_assign_lhs (stmt);
1507 if (TREE_CODE (lhs) == MEM_REF
1508 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1509 {
1510 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1511 if (gimple_bb (def_stmt)
1512 && !bitmap_bit_p (id->blocks_to_copy,
1513 gimple_bb (def_stmt)->index))
1514 return gimple_build_nop ();
1515 }
1516 }
1517
1518 if (gimple_debug_bind_p (stmt))
1519 {
1520 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1521 gimple_debug_bind_get_value (stmt),
1522 stmt);
1523 id->debug_stmts.safe_push (copy);
1524 return copy;
1525 }
1526 if (gimple_debug_source_bind_p (stmt))
1527 {
1528 copy = gimple_build_debug_source_bind
1529 (gimple_debug_source_bind_get_var (stmt),
1530 gimple_debug_source_bind_get_value (stmt), stmt);
1531 id->debug_stmts.safe_push (copy);
1532 return copy;
1533 }
1534
1535 /* Create a new deep copy of the statement. */
1536 copy = gimple_copy (stmt);
1537
1538 /* Clear flags that need revisiting. */
1539 if (is_gimple_call (copy)
1540 && gimple_call_tail_p (copy))
1541 gimple_call_set_tail (copy, false);
1542
1543 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1544 RESX and EH_DISPATCH. */
1545 if (id->eh_map)
1546 switch (gimple_code (copy))
1547 {
1548 case GIMPLE_CALL:
1549 {
1550 tree r, fndecl = gimple_call_fndecl (copy);
1551 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1552 switch (DECL_FUNCTION_CODE (fndecl))
1553 {
1554 case BUILT_IN_EH_COPY_VALUES:
1555 r = gimple_call_arg (copy, 1);
1556 r = remap_eh_region_tree_nr (r, id);
1557 gimple_call_set_arg (copy, 1, r);
1558 /* FALLTHRU */
1559
1560 case BUILT_IN_EH_POINTER:
1561 case BUILT_IN_EH_FILTER:
1562 r = gimple_call_arg (copy, 0);
1563 r = remap_eh_region_tree_nr (r, id);
1564 gimple_call_set_arg (copy, 0, r);
1565 break;
1566
1567 default:
1568 break;
1569 }
1570
1571 /* Reset alias info if we didn't apply measures to
1572 keep it valid over inlining by setting DECL_PT_UID. */
1573 if (!id->src_cfun->gimple_df
1574 || !id->src_cfun->gimple_df->ipa_pta)
1575 gimple_call_reset_alias_info (copy);
1576 }
1577 break;
1578
1579 case GIMPLE_RESX:
1580 {
1581 int r = gimple_resx_region (copy);
1582 r = remap_eh_region_nr (r, id);
1583 gimple_resx_set_region (copy, r);
1584 }
1585 break;
1586
1587 case GIMPLE_EH_DISPATCH:
1588 {
1589 int r = gimple_eh_dispatch_region (copy);
1590 r = remap_eh_region_nr (r, id);
1591 gimple_eh_dispatch_set_region (copy, r);
1592 }
1593 break;
1594
1595 default:
1596 break;
1597 }
1598 }
1599
1600 /* If STMT has a block defined, map it to the newly constructed
1601 block. */
1602 if (gimple_block (copy))
1603 {
1604 tree *n;
1605 n = id->decl_map->get (gimple_block (copy));
1606 gcc_assert (n);
1607 gimple_set_block (copy, *n);
1608 }
1609
1610 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1611 return copy;
1612
1613 /* Remap all the operands in COPY. */
1614 memset (&wi, 0, sizeof (wi));
1615 wi.info = id;
1616 if (skip_first)
1617 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1618 else
1619 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1620
1621 /* Clear the copied virtual operands. We are not remapping them here
1622 but are going to recreate them from scratch. */
1623 if (gimple_has_mem_ops (copy))
1624 {
1625 gimple_set_vdef (copy, NULL_TREE);
1626 gimple_set_vuse (copy, NULL_TREE);
1627 }
1628
1629 return copy;
1630 }
1631
1632
1633 /* Copy a basic block, scaling the profile accordingly. Edges will be
1634 taken care of later. */
1635
1636 static basic_block
1637 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1638 gcov_type count_scale)
1639 {
1640 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1641 basic_block copy_basic_block;
1642 tree decl;
1643 gcov_type freq;
1644 basic_block prev;
1645
1646 /* Search for previous copied basic block. */
1647 prev = bb->prev_bb;
1648 while (!prev->aux)
1649 prev = prev->prev_bb;
1650
1651 /* create_basic_block() will append every new block to
1652 basic_block_info automatically. */
1653 copy_basic_block = create_basic_block (NULL, (void *) 0,
1654 (basic_block) prev->aux);
1655 copy_basic_block->count = apply_scale (bb->count, count_scale);
1656
1657 /* We are going to rebuild frequencies from scratch. These values
1658 are of only minor importance for driving canonicalize_loop_headers. */
1659 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1660
1661 /* We recompute frequencies after inlining, so this is quite safe. */
1662 if (freq > BB_FREQ_MAX)
1663 freq = BB_FREQ_MAX;
1664 copy_basic_block->frequency = freq;
1665
1666 copy_gsi = gsi_start_bb (copy_basic_block);
1667
1668 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1669 {
1670 gimple stmt = gsi_stmt (gsi);
1671 gimple orig_stmt = stmt;
1672
1673 id->regimplify = false;
1674 stmt = remap_gimple_stmt (stmt, id);
1675 if (gimple_nop_p (stmt))
1676 continue;
1677
1678 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1679 seq_gsi = copy_gsi;
1680
1681 /* With return slot optimization we can end up with
1682 non-gimple (foo *)&this->m; fix that here. */
1683 if (is_gimple_assign (stmt)
1684 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1685 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1686 {
1687 tree new_rhs;
1688 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1689 gimple_assign_rhs1 (stmt),
1690 true, NULL, false,
1691 GSI_CONTINUE_LINKING);
1692 gimple_assign_set_rhs1 (stmt, new_rhs);
1693 id->regimplify = false;
1694 }
1695
1696 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1697
1698 if (id->regimplify)
1699 gimple_regimplify_operands (stmt, &seq_gsi);
1700
1701 /* If copy_basic_block was empty at the start of this iteration,
1702 call gsi_start_bb again to get at the newly added statements. */
1703 if (gsi_end_p (copy_gsi))
1704 copy_gsi = gsi_start_bb (copy_basic_block);
1705 else
1706 gsi_next (&copy_gsi);
1707
1708 /* Process the new statement. The call to gimple_regimplify_operands
1709 possibly turned the statement into multiple statements, we
1710 need to process all of them. */
1711 do
1712 {
1713 tree fn;
1714
1715 stmt = gsi_stmt (copy_gsi);
1716 if (is_gimple_call (stmt)
1717 && gimple_call_va_arg_pack_p (stmt)
1718 && id->gimple_call)
1719 {
1720 /* __builtin_va_arg_pack () should be replaced by
1721 all arguments corresponding to ... in the caller. */
1722 tree p;
1723 gimple new_call;
1724 vec<tree> argarray;
1725 size_t nargs = gimple_call_num_args (id->gimple_call);
1726 size_t n;
1727
1728 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1729 nargs--;
1730
1731 /* Create the new array of arguments. */
1732 n = nargs + gimple_call_num_args (stmt);
1733 argarray.create (n);
1734 argarray.safe_grow_cleared (n);
1735
1736 /* Copy all the arguments before '...' */
1737 memcpy (argarray.address (),
1738 gimple_call_arg_ptr (stmt, 0),
1739 gimple_call_num_args (stmt) * sizeof (tree));
1740
1741 /* Append the arguments passed in '...' */
1742 memcpy (argarray.address () + gimple_call_num_args (stmt),
1743 gimple_call_arg_ptr (id->gimple_call, 0)
1744 + (gimple_call_num_args (id->gimple_call) - nargs),
1745 nargs * sizeof (tree));
1746
1747 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1748 argarray);
1749
1750 argarray.release ();
1751
1752 /* Copy all GIMPLE_CALL flags, location and block, except
1753 GF_CALL_VA_ARG_PACK. */
1754 gimple_call_copy_flags (new_call, stmt);
1755 gimple_call_set_va_arg_pack (new_call, false);
1756 gimple_set_location (new_call, gimple_location (stmt));
1757 gimple_set_block (new_call, gimple_block (stmt));
1758 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1759
1760 gsi_replace (&copy_gsi, new_call, false);
1761 stmt = new_call;
1762 }
1763 else if (is_gimple_call (stmt)
1764 && id->gimple_call
1765 && (decl = gimple_call_fndecl (stmt))
1766 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1767 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1768 {
1769 /* __builtin_va_arg_pack_len () should be replaced by
1770 the number of anonymous arguments. */
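/* Continuing the hypothetical f/g sketch above: for a caller statement
   f (x, 1, 2), __builtin_va_arg_pack_len () inside f is replaced by the
   constant 2, i.e. the number of arguments matching the '...' in the
   caller.  */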
1771 size_t nargs = gimple_call_num_args (id->gimple_call);
1772 tree count, p;
1773 gimple new_stmt;
1774
1775 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1776 nargs--;
1777
1778 count = build_int_cst (integer_type_node, nargs);
1779 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1780 gsi_replace (&copy_gsi, new_stmt, false);
1781 stmt = new_stmt;
1782 }
1783
1784 /* Statements produced by inlining can be unfolded, especially
1785 when we have constant propagated some operands. We can't fold
1786 them right now for two reasons:
1787 1) folding requires SSA_NAME_DEF_STMTs to be correct
1788 2) we can't change function calls to builtins.
1789 So we just mark the statement for later folding. We mark
1790 all new statements, instead of just the statements that have changed
1791 by some nontrivial substitution, so even statements made
1792 foldable indirectly are updated. If this turns out to be
1793 expensive, copy_body can be told to watch for nontrivial
1794 changes. */
1795 if (id->statements_to_fold)
1796 id->statements_to_fold->add (stmt);
1797
1798 /* We're duplicating a CALL_EXPR. Find any corresponding
1799 callgraph edges and update or duplicate them. */
1800 if (is_gimple_call (stmt))
1801 {
1802 struct cgraph_edge *edge;
1803
1804 switch (id->transform_call_graph_edges)
1805 {
1806 case CB_CGE_DUPLICATE:
1807 edge = id->src_node->get_edge (orig_stmt);
1808 if (edge)
1809 {
1810 int edge_freq = edge->frequency;
1811 int new_freq;
1812 struct cgraph_edge *old_edge = edge;
1813 edge = edge->clone (id->dst_node, stmt,
1814 gimple_uid (stmt),
1815 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1816 true);
1817 /* We could also just rescale the frequency, but
1818 doing so would introduce roundoff errors and make
1819 the verifier unhappy. */
1820 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1821 copy_basic_block);
1822
1823 /* Speculative calls consist of two edges - direct and indirect.
1824 Duplicate the whole thing and distribute frequencies accordingly. */
1825 if (edge->speculative)
1826 {
1827 struct cgraph_edge *direct, *indirect;
1828 struct ipa_ref *ref;
1829
1830 gcc_assert (!edge->indirect_unknown_callee);
1831 old_edge->speculative_call_info (direct, indirect, ref);
1832 indirect = indirect->clone (id->dst_node, stmt,
1833 gimple_uid (stmt),
1834 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1835 true);
1836 if (old_edge->frequency + indirect->frequency)
1837 {
1838 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1839 (old_edge->frequency + indirect->frequency)),
1840 CGRAPH_FREQ_MAX);
1841 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1842 (old_edge->frequency + indirect->frequency)),
1843 CGRAPH_FREQ_MAX);
1844 }
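/* Worked example (illustrative numbers only): if new_freq is 1000 and the
   original edges carried frequencies 300 (direct) and 100 (indirect), the
   clones above receive 750 and 250, preserving the direct:indirect ratio
   while summing to new_freq, each capped at CGRAPH_FREQ_MAX.  */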
1845 id->dst_node->clone_reference (ref, stmt);
1846 }
1847 else
1848 {
1849 edge->frequency = new_freq;
1850 if (dump_file
1851 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1852 && (edge_freq > edge->frequency + 10
1853 || edge_freq < edge->frequency - 10))
1854 {
1855 fprintf (dump_file, "Edge frequency estimated by "
1856 "cgraph %i diverges from inliner's estimate %i\n",
1857 edge_freq,
1858 edge->frequency);
1859 fprintf (dump_file,
1860 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1861 bb->index,
1862 bb->frequency,
1863 copy_basic_block->frequency);
1864 }
1865 }
1866 }
1867 break;
1868
1869 case CB_CGE_MOVE_CLONES:
1870 id->dst_node->set_call_stmt_including_clones (orig_stmt,
1871 stmt);
1872 edge = id->dst_node->get_edge (stmt);
1873 break;
1874
1875 case CB_CGE_MOVE:
1876 edge = id->dst_node->get_edge (orig_stmt);
1877 if (edge)
1878 edge->set_call_stmt (stmt);
1879 break;
1880
1881 default:
1882 gcc_unreachable ();
1883 }
1884
1885 /* Constant propagation on arguments done during inlining
1886 may create a new direct call. Produce an edge for it. */
1887 if ((!edge
1888 || (edge->indirect_inlining_edge
1889 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1890 && id->dst_node->definition
1891 && (fn = gimple_call_fndecl (stmt)) != NULL)
1892 {
1893 struct cgraph_node *dest = cgraph_node::get (fn);
1894
1895 /* We have a missing edge in the callgraph. This can happen
1896 when previous inlining turned an indirect call into a
1897 direct call by constant propagating arguments, or when we are
1898 producing a dead clone (for further cloning). In all
1899 other cases we hit a bug (incorrect node sharing is the
1900 most common reason for missing edges). */
1901 gcc_assert (!dest->definition
1902 || dest->address_taken
1903 || !id->src_node->definition
1904 || !id->dst_node->definition);
1905 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1906 id->dst_node->create_edge_including_clones
1907 (dest, orig_stmt, stmt, bb->count,
1908 compute_call_stmt_bb_frequency (id->dst_node->decl,
1909 copy_basic_block),
1910 CIF_ORIGINALLY_INDIRECT_CALL);
1911 else
1912 id->dst_node->create_edge (dest, stmt,
1913 bb->count,
1914 compute_call_stmt_bb_frequency
1915 (id->dst_node->decl,
1916 copy_basic_block))->inline_failed
1917 = CIF_ORIGINALLY_INDIRECT_CALL;
1918 if (dump_file)
1919 {
1920 fprintf (dump_file, "Created new direct edge to %s\n",
1921 dest->name ());
1922 }
1923 }
1924
1925 notice_special_calls (stmt);
1926 }
1927
1928 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1929 id->eh_map, id->eh_lp_nr);
1930
1931 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1932 {
1933 ssa_op_iter i;
1934 tree def;
1935
1936 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1937 if (TREE_CODE (def) == SSA_NAME)
1938 SSA_NAME_DEF_STMT (def) = stmt;
1939 }
1940
1941 gsi_next (&copy_gsi);
1942 }
1943 while (!gsi_end_p (copy_gsi));
1944
1945 copy_gsi = gsi_last_bb (copy_basic_block);
1946 }
1947
1948 return copy_basic_block;
1949 }
1950
1951 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1952 form is quite easy, since the dominator relationship for the old basic blocks
1953 does not change.
1954
1955 There is, however, an exception where inlining might change the dominator
1956 relation across EH edges from basic blocks within the inlined function
1957 leading to landing pads in the function we inline into.
1958
1959 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1960 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1961 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1962 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1963 set, and this means that there will be no overlapping live ranges
1964 for the underlying symbol.
1965
1966 This might change in the future if we allow redirecting of EH edges and
1967 we might then want to change the way we build the CFG pre-inlining to
1968 include all the possible edges. */
1969 static void
1970 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1971 bool can_throw, bool nonlocal_goto)
1972 {
1973 edge e;
1974 edge_iterator ei;
1975
1976 FOR_EACH_EDGE (e, ei, bb->succs)
1977 if (!e->dest->aux
1978 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1979 {
1980 gimple phi;
1981 gimple_stmt_iterator si;
1982
1983 if (!nonlocal_goto)
1984 gcc_assert (e->flags & EDGE_EH);
1985
1986 if (!can_throw)
1987 gcc_assert (!(e->flags & EDGE_EH));
1988
1989 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1990 {
1991 edge re;
1992
1993 phi = gsi_stmt (si);
1994
1995 /* For abnormal goto/call edges the receiver can be the
1996 ENTRY_BLOCK. Do not assert this cannot happen. */
1997
1998 gcc_assert ((e->flags & EDGE_EH)
1999 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2000
2001 re = find_edge (ret_bb, e->dest);
2002 gcc_checking_assert (re);
2003 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2004 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2005
2006 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2007 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2008 }
2009 }
2010 }
2011
2012
2013 /* Copy edges from BB into its copy constructed earlier, scale profile
2014 accordingly. Edges will be taken care of later. Assume the aux
2015 pointers point to the copies of each BB. Return true if any
2016 debug stmts are left after a statement that must end the basic block. */
2017
2018 static bool
2019 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2020 basic_block abnormal_goto_dest)
2021 {
2022 basic_block new_bb = (basic_block) bb->aux;
2023 edge_iterator ei;
2024 edge old_edge;
2025 gimple_stmt_iterator si;
2026 int flags;
2027 bool need_debug_cleanup = false;
2028
2029 /* Use the indices from the original blocks to create edges for the
2030 new ones. */
2031 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2032 if (!(old_edge->flags & EDGE_EH))
2033 {
2034 edge new_edge;
2035
2036 flags = old_edge->flags;
2037
2038 /* Return edges do get a FALLTHRU flag when they get inlined. */
2039 if (old_edge->dest->index == EXIT_BLOCK
2040 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2041 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2042 flags |= EDGE_FALLTHRU;
2043 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2044 new_edge->count = apply_scale (old_edge->count, count_scale);
2045 new_edge->probability = old_edge->probability;
2046 }
2047
2048 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2049 return false;
2050
2051 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2052 {
2053 gimple copy_stmt;
2054 bool can_throw, nonlocal_goto;
2055
2056 copy_stmt = gsi_stmt (si);
2057 if (!is_gimple_debug (copy_stmt))
2058 update_stmt (copy_stmt);
2059
2060 /* Do this before the possible split_block. */
2061 gsi_next (&si);
2062
2063 /* If this tree could throw an exception, there are two
2064 cases where we need to add abnormal edge(s): the
2065 tree wasn't in a region and there is a "current
2066 region" in the caller; or the original tree had
2067 EH edges. In both cases split the block after the tree,
2068 and add abnormal edge(s) as needed; we need both
2069 those from the callee and the caller.
2070 We check whether the copy can throw, because the const
2071 propagation can change an INDIRECT_REF which throws
2072 into a COMPONENT_REF which doesn't. If the copy
2073 can throw, the original could also throw. */
2074 can_throw = stmt_can_throw_internal (copy_stmt);
2075 nonlocal_goto
2076 = (stmt_can_make_abnormal_goto (copy_stmt)
2077 && !computed_goto_p (copy_stmt));
2078
2079 if (can_throw || nonlocal_goto)
2080 {
2081 if (!gsi_end_p (si))
2082 {
2083 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2084 gsi_next (&si);
2085 if (gsi_end_p (si))
2086 need_debug_cleanup = true;
2087 }
2088 if (!gsi_end_p (si))
2089 /* Note that bb's predecessor edges aren't necessarily
2090 right at this point; split_block doesn't care. */
2091 {
2092 edge e = split_block (new_bb, copy_stmt);
2093
2094 new_bb = e->dest;
2095 new_bb->aux = e->src->aux;
2096 si = gsi_start_bb (new_bb);
2097 }
2098 }
2099
2100 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2101 make_eh_dispatch_edges (copy_stmt);
2102 else if (can_throw)
2103 make_eh_edges (copy_stmt);
2104
2105 /* If the call we inline cannot make an abnormal goto, do not add
2106 additional abnormal edges but only retain those already present
2107 in the original function body. */
2108 if (abnormal_goto_dest == NULL)
2109 nonlocal_goto = false;
2110 if (nonlocal_goto)
2111 {
2112 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2113
2114 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2115 nonlocal_goto = false;
2116 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2117 in OpenMP regions which aren't allowed to be left abnormally.
2118 So, no need to add abnormal edge in that case. */
2119 else if (is_gimple_call (copy_stmt)
2120 && gimple_call_internal_p (copy_stmt)
2121 && (gimple_call_internal_fn (copy_stmt)
2122 == IFN_ABNORMAL_DISPATCHER)
2123 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2124 nonlocal_goto = false;
2125 else
2126 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2127 }
2128
2129 if ((can_throw || nonlocal_goto)
2130 && gimple_in_ssa_p (cfun))
2131 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2132 can_throw, nonlocal_goto);
2133 }
2134 return need_debug_cleanup;
2135 }
2136
2137 /* Copy the PHIs. All blocks and edges are copied, some blocks
2138 were possibly split and new outgoing EH edges inserted.
2139 BB points to the block of the original function and the AUX pointers link
2140 the original and newly copied blocks. */
2141
2142 static void
2143 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2144 {
2145 basic_block const new_bb = (basic_block) bb->aux;
2146 edge_iterator ei;
2147 gimple phi;
2148 gimple_stmt_iterator si;
2149 edge new_edge;
2150 bool inserted = false;
2151
2152 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2153 {
2154 tree res, new_res;
2155 gimple new_phi;
2156
2157 phi = gsi_stmt (si);
2158 res = PHI_RESULT (phi);
2159 new_res = res;
2160 if (!virtual_operand_p (res))
2161 {
2162 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2163 new_phi = create_phi_node (new_res, new_bb);
2164 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2165 {
2166 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2167 tree arg;
2168 tree new_arg;
2169 edge_iterator ei2;
2170 location_t locus;
2171
2172 /* When doing partial cloning, we allow PHIs on the entry block
2173 as long as all the arguments are the same. Find any input
2174 edge to find the argument to copy. */
2175 if (!old_edge)
2176 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2177 if (!old_edge->src->aux)
2178 break;
2179
2180 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2181 new_arg = arg;
2182 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2183 gcc_assert (new_arg);
2184 /* With return slot optimization we can end up with
2185 non-gimple (foo *)&this->m, fix that here. */
2186 if (TREE_CODE (new_arg) != SSA_NAME
2187 && TREE_CODE (new_arg) != FUNCTION_DECL
2188 && !is_gimple_val (new_arg))
2189 {
2190 gimple_seq stmts = NULL;
2191 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2192 gsi_insert_seq_on_edge (new_edge, stmts);
2193 inserted = true;
2194 }
2195 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2196 if (LOCATION_BLOCK (locus))
2197 {
2198 tree *n;
2199 n = id->decl_map->get (LOCATION_BLOCK (locus));
2200 gcc_assert (n);
2201 if (*n)
2202 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2203 else
2204 locus = LOCATION_LOCUS (locus);
2205 }
2206 else
2207 locus = LOCATION_LOCUS (locus);
2208
2209 add_phi_arg (new_phi, new_arg, new_edge, locus);
2210 }
2211 }
2212 }
2213
2214 /* Commit the delayed edge insertions. */
2215 if (inserted)
2216 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2217 gsi_commit_one_edge_insert (new_edge, NULL);
2218 }
2219
2220
2221 /* Wrapper for remap_decl so it can be used as a callback. */
2222
2223 static tree
2224 remap_decl_1 (tree decl, void *data)
2225 {
2226 return remap_decl (decl, (copy_body_data *) data);
2227 }
2228
2229 /* Build the struct function and associated data structures for the new clone
2230 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2231 cfun to the function of new_fndecl (and current_function_decl too). */
2232
2233 static void
2234 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2235 {
2236 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2237 gcov_type count_scale;
2238
2239 if (!DECL_ARGUMENTS (new_fndecl))
2240 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2241 if (!DECL_RESULT (new_fndecl))
2242 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2243
2244 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2245 count_scale
2246 = GCOV_COMPUTE_SCALE (count,
2247 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2248 else
2249 count_scale = REG_BR_PROB_BASE;
2250
2251 /* Register specific tree functions. */
2252 gimple_register_cfg_hooks ();
2253
2254 /* Get clean struct function. */
2255 push_struct_function (new_fndecl);
2256
2257 /* We will rebuild these, so just sanity check that they are empty. */
2258 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2259 gcc_assert (cfun->local_decls == NULL);
2260 gcc_assert (cfun->cfg == NULL);
2261 gcc_assert (cfun->decl == new_fndecl);
2262
2263 /* Copy items we preserve during cloning. */
2264 cfun->static_chain_decl = src_cfun->static_chain_decl;
2265 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2266 cfun->function_end_locus = src_cfun->function_end_locus;
2267 cfun->curr_properties = src_cfun->curr_properties;
2268 cfun->last_verified = src_cfun->last_verified;
2269 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2270 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2271 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2272 cfun->stdarg = src_cfun->stdarg;
2273 cfun->after_inlining = src_cfun->after_inlining;
2274 cfun->can_throw_non_call_exceptions
2275 = src_cfun->can_throw_non_call_exceptions;
2276 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2277 cfun->returns_struct = src_cfun->returns_struct;
2278 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2279
2280 init_empty_tree_cfg ();
2281
2282 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2283 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2284 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2285 REG_BR_PROB_BASE);
2286 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2287 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2288 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2289 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2290 REG_BR_PROB_BASE);
2291 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2292 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2293 if (src_cfun->eh)
2294 init_eh_for_function ();
2295
2296 if (src_cfun->gimple_df)
2297 {
2298 init_tree_ssa (cfun);
2299 cfun->gimple_df->in_ssa_p = true;
2300 init_ssa_operands (cfun);
2301 }
2302 }
2303
2304 /* Helper function for copy_cfg_body. Move debug stmts from the end
2305 of NEW_BB to the beginning of successor basic blocks when needed. If the
2306 successor has multiple predecessors, reset the values they bind,
2307 otherwise keep them. */
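/* A hedged illustration: if NEW_BB ends with a call that can throw and is
   followed by a bind such as  # DEBUG x => x_1,  that bind is moved or
   copied to the start of each successor; when a successor has several
   predecessors the bound value is reset (# DEBUG x => NULL), since no
   single value is correct on all incoming paths.  */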
2308
2309 static void
2310 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2311 {
2312 edge e;
2313 edge_iterator ei;
2314 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2315
2316 if (gsi_end_p (si)
2317 || gsi_one_before_end_p (si)
2318 || !(stmt_can_throw_internal (gsi_stmt (si))
2319 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2320 return;
2321
2322 FOR_EACH_EDGE (e, ei, new_bb->succs)
2323 {
2324 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2325 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2326 while (is_gimple_debug (gsi_stmt (ssi)))
2327 {
2328 gimple stmt = gsi_stmt (ssi), new_stmt;
2329 tree var;
2330 tree value;
2331
2332 /* For the last edge move the debug stmts instead of copying
2333 them. */
2334 if (ei_one_before_end_p (ei))
2335 {
2336 si = ssi;
2337 gsi_prev (&ssi);
2338 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2339 gimple_debug_bind_reset_value (stmt);
2340 gsi_remove (&si, false);
2341 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2342 continue;
2343 }
2344
2345 if (gimple_debug_bind_p (stmt))
2346 {
2347 var = gimple_debug_bind_get_var (stmt);
2348 if (single_pred_p (e->dest))
2349 {
2350 value = gimple_debug_bind_get_value (stmt);
2351 value = unshare_expr (value);
2352 }
2353 else
2354 value = NULL_TREE;
2355 new_stmt = gimple_build_debug_bind (var, value, stmt);
2356 }
2357 else if (gimple_debug_source_bind_p (stmt))
2358 {
2359 var = gimple_debug_source_bind_get_var (stmt);
2360 value = gimple_debug_source_bind_get_value (stmt);
2361 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2362 }
2363 else
2364 gcc_unreachable ();
2365 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2366 id->debug_stmts.safe_push (new_stmt);
2367 gsi_prev (&ssi);
2368 }
2369 }
2370 }
2371
2372 /* Make a copy of the sub-loops of SRC_PARENT and place them
2373 as siblings of DEST_PARENT. */
2374
2375 static void
2376 copy_loops (copy_body_data *id,
2377 struct loop *dest_parent, struct loop *src_parent)
2378 {
2379 struct loop *src_loop = src_parent->inner;
2380 while (src_loop)
2381 {
2382 if (!id->blocks_to_copy
2383 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2384 {
2385 struct loop *dest_loop = alloc_loop ();
2386
2387 /* Assign the new loop its header and latch and associate
2388 those with the new loop. */
2389 dest_loop->header = (basic_block)src_loop->header->aux;
2390 dest_loop->header->loop_father = dest_loop;
2391 if (src_loop->latch != NULL)
2392 {
2393 dest_loop->latch = (basic_block)src_loop->latch->aux;
2394 dest_loop->latch->loop_father = dest_loop;
2395 }
2396
2397 /* Copy loop meta-data. */
2398 copy_loop_info (src_loop, dest_loop);
2399
2400 /* Finally place it into the loop array and the loop tree. */
2401 place_new_loop (cfun, dest_loop);
2402 flow_loop_tree_node_add (dest_parent, dest_loop);
2403
2404 dest_loop->safelen = src_loop->safelen;
2405 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2406 if (src_loop->force_vectorize)
2407 {
2408 dest_loop->force_vectorize = true;
2409 cfun->has_force_vectorize_loops = true;
2410 }
2411 if (src_loop->simduid)
2412 {
2413 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2414 cfun->has_simduid_loops = true;
2415 }
2416
2417 /* Recurse. */
2418 copy_loops (id, dest_loop, src_loop);
2419 }
2420 src_loop = src_loop->next;
2421 }
2422 }
2423
2424 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2425
2426 void
2427 redirect_all_calls (copy_body_data * id, basic_block bb)
2428 {
2429 gimple_stmt_iterator si;
2430 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2431 {
2432 if (is_gimple_call (gsi_stmt (si)))
2433 {
2434 struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
2435 if (edge)
2436 edge->redirect_call_stmt_to_callee ();
2437 }
2438 }
2439 }
2440
2441 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2442 with each bb's frequency. Used when NODE has a 0-weight entry
2443 but we are about to inline it into a non-zero count call bb.
2444 See the comments for handle_missing_profiles() in predict.c for
2445 when this can happen for COMDATs. */
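/* For instance (illustrative numbers, editorial addition): with COUNT == 1000
   and a block whose frequency is BB_FREQ_MAX / 4, the block receives a count
   of roughly 250, and each outgoing edge then gets its share of that count
   according to its probability.  */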
2446
2447 void
2448 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2449 {
2450 basic_block bb;
2451 edge_iterator ei;
2452 edge e;
2453 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2454
2455 FOR_ALL_BB_FN (bb, fn)
2456 {
2457 bb->count = apply_scale (count,
2458 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2459 FOR_EACH_EDGE (e, ei, bb->succs)
2460 e->count = apply_probability (e->src->count, e->probability);
2461 }
2462 }
2463
2464 /* Make a copy of the body of FN so that it can be inserted inline in
2465 another function. Walks FN via CFG, returns new fndecl. */
2466
2467 static tree
2468 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2469 basic_block entry_block_map, basic_block exit_block_map,
2470 basic_block new_entry)
2471 {
2472 tree callee_fndecl = id->src_fn;
2473 /* Original cfun for the callee, doesn't change. */
2474 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2475 struct function *cfun_to_copy;
2476 basic_block bb;
2477 tree new_fndecl = NULL;
2478 bool need_debug_cleanup = false;
2479 gcov_type count_scale;
2480 int last;
2481 int incoming_frequency = 0;
2482 gcov_type incoming_count = 0;
2483
2484 /* This can happen for COMDAT routines that end up with 0 counts
2485 despite being called (see the comments for handle_missing_profiles()
2486 in predict.c as to why). Apply counts to the blocks in the callee
2487 before inlining, using the guessed edge frequencies, so that we don't
2488 end up with a 0-count inline body which can confuse downstream
2489 optimizations such as function splitting. */
2490 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2491 {
2492 /* Apply the larger of the call bb count and the total incoming
2493 call edge count to the callee. */
2494 gcov_type in_count = 0;
2495 struct cgraph_edge *in_edge;
2496 for (in_edge = id->src_node->callers; in_edge;
2497 in_edge = in_edge->next_caller)
2498 in_count += in_edge->count;
2499 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2500 }
2501
2502 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2503 count_scale
2504 = GCOV_COMPUTE_SCALE (count,
2505 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2506 else
2507 count_scale = REG_BR_PROB_BASE;
2508
2509 /* Register specific tree functions. */
2510 gimple_register_cfg_hooks ();
2511
2512 /* If we are inlining just a region of the function, make sure to connect
2513 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2514 part of a loop, we must compute the frequency and probability of
2515 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2516 probabilities of the edges incoming from the nonduplicated region. */
2517 if (new_entry)
2518 {
2519 edge e;
2520 edge_iterator ei;
2521
2522 FOR_EACH_EDGE (e, ei, new_entry->preds)
2523 if (!e->src->aux)
2524 {
2525 incoming_frequency += EDGE_FREQUENCY (e);
2526 incoming_count += e->count;
2527 }
2528 incoming_count = apply_scale (incoming_count, count_scale);
2529 incoming_frequency
2530 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2531 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2532 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2533 }
2534
2535 /* Must have a CFG here at this point. */
2536 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2537 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2538
2539 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2540
2541 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2542 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2543 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2544 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2545
2546 /* Duplicate any exception-handling regions. */
2547 if (cfun->eh)
2548 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2549 remap_decl_1, id);
2550
2551 /* Use the aux pointers to map the original blocks to their copies. */
2552 FOR_EACH_BB_FN (bb, cfun_to_copy)
2553 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2554 {
2555 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2556 bb->aux = new_bb;
2557 new_bb->aux = bb;
2558 new_bb->loop_father = entry_block_map->loop_father;
2559 }
2560
2561 last = last_basic_block_for_fn (cfun);
2562
2563 /* Now that we've duplicated the blocks, duplicate their edges. */
2564 basic_block abnormal_goto_dest = NULL;
2565 if (id->gimple_call
2566 && stmt_can_make_abnormal_goto (id->gimple_call))
2567 {
2568 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2569
2570 bb = gimple_bb (id->gimple_call);
2571 gsi_next (&gsi);
2572 if (gsi_end_p (gsi))
2573 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2574 }
2575 FOR_ALL_BB_FN (bb, cfun_to_copy)
2576 if (!id->blocks_to_copy
2577 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2578 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2579 abnormal_goto_dest);
2580
2581 if (new_entry)
2582 {
2583 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2584 e->probability = REG_BR_PROB_BASE;
2585 e->count = incoming_count;
2586 }
2587
2588 /* Duplicate the loop tree, if available and wanted. */
2589 if (loops_for_fn (src_cfun) != NULL
2590 && current_loops != NULL)
2591 {
2592 copy_loops (id, entry_block_map->loop_father,
2593 get_loop (src_cfun, 0));
2594 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2595 loops_state_set (LOOPS_NEED_FIXUP);
2596 }
2597
2598 /* If the loop tree in the source function needed fixup, mark the
2599 destination loop tree for fixup, too. */
2600 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2601 loops_state_set (LOOPS_NEED_FIXUP);
2602
2603 if (gimple_in_ssa_p (cfun))
2604 FOR_ALL_BB_FN (bb, cfun_to_copy)
2605 if (!id->blocks_to_copy
2606 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2607 copy_phis_for_bb (bb, id);
2608
2609 FOR_ALL_BB_FN (bb, cfun_to_copy)
2610 if (bb->aux)
2611 {
2612 if (need_debug_cleanup
2613 && bb->index != ENTRY_BLOCK
2614 && bb->index != EXIT_BLOCK)
2615 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2616 /* Update call edge destinations. This cannot be done before loop
2617 info is updated, because we may split basic blocks. */
2618 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2619 redirect_all_calls (id, (basic_block)bb->aux);
2620 ((basic_block)bb->aux)->aux = NULL;
2621 bb->aux = NULL;
2622 }
2623
2624 /* Zero out the AUX fields of blocks newly created during EH edge
2625 insertion. */
2626 for (; last < last_basic_block_for_fn (cfun); last++)
2627 {
2628 if (need_debug_cleanup)
2629 maybe_move_debug_stmts_to_successors (id,
2630 BASIC_BLOCK_FOR_FN (cfun, last));
2631 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2632 /* Update call edge destinations. This cannot be done before loop
2633 info is updated, because we may split basic blocks. */
2634 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2635 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2636 }
2637 entry_block_map->aux = NULL;
2638 exit_block_map->aux = NULL;
2639
2640 if (id->eh_map)
2641 {
2642 delete id->eh_map;
2643 id->eh_map = NULL;
2644 }
2645
2646 return new_fndecl;
2647 }
2648
2649 /* Copy the debug STMT using ID. We deal with these statements in a
2650 special way: if any variable in their VALUE expression wasn't
2651 remapped yet, we won't remap it, because that would get decl uids
2652 out of sync, causing codegen differences between -g and -g0. If
2653 this arises, we drop the VALUE expression altogether. */
2654
2655 static void
2656 copy_debug_stmt (gimple stmt, copy_body_data *id)
2657 {
2658 tree t, *n;
2659 struct walk_stmt_info wi;
2660
2661 if (gimple_block (stmt))
2662 {
2663 n = id->decl_map->get (gimple_block (stmt));
2664 gimple_set_block (stmt, n ? *n : id->block);
2665 }
2666
2667 /* Remap all the operands in COPY. */
2668 memset (&wi, 0, sizeof (wi));
2669 wi.info = id;
2670
2671 processing_debug_stmt = 1;
2672
2673 if (gimple_debug_source_bind_p (stmt))
2674 t = gimple_debug_source_bind_get_var (stmt);
2675 else
2676 t = gimple_debug_bind_get_var (stmt);
2677
2678 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2679 && (n = id->debug_map->get (t)))
2680 {
2681 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2682 t = *n;
2683 }
2684 else if (TREE_CODE (t) == VAR_DECL
2685 && !is_global_var (t)
2686 && !id->decl_map->get (t))
2687 /* T is a non-localized variable. */;
2688 else
2689 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2690
2691 if (gimple_debug_bind_p (stmt))
2692 {
2693 gimple_debug_bind_set_var (stmt, t);
2694
2695 if (gimple_debug_bind_has_value_p (stmt))
2696 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2697 remap_gimple_op_r, &wi, NULL);
2698
2699 /* Punt if any decl couldn't be remapped. */
2700 if (processing_debug_stmt < 0)
2701 gimple_debug_bind_reset_value (stmt);
2702 }
2703 else if (gimple_debug_source_bind_p (stmt))
2704 {
2705 gimple_debug_source_bind_set_var (stmt, t);
2706 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2707 remap_gimple_op_r, &wi, NULL);
2708 /* When inlining and source bind refers to one of the optimized
2709 away parameters, change the source bind into normal debug bind
2710 referring to the corresponding DEBUG_EXPR_DECL that should have
2711 been bound before the call stmt. */
2712 t = gimple_debug_source_bind_get_value (stmt);
2713 if (t != NULL_TREE
2714 && TREE_CODE (t) == PARM_DECL
2715 && id->gimple_call)
2716 {
2717 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2718 unsigned int i;
2719 if (debug_args != NULL)
2720 {
2721 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2722 if ((**debug_args)[i] == DECL_ORIGIN (t)
2723 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2724 {
2725 t = (**debug_args)[i + 1];
2726 stmt->subcode = GIMPLE_DEBUG_BIND;
2727 gimple_debug_bind_set_value (stmt, t);
2728 break;
2729 }
2730 }
2731 }
2732 }
2733
2734 processing_debug_stmt = 0;
2735
2736 update_stmt (stmt);
2737 }
2738
2739 /* Process deferred debug stmts. In order to give values better odds
2740 of being successfully remapped, we delay the processing of debug
2741 stmts until all other stmts that might require remapping are
2742 processed. */
2743
2744 static void
2745 copy_debug_stmts (copy_body_data *id)
2746 {
2747 size_t i;
2748 gimple stmt;
2749
2750 if (!id->debug_stmts.exists ())
2751 return;
2752
2753 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2754 copy_debug_stmt (stmt, id);
2755
2756 id->debug_stmts.release ();
2757 }
2758
2759 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2760 another function. */
2761
2762 static tree
2763 copy_tree_body (copy_body_data *id)
2764 {
2765 tree fndecl = id->src_fn;
2766 tree body = DECL_SAVED_TREE (fndecl);
2767
2768 walk_tree (&body, copy_tree_body_r, id, NULL);
2769
2770 return body;
2771 }
2772
2773 /* Make a copy of the body of FN so that it can be inserted inline in
2774 another function. */
2775
2776 static tree
2777 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2778 basic_block entry_block_map, basic_block exit_block_map,
2779 basic_block new_entry)
2780 {
2781 tree fndecl = id->src_fn;
2782 tree body;
2783
2784 /* If this body has a CFG, walk CFG and copy. */
2785 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2786 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2787 new_entry);
2788 copy_debug_stmts (id);
2789
2790 return body;
2791 }
2792
2793 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2794 defined in function FN, or of a data member thereof. */
2795
2796 static bool
2797 self_inlining_addr_expr (tree value, tree fn)
2798 {
2799 tree var;
2800
2801 if (TREE_CODE (value) != ADDR_EXPR)
2802 return false;
2803
2804 var = get_base_address (TREE_OPERAND (value, 0));
2805
2806 return var && auto_var_in_fn_p (var, fn);
2807 }
2808
2809 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2810 lexical block and line number information from base_stmt, if given,
2811 or from the last stmt of the block otherwise. */
2812
2813 static gimple
2814 insert_init_debug_bind (copy_body_data *id,
2815 basic_block bb, tree var, tree value,
2816 gimple base_stmt)
2817 {
2818 gimple note;
2819 gimple_stmt_iterator gsi;
2820 tree tracked_var;
2821
2822 if (!gimple_in_ssa_p (id->src_cfun))
2823 return NULL;
2824
2825 if (!MAY_HAVE_DEBUG_STMTS)
2826 return NULL;
2827
2828 tracked_var = target_for_debug_bind (var);
2829 if (!tracked_var)
2830 return NULL;
2831
2832 if (bb)
2833 {
2834 gsi = gsi_last_bb (bb);
2835 if (!base_stmt && !gsi_end_p (gsi))
2836 base_stmt = gsi_stmt (gsi);
2837 }
2838
2839 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2840
2841 if (bb)
2842 {
2843 if (!gsi_end_p (gsi))
2844 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2845 else
2846 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2847 }
2848
2849 return note;
2850 }
2851
2852 static void
2853 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2854 {
2855 /* If VAR represents a zero-sized variable, it's possible that the
2856 assignment statement may result in no gimple statements. */
2857 if (init_stmt)
2858 {
2859 gimple_stmt_iterator si = gsi_last_bb (bb);
2860
2861 /* We can end up with init statements that store to a non-register
2862 from a rhs with a conversion. Handle that here by forcing the
2863 rhs into a temporary. gimple_regimplify_operands is not
2864 prepared to do this for us. */
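/* Hedged sketch of the transformation below (illustrative SSA names):
   an init such as
     var = (float) arg_3;
   where VAR is not a gimple register (e.g. its address is taken) becomes
     tmp_4 = (float) arg_3;
     var = tmp_4;
   with the conversion forced into the temporary.  */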
2865 if (!is_gimple_debug (init_stmt)
2866 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2867 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2868 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2869 {
2870 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2871 gimple_expr_type (init_stmt),
2872 gimple_assign_rhs1 (init_stmt));
2873 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2874 GSI_NEW_STMT);
2875 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2876 gimple_assign_set_rhs1 (init_stmt, rhs);
2877 }
2878 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2879 gimple_regimplify_operands (init_stmt, &si);
2880
2881 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2882 {
2883 tree def = gimple_assign_lhs (init_stmt);
2884 insert_init_debug_bind (id, bb, def, def, init_stmt);
2885 }
2886 }
2887 }
2888
2889 /* Initialize parameter P with VALUE. If needed, produce the init statement
2890 at the end of BB. When BB is NULL, we return the init statement to be
2891 output later. */
2892 static gimple
2893 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2894 basic_block bb, tree *vars)
2895 {
2896 gimple init_stmt = NULL;
2897 tree var;
2898 tree rhs = value;
2899 tree def = (gimple_in_ssa_p (cfun)
2900 ? ssa_default_def (id->src_cfun, p) : NULL);
2901
2902 if (value
2903 && value != error_mark_node
2904 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2905 {
2906 /* If we can match up types by promotion/demotion do so. */
2907 if (fold_convertible_p (TREE_TYPE (p), value))
2908 rhs = fold_convert (TREE_TYPE (p), value);
2909 else
2910 {
2911 /* ??? For valid programs we should not end up here.
2912 Still if we end up with truly mismatched types here, fall back
2913 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2914 GIMPLE to the following passes. */
2915 if (!is_gimple_reg_type (TREE_TYPE (value))
2916 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2917 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2918 else
2919 rhs = build_zero_cst (TREE_TYPE (p));
2920 }
2921 }
2922
2923 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2924 here since the type of this decl must be visible to the calling
2925 function. */
2926 var = copy_decl_to_var (p, id);
2927
2928 /* Declare this new variable. */
2929 DECL_CHAIN (var) = *vars;
2930 *vars = var;
2931
2932 /* Make gimplifier happy about this variable. */
2933 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2934
2935 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2936 we would not need to create a new variable here at all, if it
2937 weren't for debug info. Still, we can just use the argument
2938 value. */
2939 if (TREE_READONLY (p)
2940 && !TREE_ADDRESSABLE (p)
2941 && value && !TREE_SIDE_EFFECTS (value)
2942 && !def)
2943 {
2944 /* We may produce non-gimple trees by adding NOPs or introduce
2945 invalid sharing when the operand is not really constant.
2946 It is not a big deal to prohibit constant propagation here as
2947 we will constant propagate in the DOM1 pass anyway. */
2948 if (is_gimple_min_invariant (value)
2949 && useless_type_conversion_p (TREE_TYPE (p),
2950 TREE_TYPE (value))
2951 /* We have to be very careful about ADDR_EXPR. Make sure
2952 the base variable isn't a local variable of the inlined
2953 function, e.g., when doing recursive inlining, direct or
2954 mutually-recursive or whatever, which is why we don't
2955 just test whether fn == current_function_decl. */
2956 && ! self_inlining_addr_expr (value, fn))
2957 {
2958 insert_decl_map (id, p, value);
2959 insert_debug_decl_map (id, p, var);
2960 return insert_init_debug_bind (id, bb, var, value, NULL);
2961 }
2962 }
2963
2964 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2965 that way, when the PARM_DECL is encountered, it will be
2966 automatically replaced by the VAR_DECL. */
2967 insert_decl_map (id, p, var);
2968
2969 /* Even if P was TREE_READONLY, the new VAR should not be.
2970 In the original code, we would have constructed a
2971 temporary, and then the function body would have never
2972 changed the value of P. However, now, we will be
2973 constructing VAR directly. The constructor body may
2974 change its value multiple times as it is being
2975 constructed. Therefore, it must not be TREE_READONLY;
2976 the back end assumes that a TREE_READONLY variable is
2977 assigned to only once. */
2978 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2979 TREE_READONLY (var) = 0;
2980
2981 /* If there is no setup required and we are in SSA, take the easy route
2982 replacing all SSA names representing the function parameter by the
2983 SSA name passed to the function.
2984
2985 We need to construct a map for the variable anyway as it might be used
2986 in different SSA names when the parameter is set in the function.
2987
2988 Do the replacement at -O0 for const arguments replaced by a constant.
2989 This is important for builtin_constant_p and other constructs requiring
2990 a constant argument to be visible in the inlined function body. */
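/* Hedged example (editorial): for a callee  int f (const int n)  called as
   f (8), the default definition of n is mapped directly to 8 below, so uses
   such as __builtin_constant_p (n) in the inlined body see the constant even
   at -O0.  */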
2991 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2992 && (optimize
2993 || (TREE_READONLY (p)
2994 && is_gimple_min_invariant (rhs)))
2995 && (TREE_CODE (rhs) == SSA_NAME
2996 || is_gimple_min_invariant (rhs))
2997 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2998 {
2999 insert_decl_map (id, def, rhs);
3000 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3001 }
3002
3003 /* If the value of the argument is never used, don't bother
3004 initializing it. */
3005 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3006 {
3007 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3008 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3009 }
3010
3011 /* Initialize this VAR_DECL from the equivalent argument. Convert
3012 the argument to the proper type in case it was promoted. */
3013 if (value)
3014 {
3015 if (rhs == error_mark_node)
3016 {
3017 insert_decl_map (id, p, var);
3018 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3019 }
3020
3021 STRIP_USELESS_TYPE_CONVERSION (rhs);
3022
3023 /* If we are in SSA form, properly remap the default definition
3024 or assign to a dummy SSA name if the parameter is unused and
3025 we are not optimizing. */
3026 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3027 {
3028 if (def)
3029 {
3030 def = remap_ssa_name (def, id);
3031 init_stmt = gimple_build_assign (def, rhs);
3032 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3033 set_ssa_default_def (cfun, var, NULL);
3034 }
3035 else if (!optimize)
3036 {
3037 def = make_ssa_name (var, NULL);
3038 init_stmt = gimple_build_assign (def, rhs);
3039 }
3040 }
3041 else
3042 init_stmt = gimple_build_assign (var, rhs);
3043
3044 if (bb && init_stmt)
3045 insert_init_stmt (id, bb, init_stmt);
3046 }
3047 return init_stmt;
3048 }
3049
3050 /* Generate code to initialize the parameters of the function at the
3051 top of the stack in ID from the GIMPLE_CALL STMT. */
3052
3053 static void
3054 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3055 tree fn, basic_block bb)
3056 {
3057 tree parms;
3058 size_t i;
3059 tree p;
3060 tree vars = NULL_TREE;
3061 tree static_chain = gimple_call_chain (stmt);
3062
3063 /* Figure out what the parameters are. */
3064 parms = DECL_ARGUMENTS (fn);
3065
3066 /* Loop through the parameter declarations, replacing each with an
3067 equivalent VAR_DECL, appropriately initialized. */
3068 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3069 {
3070 tree val;
3071 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3072 setup_one_parameter (id, p, val, fn, bb, &vars);
3073 }
3074 /* After remapping parameters remap their types. This has to be done
3075 in a second loop over all parameters to appropriately remap
3076 variable sized arrays when the size is specified in a
3077 parameter following the array. */
3078 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3079 {
3080 tree *varp = id->decl_map->get (p);
3081 if (varp
3082 && TREE_CODE (*varp) == VAR_DECL)
3083 {
3084 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3085 ? ssa_default_def (id->src_cfun, p) : NULL);
3086 tree var = *varp;
3087 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3088 /* Also remap the default definition if it was remapped
3089 to the default definition of the parameter replacement
3090 by the parameter setup. */
3091 if (def)
3092 {
3093 tree *defp = id->decl_map->get (def);
3094 if (defp
3095 && TREE_CODE (*defp) == SSA_NAME
3096 && SSA_NAME_VAR (*defp) == var)
3097 TREE_TYPE (*defp) = TREE_TYPE (var);
3098 }
3099 }
3100 }
3101
3102 /* Initialize the static chain. */
3103 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3104 gcc_assert (fn != current_function_decl);
3105 if (p)
3106 {
3107 /* No static chain? Seems like a bug in tree-nested.c. */
3108 gcc_assert (static_chain);
3109
3110 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3111 }
3112
3113 declare_inline_vars (id->block, vars);
3114 }
3115
3116
3117 /* Declare a return variable to replace the RESULT_DECL for the
3118 function we are calling. An appropriate DECL_STMT is returned.
3119 The USE_STMT is filled to contain a use of the declaration to
3120 indicate the return value of the function.
3121
3122 RETURN_SLOT, if non-null, is the place where the result is stored. It
3123 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3124 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3125
3126 The return value is a (possibly null) value that holds the result
3127 as seen by the caller. */
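/* Hedged illustration (not an exhaustive statement of the conditions below):
   when inlining  x = f ()  and x is a non-SSA local whose address is not
   taken and whose type matches the callee's return type, x itself is reused
   as the return variable; otherwise a fresh VAR_DECL is created and the
   returned use expression refers to that temporary.  */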
3128
3129 static tree
3130 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3131 basic_block entry_bb)
3132 {
3133 tree callee = id->src_fn;
3134 tree result = DECL_RESULT (callee);
3135 tree callee_type = TREE_TYPE (result);
3136 tree caller_type;
3137 tree var, use;
3138
3139 /* Handle type-mismatches in the function declaration return type
3140 vs. the call expression. */
3141 if (modify_dest)
3142 caller_type = TREE_TYPE (modify_dest);
3143 else
3144 caller_type = TREE_TYPE (TREE_TYPE (callee));
3145
3146 /* We don't need to do anything for functions that don't return anything. */
3147 if (VOID_TYPE_P (callee_type))
3148 return NULL_TREE;
3149
3150 /* If there was a return slot, then the return value is the
3151 dereferenced address of that object. */
3152 if (return_slot)
3153 {
3154 /* The front end shouldn't have used both return_slot and
3155 a modify expression. */
3156 gcc_assert (!modify_dest);
3157 if (DECL_BY_REFERENCE (result))
3158 {
3159 tree return_slot_addr = build_fold_addr_expr (return_slot);
3160 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3161
3162 /* We are going to construct *&return_slot and we can't do that
3163 for variables believed not to be addressable.
3164
3165 FIXME: This check can possibly trigger, because values returned
3166 via the return slot optimization are not believed to have their
3167 address taken by alias analysis. */
3168 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3169 var = return_slot_addr;
3170 }
3171 else
3172 {
3173 var = return_slot;
3174 gcc_assert (TREE_CODE (var) != SSA_NAME);
3175 if (TREE_ADDRESSABLE (result))
3176 mark_addressable (var);
3177 }
3178 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3179 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3180 && !DECL_GIMPLE_REG_P (result)
3181 && DECL_P (var))
3182 DECL_GIMPLE_REG_P (var) = 0;
3183 use = NULL;
3184 goto done;
3185 }
3186
3187 /* All types requiring non-trivial constructors should have been handled. */
3188 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3189
3190 /* Attempt to avoid creating a new temporary variable. */
3191 if (modify_dest
3192 && TREE_CODE (modify_dest) != SSA_NAME)
3193 {
3194 bool use_it = false;
3195
3196 /* We can't use MODIFY_DEST if there's type promotion involved. */
3197 if (!useless_type_conversion_p (callee_type, caller_type))
3198 use_it = false;
3199
3200 /* ??? If we're assigning to a variable sized type, then we must
3201 reuse the destination variable, because we've no good way to
3202 create variable sized temporaries at this point. */
3203 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3204 use_it = true;
3205
3206 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3207 reuse it as the result of the call directly. Don't do this if
3208 it would promote MODIFY_DEST to addressable. */
3209 else if (TREE_ADDRESSABLE (result))
3210 use_it = false;
3211 else
3212 {
3213 tree base_m = get_base_address (modify_dest);
3214
3215 /* If the base isn't a decl, then it's a pointer, and we don't
3216 know where that's going to go. */
3217 if (!DECL_P (base_m))
3218 use_it = false;
3219 else if (is_global_var (base_m))
3220 use_it = false;
3221 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3222 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3223 && !DECL_GIMPLE_REG_P (result)
3224 && DECL_GIMPLE_REG_P (base_m))
3225 use_it = false;
3226 else if (!TREE_ADDRESSABLE (base_m))
3227 use_it = true;
3228 }
3229
3230 if (use_it)
3231 {
3232 var = modify_dest;
3233 use = NULL;
3234 goto done;
3235 }
3236 }
3237
3238 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3239
3240 var = copy_result_decl_to_var (result, id);
3241 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3242
3243 /* Do not have the rest of GCC warn about this variable as it should
3244 not be visible to the user. */
3245 TREE_NO_WARNING (var) = 1;
3246
3247 declare_inline_vars (id->block, var);
3248
3249 /* Build the use expr. If the return type of the function was
3250 promoted, convert it back to the expected type. */
3251 use = var;
3252 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3253 {
3254 /* If we can match up types by promotion/demotion do so. */
3255 if (fold_convertible_p (caller_type, var))
3256 use = fold_convert (caller_type, var);
3257 else
3258 {
3259 /* ??? For valid programs we should not end up here.
3260 Still if we end up with truly mismatched types here, fall back
3261 to using a MEM_REF to not leak invalid GIMPLE to the following
3262 passes. */
3263 /* Prevent var from being written into SSA form. */
3264 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3265 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3266 DECL_GIMPLE_REG_P (var) = false;
3267 else if (is_gimple_reg_type (TREE_TYPE (var)))
3268 TREE_ADDRESSABLE (var) = true;
3269 use = fold_build2 (MEM_REF, caller_type,
3270 build_fold_addr_expr (var),
3271 build_int_cst (ptr_type_node, 0));
3272 }
3273 }
3274
3275 STRIP_USELESS_TYPE_CONVERSION (use);
3276
3277 if (DECL_BY_REFERENCE (result))
3278 {
3279 TREE_ADDRESSABLE (var) = 1;
3280 var = build_fold_addr_expr (var);
3281 }
3282
3283 done:
3284 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3285 way, when the RESULT_DECL is encountered, it will be
3286 automatically replaced by the VAR_DECL.
3287
3288 When returning by reference, ensure that RESULT_DECL remaps to
3289 gimple_val. */
3290 if (DECL_BY_REFERENCE (result)
3291 && !is_gimple_val (var))
3292 {
3293 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3294 insert_decl_map (id, result, temp);
3295 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3296 its default_def SSA_NAME. */
3297 if (gimple_in_ssa_p (id->src_cfun)
3298 && is_gimple_reg (result))
3299 {
3300 temp = make_ssa_name (temp, NULL);
3301 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3302 }
3303 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3304 }
3305 else
3306 insert_decl_map (id, result, var);
3307
3308 /* Remember this so we can ignore it in remap_decls. */
3309 id->retvar = var;
3310
3311 return use;
3312 }
3313
3314 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3315 to a local label. */
3316
3317 static tree
3318 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3319 {
3320 tree node = *nodep;
3321 tree fn = (tree) fnp;
3322
3323 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3324 return node;
3325
3326 if (TYPE_P (node))
3327 *walk_subtrees = 0;
3328
3329 return NULL_TREE;
3330 }
3331
3332 /* Determine if the function can be copied. If so, return NULL. If
3333 not, return a string describing the reason for failure. */
3334
3335 static const char *
3336 copy_forbidden (struct function *fun, tree fndecl)
3337 {
3338 const char *reason = fun->cannot_be_copied_reason;
3339 tree decl;
3340 unsigned ix;
3341
3342 /* Only examine the function once. */
3343 if (fun->cannot_be_copied_set)
3344 return reason;
3345
3346 /* We cannot copy a function that receives a non-local goto
3347 because we cannot remap the destination label used in the
3348 function that is performing the non-local goto. */
3349 /* ??? Actually, this should be possible, if we work at it.
3350 No doubt there's just a handful of places that simply
3351 assume it doesn't happen and don't substitute properly. */
3352 if (fun->has_nonlocal_label)
3353 {
3354 reason = G_("function %q+F can never be copied "
3355 "because it receives a non-local goto");
3356 goto fail;
3357 }
3358
3359 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3360 if (TREE_CODE (decl) == VAR_DECL
3361 && TREE_STATIC (decl)
3362 && !DECL_EXTERNAL (decl)
3363 && DECL_INITIAL (decl)
3364 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3365 has_label_address_in_static_1,
3366 fndecl))
3367 {
3368 reason = G_("function %q+F can never be copied because it saves "
3369 "address of local label in a static variable");
3370 goto fail;
3371 }
3372
3373 fail:
3374 fun->cannot_be_copied_reason = reason;
3375 fun->cannot_be_copied_set = true;
3376 return reason;
3377 }
3378
3379
3380 static const char *inline_forbidden_reason;
3381
3382 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3383 iff a function cannot be inlined. Also sets the reason why. */
3384
3385 static tree
3386 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3387 struct walk_stmt_info *wip)
3388 {
3389 tree fn = (tree) wip->info;
3390 tree t;
3391 gimple stmt = gsi_stmt (*gsi);
3392
3393 switch (gimple_code (stmt))
3394 {
3395 case GIMPLE_CALL:
3396 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3397 this may change the program's memory overhead drastically when the
3398 function using alloca is called in a loop. In the GCC version present in
3399 SPEC2000, inlining into schedule_block caused it to require 2GB of
3400 RAM instead of 256MB. Don't do so for alloca calls emitted for
3401 VLA objects as those can't cause unbounded growth (they're always
3402 wrapped inside stack_save/stack_restore regions). */
3403 if (gimple_alloca_call_p (stmt)
3404 && !gimple_call_alloca_for_var_p (stmt)
3405 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3406 {
3407 inline_forbidden_reason
3408 = G_("function %q+F can never be inlined because it uses "
3409 "alloca (override using the always_inline attribute)");
3410 *handled_ops_p = true;
3411 return fn;
3412 }
3413
3414 t = gimple_call_fndecl (stmt);
3415 if (t == NULL_TREE)
3416 break;
3417
3418 /* We cannot inline functions that call setjmp. */
3419 if (setjmp_call_p (t))
3420 {
3421 inline_forbidden_reason
3422 = G_("function %q+F can never be inlined because it uses setjmp");
3423 *handled_ops_p = true;
3424 return t;
3425 }
3426
3427 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3428 switch (DECL_FUNCTION_CODE (t))
3429 {
3430 /* We cannot inline functions that take a variable number of
3431 arguments. */
3432 case BUILT_IN_VA_START:
3433 case BUILT_IN_NEXT_ARG:
3434 case BUILT_IN_VA_END:
3435 inline_forbidden_reason
3436 = G_("function %q+F can never be inlined because it "
3437 "uses variable argument lists");
3438 *handled_ops_p = true;
3439 return t;
3440
3441 case BUILT_IN_LONGJMP:
3442 /* We can't inline functions that call __builtin_longjmp at
3443 all. The non-local goto machinery really requires the
3444 destination be in a different function. If we allow the
3445 function calling __builtin_longjmp to be inlined into the
3446 function calling __builtin_setjmp, Things will Go Awry. */
3447 inline_forbidden_reason
3448 = G_("function %q+F can never be inlined because "
3449 "it uses setjmp-longjmp exception handling");
3450 *handled_ops_p = true;
3451 return t;
3452
3453 case BUILT_IN_NONLOCAL_GOTO:
3454 /* Similarly. */
3455 inline_forbidden_reason
3456 = G_("function %q+F can never be inlined because "
3457 "it uses non-local goto");
3458 *handled_ops_p = true;
3459 return t;
3460
3461 case BUILT_IN_RETURN:
3462 case BUILT_IN_APPLY_ARGS:
3463 /* If a __builtin_apply_args caller would be inlined,
3464 it would be saving arguments of the function it has
3465 been inlined into. Similarly __builtin_return would
3466 return from the function the call has been inlined into. */
3467 inline_forbidden_reason
3468 = G_("function %q+F can never be inlined because "
3469 "it uses __builtin_return or __builtin_apply_args");
3470 *handled_ops_p = true;
3471 return t;
3472
3473 default:
3474 break;
3475 }
3476 break;
3477
3478 case GIMPLE_GOTO:
3479 t = gimple_goto_dest (stmt);
3480
3481 /* We will not inline a function which uses computed goto. The
3482 addresses of its local labels, which may be tucked into
3483 global storage, are of course not constant across
3484 instantiations, which causes unexpected behavior. */
3485 if (TREE_CODE (t) != LABEL_DECL)
3486 {
3487 inline_forbidden_reason
3488 = G_("function %q+F can never be inlined "
3489 "because it contains a computed goto");
3490 *handled_ops_p = true;
3491 return t;
3492 }
3493 break;
3494
3495 default:
3496 break;
3497 }
3498
3499 *handled_ops_p = false;
3500 return NULL_TREE;
3501 }
3502
3503 /* Return true if FNDECL is a function that cannot be inlined into
3504 another one. */
3505
3506 static bool
3507 inline_forbidden_p (tree fndecl)
3508 {
3509 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3510 struct walk_stmt_info wi;
3511 basic_block bb;
3512 bool forbidden_p = false;
3513
3514 /* First check for shared reasons not to copy the code. */
3515 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3516 if (inline_forbidden_reason != NULL)
3517 return true;
3518
3519 /* Next, walk the statements of the function looking for
3520 constructs we can't handle, or that are non-optimal for inlining. */
3521 hash_set<tree> visited_nodes;
3522 memset (&wi, 0, sizeof (wi));
3523 wi.info = (void *) fndecl;
3524 wi.pset = &visited_nodes;
3525
3526 FOR_EACH_BB_FN (bb, fun)
3527 {
3528 gimple ret;
3529 gimple_seq seq = bb_seq (bb);
3530 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3531 forbidden_p = (ret != NULL);
3532 if (forbidden_p)
3533 break;
3534 }
3535
3536 return forbidden_p;
3537 }
3538 \f
3539 /* Return false if the function FNDECL cannot be inlined on account of its
3540 attributes, true otherwise. */
3541 static bool
3542 function_attribute_inlinable_p (const_tree fndecl)
3543 {
3544 if (targetm.attribute_table)
3545 {
3546 const_tree a;
3547
3548 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3549 {
3550 const_tree name = TREE_PURPOSE (a);
3551 int i;
3552
3553 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3554 if (is_attribute_p (targetm.attribute_table[i].name, name))
3555 return targetm.function_attribute_inlinable_p (fndecl);
3556 }
3557 }
3558
3559 return true;
3560 }
3561
3562 /* Returns nonzero if FN is a function that does not have any
3563 fundamental inline blocking properties. */
3564
3565 bool
3566 tree_inlinable_function_p (tree fn)
3567 {
3568 bool inlinable = true;
3569 bool do_warning;
3570 tree always_inline;
3571
3572 /* If we've already decided this function shouldn't be inlined,
3573 there's no need to check again. */
3574 if (DECL_UNINLINABLE (fn))
3575 return false;
3576
3577 /* We only warn for functions declared `inline' by the user. */
3578 do_warning = (warn_inline
3579 && DECL_DECLARED_INLINE_P (fn)
3580 && !DECL_NO_INLINE_WARNING_P (fn)
3581 && !DECL_IN_SYSTEM_HEADER (fn));
3582
3583 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3584
3585 if (flag_no_inline
3586 && always_inline == NULL)
3587 {
3588 if (do_warning)
3589 warning (OPT_Winline, "function %q+F can never be inlined because it "
3590 "is suppressed using -fno-inline", fn);
3591 inlinable = false;
3592 }
3593
3594 else if (!function_attribute_inlinable_p (fn))
3595 {
3596 if (do_warning)
3597 warning (OPT_Winline, "function %q+F can never be inlined because it "
3598 "uses attributes conflicting with inlining", fn);
3599 inlinable = false;
3600 }
3601
3602 else if (inline_forbidden_p (fn))
3603 {
3604 /* See if we should warn about uninlinable functions. Previously,
3605 some of these warnings would be issued while trying to expand
3606 the function inline, but that would cause multiple warnings
3607 about functions that would for example call alloca. But since
3608 this is a property of the function, just one warning is enough.
3609 As a bonus we can now give more details about the reason why a
3610 function is not inlinable. */
3611 if (always_inline)
3612 error (inline_forbidden_reason, fn);
3613 else if (do_warning)
3614 warning (OPT_Winline, inline_forbidden_reason, fn);
3615
3616 inlinable = false;
3617 }
3618
3619 /* Squirrel away the result so that we don't have to check again. */
3620 DECL_UNINLINABLE (fn) = !inlinable;
3621
3622 return inlinable;
3623 }
3624
3625 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3626 word size, take a possible memcpy call into account, and return the
3627 cost based on whether we are optimizing for size or speed according to SPEED_P. */
3628
3629 int
3630 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3631 {
3632 HOST_WIDE_INT size;
3633
3634 gcc_assert (!VOID_TYPE_P (type));
3635
3636 if (TREE_CODE (type) == VECTOR_TYPE)
3637 {
3638 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3639 enum machine_mode simd
3640 = targetm.vectorize.preferred_simd_mode (inner);
3641 int simd_mode_size = GET_MODE_SIZE (simd);
3642 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3643 / simd_mode_size);
3644 }
3645
3646 size = int_size_in_bytes (type);
3647
3648 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3649 /* Cost of a memcpy call, 3 arguments and the call. */
3650 return 4;
3651 else
3652 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3653 }
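/* Illustrative note (not part of the original sources; the concrete numbers
   below are assumptions, since MOVE_MAX_PIECES and MOVE_RATIO are target
   dependent): on a target with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p)
   == 4, a 24-byte struct is charged (24 + 8 - 1) / 8 = 3, while a 1024-byte
   struct exceeds the 8 * 4 = 32 byte threshold and gets the flat memcpy cost
   of 4.  For a vector type whose machine mode is 32 bytes wide on a target
   whose preferred SIMD mode for the element mode is 16 bytes wide, the vector
   branch above yields (32 + 16 - 1) / 16 = 2.  */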
3654
3655 /* Returns the cost of operation CODE, according to WEIGHTS. */
3656
3657 static int
3658 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3659 tree op1 ATTRIBUTE_UNUSED, tree op2)
3660 {
3661 switch (code)
3662 {
3663 /* These are "free" conversions, or their presumed cost
3664 is folded into other operations. */
3665 case RANGE_EXPR:
3666 CASE_CONVERT:
3667 case COMPLEX_EXPR:
3668 case PAREN_EXPR:
3669 case VIEW_CONVERT_EXPR:
3670 return 0;
3671
3672 /* Assign cost of 1 to usual operations.
3673 ??? We may consider mapping RTL costs to this. */
3674 case COND_EXPR:
3675 case VEC_COND_EXPR:
3676 case VEC_PERM_EXPR:
3677
3678 case PLUS_EXPR:
3679 case POINTER_PLUS_EXPR:
3680 case MINUS_EXPR:
3681 case MULT_EXPR:
3682 case MULT_HIGHPART_EXPR:
3683 case FMA_EXPR:
3684
3685 case ADDR_SPACE_CONVERT_EXPR:
3686 case FIXED_CONVERT_EXPR:
3687 case FIX_TRUNC_EXPR:
3688
3689 case NEGATE_EXPR:
3690 case FLOAT_EXPR:
3691 case MIN_EXPR:
3692 case MAX_EXPR:
3693 case ABS_EXPR:
3694
3695 case LSHIFT_EXPR:
3696 case RSHIFT_EXPR:
3697 case LROTATE_EXPR:
3698 case RROTATE_EXPR:
3699 case VEC_RSHIFT_EXPR:
3700
3701 case BIT_IOR_EXPR:
3702 case BIT_XOR_EXPR:
3703 case BIT_AND_EXPR:
3704 case BIT_NOT_EXPR:
3705
3706 case TRUTH_ANDIF_EXPR:
3707 case TRUTH_ORIF_EXPR:
3708 case TRUTH_AND_EXPR:
3709 case TRUTH_OR_EXPR:
3710 case TRUTH_XOR_EXPR:
3711 case TRUTH_NOT_EXPR:
3712
3713 case LT_EXPR:
3714 case LE_EXPR:
3715 case GT_EXPR:
3716 case GE_EXPR:
3717 case EQ_EXPR:
3718 case NE_EXPR:
3719 case ORDERED_EXPR:
3720 case UNORDERED_EXPR:
3721
3722 case UNLT_EXPR:
3723 case UNLE_EXPR:
3724 case UNGT_EXPR:
3725 case UNGE_EXPR:
3726 case UNEQ_EXPR:
3727 case LTGT_EXPR:
3728
3729 case CONJ_EXPR:
3730
3731 case PREDECREMENT_EXPR:
3732 case PREINCREMENT_EXPR:
3733 case POSTDECREMENT_EXPR:
3734 case POSTINCREMENT_EXPR:
3735
3736 case REALIGN_LOAD_EXPR:
3737
3738 case REDUC_MAX_EXPR:
3739 case REDUC_MIN_EXPR:
3740 case REDUC_PLUS_EXPR:
3741 case WIDEN_SUM_EXPR:
3742 case WIDEN_MULT_EXPR:
3743 case DOT_PROD_EXPR:
3744 case SAD_EXPR:
3745 case WIDEN_MULT_PLUS_EXPR:
3746 case WIDEN_MULT_MINUS_EXPR:
3747 case WIDEN_LSHIFT_EXPR:
3748
3749 case VEC_WIDEN_MULT_HI_EXPR:
3750 case VEC_WIDEN_MULT_LO_EXPR:
3751 case VEC_WIDEN_MULT_EVEN_EXPR:
3752 case VEC_WIDEN_MULT_ODD_EXPR:
3753 case VEC_UNPACK_HI_EXPR:
3754 case VEC_UNPACK_LO_EXPR:
3755 case VEC_UNPACK_FLOAT_HI_EXPR:
3756 case VEC_UNPACK_FLOAT_LO_EXPR:
3757 case VEC_PACK_TRUNC_EXPR:
3758 case VEC_PACK_SAT_EXPR:
3759 case VEC_PACK_FIX_TRUNC_EXPR:
3760 case VEC_WIDEN_LSHIFT_HI_EXPR:
3761 case VEC_WIDEN_LSHIFT_LO_EXPR:
3762
3763 return 1;
3764
3765 /* A few special cases of expensive operations. This is useful
3766 to avoid inlining functions that have too many of these. */
3767 case TRUNC_DIV_EXPR:
3768 case CEIL_DIV_EXPR:
3769 case FLOOR_DIV_EXPR:
3770 case ROUND_DIV_EXPR:
3771 case EXACT_DIV_EXPR:
3772 case TRUNC_MOD_EXPR:
3773 case CEIL_MOD_EXPR:
3774 case FLOOR_MOD_EXPR:
3775 case ROUND_MOD_EXPR:
3776 case RDIV_EXPR:
3777 if (TREE_CODE (op2) != INTEGER_CST)
3778 return weights->div_mod_cost;
3779 return 1;
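/* Illustrative note (not part of the original sources): with the weights set
   up in init_inline_once below, a division or modulus by a value that is not
   an INTEGER_CST is charged div_mod_cost (10 for the time-based weights, 1
   for the size-based ones), whereas e.g. 'a / 8' costs 1 like any other
   simple arithmetic operation.  */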
3780
3781 default:
3782 /* We expect a copy assignment with no operator. */
3783 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3784 return 0;
3785 }
3786 }
3787
3788
3789 /* Estimate number of instructions that will be created by expanding
3790 the statements in the statement sequence STMTS.
3791 WEIGHTS contains weights attributed to various constructs. */
3792
3793 static int
3794 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3795 {
3796 int cost;
3797 gimple_stmt_iterator gsi;
3798
3799 cost = 0;
3800 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3801 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3802
3803 return cost;
3804 }
3805
3806
3807 /* Estimate number of instructions that will be created by expanding STMT.
3808 WEIGHTS contains weights attributed to various constructs. */
3809
3810 int
3811 estimate_num_insns (gimple stmt, eni_weights *weights)
3812 {
3813 unsigned cost, i;
3814 enum gimple_code code = gimple_code (stmt);
3815 tree lhs;
3816 tree rhs;
3817
3818 switch (code)
3819 {
3820 case GIMPLE_ASSIGN:
3821 /* Try to estimate the cost of assignments. We have two cases to
3822 deal with:
3823 1) Simple assignments to registers;
3824 2) Stores to things that must live in memory. This includes
3825 "normal" stores to scalars, but also assignments of large
3826 structures, or constructors of big arrays;
3827
3828 Let us look at both cases, assuming we have "a = b + C":
3829 <GIMPLE_ASSIGN <var_decl "a">
3830 <plus_expr <var_decl "b"> <constant C>>
3831 If "a" is a GIMPLE register, the assignment to it is free on almost
3832 any target, because "a" usually ends up in a real register. Hence
3833 the only cost of this expression comes from the PLUS_EXPR, and we
3834 can ignore the GIMPLE_ASSIGN.
3835 If "a" is not a GIMPLE register, the assignment to "a" will most
3836 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3837 of moving something into "a", which we compute using the function
3838 estimate_move_cost. */
3839 if (gimple_clobber_p (stmt))
3840 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3841
3842 lhs = gimple_assign_lhs (stmt);
3843 rhs = gimple_assign_rhs1 (stmt);
3844
3845 cost = 0;
3846
3847 /* Account for the cost of moving to / from memory. */
3848 if (gimple_store_p (stmt))
3849 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3850 if (gimple_assign_load_p (stmt))
3851 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3852
3853 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3854 gimple_assign_rhs1 (stmt),
3855 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3856 == GIMPLE_BINARY_RHS
3857 ? gimple_assign_rhs2 (stmt) : NULL);
3858 break;
3859
3860 case GIMPLE_COND:
3861 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3862 gimple_op (stmt, 0),
3863 gimple_op (stmt, 1));
3864 break;
3865
3866 case GIMPLE_SWITCH:
3867 /* Take into account cost of the switch + guess 2 conditional jumps for
3868 each case label.
3869
3870 TODO: once the switch expansion logic is sufficiently separated, we can
3871 do a better job of estimating the cost of the switch. */
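/* Purely as an illustration (not part of the original sources): a switch
   with 16 labels is estimated at floor_log2 (16) * 2 = 8 under the
   time-based weights and at 16 * 2 = 32 under the size-based ones.  */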
3872 if (weights->time_based)
3873 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3874 else
3875 cost = gimple_switch_num_labels (stmt) * 2;
3876 break;
3877
3878 case GIMPLE_CALL:
3879 {
3880 tree decl;
3881
3882 if (gimple_call_internal_p (stmt))
3883 return 0;
3884 else if ((decl = gimple_call_fndecl (stmt))
3885 && DECL_BUILT_IN (decl))
3886 {
3887 /* Do not special case builtins where we see the body.
3888 This just confuses the inliner.
3889 struct cgraph_node *node;
3890 if (!(node = cgraph_node::get (decl))
3891 || node->definition)
3892 ;
3893 /* For builtins that are likely expanded to nothing or
3894 inlined, do not account operand costs. */
3895 else if (is_simple_builtin (decl))
3896 return 0;
3897 else if (is_inexpensive_builtin (decl))
3898 return weights->target_builtin_call_cost;
3899 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3900 {
3901 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3902 specialize the cheap expansion we do here.
3903 ??? This asks for a more general solution. */
3904 switch (DECL_FUNCTION_CODE (decl))
3905 {
3906 case BUILT_IN_POW:
3907 case BUILT_IN_POWF:
3908 case BUILT_IN_POWL:
3909 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3910 && REAL_VALUES_EQUAL
3911 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3912 return estimate_operator_cost
3913 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
3914 gimple_call_arg (stmt, 0));
3915 break;
3916
3917 default:
3918 break;
3919 }
3920 }
3921 }
3922
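/* Illustrative note (not part of the original sources; assumes scalar moves
   cost 1 on the target): with eni_time_weights a direct call 'r = f (x, y)'
   with int operands is estimated at roughly 10 (call_cost) + 1 + 1 + 1
   (moves for the LHS and both arguments) = 13, while an indirect call
   through a function pointer starts from indirect_call_cost == 15.  */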
3923 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3924 if (gimple_call_lhs (stmt))
3925 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
3926 weights->time_based);
3927 for (i = 0; i < gimple_call_num_args (stmt); i++)
3928 {
3929 tree arg = gimple_call_arg (stmt, i);
3930 cost += estimate_move_cost (TREE_TYPE (arg),
3931 weights->time_based);
3932 }
3933 break;
3934 }
3935
3936 case GIMPLE_RETURN:
3937 return weights->return_cost;
3938
3939 case GIMPLE_GOTO:
3940 case GIMPLE_LABEL:
3941 case GIMPLE_NOP:
3942 case GIMPLE_PHI:
3943 case GIMPLE_PREDICT:
3944 case GIMPLE_DEBUG:
3945 return 0;
3946
3947 case GIMPLE_ASM:
3948 {
3949 int count = asm_str_count (gimple_asm_string (stmt));
3950 /* 1000 means infinity. This avoids overflows later
3951 with very long asm statements. */
3952 if (count > 1000)
3953 count = 1000;
3954 return count;
3955 }
3956
3957 case GIMPLE_RESX:
3958 /* This is either going to be an external function call with one
3959 argument, or two register copy statements plus a goto. */
3960 return 2;
3961
3962 case GIMPLE_EH_DISPATCH:
3963 /* ??? This is going to turn into a switch statement. Ideally
3964 we'd have a look at the eh region and estimate the number of
3965 edges involved. */
3966 return 10;
3967
3968 case GIMPLE_BIND:
3969 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3970
3971 case GIMPLE_EH_FILTER:
3972 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3973
3974 case GIMPLE_CATCH:
3975 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3976
3977 case GIMPLE_TRY:
3978 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3979 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3980
3981 /* OpenMP directives are generally very expensive. */
3982
3983 case GIMPLE_OMP_RETURN:
3984 case GIMPLE_OMP_SECTIONS_SWITCH:
3985 case GIMPLE_OMP_ATOMIC_STORE:
3986 case GIMPLE_OMP_CONTINUE:
3987 /* ...except these, which are cheap. */
3988 return 0;
3989
3990 case GIMPLE_OMP_ATOMIC_LOAD:
3991 return weights->omp_cost;
3992
3993 case GIMPLE_OMP_FOR:
3994 return (weights->omp_cost
3995 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3996 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3997
3998 case GIMPLE_OMP_PARALLEL:
3999 case GIMPLE_OMP_TASK:
4000 case GIMPLE_OMP_CRITICAL:
4001 case GIMPLE_OMP_MASTER:
4002 case GIMPLE_OMP_TASKGROUP:
4003 case GIMPLE_OMP_ORDERED:
4004 case GIMPLE_OMP_SECTION:
4005 case GIMPLE_OMP_SECTIONS:
4006 case GIMPLE_OMP_SINGLE:
4007 case GIMPLE_OMP_TARGET:
4008 case GIMPLE_OMP_TEAMS:
4009 return (weights->omp_cost
4010 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4011
4012 case GIMPLE_TRANSACTION:
4013 return (weights->tm_cost
4014 + estimate_num_insns_seq (gimple_transaction_body (stmt),
4015 weights));
4016
4017 default:
4018 gcc_unreachable ();
4019 }
4020
4021 return cost;
4022 }
4023
4024 /* Estimate number of instructions that will be created by expanding
4025 function FNDECL. WEIGHTS contains weights attributed to various
4026 constructs. */
4027
4028 int
4029 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4030 {
4031 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4032 gimple_stmt_iterator bsi;
4033 basic_block bb;
4034 int n = 0;
4035
4036 gcc_assert (my_function && my_function->cfg);
4037 FOR_EACH_BB_FN (bb, my_function)
4038 {
4039 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4040 n += estimate_num_insns (gsi_stmt (bsi), weights);
4041 }
4042
4043 return n;
4044 }
4045
4046
4047 /* Initializes weights used by estimate_num_insns. */
4048
4049 void
4050 init_inline_once (void)
4051 {
4052 eni_size_weights.call_cost = 1;
4053 eni_size_weights.indirect_call_cost = 3;
4054 eni_size_weights.target_builtin_call_cost = 1;
4055 eni_size_weights.div_mod_cost = 1;
4056 eni_size_weights.omp_cost = 40;
4057 eni_size_weights.tm_cost = 10;
4058 eni_size_weights.time_based = false;
4059 eni_size_weights.return_cost = 1;
4060
4061 /* Estimating time for call is difficult, since we have no idea what the
4062 called function does. In the current uses of eni_time_weights,
4063 underestimating the cost does less harm than overestimating it, so
4064 we choose a rather small value here. */
4065 eni_time_weights.call_cost = 10;
4066 eni_time_weights.indirect_call_cost = 15;
4067 eni_time_weights.target_builtin_call_cost = 1;
4068 eni_time_weights.div_mod_cost = 10;
4069 eni_time_weights.omp_cost = 40;
4070 eni_time_weights.tm_cost = 40;
4071 eni_time_weights.time_based = true;
4072 eni_time_weights.return_cost = 2;
4073 }
4074
4075 /* Estimate the number of instructions in a gimple_seq. */
4076
4077 int
4078 count_insns_seq (gimple_seq seq, eni_weights *weights)
4079 {
4080 gimple_stmt_iterator gsi;
4081 int n = 0;
4082 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4083 n += estimate_num_insns (gsi_stmt (gsi), weights);
4084
4085 return n;
4086 }
4087
4088
4089 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4090
4091 static void
4092 prepend_lexical_block (tree current_block, tree new_block)
4093 {
4094 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4095 BLOCK_SUBBLOCKS (current_block) = new_block;
4096 BLOCK_SUPERCONTEXT (new_block) = current_block;
4097 }
4098
4099 /* Add local variables from CALLEE to CALLER. */
4100
4101 static inline void
4102 add_local_variables (struct function *callee, struct function *caller,
4103 copy_body_data *id)
4104 {
4105 tree var;
4106 unsigned ix;
4107
4108 FOR_EACH_LOCAL_DECL (callee, ix, var)
4109 if (!can_be_nonlocal (var, id))
4110 {
4111 tree new_var = remap_decl (var, id);
4112
4113 /* Remap debug-expressions. */
4114 if (TREE_CODE (new_var) == VAR_DECL
4115 && DECL_HAS_DEBUG_EXPR_P (var)
4116 && new_var != var)
4117 {
4118 tree tem = DECL_DEBUG_EXPR (var);
4119 bool old_regimplify = id->regimplify;
4120 id->remapping_type_depth++;
4121 walk_tree (&tem, copy_tree_body_r, id, NULL);
4122 id->remapping_type_depth--;
4123 id->regimplify = old_regimplify;
4124 SET_DECL_DEBUG_EXPR (new_var, tem);
4125 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4126 }
4127 add_local_decl (caller, new_var);
4128 }
4129 }
4130
4131 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4132
4133 static bool
4134 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4135 {
4136 tree use_retvar;
4137 tree fn;
4138 hash_map<tree, tree> *dst;
4139 hash_map<tree, tree> *st = NULL;
4140 tree return_slot;
4141 tree modify_dest;
4142 location_t saved_location;
4143 struct cgraph_edge *cg_edge;
4144 cgraph_inline_failed_t reason;
4145 basic_block return_block;
4146 edge e;
4147 gimple_stmt_iterator gsi, stmt_gsi;
4148 bool successfully_inlined = FALSE;
4149 bool purge_dead_abnormal_edges;
4150
4151 /* Set input_location here so we get the right instantiation context
4152 if we call instantiate_decl from inlinable_function_p. */
4153 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4154 saved_location = input_location;
4155 input_location = gimple_location (stmt);
4156
4157 /* From here on, we're only interested in CALL_EXPRs. */
4158 if (gimple_code (stmt) != GIMPLE_CALL)
4159 goto egress;
4160
4161 cg_edge = id->dst_node->get_edge (stmt);
4162 gcc_checking_assert (cg_edge);
4163 /* First, see if we can figure out what function is being called.
4164 If we cannot, then there is no hope of inlining the function. */
4165 if (cg_edge->indirect_unknown_callee)
4166 goto egress;
4167 fn = cg_edge->callee->decl;
4168 gcc_checking_assert (fn);
4169
4170 /* If FN is a declaration of a function in a nested scope that was
4171 globally declared inline, we don't set its DECL_INITIAL.
4172 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4173 C++ front-end uses it for cdtors to refer to their internal
4174 declarations, which are not real functions. Fortunately those
4175 don't have trees to be saved, so we can tell by checking their
4176 gimple_body. */
4177 if (!DECL_INITIAL (fn)
4178 && DECL_ABSTRACT_ORIGIN (fn)
4179 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4180 fn = DECL_ABSTRACT_ORIGIN (fn);
4181
4182 /* Don't try to inline functions that are not well-suited to inlining. */
4183 if (cg_edge->inline_failed)
4184 {
4185 reason = cg_edge->inline_failed;
4186 /* If this call was originally indirect, we do not want to emit any
4187 inlining related warnings or sorry messages because there are no
4188 guarantees regarding those. */
4189 if (cg_edge->indirect_inlining_edge)
4190 goto egress;
4191
4192 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4193 /* For extern inline functions that get redefined we have always
4194 silently ignored the always_inline flag. Better behaviour would
4195 be to be able to keep both bodies and use the extern inline body
4196 for inlining, but we can't do that because frontends overwrite
4197 the body. */
4198 && !cg_edge->callee->local.redefined_extern_inline
4199 /* During early inline pass, report only when optimization is
4200 not turned on. */
4201 && (symtab->global_info_ready
4202 || !optimize
4203 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4204 /* PR 20090218-1_0.c. Body can be provided by another module. */
4205 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4206 {
4207 error ("inlining failed in call to always_inline %q+F: %s", fn,
4208 cgraph_inline_failed_string (reason));
4209 error ("called from here");
4210 }
4211 else if (warn_inline
4212 && DECL_DECLARED_INLINE_P (fn)
4213 && !DECL_NO_INLINE_WARNING_P (fn)
4214 && !DECL_IN_SYSTEM_HEADER (fn)
4215 && reason != CIF_UNSPECIFIED
4216 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4217 /* Do not warn about not inlined recursive calls. */
4218 && !cg_edge->recursive_p ()
4219 /* Avoid warnings during early inline pass. */
4220 && symtab->global_info_ready)
4221 {
4222 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4223 fn, _(cgraph_inline_failed_string (reason)));
4224 warning (OPT_Winline, "called from here");
4225 }
4226 goto egress;
4227 }
4228 fn = cg_edge->callee->decl;
4229 cg_edge->callee->get_body ();
4230
4231 #ifdef ENABLE_CHECKING
4232 if (cg_edge->callee->decl != id->dst_node->decl)
4233 cg_edge->callee->verify ();
4234 #endif
4235
4236 /* We will be inlining this callee. */
4237 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4238
4239 /* Update the caller's EH personality. */
4240 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4241 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4242 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4243
4244 /* Split the block holding the GIMPLE_CALL. */
4245 e = split_block (bb, stmt);
4246 bb = e->src;
4247 return_block = e->dest;
4248 remove_edge (e);
4249
4250 /* split_block splits after the statement; work around this by
4251 moving the call into the second block manually. Not pretty,
4252 but seems easier than doing the CFG manipulation by hand
4253 when the GIMPLE_CALL is in the last statement of BB. */
4254 stmt_gsi = gsi_last_bb (bb);
4255 gsi_remove (&stmt_gsi, false);
4256
4257 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4258 been the source of abnormal edges. In this case, schedule
4259 the removal of dead abnormal edges. */
4260 gsi = gsi_start_bb (return_block);
4261 if (gsi_end_p (gsi))
4262 {
4263 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4264 purge_dead_abnormal_edges = true;
4265 }
4266 else
4267 {
4268 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4269 purge_dead_abnormal_edges = false;
4270 }
4271
4272 stmt_gsi = gsi_start_bb (return_block);
4273
4274 /* Build a block containing code to initialize the arguments, the
4275 actual inline expansion of the body, and a label for the return
4276 statements within the function to jump to. The type of the
4277 statement expression is the return type of the function call.
4278 ??? If the call does not have an associated block then we will
4279 remap all callee blocks to NULL, effectively dropping most of
4280 its debug information. This should only happen for calls to
4281 artificial decls inserted by the compiler itself. We need to
4282 either link the inlined blocks into the caller block tree or
4283 not refer to them in any way to not break GC for locations. */
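/* Illustrative sketch (not part of the original sources; 'param' and
   'retval' below are only placeholder names): for a call site
   'a_1 = foo (x_2)' the code below roughly produces

     <bb before the call>:
       param = x_2;                    <- initialize_inlined_parameters
       ... copied body of foo, with return statements rewritten into
           'retval = ...; goto <return_block>;' ...
     <return_block>:
       a_1 = retval;                   <- replaces the original GIMPLE_CALL

   where 'retval' is the variable created by declare_return_variable.  */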
4284 if (gimple_block (stmt))
4285 {
4286 id->block = make_node (BLOCK);
4287 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4288 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4289 prepend_lexical_block (gimple_block (stmt), id->block);
4290 }
4291
4292 /* Local declarations will be replaced by their equivalents in this
4293 map. */
4294 st = id->decl_map;
4295 id->decl_map = new hash_map<tree, tree>;
4296 dst = id->debug_map;
4297 id->debug_map = NULL;
4298
4299 /* Record the function we are about to inline. */
4300 id->src_fn = fn;
4301 id->src_node = cg_edge->callee;
4302 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4303 id->gimple_call = stmt;
4304
4305 gcc_assert (!id->src_cfun->after_inlining);
4306
4307 id->entry_bb = bb;
4308 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4309 {
4310 gimple_stmt_iterator si = gsi_last_bb (bb);
4311 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4312 NOT_TAKEN),
4313 GSI_NEW_STMT);
4314 }
4315 initialize_inlined_parameters (id, stmt, fn, bb);
4316
4317 if (DECL_INITIAL (fn))
4318 {
4319 if (gimple_block (stmt))
4320 {
4321 tree *var;
4322
4323 prepend_lexical_block (id->block,
4324 remap_blocks (DECL_INITIAL (fn), id));
4325 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4326 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4327 == NULL_TREE));
4328 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4329 otherwise in DWARF the DW_TAG_formal_parameter DIEs will not be children of
4330 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4331 under it. The parameters can then be evaluated in the debugger,
4332 but don't show up in backtraces. */
4333 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4334 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4335 {
4336 tree v = *var;
4337 *var = TREE_CHAIN (v);
4338 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4339 BLOCK_VARS (id->block) = v;
4340 }
4341 else
4342 var = &TREE_CHAIN (*var);
4343 }
4344 else
4345 remap_blocks_to_null (DECL_INITIAL (fn), id);
4346 }
4347
4348 /* Return statements in the function body will be replaced by jumps
4349 to the RET_LABEL. */
4350 gcc_assert (DECL_INITIAL (fn));
4351 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4352
4353 /* Find the LHS to which the result of this call is assigned. */
4354 return_slot = NULL;
4355 if (gimple_call_lhs (stmt))
4356 {
4357 modify_dest = gimple_call_lhs (stmt);
4358
4359 /* The function which we are inlining might not return a value,
4360 in which case we should issue a warning that the function
4361 does not return a value. In that case the optimizers will
4362 see that the variable to which the value is assigned was not
4363 initialized. We do not want to issue a warning about that
4364 uninitialized variable. */
4365 if (DECL_P (modify_dest))
4366 TREE_NO_WARNING (modify_dest) = 1;
4367
4368 if (gimple_call_return_slot_opt_p (stmt))
4369 {
4370 return_slot = modify_dest;
4371 modify_dest = NULL;
4372 }
4373 }
4374 else
4375 modify_dest = NULL;
4376
4377 /* If we are inlining a call to the C++ operator new, we don't want
4378 to use type based alias analysis on the return value. Otherwise
4379 we may get confused if the compiler sees that the inlined new
4380 function returns a pointer which was just deleted. See bug
4381 33407. */
4382 if (DECL_IS_OPERATOR_NEW (fn))
4383 {
4384 return_slot = NULL;
4385 modify_dest = NULL;
4386 }
4387
4388 /* Declare the return variable for the function. */
4389 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4390
4391 /* Add local vars in this inlined callee to caller. */
4392 add_local_variables (id->src_cfun, cfun, id);
4393
4394 if (dump_file && (dump_flags & TDF_DETAILS))
4395 {
4396 fprintf (dump_file, "Inlining ");
4397 print_generic_expr (dump_file, id->src_fn, 0);
4398 fprintf (dump_file, " to ");
4399 print_generic_expr (dump_file, id->dst_fn, 0);
4400 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4401 }
4402
4403 /* This is it. Duplicate the callee body. Assume callee is
4404 pre-gimplified. Note that we must not alter the caller
4405 function in any way before this point, as this CALL_EXPR may be
4406 a self-referential call; if we're calling ourselves, we need to
4407 duplicate our body before altering anything. */
4408 copy_body (id, cg_edge->callee->count,
4409 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4410 bb, return_block, NULL);
4411
4412 /* Reset the escaped solution. */
4413 if (cfun->gimple_df)
4414 pt_solution_reset (&cfun->gimple_df->escaped);
4415
4416 /* Clean up. */
4417 if (id->debug_map)
4418 {
4419 delete id->debug_map;
4420 id->debug_map = dst;
4421 }
4422 delete id->decl_map;
4423 id->decl_map = st;
4424
4425 /* Unlink the call's virtual operands before replacing it. */
4426 unlink_stmt_vdef (stmt);
4427 if (gimple_vdef (stmt)
4428 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4429 release_ssa_name (gimple_vdef (stmt));
4430
4431 /* If the inlined function returns a result that we care about,
4432 substitute the GIMPLE_CALL with an assignment of the return
4433 variable to the LHS of the call. That is, if STMT was
4434 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4435 if (use_retvar && gimple_call_lhs (stmt))
4436 {
4437 gimple old_stmt = stmt;
4438 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4439 gsi_replace (&stmt_gsi, stmt, false);
4440 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4441 }
4442 else
4443 {
4444 /* Handle the case of inlining a function with no return
4445 statement, which causes the return value to become undefined. */
4446 if (gimple_call_lhs (stmt)
4447 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4448 {
4449 tree name = gimple_call_lhs (stmt);
4450 tree var = SSA_NAME_VAR (name);
4451 tree def = ssa_default_def (cfun, var);
4452
4453 if (def)
4454 {
4455 /* If the variable is used undefined, make this name
4456 undefined via a move. */
4457 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4458 gsi_replace (&stmt_gsi, stmt, true);
4459 }
4460 else
4461 {
4462 /* Otherwise make this variable undefined. */
4463 gsi_remove (&stmt_gsi, true);
4464 set_ssa_default_def (cfun, var, name);
4465 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4466 }
4467 }
4468 else
4469 gsi_remove (&stmt_gsi, true);
4470 }
4471
4472 if (purge_dead_abnormal_edges)
4473 {
4474 gimple_purge_dead_eh_edges (return_block);
4475 gimple_purge_dead_abnormal_call_edges (return_block);
4476 }
4477
4478 /* If the value of the new expression is ignored, that's OK. We
4479 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4480 the equivalent inlined version either. */
4481 if (is_gimple_assign (stmt))
4482 {
4483 gcc_assert (gimple_assign_single_p (stmt)
4484 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4485 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4486 }
4487
4488 /* Output the inlining info for this abstract function, since it has been
4489 inlined. If we don't do this now, we can lose the information about the
4490 variables in the function when the blocks get blown away as soon as we
4491 remove the cgraph node. */
4492 if (gimple_block (stmt))
4493 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4494
4495 /* Update callgraph if needed. */
4496 cg_edge->callee->remove ();
4497
4498 id->block = NULL_TREE;
4499 successfully_inlined = TRUE;
4500
4501 egress:
4502 input_location = saved_location;
4503 return successfully_inlined;
4504 }
4505
4506 /* Expand call statements reachable from STMT_P.
4507 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4508 in a MODIFY_EXPR. */
4509
4510 static bool
4511 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4512 {
4513 gimple_stmt_iterator gsi;
4514
4515 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4516 {
4517 gimple stmt = gsi_stmt (gsi);
4518
4519 if (is_gimple_call (stmt)
4520 && !gimple_call_internal_p (stmt)
4521 && expand_call_inline (bb, stmt, id))
4522 return true;
4523 }
4524
4525 return false;
4526 }
4527
4528
4529 /* Walk all basic blocks created after FIRST and try to fold every statement
4530 in the STATEMENTS pointer set. */
4531
4532 static void
4533 fold_marked_statements (int first, hash_set<gimple> *statements)
4534 {
4535 for (; first < n_basic_blocks_for_fn (cfun); first++)
4536 if (BASIC_BLOCK_FOR_FN (cfun, first))
4537 {
4538 gimple_stmt_iterator gsi;
4539
4540 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4541 !gsi_end_p (gsi);
4542 gsi_next (&gsi))
4543 if (statements->contains (gsi_stmt (gsi)))
4544 {
4545 gimple old_stmt = gsi_stmt (gsi);
4546 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4547
4548 if (old_decl && DECL_BUILT_IN (old_decl))
4549 {
4550 /* Folding builtins can create multiple instructions;
4551 we need to look at all of them. */
4552 gimple_stmt_iterator i2 = gsi;
4553 gsi_prev (&i2);
4554 if (fold_stmt (&gsi))
4555 {
4556 gimple new_stmt;
4557 /* If a builtin at the end of a bb folded into nothing,
4558 the following loop won't work. */
4559 if (gsi_end_p (gsi))
4560 {
4561 cgraph_update_edges_for_call_stmt (old_stmt,
4562 old_decl, NULL);
4563 break;
4564 }
4565 if (gsi_end_p (i2))
4566 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4567 else
4568 gsi_next (&i2);
4569 while (1)
4570 {
4571 new_stmt = gsi_stmt (i2);
4572 update_stmt (new_stmt);
4573 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4574 new_stmt);
4575
4576 if (new_stmt == gsi_stmt (gsi))
4577 {
4578 /* It is okay to check only for the very last
4579 of these statements. If it is a throwing
4580 statement nothing will change. If it isn't,
4581 this can remove EH edges. The only way that
4582 could be wrong is if some intermediate stmts
4583 throw, but not the last one. That would mean
4584 we'd have to split the block, which we can't
4585 do here, and we'd lose anyway. And as builtins
4586 probably never throw, this all
4587 is moot anyway. */
4588 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4589 new_stmt))
4590 gimple_purge_dead_eh_edges (
4591 BASIC_BLOCK_FOR_FN (cfun, first));
4592 break;
4593 }
4594 gsi_next (&i2);
4595 }
4596 }
4597 }
4598 else if (fold_stmt (&gsi))
4599 {
4600 /* Re-read the statement from GSI as fold_stmt() may
4601 have changed it. */
4602 gimple new_stmt = gsi_stmt (gsi);
4603 update_stmt (new_stmt);
4604
4605 if (is_gimple_call (old_stmt)
4606 || is_gimple_call (new_stmt))
4607 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4608 new_stmt);
4609
4610 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4611 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4612 first));
4613 }
4614 }
4615 }
4616 }
4617
4618 /* Expand calls to inline functions in the body of FN. */
4619
4620 unsigned int
4621 optimize_inline_calls (tree fn)
4622 {
4623 copy_body_data id;
4624 basic_block bb;
4625 int last = n_basic_blocks_for_fn (cfun);
4626 bool inlined_p = false;
4627
4628 /* Clear out ID. */
4629 memset (&id, 0, sizeof (id));
4630
4631 id.src_node = id.dst_node = cgraph_node::get (fn);
4632 gcc_assert (id.dst_node->definition);
4633 id.dst_fn = fn;
4634 /* Or any functions that aren't finished yet. */
4635 if (current_function_decl)
4636 id.dst_fn = current_function_decl;
4637
4638 id.copy_decl = copy_decl_maybe_to_var;
4639 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4640 id.transform_new_cfg = false;
4641 id.transform_return_to_modify = true;
4642 id.transform_parameter = true;
4643 id.transform_lang_insert_block = NULL;
4644 id.statements_to_fold = new hash_set<gimple>;
4645
4646 push_gimplify_context ();
4647
4648 /* We make no attempts to keep dominance info up-to-date. */
4649 free_dominance_info (CDI_DOMINATORS);
4650 free_dominance_info (CDI_POST_DOMINATORS);
4651
4652 /* Register specific gimple functions. */
4653 gimple_register_cfg_hooks ();
4654
4655 /* Reach the trees by walking over the CFG, and note the
4656 enclosing basic-blocks in the call edges. */
4657 /* We walk the blocks going forward, because inlined function bodies
4658 will split id->current_basic_block, and the new blocks will
4659 follow it; we'll trudge through them, processing their CALL_EXPRs
4660 along the way. */
4661 FOR_EACH_BB_FN (bb, cfun)
4662 inlined_p |= gimple_expand_calls_inline (bb, &id);
4663
4664 pop_gimplify_context (NULL);
4665
4666 #ifdef ENABLE_CHECKING
4667 {
4668 struct cgraph_edge *e;
4669
4670 id.dst_node->verify ();
4671
4672 /* Double check that we inlined everything we are supposed to inline. */
4673 for (e = id.dst_node->callees; e; e = e->next_callee)
4674 gcc_assert (e->inline_failed);
4675 }
4676 #endif
4677
4678 /* Fold queued statements. */
4679 fold_marked_statements (last, id.statements_to_fold);
4680 delete id.statements_to_fold;
4681
4682 gcc_assert (!id.debug_stmts.exists ());
4683
4684 /* If we didn't inline into the function there is nothing to do. */
4685 if (!inlined_p)
4686 return 0;
4687
4688 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4689 number_blocks (fn);
4690
4691 delete_unreachable_blocks_update_callgraph (&id);
4692 #ifdef ENABLE_CHECKING
4693 id.dst_node->verify ();
4694 #endif
4695
4696 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4697 not possible yet - the IPA passes might make various functions not
4698 throw and they don't care to proactively update local EH info. This is
4699 done later in the fixup_cfg pass, which also executes the verification. */
4700 return (TODO_update_ssa
4701 | TODO_cleanup_cfg
4702 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4703 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4704 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4705 ? TODO_rebuild_frequencies : 0));
4706 }
4707
4708 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4709
4710 tree
4711 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4712 {
4713 enum tree_code code = TREE_CODE (*tp);
4714 enum tree_code_class cl = TREE_CODE_CLASS (code);
4715
4716 /* We make copies of most nodes. */
4717 if (IS_EXPR_CODE_CLASS (cl)
4718 || code == TREE_LIST
4719 || code == TREE_VEC
4720 || code == TYPE_DECL
4721 || code == OMP_CLAUSE)
4722 {
4723 /* Because the chain gets clobbered when we make a copy, we save it
4724 here. */
4725 tree chain = NULL_TREE, new_tree;
4726
4727 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4728 chain = TREE_CHAIN (*tp);
4729
4730 /* Copy the node. */
4731 new_tree = copy_node (*tp);
4732
4733 *tp = new_tree;
4734
4735 /* Now, restore the chain, if appropriate. That will cause
4736 walk_tree to walk into the chain as well. */
4737 if (code == PARM_DECL
4738 || code == TREE_LIST
4739 || code == OMP_CLAUSE)
4740 TREE_CHAIN (*tp) = chain;
4741
4742 /* For now, we don't update BLOCKs when we make copies. So, we
4743 have to nullify all BIND_EXPRs. */
4744 if (TREE_CODE (*tp) == BIND_EXPR)
4745 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4746 }
4747 else if (code == CONSTRUCTOR)
4748 {
4749 /* CONSTRUCTOR nodes need special handling because
4750 we need to duplicate the vector of elements. */
4751 tree new_tree;
4752
4753 new_tree = copy_node (*tp);
4754 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4755 *tp = new_tree;
4756 }
4757 else if (code == STATEMENT_LIST)
4758 /* We used to just abort on STATEMENT_LIST, but we can run into them
4759 with statement-expressions (c++/40975). */
4760 copy_statement_list (tp);
4761 else if (TREE_CODE_CLASS (code) == tcc_type)
4762 *walk_subtrees = 0;
4763 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4764 *walk_subtrees = 0;
4765 else if (TREE_CODE_CLASS (code) == tcc_constant)
4766 *walk_subtrees = 0;
4767 return NULL_TREE;
4768 }
4769
4770 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4771 information indicating to what new SAVE_EXPR this one should be mapped,
4772 use that one. Otherwise, create a new node and enter it in ST. FN is
4773 the function into which the copy will be placed. */
4774
4775 static void
4776 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
4777 {
4778 tree *n;
4779 tree t;
4780
4781 /* See if we already encountered this SAVE_EXPR. */
4782 n = st->get (*tp);
4783
4784 /* If we didn't already remap this SAVE_EXPR, do so now. */
4785 if (!n)
4786 {
4787 t = copy_node (*tp);
4788
4789 /* Remember this SAVE_EXPR. */
4790 st->put (*tp, t);
4791 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4792 st->put (t, t);
4793 }
4794 else
4795 {
4796 /* We've already walked into this SAVE_EXPR; don't do it again. */
4797 *walk_subtrees = 0;
4798 t = *n;
4799 }
4800
4801 /* Replace this SAVE_EXPR with the copy. */
4802 *tp = t;
4803 }
4804
4805 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4806 label, copies the declaration and enters it in the decl map of DATA (which
4807 is really a 'copy_body_data *'). */
4808
4809 static tree
4810 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4811 bool *handled_ops_p ATTRIBUTE_UNUSED,
4812 struct walk_stmt_info *wi)
4813 {
4814 copy_body_data *id = (copy_body_data *) wi->info;
4815 gimple stmt = gsi_stmt (*gsip);
4816
4817 if (gimple_code (stmt) == GIMPLE_LABEL)
4818 {
4819 tree decl = gimple_label_label (stmt);
4820
4821 /* Copy the decl and remember the copy. */
4822 insert_decl_map (id, decl, id->copy_decl (decl, id));
4823 }
4824
4825 return NULL_TREE;
4826 }
4827
4828
4829 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4830 Using the decl map of the copy_body_data passed in the walk info,
4831 remaps all local declarations to appropriate replacements in gimple
4832 operands. */
4833
4834 static tree
4835 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4836 {
4837 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4838 copy_body_data *id = (copy_body_data *) wi->info;
4839 hash_map<tree, tree> *st = id->decl_map;
4840 tree *n;
4841 tree expr = *tp;
4842
4843 /* Only a local declaration (variable or label). */
4844 if ((TREE_CODE (expr) == VAR_DECL
4845 && !TREE_STATIC (expr))
4846 || TREE_CODE (expr) == LABEL_DECL)
4847 {
4848 /* Lookup the declaration. */
4849 n = st->get (expr);
4850
4851 /* If it's there, remap it. */
4852 if (n)
4853 *tp = *n;
4854 *walk_subtrees = 0;
4855 }
4856 else if (TREE_CODE (expr) == STATEMENT_LIST
4857 || TREE_CODE (expr) == BIND_EXPR
4858 || TREE_CODE (expr) == SAVE_EXPR)
4859 gcc_unreachable ();
4860 else if (TREE_CODE (expr) == TARGET_EXPR)
4861 {
4862 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4863 It's OK for this to happen if it was part of a subtree that
4864 isn't immediately expanded, such as operand 2 of another
4865 TARGET_EXPR. */
4866 if (!TREE_OPERAND (expr, 1))
4867 {
4868 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4869 TREE_OPERAND (expr, 3) = NULL_TREE;
4870 }
4871 }
4872
4873 /* Keep iterating. */
4874 return NULL_TREE;
4875 }
4876
4877
4878 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4879 Using the decl map of the copy_body_data passed in the walk info,
4880 remaps all local declarations to appropriate replacements in gimple
4881 statements. */
4882
4883 static tree
4884 replace_locals_stmt (gimple_stmt_iterator *gsip,
4885 bool *handled_ops_p ATTRIBUTE_UNUSED,
4886 struct walk_stmt_info *wi)
4887 {
4888 copy_body_data *id = (copy_body_data *) wi->info;
4889 gimple stmt = gsi_stmt (*gsip);
4890
4891 if (gimple_code (stmt) == GIMPLE_BIND)
4892 {
4893 tree block = gimple_bind_block (stmt);
4894
4895 if (block)
4896 {
4897 remap_block (&block, id);
4898 gimple_bind_set_block (stmt, block);
4899 }
4900
4901 /* This will remap a lot of the same decls again, but this should be
4902 harmless. */
4903 if (gimple_bind_vars (stmt))
4904 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4905 NULL, id));
4906 }
4907
4908 /* Keep iterating. */
4909 return NULL_TREE;
4910 }
4911
4912
4913 /* Copies everything in SEQ and replaces variables and labels local to
4914 current_function_decl. */
4915
4916 gimple_seq
4917 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4918 {
4919 copy_body_data id;
4920 struct walk_stmt_info wi;
4921 gimple_seq copy;
4922
4923 /* There's nothing to do for NULL_TREE. */
4924 if (seq == NULL)
4925 return seq;
4926
4927 /* Set up ID. */
4928 memset (&id, 0, sizeof (id));
4929 id.src_fn = current_function_decl;
4930 id.dst_fn = current_function_decl;
4931 id.decl_map = new hash_map<tree, tree>;
4932 id.debug_map = NULL;
4933
4934 id.copy_decl = copy_decl_no_change;
4935 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4936 id.transform_new_cfg = false;
4937 id.transform_return_to_modify = false;
4938 id.transform_parameter = false;
4939 id.transform_lang_insert_block = NULL;
4940
4941 /* Walk the tree once to find local labels. */
4942 memset (&wi, 0, sizeof (wi));
4943 hash_set<tree> visited;
4944 wi.info = &id;
4945 wi.pset = &visited;
4946 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4947
4948 copy = gimple_seq_copy (seq);
4949
4950 /* Walk the copy, remapping decls. */
4951 memset (&wi, 0, sizeof (wi));
4952 wi.info = &id;
4953 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4954
4955 /* Clean up. */
4956 delete id.decl_map;
4957 if (id.debug_map)
4958 delete id.debug_map;
4959
4960 return copy;
4961 }
4962
4963
4964 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4965
4966 static tree
4967 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4968 {
4969 if (*tp == data)
4970 return (tree) data;
4971 else
4972 return NULL;
4973 }
4974
4975 DEBUG_FUNCTION bool
4976 debug_find_tree (tree top, tree search)
4977 {
4978 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4979 }
4980
4981
4982 /* Declare the variables created by the inliner. Add all the variables in
4983 VARS to BLOCK and register them as local decls of the current function. */
4984
4985 static void
4986 declare_inline_vars (tree block, tree vars)
4987 {
4988 tree t;
4989 for (t = vars; t; t = DECL_CHAIN (t))
4990 {
4991 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4992 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4993 add_local_decl (cfun, t);
4994 }
4995
4996 if (block)
4997 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4998 }
4999
5000 /* Finish off COPY, a copy of DECL. The DECL originally was in ID->src_fn,
5001 but the copy will live in ID->dst_fn. Shared helper for the various
5002 copy_decl_* routines below. */
5003
5004 static tree
5005 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5006 {
5007 /* Don't generate debug information for the copy if we wouldn't have
5008 generated it for the original either. */
5009 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5010 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5011
5012 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5013 declaration inspired this copy. */
5014 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5015
5016 /* The new variable/label has no RTL, yet. */
5017 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5018 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5019 SET_DECL_RTL (copy, 0);
5020
5021 /* These args would always appear unused, if not for this. */
5022 TREE_USED (copy) = 1;
5023
5024 /* Set the context for the new declaration. */
5025 if (!DECL_CONTEXT (decl))
5026 /* Globals stay global. */
5027 ;
5028 else if (DECL_CONTEXT (decl) != id->src_fn)
5029 /* Things that weren't in the scope of the function we're inlining
5030 from aren't in the scope we're inlining to, either. */
5031 ;
5032 else if (TREE_STATIC (decl))
5033 /* Function-scoped static variables should stay in the original
5034 function. */
5035 ;
5036 else
5037 /* Ordinary automatic local variables are now in the scope of the
5038 new function. */
5039 DECL_CONTEXT (copy) = id->dst_fn;
5040
5041 return copy;
5042 }
5043
5044 static tree
5045 copy_decl_to_var (tree decl, copy_body_data *id)
5046 {
5047 tree copy, type;
5048
5049 gcc_assert (TREE_CODE (decl) == PARM_DECL
5050 || TREE_CODE (decl) == RESULT_DECL);
5051
5052 type = TREE_TYPE (decl);
5053
5054 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5055 VAR_DECL, DECL_NAME (decl), type);
5056 if (DECL_PT_UID_SET_P (decl))
5057 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5058 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5059 TREE_READONLY (copy) = TREE_READONLY (decl);
5060 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5061 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5062
5063 return copy_decl_for_dup_finish (id, decl, copy);
5064 }
5065
5066 /* Like copy_decl_to_var, but create a return slot object instead of a
5067 pointer variable for return by invisible reference. */
5068
5069 static tree
5070 copy_result_decl_to_var (tree decl, copy_body_data *id)
5071 {
5072 tree copy, type;
5073
5074 gcc_assert (TREE_CODE (decl) == PARM_DECL
5075 || TREE_CODE (decl) == RESULT_DECL);
5076
5077 type = TREE_TYPE (decl);
5078 if (DECL_BY_REFERENCE (decl))
5079 type = TREE_TYPE (type);
5080
5081 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5082 VAR_DECL, DECL_NAME (decl), type);
5083 if (DECL_PT_UID_SET_P (decl))
5084 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5085 TREE_READONLY (copy) = TREE_READONLY (decl);
5086 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5087 if (!DECL_BY_REFERENCE (decl))
5088 {
5089 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5090 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5091 }
5092
5093 return copy_decl_for_dup_finish (id, decl, copy);
5094 }
5095
5096 tree
5097 copy_decl_no_change (tree decl, copy_body_data *id)
5098 {
5099 tree copy;
5100
5101 copy = copy_node (decl);
5102
5103 /* The COPY is not abstract; it will be generated in DST_FN. */
5104 DECL_ABSTRACT_P (copy) = false;
5105 lang_hooks.dup_lang_specific_decl (copy);
5106
5107 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5108 been taken; it's for internal bookkeeping in expand_goto_internal. */
5109 if (TREE_CODE (copy) == LABEL_DECL)
5110 {
5111 TREE_ADDRESSABLE (copy) = 0;
5112 LABEL_DECL_UID (copy) = -1;
5113 }
5114
5115 return copy_decl_for_dup_finish (id, decl, copy);
5116 }
5117
5118 static tree
5119 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5120 {
5121 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5122 return copy_decl_to_var (decl, id);
5123 else
5124 return copy_decl_no_change (decl, id);
5125 }
5126
5127 /* Return a copy of the function's argument tree. */
5128 static tree
5129 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5130 bitmap args_to_skip, tree *vars)
5131 {
5132 tree arg, *parg;
5133 tree new_parm = NULL;
5134 int i = 0;
5135
5136 parg = &new_parm;
5137
5138 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5139 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5140 {
5141 tree new_tree = remap_decl (arg, id);
5142 if (TREE_CODE (new_tree) != PARM_DECL)
5143 new_tree = id->copy_decl (arg, id);
5144 lang_hooks.dup_lang_specific_decl (new_tree);
5145 *parg = new_tree;
5146 parg = &DECL_CHAIN (new_tree);
5147 }
5148 else if (!id->decl_map->get (arg))
5149 {
5150 /* Make an equivalent VAR_DECL. If the argument was used
5151 as a temporary variable later in the function, the uses will be
5152 replaced by the local variable. */
5153 tree var = copy_decl_to_var (arg, id);
5154 insert_decl_map (id, arg, var);
5155 /* Declare this new variable. */
5156 DECL_CHAIN (var) = *vars;
5157 *vars = var;
5158 }
5159 return new_parm;
5160 }
5161
5162 /* Return a copy of the function's static chain. */
5163 static tree
5164 copy_static_chain (tree static_chain, copy_body_data * id)
5165 {
5166 tree *chain_copy, *pvar;
5167
5168 chain_copy = &static_chain;
5169 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5170 {
5171 tree new_tree = remap_decl (*pvar, id);
5172 lang_hooks.dup_lang_specific_decl (new_tree);
5173 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5174 *pvar = new_tree;
5175 }
5176 return static_chain;
5177 }
5178
5179 /* Return true if the function is allowed to be versioned.
5180 This is a guard for the versioning functionality. */
5181
5182 bool
5183 tree_versionable_function_p (tree fndecl)
5184 {
5185 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5186 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5187 }
5188
5189 /* Delete all unreachable basic blocks and update callgraph.
5190 Doing so is somewhat nontrivial because we need to update all clones and
5191 remove inline functions that become unreachable. */
5192
5193 static bool
5194 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5195 {
5196 bool changed = false;
5197 basic_block b, next_bb;
5198
5199 find_unreachable_blocks ();
5200
5201 /* Delete all unreachable basic blocks. */
5202
5203 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5204 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5205 {
5206 next_bb = b->next_bb;
5207
5208 if (!(b->flags & BB_REACHABLE))
5209 {
5210 gimple_stmt_iterator bsi;
5211
5212 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5213 {
5214 struct cgraph_edge *e;
5215 struct cgraph_node *node;
5216
5217 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5218
5219 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5220 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5221 {
5222 if (!e->inline_failed)
5223 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5224 else
5225 e->remove ();
5226 }
5227 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5228 && id->dst_node->clones)
5229 for (node = id->dst_node->clones; node != id->dst_node;)
5230 {
5231 node->remove_stmt_references (gsi_stmt (bsi));
5232 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5233 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5234 {
5235 if (!e->inline_failed)
5236 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5237 else
5238 e->remove ();
5239 }
5240
5241 if (node->clones)
5242 node = node->clones;
5243 else if (node->next_sibling_clone)
5244 node = node->next_sibling_clone;
5245 else
5246 {
5247 while (node != id->dst_node && !node->next_sibling_clone)
5248 node = node->clone_of;
5249 if (node != id->dst_node)
5250 node = node->next_sibling_clone;
5251 }
5252 }
5253 }
5254 delete_basic_block (b);
5255 changed = true;
5256 }
5257 }
5258
5259 return changed;
5260 }
5261
5262 /* Update clone info after duplication. */
5263
5264 static void
5265 update_clone_info (copy_body_data * id)
5266 {
5267 struct cgraph_node *node;
5268 if (!id->dst_node->clones)
5269 return;
5270 for (node = id->dst_node->clones; node != id->dst_node;)
5271 {
5272 /* First update replace maps to match the new body. */
5273 if (node->clone.tree_map)
5274 {
5275 unsigned int i;
5276 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5277 {
5278 struct ipa_replace_map *replace_info;
5279 replace_info = (*node->clone.tree_map)[i];
5280 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5281 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5282 }
5283 }
5284 if (node->clones)
5285 node = node->clones;
5286 else if (node->next_sibling_clone)
5287 node = node->next_sibling_clone;
5288 else
5289 {
5290 while (node != id->dst_node && !node->next_sibling_clone)
5291 node = node->clone_of;
5292 if (node != id->dst_node)
5293 node = node->next_sibling_clone;
5294 }
5295 }
5296 }
5297
5298 /* Create a copy of a function's tree.
5299 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5300 of the original function and the new copied function
5301 respectively. In case we want to replace a DECL
5302 tree with another tree while duplicating the function's
5303 body, TREE_MAP represents the mapping between these
5304 trees. If UPDATE_CLONES is set, the call_stmt fields
5305 of edges of clones of the function will be updated.
5306
5307 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5308 from the new version.
5309 If SKIP_RETURN is true, the new version will return void.
5310 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5311 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5312 */
5313 void
5314 tree_function_versioning (tree old_decl, tree new_decl,
5315 vec<ipa_replace_map *, va_gc> *tree_map,
5316 bool update_clones, bitmap args_to_skip,
5317 bool skip_return, bitmap blocks_to_copy,
5318 basic_block new_entry)
5319 {
5320 struct cgraph_node *old_version_node;
5321 struct cgraph_node *new_version_node;
5322 copy_body_data id;
5323 tree p;
5324 unsigned i;
5325 struct ipa_replace_map *replace_info;
5326 basic_block old_entry_block, bb;
5327 auto_vec<gimple, 10> init_stmts;
5328 tree vars = NULL_TREE;
5329
5330 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5331 && TREE_CODE (new_decl) == FUNCTION_DECL);
5332 DECL_POSSIBLY_INLINED (old_decl) = 1;
5333
5334 old_version_node = cgraph_node::get (old_decl);
5335 gcc_checking_assert (old_version_node);
5336 new_version_node = cgraph_node::get (new_decl);
5337 gcc_checking_assert (new_version_node);
5338
5339 /* Copy over debug args. */
5340 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5341 {
5342 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5343 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5344 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5345 old_debug_args = decl_debug_args_lookup (old_decl);
5346 if (old_debug_args)
5347 {
5348 new_debug_args = decl_debug_args_insert (new_decl);
5349 *new_debug_args = vec_safe_copy (*old_debug_args);
5350 }
5351 }
5352
5353 /* Output the inlining info for this abstract function, since it has been
5354 inlined. If we don't do this now, we can lose the information about the
5355 variables in the function when the blocks get blown away as soon as we
5356 remove the cgraph node. */
5357 (*debug_hooks->outlining_inline_function) (old_decl);
5358
5359 DECL_ARTIFICIAL (new_decl) = 1;
5360 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5361 if (DECL_ORIGIN (old_decl) == old_decl)
5362 old_version_node->used_as_abstract_origin = true;
5363 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5364
5365 /* Prepare the data structures for the tree copy. */
5366 memset (&id, 0, sizeof (id));
5367
5368 /* Collect statements that will need folding after the copy.  */
5369 id.statements_to_fold = new hash_set<gimple>;
5370
5371 id.decl_map = new hash_map<tree, tree>;
5372 id.debug_map = NULL;
5373 id.src_fn = old_decl;
5374 id.dst_fn = new_decl;
5375 id.src_node = old_version_node;
5376 id.dst_node = new_version_node;
5377 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5378 id.blocks_to_copy = blocks_to_copy;
5379
5380 id.copy_decl = copy_decl_no_change;
5381 id.transform_call_graph_edges
5382 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5383 id.transform_new_cfg = true;
5384 id.transform_return_to_modify = false;
5385 id.transform_parameter = false;
5386 id.transform_lang_insert_block = NULL;
5387
5388 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5389 (DECL_STRUCT_FUNCTION (old_decl));
5390 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5391 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5392 initialize_cfun (new_decl, old_decl,
5393 old_entry_block->count);
5394 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5395 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5396 = id.src_cfun->gimple_df->ipa_pta;
5397
5398 /* Copy the function's static chain. */
5399 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5400 if (p)
5401 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5402 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5403 &id);
5404
5405 /* If there's a tree_map, prepare for substitution. */
5406 if (tree_map)
5407 for (i = 0; i < tree_map->length (); i++)
5408 {
5409 gimple init;
5410 replace_info = (*tree_map)[i];
5411 if (replace_info->replace_p)
5412 {
5413 if (!replace_info->old_tree)
5414 {
5415 int i = replace_info->parm_num;
5416 tree parm;
5417 tree req_type;
5418
5419 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5420 i --;
5421 replace_info->old_tree = parm;
5422 req_type = TREE_TYPE (parm);
5423 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5424 {
5425 if (fold_convertible_p (req_type, replace_info->new_tree))
5426 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5427 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5428 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5429 else
5430 {
5431 if (dump_file)
5432 {
5433 fprintf (dump_file, " const ");
5434 print_generic_expr (dump_file, replace_info->new_tree, 0);
5435 fprintf (dump_file, " can't be converted to param ");
5436 print_generic_expr (dump_file, parm, 0);
5437 fprintf (dump_file, "\n");
5438 }
5439 replace_info->old_tree = NULL;
5440 }
5441 }
5442 }
5443 else
5444 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5445 if (replace_info->old_tree)
5446 {
5447 init = setup_one_parameter (&id, replace_info->old_tree,
5448 replace_info->new_tree, id.src_fn,
5449 NULL,
5450 &vars);
5451 if (init)
5452 init_stmts.safe_push (init);
5453 }
5454 }
5455 }
5456 /* Copy the function's arguments. */
5457 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5458 DECL_ARGUMENTS (new_decl) =
5459 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5460 args_to_skip, &vars);
5461
5462 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5463 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5464
5465 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5466
5467 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5468 /* Add local vars. */
5469 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5470
5471 if (DECL_RESULT (old_decl) == NULL_TREE)
5472 ;
5473 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5474 {
5475 DECL_RESULT (new_decl)
5476 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5477 RESULT_DECL, NULL_TREE, void_type_node);
5478 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5479 cfun->returns_struct = 0;
5480 cfun->returns_pcc_struct = 0;
5481 }
5482 else
5483 {
5484 tree old_name;
5485 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5486 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5487 if (gimple_in_ssa_p (id.src_cfun)
5488 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5489 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5490 {
5491 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5492 insert_decl_map (&id, old_name, new_name);
5493 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5494 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5495 }
5496 }
5497
5498 /* Set up the destination function's loop tree.  */
5499 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5500 {
5501 cfun->curr_properties &= ~PROP_loops;
5502 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5503 cfun->curr_properties |= PROP_loops;
5504 }
5505
5506 /* Copy the function's body.  */
5507 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5508 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5509 new_entry);
5510
5511 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5512 number_blocks (new_decl);
5513
5514 /* We want to create the BB unconditionally, so that the addition of
5515 debug stmts doesn't affect BB count, which may in the end cause
5516 codegen differences. */
5517 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5518 while (init_stmts.length ())
5519 insert_init_stmt (&id, bb, init_stmts.pop ());
5520 update_clone_info (&id);
5521
5522 /* Remap the nonlocal_goto_save_area, if any. */
5523 if (cfun->nonlocal_goto_save_area)
5524 {
5525 struct walk_stmt_info wi;
5526
5527 memset (&wi, 0, sizeof (wi));
5528 wi.info = &id;
5529 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5530 }
5531
5532 /* Clean up. */
5533 delete id.decl_map;
5534 if (id.debug_map)
5535 delete id.debug_map;
5536 free_dominance_info (CDI_DOMINATORS);
5537 free_dominance_info (CDI_POST_DOMINATORS);
5538
5539 fold_marked_statements (0, id.statements_to_fold);
5540 delete id.statements_to_fold;
5541 fold_cond_expr_cond ();
5542 delete_unreachable_blocks_update_callgraph (&id);
5543 if (id.dst_node->definition)
5544 cgraph_edge::rebuild_references ();
5545 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5546 {
5547 calculate_dominance_info (CDI_DOMINATORS);
5548 fix_loop_structure (NULL);
5549 }
5550 update_ssa (TODO_update_ssa);
5551
5552 /* After partial cloning we need to rescale frequencies, so that they are
5553 within the proper range in the cloned function.  */
5554 if (new_entry)
5555 {
5556 struct cgraph_edge *e;
5557 rebuild_frequencies ();
5558
5559 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5560 for (e = new_version_node->callees; e; e = e->next_callee)
5561 {
5562 basic_block bb = gimple_bb (e->call_stmt);
5563 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5564 bb);
5565 e->count = bb->count;
5566 }
5567 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5568 {
5569 basic_block bb = gimple_bb (e->call_stmt);
5570 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5571 bb);
5572 e->count = bb->count;
5573 }
5574 }
5575
5576 free_dominance_info (CDI_DOMINATORS);
5577 free_dominance_info (CDI_POST_DOMINATORS);
5578
5579 gcc_assert (!id.debug_stmts.exists ());
5580 pop_cfun ();
5581 return;
5582 }
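
/* A minimal caller sketch (hypothetical, not part of this file): replacing
   parameter 0 of OLD_DECL by the constant 4 in a clone NEW_DECL that the
   caller has already created (normally via the cgraph clone machinery).
   GGC-allocating the replace map as shown below is an assumption about the
   caller's environment, not something this file prescribes:

     struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     vec<ipa_replace_map *, va_gc> *tree_map = NULL;

     map->old_tree = NULL_TREE;
     map->parm_num = 0;
     map->new_tree = build_int_cst (integer_type_node, 4);
     map->replace_p = true;
     vec_safe_push (tree_map, map);

     tree_function_versioning (old_decl, new_decl, tree_map,
                               false, NULL, false, NULL, NULL);

   With old_tree left NULL_TREE, the loop above resolves the parameter from
   parm_num and converts new_tree to the parameter type if necessary.  */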
5583
5584 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5585 the callee and return the inlined body on success.  */
5586
5587 tree
5588 maybe_inline_call_in_expr (tree exp)
5589 {
5590 tree fn = get_callee_fndecl (exp);
5591
5592 /* We can only try to inline "const" functions. */
5593 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5594 {
5595 call_expr_arg_iterator iter;
5596 copy_body_data id;
5597 tree param, arg, t;
5598 hash_map<tree, tree> decl_map;
5599
5600 /* Remap the parameters. */
5601 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5602 param;
5603 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5604 decl_map.put (param, arg);
5605
5606 memset (&id, 0, sizeof (id));
5607 id.src_fn = fn;
5608 id.dst_fn = current_function_decl;
5609 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5610 id.decl_map = &decl_map;
5611
5612 id.copy_decl = copy_decl_no_change;
5613 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5614 id.transform_new_cfg = false;
5615 id.transform_return_to_modify = true;
5616 id.transform_parameter = true;
5617 id.transform_lang_insert_block = NULL;
5618
5619 /* Make sure not to unshare trees behind the front-end's back
5620 since front-end specific mechanisms may rely on sharing. */
5621 id.regimplify = false;
5622 id.do_not_unshare = true;
5623
5624 /* We're not inside any EH region. */
5625 id.eh_lp_nr = 0;
5626
5627 t = copy_tree_body (&id);
5628
5629 /* We can only return something suitable for use in a GENERIC
5630 expression tree. */
5631 if (TREE_CODE (t) == MODIFY_EXPR)
5632 return TREE_OPERAND (t, 1);
5633 }
5634
5635 return NULL_TREE;
5636 }
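
/* Usage sketch (hypothetical front-end caller): when folding a GENERIC
   call to a "const" function whose body is still available,

     tree folded = maybe_inline_call_in_expr (expr);
     if (folded)
       expr = folded;

   leaves EXPR untouched whenever the body could not be integrated.  */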
5637
5638 /* Duplicate a type, fields and all. */
5639
5640 tree
5641 build_duplicate_type (tree type)
5642 {
5643 struct copy_body_data id;
5644
5645 memset (&id, 0, sizeof (id));
5646 id.src_fn = current_function_decl;
5647 id.dst_fn = current_function_decl;
5648 id.src_cfun = cfun;
5649 id.decl_map = new hash_map<tree, tree>;
5650 id.debug_map = NULL;
5651 id.copy_decl = copy_decl_no_change;
5652
5653 type = remap_type_1 (type, &id);
5654
5655 delete id.decl_map;
5656 if (id.debug_map)
5657 delete id.debug_map;
5658
5659 TYPE_CANONICAL (type) = type;
5660
5661 return type;
5662 }
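
/* Usage sketch (hypothetical caller): a front end that needs a structurally
   identical but distinct variant of a type, e.g. one with its own
   TYPE_CANONICAL, can simply use

     tree copy = build_duplicate_type (orig_type);

   after which, for aggregate types, COPY and ORIG_TYPE no longer share
   FIELD_DECLs.  */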