1 /* Tree inlining.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "calls.h"
29 #include "tree-inline.h"
30 #include "flags.h"
31 #include "params.h"
32 #include "input.h"
33 #include "insn-config.h"
34 #include "hashtab.h"
35 #include "langhooks.h"
36 #include "basic-block.h"
37 #include "tree-iterator.h"
38 #include "intl.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimple-expr.h"
44 #include "is-a.h"
45 #include "gimple.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "gimple-ssa.h"
51 #include "tree-cfg.h"
52 #include "tree-phinodes.h"
53 #include "ssa-iterators.h"
54 #include "stringpool.h"
55 #include "tree-ssanames.h"
56 #include "tree-into-ssa.h"
57 #include "expr.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "hash-set.h"
61 #include "vec.h"
62 #include "machmode.h"
63 #include "hard-reg-set.h"
64 #include "function.h"
65 #include "tree-pretty-print.h"
66 #include "except.h"
67 #include "debug.h"
68 #include "ipa-prop.h"
69 #include "value-prof.h"
70 #include "tree-pass.h"
71 #include "target.h"
72 #include "cfgloop.h"
73 #include "builtins.h"
74
75 #include "rtl.h" /* FIXME: For asm_str_count. */
76
 77 /* I'm not really happy about this, but we need to handle gimple and
 78    non-gimple trees.  */
79
80 /* Inlining, Cloning, Versioning, Parallelization
81
82 Inlining: a function body is duplicated, but the PARM_DECLs are
83 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
84 MODIFY_EXPRs that store to a dedicated returned-value variable.
85 The duplicated eh_region info of the copy will later be appended
86 to the info for the caller; the eh_region info in copied throwing
87 statements and RESX statements are adjusted accordingly.
88
89 Cloning: (only in C++) We have one body for a con/de/structor, and
90 multiple function decls, each with a unique parameter list.
91 Duplicate the body, using the given splay tree; some parameters
92 will become constants (like 0 or 1).
93
 94    Versioning: a function body is duplicated and the result is a new
 95    function, rather than being inserted into blocks of an existing function
 96    as with inlining.  Some parameters will become constants.
97
98 Parallelization: a region of a function is duplicated resulting in
99 a new function. Variables may be replaced with complex expressions
100 to enable shared variable semantics.
101
 102    All of these will simultaneously look up any callgraph edges.  If
103 we're going to inline the duplicated function body, and the given
104 function has some cloned callgraph nodes (one for each place this
105 function will be inlined) those callgraph edges will be duplicated.
106 If we're cloning the body, those callgraph edges will be
107 updated to point into the new body. (Note that the original
108 callgraph node and edge list will not be altered.)
109
110 See the CALL_EXPR handling case in copy_tree_body_r (). */
111
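/* A purely illustrative sketch of the inlining transform described
   above (all names are made up, not part of GCC):

     int callee (int p) { return p + 1; }
     int caller (void)  { return callee (5); }

   After inlining, the body of 'caller' conceptually contains

     p.0 = 5;               <- PARM_DECL remapped to a VAR_DECL
     retval.1 = p.0 + 1;    <- RETURN_EXPR became a MODIFY_EXPR
     return retval.1;       <- dedicated returned-value variable

   and the callee's eh_region info, if any, is appended to the caller's.  */
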
112 /* To Do:
113
114 o In order to make inlining-on-trees work, we pessimized
115 function-local static constants. In particular, they are now
116 always output, even when not addressed. Fix this by treating
117 function-local static constants just like global static
118 constants; the back-end already knows not to output them if they
119 are not needed.
120
121 o Provide heuristics to clamp inlining of recursive template
122 calls? */
123
124
125 /* Weights that estimate_num_insns uses to estimate the size of the
126 produced code. */
127
128 eni_weights eni_size_weights;
129
130 /* Weights that estimate_num_insns uses to estimate the time necessary
131 to execute the produced code. */
132
133 eni_weights eni_time_weights;
134
135 /* Prototypes. */
136
137 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
138 static void remap_block (tree *, copy_body_data *);
139 static void copy_bind_expr (tree *, int *, copy_body_data *);
140 static void declare_inline_vars (tree, tree);
141 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
142 static void prepend_lexical_block (tree current_block, tree new_block);
143 static tree copy_decl_to_var (tree, copy_body_data *);
144 static tree copy_result_decl_to_var (tree, copy_body_data *);
145 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
146 static gimple remap_gimple_stmt (gimple, copy_body_data *);
147 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
148
 149 /* Insert a tree->tree mapping for ID.  Although the name suggests
 150    that the trees should be variables, this is used for more than that.  */
151
152 void
153 insert_decl_map (copy_body_data *id, tree key, tree value)
154 {
155 id->decl_map->put (key, value);
156
157 /* Always insert an identity map as well. If we see this same new
158 node again, we won't want to duplicate it a second time. */
159 if (key != value)
160 id->decl_map->put (value, value);
161 }
162
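/* Illustrative example (hypothetical names): remapping PARM_DECL 'a'
   to its inline-time copy, VAR_DECL 'a.1', records two entries:

     a   -> a.1
     a.1 -> a.1

   so a later lookup of the already-remapped 'a.1' is an identity
   mapping rather than a trigger for yet another copy.  */
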
163 /* Insert a tree->tree mapping for ID. This is only used for
164 variables. */
165
166 static void
167 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
168 {
169 if (!gimple_in_ssa_p (id->src_cfun))
170 return;
171
172 if (!MAY_HAVE_DEBUG_STMTS)
173 return;
174
175 if (!target_for_debug_bind (key))
176 return;
177
178 gcc_assert (TREE_CODE (key) == PARM_DECL);
179 gcc_assert (TREE_CODE (value) == VAR_DECL);
180
181 if (!id->debug_map)
182 id->debug_map = new hash_map<tree, tree>;
183
184 id->debug_map->put (key, value);
185 }
186
187 /* If nonzero, we're remapping the contents of inlined debug
188 statements. If negative, an error has occurred, such as a
189 reference to a variable that isn't available in the inlined
190 context. */
191 static int processing_debug_stmt = 0;
192
193 /* Construct new SSA name for old NAME. ID is the inline context. */
194
195 static tree
196 remap_ssa_name (tree name, copy_body_data *id)
197 {
198 tree new_tree, var;
199 tree *n;
200
201 gcc_assert (TREE_CODE (name) == SSA_NAME);
202
203 n = id->decl_map->get (name);
204 if (n)
205 return unshare_expr (*n);
206
207 if (processing_debug_stmt)
208 {
209 if (SSA_NAME_IS_DEFAULT_DEF (name)
210 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
211 && id->entry_bb == NULL
212 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
213 {
214 tree vexpr = make_node (DEBUG_EXPR_DECL);
215 gimple def_temp;
216 gimple_stmt_iterator gsi;
217 tree val = SSA_NAME_VAR (name);
218
219 n = id->decl_map->get (val);
220 if (n != NULL)
221 val = *n;
222 if (TREE_CODE (val) != PARM_DECL)
223 {
224 processing_debug_stmt = -1;
225 return name;
226 }
227 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
228 DECL_ARTIFICIAL (vexpr) = 1;
229 TREE_TYPE (vexpr) = TREE_TYPE (name);
230 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
231 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
232 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
233 return vexpr;
234 }
235
236 processing_debug_stmt = -1;
237 return name;
238 }
239
240 /* Remap anonymous SSA names or SSA names of anonymous decls. */
241 var = SSA_NAME_VAR (name);
242 if (!var
243 || (!SSA_NAME_IS_DEFAULT_DEF (name)
244 && TREE_CODE (var) == VAR_DECL
245 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
246 && DECL_ARTIFICIAL (var)
247 && DECL_IGNORED_P (var)
248 && !DECL_NAME (var)))
249 {
250 struct ptr_info_def *pi;
251 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
252 if (!var && SSA_NAME_IDENTIFIER (name))
253 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
254 insert_decl_map (id, name, new_tree);
255 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
256 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
257 /* At least IPA points-to info can be directly transferred. */
258 if (id->src_cfun->gimple_df
259 && id->src_cfun->gimple_df->ipa_pta
260 && (pi = SSA_NAME_PTR_INFO (name))
261 && !pi->pt.anything)
262 {
263 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
264 new_pi->pt = pi->pt;
265 }
266 return new_tree;
267 }
268
 269   /* Do not set DEF_STMT yet, as the statement has not been copied yet.  We do
 270      that in copy_bb.  */
271 new_tree = remap_decl (var, id);
272
 273   /* We might've substituted a constant or another SSA_NAME for
 274      the variable.
 275
 276      Replace the SSA name representing the RESULT_DECL by the variable during
 277      inlining: this saves us from having to introduce a PHI node in the case
 278      where the return value is only partly initialized.  */
279 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
280 && (!SSA_NAME_VAR (name)
281 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
282 || !id->transform_return_to_modify))
283 {
284 struct ptr_info_def *pi;
285 new_tree = make_ssa_name (new_tree, NULL);
286 insert_decl_map (id, name, new_tree);
287 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
288 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
289 /* At least IPA points-to info can be directly transferred. */
290 if (id->src_cfun->gimple_df
291 && id->src_cfun->gimple_df->ipa_pta
292 && (pi = SSA_NAME_PTR_INFO (name))
293 && !pi->pt.anything)
294 {
295 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
296 new_pi->pt = pi->pt;
297 }
298 if (SSA_NAME_IS_DEFAULT_DEF (name))
299 {
 300   /* By inlining a function that has an uninitialized variable, we might
 301      extend its lifetime (the variable might get reused).  This causes an
 302      ICE if we end up extending the lifetime of an SSA name across an
 303      abnormal edge, and it also increases register pressure.
 304
 305      We simply initialize all uninitialized vars to 0, except for the
 306      case where we are inlining into the very first BB.  We could avoid
 307      this for all BBs that are not inside strongly connected
 308      regions of the CFG, but that is expensive to test.  */
309 if (id->entry_bb
310 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
311 && (!SSA_NAME_VAR (name)
312 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
313 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
314 0)->dest
315 || EDGE_COUNT (id->entry_bb->preds) != 1))
316 {
317 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
318 gimple init_stmt;
319 tree zero = build_zero_cst (TREE_TYPE (new_tree));
320
321 init_stmt = gimple_build_assign (new_tree, zero);
322 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
323 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
324 }
325 else
326 {
327 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
328 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
329 }
330 }
331 }
332 else
333 insert_decl_map (id, name, new_tree);
334 return new_tree;
335 }
336
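/* Illustrative sketch of the default-def handling above (SSA names are
   hypothetical): if the callee uses an uninitialized local whose default
   definition x_1(D) appears in an abnormal PHI, its copy is not left as
   a default definition; instead an explicit

     x_2 = 0;

   is emitted at the end of id->entry_bb, so the copied name's lifetime
   cannot be extended across an abnormal edge.  (When inlining into the
   very first BB, the copy simply becomes a new default definition.)  */
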
337 /* Remap DECL during the copying of the BLOCK tree for the function. */
338
339 tree
340 remap_decl (tree decl, copy_body_data *id)
341 {
342 tree *n;
343
344 /* We only remap local variables in the current function. */
345
346 /* See if we have remapped this declaration. */
347
348 n = id->decl_map->get (decl);
349
350 if (!n && processing_debug_stmt)
351 {
352 processing_debug_stmt = -1;
353 return decl;
354 }
355
356 /* If we didn't already have an equivalent for this declaration,
357 create one now. */
358 if (!n)
359 {
360 /* Make a copy of the variable or label. */
361 tree t = id->copy_decl (decl, id);
362
363 /* Remember it, so that if we encounter this local entity again
364 we can reuse this copy. Do this early because remap_type may
365 need this decl for TYPE_STUB_DECL. */
366 insert_decl_map (id, decl, t);
367
368 if (!DECL_P (t))
369 return t;
370
371 /* Remap types, if necessary. */
372 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
373 if (TREE_CODE (t) == TYPE_DECL)
374 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
375
376 /* Remap sizes as necessary. */
377 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
378 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
379
380 /* If fields, do likewise for offset and qualifier. */
381 if (TREE_CODE (t) == FIELD_DECL)
382 {
383 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
384 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
385 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
386 }
387
388 return t;
389 }
390
391 if (id->do_not_unshare)
392 return *n;
393 else
394 return unshare_expr (*n);
395 }
396
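/* Worker for remap_type.  Build the remapped copy of TYPE: pointer and
   reference types are rebuilt around the remapped pointed-to type; other
   types are copied with copy_node, entered into ID->decl_map, and have
   their variant chain, fields, domain and sizes remapped as needed.  */
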
397 static tree
398 remap_type_1 (tree type, copy_body_data *id)
399 {
400 tree new_tree, t;
401
 402   /* We do need a copy.  Build and register it now.  If this is a pointer or
403 reference type, remap the designated type and make a new pointer or
404 reference type. */
405 if (TREE_CODE (type) == POINTER_TYPE)
406 {
407 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
408 TYPE_MODE (type),
409 TYPE_REF_CAN_ALIAS_ALL (type));
410 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
411 new_tree = build_type_attribute_qual_variant (new_tree,
412 TYPE_ATTRIBUTES (type),
413 TYPE_QUALS (type));
414 insert_decl_map (id, type, new_tree);
415 return new_tree;
416 }
417 else if (TREE_CODE (type) == REFERENCE_TYPE)
418 {
419 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
420 TYPE_MODE (type),
421 TYPE_REF_CAN_ALIAS_ALL (type));
422 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
423 new_tree = build_type_attribute_qual_variant (new_tree,
424 TYPE_ATTRIBUTES (type),
425 TYPE_QUALS (type));
426 insert_decl_map (id, type, new_tree);
427 return new_tree;
428 }
429 else
430 new_tree = copy_node (type);
431
432 insert_decl_map (id, type, new_tree);
433
434 /* This is a new type, not a copy of an old type. Need to reassociate
435 variants. We can handle everything except the main variant lazily. */
436 t = TYPE_MAIN_VARIANT (type);
437 if (type != t)
438 {
439 t = remap_type (t, id);
440 TYPE_MAIN_VARIANT (new_tree) = t;
441 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
442 TYPE_NEXT_VARIANT (t) = new_tree;
443 }
444 else
445 {
446 TYPE_MAIN_VARIANT (new_tree) = new_tree;
447 TYPE_NEXT_VARIANT (new_tree) = NULL;
448 }
449
450 if (TYPE_STUB_DECL (type))
451 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
452
453 /* Lazily create pointer and reference types. */
454 TYPE_POINTER_TO (new_tree) = NULL;
455 TYPE_REFERENCE_TO (new_tree) = NULL;
456
 457   /* Copy all types that may contain references to local variables; be sure to
 458      preserve sharing between the type and its main variant when possible.  */
459 switch (TREE_CODE (new_tree))
460 {
461 case INTEGER_TYPE:
462 case REAL_TYPE:
463 case FIXED_POINT_TYPE:
464 case ENUMERAL_TYPE:
465 case BOOLEAN_TYPE:
466 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
467 {
468 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
469 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
470
471 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
472 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
473 }
474 else
475 {
476 t = TYPE_MIN_VALUE (new_tree);
477 if (t && TREE_CODE (t) != INTEGER_CST)
478 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
479
480 t = TYPE_MAX_VALUE (new_tree);
481 if (t && TREE_CODE (t) != INTEGER_CST)
482 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
483 }
484 return new_tree;
485
486 case FUNCTION_TYPE:
487 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
488 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
489 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
490 else
491 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
492 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
493 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
494 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
495 else
496 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
497 return new_tree;
498
499 case ARRAY_TYPE:
500 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
501 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
502 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
503 else
504 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
505
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
507 {
508 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
509 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
510 }
511 else
512 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
513 break;
514
515 case RECORD_TYPE:
516 case UNION_TYPE:
517 case QUAL_UNION_TYPE:
518 if (TYPE_MAIN_VARIANT (type) != type
519 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
520 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
521 else
522 {
523 tree f, nf = NULL;
524
525 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
526 {
527 t = remap_decl (f, id);
528 DECL_CONTEXT (t) = new_tree;
529 DECL_CHAIN (t) = nf;
530 nf = t;
531 }
532 TYPE_FIELDS (new_tree) = nreverse (nf);
533 }
534 break;
535
536 case OFFSET_TYPE:
537 default:
538 /* Shouldn't have been thought variable sized. */
539 gcc_unreachable ();
540 }
541
 542   /* All variants of the type share the same size, so use the already remapped data.  */
543 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
544 {
545 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
546 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
547
548 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
549 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
550 }
551 else
552 {
553 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
554 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
555 }
556
557 return new_tree;
558 }
559
560 tree
561 remap_type (tree type, copy_body_data *id)
562 {
563 tree *node;
564 tree tmp;
565
566 if (type == NULL)
567 return type;
568
569 /* See if we have remapped this type. */
570 node = id->decl_map->get (type);
571 if (node)
572 return *node;
573
574 /* The type only needs remapping if it's variably modified. */
575 if (! variably_modified_type_p (type, id->src_fn))
576 {
577 insert_decl_map (id, type, type);
578 return type;
579 }
580
581 id->remapping_type_depth++;
582 tmp = remap_type_1 (type, id);
583 id->remapping_type_depth--;
584
585 return tmp;
586 }
587
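/* Illustrative example (hypothetical declarations): for a variably
   modified type such as 'int[n]', where 'n' is a parameter of the
   source function, remap_type builds a fresh ARRAY_TYPE whose domain
   refers to the remapped copy of 'n'; types that are not variably
   modified are simply mapped to themselves.  */
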
588 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
589
590 static bool
591 can_be_nonlocal (tree decl, copy_body_data *id)
592 {
 593   /* We cannot duplicate function decls.  */
594 if (TREE_CODE (decl) == FUNCTION_DECL)
595 return true;
596
597 /* Local static vars must be non-local or we get multiple declaration
598 problems. */
599 if (TREE_CODE (decl) == VAR_DECL
600 && !auto_var_in_fn_p (decl, id->src_fn))
601 return true;
602
603 return false;
604 }
605
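/* Remap the chain of declarations DECLS using ID and return the new
   chain.  Declarations for which can_be_nonlocal is true are not
   copied; when debug info is wanted they are pushed onto
   *NONLOCALIZED_LIST instead.  Value-expressions of the copied
   variables are remapped as well.  */
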
606 static tree
607 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
608 copy_body_data *id)
609 {
610 tree old_var;
611 tree new_decls = NULL_TREE;
612
613 /* Remap its variables. */
614 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
615 {
616 tree new_var;
617
618 if (can_be_nonlocal (old_var, id))
619 {
620 /* We need to add this variable to the local decls as otherwise
621 nothing else will do so. */
622 if (TREE_CODE (old_var) == VAR_DECL
623 && ! DECL_EXTERNAL (old_var))
624 add_local_decl (cfun, old_var);
625 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
626 && !DECL_IGNORED_P (old_var)
627 && nonlocalized_list)
628 vec_safe_push (*nonlocalized_list, old_var);
629 continue;
630 }
631
632 /* Remap the variable. */
633 new_var = remap_decl (old_var, id);
634
635 /* If we didn't remap this variable, we can't mess with its
636 TREE_CHAIN. If we remapped this variable to the return slot, it's
637 already declared somewhere else, so don't declare it here. */
638
639 if (new_var == id->retvar)
640 ;
641 else if (!new_var)
642 {
643 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
644 && !DECL_IGNORED_P (old_var)
645 && nonlocalized_list)
646 vec_safe_push (*nonlocalized_list, old_var);
647 }
648 else
649 {
650 gcc_assert (DECL_P (new_var));
651 DECL_CHAIN (new_var) = new_decls;
652 new_decls = new_var;
653
654 /* Also copy value-expressions. */
655 if (TREE_CODE (new_var) == VAR_DECL
656 && DECL_HAS_VALUE_EXPR_P (new_var))
657 {
658 tree tem = DECL_VALUE_EXPR (new_var);
659 bool old_regimplify = id->regimplify;
660 id->remapping_type_depth++;
661 walk_tree (&tem, copy_tree_body_r, id, NULL);
662 id->remapping_type_depth--;
663 id->regimplify = old_regimplify;
664 SET_DECL_VALUE_EXPR (new_var, tem);
665 }
666 }
667 }
668
669 return nreverse (new_decls);
670 }
671
672 /* Copy the BLOCK to contain remapped versions of the variables
673 therein. And hook the new block into the block-tree. */
674
675 static void
676 remap_block (tree *block, copy_body_data *id)
677 {
678 tree old_block;
679 tree new_block;
680
681 /* Make the new block. */
682 old_block = *block;
683 new_block = make_node (BLOCK);
684 TREE_USED (new_block) = TREE_USED (old_block);
685 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
686 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
687 BLOCK_NONLOCALIZED_VARS (new_block)
688 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
689 *block = new_block;
690
691 /* Remap its variables. */
692 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
693 &BLOCK_NONLOCALIZED_VARS (new_block),
694 id);
695
696 if (id->transform_lang_insert_block)
697 id->transform_lang_insert_block (new_block);
698
699 /* Remember the remapped block. */
700 insert_decl_map (id, old_block, new_block);
701 }
702
703 /* Copy the whole block tree and root it in id->block. */
704 static tree
705 remap_blocks (tree block, copy_body_data *id)
706 {
707 tree t;
708 tree new_tree = block;
709
710 if (!block)
711 return NULL;
712
713 remap_block (&new_tree, id);
714 gcc_assert (new_tree != block);
715 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
716 prepend_lexical_block (new_tree, remap_blocks (t, id));
717 /* Blocks are in arbitrary order, but make things slightly prettier and do
718 not swap order when producing a copy. */
719 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
720 return new_tree;
721 }
722
723 /* Remap the block tree rooted at BLOCK to nothing. */
724 static void
725 remap_blocks_to_null (tree block, copy_body_data *id)
726 {
727 tree t;
728 insert_decl_map (id, block, NULL_TREE);
729 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
730 remap_blocks_to_null (t, id);
731 }
732
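/* Replace *TP, a STATEMENT_LIST, with a new list containing the same
   statements.  Nested STATEMENT_LISTs are copied recursively, since
   tsi_link_after would otherwise splice the original nested list into
   the list being built.  */
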
733 static void
734 copy_statement_list (tree *tp)
735 {
736 tree_stmt_iterator oi, ni;
737 tree new_tree;
738
739 new_tree = alloc_stmt_list ();
740 ni = tsi_start (new_tree);
741 oi = tsi_start (*tp);
742 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
743 *tp = new_tree;
744
745 for (; !tsi_end_p (oi); tsi_next (&oi))
746 {
747 tree stmt = tsi_stmt (oi);
748 if (TREE_CODE (stmt) == STATEMENT_LIST)
749 /* This copy is not redundant; tsi_link_after will smash this
750 STATEMENT_LIST into the end of the one we're building, and we
751 don't want to do that with the original. */
752 copy_statement_list (&stmt);
753 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
754 }
755 }
756
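/* Copy the BIND_EXPR pointed to by TP via copy_tree_r, remapping its
   BLOCK and the variables in BIND_EXPR_VARS with the data in ID.  */
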
757 static void
758 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
759 {
760 tree block = BIND_EXPR_BLOCK (*tp);
761 /* Copy (and replace) the statement. */
762 copy_tree_r (tp, walk_subtrees, NULL);
763 if (block)
764 {
765 remap_block (&block, id);
766 BIND_EXPR_BLOCK (*tp) = block;
767 }
768
769 if (BIND_EXPR_VARS (*tp))
770 /* This will remap a lot of the same decls again, but this should be
771 harmless. */
772 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
773 }
774
775
776 /* Create a new gimple_seq by remapping all the statements in BODY
777 using the inlining information in ID. */
778
779 static gimple_seq
780 remap_gimple_seq (gimple_seq body, copy_body_data *id)
781 {
782 gimple_stmt_iterator si;
783 gimple_seq new_body = NULL;
784
785 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
786 {
787 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
788 gimple_seq_add_stmt (&new_body, new_stmt);
789 }
790
791 return new_body;
792 }
793
794
795 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
796 block using the mapping information in ID. */
797
798 static gimple
799 copy_gimple_bind (gimple stmt, copy_body_data *id)
800 {
801 gimple new_bind;
802 tree new_block, new_vars;
803 gimple_seq body, new_body;
804
805 /* Copy the statement. Note that we purposely don't use copy_stmt
806 here because we need to remap statements as we copy. */
807 body = gimple_bind_body (stmt);
808 new_body = remap_gimple_seq (body, id);
809
810 new_block = gimple_bind_block (stmt);
811 if (new_block)
812 remap_block (&new_block, id);
813
814 /* This will remap a lot of the same decls again, but this should be
815 harmless. */
816 new_vars = gimple_bind_vars (stmt);
817 if (new_vars)
818 new_vars = remap_decls (new_vars, NULL, id);
819
820 new_bind = gimple_build_bind (new_vars, new_body, new_block);
821
822 return new_bind;
823 }
824
825 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
826
827 static bool
828 is_parm (tree decl)
829 {
830 if (TREE_CODE (decl) == SSA_NAME)
831 {
832 decl = SSA_NAME_VAR (decl);
833 if (!decl)
834 return false;
835 }
836
837 return (TREE_CODE (decl) == PARM_DECL);
838 }
839
840 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
841 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
 842    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
 843    recursing into the child nodes of *TP.  */
844
845 static tree
846 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
847 {
848 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
849 copy_body_data *id = (copy_body_data *) wi_p->info;
850 tree fn = id->src_fn;
851
852 if (TREE_CODE (*tp) == SSA_NAME)
853 {
854 *tp = remap_ssa_name (*tp, id);
855 *walk_subtrees = 0;
856 return NULL;
857 }
858 else if (auto_var_in_fn_p (*tp, fn))
859 {
860 /* Local variables and labels need to be replaced by equivalent
861 variables. We don't want to copy static variables; there's
862 only one of those, no matter how many times we inline the
863 containing function. Similarly for globals from an outer
864 function. */
865 tree new_decl;
866
867 /* Remap the declaration. */
868 new_decl = remap_decl (*tp, id);
869 gcc_assert (new_decl);
870 /* Replace this variable with the copy. */
871 STRIP_TYPE_NOPS (new_decl);
872 /* ??? The C++ frontend uses void * pointer zero to initialize
873 any other type. This confuses the middle-end type verification.
874 As cloned bodies do not go through gimplification again the fixup
875 there doesn't trigger. */
876 if (TREE_CODE (new_decl) == INTEGER_CST
877 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
878 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
879 *tp = new_decl;
880 *walk_subtrees = 0;
881 }
882 else if (TREE_CODE (*tp) == STATEMENT_LIST)
883 gcc_unreachable ();
884 else if (TREE_CODE (*tp) == SAVE_EXPR)
885 gcc_unreachable ();
886 else if (TREE_CODE (*tp) == LABEL_DECL
887 && (!DECL_CONTEXT (*tp)
888 || decl_function_context (*tp) == id->src_fn))
889 /* These may need to be remapped for EH handling. */
890 *tp = remap_decl (*tp, id);
891 else if (TREE_CODE (*tp) == FIELD_DECL)
892 {
893 /* If the enclosing record type is variably_modified_type_p, the field
894 has already been remapped. Otherwise, it need not be. */
895 tree *n = id->decl_map->get (*tp);
896 if (n)
897 *tp = *n;
898 *walk_subtrees = 0;
899 }
900 else if (TYPE_P (*tp))
901 /* Types may need remapping as well. */
902 *tp = remap_type (*tp, id);
903 else if (CONSTANT_CLASS_P (*tp))
904 {
905 /* If this is a constant, we have to copy the node iff the type
906 will be remapped. copy_tree_r will not copy a constant. */
907 tree new_type = remap_type (TREE_TYPE (*tp), id);
908
909 if (new_type == TREE_TYPE (*tp))
910 *walk_subtrees = 0;
911
912 else if (TREE_CODE (*tp) == INTEGER_CST)
913 *tp = wide_int_to_tree (new_type, *tp);
914 else
915 {
916 *tp = copy_node (*tp);
917 TREE_TYPE (*tp) = new_type;
918 }
919 }
920 else
921 {
922 /* Otherwise, just copy the node. Note that copy_tree_r already
923 knows not to copy VAR_DECLs, etc., so this is safe. */
924
925 if (TREE_CODE (*tp) == MEM_REF)
926 {
927 /* We need to re-canonicalize MEM_REFs from inline substitutions
928 that can happen when a pointer argument is an ADDR_EXPR.
929 Recurse here manually to allow that. */
930 tree ptr = TREE_OPERAND (*tp, 0);
931 tree type = remap_type (TREE_TYPE (*tp), id);
932 tree old = *tp;
933 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
934 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
935 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
936 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
937 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
938 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
939 remapped a parameter as the property might be valid only
940 for the parameter itself. */
941 if (TREE_THIS_NOTRAP (old)
942 && (!is_parm (TREE_OPERAND (old, 0))
943 || (!id->transform_parameter && is_parm (ptr))))
944 TREE_THIS_NOTRAP (*tp) = 1;
945 *walk_subtrees = 0;
946 return NULL;
947 }
948
949 /* Here is the "usual case". Copy this tree node, and then
950 tweak some special cases. */
951 copy_tree_r (tp, walk_subtrees, NULL);
952
953 if (TREE_CODE (*tp) != OMP_CLAUSE)
954 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
955
956 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
957 {
958 /* The copied TARGET_EXPR has never been expanded, even if the
959 original node was expanded already. */
960 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
961 TREE_OPERAND (*tp, 3) = NULL_TREE;
962 }
963 else if (TREE_CODE (*tp) == ADDR_EXPR)
964 {
965 /* Variable substitution need not be simple. In particular,
966 the MEM_REF substitution above. Make sure that
967 TREE_CONSTANT and friends are up-to-date. */
968 int invariant = is_gimple_min_invariant (*tp);
969 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
970 recompute_tree_invariant_for_addr_expr (*tp);
971
972 /* If this used to be invariant, but is not any longer,
973 then regimplification is probably needed. */
974 if (invariant && !is_gimple_min_invariant (*tp))
975 id->regimplify = true;
976
977 *walk_subtrees = 0;
978 }
979 }
980
981 /* Update the TREE_BLOCK for the cloned expr. */
982 if (EXPR_P (*tp))
983 {
984 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
985 tree old_block = TREE_BLOCK (*tp);
986 if (old_block)
987 {
988 tree *n;
989 n = id->decl_map->get (TREE_BLOCK (*tp));
990 if (n)
991 new_block = *n;
992 }
993 TREE_SET_BLOCK (*tp, new_block);
994 }
995
996 /* Keep iterating. */
997 return NULL_TREE;
998 }
999
1000
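/* Illustrative example of the MEM_REF re-canonicalization performed in
   remap_gimple_op_r (hypothetical names): if the callee dereferences its
   pointer parameter as MEM[p_1, 4] and the caller passes '&s', the
   substitution yields MEM[&s, 4], which fold_build2 may simplify to a
   direct reference into 's' instead of leaving a pointless indirection
   in the inlined body.  */
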
1001 /* Called from copy_body_id via walk_tree. DATA is really a
1002 `copy_body_data *'. */
1003
1004 tree
1005 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1006 {
1007 copy_body_data *id = (copy_body_data *) data;
1008 tree fn = id->src_fn;
1009 tree new_block;
1010
1011 /* Begin by recognizing trees that we'll completely rewrite for the
1012 inlining context. Our output for these trees is completely
 1013    different from our input (e.g. RETURN_EXPR is deleted, and morphs
1014 into an edge). Further down, we'll handle trees that get
1015 duplicated and/or tweaked. */
1016
1017 /* When requested, RETURN_EXPRs should be transformed to just the
1018 contained MODIFY_EXPR. The branch semantics of the return will
1019 be handled elsewhere by manipulating the CFG rather than a statement. */
1020 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1021 {
1022 tree assignment = TREE_OPERAND (*tp, 0);
1023
1024 /* If we're returning something, just turn that into an
1025 assignment into the equivalent of the original RESULT_DECL.
1026 If the "assignment" is just the result decl, the result
1027 decl has already been set (e.g. a recent "foo (&result_decl,
1028 ...)"); just toss the entire RETURN_EXPR. */
1029 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1030 {
1031 /* Replace the RETURN_EXPR with (a copy of) the
1032 MODIFY_EXPR hanging underneath. */
1033 *tp = copy_node (assignment);
1034 }
1035 else /* Else the RETURN_EXPR returns no value. */
1036 {
1037 *tp = NULL;
1038 return (tree) (void *)1;
1039 }
1040 }
1041 else if (TREE_CODE (*tp) == SSA_NAME)
1042 {
1043 *tp = remap_ssa_name (*tp, id);
1044 *walk_subtrees = 0;
1045 return NULL;
1046 }
1047
1048 /* Local variables and labels need to be replaced by equivalent
1049 variables. We don't want to copy static variables; there's only
1050 one of those, no matter how many times we inline the containing
1051 function. Similarly for globals from an outer function. */
1052 else if (auto_var_in_fn_p (*tp, fn))
1053 {
1054 tree new_decl;
1055
1056 /* Remap the declaration. */
1057 new_decl = remap_decl (*tp, id);
1058 gcc_assert (new_decl);
1059 /* Replace this variable with the copy. */
1060 STRIP_TYPE_NOPS (new_decl);
1061 *tp = new_decl;
1062 *walk_subtrees = 0;
1063 }
1064 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1065 copy_statement_list (tp);
1066 else if (TREE_CODE (*tp) == SAVE_EXPR
1067 || TREE_CODE (*tp) == TARGET_EXPR)
1068 remap_save_expr (tp, id->decl_map, walk_subtrees);
1069 else if (TREE_CODE (*tp) == LABEL_DECL
1070 && (! DECL_CONTEXT (*tp)
1071 || decl_function_context (*tp) == id->src_fn))
1072 /* These may need to be remapped for EH handling. */
1073 *tp = remap_decl (*tp, id);
1074 else if (TREE_CODE (*tp) == BIND_EXPR)
1075 copy_bind_expr (tp, walk_subtrees, id);
1076 /* Types may need remapping as well. */
1077 else if (TYPE_P (*tp))
1078 *tp = remap_type (*tp, id);
1079
1080 /* If this is a constant, we have to copy the node iff the type will be
1081 remapped. copy_tree_r will not copy a constant. */
1082 else if (CONSTANT_CLASS_P (*tp))
1083 {
1084 tree new_type = remap_type (TREE_TYPE (*tp), id);
1085
1086 if (new_type == TREE_TYPE (*tp))
1087 *walk_subtrees = 0;
1088
1089 else if (TREE_CODE (*tp) == INTEGER_CST)
1090 *tp = wide_int_to_tree (new_type, *tp);
1091 else
1092 {
1093 *tp = copy_node (*tp);
1094 TREE_TYPE (*tp) = new_type;
1095 }
1096 }
1097
1098 /* Otherwise, just copy the node. Note that copy_tree_r already
1099 knows not to copy VAR_DECLs, etc., so this is safe. */
1100 else
1101 {
1102 /* Here we handle trees that are not completely rewritten.
1103 First we detect some inlining-induced bogosities for
1104 discarding. */
1105 if (TREE_CODE (*tp) == MODIFY_EXPR
1106 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1107 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1108 {
1109 /* Some assignments VAR = VAR; don't generate any rtl code
1110 and thus don't count as variable modification. Avoid
1111 keeping bogosities like 0 = 0. */
1112 tree decl = TREE_OPERAND (*tp, 0), value;
1113 tree *n;
1114
1115 n = id->decl_map->get (decl);
1116 if (n)
1117 {
1118 value = *n;
1119 STRIP_TYPE_NOPS (value);
1120 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1121 {
1122 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1123 return copy_tree_body_r (tp, walk_subtrees, data);
1124 }
1125 }
1126 }
1127 else if (TREE_CODE (*tp) == INDIRECT_REF)
1128 {
1129 /* Get rid of *& from inline substitutions that can happen when a
1130 pointer argument is an ADDR_EXPR. */
1131 tree decl = TREE_OPERAND (*tp, 0);
1132 tree *n = id->decl_map->get (decl);
1133 if (n)
1134 {
1135 /* If we happen to get an ADDR_EXPR in n->value, strip
1136 it manually here as we'll eventually get ADDR_EXPRs
1137 which lie about their types pointed to. In this case
1138 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1139 but we absolutely rely on that. As fold_indirect_ref
1140 does other useful transformations, try that first, though. */
1141 tree type = TREE_TYPE (*tp);
1142 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1143 tree old = *tp;
1144 *tp = gimple_fold_indirect_ref (ptr);
1145 if (! *tp)
1146 {
1147 if (TREE_CODE (ptr) == ADDR_EXPR)
1148 {
1149 *tp
1150 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1151 /* ??? We should either assert here or build
1152 a VIEW_CONVERT_EXPR instead of blindly leaking
1153 incompatible types to our IL. */
1154 if (! *tp)
1155 *tp = TREE_OPERAND (ptr, 0);
1156 }
1157 else
1158 {
1159 *tp = build1 (INDIRECT_REF, type, ptr);
1160 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1161 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1162 TREE_READONLY (*tp) = TREE_READONLY (old);
1163 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1164 have remapped a parameter as the property might be
1165 valid only for the parameter itself. */
1166 if (TREE_THIS_NOTRAP (old)
1167 && (!is_parm (TREE_OPERAND (old, 0))
1168 || (!id->transform_parameter && is_parm (ptr))))
1169 TREE_THIS_NOTRAP (*tp) = 1;
1170 }
1171 }
1172 *walk_subtrees = 0;
1173 return NULL;
1174 }
1175 }
1176 else if (TREE_CODE (*tp) == MEM_REF)
1177 {
1178 /* We need to re-canonicalize MEM_REFs from inline substitutions
1179 that can happen when a pointer argument is an ADDR_EXPR.
1180 Recurse here manually to allow that. */
1181 tree ptr = TREE_OPERAND (*tp, 0);
1182 tree type = remap_type (TREE_TYPE (*tp), id);
1183 tree old = *tp;
1184 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1185 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1186 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1187 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1188 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1189 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1190 remapped a parameter as the property might be valid only
1191 for the parameter itself. */
1192 if (TREE_THIS_NOTRAP (old)
1193 && (!is_parm (TREE_OPERAND (old, 0))
1194 || (!id->transform_parameter && is_parm (ptr))))
1195 TREE_THIS_NOTRAP (*tp) = 1;
1196 *walk_subtrees = 0;
1197 return NULL;
1198 }
1199
1200 /* Here is the "usual case". Copy this tree node, and then
1201 tweak some special cases. */
1202 copy_tree_r (tp, walk_subtrees, NULL);
1203
 1204   /* If EXPR has a block defined, map it to the newly constructed block.
 1205      When inlining, we want EXPRs without a block to appear in the block
 1206      of the function call if we are not remapping a type.  */
1207 if (EXPR_P (*tp))
1208 {
1209 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1210 if (TREE_BLOCK (*tp))
1211 {
1212 tree *n;
1213 n = id->decl_map->get (TREE_BLOCK (*tp));
1214 if (n)
1215 new_block = *n;
1216 }
1217 TREE_SET_BLOCK (*tp, new_block);
1218 }
1219
1220 if (TREE_CODE (*tp) != OMP_CLAUSE)
1221 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1222
1223 /* The copied TARGET_EXPR has never been expanded, even if the
1224 original node was expanded already. */
1225 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1226 {
1227 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1228 TREE_OPERAND (*tp, 3) = NULL_TREE;
1229 }
1230
1231 /* Variable substitution need not be simple. In particular, the
1232 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1233 and friends are up-to-date. */
1234 else if (TREE_CODE (*tp) == ADDR_EXPR)
1235 {
1236 int invariant = is_gimple_min_invariant (*tp);
1237 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1238
1239 /* Handle the case where we substituted an INDIRECT_REF
1240 into the operand of the ADDR_EXPR. */
1241 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1242 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1243 else
1244 recompute_tree_invariant_for_addr_expr (*tp);
1245
1246 /* If this used to be invariant, but is not any longer,
1247 then regimplification is probably needed. */
1248 if (invariant && !is_gimple_min_invariant (*tp))
1249 id->regimplify = true;
1250
1251 *walk_subtrees = 0;
1252 }
1253 }
1254
1255 /* Keep iterating. */
1256 return NULL_TREE;
1257 }
1258
1259 /* Helper for remap_gimple_stmt. Given an EH region number for the
1260 source function, map that to the duplicate EH region number in
1261 the destination function. */
1262
1263 static int
1264 remap_eh_region_nr (int old_nr, copy_body_data *id)
1265 {
1266 eh_region old_r, new_r;
1267
1268 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1269 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1270
1271 return new_r->index;
1272 }
1273
1274 /* Similar, but operate on INTEGER_CSTs. */
1275
1276 static tree
1277 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1278 {
1279 int old_nr, new_nr;
1280
1281 old_nr = tree_to_shwi (old_t_nr);
1282 new_nr = remap_eh_region_nr (old_nr, id);
1283
1284 return build_int_cst (integer_type_node, new_nr);
1285 }
1286
1287 /* Helper for copy_bb. Remap statement STMT using the inlining
1288 information in ID. Return the new statement copy. */
1289
1290 static gimple
1291 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1292 {
1293 gimple copy = NULL;
1294 struct walk_stmt_info wi;
1295 bool skip_first = false;
1296
1297 /* Begin by recognizing trees that we'll completely rewrite for the
1298 inlining context. Our output for these trees is completely
 1299    different from our input (e.g. RETURN_EXPR is deleted, and morphs
1300 into an edge). Further down, we'll handle trees that get
1301 duplicated and/or tweaked. */
1302
1303 /* When requested, GIMPLE_RETURNs should be transformed to just the
1304 contained GIMPLE_ASSIGN. The branch semantics of the return will
1305 be handled elsewhere by manipulating the CFG rather than the
1306 statement. */
1307 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1308 {
1309 tree retval = gimple_return_retval (stmt);
1310
1311 /* If we're returning something, just turn that into an
1312 assignment into the equivalent of the original RESULT_DECL.
1313 If RETVAL is just the result decl, the result decl has
1314 already been set (e.g. a recent "foo (&result_decl, ...)");
1315 just toss the entire GIMPLE_RETURN. */
1316 if (retval
1317 && (TREE_CODE (retval) != RESULT_DECL
1318 && (TREE_CODE (retval) != SSA_NAME
1319 || ! SSA_NAME_VAR (retval)
1320 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1321 {
1322 copy = gimple_build_assign (id->do_not_unshare
1323 ? id->retvar : unshare_expr (id->retvar),
1324 retval);
1325 /* id->retvar is already substituted. Skip it on later remapping. */
1326 skip_first = true;
1327 }
1328 else
1329 return gimple_build_nop ();
1330 }
1331 else if (gimple_has_substatements (stmt))
1332 {
1333 gimple_seq s1, s2;
1334
1335 /* When cloning bodies from the C++ front end, we will be handed bodies
1336 in High GIMPLE form. Handle here all the High GIMPLE statements that
1337 have embedded statements. */
1338 switch (gimple_code (stmt))
1339 {
1340 case GIMPLE_BIND:
1341 copy = copy_gimple_bind (stmt, id);
1342 break;
1343
1344 case GIMPLE_CATCH:
1345 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1346 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1347 break;
1348
1349 case GIMPLE_EH_FILTER:
1350 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1351 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1352 break;
1353
1354 case GIMPLE_TRY:
1355 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1356 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1357 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1358 break;
1359
1360 case GIMPLE_WITH_CLEANUP_EXPR:
1361 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1362 copy = gimple_build_wce (s1);
1363 break;
1364
1365 case GIMPLE_OMP_PARALLEL:
1366 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1367 copy = gimple_build_omp_parallel
1368 (s1,
1369 gimple_omp_parallel_clauses (stmt),
1370 gimple_omp_parallel_child_fn (stmt),
1371 gimple_omp_parallel_data_arg (stmt));
1372 break;
1373
1374 case GIMPLE_OMP_TASK:
1375 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1376 copy = gimple_build_omp_task
1377 (s1,
1378 gimple_omp_task_clauses (stmt),
1379 gimple_omp_task_child_fn (stmt),
1380 gimple_omp_task_data_arg (stmt),
1381 gimple_omp_task_copy_fn (stmt),
1382 gimple_omp_task_arg_size (stmt),
1383 gimple_omp_task_arg_align (stmt));
1384 break;
1385
1386 case GIMPLE_OMP_FOR:
1387 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1388 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1389 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1390 gimple_omp_for_clauses (stmt),
1391 gimple_omp_for_collapse (stmt), s2);
1392 {
1393 size_t i;
1394 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1395 {
1396 gimple_omp_for_set_index (copy, i,
1397 gimple_omp_for_index (stmt, i));
1398 gimple_omp_for_set_initial (copy, i,
1399 gimple_omp_for_initial (stmt, i));
1400 gimple_omp_for_set_final (copy, i,
1401 gimple_omp_for_final (stmt, i));
1402 gimple_omp_for_set_incr (copy, i,
1403 gimple_omp_for_incr (stmt, i));
1404 gimple_omp_for_set_cond (copy, i,
1405 gimple_omp_for_cond (stmt, i));
1406 }
1407 }
1408 break;
1409
1410 case GIMPLE_OMP_MASTER:
1411 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1412 copy = gimple_build_omp_master (s1);
1413 break;
1414
1415 case GIMPLE_OMP_TASKGROUP:
1416 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1417 copy = gimple_build_omp_taskgroup (s1);
1418 break;
1419
1420 case GIMPLE_OMP_ORDERED:
1421 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1422 copy = gimple_build_omp_ordered (s1);
1423 break;
1424
1425 case GIMPLE_OMP_SECTION:
1426 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1427 copy = gimple_build_omp_section (s1);
1428 break;
1429
1430 case GIMPLE_OMP_SECTIONS:
1431 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1432 copy = gimple_build_omp_sections
1433 (s1, gimple_omp_sections_clauses (stmt));
1434 break;
1435
1436 case GIMPLE_OMP_SINGLE:
1437 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1438 copy = gimple_build_omp_single
1439 (s1, gimple_omp_single_clauses (stmt));
1440 break;
1441
1442 case GIMPLE_OMP_TARGET:
1443 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1444 copy = gimple_build_omp_target
1445 (s1, gimple_omp_target_kind (stmt),
1446 gimple_omp_target_clauses (stmt));
1447 break;
1448
1449 case GIMPLE_OMP_TEAMS:
1450 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1451 copy = gimple_build_omp_teams
1452 (s1, gimple_omp_teams_clauses (stmt));
1453 break;
1454
1455 case GIMPLE_OMP_CRITICAL:
1456 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1457 copy
1458 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1459 break;
1460
1461 case GIMPLE_TRANSACTION:
1462 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1463 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1464 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1465 break;
1466
1467 default:
1468 gcc_unreachable ();
1469 }
1470 }
1471 else
1472 {
1473 if (gimple_assign_copy_p (stmt)
1474 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1475 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1476 {
1477 /* Here we handle statements that are not completely rewritten.
1478 First we detect some inlining-induced bogosities for
1479 discarding. */
1480
1481 /* Some assignments VAR = VAR; don't generate any rtl code
1482 and thus don't count as variable modification. Avoid
1483 keeping bogosities like 0 = 0. */
1484 tree decl = gimple_assign_lhs (stmt), value;
1485 tree *n;
1486
1487 n = id->decl_map->get (decl);
1488 if (n)
1489 {
1490 value = *n;
1491 STRIP_TYPE_NOPS (value);
1492 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1493 return gimple_build_nop ();
1494 }
1495 }
1496
1497 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1498 in a block that we aren't copying during tree_function_versioning,
1499 just drop the clobber stmt. */
1500 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1501 {
1502 tree lhs = gimple_assign_lhs (stmt);
1503 if (TREE_CODE (lhs) == MEM_REF
1504 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1505 {
1506 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1507 if (gimple_bb (def_stmt)
1508 && !bitmap_bit_p (id->blocks_to_copy,
1509 gimple_bb (def_stmt)->index))
1510 return gimple_build_nop ();
1511 }
1512 }
1513
1514 if (gimple_debug_bind_p (stmt))
1515 {
1516 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1517 gimple_debug_bind_get_value (stmt),
1518 stmt);
1519 id->debug_stmts.safe_push (copy);
1520 return copy;
1521 }
1522 if (gimple_debug_source_bind_p (stmt))
1523 {
1524 copy = gimple_build_debug_source_bind
1525 (gimple_debug_source_bind_get_var (stmt),
1526 gimple_debug_source_bind_get_value (stmt), stmt);
1527 id->debug_stmts.safe_push (copy);
1528 return copy;
1529 }
1530
1531 /* Create a new deep copy of the statement. */
1532 copy = gimple_copy (stmt);
1533
1534 /* Clear flags that need revisiting. */
1535 if (is_gimple_call (copy)
1536 && gimple_call_tail_p (copy))
1537 gimple_call_set_tail (copy, false);
1538
1539 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1540 RESX and EH_DISPATCH. */
1541 if (id->eh_map)
1542 switch (gimple_code (copy))
1543 {
1544 case GIMPLE_CALL:
1545 {
1546 tree r, fndecl = gimple_call_fndecl (copy);
1547 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1548 switch (DECL_FUNCTION_CODE (fndecl))
1549 {
1550 case BUILT_IN_EH_COPY_VALUES:
1551 r = gimple_call_arg (copy, 1);
1552 r = remap_eh_region_tree_nr (r, id);
1553 gimple_call_set_arg (copy, 1, r);
1554 /* FALLTHRU */
1555
1556 case BUILT_IN_EH_POINTER:
1557 case BUILT_IN_EH_FILTER:
1558 r = gimple_call_arg (copy, 0);
1559 r = remap_eh_region_tree_nr (r, id);
1560 gimple_call_set_arg (copy, 0, r);
1561 break;
1562
1563 default:
1564 break;
1565 }
1566
1567 /* Reset alias info if we didn't apply measures to
1568 keep it valid over inlining by setting DECL_PT_UID. */
1569 if (!id->src_cfun->gimple_df
1570 || !id->src_cfun->gimple_df->ipa_pta)
1571 gimple_call_reset_alias_info (copy);
1572 }
1573 break;
1574
1575 case GIMPLE_RESX:
1576 {
1577 int r = gimple_resx_region (copy);
1578 r = remap_eh_region_nr (r, id);
1579 gimple_resx_set_region (copy, r);
1580 }
1581 break;
1582
1583 case GIMPLE_EH_DISPATCH:
1584 {
1585 int r = gimple_eh_dispatch_region (copy);
1586 r = remap_eh_region_nr (r, id);
1587 gimple_eh_dispatch_set_region (copy, r);
1588 }
1589 break;
1590
1591 default:
1592 break;
1593 }
1594 }
1595
1596 /* If STMT has a block defined, map it to the newly constructed
1597 block. */
1598 if (gimple_block (copy))
1599 {
1600 tree *n;
1601 n = id->decl_map->get (gimple_block (copy));
1602 gcc_assert (n);
1603 gimple_set_block (copy, *n);
1604 }
1605
1606 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1607 return copy;
1608
1609 /* Remap all the operands in COPY. */
1610 memset (&wi, 0, sizeof (wi));
1611 wi.info = id;
1612 if (skip_first)
1613 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1614 else
1615 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1616
1617 /* Clear the copied virtual operands. We are not remapping them here
1618 but are going to recreate them from scratch. */
1619 if (gimple_has_mem_ops (copy))
1620 {
1621 gimple_set_vdef (copy, NULL_TREE);
1622 gimple_set_vuse (copy, NULL_TREE);
1623 }
1624
1625 return copy;
1626 }
1627
1628
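/* Illustrative example of the GIMPLE_RETURN handling in
   remap_gimple_stmt (hypothetical operands): with
   id->transform_return_to_modify set,

     return 42;          is remapped to      retvar = 42;

   whereas a return whose value is the RESULT_DECL itself (or an SSA
   name of it), e.g. after a call of the form 'foo (&<retval>, ...)',
   is remapped to a GIMPLE_NOP because the result has already been
   stored.  */
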
1629 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1630 later */
1631
1632 static basic_block
1633 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1634 gcov_type count_scale)
1635 {
1636 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1637 basic_block copy_basic_block;
1638 tree decl;
1639 gcov_type freq;
1640 basic_block prev;
1641
1642 /* Search for previous copied basic block. */
1643 prev = bb->prev_bb;
1644 while (!prev->aux)
1645 prev = prev->prev_bb;
1646
1647 /* create_basic_block() will append every new block to
1648 basic_block_info automatically. */
1649 copy_basic_block = create_basic_block (NULL, (void *) 0,
1650 (basic_block) prev->aux);
1651 copy_basic_block->count = apply_scale (bb->count, count_scale);
1652
 1653   /* We are going to rebuild frequencies from scratch.  These values
 1654      have only minor importance for driving canonicalize_loop_headers.  */
1655 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1656
1657 /* We recompute frequencies after inlining, so this is quite safe. */
1658 if (freq > BB_FREQ_MAX)
1659 freq = BB_FREQ_MAX;
1660 copy_basic_block->frequency = freq;
1661
1662 copy_gsi = gsi_start_bb (copy_basic_block);
1663
1664 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1665 {
1666 gimple stmt = gsi_stmt (gsi);
1667 gimple orig_stmt = stmt;
1668
1669 id->regimplify = false;
1670 stmt = remap_gimple_stmt (stmt, id);
1671 if (gimple_nop_p (stmt))
1672 continue;
1673
1674 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1675 seq_gsi = copy_gsi;
1676
1677 /* With return slot optimization we can end up with
1678 non-gimple (foo *)&this->m, fix that here. */
1679 if (is_gimple_assign (stmt)
1680 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1681 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1682 {
1683 tree new_rhs;
1684 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1685 gimple_assign_rhs1 (stmt),
1686 true, NULL, false,
1687 GSI_CONTINUE_LINKING);
1688 gimple_assign_set_rhs1 (stmt, new_rhs);
1689 id->regimplify = false;
1690 }
1691
1692 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1693
1694 if (id->regimplify)
1695 gimple_regimplify_operands (stmt, &seq_gsi);
1696
 1697   /* If copy_basic_block was empty at the start of this iteration,
 1698      call gsi_start_bb again to get at the newly added statements.  */
1699 if (gsi_end_p (copy_gsi))
1700 copy_gsi = gsi_start_bb (copy_basic_block);
1701 else
1702 gsi_next (&copy_gsi);
1703
 1704   /* Process the new statement.  The call to gimple_regimplify_operands
 1705      may have turned the statement into multiple statements; we
 1706      need to process all of them.  */
1707 do
1708 {
1709 tree fn;
1710
1711 stmt = gsi_stmt (copy_gsi);
1712 if (is_gimple_call (stmt)
1713 && gimple_call_va_arg_pack_p (stmt)
1714 && id->gimple_call)
1715 {
1716 /* __builtin_va_arg_pack () should be replaced by
1717 all arguments corresponding to ... in the caller. */
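	      /* A purely illustrative example (names made up): if the call
		 being inlined (id->gimple_call) was
		   bar (x, 2, 3);
		 for 'void bar (int a, ...)' whose body contains
		   foo (a, __builtin_va_arg_pack ());
		 then the copy of that inner call becomes
		   foo (a.0, 2, 3);
		 i.e. the builtin stands for the caller's anonymous
		 arguments.  */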
1718 tree p;
1719 gimple new_call;
1720 vec<tree> argarray;
1721 size_t nargs = gimple_call_num_args (id->gimple_call);
1722 size_t n;
1723
1724 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1725 nargs--;
1726
1727 /* Create the new array of arguments. */
1728 n = nargs + gimple_call_num_args (stmt);
1729 argarray.create (n);
1730 argarray.safe_grow_cleared (n);
1731
1732 /* Copy all the arguments before '...' */
1733 memcpy (argarray.address (),
1734 gimple_call_arg_ptr (stmt, 0),
1735 gimple_call_num_args (stmt) * sizeof (tree));
1736
1737 /* Append the arguments passed in '...' */
1738 memcpy (argarray.address () + gimple_call_num_args (stmt),
1739 gimple_call_arg_ptr (id->gimple_call, 0)
1740 + (gimple_call_num_args (id->gimple_call) - nargs),
1741 nargs * sizeof (tree));
1742
1743 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1744 argarray);
1745
1746 argarray.release ();
1747
1748 /* Copy all GIMPLE_CALL flags, location and block, except
1749 GF_CALL_VA_ARG_PACK. */
1750 gimple_call_copy_flags (new_call, stmt);
1751 gimple_call_set_va_arg_pack (new_call, false);
1752 gimple_set_location (new_call, gimple_location (stmt));
1753 gimple_set_block (new_call, gimple_block (stmt));
1754 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1755
1756 gsi_replace (&copy_gsi, new_call, false);
1757 stmt = new_call;
1758 }
1759 else if (is_gimple_call (stmt)
1760 && id->gimple_call
1761 && (decl = gimple_call_fndecl (stmt))
1762 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1763 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1764 {
1765 /* __builtin_va_arg_pack_len () should be replaced by
1766 the number of anonymous arguments. */
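	      /* E.g. (illustrative): when inlining f (int a, ...) at the
		 call f (1, 2, 3), __builtin_va_arg_pack_len () folds to the
		 constant 2.  */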
1767 size_t nargs = gimple_call_num_args (id->gimple_call);
1768 tree count, p;
1769 gimple new_stmt;
1770
1771 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1772 nargs--;
1773
1774 count = build_int_cst (integer_type_node, nargs);
1775 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1776 gsi_replace (&copy_gsi, new_stmt, false);
1777 stmt = new_stmt;
1778 }
1779
1780 	  /* Statements produced by inlining can be unfolded, especially
1781 	     when we have constant propagated some operands.  We can't fold
1782 	     them right now for two reasons:
1783 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
1784 	     2) we can't change function calls to builtins.
1785 	     So we just mark the statements for later folding.  We mark
1786 	     all new statements, instead of just the statements that have been
1787 	     changed by some nontrivial substitution, so that even statements
1788 	     made foldable indirectly are updated.  If this turns out to be
1789 	     expensive, copy_body can be told to watch for nontrivial
1790 	     changes.  */
1791 if (id->statements_to_fold)
1792 id->statements_to_fold->add (stmt);
1793
1794 /* We're duplicating a CALL_EXPR. Find any corresponding
1795 callgraph edges and update or duplicate them. */
1796 if (is_gimple_call (stmt))
1797 {
1798 struct cgraph_edge *edge;
1799
1800 switch (id->transform_call_graph_edges)
1801 {
1802 case CB_CGE_DUPLICATE:
1803 edge = id->src_node->get_edge (orig_stmt);
1804 if (edge)
1805 {
1806 int edge_freq = edge->frequency;
1807 int new_freq;
1808 struct cgraph_edge *old_edge = edge;
1809 edge = edge->clone (id->dst_node, stmt,
1810 gimple_uid (stmt),
1811 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1812 true);
1813 		      /* We could also just rescale the frequency, but
1814 		         doing so would introduce roundoff errors and make
1815 		         the verifier unhappy.  */
1816 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1817 copy_basic_block);
1818
1819 /* Speculative calls consist of two edges - direct and indirect.
1820 Duplicate the whole thing and distribute frequencies accordingly. */
1821 if (edge->speculative)
1822 {
1823 struct cgraph_edge *direct, *indirect;
1824 struct ipa_ref *ref;
1825
1826 gcc_assert (!edge->indirect_unknown_callee);
1827 old_edge->speculative_call_info (direct, indirect, ref);
1828 indirect = indirect->clone (id->dst_node, stmt,
1829 gimple_uid (stmt),
1830 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1831 true);
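			      /* Split NEW_FREQ between the direct and the
				 indirect edge in proportion to their old
				 frequencies, capping at CGRAPH_FREQ_MAX.  */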
1832 if (old_edge->frequency + indirect->frequency)
1833 {
1834 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1835 (old_edge->frequency + indirect->frequency)),
1836 CGRAPH_FREQ_MAX);
1837 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1838 (old_edge->frequency + indirect->frequency)),
1839 CGRAPH_FREQ_MAX);
1840 }
1841 id->dst_node->clone_reference (ref, stmt);
1842 }
1843 else
1844 {
1845 edge->frequency = new_freq;
1846 if (dump_file
1847 && profile_status_for_fn (cfun) != PROFILE_ABSENT
1848 && (edge_freq > edge->frequency + 10
1849 || edge_freq < edge->frequency - 10))
1850 {
1851 fprintf (dump_file, "Edge frequency estimated by "
1852 				   "cgraph %i diverges from the inliner's estimate %i\n",
1853 edge_freq,
1854 edge->frequency);
1855 fprintf (dump_file,
1856 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1857 bb->index,
1858 bb->frequency,
1859 copy_basic_block->frequency);
1860 }
1861 }
1862 }
1863 break;
1864
1865 case CB_CGE_MOVE_CLONES:
1866 id->dst_node->set_call_stmt_including_clones (orig_stmt,
1867 stmt);
1868 edge = id->dst_node->get_edge (stmt);
1869 break;
1870
1871 case CB_CGE_MOVE:
1872 edge = id->dst_node->get_edge (orig_stmt);
1873 if (edge)
1874 edge->set_call_stmt (stmt);
1875 break;
1876
1877 default:
1878 gcc_unreachable ();
1879 }
1880
1881 	      /* Constant propagation on arguments done during inlining
1882 		 may create new direct calls.  Produce an edge for each.  */
1883 if ((!edge
1884 || (edge->indirect_inlining_edge
1885 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1886 && id->dst_node->definition
1887 && (fn = gimple_call_fndecl (stmt)) != NULL)
1888 {
1889 struct cgraph_node *dest = cgraph_node::get (fn);
1890
1891 		  /* We have a missing edge in the callgraph.  This can happen
1892 		     when previous inlining turned an indirect call into a
1893 		     direct call by constant propagating arguments, or when we are
1894 		     producing a dead clone (for further cloning).  In all
1895 		     other cases we hit a bug (incorrect node sharing is the
1896 		     most common reason for missing edges).  */
1897 gcc_assert (!dest->definition
1898 || dest->address_taken
1899 || !id->src_node->definition
1900 || !id->dst_node->definition);
1901 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1902 id->dst_node->create_edge_including_clones
1903 (dest, orig_stmt, stmt, bb->count,
1904 compute_call_stmt_bb_frequency (id->dst_node->decl,
1905 copy_basic_block),
1906 CIF_ORIGINALLY_INDIRECT_CALL);
1907 else
1908 id->dst_node->create_edge (dest, stmt,
1909 bb->count,
1910 compute_call_stmt_bb_frequency
1911 (id->dst_node->decl,
1912 copy_basic_block))->inline_failed
1913 = CIF_ORIGINALLY_INDIRECT_CALL;
1914 if (dump_file)
1915 {
1916 fprintf (dump_file, "Created new direct edge to %s\n",
1917 dest->name ());
1918 }
1919 }
1920
1921 notice_special_calls (stmt);
1922 }
1923
1924 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1925 id->eh_map, id->eh_lp_nr);
1926
1927 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1928 {
1929 ssa_op_iter i;
1930 tree def;
1931
1932 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1933 if (TREE_CODE (def) == SSA_NAME)
1934 SSA_NAME_DEF_STMT (def) = stmt;
1935 }
1936
1937 gsi_next (&copy_gsi);
1938 }
1939 while (!gsi_end_p (copy_gsi));
1940
1941 copy_gsi = gsi_last_bb (copy_basic_block);
1942 }
1943
1944 return copy_basic_block;
1945 }
1946
1947 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1948    form is quite easy, since the dominator relationship for the old basic
1949    blocks does not change.
1950 
1951    There is, however, an exception: inlining might change the dominator
1952    relation across EH edges from basic blocks within the inlined function
1953    to landing pads in the function we inline into.
1954 
1955    The function fills in the PHI_RESULTs of such PHI nodes if they refer
1956    to gimple regs.  Otherwise, it marks the PHI_RESULT of such
1957    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
1958    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1959    set, and this means that there will be no overlapping live ranges
1960    for the underlying symbol.
1961 
1962    This might change in the future if we allow redirecting of EH edges; we
1963    might then want to change the way the CFG is built pre-inlining to include
1964    all the possible edges.  */
1965 static void
1966 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1967 bool can_throw, bool nonlocal_goto)
1968 {
1969 edge e;
1970 edge_iterator ei;
1971
1972 FOR_EACH_EDGE (e, ei, bb->succs)
1973 if (!e->dest->aux
1974 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1975 {
1976 gimple phi;
1977 gimple_stmt_iterator si;
1978
1979 if (!nonlocal_goto)
1980 gcc_assert (e->flags & EDGE_EH);
1981
1982 if (!can_throw)
1983 gcc_assert (!(e->flags & EDGE_EH));
1984
1985 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1986 {
1987 edge re;
1988
1989 phi = gsi_stmt (si);
1990
1991 /* For abnormal goto/call edges the receiver can be the
1992 ENTRY_BLOCK. Do not assert this cannot happen. */
1993
1994 gcc_assert ((e->flags & EDGE_EH)
1995 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1996
1997 re = find_edge (ret_bb, e->dest);
1998 gcc_checking_assert (re);
1999 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2000 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2001
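	  /* Reuse for the new edge E the PHI argument that already flows
	     in along the corresponding edge from RET_BB; see the comment
	     above the function for why this is safe.  */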
2002 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2003 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2004 }
2005 }
2006 }
2007
2008
2009 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2010    accordingly.  Edges will be taken care of later.  Assume the aux
2011    pointers point to the copies of each BB.  Return true if any
2012    debug stmts are left after a statement that must end the basic block.  */
2013
2014 static bool
2015 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2016 basic_block abnormal_goto_dest)
2017 {
2018 basic_block new_bb = (basic_block) bb->aux;
2019 edge_iterator ei;
2020 edge old_edge;
2021 gimple_stmt_iterator si;
2022 int flags;
2023 bool need_debug_cleanup = false;
2024
2025 /* Use the indices from the original blocks to create edges for the
2026 new ones. */
2027 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2028 if (!(old_edge->flags & EDGE_EH))
2029 {
2030 edge new_edge;
2031
2032 flags = old_edge->flags;
2033
2034 	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2035 if (old_edge->dest->index == EXIT_BLOCK
2036 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2037 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2038 flags |= EDGE_FALLTHRU;
2039 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
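	/* Counts are absolute and must be rescaled by COUNT_SCALE (a
	   REG_BR_PROB_BASE fixed-point factor); probabilities are relative
	   to the block and can be copied unchanged.  */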
2040 new_edge->count = apply_scale (old_edge->count, count_scale);
2041 new_edge->probability = old_edge->probability;
2042 }
2043
2044 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2045 return false;
2046
2047 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2048 {
2049 gimple copy_stmt;
2050 bool can_throw, nonlocal_goto;
2051
2052 copy_stmt = gsi_stmt (si);
2053 if (!is_gimple_debug (copy_stmt))
2054 update_stmt (copy_stmt);
2055
2056 /* Do this before the possible split_block. */
2057 gsi_next (&si);
2058
2059 /* If this tree could throw an exception, there are two
2060 cases where we need to add abnormal edge(s): the
2061 tree wasn't in a region and there is a "current
2062 region" in the caller; or the original tree had
2063 EH edges. In both cases split the block after the tree,
2064 and add abnormal edge(s) as needed; we need both
2065 those from the callee and the caller.
2066 We check whether the copy can throw, because the const
2067 propagation can change an INDIRECT_REF which throws
2068 into a COMPONENT_REF which doesn't. If the copy
2069 can throw, the original could also throw. */
2070 can_throw = stmt_can_throw_internal (copy_stmt);
2071 nonlocal_goto
2072 = (stmt_can_make_abnormal_goto (copy_stmt)
2073 && !computed_goto_p (copy_stmt));
2074
2075 if (can_throw || nonlocal_goto)
2076 {
2077 if (!gsi_end_p (si))
2078 {
2079 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2080 gsi_next (&si);
2081 if (gsi_end_p (si))
2082 need_debug_cleanup = true;
2083 }
2084 if (!gsi_end_p (si))
2085 /* Note that bb's predecessor edges aren't necessarily
2086 right at this point; split_block doesn't care. */
2087 {
2088 edge e = split_block (new_bb, copy_stmt);
2089
2090 new_bb = e->dest;
2091 new_bb->aux = e->src->aux;
2092 si = gsi_start_bb (new_bb);
2093 }
2094 }
2095
2096 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2097 make_eh_dispatch_edges (copy_stmt);
2098 else if (can_throw)
2099 make_eh_edges (copy_stmt);
2100
2101       /* If the call we inline cannot make an abnormal goto, do not add
2102 	 additional abnormal edges; only retain those already present
2103 	 in the original function body.  */
2104 if (abnormal_goto_dest == NULL)
2105 nonlocal_goto = false;
2106 if (nonlocal_goto)
2107 {
2108 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2109
2110 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2111 nonlocal_goto = false;
2112 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2113 in OpenMP regions which aren't allowed to be left abnormally.
2114 So, no need to add abnormal edge in that case. */
2115 else if (is_gimple_call (copy_stmt)
2116 && gimple_call_internal_p (copy_stmt)
2117 && (gimple_call_internal_fn (copy_stmt)
2118 == IFN_ABNORMAL_DISPATCHER)
2119 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2120 nonlocal_goto = false;
2121 else
2122 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2123 }
2124
2125 if ((can_throw || nonlocal_goto)
2126 && gimple_in_ssa_p (cfun))
2127 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2128 can_throw, nonlocal_goto);
2129 }
2130 return need_debug_cleanup;
2131 }
2132
2133 /* Copy the PHIs.  All blocks and edges have been copied; some blocks
2134    were possibly split and new outgoing EH edges inserted.
2135    BB points to the block of the original function and AUX pointers link
2136    the original and newly copied blocks.  */
2137
2138 static void
2139 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2140 {
2141 basic_block const new_bb = (basic_block) bb->aux;
2142 edge_iterator ei;
2143 gimple phi;
2144 gimple_stmt_iterator si;
2145 edge new_edge;
2146 bool inserted = false;
2147
2148 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2149 {
2150 tree res, new_res;
2151 gimple new_phi;
2152
2153 phi = gsi_stmt (si);
2154 res = PHI_RESULT (phi);
2155 new_res = res;
2156 if (!virtual_operand_p (res))
2157 {
2158 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2159 new_phi = create_phi_node (new_res, new_bb);
2160 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2161 {
2162 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2163 tree arg;
2164 tree new_arg;
2165 edge_iterator ei2;
2166 location_t locus;
2167
2168 	      /* When doing partial cloning, we allow PHIs on the entry block
2169 		 as long as all the arguments are the same.  Find any input
2170 		 edge to see which argument to copy.  */
2171 if (!old_edge)
2172 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2173 if (!old_edge->src->aux)
2174 break;
2175
2176 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2177 new_arg = arg;
2178 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2179 gcc_assert (new_arg);
2180 	      /* With return slot optimization we can end up with a
2181 		 non-GIMPLE argument like (foo *)&this->m; fix that here.  */
2182 if (TREE_CODE (new_arg) != SSA_NAME
2183 && TREE_CODE (new_arg) != FUNCTION_DECL
2184 && !is_gimple_val (new_arg))
2185 {
2186 gimple_seq stmts = NULL;
2187 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2188 gsi_insert_seq_on_edge (new_edge, stmts);
2189 inserted = true;
2190 }
2191 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2192 if (LOCATION_BLOCK (locus))
2193 {
2194 tree *n;
2195 n = id->decl_map->get (LOCATION_BLOCK (locus));
2196 gcc_assert (n);
2197 if (*n)
2198 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2199 else
2200 locus = LOCATION_LOCUS (locus);
2201 }
2202 else
2203 locus = LOCATION_LOCUS (locus);
2204
2205 add_phi_arg (new_phi, new_arg, new_edge, locus);
2206 }
2207 }
2208 }
2209
2210 /* Commit the delayed edge insertions. */
2211 if (inserted)
2212 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2213 gsi_commit_one_edge_insert (new_edge, NULL);
2214 }
2215
2216
2217 /* Wrapper for remap_decl so it can be used as a callback. */
2218
2219 static tree
2220 remap_decl_1 (tree decl, void *data)
2221 {
2222 return remap_decl (decl, (copy_body_data *) data);
2223 }
2224
2225 /* Build a struct function and associated datastructures for the new clone
2226    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2227    changes cfun to the function of NEW_FNDECL (and current_function_decl too).  */
2228
2229 static void
2230 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2231 {
2232 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2233 gcov_type count_scale;
2234
2235 if (!DECL_ARGUMENTS (new_fndecl))
2236 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2237 if (!DECL_RESULT (new_fndecl))
2238 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2239
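  /* COUNT_SCALE is the ratio of the requested entry count to the source
     function's entry count, expressed as a REG_BR_PROB_BASE fixed-point
     factor; REG_BR_PROB_BASE therefore means "copy counts unchanged".  */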
2240 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2241 count_scale
2242 = GCOV_COMPUTE_SCALE (count,
2243 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2244 else
2245 count_scale = REG_BR_PROB_BASE;
2246
2247 /* Register specific tree functions. */
2248 gimple_register_cfg_hooks ();
2249
2250 /* Get clean struct function. */
2251 push_struct_function (new_fndecl);
2252
2253 /* We will rebuild these, so just sanity check that they are empty. */
2254 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2255 gcc_assert (cfun->local_decls == NULL);
2256 gcc_assert (cfun->cfg == NULL);
2257 gcc_assert (cfun->decl == new_fndecl);
2258
2259 /* Copy items we preserve during cloning. */
2260 cfun->static_chain_decl = src_cfun->static_chain_decl;
2261 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2262 cfun->function_end_locus = src_cfun->function_end_locus;
2263 cfun->curr_properties = src_cfun->curr_properties;
2264 cfun->last_verified = src_cfun->last_verified;
2265 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2266 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2267 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2268 cfun->stdarg = src_cfun->stdarg;
2269 cfun->after_inlining = src_cfun->after_inlining;
2270 cfun->can_throw_non_call_exceptions
2271 = src_cfun->can_throw_non_call_exceptions;
2272 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2273 cfun->returns_struct = src_cfun->returns_struct;
2274 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2275
2276 init_empty_tree_cfg ();
2277
2278 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2279 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2280 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2281 REG_BR_PROB_BASE);
2282 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2283 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2284 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2285 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2286 REG_BR_PROB_BASE);
2287 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2288 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2289 if (src_cfun->eh)
2290 init_eh_for_function ();
2291
2292 if (src_cfun->gimple_df)
2293 {
2294 init_tree_ssa (cfun);
2295 cfun->gimple_df->in_ssa_p = true;
2296 init_ssa_operands (cfun);
2297 }
2298 }
2299
2300 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2301    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2302    successor has multiple predecessors, reset the moved debug stmts' values;
2303    otherwise keep them.  */
2304
2305 static void
2306 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2307 {
2308 edge e;
2309 edge_iterator ei;
2310 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2311
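  /* Debug stmts can only be stranded after the last nondebug stmt when that
     stmt may end the block by throwing internally or making an abnormal
     goto; otherwise (or if the block is trivially small) there is nothing
     to move.  */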
2312 if (gsi_end_p (si)
2313 || gsi_one_before_end_p (si)
2314 || !(stmt_can_throw_internal (gsi_stmt (si))
2315 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2316 return;
2317
2318 FOR_EACH_EDGE (e, ei, new_bb->succs)
2319 {
2320 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2321 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2322 while (is_gimple_debug (gsi_stmt (ssi)))
2323 {
2324 gimple stmt = gsi_stmt (ssi), new_stmt;
2325 tree var;
2326 tree value;
2327
2328 /* For the last edge move the debug stmts instead of copying
2329 them. */
2330 if (ei_one_before_end_p (ei))
2331 {
2332 si = ssi;
2333 gsi_prev (&ssi);
2334 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2335 gimple_debug_bind_reset_value (stmt);
2336 gsi_remove (&si, false);
2337 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2338 continue;
2339 }
2340
2341 if (gimple_debug_bind_p (stmt))
2342 {
2343 var = gimple_debug_bind_get_var (stmt);
2344 if (single_pred_p (e->dest))
2345 {
2346 value = gimple_debug_bind_get_value (stmt);
2347 value = unshare_expr (value);
2348 }
2349 else
2350 value = NULL_TREE;
2351 new_stmt = gimple_build_debug_bind (var, value, stmt);
2352 }
2353 else if (gimple_debug_source_bind_p (stmt))
2354 {
2355 var = gimple_debug_source_bind_get_var (stmt);
2356 value = gimple_debug_source_bind_get_value (stmt);
2357 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2358 }
2359 else
2360 gcc_unreachable ();
2361 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2362 id->debug_stmts.safe_push (new_stmt);
2363 gsi_prev (&ssi);
2364 }
2365 }
2366 }
2367
2368 /* Make a copy of the sub-loops of SRC_PARENT and place them
2369    as sub-loops of DEST_PARENT.  */
2370
2371 static void
2372 copy_loops (copy_body_data *id,
2373 struct loop *dest_parent, struct loop *src_parent)
2374 {
2375 struct loop *src_loop = src_parent->inner;
2376 while (src_loop)
2377 {
2378 if (!id->blocks_to_copy
2379 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2380 {
2381 struct loop *dest_loop = alloc_loop ();
2382
2383 /* Assign the new loop its header and latch and associate
2384 those with the new loop. */
2385 dest_loop->header = (basic_block)src_loop->header->aux;
2386 dest_loop->header->loop_father = dest_loop;
2387 if (src_loop->latch != NULL)
2388 {
2389 dest_loop->latch = (basic_block)src_loop->latch->aux;
2390 dest_loop->latch->loop_father = dest_loop;
2391 }
2392
2393 /* Copy loop meta-data. */
2394 copy_loop_info (src_loop, dest_loop);
2395
2396 /* Finally place it into the loop array and the loop tree. */
2397 place_new_loop (cfun, dest_loop);
2398 flow_loop_tree_node_add (dest_parent, dest_loop);
2399
2400 dest_loop->safelen = src_loop->safelen;
2401 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2402 if (src_loop->force_vectorize)
2403 {
2404 dest_loop->force_vectorize = true;
2405 cfun->has_force_vectorize_loops = true;
2406 }
2407 if (src_loop->simduid)
2408 {
2409 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2410 cfun->has_simduid_loops = true;
2411 }
2412
2413 /* Recurse. */
2414 copy_loops (id, dest_loop, src_loop);
2415 }
2416 src_loop = src_loop->next;
2417 }
2418 }
2419
2420 /* Call cgraph_edge::redirect_call_stmt_to_callee on all calls in BB.  */
2421
2422 void
2423 redirect_all_calls (copy_body_data * id, basic_block bb)
2424 {
2425 gimple_stmt_iterator si;
2426 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2427 {
2428 if (is_gimple_call (gsi_stmt (si)))
2429 {
2430 struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
2431 if (edge)
2432 edge->redirect_call_stmt_to_callee ();
2433 }
2434 }
2435 }
2436
2437 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2438 with each bb's frequency. Used when NODE has a 0-weight entry
2439 but we are about to inline it into a non-zero count call bb.
2440 See the comments for handle_missing_profiles() in predict.c for
2441 when this can happen for COMDATs. */
2442
2443 void
2444 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2445 {
2446 basic_block bb;
2447 edge_iterator ei;
2448 edge e;
2449 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2450
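  /* Each block receives COUNT scaled by its relative frequency, i.e.
     roughly COUNT * bb->frequency / BB_FREQ_MAX, and each outgoing edge
     count is then derived from its branch probability.  */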
2451 FOR_ALL_BB_FN(bb, fn)
2452 {
2453 bb->count = apply_scale (count,
2454 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2455 FOR_EACH_EDGE (e, ei, bb->succs)
2456 e->count = apply_probability (e->src->count, e->probability);
2457 }
2458 }
2459
2460 /* Make a copy of the body of FN so that it can be inserted inline in
2461 another function. Walks FN via CFG, returns new fndecl. */
2462
2463 static tree
2464 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2465 basic_block entry_block_map, basic_block exit_block_map,
2466 basic_block new_entry)
2467 {
2468 tree callee_fndecl = id->src_fn;
2469 /* Original cfun for the callee, doesn't change. */
2470 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2471 struct function *cfun_to_copy;
2472 basic_block bb;
2473 tree new_fndecl = NULL;
2474 bool need_debug_cleanup = false;
2475 gcov_type count_scale;
2476 int last;
2477 int incoming_frequency = 0;
2478 gcov_type incoming_count = 0;
2479
2480 /* This can happen for COMDAT routines that end up with 0 counts
2481 despite being called (see the comments for handle_missing_profiles()
2482 in predict.c as to why). Apply counts to the blocks in the callee
2483 before inlining, using the guessed edge frequencies, so that we don't
2484 end up with a 0-count inline body which can confuse downstream
2485 optimizations such as function splitting. */
2486 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2487 {
2488 /* Apply the larger of the call bb count and the total incoming
2489 call edge count to the callee. */
2490 gcov_type in_count = 0;
2491 struct cgraph_edge *in_edge;
2492 for (in_edge = id->src_node->callers; in_edge;
2493 in_edge = in_edge->next_caller)
2494 in_count += in_edge->count;
2495 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2496 }
2497
2498 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2499 count_scale
2500 = GCOV_COMPUTE_SCALE (count,
2501 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2502 else
2503 count_scale = REG_BR_PROB_BASE;
2504
2505 /* Register specific tree functions. */
2506 gimple_register_cfg_hooks ();
2507
2508   /* If we are inlining just a region of the function, make sure to connect
2509      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2510      be part of a loop, we must compute the frequency and probability of
2511      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2512      probabilities of edges incoming from the nonduplicated region.  */
2513 if (new_entry)
2514 {
2515 edge e;
2516 edge_iterator ei;
2517
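      /* Sum the frequencies and counts of edges entering NEW_ENTRY from
	 outside the duplicated region (their source blocks have no AUX
	 copy), then rescale them to the destination profile.  */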
2518 FOR_EACH_EDGE (e, ei, new_entry->preds)
2519 if (!e->src->aux)
2520 {
2521 incoming_frequency += EDGE_FREQUENCY (e);
2522 incoming_count += e->count;
2523 }
2524 incoming_count = apply_scale (incoming_count, count_scale);
2525 incoming_frequency
2526 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2527 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2528 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2529 }
2530
2531 /* Must have a CFG here at this point. */
2532 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2533 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2534
2535 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2536
2537 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2538 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2539 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2540 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2541
2542 /* Duplicate any exception-handling regions. */
2543 if (cfun->eh)
2544 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2545 remap_decl_1, id);
2546
2547   /* Use aux pointers to map the original blocks to their copies.  */
2548 FOR_EACH_BB_FN (bb, cfun_to_copy)
2549 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2550 {
2551 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2552 bb->aux = new_bb;
2553 new_bb->aux = bb;
2554 new_bb->loop_father = entry_block_map->loop_father;
2555 }
2556
2557 last = last_basic_block_for_fn (cfun);
2558
2559 /* Now that we've duplicated the blocks, duplicate their edges. */
2560 basic_block abnormal_goto_dest = NULL;
2561 if (id->gimple_call
2562 && stmt_can_make_abnormal_goto (id->gimple_call))
2563 {
2564 gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
2565
2566 bb = gimple_bb (id->gimple_call);
2567 gsi_next (&gsi);
2568 if (gsi_end_p (gsi))
2569 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2570 }
2571 FOR_ALL_BB_FN (bb, cfun_to_copy)
2572 if (!id->blocks_to_copy
2573 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2574 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2575 abnormal_goto_dest);
2576
2577 if (new_entry)
2578 {
2579 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2580 e->probability = REG_BR_PROB_BASE;
2581 e->count = incoming_count;
2582 }
2583
2584 /* Duplicate the loop tree, if available and wanted. */
2585 if (loops_for_fn (src_cfun) != NULL
2586 && current_loops != NULL)
2587 {
2588 copy_loops (id, entry_block_map->loop_father,
2589 get_loop (src_cfun, 0));
2590 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2591 loops_state_set (LOOPS_NEED_FIXUP);
2592 }
2593
2594 /* If the loop tree in the source function needed fixup, mark the
2595 destination loop tree for fixup, too. */
2596 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2597 loops_state_set (LOOPS_NEED_FIXUP);
2598
2599 if (gimple_in_ssa_p (cfun))
2600 FOR_ALL_BB_FN (bb, cfun_to_copy)
2601 if (!id->blocks_to_copy
2602 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2603 copy_phis_for_bb (bb, id);
2604
2605 FOR_ALL_BB_FN (bb, cfun_to_copy)
2606 if (bb->aux)
2607 {
2608 if (need_debug_cleanup
2609 && bb->index != ENTRY_BLOCK
2610 && bb->index != EXIT_BLOCK)
2611 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2612 	/* Update call edge destinations.  This cannot be done before loop
2613 	   info is updated, because we may split basic blocks.  */
2614 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2615 redirect_all_calls (id, (basic_block)bb->aux);
2616 ((basic_block)bb->aux)->aux = NULL;
2617 bb->aux = NULL;
2618 }
2619
2620   /* Zero out AUX fields of the blocks newly created during EH edge
2621      insertion.  */
2622 for (; last < last_basic_block_for_fn (cfun); last++)
2623 {
2624 if (need_debug_cleanup)
2625 maybe_move_debug_stmts_to_successors (id,
2626 BASIC_BLOCK_FOR_FN (cfun, last));
2627 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2628       /* Update call edge destinations.  This cannot be done before loop
2629 	 info is updated, because we may split basic blocks.  */
2630 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2631 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2632 }
2633 entry_block_map->aux = NULL;
2634 exit_block_map->aux = NULL;
2635
2636 if (id->eh_map)
2637 {
2638 delete id->eh_map;
2639 id->eh_map = NULL;
2640 }
2641
2642 return new_fndecl;
2643 }
2644
2645 /* Copy the debug STMT using ID. We deal with these statements in a
2646 special way: if any variable in their VALUE expression wasn't
2647 remapped yet, we won't remap it, because that would get decl uids
2648 out of sync, causing codegen differences between -g and -g0. If
2649 this arises, we drop the VALUE expression altogether. */
2650
2651 static void
2652 copy_debug_stmt (gimple stmt, copy_body_data *id)
2653 {
2654 tree t, *n;
2655 struct walk_stmt_info wi;
2656
2657 if (gimple_block (stmt))
2658 {
2659 n = id->decl_map->get (gimple_block (stmt));
2660 gimple_set_block (stmt, n ? *n : id->block);
2661 }
2662
2663 /* Remap all the operands in COPY. */
2664 memset (&wi, 0, sizeof (wi));
2665 wi.info = id;
2666
2667 processing_debug_stmt = 1;
2668
2669 if (gimple_debug_source_bind_p (stmt))
2670 t = gimple_debug_source_bind_get_var (stmt);
2671 else
2672 t = gimple_debug_bind_get_var (stmt);
2673
2674 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2675 && (n = id->debug_map->get (t)))
2676 {
2677 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2678 t = *n;
2679 }
2680 else if (TREE_CODE (t) == VAR_DECL
2681 && !is_global_var (t)
2682 && !id->decl_map->get (t))
2683 /* T is a non-localized variable. */;
2684 else
2685 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2686
2687 if (gimple_debug_bind_p (stmt))
2688 {
2689 gimple_debug_bind_set_var (stmt, t);
2690
2691 if (gimple_debug_bind_has_value_p (stmt))
2692 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2693 remap_gimple_op_r, &wi, NULL);
2694
2695 /* Punt if any decl couldn't be remapped. */
2696 if (processing_debug_stmt < 0)
2697 gimple_debug_bind_reset_value (stmt);
2698 }
2699 else if (gimple_debug_source_bind_p (stmt))
2700 {
2701 gimple_debug_source_bind_set_var (stmt, t);
2702 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2703 remap_gimple_op_r, &wi, NULL);
2704       /* When inlining, if the source bind refers to one of the optimized-away
2705 	 parameters, change the source bind into a normal debug bind
2706 	 referring to the corresponding DEBUG_EXPR_DECL that should have
2707 	 been bound before the call stmt.  */
2708 t = gimple_debug_source_bind_get_value (stmt);
2709 if (t != NULL_TREE
2710 && TREE_CODE (t) == PARM_DECL
2711 && id->gimple_call)
2712 {
2713 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2714 unsigned int i;
2715 if (debug_args != NULL)
2716 {
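	      /* DEBUG_ARGS is a flat vector of pairs: the original PARM_DECL
		 (compared by DECL_ORIGIN) followed by the DEBUG_EXPR_DECL
		 that stands for it; scan it for the entry matching T.  */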
2717 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2718 if ((**debug_args)[i] == DECL_ORIGIN (t)
2719 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2720 {
2721 t = (**debug_args)[i + 1];
2722 stmt->subcode = GIMPLE_DEBUG_BIND;
2723 gimple_debug_bind_set_value (stmt, t);
2724 break;
2725 }
2726 }
2727 }
2728 }
2729
2730 processing_debug_stmt = 0;
2731
2732 update_stmt (stmt);
2733 }
2734
2735 /* Process deferred debug stmts. In order to give values better odds
2736 of being successfully remapped, we delay the processing of debug
2737 stmts until all other stmts that might require remapping are
2738 processed. */
2739
2740 static void
2741 copy_debug_stmts (copy_body_data *id)
2742 {
2743 size_t i;
2744 gimple stmt;
2745
2746 if (!id->debug_stmts.exists ())
2747 return;
2748
2749 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2750 copy_debug_stmt (stmt, id);
2751
2752 id->debug_stmts.release ();
2753 }
2754
2755 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2756 another function. */
2757
2758 static tree
2759 copy_tree_body (copy_body_data *id)
2760 {
2761 tree fndecl = id->src_fn;
2762 tree body = DECL_SAVED_TREE (fndecl);
2763
2764 walk_tree (&body, copy_tree_body_r, id, NULL);
2765
2766 return body;
2767 }
2768
2769 /* Make a copy of the body of FN so that it can be inserted inline in
2770 another function. */
2771
2772 static tree
2773 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2774 basic_block entry_block_map, basic_block exit_block_map,
2775 basic_block new_entry)
2776 {
2777 tree fndecl = id->src_fn;
2778 tree body;
2779
2780 /* If this body has a CFG, walk CFG and copy. */
2781 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2782 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2783 new_entry);
2784 copy_debug_stmts (id);
2785
2786 return body;
2787 }
2788
2789 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2790 defined in function FN, or of a data member thereof. */
2791
2792 static bool
2793 self_inlining_addr_expr (tree value, tree fn)
2794 {
2795 tree var;
2796
2797 if (TREE_CODE (value) != ADDR_EXPR)
2798 return false;
2799
2800 var = get_base_address (TREE_OPERAND (value, 0));
2801
2802 return var && auto_var_in_fn_p (var, fn);
2803 }
2804
2805 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2806 lexical block and line number information from base_stmt, if given,
2807 or from the last stmt of the block otherwise. */
2808
2809 static gimple
2810 insert_init_debug_bind (copy_body_data *id,
2811 basic_block bb, tree var, tree value,
2812 gimple base_stmt)
2813 {
2814 gimple note;
2815 gimple_stmt_iterator gsi;
2816 tree tracked_var;
2817
2818 if (!gimple_in_ssa_p (id->src_cfun))
2819 return NULL;
2820
2821 if (!MAY_HAVE_DEBUG_STMTS)
2822 return NULL;
2823
2824 tracked_var = target_for_debug_bind (var);
2825 if (!tracked_var)
2826 return NULL;
2827
2828 if (bb)
2829 {
2830 gsi = gsi_last_bb (bb);
2831 if (!base_stmt && !gsi_end_p (gsi))
2832 base_stmt = gsi_stmt (gsi);
2833 }
2834
2835 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2836
2837 if (bb)
2838 {
2839 if (!gsi_end_p (gsi))
2840 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2841 else
2842 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2843 }
2844
2845 return note;
2846 }
2847
2848 static void
2849 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2850 {
2851 /* If VAR represents a zero-sized variable, it's possible that the
2852 assignment statement may result in no gimple statements. */
2853 if (init_stmt)
2854 {
2855 gimple_stmt_iterator si = gsi_last_bb (bb);
2856
2857 /* We can end up with init statements that store to a non-register
2858 from a rhs with a conversion. Handle that here by forcing the
2859 rhs into a temporary. gimple_regimplify_operands is not
2860 prepared to do this for us. */
2861 if (!is_gimple_debug (init_stmt)
2862 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2863 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2864 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2865 {
2866 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2867 gimple_expr_type (init_stmt),
2868 gimple_assign_rhs1 (init_stmt));
2869 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2870 GSI_NEW_STMT);
2871 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2872 gimple_assign_set_rhs1 (init_stmt, rhs);
2873 }
2874 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2875 gimple_regimplify_operands (init_stmt, &si);
2876
2877 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2878 {
2879 tree def = gimple_assign_lhs (init_stmt);
2880 insert_init_debug_bind (id, bb, def, def, init_stmt);
2881 }
2882 }
2883 }
2884
2885 /* Initialize parameter P with VALUE.  If needed, produce the init statement
2886    at the end of BB.  When BB is NULL, we return the init statement to be
2887    output later.  */
2888 static gimple
2889 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2890 basic_block bb, tree *vars)
2891 {
2892 gimple init_stmt = NULL;
2893 tree var;
2894 tree rhs = value;
2895 tree def = (gimple_in_ssa_p (cfun)
2896 ? ssa_default_def (id->src_cfun, p) : NULL);
2897
2898 if (value
2899 && value != error_mark_node
2900 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2901 {
2902 /* If we can match up types by promotion/demotion do so. */
2903 if (fold_convertible_p (TREE_TYPE (p), value))
2904 rhs = fold_convert (TREE_TYPE (p), value);
2905 else
2906 {
2907 /* ??? For valid programs we should not end up here.
2908 Still if we end up with truly mismatched types here, fall back
2909 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2910 GIMPLE to the following passes. */
2911 if (!is_gimple_reg_type (TREE_TYPE (value))
2912 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2913 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2914 else
2915 rhs = build_zero_cst (TREE_TYPE (p));
2916 }
2917 }
2918
2919 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2920 here since the type of this decl must be visible to the calling
2921 function. */
2922 var = copy_decl_to_var (p, id);
2923
2924 /* Declare this new variable. */
2925 DECL_CHAIN (var) = *vars;
2926 *vars = var;
2927
2928 /* Make gimplifier happy about this variable. */
2929 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2930
2931   /* If the parameter is never assigned to and has no SSA_NAMEs created,
2932      we would not need to create a new variable here at all, if it
2933      weren't for debug info.  Still, we can just use the argument
2934      value.  */
2935 if (TREE_READONLY (p)
2936 && !TREE_ADDRESSABLE (p)
2937 && value && !TREE_SIDE_EFFECTS (value)
2938 && !def)
2939 {
2940       /* We may produce non-gimple trees by adding NOPs or introduce
2941 	 invalid sharing when the operand is not really constant.
2942 	 It is not a big deal to prohibit constant propagation here as
2943 	 we will constant propagate in the DOM1 pass anyway.  */
2944 if (is_gimple_min_invariant (value)
2945 && useless_type_conversion_p (TREE_TYPE (p),
2946 TREE_TYPE (value))
2947 /* We have to be very careful about ADDR_EXPR. Make sure
2948 the base variable isn't a local variable of the inlined
2949 function, e.g., when doing recursive inlining, direct or
2950 mutually-recursive or whatever, which is why we don't
2951 just test whether fn == current_function_decl. */
2952 && ! self_inlining_addr_expr (value, fn))
2953 {
2954 insert_decl_map (id, p, value);
2955 insert_debug_decl_map (id, p, var);
2956 return insert_init_debug_bind (id, bb, var, value, NULL);
2957 }
2958 }
2959
2960 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2961 that way, when the PARM_DECL is encountered, it will be
2962 automatically replaced by the VAR_DECL. */
2963 insert_decl_map (id, p, var);
2964
2965 /* Even if P was TREE_READONLY, the new VAR should not be.
2966 In the original code, we would have constructed a
2967 temporary, and then the function body would have never
2968 changed the value of P. However, now, we will be
2969 constructing VAR directly. The constructor body may
2970 change its value multiple times as it is being
2971 constructed. Therefore, it must not be TREE_READONLY;
2972 the back-end assumes that TREE_READONLY variable is
2973 assigned to only once. */
2974 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2975 TREE_READONLY (var) = 0;
2976
2977   /* If there is no setup required and we are in SSA, take the easy route
2978      replacing all SSA names representing the function parameter by the
2979      SSA name passed to the function.
2980 
2981      We need to construct a map for the variable anyway, as it might be used
2982      in different SSA names when the parameter is set in the function.
2983 
2984      Do the replacement at -O0 for const arguments replaced by a constant.
2985      This is important for builtin_constant_p and other constructs requiring
2986      the constant argument to be visible in the inlined function body.  */
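  /* E.g. (illustrative): when inlining f (x_1) and the parameter P has the
     default definition P_2, we simply record the mapping P_2 -> x_1 instead
     of emitting an explicit "P = x" copy.  */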
2987 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2988 && (optimize
2989 || (TREE_READONLY (p)
2990 && is_gimple_min_invariant (rhs)))
2991 && (TREE_CODE (rhs) == SSA_NAME
2992 || is_gimple_min_invariant (rhs))
2993 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2994 {
2995 insert_decl_map (id, def, rhs);
2996 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2997 }
2998
2999   /* If the value of the argument is never used, don't bother initializing
3000      it.  */
3001 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3002 {
3003 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3004 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3005 }
3006
3007 /* Initialize this VAR_DECL from the equivalent argument. Convert
3008 the argument to the proper type in case it was promoted. */
3009 if (value)
3010 {
3011 if (rhs == error_mark_node)
3012 {
3013 insert_decl_map (id, p, var);
3014 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3015 }
3016
3017 STRIP_USELESS_TYPE_CONVERSION (rhs);
3018
3019       /* If we are in SSA form, properly remap the default definition,
3020 	 or assign to a dummy SSA name if the parameter is unused and
3021 	 we are not optimizing.  */
3022 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3023 {
3024 if (def)
3025 {
3026 def = remap_ssa_name (def, id);
3027 init_stmt = gimple_build_assign (def, rhs);
3028 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3029 set_ssa_default_def (cfun, var, NULL);
3030 }
3031 else if (!optimize)
3032 {
3033 def = make_ssa_name (var, NULL);
3034 init_stmt = gimple_build_assign (def, rhs);
3035 }
3036 }
3037 else
3038 init_stmt = gimple_build_assign (var, rhs);
3039
3040 if (bb && init_stmt)
3041 insert_init_stmt (id, bb, init_stmt);
3042 }
3043 return init_stmt;
3044 }
3045
3046 /* Generate code to initialize the parameters of the function at the
3047 top of the stack in ID from the GIMPLE_CALL STMT. */
3048
3049 static void
3050 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3051 tree fn, basic_block bb)
3052 {
3053 tree parms;
3054 size_t i;
3055 tree p;
3056 tree vars = NULL_TREE;
3057 tree static_chain = gimple_call_chain (stmt);
3058
3059 /* Figure out what the parameters are. */
3060 parms = DECL_ARGUMENTS (fn);
3061
3062 /* Loop through the parameter declarations, replacing each with an
3063 equivalent VAR_DECL, appropriately initialized. */
3064 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3065 {
3066 tree val;
3067 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3068 setup_one_parameter (id, p, val, fn, bb, &vars);
3069 }
3070   /* After remapping the parameters, remap their types.  This has to be done
3071      in a second loop over all parameters to appropriately remap
3072      variable-sized arrays when the size is specified in a
3073      parameter following the array.  */
3074 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3075 {
3076 tree *varp = id->decl_map->get (p);
3077 if (varp
3078 && TREE_CODE (*varp) == VAR_DECL)
3079 {
3080 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3081 ? ssa_default_def (id->src_cfun, p) : NULL);
3082 tree var = *varp;
3083 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3084 /* Also remap the default definition if it was remapped
3085 to the default definition of the parameter replacement
3086 by the parameter setup. */
3087 if (def)
3088 {
3089 tree *defp = id->decl_map->get (def);
3090 if (defp
3091 && TREE_CODE (*defp) == SSA_NAME
3092 && SSA_NAME_VAR (*defp) == var)
3093 TREE_TYPE (*defp) = TREE_TYPE (var);
3094 }
3095 }
3096 }
3097
3098 /* Initialize the static chain. */
3099 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3100 gcc_assert (fn != current_function_decl);
3101 if (p)
3102 {
3103 /* No static chain? Seems like a bug in tree-nested.c. */
3104 gcc_assert (static_chain);
3105
3106 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3107 }
3108
3109 declare_inline_vars (id->block, vars);
3110 }
3111
3112
3113 /* Declare a return variable to replace the RESULT_DECL for the
3114 function we are calling. An appropriate DECL_STMT is returned.
3115 The USE_STMT is filled to contain a use of the declaration to
3116 indicate the return value of the function.
3117
3118    RETURN_SLOT, if non-null, is the place where to store the result.  It
3119 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3120 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3121
3122 The return value is a (possibly null) value that holds the result
3123 as seen by the caller. */
3124
3125 static tree
3126 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3127 basic_block entry_bb)
3128 {
3129 tree callee = id->src_fn;
3130 tree result = DECL_RESULT (callee);
3131 tree callee_type = TREE_TYPE (result);
3132 tree caller_type;
3133 tree var, use;
3134
3135 /* Handle type-mismatches in the function declaration return type
3136 vs. the call expression. */
3137 if (modify_dest)
3138 caller_type = TREE_TYPE (modify_dest);
3139 else
3140 caller_type = TREE_TYPE (TREE_TYPE (callee));
3141
3142 /* We don't need to do anything for functions that don't return anything. */
3143 if (VOID_TYPE_P (callee_type))
3144 return NULL_TREE;
3145
3146 /* If there was a return slot, then the return value is the
3147 dereferenced address of that object. */
3148 if (return_slot)
3149 {
3150 /* The front end shouldn't have used both return_slot and
3151 a modify expression. */
3152 gcc_assert (!modify_dest);
3153 if (DECL_BY_REFERENCE (result))
3154 {
3155 tree return_slot_addr = build_fold_addr_expr (return_slot);
3156 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3157
3158 	  /* We are going to construct *&return_slot and we can't do that
3159 	     for variables believed not to be addressable.
3160 
3161 	     FIXME: This check can possibly match, because values returned
3162 	     via return slot optimization are not believed to have their
3163 	     address taken by alias analysis.  */
3164 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3165 var = return_slot_addr;
3166 }
3167 else
3168 {
3169 var = return_slot;
3170 gcc_assert (TREE_CODE (var) != SSA_NAME);
3171 if (TREE_ADDRESSABLE (result))
3172 mark_addressable (var);
3173 }
3174 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3175 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3176 && !DECL_GIMPLE_REG_P (result)
3177 && DECL_P (var))
3178 DECL_GIMPLE_REG_P (var) = 0;
3179 use = NULL;
3180 goto done;
3181 }
3182
3183 /* All types requiring non-trivial constructors should have been handled. */
3184 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3185
3186 /* Attempt to avoid creating a new temporary variable. */
3187 if (modify_dest
3188 && TREE_CODE (modify_dest) != SSA_NAME)
3189 {
3190 bool use_it = false;
3191
3192 /* We can't use MODIFY_DEST if there's type promotion involved. */
3193 if (!useless_type_conversion_p (callee_type, caller_type))
3194 use_it = false;
3195
3196 /* ??? If we're assigning to a variable sized type, then we must
3197 reuse the destination variable, because we've no good way to
3198 create variable sized temporaries at this point. */
3199 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3200 use_it = true;
3201
3202 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3203 reuse it as the result of the call directly. Don't do this if
3204 it would promote MODIFY_DEST to addressable. */
3205 else if (TREE_ADDRESSABLE (result))
3206 use_it = false;
3207 else
3208 {
3209 tree base_m = get_base_address (modify_dest);
3210
3211 /* If the base isn't a decl, then it's a pointer, and we don't
3212 know where that's going to go. */
3213 if (!DECL_P (base_m))
3214 use_it = false;
3215 else if (is_global_var (base_m))
3216 use_it = false;
3217 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3218 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3219 && !DECL_GIMPLE_REG_P (result)
3220 && DECL_GIMPLE_REG_P (base_m))
3221 use_it = false;
3222 else if (!TREE_ADDRESSABLE (base_m))
3223 use_it = true;
3224 }
3225
3226 if (use_it)
3227 {
3228 var = modify_dest;
3229 use = NULL;
3230 goto done;
3231 }
3232 }
3233
3234 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3235
3236 var = copy_result_decl_to_var (result, id);
3237 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3238
3239 /* Do not have the rest of GCC warn about this variable as it should
3240 not be visible to the user. */
3241 TREE_NO_WARNING (var) = 1;
3242
3243 declare_inline_vars (id->block, var);
3244
3245 /* Build the use expr. If the return type of the function was
3246 promoted, convert it back to the expected type. */
3247 use = var;
3248 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3249 {
3250 /* If we can match up types by promotion/demotion do so. */
3251 if (fold_convertible_p (caller_type, var))
3252 use = fold_convert (caller_type, var);
3253 else
3254 {
3255 /* ??? For valid programs we should not end up here.
3256 Still if we end up with truly mismatched types here, fall back
3257 to using a MEM_REF to not leak invalid GIMPLE to the following
3258 passes. */
3259 /* Prevent var from being written into SSA form. */
3260 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3261 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3262 DECL_GIMPLE_REG_P (var) = false;
3263 else if (is_gimple_reg_type (TREE_TYPE (var)))
3264 TREE_ADDRESSABLE (var) = true;
3265 use = fold_build2 (MEM_REF, caller_type,
3266 build_fold_addr_expr (var),
3267 build_int_cst (ptr_type_node, 0));
3268 }
3269 }
3270
3271 STRIP_USELESS_TYPE_CONVERSION (use);
3272
3273 if (DECL_BY_REFERENCE (result))
3274 {
3275 TREE_ADDRESSABLE (var) = 1;
3276 var = build_fold_addr_expr (var);
3277 }
3278
3279 done:
3280 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3281 way, when the RESULT_DECL is encountered, it will be
3282 automatically replaced by the VAR_DECL.
3283
3284 When returning by reference, ensure that RESULT_DECL remaps to
3285 gimple_val. */
3286 if (DECL_BY_REFERENCE (result)
3287 && !is_gimple_val (var))
3288 {
3289 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3290 insert_decl_map (id, result, temp);
3291 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3292 	 its default_def SSA_NAME.  */
3293 if (gimple_in_ssa_p (id->src_cfun)
3294 && is_gimple_reg (result))
3295 {
3296 temp = make_ssa_name (temp, NULL);
3297 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3298 }
3299 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3300 }
3301 else
3302 insert_decl_map (id, result, var);
3303
3304 /* Remember this so we can ignore it in remap_decls. */
3305 id->retvar = var;
3306
3307 return use;
3308 }
3309
3310 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3311 to a local label. */
3312
3313 static tree
3314 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3315 {
3316 tree node = *nodep;
3317 tree fn = (tree) fnp;
3318
3319 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3320 return node;
3321
3322 if (TYPE_P (node))
3323 *walk_subtrees = 0;
3324
3325 return NULL_TREE;
3326 }
3327
3328 /* Determine if the function can be copied.  If so, return NULL.  If
3329    not, return a string describing the reason for failure.  */
3330
3331 static const char *
3332 copy_forbidden (struct function *fun, tree fndecl)
3333 {
3334 const char *reason = fun->cannot_be_copied_reason;
3335 tree decl;
3336 unsigned ix;
3337
3338 /* Only examine the function once. */
3339 if (fun->cannot_be_copied_set)
3340 return reason;
3341
3342 /* We cannot copy a function that receives a non-local goto
3343 because we cannot remap the destination label used in the
3344 function that is performing the non-local goto. */
3345 /* ??? Actually, this should be possible, if we work at it.
3346 No doubt there's just a handful of places that simply
3347 assume it doesn't happen and don't substitute properly. */
3348 if (fun->has_nonlocal_label)
3349 {
3350 reason = G_("function %q+F can never be copied "
3351 "because it receives a non-local goto");
3352 goto fail;
3353 }
3354
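  /* Similarly (illustrative example): a function containing
       static void *resume_point = &&resume;
     cannot be copied, because the label address stored in the static
     initializer would still point into the original body.  */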
3355 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3356 if (TREE_CODE (decl) == VAR_DECL
3357 && TREE_STATIC (decl)
3358 && !DECL_EXTERNAL (decl)
3359 && DECL_INITIAL (decl)
3360 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3361 has_label_address_in_static_1,
3362 fndecl))
3363 {
3364 reason = G_("function %q+F can never be copied because it saves "
3365 "address of local label in a static variable");
3366 goto fail;
3367 }
3368
3369 fail:
3370 fun->cannot_be_copied_reason = reason;
3371 fun->cannot_be_copied_set = true;
3372 return reason;
3373 }
3374
3375
3376 static const char *inline_forbidden_reason;
3377
3378 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3379    iff a function cannot be inlined.  Also sets the reason why.  */
3380
3381 static tree
3382 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3383 struct walk_stmt_info *wip)
3384 {
3385 tree fn = (tree) wip->info;
3386 tree t;
3387 gimple stmt = gsi_stmt (*gsi);
3388
3389 switch (gimple_code (stmt))
3390 {
3391 case GIMPLE_CALL:
3392       /* Refuse to inline an alloca call unless the user explicitly forced it,
3393 	 as this may change the program's memory overhead drastically when the
3394 	 function using alloca is called in a loop.  In the GCC present in
3395 	 SPEC2000, inlining into schedule_block caused it to require 2GB of
3396 	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
3397 	 VLA objects, as those can't cause unbounded growth (they're always
3398 	 wrapped inside stack_save/stack_restore regions).  */
3399 if (gimple_alloca_call_p (stmt)
3400 && !gimple_call_alloca_for_var_p (stmt)
3401 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3402 {
3403 inline_forbidden_reason
3404 = G_("function %q+F can never be inlined because it uses "
3405 "alloca (override using the always_inline attribute)");
3406 *handled_ops_p = true;
3407 return fn;
3408 }
3409
3410 t = gimple_call_fndecl (stmt);
3411 if (t == NULL_TREE)
3412 break;
3413
3414 /* We cannot inline functions that call setjmp. */
3415 if (setjmp_call_p (t))
3416 {
3417 inline_forbidden_reason
3418 = G_("function %q+F can never be inlined because it uses setjmp");
3419 *handled_ops_p = true;
3420 return t;
3421 }
3422
3423 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3424 switch (DECL_FUNCTION_CODE (t))
3425 {
3426 /* We cannot inline functions that take a variable number of
3427 arguments. */
3428 case BUILT_IN_VA_START:
3429 case BUILT_IN_NEXT_ARG:
3430 case BUILT_IN_VA_END:
3431 inline_forbidden_reason
3432 = G_("function %q+F can never be inlined because it "
3433 "uses variable argument lists");
3434 *handled_ops_p = true;
3435 return t;
3436
3437 case BUILT_IN_LONGJMP:
3438 /* We can't inline functions that call __builtin_longjmp at
3439 all. The non-local goto machinery really requires the
3440 destination be in a different function. If we allow the
3441 function calling __builtin_longjmp to be inlined into the
3442 function calling __builtin_setjmp, Things will Go Awry. */
3443 inline_forbidden_reason
3444 = G_("function %q+F can never be inlined because "
3445 "it uses setjmp-longjmp exception handling");
3446 *handled_ops_p = true;
3447 return t;
3448
3449 case BUILT_IN_NONLOCAL_GOTO:
3450 /* Similarly. */
3451 inline_forbidden_reason
3452 = G_("function %q+F can never be inlined because "
3453 "it uses non-local goto");
3454 *handled_ops_p = true;
3455 return t;
3456
3457 case BUILT_IN_RETURN:
3458 case BUILT_IN_APPLY_ARGS:
3459 /* If a __builtin_apply_args caller would be inlined,
3460 it would be saving arguments of the function it has
3461 been inlined into. Similarly, __builtin_return would
3462 return from the function the call has been inlined into. */
3463 inline_forbidden_reason
3464 = G_("function %q+F can never be inlined because "
3465 "it uses __builtin_return or __builtin_apply_args");
3466 *handled_ops_p = true;
3467 return t;
3468
3469 default:
3470 break;
3471 }
3472 break;
3473
3474 case GIMPLE_GOTO:
3475 t = gimple_goto_dest (stmt);
3476
3477 /* We will not inline a function which uses computed goto. The
3478 addresses of its local labels, which may be tucked into
3479 global storage, are of course not constant across
3480 instantiations, which causes unexpected behavior. */
3481 if (TREE_CODE (t) != LABEL_DECL)
3482 {
3483 inline_forbidden_reason
3484 = G_("function %q+F can never be inlined "
3485 "because it contains a computed goto");
3486 *handled_ops_p = true;
3487 return t;
3488 }
3489 break;
3490
3491 default:
3492 break;
3493 }
3494
3495 *handled_ops_p = false;
3496 return NULL_TREE;
3497 }
3498
3499 /* Return true if FNDECL is a function that cannot be inlined into
3500 another one. */
3501
3502 static bool
3503 inline_forbidden_p (tree fndecl)
3504 {
3505 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3506 struct walk_stmt_info wi;
3507 basic_block bb;
3508 bool forbidden_p = false;
3509
3510 /* First check for shared reasons not to copy the code. */
3511 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3512 if (inline_forbidden_reason != NULL)
3513 return true;
3514
3515 /* Next, walk the statements of the function looking for
3516 constructs we can't handle or that are non-optimal for inlining. */
3517 hash_set<tree> visited_nodes;
3518 memset (&wi, 0, sizeof (wi));
3519 wi.info = (void *) fndecl;
3520 wi.pset = &visited_nodes;
3521
3522 FOR_EACH_BB_FN (bb, fun)
3523 {
3524 gimple ret;
3525 gimple_seq seq = bb_seq (bb);
3526 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3527 forbidden_p = (ret != NULL);
3528 if (forbidden_p)
3529 break;
3530 }
3531
3532 return forbidden_p;
3533 }
3534 \f
3535 /* Return false if the function FNDECL cannot be inlined on account of its
3536 attributes, true otherwise. */
3537 static bool
3538 function_attribute_inlinable_p (const_tree fndecl)
3539 {
3540 if (targetm.attribute_table)
3541 {
3542 const_tree a;
3543
3544 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3545 {
3546 const_tree name = TREE_PURPOSE (a);
3547 int i;
3548
3549 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3550 if (is_attribute_p (targetm.attribute_table[i].name, name))
3551 return targetm.function_attribute_inlinable_p (fndecl);
3552 }
3553 }
3554
3555 return true;
3556 }
3557
3558 /* Returns nonzero if FN is a function that does not have any
3559 fundamental inline blocking properties. */
3560
3561 bool
3562 tree_inlinable_function_p (tree fn)
3563 {
3564 bool inlinable = true;
3565 bool do_warning;
3566 tree always_inline;
3567
3568 /* If we've already decided this function shouldn't be inlined,
3569 there's no need to check again. */
3570 if (DECL_UNINLINABLE (fn))
3571 return false;
3572
3573 /* We only warn for functions declared `inline' by the user. */
3574 do_warning = (warn_inline
3575 && DECL_DECLARED_INLINE_P (fn)
3576 && !DECL_NO_INLINE_WARNING_P (fn)
3577 && !DECL_IN_SYSTEM_HEADER (fn));
3578
3579 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3580
3581 if (flag_no_inline
3582 && always_inline == NULL)
3583 {
3584 if (do_warning)
3585 warning (OPT_Winline, "function %q+F can never be inlined because it "
3586 "is suppressed using -fno-inline", fn);
3587 inlinable = false;
3588 }
3589
3590 else if (!function_attribute_inlinable_p (fn))
3591 {
3592 if (do_warning)
3593 warning (OPT_Winline, "function %q+F can never be inlined because it "
3594 "uses attributes conflicting with inlining", fn);
3595 inlinable = false;
3596 }
3597
3598 else if (inline_forbidden_p (fn))
3599 {
3600 /* See if we should warn about uninlinable functions. Previously,
3601 some of these warnings would be issued while trying to expand
3602 the function inline, but that would cause multiple warnings
3603 about functions that would for example call alloca. But since
3604 this a property of the function, just one warning is enough.
3605 As a bonus we can now give more details about the reason why a
3606 function is not inlinable. */
3607 if (always_inline)
3608 error (inline_forbidden_reason, fn);
3609 else if (do_warning)
3610 warning (OPT_Winline, inline_forbidden_reason, fn);
3611
3612 inlinable = false;
3613 }
3614
3615 /* Squirrel away the result so that we don't have to check again. */
3616 DECL_UNINLINABLE (fn) = !inlinable;
3617
3618 return inlinable;
3619 }
3620
3621 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3622 word size, take a possible memcpy call into account, and return the
3623 cost based on whether we are optimizing for size or speed according to SPEED_P. */
3624
3625 int
3626 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3627 {
3628 HOST_WIDE_INT size;
3629
3630 gcc_assert (!VOID_TYPE_P (type));
3631
3632 if (TREE_CODE (type) == VECTOR_TYPE)
3633 {
3634 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3635 enum machine_mode simd
3636 = targetm.vectorize.preferred_simd_mode (inner);
3637 int simd_mode_size = GET_MODE_SIZE (simd);
3638 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3639 / simd_mode_size);
3640 }
3641
3642 size = int_size_in_bytes (type);
3643
3644 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3645 /* Cost of a memcpy call, 3 arguments and the call. */
3646 return 4;
3647 else
3648 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3649 }
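
/* Worked example (a sketch only; the actual numbers are target dependent):
   on a target with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4,
   a 24-byte struct is moved piecewise for (24 + 8 - 1) / 8 == 3 units,
   while a 40-byte struct exceeds 8 * 4 == 32 bytes and is costed as a
   memcpy call, i.e. 4 units (three arguments plus the call).  A vector
   type whose mode matches the preferred SIMD mode of its element costs
   a single unit.  */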
3650
3651 /* Returns the cost of operation CODE, according to WEIGHTS. */
3652
3653 static int
3654 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3655 tree op1 ATTRIBUTE_UNUSED, tree op2)
3656 {
3657 switch (code)
3658 {
3659 /* These are "free" conversions, or their presumed cost
3660 is folded into other operations. */
3661 case RANGE_EXPR:
3662 CASE_CONVERT:
3663 case COMPLEX_EXPR:
3664 case PAREN_EXPR:
3665 case VIEW_CONVERT_EXPR:
3666 return 0;
3667
3668 /* Assign cost of 1 to usual operations.
3669 ??? We may consider mapping RTL costs to this. */
3670 case COND_EXPR:
3671 case VEC_COND_EXPR:
3672 case VEC_PERM_EXPR:
3673
3674 case PLUS_EXPR:
3675 case POINTER_PLUS_EXPR:
3676 case MINUS_EXPR:
3677 case MULT_EXPR:
3678 case MULT_HIGHPART_EXPR:
3679 case FMA_EXPR:
3680
3681 case ADDR_SPACE_CONVERT_EXPR:
3682 case FIXED_CONVERT_EXPR:
3683 case FIX_TRUNC_EXPR:
3684
3685 case NEGATE_EXPR:
3686 case FLOAT_EXPR:
3687 case MIN_EXPR:
3688 case MAX_EXPR:
3689 case ABS_EXPR:
3690
3691 case LSHIFT_EXPR:
3692 case RSHIFT_EXPR:
3693 case LROTATE_EXPR:
3694 case RROTATE_EXPR:
3695 case VEC_LSHIFT_EXPR:
3696 case VEC_RSHIFT_EXPR:
3697
3698 case BIT_IOR_EXPR:
3699 case BIT_XOR_EXPR:
3700 case BIT_AND_EXPR:
3701 case BIT_NOT_EXPR:
3702
3703 case TRUTH_ANDIF_EXPR:
3704 case TRUTH_ORIF_EXPR:
3705 case TRUTH_AND_EXPR:
3706 case TRUTH_OR_EXPR:
3707 case TRUTH_XOR_EXPR:
3708 case TRUTH_NOT_EXPR:
3709
3710 case LT_EXPR:
3711 case LE_EXPR:
3712 case GT_EXPR:
3713 case GE_EXPR:
3714 case EQ_EXPR:
3715 case NE_EXPR:
3716 case ORDERED_EXPR:
3717 case UNORDERED_EXPR:
3718
3719 case UNLT_EXPR:
3720 case UNLE_EXPR:
3721 case UNGT_EXPR:
3722 case UNGE_EXPR:
3723 case UNEQ_EXPR:
3724 case LTGT_EXPR:
3725
3726 case CONJ_EXPR:
3727
3728 case PREDECREMENT_EXPR:
3729 case PREINCREMENT_EXPR:
3730 case POSTDECREMENT_EXPR:
3731 case POSTINCREMENT_EXPR:
3732
3733 case REALIGN_LOAD_EXPR:
3734
3735 case REDUC_MAX_EXPR:
3736 case REDUC_MIN_EXPR:
3737 case REDUC_PLUS_EXPR:
3738 case WIDEN_SUM_EXPR:
3739 case WIDEN_MULT_EXPR:
3740 case DOT_PROD_EXPR:
3741 case SAD_EXPR:
3742 case WIDEN_MULT_PLUS_EXPR:
3743 case WIDEN_MULT_MINUS_EXPR:
3744 case WIDEN_LSHIFT_EXPR:
3745
3746 case VEC_WIDEN_MULT_HI_EXPR:
3747 case VEC_WIDEN_MULT_LO_EXPR:
3748 case VEC_WIDEN_MULT_EVEN_EXPR:
3749 case VEC_WIDEN_MULT_ODD_EXPR:
3750 case VEC_UNPACK_HI_EXPR:
3751 case VEC_UNPACK_LO_EXPR:
3752 case VEC_UNPACK_FLOAT_HI_EXPR:
3753 case VEC_UNPACK_FLOAT_LO_EXPR:
3754 case VEC_PACK_TRUNC_EXPR:
3755 case VEC_PACK_SAT_EXPR:
3756 case VEC_PACK_FIX_TRUNC_EXPR:
3757 case VEC_WIDEN_LSHIFT_HI_EXPR:
3758 case VEC_WIDEN_LSHIFT_LO_EXPR:
3759
3760 return 1;
3761
3762 /* A few special cases of expensive operations. This is useful
3763 to avoid inlining functions that have too many of these. */
3764 case TRUNC_DIV_EXPR:
3765 case CEIL_DIV_EXPR:
3766 case FLOOR_DIV_EXPR:
3767 case ROUND_DIV_EXPR:
3768 case EXACT_DIV_EXPR:
3769 case TRUNC_MOD_EXPR:
3770 case CEIL_MOD_EXPR:
3771 case FLOOR_MOD_EXPR:
3772 case ROUND_MOD_EXPR:
3773 case RDIV_EXPR:
3774 if (TREE_CODE (op2) != INTEGER_CST)
3775 return weights->div_mod_cost;
3776 return 1;
3777
3778 default:
3779 /* We expect a copy assignment with no operator. */
3780 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3781 return 0;
3782 }
3783 }
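
/* For example (using the weights set up in init_inline_once below):
   a conversion or PAREN_EXPR is free; "a + b" or "a < b" costs 1;
   "a / b" with a non-constant divisor costs weights->div_mod_cost
   (1 for size estimates, 10 for time estimates), while "a / 16"
   falls back to the usual cost of 1.  */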
3784
3785
3786 /* Estimate number of instructions that will be created by expanding
3787 the statements in the statement sequence STMTS.
3788 WEIGHTS contains weights attributed to various constructs. */
3789
3790 static int
3791 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3792 {
3793 int cost;
3794 gimple_stmt_iterator gsi;
3795
3796 cost = 0;
3797 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3798 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3799
3800 return cost;
3801 }
3802
3803
3804 /* Estimate number of instructions that will be created by expanding STMT.
3805 WEIGHTS contains weights attributed to various constructs. */
3806
3807 int
3808 estimate_num_insns (gimple stmt, eni_weights *weights)
3809 {
3810 unsigned cost, i;
3811 enum gimple_code code = gimple_code (stmt);
3812 tree lhs;
3813 tree rhs;
3814
3815 switch (code)
3816 {
3817 case GIMPLE_ASSIGN:
3818 /* Try to estimate the cost of assignments. We have two cases to
3819 deal with:
3820 1) Simple assignments to registers;
3821 2) Stores to things that must live in memory. This includes
3822 "normal" stores to scalars, but also assignments of large
3823 structures, or constructors of big arrays;
3824
3825 Let us look at these two cases, assuming we have "a = b + C":
3826 <GIMPLE_ASSIGN <var_decl "a">
3827 <plus_expr <var_decl "b"> <constant C>>
3828 If "a" is a GIMPLE register, the assignment to it is free on almost
3829 any target, because "a" usually ends up in a real register. Hence
3830 the only cost of this expression comes from the PLUS_EXPR, and we
3831 can ignore the GIMPLE_ASSIGN.
3832 If "a" is not a GIMPLE register, the assignment to "a" will most
3833 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3834 of moving something into "a", which we compute using the function
3835 estimate_move_cost. */
3836 if (gimple_clobber_p (stmt))
3837 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3838
3839 lhs = gimple_assign_lhs (stmt);
3840 rhs = gimple_assign_rhs1 (stmt);
3841
3842 cost = 0;
3843
3844 /* Account for the cost of moving to / from memory. */
3845 if (gimple_store_p (stmt))
3846 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3847 if (gimple_assign_load_p (stmt))
3848 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3849
3850 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3851 gimple_assign_rhs1 (stmt),
3852 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3853 == GIMPLE_BINARY_RHS
3854 ? gimple_assign_rhs2 (stmt) : NULL);
3855 break;
3856
3857 case GIMPLE_COND:
3858 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3859 gimple_op (stmt, 0),
3860 gimple_op (stmt, 1));
3861 break;
3862
3863 case GIMPLE_SWITCH:
3864 /* Take into account cost of the switch + guess 2 conditional jumps for
3865 each case label.
3866
3867 TODO: once the switch expansion logic is sufficiently separated, we can
3868 do a better job of estimating the cost of the switch. */
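/* As a rough illustration: a switch with 16 case labels is estimated
   at floor_log2 (16) * 2 == 8 for time (speed) estimates and at
   16 * 2 == 32 for size estimates.  */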
3869 if (weights->time_based)
3870 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3871 else
3872 cost = gimple_switch_num_labels (stmt) * 2;
3873 break;
3874
3875 case GIMPLE_CALL:
3876 {
3877 tree decl;
3878
3879 if (gimple_call_internal_p (stmt))
3880 return 0;
3881 else if ((decl = gimple_call_fndecl (stmt))
3882 && DECL_BUILT_IN (decl))
3883 {
3884 /* Do not special-case builtins where we see the body.
3885 This just confuses the inliner. */
3886 struct cgraph_node *node;
3887 if (!(node = cgraph_node::get (decl))
3888 || node->definition)
3889 ;
3890 /* For builtins that are likely expanded to nothing or
3891 inlined, do not account for operand costs. */
3892 else if (is_simple_builtin (decl))
3893 return 0;
3894 else if (is_inexpensive_builtin (decl))
3895 return weights->target_builtin_call_cost;
3896 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3897 {
3898 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3899 specialize the cheap expansion we do here.
3900 ??? This asks for a more general solution. */
3901 switch (DECL_FUNCTION_CODE (decl))
3902 {
3903 case BUILT_IN_POW:
3904 case BUILT_IN_POWF:
3905 case BUILT_IN_POWL:
3906 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3907 && REAL_VALUES_EQUAL
3908 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3909 return estimate_operator_cost
3910 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
3911 gimple_call_arg (stmt, 0));
3912 break;
3913
3914 default:
3915 break;
3916 }
3917 }
3918 }
3919
3920 cost = decl ? weights->call_cost : weights->indirect_call_cost;
3921 if (gimple_call_lhs (stmt))
3922 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
3923 weights->time_based);
3924 for (i = 0; i < gimple_call_num_args (stmt); i++)
3925 {
3926 tree arg = gimple_call_arg (stmt, i);
3927 cost += estimate_move_cost (TREE_TYPE (arg),
3928 weights->time_based);
3929 }
3930 break;
3931 }
3932
3933 case GIMPLE_RETURN:
3934 return weights->return_cost;
3935
3936 case GIMPLE_GOTO:
3937 case GIMPLE_LABEL:
3938 case GIMPLE_NOP:
3939 case GIMPLE_PHI:
3940 case GIMPLE_PREDICT:
3941 case GIMPLE_DEBUG:
3942 return 0;
3943
3944 case GIMPLE_ASM:
3945 {
3946 int count = asm_str_count (gimple_asm_string (stmt));
3947 /* 1000 means infinity. This avoids overflows later
3948 with very long asm statements. */
3949 if (count > 1000)
3950 count = 1000;
3951 return count;
3952 }
3953
3954 case GIMPLE_RESX:
3955 /* This is either going to be an external function call with one
3956 argument, or two register copy statements plus a goto. */
3957 return 2;
3958
3959 case GIMPLE_EH_DISPATCH:
3960 /* ??? This is going to turn into a switch statement. Ideally
3961 we'd have a look at the eh region and estimate the number of
3962 edges involved. */
3963 return 10;
3964
3965 case GIMPLE_BIND:
3966 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3967
3968 case GIMPLE_EH_FILTER:
3969 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3970
3971 case GIMPLE_CATCH:
3972 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3973
3974 case GIMPLE_TRY:
3975 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3976 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3977
3978 /* OpenMP directives are generally very expensive. */
3979
3980 case GIMPLE_OMP_RETURN:
3981 case GIMPLE_OMP_SECTIONS_SWITCH:
3982 case GIMPLE_OMP_ATOMIC_STORE:
3983 case GIMPLE_OMP_CONTINUE:
3984 /* ...except these, which are cheap. */
3985 return 0;
3986
3987 case GIMPLE_OMP_ATOMIC_LOAD:
3988 return weights->omp_cost;
3989
3990 case GIMPLE_OMP_FOR:
3991 return (weights->omp_cost
3992 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3993 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3994
3995 case GIMPLE_OMP_PARALLEL:
3996 case GIMPLE_OMP_TASK:
3997 case GIMPLE_OMP_CRITICAL:
3998 case GIMPLE_OMP_MASTER:
3999 case GIMPLE_OMP_TASKGROUP:
4000 case GIMPLE_OMP_ORDERED:
4001 case GIMPLE_OMP_SECTION:
4002 case GIMPLE_OMP_SECTIONS:
4003 case GIMPLE_OMP_SINGLE:
4004 case GIMPLE_OMP_TARGET:
4005 case GIMPLE_OMP_TEAMS:
4006 return (weights->omp_cost
4007 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4008
4009 case GIMPLE_TRANSACTION:
4010 return (weights->tm_cost
4011 + estimate_num_insns_seq (gimple_transaction_body (stmt),
4012 weights));
4013
4014 default:
4015 gcc_unreachable ();
4016 }
4017
4018 return cost;
4019 }
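
/* Two worked examples (sketches; the exact numbers depend on the target
   and on the WEIGHTS used):
   - "a_2 = b_1 + c_3" with a register destination: no move cost, the
     PLUS_EXPR costs 1, so the estimate is 1.
   - "x = foo (y, z)" where foo is an ordinary (non-builtin) function
     with scalar arguments and result: weights->call_cost plus one move
     unit for the LHS and for each argument, e.g. 1 + 3 == 4 with the
     size weights and 10 + 3 == 13 with the time weights.  */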
4020
4021 /* Estimate number of instructions that will be created by expanding
4022 function FNDECL. WEIGHTS contains weights attributed to various
4023 constructs. */
4024
4025 int
4026 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4027 {
4028 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4029 gimple_stmt_iterator bsi;
4030 basic_block bb;
4031 int n = 0;
4032
4033 gcc_assert (my_function && my_function->cfg);
4034 FOR_EACH_BB_FN (bb, my_function)
4035 {
4036 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4037 n += estimate_num_insns (gsi_stmt (bsi), weights);
4038 }
4039
4040 return n;
4041 }
4042
4043
4044 /* Initializes weights used by estimate_num_insns. */
4045
4046 void
4047 init_inline_once (void)
4048 {
4049 eni_size_weights.call_cost = 1;
4050 eni_size_weights.indirect_call_cost = 3;
4051 eni_size_weights.target_builtin_call_cost = 1;
4052 eni_size_weights.div_mod_cost = 1;
4053 eni_size_weights.omp_cost = 40;
4054 eni_size_weights.tm_cost = 10;
4055 eni_size_weights.time_based = false;
4056 eni_size_weights.return_cost = 1;
4057
4058 /* Estimating time for a call is difficult, since we have no idea what the
4059 called function does. In the current uses of eni_time_weights,
4060 underestimating the cost does less harm than overestimating it, so
4061 we choose a rather small value here. */
4062 eni_time_weights.call_cost = 10;
4063 eni_time_weights.indirect_call_cost = 15;
4064 eni_time_weights.target_builtin_call_cost = 1;
4065 eni_time_weights.div_mod_cost = 10;
4066 eni_time_weights.omp_cost = 40;
4067 eni_time_weights.tm_cost = 40;
4068 eni_time_weights.time_based = true;
4069 eni_time_weights.return_cost = 2;
4070 }
4071
4072 /* Estimate the number of instructions in a gimple_seq. */
4073
4074 int
4075 count_insns_seq (gimple_seq seq, eni_weights *weights)
4076 {
4077 gimple_stmt_iterator gsi;
4078 int n = 0;
4079 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4080 n += estimate_num_insns (gsi_stmt (gsi), weights);
4081
4082 return n;
4083 }
4084
4085
4086 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4087
4088 static void
4089 prepend_lexical_block (tree current_block, tree new_block)
4090 {
4091 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4092 BLOCK_SUBBLOCKS (current_block) = new_block;
4093 BLOCK_SUPERCONTEXT (new_block) = current_block;
4094 }
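
/* For instance, if CURRENT_BLOCK already has subblocks B1 -> B2, then
   after prepend_lexical_block (CURRENT_BLOCK, N) the subblock chain is
   N -> B1 -> B2 and BLOCK_SUPERCONTEXT (N) == CURRENT_BLOCK.  */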
4095
4096 /* Add local variables from CALLEE to CALLER. */
4097
4098 static inline void
4099 add_local_variables (struct function *callee, struct function *caller,
4100 copy_body_data *id)
4101 {
4102 tree var;
4103 unsigned ix;
4104
4105 FOR_EACH_LOCAL_DECL (callee, ix, var)
4106 if (!can_be_nonlocal (var, id))
4107 {
4108 tree new_var = remap_decl (var, id);
4109
4110 /* Remap debug-expressions. */
4111 if (TREE_CODE (new_var) == VAR_DECL
4112 && DECL_HAS_DEBUG_EXPR_P (var)
4113 && new_var != var)
4114 {
4115 tree tem = DECL_DEBUG_EXPR (var);
4116 bool old_regimplify = id->regimplify;
4117 id->remapping_type_depth++;
4118 walk_tree (&tem, copy_tree_body_r, id, NULL);
4119 id->remapping_type_depth--;
4120 id->regimplify = old_regimplify;
4121 SET_DECL_DEBUG_EXPR (new_var, tem);
4122 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4123 }
4124 add_local_decl (caller, new_var);
4125 }
4126 }
4127
4128 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4129
4130 static bool
4131 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4132 {
4133 tree use_retvar;
4134 tree fn;
4135 hash_map<tree, tree> *dst;
4136 hash_map<tree, tree> *st = NULL;
4137 tree return_slot;
4138 tree modify_dest;
4139 location_t saved_location;
4140 struct cgraph_edge *cg_edge;
4141 cgraph_inline_failed_t reason;
4142 basic_block return_block;
4143 edge e;
4144 gimple_stmt_iterator gsi, stmt_gsi;
4145 bool successfully_inlined = FALSE;
4146 bool purge_dead_abnormal_edges;
4147
4148 /* Set input_location here so we get the right instantiation context
4149 if we call instantiate_decl from inlinable_function_p. */
4150 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4151 saved_location = input_location;
4152 input_location = gimple_location (stmt);
4153
4154 /* From here on, we're only interested in CALL_EXPRs. */
4155 if (gimple_code (stmt) != GIMPLE_CALL)
4156 goto egress;
4157
4158 cg_edge = id->dst_node->get_edge (stmt);
4159 gcc_checking_assert (cg_edge);
4160 /* First, see if we can figure out what function is being called.
4161 If we cannot, then there is no hope of inlining the function. */
4162 if (cg_edge->indirect_unknown_callee)
4163 goto egress;
4164 fn = cg_edge->callee->decl;
4165 gcc_checking_assert (fn);
4166
4167 /* If FN is a declaration of a function in a nested scope that was
4168 globally declared inline, we don't set its DECL_INITIAL.
4169 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4170 C++ front-end uses it for cdtors to refer to their internal
4171 declarations, which are not real functions. Fortunately those
4172 don't have trees to be saved, so we can tell by checking their
4173 gimple_body. */
4174 if (!DECL_INITIAL (fn)
4175 && DECL_ABSTRACT_ORIGIN (fn)
4176 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4177 fn = DECL_ABSTRACT_ORIGIN (fn);
4178
4179 /* Don't try to inline functions that are not well-suited to inlining. */
4180 if (cg_edge->inline_failed)
4181 {
4182 reason = cg_edge->inline_failed;
4183 /* If this call was originally indirect, we do not want to emit any
4184 inlining related warnings or sorry messages because there are no
4185 guarantees regarding those. */
4186 if (cg_edge->indirect_inlining_edge)
4187 goto egress;
4188
4189 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4190 /* For extern inline functions that get redefined we have always
4191 silently ignored the always_inline flag. Better behaviour would
4192 be to keep both bodies and use the extern inline body
4193 for inlining, but we can't do that because front ends overwrite
4194 the body. */
4195 && !cg_edge->callee->local.redefined_extern_inline
4196 /* During early inline pass, report only when optimization is
4197 not turned on. */
4198 && (symtab->global_info_ready
4199 || !optimize
4200 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4201 /* PR 20090218-1_0.c. Body can be provided by another module. */
4202 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4203 {
4204 error ("inlining failed in call to always_inline %q+F: %s", fn,
4205 cgraph_inline_failed_string (reason));
4206 error ("called from here");
4207 }
4208 else if (warn_inline
4209 && DECL_DECLARED_INLINE_P (fn)
4210 && !DECL_NO_INLINE_WARNING_P (fn)
4211 && !DECL_IN_SYSTEM_HEADER (fn)
4212 && reason != CIF_UNSPECIFIED
4213 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4214 /* Do not warn about not inlined recursive calls. */
4215 && !cg_edge->recursive_p ()
4216 /* Avoid warnings during early inline pass. */
4217 && symtab->global_info_ready)
4218 {
4219 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4220 fn, _(cgraph_inline_failed_string (reason)));
4221 warning (OPT_Winline, "called from here");
4222 }
4223 goto egress;
4224 }
4225 fn = cg_edge->callee->decl;
4226 cg_edge->callee->get_body ();
4227
4228 #ifdef ENABLE_CHECKING
4229 if (cg_edge->callee->decl != id->dst_node->decl)
4230 cg_edge->callee->verify ();
4231 #endif
4232
4233 /* We will be inlining this callee. */
4234 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4235
4236 /* Update the caller's EH personality. */
4237 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4238 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4239 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4240
4241 /* Split the block holding the GIMPLE_CALL. */
4242 e = split_block (bb, stmt);
4243 bb = e->src;
4244 return_block = e->dest;
4245 remove_edge (e);
4246
4247 /* split_block splits after the statement; work around this by
4248 moving the call into the second block manually. Not pretty,
4249 but seems easier than doing the CFG manipulation by hand
4250 when the GIMPLE_CALL is in the last statement of BB. */
4251 stmt_gsi = gsi_last_bb (bb);
4252 gsi_remove (&stmt_gsi, false);
4253
4254 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4255 been the source of abnormal edges. In this case, schedule
4256 the removal of dead abnormal edges. */
4257 gsi = gsi_start_bb (return_block);
4258 if (gsi_end_p (gsi))
4259 {
4260 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4261 purge_dead_abnormal_edges = true;
4262 }
4263 else
4264 {
4265 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4266 purge_dead_abnormal_edges = false;
4267 }
4268
4269 stmt_gsi = gsi_start_bb (return_block);
4270
4271 /* Build a block containing code to initialize the arguments, the
4272 actual inline expansion of the body, and a label for the return
4273 statements within the function to jump to. The type of the
4274 statement expression is the return type of the function call.
4275 ??? If the call does not have an associated block then we will
4276 remap all callee blocks to NULL, effectively dropping most of
4277 its debug information. This should only happen for calls to
4278 artificial decls inserted by the compiler itself. We need to
4279 either link the inlined blocks into the caller block tree or
4280 not refer to them in any way to not break GC for locations. */
4281 if (gimple_block (stmt))
4282 {
4283 id->block = make_node (BLOCK);
4284 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4285 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4286 prepend_lexical_block (gimple_block (stmt), id->block);
4287 }
4288
4289 /* Local declarations will be replaced by their equivalents in this
4290 map. */
4291 st = id->decl_map;
4292 id->decl_map = new hash_map<tree, tree>;
4293 dst = id->debug_map;
4294 id->debug_map = NULL;
4295
4296 /* Record the function we are about to inline. */
4297 id->src_fn = fn;
4298 id->src_node = cg_edge->callee;
4299 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4300 id->gimple_call = stmt;
4301
4302 gcc_assert (!id->src_cfun->after_inlining);
4303
4304 id->entry_bb = bb;
4305 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4306 {
4307 gimple_stmt_iterator si = gsi_last_bb (bb);
4308 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4309 NOT_TAKEN),
4310 GSI_NEW_STMT);
4311 }
4312 initialize_inlined_parameters (id, stmt, fn, bb);
4313
4314 if (DECL_INITIAL (fn))
4315 {
4316 if (gimple_block (stmt))
4317 {
4318 tree *var;
4319
4320 prepend_lexical_block (id->block,
4321 remap_blocks (DECL_INITIAL (fn), id));
4322 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4323 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4324 == NULL_TREE));
4325 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4326 otherwise, in DWARF, the DW_TAG_formal_parameter entries will not be
4327 children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block
4328 under it. The parameters can then be evaluated in the debugger,
4329 but don't show up in backtraces. */
4330 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4331 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4332 {
4333 tree v = *var;
4334 *var = TREE_CHAIN (v);
4335 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4336 BLOCK_VARS (id->block) = v;
4337 }
4338 else
4339 var = &TREE_CHAIN (*var);
4340 }
4341 else
4342 remap_blocks_to_null (DECL_INITIAL (fn), id);
4343 }
4344
4345 /* Return statements in the function body will be replaced by jumps
4346 to the RET_LABEL. */
4347 gcc_assert (DECL_INITIAL (fn));
4348 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4349
4350 /* Find the LHS to which the result of this call is assigned. */
4351 return_slot = NULL;
4352 if (gimple_call_lhs (stmt))
4353 {
4354 modify_dest = gimple_call_lhs (stmt);
4355
4356 /* The function which we are inlining might not return a value,
4357 in which case we should issue a warning that the function
4358 does not return a value. In that case the optimizers will
4359 see that the variable to which the value is assigned was not
4360 initialized. We do not want to issue a warning about that
4361 uninitialized variable. */
4362 if (DECL_P (modify_dest))
4363 TREE_NO_WARNING (modify_dest) = 1;
4364
4365 if (gimple_call_return_slot_opt_p (stmt))
4366 {
4367 return_slot = modify_dest;
4368 modify_dest = NULL;
4369 }
4370 }
4371 else
4372 modify_dest = NULL;
4373
4374 /* If we are inlining a call to the C++ operator new, we don't want
4375 to use type based alias analysis on the return value. Otherwise
4376 we may get confused if the compiler sees that the inlined new
4377 function returns a pointer which was just deleted. See bug
4378 33407. */
4379 if (DECL_IS_OPERATOR_NEW (fn))
4380 {
4381 return_slot = NULL;
4382 modify_dest = NULL;
4383 }
4384
4385 /* Declare the return variable for the function. */
4386 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4387
4388 /* Add local vars in this inlined callee to caller. */
4389 add_local_variables (id->src_cfun, cfun, id);
4390
4391 if (dump_file && (dump_flags & TDF_DETAILS))
4392 {
4393 fprintf (dump_file, "Inlining ");
4394 print_generic_expr (dump_file, id->src_fn, 0);
4395 fprintf (dump_file, " to ");
4396 print_generic_expr (dump_file, id->dst_fn, 0);
4397 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4398 }
4399
4400 /* This is it. Duplicate the callee body. Assume callee is
4401 pre-gimplified. Note that we must not alter the caller
4402 function in any way before this point, as this CALL_EXPR may be
4403 a self-referential call; if we're calling ourselves, we need to
4404 duplicate our body before altering anything. */
4405 copy_body (id, cg_edge->callee->count,
4406 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4407 bb, return_block, NULL);
4408
4409 /* Reset the escaped solution. */
4410 if (cfun->gimple_df)
4411 pt_solution_reset (&cfun->gimple_df->escaped);
4412
4413 /* Clean up. */
4414 if (id->debug_map)
4415 {
4416 delete id->debug_map;
4417 id->debug_map = dst;
4418 }
4419 delete id->decl_map;
4420 id->decl_map = st;
4421
4422 /* Unlink the call's virtual operands before replacing it. */
4423 unlink_stmt_vdef (stmt);
4424 if (gimple_vdef (stmt)
4425 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4426 release_ssa_name (gimple_vdef (stmt));
4427
4428 /* If the inlined function returns a result that we care about,
4429 substitute the GIMPLE_CALL with an assignment of the return
4430 variable to the LHS of the call. That is, if STMT was
4431 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4432 if (use_retvar && gimple_call_lhs (stmt))
4433 {
4434 gimple old_stmt = stmt;
4435 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4436 gsi_replace (&stmt_gsi, stmt, false);
4437 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4438 }
4439 else
4440 {
4441 /* Handle the case of inlining a function with no return
4442 statement, which causes the return value to become undefined. */
4443 if (gimple_call_lhs (stmt)
4444 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4445 {
4446 tree name = gimple_call_lhs (stmt);
4447 tree var = SSA_NAME_VAR (name);
4448 tree def = ssa_default_def (cfun, var);
4449
4450 if (def)
4451 {
4452 /* If the variable is used undefined, make this name
4453 undefined via a move. */
4454 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4455 gsi_replace (&stmt_gsi, stmt, true);
4456 }
4457 else
4458 {
4459 /* Otherwise make this variable undefined. */
4460 gsi_remove (&stmt_gsi, true);
4461 set_ssa_default_def (cfun, var, name);
4462 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4463 }
4464 }
4465 else
4466 gsi_remove (&stmt_gsi, true);
4467 }
4468
4469 if (purge_dead_abnormal_edges)
4470 {
4471 gimple_purge_dead_eh_edges (return_block);
4472 gimple_purge_dead_abnormal_call_edges (return_block);
4473 }
4474
4475 /* If the value of the new expression is ignored, that's OK. We
4476 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4477 the equivalent inlined version either. */
4478 if (is_gimple_assign (stmt))
4479 {
4480 gcc_assert (gimple_assign_single_p (stmt)
4481 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4482 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4483 }
4484
4485 /* Output the inlining info for this abstract function, since it has been
4486 inlined. If we don't do this now, we can lose the information about the
4487 variables in the function when the blocks get blown away as soon as we
4488 remove the cgraph node. */
4489 if (gimple_block (stmt))
4490 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4491
4492 /* Update callgraph if needed. */
4493 cg_edge->callee->remove ();
4494
4495 id->block = NULL_TREE;
4496 successfully_inlined = TRUE;
4497
4498 egress:
4499 input_location = saved_location;
4500 return successfully_inlined;
4501 }
4502
4503 /* Expand call statements reachable from STMT_P.
4504 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4505 in a MODIFY_EXPR. */
4506
4507 static bool
4508 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4509 {
4510 gimple_stmt_iterator gsi;
4511
4512 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4513 {
4514 gimple stmt = gsi_stmt (gsi);
4515
4516 if (is_gimple_call (stmt)
4517 && !gimple_call_internal_p (stmt)
4518 && expand_call_inline (bb, stmt, id))
4519 return true;
4520 }
4521
4522 return false;
4523 }
4524
4525
4526 /* Walk all basic blocks created after FIRST and try to fold every statement
4527 in the STATEMENTS pointer set. */
4528
4529 static void
4530 fold_marked_statements (int first, hash_set<gimple> *statements)
4531 {
4532 for (; first < n_basic_blocks_for_fn (cfun); first++)
4533 if (BASIC_BLOCK_FOR_FN (cfun, first))
4534 {
4535 gimple_stmt_iterator gsi;
4536
4537 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4538 !gsi_end_p (gsi);
4539 gsi_next (&gsi))
4540 if (statements->contains (gsi_stmt (gsi)))
4541 {
4542 gimple old_stmt = gsi_stmt (gsi);
4543 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4544
4545 if (old_decl && DECL_BUILT_IN (old_decl))
4546 {
4547 /* Folding builtins can create multiple instructions;
4548 we need to look at all of them. */
4549 gimple_stmt_iterator i2 = gsi;
4550 gsi_prev (&i2);
4551 if (fold_stmt (&gsi))
4552 {
4553 gimple new_stmt;
4554 /* If a builtin at the end of a bb folded into nothing,
4555 the following loop won't work. */
4556 if (gsi_end_p (gsi))
4557 {
4558 cgraph_update_edges_for_call_stmt (old_stmt,
4559 old_decl, NULL);
4560 break;
4561 }
4562 if (gsi_end_p (i2))
4563 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4564 else
4565 gsi_next (&i2);
4566 while (1)
4567 {
4568 new_stmt = gsi_stmt (i2);
4569 update_stmt (new_stmt);
4570 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4571 new_stmt);
4572
4573 if (new_stmt == gsi_stmt (gsi))
4574 {
4575 /* It is okay to check only the very last
4576 of these statements. If it is a throwing
4577 statement nothing will change. If it isn't,
4578 this can remove EH edges. The only case in
4579 which that would be wrong is if some
4580 intermediate stmts throw but the last one
4581 doesn't; that would mean we'd have to split
4582 the block, which we can't do here, and we'd
4583 lose anyway. And as builtins probably never
4584 throw, this is all moot anyway. */
4585 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4586 new_stmt))
4587 gimple_purge_dead_eh_edges (
4588 BASIC_BLOCK_FOR_FN (cfun, first));
4589 break;
4590 }
4591 gsi_next (&i2);
4592 }
4593 }
4594 }
4595 else if (fold_stmt (&gsi))
4596 {
4597 /* Re-read the statement from GSI as fold_stmt() may
4598 have changed it. */
4599 gimple new_stmt = gsi_stmt (gsi);
4600 update_stmt (new_stmt);
4601
4602 if (is_gimple_call (old_stmt)
4603 || is_gimple_call (new_stmt))
4604 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4605 new_stmt);
4606
4607 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4608 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4609 first));
4610 }
4611 }
4612 }
4613 }
4614
4615 /* Expand calls to inline functions in the body of FN. */
4616
4617 unsigned int
4618 optimize_inline_calls (tree fn)
4619 {
4620 copy_body_data id;
4621 basic_block bb;
4622 int last = n_basic_blocks_for_fn (cfun);
4623 bool inlined_p = false;
4624
4625 /* Clear out ID. */
4626 memset (&id, 0, sizeof (id));
4627
4628 id.src_node = id.dst_node = cgraph_node::get (fn);
4629 gcc_assert (id.dst_node->definition);
4630 id.dst_fn = fn;
4631 /* Or any functions that aren't finished yet. */
4632 if (current_function_decl)
4633 id.dst_fn = current_function_decl;
4634
4635 id.copy_decl = copy_decl_maybe_to_var;
4636 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4637 id.transform_new_cfg = false;
4638 id.transform_return_to_modify = true;
4639 id.transform_parameter = true;
4640 id.transform_lang_insert_block = NULL;
4641 id.statements_to_fold = new hash_set<gimple>;
4642
4643 push_gimplify_context ();
4644
4645 /* We make no attempts to keep dominance info up-to-date. */
4646 free_dominance_info (CDI_DOMINATORS);
4647 free_dominance_info (CDI_POST_DOMINATORS);
4648
4649 /* Register specific gimple functions. */
4650 gimple_register_cfg_hooks ();
4651
4652 /* Reach the trees by walking over the CFG, and note the
4653 enclosing basic-blocks in the call edges. */
4654 /* We walk the blocks going forward, because inlined function bodies
4655 will split id->current_basic_block, and the new blocks will
4656 follow it; we'll trudge through them, processing their CALL_EXPRs
4657 along the way. */
4658 FOR_EACH_BB_FN (bb, cfun)
4659 inlined_p |= gimple_expand_calls_inline (bb, &id);
4660
4661 pop_gimplify_context (NULL);
4662
4663 #ifdef ENABLE_CHECKING
4664 {
4665 struct cgraph_edge *e;
4666
4667 id.dst_node->verify ();
4668
4669 /* Double check that we inlined everything we are supposed to inline. */
4670 for (e = id.dst_node->callees; e; e = e->next_callee)
4671 gcc_assert (e->inline_failed);
4672 }
4673 #endif
4674
4675 /* Fold queued statements. */
4676 fold_marked_statements (last, id.statements_to_fold);
4677 delete id.statements_to_fold;
4678
4679 gcc_assert (!id.debug_stmts.exists ());
4680
4681 /* If we didn't inline into the function there is nothing to do. */
4682 if (!inlined_p)
4683 return 0;
4684
4685 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4686 number_blocks (fn);
4687
4688 delete_unreachable_blocks_update_callgraph (&id);
4689 #ifdef ENABLE_CHECKING
4690 id.dst_node->verify ();
4691 #endif
4692
4693 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4694 not possible yet - the IPA passes might make various functions no longer
4695 throw and they don't care to proactively update local EH info. This is
4696 done later in the fixup_cfg pass, which also executes the verification. */
4697 return (TODO_update_ssa
4698 | TODO_cleanup_cfg
4699 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4700 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4701 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4702 ? TODO_rebuild_frequencies : 0));
4703 }
4704
4705 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4706
4707 tree
4708 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4709 {
4710 enum tree_code code = TREE_CODE (*tp);
4711 enum tree_code_class cl = TREE_CODE_CLASS (code);
4712
4713 /* We make copies of most nodes. */
4714 if (IS_EXPR_CODE_CLASS (cl)
4715 || code == TREE_LIST
4716 || code == TREE_VEC
4717 || code == TYPE_DECL
4718 || code == OMP_CLAUSE)
4719 {
4720 /* Because the chain gets clobbered when we make a copy, we save it
4721 here. */
4722 tree chain = NULL_TREE, new_tree;
4723
4724 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4725 chain = TREE_CHAIN (*tp);
4726
4727 /* Copy the node. */
4728 new_tree = copy_node (*tp);
4729
4730 *tp = new_tree;
4731
4732 /* Now, restore the chain, if appropriate. That will cause
4733 walk_tree to walk into the chain as well. */
4734 if (code == PARM_DECL
4735 || code == TREE_LIST
4736 || code == OMP_CLAUSE)
4737 TREE_CHAIN (*tp) = chain;
4738
4739 /* For now, we don't update BLOCKs when we make copies. So, we
4740 have to nullify all BIND_EXPRs. */
4741 if (TREE_CODE (*tp) == BIND_EXPR)
4742 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4743 }
4744 else if (code == CONSTRUCTOR)
4745 {
4746 /* CONSTRUCTOR nodes need special handling because
4747 we need to duplicate the vector of elements. */
4748 tree new_tree;
4749
4750 new_tree = copy_node (*tp);
4751 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4752 *tp = new_tree;
4753 }
4754 else if (code == STATEMENT_LIST)
4755 /* We used to just abort on STATEMENT_LIST, but we can run into them
4756 with statement-expressions (c++/40975). */
4757 copy_statement_list (tp);
4758 else if (TREE_CODE_CLASS (code) == tcc_type)
4759 *walk_subtrees = 0;
4760 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4761 *walk_subtrees = 0;
4762 else if (TREE_CODE_CLASS (code) == tcc_constant)
4763 *walk_subtrees = 0;
4764 return NULL_TREE;
4765 }
4766
4767 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4768 information indicating to what new SAVE_EXPR this one should be mapped,
4769 use that one. Otherwise, create a new node and enter it in ST. FN is
4770 the function into which the copy will be placed. */
4771
4772 static void
4773 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
4774 {
4775 tree *n;
4776 tree t;
4777
4778 /* See if we already encountered this SAVE_EXPR. */
4779 n = st->get (*tp);
4780
4781 /* If we didn't already remap this SAVE_EXPR, do so now. */
4782 if (!n)
4783 {
4784 t = copy_node (*tp);
4785
4786 /* Remember this SAVE_EXPR. */
4787 st->put (*tp, t);
4788 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4789 st->put (t, t);
4790 }
4791 else
4792 {
4793 /* We've already walked into this SAVE_EXPR; don't do it again. */
4794 *walk_subtrees = 0;
4795 t = *n;
4796 }
4797
4798 /* Replace this SAVE_EXPR with the copy. */
4799 *tp = t;
4800 }
4801
4802 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4803 label, copies the declaration and enters it in the decl map of WI->info
4804 (which is really a 'copy_body_data *'). */
4805
4806 static tree
4807 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4808 bool *handled_ops_p ATTRIBUTE_UNUSED,
4809 struct walk_stmt_info *wi)
4810 {
4811 copy_body_data *id = (copy_body_data *) wi->info;
4812 gimple stmt = gsi_stmt (*gsip);
4813
4814 if (gimple_code (stmt) == GIMPLE_LABEL)
4815 {
4816 tree decl = gimple_label_label (stmt);
4817
4818 /* Copy the decl and remember the copy. */
4819 insert_decl_map (id, decl, id->copy_decl (decl, id));
4820 }
4821
4822 return NULL_TREE;
4823 }
4824
4825
4826 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4827 Using the decl map pointed to by ST (the copy_body_data's decl_map),
4828 remaps all local declarations to appropriate replacements in gimple
4829 operands. */
4830
4831 static tree
4832 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4833 {
4834 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4835 copy_body_data *id = (copy_body_data *) wi->info;
4836 hash_map<tree, tree> *st = id->decl_map;
4837 tree *n;
4838 tree expr = *tp;
4839
4840 /* Only a local declaration (variable or label). */
4841 if ((TREE_CODE (expr) == VAR_DECL
4842 && !TREE_STATIC (expr))
4843 || TREE_CODE (expr) == LABEL_DECL)
4844 {
4845 /* Lookup the declaration. */
4846 n = st->get (expr);
4847
4848 /* If it's there, remap it. */
4849 if (n)
4850 *tp = *n;
4851 *walk_subtrees = 0;
4852 }
4853 else if (TREE_CODE (expr) == STATEMENT_LIST
4854 || TREE_CODE (expr) == BIND_EXPR
4855 || TREE_CODE (expr) == SAVE_EXPR)
4856 gcc_unreachable ();
4857 else if (TREE_CODE (expr) == TARGET_EXPR)
4858 {
4859 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4860 It's OK for this to happen if it was part of a subtree that
4861 isn't immediately expanded, such as operand 2 of another
4862 TARGET_EXPR. */
4863 if (!TREE_OPERAND (expr, 1))
4864 {
4865 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4866 TREE_OPERAND (expr, 3) = NULL_TREE;
4867 }
4868 }
4869
4870 /* Keep iterating. */
4871 return NULL_TREE;
4872 }
4873
4874
4875 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4876 Using the decl map in the copy_body_data passed in WI->info,
4877 remaps all local declarations to appropriate replacements in gimple
4878 statements. */
4879
4880 static tree
4881 replace_locals_stmt (gimple_stmt_iterator *gsip,
4882 bool *handled_ops_p ATTRIBUTE_UNUSED,
4883 struct walk_stmt_info *wi)
4884 {
4885 copy_body_data *id = (copy_body_data *) wi->info;
4886 gimple stmt = gsi_stmt (*gsip);
4887
4888 if (gimple_code (stmt) == GIMPLE_BIND)
4889 {
4890 tree block = gimple_bind_block (stmt);
4891
4892 if (block)
4893 {
4894 remap_block (&block, id);
4895 gimple_bind_set_block (stmt, block);
4896 }
4897
4898 /* This will remap a lot of the same decls again, but this should be
4899 harmless. */
4900 if (gimple_bind_vars (stmt))
4901 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4902 NULL, id));
4903 }
4904
4905 /* Keep iterating. */
4906 return NULL_TREE;
4907 }
4908
4909
4910 /* Copies everything in SEQ and replaces variables and labels local to
4911 current_function_decl. */
4912
4913 gimple_seq
4914 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4915 {
4916 copy_body_data id;
4917 struct walk_stmt_info wi;
4918 gimple_seq copy;
4919
4920 /* There's nothing to do for an empty sequence. */
4921 if (seq == NULL)
4922 return seq;
4923
4924 /* Set up ID. */
4925 memset (&id, 0, sizeof (id));
4926 id.src_fn = current_function_decl;
4927 id.dst_fn = current_function_decl;
4928 id.decl_map = new hash_map<tree, tree>;
4929 id.debug_map = NULL;
4930
4931 id.copy_decl = copy_decl_no_change;
4932 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4933 id.transform_new_cfg = false;
4934 id.transform_return_to_modify = false;
4935 id.transform_parameter = false;
4936 id.transform_lang_insert_block = NULL;
4937
4938 /* Walk the tree once to find local labels. */
4939 memset (&wi, 0, sizeof (wi));
4940 hash_set<tree> visited;
4941 wi.info = &id;
4942 wi.pset = &visited;
4943 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4944
4945 copy = gimple_seq_copy (seq);
4946
4947 /* Walk the copy, remapping decls. */
4948 memset (&wi, 0, sizeof (wi));
4949 wi.info = &id;
4950 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4951
4952 /* Clean up. */
4953 delete id.decl_map;
4954 if (id.debug_map)
4955 delete id.debug_map;
4956
4957 return copy;
4958 }
4959
4960
4961 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4962
4963 static tree
4964 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4965 {
4966 if (*tp == data)
4967 return (tree) data;
4968 else
4969 return NULL;
4970 }
4971
4972 DEBUG_FUNCTION bool
4973 debug_find_tree (tree top, tree search)
4974 {
4975 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4976 }
4977
4978
4979 /* Declare the variables created by the inliner. Add all the variables in
4980 VARS to BIND_EXPR. */
4981
4982 static void
4983 declare_inline_vars (tree block, tree vars)
4984 {
4985 tree t;
4986 for (t = vars; t; t = DECL_CHAIN (t))
4987 {
4988 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4989 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4990 add_local_decl (cfun, t);
4991 }
4992
4993 if (block)
4994 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4995 }
4996
4997 /* Finish up the copy of DECL into COPY. The DECL originally was in ID->src_fn,
4998 but the copy will live in ID->dst_fn. Sets up debug info, abstract origin,
4999 RTL and DECL_CONTEXT for the copy. */
5000
5001 static tree
5002 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5003 {
5004 /* Don't generate debug information for the copy if we wouldn't have
5005 generated it for the original either. */
5006 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5007 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5008
5009 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5010 declaration inspired this copy. */
5011 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5012
5013 /* The new variable/label has no RTL, yet. */
5014 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5015 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5016 SET_DECL_RTL (copy, 0);
5017
5018 /* These args would always appear unused, if not for this. */
5019 TREE_USED (copy) = 1;
5020
5021 /* Set the context for the new declaration. */
5022 if (!DECL_CONTEXT (decl))
5023 /* Globals stay global. */
5024 ;
5025 else if (DECL_CONTEXT (decl) != id->src_fn)
5026 /* Things that weren't in the scope of the function we're inlining
5027 from aren't in the scope we're inlining to, either. */
5028 ;
5029 else if (TREE_STATIC (decl))
5030 /* Function-scoped static variables should stay in the original
5031 function. */
5032 ;
5033 else
5034 /* Ordinary automatic local variables are now in the scope of the
5035 new function. */
5036 DECL_CONTEXT (copy) = id->dst_fn;
5037
5038 return copy;
5039 }
5040
5041 static tree
5042 copy_decl_to_var (tree decl, copy_body_data *id)
5043 {
5044 tree copy, type;
5045
5046 gcc_assert (TREE_CODE (decl) == PARM_DECL
5047 || TREE_CODE (decl) == RESULT_DECL);
5048
5049 type = TREE_TYPE (decl);
5050
5051 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5052 VAR_DECL, DECL_NAME (decl), type);
5053 if (DECL_PT_UID_SET_P (decl))
5054 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5055 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5056 TREE_READONLY (copy) = TREE_READONLY (decl);
5057 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5058 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5059
5060 return copy_decl_for_dup_finish (id, decl, copy);
5061 }
5062
5063 /* Like copy_decl_to_var, but create a return slot object instead of a
5064 pointer variable for return by invisible reference. */
5065
5066 static tree
5067 copy_result_decl_to_var (tree decl, copy_body_data *id)
5068 {
5069 tree copy, type;
5070
5071 gcc_assert (TREE_CODE (decl) == PARM_DECL
5072 || TREE_CODE (decl) == RESULT_DECL);
5073
5074 type = TREE_TYPE (decl);
5075 if (DECL_BY_REFERENCE (decl))
5076 type = TREE_TYPE (type);
5077
5078 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5079 VAR_DECL, DECL_NAME (decl), type);
5080 if (DECL_PT_UID_SET_P (decl))
5081 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5082 TREE_READONLY (copy) = TREE_READONLY (decl);
5083 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5084 if (!DECL_BY_REFERENCE (decl))
5085 {
5086 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5087 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5088 }
5089
5090 return copy_decl_for_dup_finish (id, decl, copy);
5091 }
5092
5093 tree
5094 copy_decl_no_change (tree decl, copy_body_data *id)
5095 {
5096 tree copy;
5097
5098 copy = copy_node (decl);
5099
5100 /* The COPY is not abstract; it will be generated in DST_FN. */
5101 DECL_ABSTRACT_P (copy) = false;
5102 lang_hooks.dup_lang_specific_decl (copy);
5103
5104 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5105 been taken; it's for internal bookkeeping in expand_goto_internal. */
5106 if (TREE_CODE (copy) == LABEL_DECL)
5107 {
5108 TREE_ADDRESSABLE (copy) = 0;
5109 LABEL_DECL_UID (copy) = -1;
5110 }
5111
5112 return copy_decl_for_dup_finish (id, decl, copy);
5113 }
5114
5115 static tree
5116 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5117 {
5118 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5119 return copy_decl_to_var (decl, id);
5120 else
5121 return copy_decl_no_change (decl, id);
5122 }
5123
5124 /* Return a copy of the function's argument tree. */
5125 static tree
5126 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5127 bitmap args_to_skip, tree *vars)
5128 {
5129 tree arg, *parg;
5130 tree new_parm = NULL;
5131 int i = 0;
5132
5133 parg = &new_parm;
5134
5135 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5136 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5137 {
5138 tree new_tree = remap_decl (arg, id);
5139 if (TREE_CODE (new_tree) != PARM_DECL)
5140 new_tree = id->copy_decl (arg, id);
5141 lang_hooks.dup_lang_specific_decl (new_tree);
5142 *parg = new_tree;
5143 parg = &DECL_CHAIN (new_tree);
5144 }
5145 else if (!id->decl_map->get (arg))
5146 {
5147 /* Make an equivalent VAR_DECL. If the argument was used
5148 as a temporary variable later in the function, the uses will be
5149 replaced by the local variable. */
5150 tree var = copy_decl_to_var (arg, id);
5151 insert_decl_map (id, arg, var);
5152 /* Declare this new variable. */
5153 DECL_CHAIN (var) = *vars;
5154 *vars = var;
5155 }
5156 return new_parm;
5157 }
5158
5159 /* Return a copy of the function's static chain. */
5160 static tree
5161 copy_static_chain (tree static_chain, copy_body_data * id)
5162 {
5163 tree *chain_copy, *pvar;
5164
5165 chain_copy = &static_chain;
5166 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5167 {
5168 tree new_tree = remap_decl (*pvar, id);
5169 lang_hooks.dup_lang_specific_decl (new_tree);
5170 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5171 *pvar = new_tree;
5172 }
5173 return static_chain;
5174 }
5175
5176 /* Return true if the function is allowed to be versioned.
5177 This is a guard for the versioning functionality. */
5178
5179 bool
5180 tree_versionable_function_p (tree fndecl)
5181 {
5182 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5183 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5184 }
5185
5186 /* Delete all unreachable basic blocks and update the callgraph.
5187 Doing so is somewhat nontrivial because we need to update all clones and
5188 remove inline functions that become unreachable. */
5189
5190 static bool
5191 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5192 {
5193 bool changed = false;
5194 basic_block b, next_bb;
5195
5196 find_unreachable_blocks ();
5197
5198 /* Delete all unreachable basic blocks. */
5199
5200 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5201 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5202 {
5203 next_bb = b->next_bb;
5204
5205 if (!(b->flags & BB_REACHABLE))
5206 {
5207 gimple_stmt_iterator bsi;
5208
5209 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5210 {
5211 struct cgraph_edge *e;
5212 struct cgraph_node *node;
5213
5214 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5215
5216 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5217 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5218 {
5219 if (!e->inline_failed)
5220 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5221 else
5222 e->remove ();
5223 }
5224 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5225 && id->dst_node->clones)
5226 for (node = id->dst_node->clones; node != id->dst_node;)
5227 {
5228 node->remove_stmt_references (gsi_stmt (bsi));
5229 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5230 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5231 {
5232 if (!e->inline_failed)
5233 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5234 else
5235 e->remove ();
5236 }
5237
5238 if (node->clones)
5239 node = node->clones;
5240 else if (node->next_sibling_clone)
5241 node = node->next_sibling_clone;
5242 else
5243 {
5244 while (node != id->dst_node && !node->next_sibling_clone)
5245 node = node->clone_of;
5246 if (node != id->dst_node)
5247 node = node->next_sibling_clone;
5248 }
5249 }
5250 }
5251 delete_basic_block (b);
5252 changed = true;
5253 }
5254 }
5255
5256 return changed;
5257 }
5258
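/* Both delete_unreachable_blocks_update_callgraph above and
   update_clone_info below walk the clone tree in preorder: descend into
   node->clones first, then move to node->next_sibling_clone, and climb
   back via node->clone_of when a subtree is exhausted.  For a node A
   with clones B and C, where B itself has a clone D, the visit order is
   B, D, C.  */
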
5259 /* Update clone info after duplication. */
5260
5261 static void
5262 update_clone_info (copy_body_data * id)
5263 {
5264 struct cgraph_node *node;
5265 if (!id->dst_node->clones)
5266 return;
5267 for (node = id->dst_node->clones; node != id->dst_node;)
5268 {
5269 /* First update replace maps to match the new body. */
5270 if (node->clone.tree_map)
5271 {
5272 unsigned int i;
5273 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5274 {
5275 struct ipa_replace_map *replace_info;
5276 replace_info = (*node->clone.tree_map)[i];
5277 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5278 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5279 }
5280 }
5281 if (node->clones)
5282 node = node->clones;
5283 else if (node->next_sibling_clone)
5284 node = node->next_sibling_clone;
5285 else
5286 {
5287 while (node != id->dst_node && !node->next_sibling_clone)
5288 node = node->clone_of;
5289 if (node != id->dst_node)
5290 node = node->next_sibling_clone;
5291 }
5292 }
5293 }
5294
5295 /* Create a copy of a function's tree.
5296 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5297 of the original function and the new copied function
5298 respectively. In case we want to replace a DECL
5299 tree with another tree while duplicating the function's
5300 body, TREE_MAP represents the mapping between these
5301 trees. If UPDATE_CLONES is set, the call_stmt fields
5302 of edges of clones of the function will be updated.
5303
5304 If non-NULL, ARGS_TO_SKIP determines which function parameters to
5305 remove from the new version.
5306 If SKIP_RETURN is true, the new version will return void.
5307 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5308 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5309 */
5310 void
5311 tree_function_versioning (tree old_decl, tree new_decl,
5312 vec<ipa_replace_map *, va_gc> *tree_map,
5313 bool update_clones, bitmap args_to_skip,
5314 bool skip_return, bitmap blocks_to_copy,
5315 basic_block new_entry)
5316 {
5317 struct cgraph_node *old_version_node;
5318 struct cgraph_node *new_version_node;
5319 copy_body_data id;
5320 tree p;
5321 unsigned i;
5322 struct ipa_replace_map *replace_info;
5323 basic_block old_entry_block, bb;
5324 auto_vec<gimple, 10> init_stmts;
5325 tree vars = NULL_TREE;
5326
5327 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5328 && TREE_CODE (new_decl) == FUNCTION_DECL);
5329 DECL_POSSIBLY_INLINED (old_decl) = 1;
5330
5331 old_version_node = cgraph_node::get (old_decl);
5332 gcc_checking_assert (old_version_node);
5333 new_version_node = cgraph_node::get (new_decl);
5334 gcc_checking_assert (new_version_node);
5335
5336 /* Copy over debug args. */
5337 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5338 {
5339 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5340 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5341 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5342 old_debug_args = decl_debug_args_lookup (old_decl);
5343 if (old_debug_args)
5344 {
5345 new_debug_args = decl_debug_args_insert (new_decl);
5346 *new_debug_args = vec_safe_copy (*old_debug_args);
5347 }
5348 }
5349
5350 /* Output the inlining info for this abstract function, since it has been
5351 inlined. If we don't do this now, we can lose the information about the
5352 variables in the function when the blocks get blown away as soon as we
5353 remove the cgraph node. */
5354 (*debug_hooks->outlining_inline_function) (old_decl);
5355
5356 DECL_ARTIFICIAL (new_decl) = 1;
5357 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5358 if (DECL_ORIGIN (old_decl) == old_decl)
5359 old_version_node->used_as_abstract_origin = true;
5360 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5361
5362 /* Prepare the data structures for the tree copy. */
5363 memset (&id, 0, sizeof (id));
5364
5365 /* Collect statements that will need folding once the body is copied. */
5366 id.statements_to_fold = new hash_set<gimple>;
5367
5368 id.decl_map = new hash_map<tree, tree>;
5369 id.debug_map = NULL;
5370 id.src_fn = old_decl;
5371 id.dst_fn = new_decl;
5372 id.src_node = old_version_node;
5373 id.dst_node = new_version_node;
5374 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5375 id.blocks_to_copy = blocks_to_copy;
5376
5377 id.copy_decl = copy_decl_no_change;
5378 id.transform_call_graph_edges
5379 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5380 id.transform_new_cfg = true;
5381 id.transform_return_to_modify = false;
5382 id.transform_parameter = false;
5383 id.transform_lang_insert_block = NULL;
5384
5385 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5386 (DECL_STRUCT_FUNCTION (old_decl));
5387 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5388 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5389 initialize_cfun (new_decl, old_decl,
5390 old_entry_block->count);
5391 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5392 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5393 = id.src_cfun->gimple_df->ipa_pta;
5394
5395 /* Copy the function's static chain. */
5396 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5397 if (p)
5398 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5399 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5400 &id);
5401
5402 /* If there's a tree_map, prepare for substitution. */
5403 if (tree_map)
5404 for (i = 0; i < tree_map->length (); i++)
5405 {
5406 gimple init;
5407 replace_info = (*tree_map)[i];
5408 if (replace_info->replace_p)
5409 {
5410 if (!replace_info->old_tree)
5411 {
5412 int i = replace_info->parm_num;
5413 tree parm;
5414 tree req_type;
5415
5416 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5417 i--;
5418 replace_info->old_tree = parm;
5419 req_type = TREE_TYPE (parm);
5420 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5421 {
5422 if (fold_convertible_p (req_type, replace_info->new_tree))
5423 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5424 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5425 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5426 else
5427 {
5428 if (dump_file)
5429 {
5430 fprintf (dump_file, " const ");
5431 print_generic_expr (dump_file, replace_info->new_tree, 0);
5432 fprintf (dump_file, " can't be converted to param ");
5433 print_generic_expr (dump_file, parm, 0);
5434 fprintf (dump_file, "\n");
5435 }
5436 replace_info->old_tree = NULL;
5437 }
5438 }
5439 }
5440 else
5441 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5442 if (replace_info->old_tree)
5443 {
5444 init = setup_one_parameter (&id, replace_info->old_tree,
5445 replace_info->new_tree, id.src_fn,
5446 NULL,
5447 &vars);
5448 if (init)
5449 init_stmts.safe_push (init);
5450 }
5451 }
5452 }
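/* Concrete illustration of a replace map (a sketch; the variables are
   hypothetical): an IPA pass that wants to pin the second parameter of
   OLD_DECL to the constant 4 could pass a TREE_MAP containing

     struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;           (filled in from parm_num above)
     map->parm_num = 1;                   (second parameter)
     map->new_tree = build_int_cst (integer_type_node, 4);
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);

   The loop above then resolves the PARM_DECL from PARM_NUM, coerces 4
   to the parameter type with a NOP_EXPR when fold_convertible_p allows
   it (or a VIEW_CONVERT_EXPR for same-sized types), and queues the
   statement built by setup_one_parameter in INIT_STMTS so it can be
   inserted at the entry of the new body below.  */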
5453 /* Copy the function's arguments. */
5454 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5455 DECL_ARGUMENTS (new_decl) =
5456 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5457 args_to_skip, &vars);
5458
5459 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5460 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5461
5462 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5463
5464 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5465 /* Add local vars. */
5466 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5467
5468 if (DECL_RESULT (old_decl) == NULL_TREE)
5469 ;
5470 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5471 {
5472 DECL_RESULT (new_decl)
5473 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5474 RESULT_DECL, NULL_TREE, void_type_node);
5475 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5476 cfun->returns_struct = 0;
5477 cfun->returns_pcc_struct = 0;
5478 }
5479 else
5480 {
5481 tree old_name;
5482 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5483 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5484 if (gimple_in_ssa_p (id.src_cfun)
5485 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5486 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5487 {
5488 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5489 insert_decl_map (&id, old_name, new_name);
5490 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5491 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5492 }
5493 }
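/* Illustration of SKIP_RETURN (a sketch, not taken from this file):
   when an IPA pass has proved that no remaining caller uses the value
   of

     int f (int x) { return x + 1; }

   it can version F with SKIP_RETURN set.  The clone then receives the
   void RESULT_DECL built above and the returns_struct flags are
   cleared; callers are assumed to have been rewritten so that they no
   longer consume the result.  */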
5494
5495 /* Set up the destination function's loop tree. */
5496 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5497 {
5498 cfun->curr_properties &= ~PROP_loops;
5499 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5500 cfun->curr_properties |= PROP_loops;
5501 }
5502
5503 /* Copy the function's body. */
5504 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5505 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5506 new_entry);
5507
5508 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5509 number_blocks (new_decl);
5510
5511 /* We want to create the BB unconditionally, so that the addition of
5512 debug stmts doesn't affect BB count, which may in the end cause
5513 codegen differences. */
5514 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5515 while (init_stmts.length ())
5516 insert_init_stmt (&id, bb, init_stmts.pop ());
5517 update_clone_info (&id);
5518
5519 /* Remap the nonlocal_goto_save_area, if any. */
5520 if (cfun->nonlocal_goto_save_area)
5521 {
5522 struct walk_stmt_info wi;
5523
5524 memset (&wi, 0, sizeof (wi));
5525 wi.info = &id;
5526 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5527 }
5528
5529 /* Clean up. */
5530 delete id.decl_map;
5531 if (id.debug_map)
5532 delete id.debug_map;
5533 free_dominance_info (CDI_DOMINATORS);
5534 free_dominance_info (CDI_POST_DOMINATORS);
5535
5536 fold_marked_statements (0, id.statements_to_fold);
5537 delete id.statements_to_fold;
5538 fold_cond_expr_cond ();
5539 delete_unreachable_blocks_update_callgraph (&id);
5540 if (id.dst_node->definition)
5541 cgraph_edge::rebuild_references ();
5542 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5543 {
5544 calculate_dominance_info (CDI_DOMINATORS);
5545 fix_loop_structure (NULL);
5546 }
5547 update_ssa (TODO_update_ssa);
5548
5549 /* After partial cloning we need to rescale frequencies, so they are
5550 within proper range in the cloned function. */
5551 if (new_entry)
5552 {
5553 struct cgraph_edge *e;
5554 rebuild_frequencies ();
5555
5556 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5557 for (e = new_version_node->callees; e; e = e->next_callee)
5558 {
5559 basic_block bb = gimple_bb (e->call_stmt);
5560 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5561 bb);
5562 e->count = bb->count;
5563 }
5564 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5565 {
5566 basic_block bb = gimple_bb (e->call_stmt);
5567 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5568 bb);
5569 e->count = bb->count;
5570 }
5571 }
5572
5573 free_dominance_info (CDI_DOMINATORS);
5574 free_dominance_info (CDI_POST_DOMINATORS);
5575
5576 gcc_assert (!id.debug_stmts.exists ());
5577 pop_cfun ();
5578 return;
5579 }
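/* A fuller usage sketch (illustrative only; OLD_DECL and NEW_DECL are
   assumed to be FUNCTION_DECLs that already have cgraph nodes, as the
   asserts above require).  Dropping the first parameter of OLD_DECL
   while keeping the rest of the body amounts to

     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, 0);
     if (tree_versionable_function_p (old_decl))
       tree_function_versioning (old_decl, new_decl,
                                 NULL, false,
                                 args_to_skip,
                                 false, NULL, NULL);
     BITMAP_FREE (args_to_skip);

   In practice this entry point is normally reached through the cgraph
   cloning machinery rather than called by hand.  */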
5580
5581 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5582 the callee and return the inlined body on success. */
5583
5584 tree
5585 maybe_inline_call_in_expr (tree exp)
5586 {
5587 tree fn = get_callee_fndecl (exp);
5588
5589 /* We can only try to inline "const" functions. */
5590 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5591 {
5592 call_expr_arg_iterator iter;
5593 copy_body_data id;
5594 tree param, arg, t;
5595 hash_map<tree, tree> decl_map;
5596
5597 /* Remap the parameters. */
5598 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5599 param;
5600 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5601 decl_map.put (param, arg);
5602
5603 memset (&id, 0, sizeof (id));
5604 id.src_fn = fn;
5605 id.dst_fn = current_function_decl;
5606 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5607 id.decl_map = &decl_map;
5608
5609 id.copy_decl = copy_decl_no_change;
5610 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5611 id.transform_new_cfg = false;
5612 id.transform_return_to_modify = true;
5613 id.transform_parameter = true;
5614 id.transform_lang_insert_block = NULL;
5615
5616 /* Make sure not to unshare trees behind the front-end's back
5617 since front-end specific mechanisms may rely on sharing. */
5618 id.regimplify = false;
5619 id.do_not_unshare = true;
5620
5621 /* We're not inside any EH region. */
5622 id.eh_lp_nr = 0;
5623
5624 t = copy_tree_body (&id);
5625
5626 /* We can only return something suitable for use in a GENERIC
5627 expression tree. */
5628 if (TREE_CODE (t) == MODIFY_EXPR)
5629 return TREE_OPERAND (t, 1);
5630 }
5631
5632 return NULL_TREE;
5633 }
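/* Worked example (illustrative; SQ is a hypothetical function): given
   the const function

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   a GENERIC call sq (3) whose callee still has DECL_SAVED_TREE can be
   handed to this routine.  The PARM_DECL X is mapped to the argument 3,
   the saved body is copied with its returned value turned into a
   MODIFY_EXPR, and the right-hand side of that MODIFY_EXPR (x * x with
   X replaced by 3) is handed back.  Anything that does not reduce to a
   single MODIFY_EXPR makes the routine give up and return NULL_TREE.  */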
5634
5635 /* Duplicate a type, fields and all. */
5636
5637 tree
5638 build_duplicate_type (tree type)
5639 {
5640 struct copy_body_data id;
5641
5642 memset (&id, 0, sizeof (id));
5643 id.src_fn = current_function_decl;
5644 id.dst_fn = current_function_decl;
5645 id.src_cfun = cfun;
5646 id.decl_map = new hash_map<tree, tree>;
5647 id.debug_map = NULL;
5648 id.copy_decl = copy_decl_no_change;
5649
5650 type = remap_type_1 (type, &id);
5651
5652 delete id.decl_map;
5653 if (id.debug_map)
5654 delete id.debug_map;
5655
5656 TYPE_CANONICAL (type) = type;
5657
5658 return type;
5659 }
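/* Usage sketch (illustrative only; ORIG_TYPE is hypothetical): a front
   end that wants a variant of a type it can modify without touching the
   original, for instance to edit the copy's fields, can call

     tree dup = build_duplicate_type (orig_type);

   The copy is produced by remap_type_1 with a fresh decl map, and
   TYPE_CANONICAL of the duplicate is the duplicate itself, so it is not
   considered the same canonical type as ORIG_TYPE afterwards.  */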