1 /* Tree inlining.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "inchash.h"
32 #include "tree.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "calls.h"
36 #include "tree-inline.h"
37 #include "flags.h"
38 #include "params.h"
39 #include "insn-config.h"
40 #include "hashtab.h"
41 #include "langhooks.h"
42 #include "predict.h"
43 #include "hard-reg-set.h"
44 #include "function.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfganal.h"
48 #include "basic-block.h"
49 #include "tree-iterator.h"
50 #include "intl.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-fold.h"
54 #include "tree-eh.h"
55 #include "gimple-expr.h"
56 #include "is-a.h"
57 #include "gimple.h"
58 #include "gimplify.h"
59 #include "gimple-iterator.h"
60 #include "gimplify-me.h"
61 #include "gimple-walk.h"
62 #include "gimple-ssa.h"
63 #include "tree-cfg.h"
64 #include "tree-phinodes.h"
65 #include "ssa-iterators.h"
66 #include "stringpool.h"
67 #include "tree-ssanames.h"
68 #include "tree-into-ssa.h"
69 #include "rtl.h"
70 #include "statistics.h"
71 #include "expmed.h"
72 #include "dojump.h"
73 #include "explow.h"
74 #include "emit-rtl.h"
75 #include "varasm.h"
76 #include "stmt.h"
77 #include "expr.h"
78 #include "tree-dfa.h"
79 #include "tree-ssa.h"
80 #include "tree-pretty-print.h"
81 #include "except.h"
82 #include "debug.h"
83 #include "hash-map.h"
84 #include "plugin-api.h"
85 #include "ipa-ref.h"
86 #include "cgraph.h"
87 #include "alloc-pool.h"
88 #include "symbol-summary.h"
89 #include "ipa-prop.h"
90 #include "value-prof.h"
91 #include "tree-pass.h"
92 #include "target.h"
93 #include "cfgloop.h"
94 #include "builtins.h"
95 #include "tree-chkp.h"
96
97 #include "rtl.h" /* FIXME: For asm_str_count. */
98
99 /* I'm not real happy about this, but we need to handle gimple and
100 non-gimple trees. */
101
102 /* Inlining, Cloning, Versioning, Parallelization
103
104 Inlining: a function body is duplicated, but the PARM_DECLs are
105 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
106 MODIFY_EXPRs that store to a dedicated returned-value variable.
107 The duplicated eh_region info of the copy will later be appended
108 to the info for the caller; the eh_region info in copied throwing
109 statements and RESX statements are adjusted accordingly.
110
111 Cloning: (only in C++) We have one body for a con/de/structor, and
112 multiple function decls, each with a unique parameter list.
113 Duplicate the body, using the given splay tree; some parameters
114 will become constants (like 0 or 1).
115
116 Versioning: a function body is duplicated and the result is a new
117 function rather than into blocks of an existing function as with
118 inlining. Some parameters will become constants.
119
120 Parallelization: a region of a function is duplicated resulting in
121 a new function. Variables may be replaced with complex expressions
122 to enable shared variable semantics.
123
 124 All of these will simultaneously look up any callgraph edges. If
125 we're going to inline the duplicated function body, and the given
126 function has some cloned callgraph nodes (one for each place this
127 function will be inlined) those callgraph edges will be duplicated.
128 If we're cloning the body, those callgraph edges will be
129 updated to point into the new body. (Note that the original
130 callgraph node and edge list will not be altered.)
131
132 See the CALL_EXPR handling case in copy_tree_body_r (). */
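/* As a rough illustration of the inlining transformation described above
   (the temporaries and exact GIMPLE shown here are only a sketch):

       int sq (int x) { return x * x; }
       ...
       y = sq (a);

   effectively becomes

       x.1 = a;                  PARM_DECL x remapped to a local VAR_DECL
       retval.2 = x.1 * x.1;     the RETURN_EXPR became a MODIFY_EXPR
       y = retval.2;

   and any callgraph edges in the duplicated body are updated as described
   above.  */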
133
134 /* To Do:
135
136 o In order to make inlining-on-trees work, we pessimized
137 function-local static constants. In particular, they are now
138 always output, even when not addressed. Fix this by treating
139 function-local static constants just like global static
140 constants; the back-end already knows not to output them if they
141 are not needed.
142
143 o Provide heuristics to clamp inlining of recursive template
144 calls? */
145
146
147 /* Weights that estimate_num_insns uses to estimate the size of the
148 produced code. */
149
150 eni_weights eni_size_weights;
151
152 /* Weights that estimate_num_insns uses to estimate the time necessary
153 to execute the produced code. */
154
155 eni_weights eni_time_weights;
156
157 /* Prototypes. */
158
159 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
160 basic_block);
161 static void remap_block (tree *, copy_body_data *);
162 static void copy_bind_expr (tree *, int *, copy_body_data *);
163 static void declare_inline_vars (tree, tree);
164 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
165 static void prepend_lexical_block (tree current_block, tree new_block);
166 static tree copy_decl_to_var (tree, copy_body_data *);
167 static tree copy_result_decl_to_var (tree, copy_body_data *);
168 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
169 static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
170 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
171 static void insert_init_stmt (copy_body_data *, basic_block, gimple);
172
 173 /* Insert a tree->tree mapping for ID. Although the name suggests
 174 that the trees should be variables, this is used for more than that. */
175
176 void
177 insert_decl_map (copy_body_data *id, tree key, tree value)
178 {
179 id->decl_map->put (key, value);
180
181 /* Always insert an identity map as well. If we see this same new
182 node again, we won't want to duplicate it a second time. */
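  /* E.g. (illustrative names only) after mapping an old decl A to its
     copy A1, also map A1 to itself, so that walking the already-copied
     body and meeting A1 again simply yields A1 instead of yet another
     copy.  */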
183 if (key != value)
184 id->decl_map->put (value, value);
185 }
186
187 /* Insert a tree->tree mapping for ID. This is only used for
188 variables. */
189
190 static void
191 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
192 {
193 if (!gimple_in_ssa_p (id->src_cfun))
194 return;
195
196 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
197 return;
198
199 if (!target_for_debug_bind (key))
200 return;
201
202 gcc_assert (TREE_CODE (key) == PARM_DECL);
203 gcc_assert (TREE_CODE (value) == VAR_DECL);
204
205 if (!id->debug_map)
206 id->debug_map = new hash_map<tree, tree>;
207
208 id->debug_map->put (key, value);
209 }
210
211 /* If nonzero, we're remapping the contents of inlined debug
212 statements. If negative, an error has occurred, such as a
213 reference to a variable that isn't available in the inlined
214 context. */
215 static int processing_debug_stmt = 0;
216
217 /* Construct new SSA name for old NAME. ID is the inline context. */
218
219 static tree
220 remap_ssa_name (tree name, copy_body_data *id)
221 {
222 tree new_tree, var;
223 tree *n;
224
225 gcc_assert (TREE_CODE (name) == SSA_NAME);
226
227 n = id->decl_map->get (name);
228 if (n)
229 return unshare_expr (*n);
230
231 if (processing_debug_stmt)
232 {
233 if (SSA_NAME_IS_DEFAULT_DEF (name)
234 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
235 && id->entry_bb == NULL
236 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
237 {
238 tree vexpr = make_node (DEBUG_EXPR_DECL);
239 gimple def_temp;
240 gimple_stmt_iterator gsi;
241 tree val = SSA_NAME_VAR (name);
242
243 n = id->decl_map->get (val);
244 if (n != NULL)
245 val = *n;
246 if (TREE_CODE (val) != PARM_DECL)
247 {
248 processing_debug_stmt = -1;
249 return name;
250 }
251 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
252 DECL_ARTIFICIAL (vexpr) = 1;
253 TREE_TYPE (vexpr) = TREE_TYPE (name);
254 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
255 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
256 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
257 return vexpr;
258 }
259
260 processing_debug_stmt = -1;
261 return name;
262 }
263
264 /* Remap anonymous SSA names or SSA names of anonymous decls. */
265 var = SSA_NAME_VAR (name);
266 if (!var
267 || (!SSA_NAME_IS_DEFAULT_DEF (name)
268 && TREE_CODE (var) == VAR_DECL
269 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
270 && DECL_ARTIFICIAL (var)
271 && DECL_IGNORED_P (var)
272 && !DECL_NAME (var)))
273 {
274 struct ptr_info_def *pi;
275 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
276 if (!var && SSA_NAME_IDENTIFIER (name))
277 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
278 insert_decl_map (id, name, new_tree);
279 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
280 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
281 /* At least IPA points-to info can be directly transferred. */
282 if (id->src_cfun->gimple_df
283 && id->src_cfun->gimple_df->ipa_pta
284 && (pi = SSA_NAME_PTR_INFO (name))
285 && !pi->pt.anything)
286 {
287 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
288 new_pi->pt = pi->pt;
289 }
290 return new_tree;
291 }
292
 293 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
 294 that in copy_bb. */
295 new_tree = remap_decl (var, id);
296
 297 /* We might've substituted a constant or another SSA_NAME for
 298 the variable.
 299
 300 Replace the SSA name representing the RESULT_DECL by the variable during
 301 inlining: this saves us from needing to introduce a PHI node in the case
 302 where the return value is only partly initialized. */
303 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
304 && (!SSA_NAME_VAR (name)
305 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
306 || !id->transform_return_to_modify))
307 {
308 struct ptr_info_def *pi;
309 new_tree = make_ssa_name (new_tree);
310 insert_decl_map (id, name, new_tree);
311 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
312 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
313 /* At least IPA points-to info can be directly transferred. */
314 if (id->src_cfun->gimple_df
315 && id->src_cfun->gimple_df->ipa_pta
316 && (pi = SSA_NAME_PTR_INFO (name))
317 && !pi->pt.anything)
318 {
319 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
320 new_pi->pt = pi->pt;
321 }
322 if (SSA_NAME_IS_DEFAULT_DEF (name))
323 {
 324 /* By inlining a function having an uninitialized variable, we might
 325 extend its lifetime (the variable might get reused). This causes an
 326 ICE in the case we end up extending the lifetime of an SSA name across
 327 an abnormal edge, and it also increases register pressure.
 328
 329 We simply initialize all uninitialized vars by 0, except for the
 330 case where we are inlining into the very first BB. We can avoid
 331 this for all BBs that are not inside strongly connected
 332 regions of the CFG, but this is expensive to test. */
333 if (id->entry_bb
334 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
335 && (!SSA_NAME_VAR (name)
336 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
337 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
338 0)->dest
339 || EDGE_COUNT (id->entry_bb->preds) != 1))
340 {
341 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
342 gimple init_stmt;
343 tree zero = build_zero_cst (TREE_TYPE (new_tree));
344
345 init_stmt = gimple_build_assign (new_tree, zero);
346 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
347 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
348 }
349 else
350 {
351 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
352 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
353 }
354 }
355 }
356 else
357 insert_decl_map (id, name, new_tree);
358 return new_tree;
359 }
360
361 /* Remap DECL during the copying of the BLOCK tree for the function. */
362
363 tree
364 remap_decl (tree decl, copy_body_data *id)
365 {
366 tree *n;
367
368 /* We only remap local variables in the current function. */
369
370 /* See if we have remapped this declaration. */
371
372 n = id->decl_map->get (decl);
373
374 if (!n && processing_debug_stmt)
375 {
376 processing_debug_stmt = -1;
377 return decl;
378 }
379
380 /* If we didn't already have an equivalent for this declaration,
381 create one now. */
382 if (!n)
383 {
384 /* Make a copy of the variable or label. */
385 tree t = id->copy_decl (decl, id);
386
387 /* Remember it, so that if we encounter this local entity again
388 we can reuse this copy. Do this early because remap_type may
389 need this decl for TYPE_STUB_DECL. */
390 insert_decl_map (id, decl, t);
391
392 if (!DECL_P (t))
393 return t;
394
395 /* Remap types, if necessary. */
396 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
397 if (TREE_CODE (t) == TYPE_DECL)
398 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
399
400 /* Remap sizes as necessary. */
401 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
402 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
403
404 /* If fields, do likewise for offset and qualifier. */
405 if (TREE_CODE (t) == FIELD_DECL)
406 {
407 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
408 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
409 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
410 }
411
412 return t;
413 }
414
415 if (id->do_not_unshare)
416 return *n;
417 else
418 return unshare_expr (*n);
419 }
420
421 static tree
422 remap_type_1 (tree type, copy_body_data *id)
423 {
424 tree new_tree, t;
425
 426 /* We do need a copy. Build and register it now. If this is a pointer or
427 reference type, remap the designated type and make a new pointer or
428 reference type. */
429 if (TREE_CODE (type) == POINTER_TYPE)
430 {
431 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
432 TYPE_MODE (type),
433 TYPE_REF_CAN_ALIAS_ALL (type));
434 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
435 new_tree = build_type_attribute_qual_variant (new_tree,
436 TYPE_ATTRIBUTES (type),
437 TYPE_QUALS (type));
438 insert_decl_map (id, type, new_tree);
439 return new_tree;
440 }
441 else if (TREE_CODE (type) == REFERENCE_TYPE)
442 {
443 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
444 TYPE_MODE (type),
445 TYPE_REF_CAN_ALIAS_ALL (type));
446 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
447 new_tree = build_type_attribute_qual_variant (new_tree,
448 TYPE_ATTRIBUTES (type),
449 TYPE_QUALS (type));
450 insert_decl_map (id, type, new_tree);
451 return new_tree;
452 }
453 else
454 new_tree = copy_node (type);
455
456 insert_decl_map (id, type, new_tree);
457
458 /* This is a new type, not a copy of an old type. Need to reassociate
459 variants. We can handle everything except the main variant lazily. */
460 t = TYPE_MAIN_VARIANT (type);
461 if (type != t)
462 {
463 t = remap_type (t, id);
464 TYPE_MAIN_VARIANT (new_tree) = t;
465 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
466 TYPE_NEXT_VARIANT (t) = new_tree;
467 }
468 else
469 {
470 TYPE_MAIN_VARIANT (new_tree) = new_tree;
471 TYPE_NEXT_VARIANT (new_tree) = NULL;
472 }
473
474 if (TYPE_STUB_DECL (type))
475 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
476
477 /* Lazily create pointer and reference types. */
478 TYPE_POINTER_TO (new_tree) = NULL;
479 TYPE_REFERENCE_TO (new_tree) = NULL;
480
 481 /* Copy all types that may contain references to local variables; be sure to
 482 preserve sharing between the type and its main variant when possible. */
483 switch (TREE_CODE (new_tree))
484 {
485 case INTEGER_TYPE:
486 case REAL_TYPE:
487 case FIXED_POINT_TYPE:
488 case ENUMERAL_TYPE:
489 case BOOLEAN_TYPE:
490 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
491 {
492 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
493 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
494
495 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
496 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
497 }
498 else
499 {
500 t = TYPE_MIN_VALUE (new_tree);
501 if (t && TREE_CODE (t) != INTEGER_CST)
502 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
503
504 t = TYPE_MAX_VALUE (new_tree);
505 if (t && TREE_CODE (t) != INTEGER_CST)
506 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
507 }
508 return new_tree;
509
510 case FUNCTION_TYPE:
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
513 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
514 else
515 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
516 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
518 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
519 else
520 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
521 return new_tree;
522
523 case ARRAY_TYPE:
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
525 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
526 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
527 else
528 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
529
530 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
531 {
532 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
533 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
534 }
535 else
536 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
537 break;
538
539 case RECORD_TYPE:
540 case UNION_TYPE:
541 case QUAL_UNION_TYPE:
542 if (TYPE_MAIN_VARIANT (type) != type
543 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
544 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
545 else
546 {
547 tree f, nf = NULL;
548
549 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
550 {
551 t = remap_decl (f, id);
552 DECL_CONTEXT (t) = new_tree;
553 DECL_CHAIN (t) = nf;
554 nf = t;
555 }
556 TYPE_FIELDS (new_tree) = nreverse (nf);
557 }
558 break;
559
560 case OFFSET_TYPE:
561 default:
562 /* Shouldn't have been thought variable sized. */
563 gcc_unreachable ();
564 }
565
 566 /* All variants of the type share the same size, so use the already remapped data. */
567 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
568 {
569 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
570 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
571
572 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
573 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
574 }
575 else
576 {
577 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
578 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
579 }
580
581 return new_tree;
582 }
583
584 tree
585 remap_type (tree type, copy_body_data *id)
586 {
587 tree *node;
588 tree tmp;
589
590 if (type == NULL)
591 return type;
592
593 /* See if we have remapped this type. */
594 node = id->decl_map->get (type);
595 if (node)
596 return *node;
597
598 /* The type only needs remapping if it's variably modified. */
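  /* For instance (illustration only), an array type such as int[n] whose
     bound refers to a local variable or parameter of the source function
     is variably modified and must be remapped; an ordinary type like int
     simply maps to itself.  */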
599 if (! variably_modified_type_p (type, id->src_fn))
600 {
601 insert_decl_map (id, type, type);
602 return type;
603 }
604
605 id->remapping_type_depth++;
606 tmp = remap_type_1 (type, id);
607 id->remapping_type_depth--;
608
609 return tmp;
610 }
611
612 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
613
614 static bool
615 can_be_nonlocal (tree decl, copy_body_data *id)
616 {
 617 /* We cannot duplicate function decls. */
618 if (TREE_CODE (decl) == FUNCTION_DECL)
619 return true;
620
621 /* Local static vars must be non-local or we get multiple declaration
622 problems. */
623 if (TREE_CODE (decl) == VAR_DECL
624 && !auto_var_in_fn_p (decl, id->src_fn))
625 return true;
626
627 return false;
628 }
629
630 static tree
631 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
632 copy_body_data *id)
633 {
634 tree old_var;
635 tree new_decls = NULL_TREE;
636
637 /* Remap its variables. */
638 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
639 {
640 tree new_var;
641
642 if (can_be_nonlocal (old_var, id))
643 {
644 /* We need to add this variable to the local decls as otherwise
645 nothing else will do so. */
646 if (TREE_CODE (old_var) == VAR_DECL
647 && ! DECL_EXTERNAL (old_var))
648 add_local_decl (cfun, old_var);
649 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
650 && !DECL_IGNORED_P (old_var)
651 && nonlocalized_list)
652 vec_safe_push (*nonlocalized_list, old_var);
653 continue;
654 }
655
656 /* Remap the variable. */
657 new_var = remap_decl (old_var, id);
658
659 /* If we didn't remap this variable, we can't mess with its
660 TREE_CHAIN. If we remapped this variable to the return slot, it's
661 already declared somewhere else, so don't declare it here. */
662
663 if (new_var == id->retvar)
664 ;
665 else if (!new_var)
666 {
667 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
668 && !DECL_IGNORED_P (old_var)
669 && nonlocalized_list)
670 vec_safe_push (*nonlocalized_list, old_var);
671 }
672 else
673 {
674 gcc_assert (DECL_P (new_var));
675 DECL_CHAIN (new_var) = new_decls;
676 new_decls = new_var;
677
678 /* Also copy value-expressions. */
679 if (TREE_CODE (new_var) == VAR_DECL
680 && DECL_HAS_VALUE_EXPR_P (new_var))
681 {
682 tree tem = DECL_VALUE_EXPR (new_var);
683 bool old_regimplify = id->regimplify;
684 id->remapping_type_depth++;
685 walk_tree (&tem, copy_tree_body_r, id, NULL);
686 id->remapping_type_depth--;
687 id->regimplify = old_regimplify;
688 SET_DECL_VALUE_EXPR (new_var, tem);
689 }
690 }
691 }
692
693 return nreverse (new_decls);
694 }
695
 696 /* Copy the BLOCK to contain remapped versions of the variables
 697 therein, and hook the new block into the block-tree. */
698
699 static void
700 remap_block (tree *block, copy_body_data *id)
701 {
702 tree old_block;
703 tree new_block;
704
705 /* Make the new block. */
706 old_block = *block;
707 new_block = make_node (BLOCK);
708 TREE_USED (new_block) = TREE_USED (old_block);
709 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
710 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
711 BLOCK_NONLOCALIZED_VARS (new_block)
712 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
713 *block = new_block;
714
715 /* Remap its variables. */
716 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
717 &BLOCK_NONLOCALIZED_VARS (new_block),
718 id);
719
720 if (id->transform_lang_insert_block)
721 id->transform_lang_insert_block (new_block);
722
723 /* Remember the remapped block. */
724 insert_decl_map (id, old_block, new_block);
725 }
726
727 /* Copy the whole block tree and root it in id->block. */
728 static tree
729 remap_blocks (tree block, copy_body_data *id)
730 {
731 tree t;
732 tree new_tree = block;
733
734 if (!block)
735 return NULL;
736
737 remap_block (&new_tree, id);
738 gcc_assert (new_tree != block);
739 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
740 prepend_lexical_block (new_tree, remap_blocks (t, id));
741 /* Blocks are in arbitrary order, but make things slightly prettier and do
742 not swap order when producing a copy. */
743 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
744 return new_tree;
745 }
746
747 /* Remap the block tree rooted at BLOCK to nothing. */
748 static void
749 remap_blocks_to_null (tree block, copy_body_data *id)
750 {
751 tree t;
752 insert_decl_map (id, block, NULL_TREE);
753 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
754 remap_blocks_to_null (t, id);
755 }
756
757 static void
758 copy_statement_list (tree *tp)
759 {
760 tree_stmt_iterator oi, ni;
761 tree new_tree;
762
763 new_tree = alloc_stmt_list ();
764 ni = tsi_start (new_tree);
765 oi = tsi_start (*tp);
766 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
767 *tp = new_tree;
768
769 for (; !tsi_end_p (oi); tsi_next (&oi))
770 {
771 tree stmt = tsi_stmt (oi);
772 if (TREE_CODE (stmt) == STATEMENT_LIST)
773 /* This copy is not redundant; tsi_link_after will smash this
774 STATEMENT_LIST into the end of the one we're building, and we
775 don't want to do that with the original. */
776 copy_statement_list (&stmt);
777 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
778 }
779 }
780
781 static void
782 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
783 {
784 tree block = BIND_EXPR_BLOCK (*tp);
785 /* Copy (and replace) the statement. */
786 copy_tree_r (tp, walk_subtrees, NULL);
787 if (block)
788 {
789 remap_block (&block, id);
790 BIND_EXPR_BLOCK (*tp) = block;
791 }
792
793 if (BIND_EXPR_VARS (*tp))
794 /* This will remap a lot of the same decls again, but this should be
795 harmless. */
796 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
797 }
798
799
800 /* Create a new gimple_seq by remapping all the statements in BODY
801 using the inlining information in ID. */
802
803 static gimple_seq
804 remap_gimple_seq (gimple_seq body, copy_body_data *id)
805 {
806 gimple_stmt_iterator si;
807 gimple_seq new_body = NULL;
808
809 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
810 {
811 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
812 gimple_seq_add_seq (&new_body, new_stmts);
813 }
814
815 return new_body;
816 }
817
818
819 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
820 block using the mapping information in ID. */
821
822 static gimple
823 copy_gimple_bind (gbind *stmt, copy_body_data *id)
824 {
825 gimple new_bind;
826 tree new_block, new_vars;
827 gimple_seq body, new_body;
828
829 /* Copy the statement. Note that we purposely don't use copy_stmt
830 here because we need to remap statements as we copy. */
831 body = gimple_bind_body (stmt);
832 new_body = remap_gimple_seq (body, id);
833
834 new_block = gimple_bind_block (stmt);
835 if (new_block)
836 remap_block (&new_block, id);
837
838 /* This will remap a lot of the same decls again, but this should be
839 harmless. */
840 new_vars = gimple_bind_vars (stmt);
841 if (new_vars)
842 new_vars = remap_decls (new_vars, NULL, id);
843
844 new_bind = gimple_build_bind (new_vars, new_body, new_block);
845
846 return new_bind;
847 }
848
 849 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
850
851 static bool
852 is_parm (tree decl)
853 {
854 if (TREE_CODE (decl) == SSA_NAME)
855 {
856 decl = SSA_NAME_VAR (decl);
857 if (!decl)
858 return false;
859 }
860
861 return (TREE_CODE (decl) == PARM_DECL);
862 }
863
864 /* Remap the dependence CLIQUE from the source to the destination function
865 as specified in ID. */
866
867 static unsigned short
868 remap_dependence_clique (copy_body_data *id, unsigned short clique)
869 {
870 if (clique == 0)
871 return 0;
872 if (!id->dependence_map)
873 id->dependence_map
874 = new hash_map<unsigned short, unsigned short, dependence_hasher>;
875 bool existed;
876 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
877 if (!existed)
878 newc = ++cfun->last_clique;
879 return newc;
880 }
881
882 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
883 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
 884 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
 885 recursing into the child nodes of *TP. */
886
887 static tree
888 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
889 {
890 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
891 copy_body_data *id = (copy_body_data *) wi_p->info;
892 tree fn = id->src_fn;
893
894 if (TREE_CODE (*tp) == SSA_NAME)
895 {
896 *tp = remap_ssa_name (*tp, id);
897 *walk_subtrees = 0;
898 return NULL;
899 }
900 else if (auto_var_in_fn_p (*tp, fn))
901 {
902 /* Local variables and labels need to be replaced by equivalent
903 variables. We don't want to copy static variables; there's
904 only one of those, no matter how many times we inline the
905 containing function. Similarly for globals from an outer
906 function. */
907 tree new_decl;
908
909 /* Remap the declaration. */
910 new_decl = remap_decl (*tp, id);
911 gcc_assert (new_decl);
912 /* Replace this variable with the copy. */
913 STRIP_TYPE_NOPS (new_decl);
914 /* ??? The C++ frontend uses void * pointer zero to initialize
915 any other type. This confuses the middle-end type verification.
916 As cloned bodies do not go through gimplification again the fixup
917 there doesn't trigger. */
918 if (TREE_CODE (new_decl) == INTEGER_CST
919 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
920 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
921 *tp = new_decl;
922 *walk_subtrees = 0;
923 }
924 else if (TREE_CODE (*tp) == STATEMENT_LIST)
925 gcc_unreachable ();
926 else if (TREE_CODE (*tp) == SAVE_EXPR)
927 gcc_unreachable ();
928 else if (TREE_CODE (*tp) == LABEL_DECL
929 && (!DECL_CONTEXT (*tp)
930 || decl_function_context (*tp) == id->src_fn))
931 /* These may need to be remapped for EH handling. */
932 *tp = remap_decl (*tp, id);
933 else if (TREE_CODE (*tp) == FIELD_DECL)
934 {
935 /* If the enclosing record type is variably_modified_type_p, the field
936 has already been remapped. Otherwise, it need not be. */
937 tree *n = id->decl_map->get (*tp);
938 if (n)
939 *tp = *n;
940 *walk_subtrees = 0;
941 }
942 else if (TYPE_P (*tp))
943 /* Types may need remapping as well. */
944 *tp = remap_type (*tp, id);
945 else if (CONSTANT_CLASS_P (*tp))
946 {
947 /* If this is a constant, we have to copy the node iff the type
948 will be remapped. copy_tree_r will not copy a constant. */
949 tree new_type = remap_type (TREE_TYPE (*tp), id);
950
951 if (new_type == TREE_TYPE (*tp))
952 *walk_subtrees = 0;
953
954 else if (TREE_CODE (*tp) == INTEGER_CST)
955 *tp = wide_int_to_tree (new_type, *tp);
956 else
957 {
958 *tp = copy_node (*tp);
959 TREE_TYPE (*tp) = new_type;
960 }
961 }
962 else
963 {
964 /* Otherwise, just copy the node. Note that copy_tree_r already
965 knows not to copy VAR_DECLs, etc., so this is safe. */
966
967 if (TREE_CODE (*tp) == MEM_REF)
968 {
969 /* We need to re-canonicalize MEM_REFs from inline substitutions
970 that can happen when a pointer argument is an ADDR_EXPR.
971 Recurse here manually to allow that. */
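	  /* For example (purely illustrative), if the caller passed &s.f
	     for a pointer parameter p, a MEM_REF like MEM[(T *)p + 8B]
	     in the callee body gets its base remapped to &s.f here, and
	     the fold_build2 call below re-canonicalizes the resulting
	     MEM_REF so later passes see a proper reference again.  */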
972 tree ptr = TREE_OPERAND (*tp, 0);
973 tree type = remap_type (TREE_TYPE (*tp), id);
974 tree old = *tp;
975 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
976 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
977 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
978 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
979 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
980 if (MR_DEPENDENCE_CLIQUE (old) != 0)
981 {
982 MR_DEPENDENCE_CLIQUE (*tp)
983 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
984 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
985 }
986 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
987 remapped a parameter as the property might be valid only
988 for the parameter itself. */
989 if (TREE_THIS_NOTRAP (old)
990 && (!is_parm (TREE_OPERAND (old, 0))
991 || (!id->transform_parameter && is_parm (ptr))))
992 TREE_THIS_NOTRAP (*tp) = 1;
993 *walk_subtrees = 0;
994 return NULL;
995 }
996
997 /* Here is the "usual case". Copy this tree node, and then
998 tweak some special cases. */
999 copy_tree_r (tp, walk_subtrees, NULL);
1000
1001 if (TREE_CODE (*tp) != OMP_CLAUSE)
1002 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1003
1004 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1005 {
1006 /* The copied TARGET_EXPR has never been expanded, even if the
1007 original node was expanded already. */
1008 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1009 TREE_OPERAND (*tp, 3) = NULL_TREE;
1010 }
1011 else if (TREE_CODE (*tp) == ADDR_EXPR)
1012 {
1013 /* Variable substitution need not be simple. In particular,
1014 the MEM_REF substitution above. Make sure that
1015 TREE_CONSTANT and friends are up-to-date. */
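	  /* E.g. (illustrative) &local, which was invariant, may turn into
	     &MEM[ptr_5] after the operand is remapped; the address is then
	     no longer a gimple invariant and the statement will need
	     regimplification, which the check below requests.  */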
1016 int invariant = is_gimple_min_invariant (*tp);
1017 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1018 recompute_tree_invariant_for_addr_expr (*tp);
1019
1020 /* If this used to be invariant, but is not any longer,
1021 then regimplification is probably needed. */
1022 if (invariant && !is_gimple_min_invariant (*tp))
1023 id->regimplify = true;
1024
1025 *walk_subtrees = 0;
1026 }
1027 }
1028
1029 /* Update the TREE_BLOCK for the cloned expr. */
1030 if (EXPR_P (*tp))
1031 {
1032 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1033 tree old_block = TREE_BLOCK (*tp);
1034 if (old_block)
1035 {
1036 tree *n;
1037 n = id->decl_map->get (TREE_BLOCK (*tp));
1038 if (n)
1039 new_block = *n;
1040 }
1041 TREE_SET_BLOCK (*tp, new_block);
1042 }
1043
1044 /* Keep iterating. */
1045 return NULL_TREE;
1046 }
1047
1048
1049 /* Called from copy_body_id via walk_tree. DATA is really a
1050 `copy_body_data *'. */
1051
1052 tree
1053 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1054 {
1055 copy_body_data *id = (copy_body_data *) data;
1056 tree fn = id->src_fn;
1057 tree new_block;
1058
1059 /* Begin by recognizing trees that we'll completely rewrite for the
1060 inlining context. Our output for these trees is completely
 1061 different from our input (e.g. RETURN_EXPR is deleted and morphs
1062 into an edge). Further down, we'll handle trees that get
1063 duplicated and/or tweaked. */
1064
1065 /* When requested, RETURN_EXPRs should be transformed to just the
1066 contained MODIFY_EXPR. The branch semantics of the return will
1067 be handled elsewhere by manipulating the CFG rather than a statement. */
1068 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1069 {
1070 tree assignment = TREE_OPERAND (*tp, 0);
1071
1072 /* If we're returning something, just turn that into an
1073 assignment into the equivalent of the original RESULT_DECL.
1074 If the "assignment" is just the result decl, the result
1075 decl has already been set (e.g. a recent "foo (&result_decl,
1076 ...)"); just toss the entire RETURN_EXPR. */
1077 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1078 {
1079 /* Replace the RETURN_EXPR with (a copy of) the
1080 MODIFY_EXPR hanging underneath. */
1081 *tp = copy_node (assignment);
1082 }
1083 else /* Else the RETURN_EXPR returns no value. */
1084 {
1085 *tp = NULL;
1086 return (tree) (void *)1;
1087 }
1088 }
1089 else if (TREE_CODE (*tp) == SSA_NAME)
1090 {
1091 *tp = remap_ssa_name (*tp, id);
1092 *walk_subtrees = 0;
1093 return NULL;
1094 }
1095
1096 /* Local variables and labels need to be replaced by equivalent
1097 variables. We don't want to copy static variables; there's only
1098 one of those, no matter how many times we inline the containing
1099 function. Similarly for globals from an outer function. */
1100 else if (auto_var_in_fn_p (*tp, fn))
1101 {
1102 tree new_decl;
1103
1104 /* Remap the declaration. */
1105 new_decl = remap_decl (*tp, id);
1106 gcc_assert (new_decl);
1107 /* Replace this variable with the copy. */
1108 STRIP_TYPE_NOPS (new_decl);
1109 *tp = new_decl;
1110 *walk_subtrees = 0;
1111 }
1112 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1113 copy_statement_list (tp);
1114 else if (TREE_CODE (*tp) == SAVE_EXPR
1115 || TREE_CODE (*tp) == TARGET_EXPR)
1116 remap_save_expr (tp, id->decl_map, walk_subtrees);
1117 else if (TREE_CODE (*tp) == LABEL_DECL
1118 && (! DECL_CONTEXT (*tp)
1119 || decl_function_context (*tp) == id->src_fn))
1120 /* These may need to be remapped for EH handling. */
1121 *tp = remap_decl (*tp, id);
1122 else if (TREE_CODE (*tp) == BIND_EXPR)
1123 copy_bind_expr (tp, walk_subtrees, id);
1124 /* Types may need remapping as well. */
1125 else if (TYPE_P (*tp))
1126 *tp = remap_type (*tp, id);
1127
1128 /* If this is a constant, we have to copy the node iff the type will be
1129 remapped. copy_tree_r will not copy a constant. */
1130 else if (CONSTANT_CLASS_P (*tp))
1131 {
1132 tree new_type = remap_type (TREE_TYPE (*tp), id);
1133
1134 if (new_type == TREE_TYPE (*tp))
1135 *walk_subtrees = 0;
1136
1137 else if (TREE_CODE (*tp) == INTEGER_CST)
1138 *tp = wide_int_to_tree (new_type, *tp);
1139 else
1140 {
1141 *tp = copy_node (*tp);
1142 TREE_TYPE (*tp) = new_type;
1143 }
1144 }
1145
1146 /* Otherwise, just copy the node. Note that copy_tree_r already
1147 knows not to copy VAR_DECLs, etc., so this is safe. */
1148 else
1149 {
1150 /* Here we handle trees that are not completely rewritten.
1151 First we detect some inlining-induced bogosities for
1152 discarding. */
1153 if (TREE_CODE (*tp) == MODIFY_EXPR
1154 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1155 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1156 {
1157 /* Some assignments VAR = VAR; don't generate any rtl code
1158 and thus don't count as variable modification. Avoid
1159 keeping bogosities like 0 = 0. */
1160 tree decl = TREE_OPERAND (*tp, 0), value;
1161 tree *n;
1162
1163 n = id->decl_map->get (decl);
1164 if (n)
1165 {
1166 value = *n;
1167 STRIP_TYPE_NOPS (value);
1168 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1169 {
1170 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1171 return copy_tree_body_r (tp, walk_subtrees, data);
1172 }
1173 }
1174 }
1175 else if (TREE_CODE (*tp) == INDIRECT_REF)
1176 {
1177 /* Get rid of *& from inline substitutions that can happen when a
1178 pointer argument is an ADDR_EXPR. */
1179 tree decl = TREE_OPERAND (*tp, 0);
1180 tree *n = id->decl_map->get (decl);
1181 if (n)
1182 {
1183 /* If we happen to get an ADDR_EXPR in n->value, strip
1184 it manually here as we'll eventually get ADDR_EXPRs
1185 which lie about their types pointed to. In this case
1186 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1187 but we absolutely rely on that. As fold_indirect_ref
1188 does other useful transformations, try that first, though. */
1189 tree type = TREE_TYPE (*tp);
1190 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1191 tree old = *tp;
1192 *tp = gimple_fold_indirect_ref (ptr);
1193 if (! *tp)
1194 {
1195 if (TREE_CODE (ptr) == ADDR_EXPR)
1196 {
1197 *tp
1198 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1199 /* ??? We should either assert here or build
1200 a VIEW_CONVERT_EXPR instead of blindly leaking
1201 incompatible types to our IL. */
1202 if (! *tp)
1203 *tp = TREE_OPERAND (ptr, 0);
1204 }
1205 else
1206 {
1207 *tp = build1 (INDIRECT_REF, type, ptr);
1208 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1209 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1210 TREE_READONLY (*tp) = TREE_READONLY (old);
1211 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1212 have remapped a parameter as the property might be
1213 valid only for the parameter itself. */
1214 if (TREE_THIS_NOTRAP (old)
1215 && (!is_parm (TREE_OPERAND (old, 0))
1216 || (!id->transform_parameter && is_parm (ptr))))
1217 TREE_THIS_NOTRAP (*tp) = 1;
1218 }
1219 }
1220 *walk_subtrees = 0;
1221 return NULL;
1222 }
1223 }
1224 else if (TREE_CODE (*tp) == MEM_REF)
1225 {
1226 /* We need to re-canonicalize MEM_REFs from inline substitutions
1227 that can happen when a pointer argument is an ADDR_EXPR.
1228 Recurse here manually to allow that. */
1229 tree ptr = TREE_OPERAND (*tp, 0);
1230 tree type = remap_type (TREE_TYPE (*tp), id);
1231 tree old = *tp;
1232 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1233 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1234 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1235 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1236 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1237 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1238 {
1239 MR_DEPENDENCE_CLIQUE (*tp)
1240 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1241 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1242 }
1243 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1244 remapped a parameter as the property might be valid only
1245 for the parameter itself. */
1246 if (TREE_THIS_NOTRAP (old)
1247 && (!is_parm (TREE_OPERAND (old, 0))
1248 || (!id->transform_parameter && is_parm (ptr))))
1249 TREE_THIS_NOTRAP (*tp) = 1;
1250 *walk_subtrees = 0;
1251 return NULL;
1252 }
1253
1254 /* Here is the "usual case". Copy this tree node, and then
1255 tweak some special cases. */
1256 copy_tree_r (tp, walk_subtrees, NULL);
1257
 1258 /* If EXPR has a block defined, map it to the newly constructed block.
 1259 When inlining we want EXPRs without a block to appear in the block
 1260 of the function call if we are not remapping a type. */
1261 if (EXPR_P (*tp))
1262 {
1263 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1264 if (TREE_BLOCK (*tp))
1265 {
1266 tree *n;
1267 n = id->decl_map->get (TREE_BLOCK (*tp));
1268 if (n)
1269 new_block = *n;
1270 }
1271 TREE_SET_BLOCK (*tp, new_block);
1272 }
1273
1274 if (TREE_CODE (*tp) != OMP_CLAUSE)
1275 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1276
1277 /* The copied TARGET_EXPR has never been expanded, even if the
1278 original node was expanded already. */
1279 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1280 {
1281 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1282 TREE_OPERAND (*tp, 3) = NULL_TREE;
1283 }
1284
1285 /* Variable substitution need not be simple. In particular, the
1286 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1287 and friends are up-to-date. */
1288 else if (TREE_CODE (*tp) == ADDR_EXPR)
1289 {
1290 int invariant = is_gimple_min_invariant (*tp);
1291 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1292
1293 /* Handle the case where we substituted an INDIRECT_REF
1294 into the operand of the ADDR_EXPR. */
1295 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1296 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1297 else
1298 recompute_tree_invariant_for_addr_expr (*tp);
1299
1300 /* If this used to be invariant, but is not any longer,
1301 then regimplification is probably needed. */
1302 if (invariant && !is_gimple_min_invariant (*tp))
1303 id->regimplify = true;
1304
1305 *walk_subtrees = 0;
1306 }
1307 }
1308
1309 /* Keep iterating. */
1310 return NULL_TREE;
1311 }
1312
1313 /* Helper for remap_gimple_stmt. Given an EH region number for the
1314 source function, map that to the duplicate EH region number in
1315 the destination function. */
1316
1317 static int
1318 remap_eh_region_nr (int old_nr, copy_body_data *id)
1319 {
1320 eh_region old_r, new_r;
1321
1322 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1323 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1324
1325 return new_r->index;
1326 }
1327
1328 /* Similar, but operate on INTEGER_CSTs. */
1329
1330 static tree
1331 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1332 {
1333 int old_nr, new_nr;
1334
1335 old_nr = tree_to_shwi (old_t_nr);
1336 new_nr = remap_eh_region_nr (old_nr, id);
1337
1338 return build_int_cst (integer_type_node, new_nr);
1339 }
1340
1341 /* Helper for copy_bb. Remap statement STMT using the inlining
1342 information in ID. Return the new statement copy. */
1343
1344 static gimple_seq
1345 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1346 {
1347 gimple copy = NULL;
1348 struct walk_stmt_info wi;
1349 bool skip_first = false;
1350 gimple_seq stmts = NULL;
1351
1352 if (is_gimple_debug (stmt)
1353 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1354 return stmts;
1355
1356 /* Begin by recognizing trees that we'll completely rewrite for the
1357 inlining context. Our output for these trees is completely
 1358 different from our input (e.g. RETURN_EXPR is deleted and morphs
1359 into an edge). Further down, we'll handle trees that get
1360 duplicated and/or tweaked. */
1361
1362 /* When requested, GIMPLE_RETURNs should be transformed to just the
1363 contained GIMPLE_ASSIGN. The branch semantics of the return will
1364 be handled elsewhere by manipulating the CFG rather than the
1365 statement. */
1366 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1367 {
1368 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1369 tree retbnd = gimple_return_retbnd (stmt);
1370 tree bndslot = id->retbnd;
1371
1372 if (retbnd && bndslot)
1373 {
1374 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1375 memset (&wi, 0, sizeof (wi));
1376 wi.info = id;
1377 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1378 gimple_seq_add_stmt (&stmts, bndcopy);
1379 }
1380
1381 /* If we're returning something, just turn that into an
1382 assignment into the equivalent of the original RESULT_DECL.
1383 If RETVAL is just the result decl, the result decl has
1384 already been set (e.g. a recent "foo (&result_decl, ...)");
1385 just toss the entire GIMPLE_RETURN. */
1386 if (retval
1387 && (TREE_CODE (retval) != RESULT_DECL
1388 && (TREE_CODE (retval) != SSA_NAME
1389 || ! SSA_NAME_VAR (retval)
1390 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1391 {
1392 copy = gimple_build_assign (id->do_not_unshare
1393 ? id->retvar : unshare_expr (id->retvar),
1394 retval);
1395 /* id->retvar is already substituted. Skip it on later remapping. */
1396 skip_first = true;
1397
 1398 /* We need to copy bounds if we return a structure with pointers
 1399 into an instrumented function. */
1400 if (chkp_function_instrumented_p (id->dst_fn)
1401 && !bndslot
1402 && !BOUNDED_P (id->retvar)
1403 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1404 id->assign_stmts.safe_push (copy);
1405
1406 }
1407 else
1408 return stmts;
1409 }
1410 else if (gimple_has_substatements (stmt))
1411 {
1412 gimple_seq s1, s2;
1413
1414 /* When cloning bodies from the C++ front end, we will be handed bodies
1415 in High GIMPLE form. Handle here all the High GIMPLE statements that
1416 have embedded statements. */
1417 switch (gimple_code (stmt))
1418 {
1419 case GIMPLE_BIND:
1420 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1421 break;
1422
1423 case GIMPLE_CATCH:
1424 {
1425 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1426 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1427 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1428 }
1429 break;
1430
1431 case GIMPLE_EH_FILTER:
1432 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1433 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1434 break;
1435
1436 case GIMPLE_TRY:
1437 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1438 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1439 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1440 break;
1441
1442 case GIMPLE_WITH_CLEANUP_EXPR:
1443 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1444 copy = gimple_build_wce (s1);
1445 break;
1446
1447 case GIMPLE_OMP_PARALLEL:
1448 {
1449 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1450 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1451 copy = gimple_build_omp_parallel
1452 (s1,
1453 gimple_omp_parallel_clauses (omp_par_stmt),
1454 gimple_omp_parallel_child_fn (omp_par_stmt),
1455 gimple_omp_parallel_data_arg (omp_par_stmt));
1456 }
1457 break;
1458
1459 case GIMPLE_OMP_TASK:
1460 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1461 copy = gimple_build_omp_task
1462 (s1,
1463 gimple_omp_task_clauses (stmt),
1464 gimple_omp_task_child_fn (stmt),
1465 gimple_omp_task_data_arg (stmt),
1466 gimple_omp_task_copy_fn (stmt),
1467 gimple_omp_task_arg_size (stmt),
1468 gimple_omp_task_arg_align (stmt));
1469 break;
1470
1471 case GIMPLE_OMP_FOR:
1472 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1473 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1474 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1475 gimple_omp_for_clauses (stmt),
1476 gimple_omp_for_collapse (stmt), s2);
1477 {
1478 size_t i;
1479 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1480 {
1481 gimple_omp_for_set_index (copy, i,
1482 gimple_omp_for_index (stmt, i));
1483 gimple_omp_for_set_initial (copy, i,
1484 gimple_omp_for_initial (stmt, i));
1485 gimple_omp_for_set_final (copy, i,
1486 gimple_omp_for_final (stmt, i));
1487 gimple_omp_for_set_incr (copy, i,
1488 gimple_omp_for_incr (stmt, i));
1489 gimple_omp_for_set_cond (copy, i,
1490 gimple_omp_for_cond (stmt, i));
1491 }
1492 }
1493 break;
1494
1495 case GIMPLE_OMP_MASTER:
1496 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1497 copy = gimple_build_omp_master (s1);
1498 break;
1499
1500 case GIMPLE_OMP_TASKGROUP:
1501 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1502 copy = gimple_build_omp_taskgroup (s1);
1503 break;
1504
1505 case GIMPLE_OMP_ORDERED:
1506 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1507 copy = gimple_build_omp_ordered (s1);
1508 break;
1509
1510 case GIMPLE_OMP_SECTION:
1511 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1512 copy = gimple_build_omp_section (s1);
1513 break;
1514
1515 case GIMPLE_OMP_SECTIONS:
1516 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1517 copy = gimple_build_omp_sections
1518 (s1, gimple_omp_sections_clauses (stmt));
1519 break;
1520
1521 case GIMPLE_OMP_SINGLE:
1522 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1523 copy = gimple_build_omp_single
1524 (s1, gimple_omp_single_clauses (stmt));
1525 break;
1526
1527 case GIMPLE_OMP_TARGET:
1528 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 copy = gimple_build_omp_target
1530 (s1, gimple_omp_target_kind (stmt),
1531 gimple_omp_target_clauses (stmt));
1532 break;
1533
1534 case GIMPLE_OMP_TEAMS:
1535 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1536 copy = gimple_build_omp_teams
1537 (s1, gimple_omp_teams_clauses (stmt));
1538 break;
1539
1540 case GIMPLE_OMP_CRITICAL:
1541 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1542 copy = gimple_build_omp_critical (s1,
1543 gimple_omp_critical_name (
1544 as_a <gomp_critical *> (stmt)));
1545 break;
1546
1547 case GIMPLE_TRANSACTION:
1548 {
1549 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1550 gtransaction *new_trans_stmt;
1551 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1552 id);
1553 copy = new_trans_stmt
1554 = gimple_build_transaction (
1555 s1,
1556 gimple_transaction_label (old_trans_stmt));
1557 gimple_transaction_set_subcode (
1558 new_trans_stmt,
1559 gimple_transaction_subcode (old_trans_stmt));
1560 }
1561 break;
1562
1563 default:
1564 gcc_unreachable ();
1565 }
1566 }
1567 else
1568 {
1569 if (gimple_assign_copy_p (stmt)
1570 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1571 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1572 {
1573 /* Here we handle statements that are not completely rewritten.
1574 First we detect some inlining-induced bogosities for
1575 discarding. */
1576
1577 /* Some assignments VAR = VAR; don't generate any rtl code
1578 and thus don't count as variable modification. Avoid
1579 keeping bogosities like 0 = 0. */
1580 tree decl = gimple_assign_lhs (stmt), value;
1581 tree *n;
1582
1583 n = id->decl_map->get (decl);
1584 if (n)
1585 {
1586 value = *n;
1587 STRIP_TYPE_NOPS (value);
1588 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1589 return NULL;
1590 }
1591 }
1592
1593 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1594 in a block that we aren't copying during tree_function_versioning,
1595 just drop the clobber stmt. */
1596 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1597 {
1598 tree lhs = gimple_assign_lhs (stmt);
1599 if (TREE_CODE (lhs) == MEM_REF
1600 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1601 {
1602 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1603 if (gimple_bb (def_stmt)
1604 && !bitmap_bit_p (id->blocks_to_copy,
1605 gimple_bb (def_stmt)->index))
1606 return NULL;
1607 }
1608 }
1609
1610 if (gimple_debug_bind_p (stmt))
1611 {
1612 gdebug *copy
1613 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1614 gimple_debug_bind_get_value (stmt),
1615 stmt);
1616 id->debug_stmts.safe_push (copy);
1617 gimple_seq_add_stmt (&stmts, copy);
1618 return stmts;
1619 }
1620 if (gimple_debug_source_bind_p (stmt))
1621 {
1622 gdebug *copy = gimple_build_debug_source_bind
1623 (gimple_debug_source_bind_get_var (stmt),
1624 gimple_debug_source_bind_get_value (stmt),
1625 stmt);
1626 id->debug_stmts.safe_push (copy);
1627 gimple_seq_add_stmt (&stmts, copy);
1628 return stmts;
1629 }
1630
1631 /* Create a new deep copy of the statement. */
1632 copy = gimple_copy (stmt);
1633
1634 /* Clear flags that need revisiting. */
1635 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1636 {
1637 if (gimple_call_tail_p (call_stmt))
1638 gimple_call_set_tail (call_stmt, false);
1639 if (gimple_call_from_thunk_p (call_stmt))
1640 gimple_call_set_from_thunk (call_stmt, false);
1641 }
1642
1643 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1644 RESX and EH_DISPATCH. */
1645 if (id->eh_map)
1646 switch (gimple_code (copy))
1647 {
1648 case GIMPLE_CALL:
1649 {
1650 tree r, fndecl = gimple_call_fndecl (copy);
1651 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1652 switch (DECL_FUNCTION_CODE (fndecl))
1653 {
1654 case BUILT_IN_EH_COPY_VALUES:
1655 r = gimple_call_arg (copy, 1);
1656 r = remap_eh_region_tree_nr (r, id);
1657 gimple_call_set_arg (copy, 1, r);
1658 /* FALLTHRU */
1659
1660 case BUILT_IN_EH_POINTER:
1661 case BUILT_IN_EH_FILTER:
1662 r = gimple_call_arg (copy, 0);
1663 r = remap_eh_region_tree_nr (r, id);
1664 gimple_call_set_arg (copy, 0, r);
1665 break;
1666
1667 default:
1668 break;
1669 }
1670
1671 /* Reset alias info if we didn't apply measures to
1672 keep it valid over inlining by setting DECL_PT_UID. */
1673 if (!id->src_cfun->gimple_df
1674 || !id->src_cfun->gimple_df->ipa_pta)
1675 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1676 }
1677 break;
1678
1679 case GIMPLE_RESX:
1680 {
1681 gresx *resx_stmt = as_a <gresx *> (copy);
1682 int r = gimple_resx_region (resx_stmt);
1683 r = remap_eh_region_nr (r, id);
1684 gimple_resx_set_region (resx_stmt, r);
1685 }
1686 break;
1687
1688 case GIMPLE_EH_DISPATCH:
1689 {
1690 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1691 int r = gimple_eh_dispatch_region (eh_dispatch);
1692 r = remap_eh_region_nr (r, id);
1693 gimple_eh_dispatch_set_region (eh_dispatch, r);
1694 }
1695 break;
1696
1697 default:
1698 break;
1699 }
1700 }
1701
1702 /* If STMT has a block defined, map it to the newly constructed
1703 block. */
1704 if (gimple_block (copy))
1705 {
1706 tree *n;
1707 n = id->decl_map->get (gimple_block (copy));
1708 gcc_assert (n);
1709 gimple_set_block (copy, *n);
1710 }
1711
1712 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1713 {
1714 gimple_seq_add_stmt (&stmts, copy);
1715 return stmts;
1716 }
1717
1718 /* Remap all the operands in COPY. */
1719 memset (&wi, 0, sizeof (wi));
1720 wi.info = id;
1721 if (skip_first)
1722 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1723 else
1724 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1725
1726 /* Clear the copied virtual operands. We are not remapping them here
1727 but are going to recreate them from scratch. */
1728 if (gimple_has_mem_ops (copy))
1729 {
1730 gimple_set_vdef (copy, NULL_TREE);
1731 gimple_set_vuse (copy, NULL_TREE);
1732 }
1733
1734 gimple_seq_add_stmt (&stmts, copy);
1735 return stmts;
1736 }
1737
1738
 1739 /* Copy a basic block, scaling the profile accordingly. Edges will be
 1740 taken care of later. */
1741
1742 static basic_block
1743 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1744 gcov_type count_scale)
1745 {
1746 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1747 basic_block copy_basic_block;
1748 tree decl;
1749 gcov_type freq;
1750 basic_block prev;
1751
1752 /* Search for previous copied basic block. */
1753 prev = bb->prev_bb;
1754 while (!prev->aux)
1755 prev = prev->prev_bb;
1756
1757 /* create_basic_block() will append every new block to
1758 basic_block_info automatically. */
1759 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1760 copy_basic_block->count = apply_scale (bb->count, count_scale);
1761
 1762 /* We are going to rebuild frequencies from scratch. These values
 1763 are only of minor importance for driving canonicalize_loop_headers. */
1764 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1765
1766 /* We recompute frequencies after inlining, so this is quite safe. */
1767 if (freq > BB_FREQ_MAX)
1768 freq = BB_FREQ_MAX;
1769 copy_basic_block->frequency = freq;
1770
1771 copy_gsi = gsi_start_bb (copy_basic_block);
1772
1773 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1774 {
1775 gimple_seq stmts;
1776 gimple stmt = gsi_stmt (gsi);
1777 gimple orig_stmt = stmt;
1778 gimple_stmt_iterator stmts_gsi;
1779 bool stmt_added = false;
1780
1781 id->regimplify = false;
1782 stmts = remap_gimple_stmt (stmt, id);
1783
1784 if (gimple_seq_empty_p (stmts))
1785 continue;
1786
1787 seq_gsi = copy_gsi;
1788
1789 for (stmts_gsi = gsi_start (stmts);
1790 !gsi_end_p (stmts_gsi); )
1791 {
1792 stmt = gsi_stmt (stmts_gsi);
1793
1794 /* Advance iterator now before stmt is moved to seq_gsi. */
1795 gsi_next (&stmts_gsi);
1796
1797 if (gimple_nop_p (stmt))
1798 continue;
1799
1800 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1801 orig_stmt);
1802
1803 /* With the return slot optimization we can end up with
1804 non-gimple (foo *)&this->m; fix that here. */
1805 if (is_gimple_assign (stmt)
1806 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1807 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1808 {
1809 tree new_rhs;
1810 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1811 gimple_assign_rhs1 (stmt),
1812 true, NULL, false,
1813 GSI_CONTINUE_LINKING);
1814 gimple_assign_set_rhs1 (stmt, new_rhs);
1815 id->regimplify = false;
1816 }
1817
1818 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1819
1820 if (id->regimplify)
1821 gimple_regimplify_operands (stmt, &seq_gsi);
1822
1823 stmt_added = true;
1824 }
1825
1826 if (!stmt_added)
1827 continue;
1828
1829 /* If copy_basic_block was empty at the start of this iteration,
1830 call gsi_start_bb again to get at the newly added statements. */
1831 if (gsi_end_p (copy_gsi))
1832 copy_gsi = gsi_start_bb (copy_basic_block);
1833 else
1834 gsi_next (&copy_gsi);
1835
1836 /* Process the new statement. The call to gimple_regimplify_operands
1837 possibly turned the statement into multiple statements; we
1838 need to process all of them. */
1839 do
1840 {
1841 tree fn;
1842 gcall *call_stmt;
1843
1844 stmt = gsi_stmt (copy_gsi);
1845 call_stmt = dyn_cast <gcall *> (stmt);
1846 if (call_stmt
1847 && gimple_call_va_arg_pack_p (call_stmt)
1848 && id->call_stmt)
1849 {
1850 /* __builtin_va_arg_pack () should be replaced by
1851 all arguments corresponding to ... in the caller. */
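/* A hypothetical example: if the callee body contains
     bar (1, __builtin_va_arg_pack ());
   and was inlined from a call
     callee (7, a, b);
   where the callee is declared as callee (int x, ...), the pack
   expands to the anonymous arguments and the copied call becomes
     bar (1, a, b);  */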
1852 tree p;
1853 gcall *new_call;
1854 vec<tree> argarray;
1855 size_t nargs = gimple_call_num_args (id->call_stmt);
1856 size_t n, i, nargs_to_copy;
1857 bool remove_bounds = false;
1858
1859 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1860 nargs--;
1861
1862 /* Bounds should be removed from the arg pack when we
1863 handle a non-instrumented call in an instrumented
1864 function. */
1865 nargs_to_copy = nargs;
1866 if (gimple_call_with_bounds_p (id->call_stmt)
1867 && !gimple_call_with_bounds_p (stmt))
1868 {
1869 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1870 i < gimple_call_num_args (id->call_stmt);
1871 i++)
1872 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1873 nargs_to_copy--;
1874 remove_bounds = true;
1875 }
1876
1877 /* Create the new array of arguments. */
1878 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1879 argarray.create (n);
1880 argarray.safe_grow_cleared (n);
1881
1882 /* Copy all the arguments before '...' */
1883 memcpy (argarray.address (),
1884 gimple_call_arg_ptr (call_stmt, 0),
1885 gimple_call_num_args (call_stmt) * sizeof (tree));
1886
1887 if (remove_bounds)
1888 {
1889 /* Append the rest of the arguments, removing bounds. */
1890 unsigned cur = gimple_call_num_args (call_stmt);
1892 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1893 i < gimple_call_num_args (id->call_stmt);
1894 i++)
1895 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1896 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1897 gcc_assert (cur == n);
1898 }
1899 else
1900 {
1901 /* Append the arguments passed in '...' */
1902 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1903 gimple_call_arg_ptr (id->call_stmt, 0)
1904 + (gimple_call_num_args (id->call_stmt) - nargs),
1905 nargs * sizeof (tree));
1906 }
1907
1908 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1909 argarray);
1910
1911 argarray.release ();
1912
1913 /* Copy all GIMPLE_CALL flags, location and block, except
1914 GF_CALL_VA_ARG_PACK. */
1915 gimple_call_copy_flags (new_call, call_stmt);
1916 gimple_call_set_va_arg_pack (new_call, false);
1917 gimple_set_location (new_call, gimple_location (stmt));
1918 gimple_set_block (new_call, gimple_block (stmt));
1919 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1920
1921 gsi_replace (&copy_gsi, new_call, false);
1922 stmt = new_call;
1923 }
1924 else if (call_stmt
1925 && id->call_stmt
1926 && (decl = gimple_call_fndecl (stmt))
1927 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1928 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1929 {
1930 /* __builtin_va_arg_pack_len () should be replaced by
1931 the number of anonymous arguments. */
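/* For instance (hypothetical): with a callee declared as
   callee (int x, ...) inlined from the call callee (7, a, b), there
   are two anonymous arguments, so the builtin folds to the constant 2;
   pointer-bounds arguments added by instrumentation are not counted.  */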
1932 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1933 tree count, p;
1934 gimple new_stmt;
1935
1936 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1937 nargs--;
1938
1939 /* For instrumented calls we should ignore bounds. */
1940 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1941 i < gimple_call_num_args (id->call_stmt);
1942 i++)
1943 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1944 nargs--;
1945
1946 count = build_int_cst (integer_type_node, nargs);
1947 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1948 gsi_replace (&copy_gsi, new_stmt, false);
1949 stmt = new_stmt;
1950 }
1951 else if (call_stmt
1952 && id->call_stmt
1953 && gimple_call_internal_p (stmt)
1954 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1955 {
1956 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1957 gsi_remove (&copy_gsi, false);
1958 continue;
1959 }
1960
1961 /* Statements produced by inlining can be unfolded, especially
1962 when we constant propagated some operands. We can't fold
1963 them right now for two reasons:
1964 1) folding requires SSA_NAME_DEF_STMTs to be correct
1965 2) we can't change function calls to builtins.
1966 So we just mark the statement for later folding. We mark
1967 all new statements, instead of just the statements that have changed
1968 by some nontrivial substitution, so even statements made
1969 foldable indirectly are updated. If this turns out to be
1970 expensive, copy_body can be told to watch for nontrivial
1971 changes. */
1972 if (id->statements_to_fold)
1973 id->statements_to_fold->add (stmt);
1974
1975 /* We're duplicating a CALL_EXPR. Find any corresponding
1976 callgraph edges and update or duplicate them. */
1977 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1978 {
1979 struct cgraph_edge *edge;
1980
1981 switch (id->transform_call_graph_edges)
1982 {
1983 case CB_CGE_DUPLICATE:
1984 edge = id->src_node->get_edge (orig_stmt);
1985 if (edge)
1986 {
1987 int edge_freq = edge->frequency;
1988 int new_freq;
1989 struct cgraph_edge *old_edge = edge;
1990 edge = edge->clone (id->dst_node, call_stmt,
1991 gimple_uid (stmt),
1992 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1993 true);
1994 /* We could also just rescale the frequency, but
1995 doing so would introduce roundoff errors and make
1996 the verifier unhappy. */
1997 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1998 copy_basic_block);
1999
2000 /* Speculative calls consist of two edges, a direct and an indirect one.
2001 Duplicate the whole thing and distribute frequencies accordingly. */
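/* A sketch of the split done below: with D and I the frequencies of
   the original direct and indirect edges, the duplicated edges get
   roughly new_freq * D / (D + I) and new_freq * I / (D + I)
   respectively, each capped at CGRAPH_FREQ_MAX.  */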
2002 if (edge->speculative)
2003 {
2004 struct cgraph_edge *direct, *indirect;
2005 struct ipa_ref *ref;
2006
2007 gcc_assert (!edge->indirect_unknown_callee);
2008 old_edge->speculative_call_info (direct, indirect, ref);
2009 indirect = indirect->clone (id->dst_node, call_stmt,
2010 gimple_uid (stmt),
2011 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2012 true);
2013 if (old_edge->frequency + indirect->frequency)
2014 {
2015 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2016 (old_edge->frequency + indirect->frequency)),
2017 CGRAPH_FREQ_MAX);
2018 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2019 (old_edge->frequency + indirect->frequency)),
2020 CGRAPH_FREQ_MAX);
2021 }
2022 id->dst_node->clone_reference (ref, stmt);
2023 }
2024 else
2025 {
2026 edge->frequency = new_freq;
2027 if (dump_file
2028 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2029 && (edge_freq > edge->frequency + 10
2030 || edge_freq < edge->frequency - 10))
2031 {
2032 fprintf (dump_file, "Edge frequency estimated by "
2033 "cgraph %i diverges from inliner's estimate %i\n",
2034 edge_freq,
2035 edge->frequency);
2036 fprintf (dump_file,
2037 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2038 bb->index,
2039 bb->frequency,
2040 copy_basic_block->frequency);
2041 }
2042 }
2043 }
2044 break;
2045
2046 case CB_CGE_MOVE_CLONES:
2047 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2048 call_stmt);
2049 edge = id->dst_node->get_edge (stmt);
2050 break;
2051
2052 case CB_CGE_MOVE:
2053 edge = id->dst_node->get_edge (orig_stmt);
2054 if (edge)
2055 edge->set_call_stmt (call_stmt);
2056 break;
2057
2058 default:
2059 gcc_unreachable ();
2060 }
2061
2062 /* Constant propagation on arguments done during inlining
2063 may create a new direct call. Produce an edge for it. */
2064 if ((!edge
2065 || (edge->indirect_inlining_edge
2066 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2067 && id->dst_node->definition
2068 && (fn = gimple_call_fndecl (stmt)) != NULL)
2069 {
2070 struct cgraph_node *dest = cgraph_node::get (fn);
2071
2072 /* We have a missing edge in the callgraph. This can happen
2073 when previous inlining turned an indirect call into a
2074 direct call by constant propagating arguments or when we are
2075 producing a dead clone (for further cloning). In all
2076 other cases we hit a bug (incorrect node sharing is the
2077 most common reason for missing edges). */
2078 gcc_assert (!dest->definition
2079 || dest->address_taken
2080 || !id->src_node->definition
2081 || !id->dst_node->definition);
2082 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2083 id->dst_node->create_edge_including_clones
2084 (dest, orig_stmt, call_stmt, bb->count,
2085 compute_call_stmt_bb_frequency (id->dst_node->decl,
2086 copy_basic_block),
2087 CIF_ORIGINALLY_INDIRECT_CALL);
2088 else
2089 id->dst_node->create_edge (dest, call_stmt,
2090 bb->count,
2091 compute_call_stmt_bb_frequency
2092 (id->dst_node->decl,
2093 copy_basic_block))->inline_failed
2094 = CIF_ORIGINALLY_INDIRECT_CALL;
2095 if (dump_file)
2096 {
2097 fprintf (dump_file, "Created new direct edge to %s\n",
2098 dest->name ());
2099 }
2100 }
2101
2102 notice_special_calls (as_a <gcall *> (stmt));
2103 }
2104
2105 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2106 id->eh_map, id->eh_lp_nr);
2107
2108 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2109 {
2110 ssa_op_iter i;
2111 tree def;
2112
2113 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2114 if (TREE_CODE (def) == SSA_NAME)
2115 SSA_NAME_DEF_STMT (def) = stmt;
2116 }
2117
2118 gsi_next (&copy_gsi);
2119 }
2120 while (!gsi_end_p (copy_gsi));
2121
2122 copy_gsi = gsi_last_bb (copy_basic_block);
2123 }
2124
2125 return copy_basic_block;
2126 }
2127
2128 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2129 SSA form is quite easy, since the dominator relationship for the old basic
2130 blocks does not change.
2131
2132 There is however an exception where inlining might change the dominator
2133 relation across EH edges from basic blocks within the inlined function
2134 to landing pads in the function we inline into.
2135
2136 The function fills in PHI_RESULTs of such PHI nodes if they refer
2137 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2138 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2139 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2140 set, and this means that there will be no overlapping live ranges
2141 for the underlying symbol.
2142
2143 This might change in the future if we allow redirecting of EH edges;
2144 we might then want to change the way we build the CFG pre-inlining
2145 to include all the possible edges. */
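/* A hypothetical example: if the caller's landing pad already has
     x_3 = PHI <x_1(E1), x_2(E2)>
   and a copied block B gains a new EH edge to it, the argument for the
   new edge is copied from the corresponding edge leaving RET_BB, giving
     x_3 = PHI <x_1(E1), x_2(E2), x_2(B)>.  */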
2146 static void
2147 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2148 bool can_throw, bool nonlocal_goto)
2149 {
2150 edge e;
2151 edge_iterator ei;
2152
2153 FOR_EACH_EDGE (e, ei, bb->succs)
2154 if (!e->dest->aux
2155 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2156 {
2157 gphi *phi;
2158 gphi_iterator si;
2159
2160 if (!nonlocal_goto)
2161 gcc_assert (e->flags & EDGE_EH);
2162
2163 if (!can_throw)
2164 gcc_assert (!(e->flags & EDGE_EH));
2165
2166 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2167 {
2168 edge re;
2169
2170 phi = si.phi ();
2171
2172 /* For abnormal goto/call edges the receiver can be the
2173 ENTRY_BLOCK. Do not assert this cannot happen. */
2174
2175 gcc_assert ((e->flags & EDGE_EH)
2176 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2177
2178 re = find_edge (ret_bb, e->dest);
2179 gcc_checking_assert (re);
2180 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2181 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2182
2183 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2184 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2185 }
2186 }
2187 }
2188
2189
2190 /* Copy edges from BB into its copy constructed earlier, scale profile
2191 accordingly. Assume the aux pointers point to the copies of each BB.
2192 Return true if any debug stmts are left after a statement that must
2193 end the basic block. */
2194
2195 static bool
2196 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2197 basic_block abnormal_goto_dest)
2198 {
2199 basic_block new_bb = (basic_block) bb->aux;
2200 edge_iterator ei;
2201 edge old_edge;
2202 gimple_stmt_iterator si;
2203 int flags;
2204 bool need_debug_cleanup = false;
2205
2206 /* Use the indices from the original blocks to create edges for the
2207 new ones. */
2208 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2209 if (!(old_edge->flags & EDGE_EH))
2210 {
2211 edge new_edge;
2212
2213 flags = old_edge->flags;
2214
2215 /* Return edges do get a FALLTHRU flag when they get inlined. */
2216 if (old_edge->dest->index == EXIT_BLOCK
2217 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2218 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2219 flags |= EDGE_FALLTHRU;
2220 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2221 new_edge->count = apply_scale (old_edge->count, count_scale);
2222 new_edge->probability = old_edge->probability;
2223 }
2224
2225 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2226 return false;
2227
2228 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2229 {
2230 gimple copy_stmt;
2231 bool can_throw, nonlocal_goto;
2232
2233 copy_stmt = gsi_stmt (si);
2234 if (!is_gimple_debug (copy_stmt))
2235 update_stmt (copy_stmt);
2236
2237 /* Do this before the possible split_block. */
2238 gsi_next (&si);
2239
2240 /* If this tree could throw an exception, there are two
2241 cases where we need to add abnormal edge(s): the
2242 tree wasn't in a region and there is a "current
2243 region" in the caller; or the original tree had
2244 EH edges. In both cases split the block after the tree,
2245 and add abnormal edge(s) as needed; we need both
2246 those from the callee and the caller.
2247 We check whether the copy can throw, because the const
2248 propagation can change an INDIRECT_REF which throws
2249 into a COMPONENT_REF which doesn't. If the copy
2250 can throw, the original could also throw. */
2251 can_throw = stmt_can_throw_internal (copy_stmt);
2252 nonlocal_goto
2253 = (stmt_can_make_abnormal_goto (copy_stmt)
2254 && !computed_goto_p (copy_stmt));
2255
2256 if (can_throw || nonlocal_goto)
2257 {
2258 if (!gsi_end_p (si))
2259 {
2260 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2261 gsi_next (&si);
2262 if (gsi_end_p (si))
2263 need_debug_cleanup = true;
2264 }
2265 if (!gsi_end_p (si))
2266 /* Note that bb's predecessor edges aren't necessarily
2267 right at this point; split_block doesn't care. */
2268 {
2269 edge e = split_block (new_bb, copy_stmt);
2270
2271 new_bb = e->dest;
2272 new_bb->aux = e->src->aux;
2273 si = gsi_start_bb (new_bb);
2274 }
2275 }
2276
2277 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2278 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2279 else if (can_throw)
2280 make_eh_edges (copy_stmt);
2281
2282 /* If the call we inline cannot make an abnormal goto, do not add
2283 additional abnormal edges but only retain those already present
2284 in the original function body. */
2285 if (abnormal_goto_dest == NULL)
2286 nonlocal_goto = false;
2287 if (nonlocal_goto)
2288 {
2289 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2290
2291 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2292 nonlocal_goto = false;
2293 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2294 in OpenMP regions which aren't allowed to be left abnormally.
2295 So, no need to add abnormal edge in that case. */
2296 else if (is_gimple_call (copy_stmt)
2297 && gimple_call_internal_p (copy_stmt)
2298 && (gimple_call_internal_fn (copy_stmt)
2299 == IFN_ABNORMAL_DISPATCHER)
2300 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2301 nonlocal_goto = false;
2302 else
2303 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2304 }
2305
2306 if ((can_throw || nonlocal_goto)
2307 && gimple_in_ssa_p (cfun))
2308 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2309 can_throw, nonlocal_goto);
2310 }
2311 return need_debug_cleanup;
2312 }
2313
2314 /* Copy the PHIs. All blocks and edges are copied, some blocks
2315 were possibly split and new outgoing EH edges inserted.
2316 BB points to the block of the original function and AUX pointers
2317 link the original and newly copied blocks. */
2318
2319 static void
2320 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2321 {
2322 basic_block const new_bb = (basic_block) bb->aux;
2323 edge_iterator ei;
2324 gphi *phi;
2325 gphi_iterator si;
2326 edge new_edge;
2327 bool inserted = false;
2328
2329 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2330 {
2331 tree res, new_res;
2332 gphi *new_phi;
2333
2334 phi = si.phi ();
2335 res = PHI_RESULT (phi);
2336 new_res = res;
2337 if (!virtual_operand_p (res))
2338 {
2339 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2340 new_phi = create_phi_node (new_res, new_bb);
2341 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2342 {
2343 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2344 tree arg;
2345 tree new_arg;
2346 edge_iterator ei2;
2347 location_t locus;
2348
2349 /* When doing partial cloning, we allow PHIs on the entry block
2350 as long as all the arguments are the same. Find any input
2351 edge to see which argument to copy. */
2352 if (!old_edge)
2353 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2354 if (!old_edge->src->aux)
2355 break;
2356
2357 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2358 new_arg = arg;
2359 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2360 gcc_assert (new_arg);
2361 /* With the return slot optimization we can end up with
2362 non-gimple (foo *)&this->m; fix that here. */
2363 if (TREE_CODE (new_arg) != SSA_NAME
2364 && TREE_CODE (new_arg) != FUNCTION_DECL
2365 && !is_gimple_val (new_arg))
2366 {
2367 gimple_seq stmts = NULL;
2368 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2369 gsi_insert_seq_on_edge (new_edge, stmts);
2370 inserted = true;
2371 }
2372 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2373 if (LOCATION_BLOCK (locus))
2374 {
2375 tree *n;
2376 n = id->decl_map->get (LOCATION_BLOCK (locus));
2377 gcc_assert (n);
2378 if (*n)
2379 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2380 else
2381 locus = LOCATION_LOCUS (locus);
2382 }
2383 else
2384 locus = LOCATION_LOCUS (locus);
2385
2386 add_phi_arg (new_phi, new_arg, new_edge, locus);
2387 }
2388 }
2389 }
2390
2391 /* Commit the delayed edge insertions. */
2392 if (inserted)
2393 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2394 gsi_commit_one_edge_insert (new_edge, NULL);
2395 }
2396
2397
2398 /* Wrapper for remap_decl so it can be used as a callback. */
2399
2400 static tree
2401 remap_decl_1 (tree decl, void *data)
2402 {
2403 return remap_decl (decl, (copy_body_data *) data);
2404 }
2405
2406 /* Build struct function and associated data structures for the new clone
2407 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2408 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2409
2410 static void
2411 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2412 {
2413 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2414 gcov_type count_scale;
2415
2416 if (!DECL_ARGUMENTS (new_fndecl))
2417 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2418 if (!DECL_RESULT (new_fndecl))
2419 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2420
2421 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2422 count_scale
2423 = GCOV_COMPUTE_SCALE (count,
2424 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2425 else
2426 count_scale = REG_BR_PROB_BASE;
2427
2428 /* Register specific tree functions. */
2429 gimple_register_cfg_hooks ();
2430
2431 /* Get clean struct function. */
2432 push_struct_function (new_fndecl);
2433
2434 /* We will rebuild these, so just sanity check that they are empty. */
2435 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2436 gcc_assert (cfun->local_decls == NULL);
2437 gcc_assert (cfun->cfg == NULL);
2438 gcc_assert (cfun->decl == new_fndecl);
2439
2440 /* Copy items we preserve during cloning. */
2441 cfun->static_chain_decl = src_cfun->static_chain_decl;
2442 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2443 cfun->function_end_locus = src_cfun->function_end_locus;
2444 cfun->curr_properties = src_cfun->curr_properties;
2445 cfun->last_verified = src_cfun->last_verified;
2446 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2447 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2448 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2449 cfun->stdarg = src_cfun->stdarg;
2450 cfun->after_inlining = src_cfun->after_inlining;
2451 cfun->can_throw_non_call_exceptions
2452 = src_cfun->can_throw_non_call_exceptions;
2453 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2454 cfun->returns_struct = src_cfun->returns_struct;
2455 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2456
2457 init_empty_tree_cfg ();
2458
2459 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2460 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2461 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2462 REG_BR_PROB_BASE);
2463 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2464 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2465 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2466 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2467 REG_BR_PROB_BASE);
2468 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2469 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2470 if (src_cfun->eh)
2471 init_eh_for_function ();
2472
2473 if (src_cfun->gimple_df)
2474 {
2475 init_tree_ssa (cfun);
2476 cfun->gimple_df->in_ssa_p = true;
2477 init_ssa_operands (cfun);
2478 }
2479 }
2480
2481 /* Helper function for copy_cfg_body. Move debug stmts from the end
2482 of NEW_BB to the beginning of successor basic blocks when needed. If the
2483 successor has multiple predecessors, reset the debug stmts' values;
2484 otherwise keep them. */
2485
2486 static void
2487 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2488 {
2489 edge e;
2490 edge_iterator ei;
2491 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2492
2493 if (gsi_end_p (si)
2494 || gsi_one_before_end_p (si)
2495 || !(stmt_can_throw_internal (gsi_stmt (si))
2496 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2497 return;
2498
2499 FOR_EACH_EDGE (e, ei, new_bb->succs)
2500 {
2501 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2502 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2503 while (is_gimple_debug (gsi_stmt (ssi)))
2504 {
2505 gimple stmt = gsi_stmt (ssi);
2506 gdebug *new_stmt;
2507 tree var;
2508 tree value;
2509
2510 /* For the last edge move the debug stmts instead of copying
2511 them. */
2512 if (ei_one_before_end_p (ei))
2513 {
2514 si = ssi;
2515 gsi_prev (&ssi);
2516 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2517 gimple_debug_bind_reset_value (stmt);
2518 gsi_remove (&si, false);
2519 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2520 continue;
2521 }
2522
2523 if (gimple_debug_bind_p (stmt))
2524 {
2525 var = gimple_debug_bind_get_var (stmt);
2526 if (single_pred_p (e->dest))
2527 {
2528 value = gimple_debug_bind_get_value (stmt);
2529 value = unshare_expr (value);
2530 }
2531 else
2532 value = NULL_TREE;
2533 new_stmt = gimple_build_debug_bind (var, value, stmt);
2534 }
2535 else if (gimple_debug_source_bind_p (stmt))
2536 {
2537 var = gimple_debug_source_bind_get_var (stmt);
2538 value = gimple_debug_source_bind_get_value (stmt);
2539 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2540 }
2541 else
2542 gcc_unreachable ();
2543 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2544 id->debug_stmts.safe_push (new_stmt);
2545 gsi_prev (&ssi);
2546 }
2547 }
2548 }
2549
2550 /* Make a copy of the sub-loops of SRC_PARENT and place them
2551 as children of DEST_PARENT. */
2552
2553 static void
2554 copy_loops (copy_body_data *id,
2555 struct loop *dest_parent, struct loop *src_parent)
2556 {
2557 struct loop *src_loop = src_parent->inner;
2558 while (src_loop)
2559 {
2560 if (!id->blocks_to_copy
2561 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2562 {
2563 struct loop *dest_loop = alloc_loop ();
2564
2565 /* Assign the new loop its header and latch and associate
2566 those with the new loop. */
2567 dest_loop->header = (basic_block)src_loop->header->aux;
2568 dest_loop->header->loop_father = dest_loop;
2569 if (src_loop->latch != NULL)
2570 {
2571 dest_loop->latch = (basic_block)src_loop->latch->aux;
2572 dest_loop->latch->loop_father = dest_loop;
2573 }
2574
2575 /* Copy loop meta-data. */
2576 copy_loop_info (src_loop, dest_loop);
2577
2578 /* Finally place it into the loop array and the loop tree. */
2579 place_new_loop (cfun, dest_loop);
2580 flow_loop_tree_node_add (dest_parent, dest_loop);
2581
2582 dest_loop->safelen = src_loop->safelen;
2583 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2584 if (src_loop->force_vectorize)
2585 {
2586 dest_loop->force_vectorize = true;
2587 cfun->has_force_vectorize_loops = true;
2588 }
2589 if (src_loop->simduid)
2590 {
2591 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2592 cfun->has_simduid_loops = true;
2593 }
2594
2595 /* Recurse. */
2596 copy_loops (id, dest_loop, src_loop);
2597 }
2598 src_loop = src_loop->next;
2599 }
2600 }
2601
2602 /* Call cgraph_edge::redirect_call_stmt_to_callee on all calls in BB. */
2603
2604 void
2605 redirect_all_calls (copy_body_data * id, basic_block bb)
2606 {
2607 gimple_stmt_iterator si;
2608 gimple last = last_stmt (bb);
2609 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2610 {
2611 gimple stmt = gsi_stmt (si);
2612 if (is_gimple_call (stmt))
2613 {
2614 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2615 if (edge)
2616 {
2617 edge->redirect_call_stmt_to_callee ();
2618 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2619 gimple_purge_dead_eh_edges (bb);
2620 }
2621 }
2622 }
2623 }
2624
2625 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2626 with each bb's frequency. Used when NODE has a 0-weight entry
2627 but we are about to inline it into a non-zero count call bb.
2628 See the comments for handle_missing_profiles() in predict.c for
2629 when this can happen for COMDATs. */
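/* A minimal arithmetic sketch: each block receives
     bb->count = COUNT * bb->frequency / BB_FREQ_MAX,
   so with COUNT = 1000 a block whose frequency is BB_FREQ_MAX / 4 ends
   up with a count of about 250; edge counts then follow from the edge
   probabilities.  */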
2630
2631 void
2632 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2633 {
2634 basic_block bb;
2635 edge_iterator ei;
2636 edge e;
2637 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2638
2639 FOR_ALL_BB_FN (bb, fn)
2640 {
2641 bb->count = apply_scale (count,
2642 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2643 FOR_EACH_EDGE (e, ei, bb->succs)
2644 e->count = apply_probability (e->src->count, e->probability);
2645 }
2646 }
2647
2648 /* Make a copy of the body of FN so that it can be inserted inline in
2649 another function. Walks FN via CFG, returns new fndecl. */
2650
2651 static tree
2652 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2653 basic_block entry_block_map, basic_block exit_block_map,
2654 basic_block new_entry)
2655 {
2656 tree callee_fndecl = id->src_fn;
2657 /* Original cfun for the callee, doesn't change. */
2658 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2659 struct function *cfun_to_copy;
2660 basic_block bb;
2661 tree new_fndecl = NULL;
2662 bool need_debug_cleanup = false;
2663 gcov_type count_scale;
2664 int last;
2665 int incoming_frequency = 0;
2666 gcov_type incoming_count = 0;
2667
2668 /* This can happen for COMDAT routines that end up with 0 counts
2669 despite being called (see the comments for handle_missing_profiles()
2670 in predict.c as to why). Apply counts to the blocks in the callee
2671 before inlining, using the guessed edge frequencies, so that we don't
2672 end up with a 0-count inline body which can confuse downstream
2673 optimizations such as function splitting. */
2674 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2675 {
2676 /* Apply the larger of the call bb count and the total incoming
2677 call edge count to the callee. */
2678 gcov_type in_count = 0;
2679 struct cgraph_edge *in_edge;
2680 for (in_edge = id->src_node->callers; in_edge;
2681 in_edge = in_edge->next_caller)
2682 in_count += in_edge->count;
2683 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2684 }
2685
2686 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2687 count_scale
2688 = GCOV_COMPUTE_SCALE (count,
2689 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2690 else
2691 count_scale = REG_BR_PROB_BASE;
2692
2693 /* Register specific tree functions. */
2694 gimple_register_cfg_hooks ();
2695
2696 /* If we are inlining just a region of the function, make sure to connect
2697 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2698 be part of a loop, we must compute the frequency and probability of
2699 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2700 probabilities of edges incoming from the nonduplicated region. */
2701 if (new_entry)
2702 {
2703 edge e;
2704 edge_iterator ei;
2705
2706 FOR_EACH_EDGE (e, ei, new_entry->preds)
2707 if (!e->src->aux)
2708 {
2709 incoming_frequency += EDGE_FREQUENCY (e);
2710 incoming_count += e->count;
2711 }
2712 incoming_count = apply_scale (incoming_count, count_scale);
2713 incoming_frequency
2714 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2715 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2716 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2717 }
2718
2719 /* We must have a CFG at this point. */
2720 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2721 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2722
2723 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2724
2725 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2726 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2727 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2728 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2729
2730 /* Duplicate any exception-handling regions. */
2731 if (cfun->eh)
2732 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2733 remap_decl_1, id);
2734
2735 /* Use aux pointers to map the original blocks to their copies. */
2736 FOR_EACH_BB_FN (bb, cfun_to_copy)
2737 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2738 {
2739 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2740 bb->aux = new_bb;
2741 new_bb->aux = bb;
2742 new_bb->loop_father = entry_block_map->loop_father;
2743 }
2744
2745 last = last_basic_block_for_fn (cfun);
2746
2747 /* Now that we've duplicated the blocks, duplicate their edges. */
2748 basic_block abnormal_goto_dest = NULL;
2749 if (id->call_stmt
2750 && stmt_can_make_abnormal_goto (id->call_stmt))
2751 {
2752 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2753
2754 bb = gimple_bb (id->call_stmt);
2755 gsi_next (&gsi);
2756 if (gsi_end_p (gsi))
2757 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2758 }
2759 FOR_ALL_BB_FN (bb, cfun_to_copy)
2760 if (!id->blocks_to_copy
2761 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2762 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2763 abnormal_goto_dest);
2764
2765 if (new_entry)
2766 {
2767 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2768 e->probability = REG_BR_PROB_BASE;
2769 e->count = incoming_count;
2770 }
2771
2772 /* Duplicate the loop tree, if available and wanted. */
2773 if (loops_for_fn (src_cfun) != NULL
2774 && current_loops != NULL)
2775 {
2776 copy_loops (id, entry_block_map->loop_father,
2777 get_loop (src_cfun, 0));
2778 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2779 loops_state_set (LOOPS_NEED_FIXUP);
2780 }
2781
2782 /* If the loop tree in the source function needed fixup, mark the
2783 destination loop tree for fixup, too. */
2784 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2785 loops_state_set (LOOPS_NEED_FIXUP);
2786
2787 if (gimple_in_ssa_p (cfun))
2788 FOR_ALL_BB_FN (bb, cfun_to_copy)
2789 if (!id->blocks_to_copy
2790 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2791 copy_phis_for_bb (bb, id);
2792
2793 FOR_ALL_BB_FN (bb, cfun_to_copy)
2794 if (bb->aux)
2795 {
2796 if (need_debug_cleanup
2797 && bb->index != ENTRY_BLOCK
2798 && bb->index != EXIT_BLOCK)
2799 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2800 /* Update call edge destinations. This cannot be done before loop
2801 info is updated, because we may split basic blocks. */
2802 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2803 && bb->index != ENTRY_BLOCK
2804 && bb->index != EXIT_BLOCK)
2805 redirect_all_calls (id, (basic_block)bb->aux);
2806 ((basic_block)bb->aux)->aux = NULL;
2807 bb->aux = NULL;
2808 }
2809
2810 /* Zero out AUX fields of blocks newly created during EH edge
2811 insertion. */
2812 for (; last < last_basic_block_for_fn (cfun); last++)
2813 {
2814 if (need_debug_cleanup)
2815 maybe_move_debug_stmts_to_successors (id,
2816 BASIC_BLOCK_FOR_FN (cfun, last));
2817 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2818 /* Update call edge destinations. This cannot be done before loop
2819 info is updated, because we may split basic blocks. */
2820 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2821 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2822 }
2823 entry_block_map->aux = NULL;
2824 exit_block_map->aux = NULL;
2825
2826 if (id->eh_map)
2827 {
2828 delete id->eh_map;
2829 id->eh_map = NULL;
2830 }
2831 if (id->dependence_map)
2832 {
2833 delete id->dependence_map;
2834 id->dependence_map = NULL;
2835 }
2836
2837 return new_fndecl;
2838 }
2839
2840 /* Copy the debug STMT using ID. We deal with these statements in a
2841 special way: if any variable in their VALUE expression wasn't
2842 remapped yet, we won't remap it, because that would get decl uids
2843 out of sync, causing codegen differences between -g and -g0. If
2844 this arises, we drop the VALUE expression altogether. */
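/* For example (hypothetical names): a bind of the form
     # DEBUG x => y + 1
   whose 'y' was never remapped is turned into
     # DEBUG x => NULL
   rather than forcing a late remap of 'y'.  */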
2845
2846 static void
2847 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2848 {
2849 tree t, *n;
2850 struct walk_stmt_info wi;
2851
2852 if (gimple_block (stmt))
2853 {
2854 n = id->decl_map->get (gimple_block (stmt));
2855 gimple_set_block (stmt, n ? *n : id->block);
2856 }
2857
2858 /* Remap all the operands in STMT. */
2859 memset (&wi, 0, sizeof (wi));
2860 wi.info = id;
2861
2862 processing_debug_stmt = 1;
2863
2864 if (gimple_debug_source_bind_p (stmt))
2865 t = gimple_debug_source_bind_get_var (stmt);
2866 else
2867 t = gimple_debug_bind_get_var (stmt);
2868
2869 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2870 && (n = id->debug_map->get (t)))
2871 {
2872 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2873 t = *n;
2874 }
2875 else if (TREE_CODE (t) == VAR_DECL
2876 && !is_global_var (t)
2877 && !id->decl_map->get (t))
2878 /* T is a non-localized variable. */;
2879 else
2880 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2881
2882 if (gimple_debug_bind_p (stmt))
2883 {
2884 gimple_debug_bind_set_var (stmt, t);
2885
2886 if (gimple_debug_bind_has_value_p (stmt))
2887 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2888 remap_gimple_op_r, &wi, NULL);
2889
2890 /* Punt if any decl couldn't be remapped. */
2891 if (processing_debug_stmt < 0)
2892 gimple_debug_bind_reset_value (stmt);
2893 }
2894 else if (gimple_debug_source_bind_p (stmt))
2895 {
2896 gimple_debug_source_bind_set_var (stmt, t);
2897 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2898 remap_gimple_op_r, &wi, NULL);
2899 /* When inlining, if the source bind refers to one of the optimized-away
2900 parameters, change the source bind into a normal debug bind
2901 referring to the corresponding DEBUG_EXPR_DECL that should have
2902 been bound before the call stmt. */
2903 t = gimple_debug_source_bind_get_value (stmt);
2904 if (t != NULL_TREE
2905 && TREE_CODE (t) == PARM_DECL
2906 && id->call_stmt)
2907 {
2908 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2909 unsigned int i;
2910 if (debug_args != NULL)
2911 {
2912 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2913 if ((**debug_args)[i] == DECL_ORIGIN (t)
2914 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2915 {
2916 t = (**debug_args)[i + 1];
2917 stmt->subcode = GIMPLE_DEBUG_BIND;
2918 gimple_debug_bind_set_value (stmt, t);
2919 break;
2920 }
2921 }
2922 }
2923 }
2924
2925 processing_debug_stmt = 0;
2926
2927 update_stmt (stmt);
2928 }
2929
2930 /* Process deferred debug stmts. In order to give values better odds
2931 of being successfully remapped, we delay the processing of debug
2932 stmts until all other stmts that might require remapping are
2933 processed. */
2934
2935 static void
2936 copy_debug_stmts (copy_body_data *id)
2937 {
2938 size_t i;
2939 gdebug *stmt;
2940
2941 if (!id->debug_stmts.exists ())
2942 return;
2943
2944 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2945 copy_debug_stmt (stmt, id);
2946
2947 id->debug_stmts.release ();
2948 }
2949
2950 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2951 another function. */
2952
2953 static tree
2954 copy_tree_body (copy_body_data *id)
2955 {
2956 tree fndecl = id->src_fn;
2957 tree body = DECL_SAVED_TREE (fndecl);
2958
2959 walk_tree (&body, copy_tree_body_r, id, NULL);
2960
2961 return body;
2962 }
2963
2964 /* Make a copy of the body of FN so that it can be inserted inline in
2965 another function. */
2966
2967 static tree
2968 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2969 basic_block entry_block_map, basic_block exit_block_map,
2970 basic_block new_entry)
2971 {
2972 tree fndecl = id->src_fn;
2973 tree body;
2974
2975 /* If this body has a CFG, walk CFG and copy. */
2976 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2977 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2978 new_entry);
2979 copy_debug_stmts (id);
2980
2981 return body;
2982 }
2983
2984 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2985 defined in function FN, or of a data member thereof. */
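/* E.g. &local or &local.field, where 'local' is an automatic variable
   of FN; setup_one_parameter uses this to avoid propagating such
   addresses when FN is being inlined into itself (recursive inlining).  */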
2986
2987 static bool
2988 self_inlining_addr_expr (tree value, tree fn)
2989 {
2990 tree var;
2991
2992 if (TREE_CODE (value) != ADDR_EXPR)
2993 return false;
2994
2995 var = get_base_address (TREE_OPERAND (value, 0));
2996
2997 return var && auto_var_in_fn_p (var, fn);
2998 }
2999
3000 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3001 lexical block and line number information from base_stmt, if given,
3002 or from the last stmt of the block otherwise. */
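/* In GIMPLE dumps the emitted annotation appears roughly as
     # DEBUG var => value
   (a sketch; nothing is emitted unless -fvar-tracking-assignments is
   in effect, see the checks below).  */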
3003
3004 static gimple
3005 insert_init_debug_bind (copy_body_data *id,
3006 basic_block bb, tree var, tree value,
3007 gimple base_stmt)
3008 {
3009 gimple note;
3010 gimple_stmt_iterator gsi;
3011 tree tracked_var;
3012
3013 if (!gimple_in_ssa_p (id->src_cfun))
3014 return NULL;
3015
3016 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3017 return NULL;
3018
3019 tracked_var = target_for_debug_bind (var);
3020 if (!tracked_var)
3021 return NULL;
3022
3023 if (bb)
3024 {
3025 gsi = gsi_last_bb (bb);
3026 if (!base_stmt && !gsi_end_p (gsi))
3027 base_stmt = gsi_stmt (gsi);
3028 }
3029
3030 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
3031
3032 if (bb)
3033 {
3034 if (!gsi_end_p (gsi))
3035 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3036 else
3037 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3038 }
3039
3040 return note;
3041 }
3042
3043 static void
3044 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3045 {
3046 /* If VAR represents a zero-sized variable, it's possible that the
3047 assignment statement may result in no gimple statements. */
3048 if (init_stmt)
3049 {
3050 gimple_stmt_iterator si = gsi_last_bb (bb);
3051
3052 /* We can end up with init statements that store to a non-register
3053 from a rhs with a conversion. Handle that here by forcing the
3054 rhs into a temporary. gimple_regimplify_operands is not
3055 prepared to do this for us. */
3056 if (!is_gimple_debug (init_stmt)
3057 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3058 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3059 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3060 {
3061 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3062 gimple_expr_type (init_stmt),
3063 gimple_assign_rhs1 (init_stmt));
3064 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3065 GSI_NEW_STMT);
3066 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3067 gimple_assign_set_rhs1 (init_stmt, rhs);
3068 }
3069 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3070 gimple_regimplify_operands (init_stmt, &si);
3071
3072 if (!is_gimple_debug (init_stmt))
3073 {
3074 tree def = gimple_assign_lhs (init_stmt);
3075 insert_init_debug_bind (id, bb, def, def, init_stmt);
3076 }
3077 }
3078 }
3079
3080 /* Initialize parameter P with VALUE. If needed, produce an init statement
3081 at the end of BB. When BB is NULL, we return the init statement to be
3082 output later. */
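/* A hypothetical example: inlining 'int foo (int p)' at a call
   'foo (a_5)' creates a local VAR_DECL for P (say 'p.7'); in SSA form
   either P's default definition is mapped directly to a_5, or an init
   statement such as 'p_1 = a_5' is emitted at the end of BB.  */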
3083 static gimple
3084 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3085 basic_block bb, tree *vars)
3086 {
3087 gimple init_stmt = NULL;
3088 tree var;
3089 tree rhs = value;
3090 tree def = (gimple_in_ssa_p (cfun)
3091 ? ssa_default_def (id->src_cfun, p) : NULL);
3092
3093 if (value
3094 && value != error_mark_node
3095 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3096 {
3097 /* If we can match up types by promotion/demotion do so. */
3098 if (fold_convertible_p (TREE_TYPE (p), value))
3099 rhs = fold_convert (TREE_TYPE (p), value);
3100 else
3101 {
3102 /* ??? For valid programs we should not end up here.
3103 Still if we end up with truly mismatched types here, fall back
3104 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3105 GIMPLE to the following passes. */
3106 if (!is_gimple_reg_type (TREE_TYPE (value))
3107 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3108 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3109 else
3110 rhs = build_zero_cst (TREE_TYPE (p));
3111 }
3112 }
3113
3114 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3115 here since the type of this decl must be visible to the calling
3116 function. */
3117 var = copy_decl_to_var (p, id);
3118
3119 /* Declare this new variable. */
3120 DECL_CHAIN (var) = *vars;
3121 *vars = var;
3122
3123 /* Make gimplifier happy about this variable. */
3124 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3125
3126 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3127 we would not need to create a new variable here at all, if it
3128 weren't for debug info. Still, we can just use the argument
3129 value. */
3130 if (TREE_READONLY (p)
3131 && !TREE_ADDRESSABLE (p)
3132 && value && !TREE_SIDE_EFFECTS (value)
3133 && !def)
3134 {
3135 /* We may produce non-gimple trees by adding NOPs or introduce
3136 invalid sharing when the operand is not really constant.
3137 It is not a big deal to prohibit constant propagation here, as
3138 we will constant propagate in the DOM1 pass anyway. */
3139 if (is_gimple_min_invariant (value)
3140 && useless_type_conversion_p (TREE_TYPE (p),
3141 TREE_TYPE (value))
3142 /* We have to be very careful about ADDR_EXPR. Make sure
3143 the base variable isn't a local variable of the inlined
3144 function, e.g., when doing recursive inlining, direct or
3145 mutually-recursive or whatever, which is why we don't
3146 just test whether fn == current_function_decl. */
3147 && ! self_inlining_addr_expr (value, fn))
3148 {
3149 insert_decl_map (id, p, value);
3150 insert_debug_decl_map (id, p, var);
3151 return insert_init_debug_bind (id, bb, var, value, NULL);
3152 }
3153 }
3154
3155 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3156 that way, when the PARM_DECL is encountered, it will be
3157 automatically replaced by the VAR_DECL. */
3158 insert_decl_map (id, p, var);
3159
3160 /* Even if P was TREE_READONLY, the new VAR should not be.
3161 In the original code, we would have constructed a
3162 temporary, and then the function body would have never
3163 changed the value of P. However, now, we will be
3164 constructing VAR directly. The constructor body may
3165 change its value multiple times as it is being
3166 constructed. Therefore, it must not be TREE_READONLY;
3167 the back-end assumes that a TREE_READONLY variable is
3168 assigned to only once. */
3169 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3170 TREE_READONLY (var) = 0;
3171
3172 /* If there is no setup required and we are in SSA, take the easy route
3173 replacing all SSA names representing the function parameter by the
3174 SSA name passed to the function.
3175
3176 We need to construct a map for the variable anyway, as it might be
3177 used in different SSA names when the parameter is set in the function.
3178
3179 Do the replacement at -O0 for const arguments replaced by a constant.
3180 This is important for builtin_constant_p and other constructs requiring
3181 a constant argument to be visible in the inlined function body. */
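/* For instance (hypothetical): an always_inline 'foo (const int p)'
   called as 'foo (5)': mapping P's default definition straight to the
   constant 5 lets a __builtin_constant_p (p) test in the body fold to
   true even at -O0.  */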
3182 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3183 && (optimize
3184 || (TREE_READONLY (p)
3185 && is_gimple_min_invariant (rhs)))
3186 && (TREE_CODE (rhs) == SSA_NAME
3187 || is_gimple_min_invariant (rhs))
3188 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3189 {
3190 insert_decl_map (id, def, rhs);
3191 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3192 }
3193
3194 /* If the value of the argument is never used, don't bother
3195 initializing it. */
3196 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3197 {
3198 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3199 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3200 }
3201
3202 /* Initialize this VAR_DECL from the equivalent argument. Convert
3203 the argument to the proper type in case it was promoted. */
3204 if (value)
3205 {
3206 if (rhs == error_mark_node)
3207 {
3208 insert_decl_map (id, p, var);
3209 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3210 }
3211
3212 STRIP_USELESS_TYPE_CONVERSION (rhs);
3213
3214 /* If we are in SSA form, properly remap the default definition,
3215 or assign to a dummy SSA name if the parameter is unused and
3216 we are not optimizing. */
3217 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3218 {
3219 if (def)
3220 {
3221 def = remap_ssa_name (def, id);
3222 init_stmt = gimple_build_assign (def, rhs);
3223 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3224 set_ssa_default_def (cfun, var, NULL);
3225 }
3226 else if (!optimize)
3227 {
3228 def = make_ssa_name (var);
3229 init_stmt = gimple_build_assign (def, rhs);
3230 }
3231 }
3232 else
3233 init_stmt = gimple_build_assign (var, rhs);
3234
3235 if (bb && init_stmt)
3236 insert_init_stmt (id, bb, init_stmt);
3237 }
3238 return init_stmt;
3239 }
3240
3241 /* Generate code to initialize the parameters of the function at the
3242 top of the stack in ID from the GIMPLE_CALL STMT. */
3243
3244 static void
3245 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3246 tree fn, basic_block bb)
3247 {
3248 tree parms;
3249 size_t i;
3250 tree p;
3251 tree vars = NULL_TREE;
3252 tree static_chain = gimple_call_chain (stmt);
3253
3254 /* Figure out what the parameters are. */
3255 parms = DECL_ARGUMENTS (fn);
3256
3257 /* Loop through the parameter declarations, replacing each with an
3258 equivalent VAR_DECL, appropriately initialized. */
3259 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3260 {
3261 tree val;
3262 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3263 setup_one_parameter (id, p, val, fn, bb, &vars);
3264 }
3265 /* After remapping the parameters, remap their types. This has to be done
3266 in a second loop over all parameters to appropriately remap
3267 variable-sized arrays when the size is specified in a
3268 parameter following the array. */
3269 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3270 {
3271 tree *varp = id->decl_map->get (p);
3272 if (varp
3273 && TREE_CODE (*varp) == VAR_DECL)
3274 {
3275 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3276 ? ssa_default_def (id->src_cfun, p) : NULL);
3277 tree var = *varp;
3278 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3279 /* Also remap the default definition if it was remapped
3280 to the default definition of the parameter's replacement
3281 by the parameter setup. */
3282 if (def)
3283 {
3284 tree *defp = id->decl_map->get (def);
3285 if (defp
3286 && TREE_CODE (*defp) == SSA_NAME
3287 && SSA_NAME_VAR (*defp) == var)
3288 TREE_TYPE (*defp) = TREE_TYPE (var);
3289 }
3290 }
3291 }
3292
3293 /* Initialize the static chain. */
3294 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3295 gcc_assert (fn != current_function_decl);
3296 if (p)
3297 {
3298 /* No static chain? Seems like a bug in tree-nested.c. */
3299 gcc_assert (static_chain);
3300
3301 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3302 }
3303
3304 declare_inline_vars (id->block, vars);
3305 }
3306
3307
3308 /* Declare a return variable to replace the RESULT_DECL for the
3309 function we are calling. An appropriate DECL_STMT is returned.
3310 The USE_STMT is filled to contain a use of the declaration to
3311 indicate the return value of the function.
3312
3313 RETURN_SLOT, if non-null, is the place where the result is stored. It
3314 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3315 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3316
3317 RETURN_BOUNDS holds a destination for returned bounds.
3318
3319 The return value is a (possibly null) value that holds the result
3320 as seen by the caller. */
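/* A sketch with hypothetical names: for a call 'x = foo ();' where x
   is a simple local, MODIFY_DEST is x and can often be reused directly,
   so the returned use is x itself; otherwise a temporary such as
   'retval.9' is declared and returned as the use, and the inlined
   returns store into it.  */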
3321
3322 static tree
3323 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3324 tree return_bounds, basic_block entry_bb)
3325 {
3326 tree callee = id->src_fn;
3327 tree result = DECL_RESULT (callee);
3328 tree callee_type = TREE_TYPE (result);
3329 tree caller_type;
3330 tree var, use;
3331
3332 /* Handle type-mismatches in the function declaration return type
3333 vs. the call expression. */
3334 if (modify_dest)
3335 caller_type = TREE_TYPE (modify_dest);
3336 else
3337 caller_type = TREE_TYPE (TREE_TYPE (callee));
3338
3339 /* We don't need to do anything for functions that don't return anything. */
3340 if (VOID_TYPE_P (callee_type))
3341 return NULL_TREE;
3342
3343 /* If there was a return slot, then the return value is the
3344 dereferenced address of that object. */
3345 if (return_slot)
3346 {
3347 /* The front end shouldn't have used both return_slot and
3348 a modify expression. */
3349 gcc_assert (!modify_dest);
3350 if (DECL_BY_REFERENCE (result))
3351 {
3352 tree return_slot_addr = build_fold_addr_expr (return_slot);
3353 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3354
3355 /* We are going to construct *&return_slot and we can't do that
3356 for variables believed not to be addressable.
3357
3358 FIXME: This check can possibly trigger, because values returned
3359 via the return slot optimization are not believed to have their
3360 address taken by alias analysis. */
3361 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3362 var = return_slot_addr;
3363 }
3364 else
3365 {
3366 var = return_slot;
3367 gcc_assert (TREE_CODE (var) != SSA_NAME);
3368 if (TREE_ADDRESSABLE (result))
3369 mark_addressable (var);
3370 }
3371 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3372 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3373 && !DECL_GIMPLE_REG_P (result)
3374 && DECL_P (var))
3375 DECL_GIMPLE_REG_P (var) = 0;
3376 use = NULL;
3377 goto done;
3378 }
3379
3380 /* All types requiring non-trivial constructors should have been handled. */
3381 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3382
3383 /* Attempt to avoid creating a new temporary variable. */
3384 if (modify_dest
3385 && TREE_CODE (modify_dest) != SSA_NAME)
3386 {
3387 bool use_it = false;
3388
3389 /* We can't use MODIFY_DEST if there's type promotion involved. */
3390 if (!useless_type_conversion_p (callee_type, caller_type))
3391 use_it = false;
3392
3393 /* ??? If we're assigning to a variable sized type, then we must
3394 reuse the destination variable, because we've no good way to
3395 create variable sized temporaries at this point. */
3396 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3397 use_it = true;
3398
3399 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3400 reuse it as the result of the call directly. Don't do this if
3401 it would promote MODIFY_DEST to addressable. */
3402 else if (TREE_ADDRESSABLE (result))
3403 use_it = false;
3404 else
3405 {
3406 tree base_m = get_base_address (modify_dest);
3407
3408 /* If the base isn't a decl, then it's a pointer, and we don't
3409 know where that's going to go. */
3410 if (!DECL_P (base_m))
3411 use_it = false;
3412 else if (is_global_var (base_m))
3413 use_it = false;
3414 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3415 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3416 && !DECL_GIMPLE_REG_P (result)
3417 && DECL_GIMPLE_REG_P (base_m))
3418 use_it = false;
3419 else if (!TREE_ADDRESSABLE (base_m))
3420 use_it = true;
3421 }
3422
3423 if (use_it)
3424 {
3425 var = modify_dest;
3426 use = NULL;
3427 goto done;
3428 }
3429 }
3430
3431 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3432
3433 var = copy_result_decl_to_var (result, id);
3434 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3435
3436 /* Do not have the rest of GCC warn about this variable as it should
3437 not be visible to the user. */
3438 TREE_NO_WARNING (var) = 1;
3439
3440 declare_inline_vars (id->block, var);
3441
3442 /* Build the use expr. If the return type of the function was
3443 promoted, convert it back to the expected type. */
3444 use = var;
3445 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3446 {
3447 /* If we can match up types by promotion/demotion do so. */
3448 if (fold_convertible_p (caller_type, var))
3449 use = fold_convert (caller_type, var);
3450 else
3451 {
3452 /* ??? For valid programs we should not end up here.
3453 Still if we end up with truly mismatched types here, fall back
3454 to using a MEM_REF to not leak invalid GIMPLE to the following
3455 passes. */
3456 /* Prevent var from being written into SSA form. */
3457 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3458 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3459 DECL_GIMPLE_REG_P (var) = false;
3460 else if (is_gimple_reg_type (TREE_TYPE (var)))
3461 TREE_ADDRESSABLE (var) = true;
3462 use = fold_build2 (MEM_REF, caller_type,
3463 build_fold_addr_expr (var),
3464 build_int_cst (ptr_type_node, 0));
3465 }
3466 }
3467
3468 STRIP_USELESS_TYPE_CONVERSION (use);
3469
3470 if (DECL_BY_REFERENCE (result))
3471 {
3472 TREE_ADDRESSABLE (var) = 1;
3473 var = build_fold_addr_expr (var);
3474 }
3475
3476 done:
3477 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3478 way, when the RESULT_DECL is encountered, it will be
3479 automatically replaced by the VAR_DECL.
3480
3481 When returning by reference, ensure that RESULT_DECL remaps to
3482 gimple_val. */
3483 if (DECL_BY_REFERENCE (result)
3484 && !is_gimple_val (var))
3485 {
3486 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3487 insert_decl_map (id, result, temp);
3488 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3489 its default_def SSA_NAME. */
3490 if (gimple_in_ssa_p (id->src_cfun)
3491 && is_gimple_reg (result))
3492 {
3493 temp = make_ssa_name (temp);
3494 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3495 }
3496 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3497 }
3498 else
3499 insert_decl_map (id, result, var);
3500
3501 /* Remember this so we can ignore it in remap_decls. */
3502 id->retvar = var;
3503
3504 /* If returned bounds are used, then make a var for them. */
3505 if (return_bounds)
3506 {
3507 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3508 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3509 TREE_NO_WARNING (bndtemp) = 1;
3510 declare_inline_vars (id->block, bndtemp);
3511
3512 id->retbnd = bndtemp;
3513 insert_init_stmt (id, entry_bb,
3514 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3515 }
3516
3517 return use;
3518 }
3519
3520 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3521 to a local label. */
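/* For example (using the GNU address-of-label extension), a local
 'static void *p = &&lab;' makes the DECL_INITIAL of 'p' refer to the
 local label 'lab'; copy_forbidden below must reject such functions. */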
3522
3523 static tree
3524 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3525 {
3526 tree node = *nodep;
3527 tree fn = (tree) fnp;
3528
3529 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3530 return node;
3531
3532 if (TYPE_P (node))
3533 *walk_subtrees = 0;
3534
3535 return NULL_TREE;
3536 }
3537
3538 /* Determine if the function can be copied. If so return NULL. If
3539 not return a string describing the reason for failure. */
3540
3541 const char *
3542 copy_forbidden (struct function *fun, tree fndecl)
3543 {
3544 const char *reason = fun->cannot_be_copied_reason;
3545 tree decl;
3546 unsigned ix;
3547
3548 /* Only examine the function once. */
3549 if (fun->cannot_be_copied_set)
3550 return reason;
3551
3552 /* We cannot copy a function that receives a non-local goto
3553 because we cannot remap the destination label used in the
3554 function that is performing the non-local goto. */
3555 /* ??? Actually, this should be possible, if we work at it.
3556 No doubt there's just a handful of places that simply
3557 assume it doesn't happen and don't substitute properly. */
3558 if (fun->has_nonlocal_label)
3559 {
3560 reason = G_("function %q+F can never be copied "
3561 "because it receives a non-local goto");
3562 goto fail;
3563 }
3564
3565 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3566 if (TREE_CODE (decl) == VAR_DECL
3567 && TREE_STATIC (decl)
3568 && !DECL_EXTERNAL (decl)
3569 && DECL_INITIAL (decl)
3570 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3571 has_label_address_in_static_1,
3572 fndecl))
3573 {
3574 reason = G_("function %q+F can never be copied because it saves "
3575 "address of local label in a static variable");
3576 goto fail;
3577 }
3578
3579 fail:
3580 fun->cannot_be_copied_reason = reason;
3581 fun->cannot_be_copied_set = true;
3582 return reason;
3583 }
3584
3585
3586 static const char *inline_forbidden_reason;
3587
3588 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3589 iff a function cannot be inlined. Also sets the reason why. */
3590
3591 static tree
3592 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3593 struct walk_stmt_info *wip)
3594 {
3595 tree fn = (tree) wip->info;
3596 tree t;
3597 gimple stmt = gsi_stmt (*gsi);
3598
3599 switch (gimple_code (stmt))
3600 {
3601 case GIMPLE_CALL:
3602 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3603 this may change the program's memory overhead drastically when the
3604 function using alloca is called in a loop. In the GCC present in
3605 SPEC2000, inlining into schedule_block caused it to require 2GB of
3606 RAM instead of 256MB. Don't do so for alloca calls emitted for
3607 VLA objects, as those can't cause unbounded growth (they're always
3608 wrapped inside stack_save/stack_restore regions). */
3609 if (gimple_alloca_call_p (stmt)
3610 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3611 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3612 {
3613 inline_forbidden_reason
3614 = G_("function %q+F can never be inlined because it uses "
3615 "alloca (override using the always_inline attribute)");
3616 *handled_ops_p = true;
3617 return fn;
3618 }
3619
3620 t = gimple_call_fndecl (stmt);
3621 if (t == NULL_TREE)
3622 break;
3623
3624 /* We cannot inline functions that call setjmp. */
3625 if (setjmp_call_p (t))
3626 {
3627 inline_forbidden_reason
3628 = G_("function %q+F can never be inlined because it uses setjmp");
3629 *handled_ops_p = true;
3630 return t;
3631 }
3632
3633 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3634 switch (DECL_FUNCTION_CODE (t))
3635 {
3636 /* We cannot inline functions that take a variable number of
3637 arguments. */
3638 case BUILT_IN_VA_START:
3639 case BUILT_IN_NEXT_ARG:
3640 case BUILT_IN_VA_END:
3641 inline_forbidden_reason
3642 = G_("function %q+F can never be inlined because it "
3643 "uses variable argument lists");
3644 *handled_ops_p = true;
3645 return t;
3646
3647 case BUILT_IN_LONGJMP:
3648 /* We can't inline functions that call __builtin_longjmp at
3649 all. The non-local goto machinery really requires the
3650 destination be in a different function. If we allow the
3651 function calling __builtin_longjmp to be inlined into the
3652 function calling __builtin_setjmp, Things will Go Awry. */
3653 inline_forbidden_reason
3654 = G_("function %q+F can never be inlined because "
3655 "it uses setjmp-longjmp exception handling");
3656 *handled_ops_p = true;
3657 return t;
3658
3659 case BUILT_IN_NONLOCAL_GOTO:
3660 /* Similarly. */
3661 inline_forbidden_reason
3662 = G_("function %q+F can never be inlined because "
3663 "it uses non-local goto");
3664 *handled_ops_p = true;
3665 return t;
3666
3667 case BUILT_IN_RETURN:
3668 case BUILT_IN_APPLY_ARGS:
3669 /* If a __builtin_apply_args caller would be inlined,
3670 it would be saving arguments of the function it has
3671 been inlined into. Similarly __builtin_return would
3672 return from the function the inline has been inlined into. */
3673 inline_forbidden_reason
3674 = G_("function %q+F can never be inlined because "
3675 "it uses __builtin_return or __builtin_apply_args");
3676 *handled_ops_p = true;
3677 return t;
3678
3679 default:
3680 break;
3681 }
3682 break;
3683
3684 case GIMPLE_GOTO:
3685 t = gimple_goto_dest (stmt);
3686
3687 /* We will not inline a function which uses computed goto. The
3688 addresses of its local labels, which may be tucked into
3689 global storage, are of course not constant across
3690 instantiations, which causes unexpected behavior. */
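/* A typical example is a threaded interpreter dispatching via
 'goto *dispatch_table[op];', where dispatch_table holds &&label values. */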
3691 if (TREE_CODE (t) != LABEL_DECL)
3692 {
3693 inline_forbidden_reason
3694 = G_("function %q+F can never be inlined "
3695 "because it contains a computed goto");
3696 *handled_ops_p = true;
3697 return t;
3698 }
3699 break;
3700
3701 default:
3702 break;
3703 }
3704
3705 *handled_ops_p = false;
3706 return NULL_TREE;
3707 }
3708
3709 /* Return true if FNDECL is a function that cannot be inlined into
3710 another one. */
3711
3712 static bool
3713 inline_forbidden_p (tree fndecl)
3714 {
3715 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3716 struct walk_stmt_info wi;
3717 basic_block bb;
3718 bool forbidden_p = false;
3719
3720 /* First check for shared reasons not to copy the code. */
3721 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3722 if (inline_forbidden_reason != NULL)
3723 return true;
3724
3725 /* Next, walk the statements of the function looking for
3726 constructs we can't handle, or that are non-optimal for inlining. */
3727 hash_set<tree> visited_nodes;
3728 memset (&wi, 0, sizeof (wi));
3729 wi.info = (void *) fndecl;
3730 wi.pset = &visited_nodes;
3731
3732 FOR_EACH_BB_FN (bb, fun)
3733 {
3734 gimple ret;
3735 gimple_seq seq = bb_seq (bb);
3736 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3737 forbidden_p = (ret != NULL);
3738 if (forbidden_p)
3739 break;
3740 }
3741
3742 return forbidden_p;
3743 }
3744 \f
3745 /* Return false if the function FNDECL cannot be inlined on account of its
3746 attributes, true otherwise. */
3747 static bool
3748 function_attribute_inlinable_p (const_tree fndecl)
3749 {
3750 if (targetm.attribute_table)
3751 {
3752 const_tree a;
3753
3754 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3755 {
3756 const_tree name = TREE_PURPOSE (a);
3757 int i;
3758
3759 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3760 if (is_attribute_p (targetm.attribute_table[i].name, name))
3761 return targetm.function_attribute_inlinable_p (fndecl);
3762 }
3763 }
3764
3765 return true;
3766 }
3767
3768 /* Returns nonzero if FN is a function that does not have any
3769 fundamental inline blocking properties. */
3770
3771 bool
3772 tree_inlinable_function_p (tree fn)
3773 {
3774 bool inlinable = true;
3775 bool do_warning;
3776 tree always_inline;
3777
3778 /* If we've already decided this function shouldn't be inlined,
3779 there's no need to check again. */
3780 if (DECL_UNINLINABLE (fn))
3781 return false;
3782
3783 /* We only warn for functions declared `inline' by the user. */
3784 do_warning = (warn_inline
3785 && DECL_DECLARED_INLINE_P (fn)
3786 && !DECL_NO_INLINE_WARNING_P (fn)
3787 && !DECL_IN_SYSTEM_HEADER (fn));
3788
3789 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3790
3791 if (flag_no_inline
3792 && always_inline == NULL)
3793 {
3794 if (do_warning)
3795 warning (OPT_Winline, "function %q+F can never be inlined because it "
3796 "is suppressed using -fno-inline", fn);
3797 inlinable = false;
3798 }
3799
3800 else if (!function_attribute_inlinable_p (fn))
3801 {
3802 if (do_warning)
3803 warning (OPT_Winline, "function %q+F can never be inlined because it "
3804 "uses attributes conflicting with inlining", fn);
3805 inlinable = false;
3806 }
3807
3808 else if (inline_forbidden_p (fn))
3809 {
3810 /* See if we should warn about uninlinable functions. Previously,
3811 some of these warnings would be issued while trying to expand
3812 the function inline, but that would cause multiple warnings
3813 about functions that would for example call alloca. But since
3814 this is a property of the function, just one warning is enough.
3815 As a bonus we can now give more details about the reason why a
3816 function is not inlinable. */
3817 if (always_inline)
3818 error (inline_forbidden_reason, fn);
3819 else if (do_warning)
3820 warning (OPT_Winline, inline_forbidden_reason, fn);
3821
3822 inlinable = false;
3823 }
3824
3825 /* Squirrel away the result so that we don't have to check again. */
3826 DECL_UNINLINABLE (fn) = !inlinable;
3827
3828 return inlinable;
3829 }
3830
3831 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3832 word size, take a possible memcpy call into account, and return the
3833 cost based on whether we optimize for size or speed according to SPEED_P. */
3834
3835 int
3836 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3837 {
3838 HOST_WIDE_INT size;
3839
3840 gcc_assert (!VOID_TYPE_P (type));
3841
3842 if (TREE_CODE (type) == VECTOR_TYPE)
3843 {
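/* Estimate the move as the number of pieces of the target's preferred
 SIMD mode needed to cover the vector, rounded up. */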
3844 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3845 machine_mode simd
3846 = targetm.vectorize.preferred_simd_mode (inner);
3847 int simd_mode_size = GET_MODE_SIZE (simd);
3848 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3849 / simd_mode_size);
3850 }
3851
3852 size = int_size_in_bytes (type);
3853
3854 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3855 /* Cost of a memcpy call, 3 arguments and the call. */
3856 return 4;
3857 else
3858 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
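/* E.g., assuming MOVE_MAX_PIECES is 8, a 24-byte structure small enough
 to be moved by pieces costs (24 + 7) / 8 = 3, while anything above the
 MOVE_MAX_PIECES * MOVE_RATIO threshold costs a flat 4 (the memcpy call). */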
3859 }
3860
3861 /* Returns cost of operation CODE, according to WEIGHTS */
3862
3863 static int
3864 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3865 tree op1 ATTRIBUTE_UNUSED, tree op2)
3866 {
3867 switch (code)
3868 {
3869 /* These are "free" conversions, or their presumed cost
3870 is folded into other operations. */
3871 case RANGE_EXPR:
3872 CASE_CONVERT:
3873 case COMPLEX_EXPR:
3874 case PAREN_EXPR:
3875 case VIEW_CONVERT_EXPR:
3876 return 0;
3877
3878 /* Assign cost of 1 to usual operations.
3879 ??? We may consider mapping RTL costs to this. */
3880 case COND_EXPR:
3881 case VEC_COND_EXPR:
3882 case VEC_PERM_EXPR:
3883
3884 case PLUS_EXPR:
3885 case POINTER_PLUS_EXPR:
3886 case MINUS_EXPR:
3887 case MULT_EXPR:
3888 case MULT_HIGHPART_EXPR:
3889 case FMA_EXPR:
3890
3891 case ADDR_SPACE_CONVERT_EXPR:
3892 case FIXED_CONVERT_EXPR:
3893 case FIX_TRUNC_EXPR:
3894
3895 case NEGATE_EXPR:
3896 case FLOAT_EXPR:
3897 case MIN_EXPR:
3898 case MAX_EXPR:
3899 case ABS_EXPR:
3900
3901 case LSHIFT_EXPR:
3902 case RSHIFT_EXPR:
3903 case LROTATE_EXPR:
3904 case RROTATE_EXPR:
3905
3906 case BIT_IOR_EXPR:
3907 case BIT_XOR_EXPR:
3908 case BIT_AND_EXPR:
3909 case BIT_NOT_EXPR:
3910
3911 case TRUTH_ANDIF_EXPR:
3912 case TRUTH_ORIF_EXPR:
3913 case TRUTH_AND_EXPR:
3914 case TRUTH_OR_EXPR:
3915 case TRUTH_XOR_EXPR:
3916 case TRUTH_NOT_EXPR:
3917
3918 case LT_EXPR:
3919 case LE_EXPR:
3920 case GT_EXPR:
3921 case GE_EXPR:
3922 case EQ_EXPR:
3923 case NE_EXPR:
3924 case ORDERED_EXPR:
3925 case UNORDERED_EXPR:
3926
3927 case UNLT_EXPR:
3928 case UNLE_EXPR:
3929 case UNGT_EXPR:
3930 case UNGE_EXPR:
3931 case UNEQ_EXPR:
3932 case LTGT_EXPR:
3933
3934 case CONJ_EXPR:
3935
3936 case PREDECREMENT_EXPR:
3937 case PREINCREMENT_EXPR:
3938 case POSTDECREMENT_EXPR:
3939 case POSTINCREMENT_EXPR:
3940
3941 case REALIGN_LOAD_EXPR:
3942
3943 case REDUC_MAX_EXPR:
3944 case REDUC_MIN_EXPR:
3945 case REDUC_PLUS_EXPR:
3946 case WIDEN_SUM_EXPR:
3947 case WIDEN_MULT_EXPR:
3948 case DOT_PROD_EXPR:
3949 case SAD_EXPR:
3950 case WIDEN_MULT_PLUS_EXPR:
3951 case WIDEN_MULT_MINUS_EXPR:
3952 case WIDEN_LSHIFT_EXPR:
3953
3954 case VEC_WIDEN_MULT_HI_EXPR:
3955 case VEC_WIDEN_MULT_LO_EXPR:
3956 case VEC_WIDEN_MULT_EVEN_EXPR:
3957 case VEC_WIDEN_MULT_ODD_EXPR:
3958 case VEC_UNPACK_HI_EXPR:
3959 case VEC_UNPACK_LO_EXPR:
3960 case VEC_UNPACK_FLOAT_HI_EXPR:
3961 case VEC_UNPACK_FLOAT_LO_EXPR:
3962 case VEC_PACK_TRUNC_EXPR:
3963 case VEC_PACK_SAT_EXPR:
3964 case VEC_PACK_FIX_TRUNC_EXPR:
3965 case VEC_WIDEN_LSHIFT_HI_EXPR:
3966 case VEC_WIDEN_LSHIFT_LO_EXPR:
3967
3968 return 1;
3969
3970 /* A few special cases of expensive operations. This is useful
3971 to avoid inlining functions having too many of these. */
3972 case TRUNC_DIV_EXPR:
3973 case CEIL_DIV_EXPR:
3974 case FLOOR_DIV_EXPR:
3975 case ROUND_DIV_EXPR:
3976 case EXACT_DIV_EXPR:
3977 case TRUNC_MOD_EXPR:
3978 case CEIL_MOD_EXPR:
3979 case FLOOR_MOD_EXPR:
3980 case ROUND_MOD_EXPR:
3981 case RDIV_EXPR:
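/* Division or modulo by a non-INTEGER_CST divisor is charged the
 expensive div_mod_cost; integer division by a constant is typically
 strength-reduced to multiplies and shifts, so it keeps cost 1. */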
3982 if (TREE_CODE (op2) != INTEGER_CST)
3983 return weights->div_mod_cost;
3984 return 1;
3985
3986 default:
3987 /* We expect a copy assignment with no operator. */
3988 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3989 return 0;
3990 }
3991 }
3992
3993
3994 /* Estimate number of instructions that will be created by expanding
3995 the statements in the statement sequence STMTS.
3996 WEIGHTS contains weights attributed to various constructs. */
3997
3998 static
3999 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4000 {
4001 int cost;
4002 gimple_stmt_iterator gsi;
4003
4004 cost = 0;
4005 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4006 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4007
4008 return cost;
4009 }
4010
4011
4012 /* Estimate number of instructions that will be created by expanding STMT.
4013 WEIGHTS contains weights attributed to various constructs. */
4014
4015 int
4016 estimate_num_insns (gimple stmt, eni_weights *weights)
4017 {
4018 unsigned cost, i;
4019 enum gimple_code code = gimple_code (stmt);
4020 tree lhs;
4021 tree rhs;
4022
4023 switch (code)
4024 {
4025 case GIMPLE_ASSIGN:
4026 /* Try to estimate the cost of assignments. We have two cases to
4027 deal with:
4028 1) Simple assignments to registers;
4029 2) Stores to things that must live in memory. This includes
4030 "normal" stores to scalars, but also assignments of large
4031 structures, or constructors of big arrays;
4032
4033 Let us look at these two cases, assuming we have "a = b + C":
4034 <GIMPLE_ASSIGN <var_decl "a">
4035 <plus_expr <var_decl "b"> <constant C>>
4036 If "a" is a GIMPLE register, the assignment to it is free on almost
4037 any target, because "a" usually ends up in a real register. Hence
4038 the only cost of this expression comes from the PLUS_EXPR, and we
4039 can ignore the GIMPLE_ASSIGN.
4040 If "a" is not a GIMPLE register, the assignment to "a" will most
4041 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4042 of moving something into "a", which we compute using the function
4043 estimate_move_cost. */
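/* So, e.g., with size weights, 'tmp_1 = b_2 * c_3' (all SSA registers)
 costs just the MULT_EXPR (1), while 'x.f = tmp_1' with 'x' in memory
 costs 0 for the copy plus estimate_move_cost for the store. */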
4044 if (gimple_clobber_p (stmt))
4045 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4046
4047 lhs = gimple_assign_lhs (stmt);
4048 rhs = gimple_assign_rhs1 (stmt);
4049
4050 cost = 0;
4051
4052 /* Account for the cost of moving to / from memory. */
4053 if (gimple_store_p (stmt))
4054 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4055 if (gimple_assign_load_p (stmt))
4056 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4057
4058 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4059 gimple_assign_rhs1 (stmt),
4060 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4061 == GIMPLE_BINARY_RHS
4062 ? gimple_assign_rhs2 (stmt) : NULL);
4063 break;
4064
4065 case GIMPLE_COND:
4066 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4067 gimple_op (stmt, 0),
4068 gimple_op (stmt, 1));
4069 break;
4070
4071 case GIMPLE_SWITCH:
4072 {
4073 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4074 /* Take into account cost of the switch + guess 2 conditional jumps for
4075 each case label.
4076
4077 TODO: once the switch expansion logic is sufficiently separated, we can
4078 do a better job of estimating the cost of the switch. */
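/* E.g. a switch with 8 labels is estimated at floor_log2 (8) * 2 = 6
 when optimizing for speed (roughly a balanced decision tree) and at
 8 * 2 = 16 when optimizing for size. */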
4079 if (weights->time_based)
4080 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4081 else
4082 cost = gimple_switch_num_labels (switch_stmt) * 2;
4083 }
4084 break;
4085
4086 case GIMPLE_CALL:
4087 {
4088 tree decl;
4089
4090 if (gimple_call_internal_p (stmt))
4091 return 0;
4092 else if ((decl = gimple_call_fndecl (stmt))
4093 && DECL_BUILT_IN (decl))
4094 {
4095 /* Do not special case builtins where we see the body.
4096 This just confuses the inliner. */
4097 struct cgraph_node *node;
4098 if (!(node = cgraph_node::get (decl))
4099 || node->definition)
4100 ;
4101 /* For builtins that are likely expanded to nothing or
4102 inlined, do not account for operand costs. */
4103 else if (is_simple_builtin (decl))
4104 return 0;
4105 else if (is_inexpensive_builtin (decl))
4106 return weights->target_builtin_call_cost;
4107 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4108 {
4109 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4110 specialize the cheap expansion we do here.
4111 ??? This asks for a more general solution. */
4112 switch (DECL_FUNCTION_CODE (decl))
4113 {
4114 case BUILT_IN_POW:
4115 case BUILT_IN_POWF:
4116 case BUILT_IN_POWL:
4117 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4118 && REAL_VALUES_EQUAL
4119 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4120 return estimate_operator_cost
4121 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4122 gimple_call_arg (stmt, 0));
4123 break;
4124
4125 default:
4126 break;
4127 }
4128 }
4129 }
4130
4131 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4132 if (gimple_call_lhs (stmt))
4133 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4134 weights->time_based);
4135 for (i = 0; i < gimple_call_num_args (stmt); i++)
4136 {
4137 tree arg = gimple_call_arg (stmt, i);
4138 cost += estimate_move_cost (TREE_TYPE (arg),
4139 weights->time_based);
4140 }
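/* E.g., with size weights and scalar operands, 'x = foo (a, b)' is
 estimated at call_cost (1) plus one move each for the result and the
 two arguments, assuming each scalar fits within MOVE_MAX_PIECES. */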
4141 break;
4142 }
4143
4144 case GIMPLE_RETURN:
4145 return weights->return_cost;
4146
4147 case GIMPLE_GOTO:
4148 case GIMPLE_LABEL:
4149 case GIMPLE_NOP:
4150 case GIMPLE_PHI:
4151 case GIMPLE_PREDICT:
4152 case GIMPLE_DEBUG:
4153 return 0;
4154
4155 case GIMPLE_ASM:
4156 {
4157 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4158 /* 1000 means infinity. This avoids overflows later
4159 with very long asm statements. */
4160 if (count > 1000)
4161 count = 1000;
4162 return count;
4163 }
4164
4165 case GIMPLE_RESX:
4166 /* This is either going to be an external function call with one
4167 argument, or two register copy statements plus a goto. */
4168 return 2;
4169
4170 case GIMPLE_EH_DISPATCH:
4171 /* ??? This is going to turn into a switch statement. Ideally
4172 we'd have a look at the eh region and estimate the number of
4173 edges involved. */
4174 return 10;
4175
4176 case GIMPLE_BIND:
4177 return estimate_num_insns_seq (
4178 gimple_bind_body (as_a <gbind *> (stmt)),
4179 weights);
4180
4181 case GIMPLE_EH_FILTER:
4182 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4183
4184 case GIMPLE_CATCH:
4185 return estimate_num_insns_seq (gimple_catch_handler (
4186 as_a <gcatch *> (stmt)),
4187 weights);
4188
4189 case GIMPLE_TRY:
4190 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4191 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4192
4193 /* OMP directives are generally very expensive. */
4194
4195 case GIMPLE_OMP_RETURN:
4196 case GIMPLE_OMP_SECTIONS_SWITCH:
4197 case GIMPLE_OMP_ATOMIC_STORE:
4198 case GIMPLE_OMP_CONTINUE:
4199 /* ...except these, which are cheap. */
4200 return 0;
4201
4202 case GIMPLE_OMP_ATOMIC_LOAD:
4203 return weights->omp_cost;
4204
4205 case GIMPLE_OMP_FOR:
4206 return (weights->omp_cost
4207 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4208 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4209
4210 case GIMPLE_OMP_PARALLEL:
4211 case GIMPLE_OMP_TASK:
4212 case GIMPLE_OMP_CRITICAL:
4213 case GIMPLE_OMP_MASTER:
4214 case GIMPLE_OMP_TASKGROUP:
4215 case GIMPLE_OMP_ORDERED:
4216 case GIMPLE_OMP_SECTION:
4217 case GIMPLE_OMP_SECTIONS:
4218 case GIMPLE_OMP_SINGLE:
4219 case GIMPLE_OMP_TARGET:
4220 case GIMPLE_OMP_TEAMS:
4221 return (weights->omp_cost
4222 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4223
4224 case GIMPLE_TRANSACTION:
4225 return (weights->tm_cost
4226 + estimate_num_insns_seq (gimple_transaction_body (
4227 as_a <gtransaction *> (stmt)),
4228 weights));
4229
4230 default:
4231 gcc_unreachable ();
4232 }
4233
4234 return cost;
4235 }
4236
4237 /* Estimate number of instructions that will be created by expanding
4238 function FNDECL. WEIGHTS contains weights attributed to various
4239 constructs. */
4240
4241 int
4242 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4243 {
4244 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4245 gimple_stmt_iterator bsi;
4246 basic_block bb;
4247 int n = 0;
4248
4249 gcc_assert (my_function && my_function->cfg);
4250 FOR_EACH_BB_FN (bb, my_function)
4251 {
4252 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4253 n += estimate_num_insns (gsi_stmt (bsi), weights);
4254 }
4255
4256 return n;
4257 }
4258
4259
4260 /* Initializes weights used by estimate_num_insns. */
4261
4262 void
4263 init_inline_once (void)
4264 {
4265 eni_size_weights.call_cost = 1;
4266 eni_size_weights.indirect_call_cost = 3;
4267 eni_size_weights.target_builtin_call_cost = 1;
4268 eni_size_weights.div_mod_cost = 1;
4269 eni_size_weights.omp_cost = 40;
4270 eni_size_weights.tm_cost = 10;
4271 eni_size_weights.time_based = false;
4272 eni_size_weights.return_cost = 1;
4273
4274 /* Estimating the time for a call is difficult, since we have no idea what the
4275 called function does. In the current uses of eni_time_weights,
4276 underestimating the cost does less harm than overestimating it, so
4277 we choose a rather small value here. */
4278 eni_time_weights.call_cost = 10;
4279 eni_time_weights.indirect_call_cost = 15;
4280 eni_time_weights.target_builtin_call_cost = 1;
4281 eni_time_weights.div_mod_cost = 10;
4282 eni_time_weights.omp_cost = 40;
4283 eni_time_weights.tm_cost = 40;
4284 eni_time_weights.time_based = true;
4285 eni_time_weights.return_cost = 2;
4286 }
4287
4288 /* Estimate the number of instructions in a gimple_seq. */
4289
4290 int
4291 count_insns_seq (gimple_seq seq, eni_weights *weights)
4292 {
4293 gimple_stmt_iterator gsi;
4294 int n = 0;
4295 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4296 n += estimate_num_insns (gsi_stmt (gsi), weights);
4297
4298 return n;
4299 }
4300
4301
4302 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4303
4304 static void
4305 prepend_lexical_block (tree current_block, tree new_block)
4306 {
4307 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4308 BLOCK_SUBBLOCKS (current_block) = new_block;
4309 BLOCK_SUPERCONTEXT (new_block) = current_block;
4310 }
4311
4312 /* Add local variables from CALLEE to CALLER. */
4313
4314 static inline void
4315 add_local_variables (struct function *callee, struct function *caller,
4316 copy_body_data *id)
4317 {
4318 tree var;
4319 unsigned ix;
4320
4321 FOR_EACH_LOCAL_DECL (callee, ix, var)
4322 if (!can_be_nonlocal (var, id))
4323 {
4324 tree new_var = remap_decl (var, id);
4325
4326 /* Remap debug-expressions. */
4327 if (TREE_CODE (new_var) == VAR_DECL
4328 && DECL_HAS_DEBUG_EXPR_P (var)
4329 && new_var != var)
4330 {
4331 tree tem = DECL_DEBUG_EXPR (var);
4332 bool old_regimplify = id->regimplify;
4333 id->remapping_type_depth++;
4334 walk_tree (&tem, copy_tree_body_r, id, NULL);
4335 id->remapping_type_depth--;
4336 id->regimplify = old_regimplify;
4337 SET_DECL_DEBUG_EXPR (new_var, tem);
4338 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4339 }
4340 add_local_decl (caller, new_var);
4341 }
4342 }
4343
4344 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4345
4346 static bool
4347 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4348 {
4349 tree use_retvar;
4350 tree fn;
4351 hash_map<tree, tree> *dst;
4352 hash_map<tree, tree> *st = NULL;
4353 tree return_slot;
4354 tree modify_dest;
4355 tree return_bounds = NULL;
4356 location_t saved_location;
4357 struct cgraph_edge *cg_edge;
4358 cgraph_inline_failed_t reason;
4359 basic_block return_block;
4360 edge e;
4361 gimple_stmt_iterator gsi, stmt_gsi;
4362 bool successfully_inlined = FALSE;
4363 bool purge_dead_abnormal_edges;
4364 gcall *call_stmt;
4365 unsigned int i;
4366
4367 /* Set input_location here so we get the right instantiation context
4368 if we call instantiate_decl from inlinable_function_p. */
4369 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4370 saved_location = input_location;
4371 input_location = gimple_location (stmt);
4372
4373 /* From here on, we're only interested in CALL_EXPRs. */
4374 call_stmt = dyn_cast <gcall *> (stmt);
4375 if (!call_stmt)
4376 goto egress;
4377
4378 cg_edge = id->dst_node->get_edge (stmt);
4379 gcc_checking_assert (cg_edge);
4380 /* First, see if we can figure out what function is being called.
4381 If we cannot, then there is no hope of inlining the function. */
4382 if (cg_edge->indirect_unknown_callee)
4383 goto egress;
4384 fn = cg_edge->callee->decl;
4385 gcc_checking_assert (fn);
4386
4387 /* If FN is a declaration of a function in a nested scope that was
4388 globally declared inline, we don't set its DECL_INITIAL.
4389 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4390 C++ front-end uses it for cdtors to refer to their internal
4391 declarations, which are not real functions. Fortunately those
4392 don't have trees to be saved, so we can tell by checking their
4393 gimple_body. */
4394 if (!DECL_INITIAL (fn)
4395 && DECL_ABSTRACT_ORIGIN (fn)
4396 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4397 fn = DECL_ABSTRACT_ORIGIN (fn);
4398
4399 /* Don't try to inline functions that are not well-suited to inlining. */
4400 if (cg_edge->inline_failed)
4401 {
4402 reason = cg_edge->inline_failed;
4403 /* If this call was originally indirect, we do not want to emit any
4404 inlining related warnings or sorry messages because there are no
4405 guarantees regarding those. */
4406 if (cg_edge->indirect_inlining_edge)
4407 goto egress;
4408
4409 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4410 /* For extern inline functions that get redefined we have always
4411 silently ignored the always_inline flag. Better behaviour would
4412 be to be able to keep both bodies and use the extern inline body
4413 for inlining, but we can't do that because frontends overwrite
4414 the body. */
4415 && !cg_edge->callee->local.redefined_extern_inline
4416 /* During early inline pass, report only when optimization is
4417 not turned on. */
4418 && (symtab->global_info_ready
4419 || !optimize
4420 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4421 /* PR 20090218-1_0.c. Body can be provided by another module. */
4422 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4423 {
4424 error ("inlining failed in call to always_inline %q+F: %s", fn,
4425 cgraph_inline_failed_string (reason));
4426 error ("called from here");
4427 }
4428 else if (warn_inline
4429 && DECL_DECLARED_INLINE_P (fn)
4430 && !DECL_NO_INLINE_WARNING_P (fn)
4431 && !DECL_IN_SYSTEM_HEADER (fn)
4432 && reason != CIF_UNSPECIFIED
4433 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4434 /* Do not warn about not inlined recursive calls. */
4435 && !cg_edge->recursive_p ()
4436 /* Avoid warnings during early inline pass. */
4437 && symtab->global_info_ready)
4438 {
4439 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4440 fn, _(cgraph_inline_failed_string (reason)));
4441 warning (OPT_Winline, "called from here");
4442 }
4443 goto egress;
4444 }
4445 fn = cg_edge->callee->decl;
4446 cg_edge->callee->get_untransformed_body ();
4447
4448 #ifdef ENABLE_CHECKING
4449 if (cg_edge->callee->decl != id->dst_node->decl)
4450 cg_edge->callee->verify ();
4451 #endif
4452
4453 /* We will be inlining this callee. */
4454 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4455 id->assign_stmts.create (0);
4456
4457 /* Update the caller's EH personality. */
4458 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4459 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4460 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4461
4462 /* Split the block holding the GIMPLE_CALL. */
4463 e = split_block (bb, stmt);
4464 bb = e->src;
4465 return_block = e->dest;
4466 remove_edge (e);
4467
4468 /* split_block splits after the statement; work around this by
4469 moving the call into the second block manually. Not pretty,
4470 but seems easier than doing the CFG manipulation by hand
4471 when the GIMPLE_CALL is in the last statement of BB. */
4472 stmt_gsi = gsi_last_bb (bb);
4473 gsi_remove (&stmt_gsi, false);
4474
4475 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4476 been the source of abnormal edges. In this case, schedule
4477 the removal of dead abnormal edges. */
4478 gsi = gsi_start_bb (return_block);
4479 if (gsi_end_p (gsi))
4480 {
4481 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4482 purge_dead_abnormal_edges = true;
4483 }
4484 else
4485 {
4486 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4487 purge_dead_abnormal_edges = false;
4488 }
4489
4490 stmt_gsi = gsi_start_bb (return_block);
4491
4492 /* Build a block containing code to initialize the arguments, the
4493 actual inline expansion of the body, and a label for the return
4494 statements within the function to jump to. The type of the
4495 statement expression is the return type of the function call.
4496 ??? If the call does not have an associated block then we will
4497 remap all callee blocks to NULL, effectively dropping most of
4498 its debug information. This should only happen for calls to
4499 artificial decls inserted by the compiler itself. We need to
4500 either link the inlined blocks into the caller block tree or
4501 not refer to them in any way to not break GC for locations. */
4502 if (gimple_block (stmt))
4503 {
4504 id->block = make_node (BLOCK);
4505 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4506 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4507 prepend_lexical_block (gimple_block (stmt), id->block);
4508 }
4509
4510 /* Local declarations will be replaced by their equivalents in this
4511 map. */
4512 st = id->decl_map;
4513 id->decl_map = new hash_map<tree, tree>;
4514 dst = id->debug_map;
4515 id->debug_map = NULL;
4516
4517 /* Record the function we are about to inline. */
4518 id->src_fn = fn;
4519 id->src_node = cg_edge->callee;
4520 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4521 id->call_stmt = stmt;
4522
4523 /* If the src function contains an IFN_VA_ARG, then so will the dst
4524 function after inlining. */
4525 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4526 {
4527 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4528 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4529 }
4530
4531 gcc_assert (!id->src_cfun->after_inlining);
4532
4533 id->entry_bb = bb;
4534 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4535 {
4536 gimple_stmt_iterator si = gsi_last_bb (bb);
4537 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4538 NOT_TAKEN),
4539 GSI_NEW_STMT);
4540 }
4541 initialize_inlined_parameters (id, stmt, fn, bb);
4542
4543 if (DECL_INITIAL (fn))
4544 {
4545 if (gimple_block (stmt))
4546 {
4547 tree *var;
4548
4549 prepend_lexical_block (id->block,
4550 remap_blocks (DECL_INITIAL (fn), id));
4551 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4552 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4553 == NULL_TREE));
4554 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4555 otherwise, for DWARF, the DW_TAG_formal_parameter DIEs will not be
4556 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4557 under it. The parameters can then be evaluated in the debugger,
4558 but they don't show up in backtraces. */
4559 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4560 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4561 {
4562 tree v = *var;
4563 *var = TREE_CHAIN (v);
4564 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4565 BLOCK_VARS (id->block) = v;
4566 }
4567 else
4568 var = &TREE_CHAIN (*var);
4569 }
4570 else
4571 remap_blocks_to_null (DECL_INITIAL (fn), id);
4572 }
4573
4574 /* Return statements in the function body will be replaced by jumps
4575 to the RET_LABEL. */
4576 gcc_assert (DECL_INITIAL (fn));
4577 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4578
4579 /* Find the LHS to which the result of this call is assigned. */
4580 return_slot = NULL;
4581 if (gimple_call_lhs (stmt))
4582 {
4583 modify_dest = gimple_call_lhs (stmt);
4584
4585 /* Remember where to copy returned bounds. */
4586 if (gimple_call_with_bounds_p (stmt)
4587 && TREE_CODE (modify_dest) == SSA_NAME)
4588 {
4589 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4590 if (retbnd)
4591 {
4592 return_bounds = gimple_call_lhs (retbnd);
4593 /* If returned bounds are not used then just
4594 remove the unused call. */
4595 if (!return_bounds)
4596 {
4597 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4598 gsi_remove (&iter, true);
4599 }
4600 }
4601 }
4602
4603 /* The function which we are inlining might not return a value,
4604 in which case we should issue a warning that the function
4605 does not return a value. In that case the optimizers will
4606 see that the variable to which the value is assigned was not
4607 initialized. We do not want to issue a warning about that
4608 uninitialized variable. */
4609 if (DECL_P (modify_dest))
4610 TREE_NO_WARNING (modify_dest) = 1;
4611
4612 if (gimple_call_return_slot_opt_p (call_stmt))
4613 {
4614 return_slot = modify_dest;
4615 modify_dest = NULL;
4616 }
4617 }
4618 else
4619 modify_dest = NULL;
4620
4621 /* If we are inlining a call to the C++ operator new, we don't want
4622 to use type based alias analysis on the return value. Otherwise
4623 we may get confused if the compiler sees that the inlined new
4624 function returns a pointer which was just deleted. See bug
4625 33407. */
4626 if (DECL_IS_OPERATOR_NEW (fn))
4627 {
4628 return_slot = NULL;
4629 modify_dest = NULL;
4630 }
4631
4632 /* Declare the return variable for the function. */
4633 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4634 return_bounds, bb);
4635
4636 /* Add local vars in this inlined callee to caller. */
4637 add_local_variables (id->src_cfun, cfun, id);
4638
4639 if (dump_file && (dump_flags & TDF_DETAILS))
4640 {
4641 fprintf (dump_file, "Inlining ");
4642 print_generic_expr (dump_file, id->src_fn, 0);
4643 fprintf (dump_file, " to ");
4644 print_generic_expr (dump_file, id->dst_fn, 0);
4645 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4646 }
4647
4648 /* This is it. Duplicate the callee body. Assume callee is
4649 pre-gimplified. Note that we must not alter the caller
4650 function in any way before this point, as this CALL_EXPR may be
4651 a self-referential call; if we're calling ourselves, we need to
4652 duplicate our body before altering anything. */
4653 copy_body (id, cg_edge->callee->count,
4654 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4655 bb, return_block, NULL);
4656
4657 /* Reset the escaped solution. */
4658 if (cfun->gimple_df)
4659 pt_solution_reset (&cfun->gimple_df->escaped);
4660
4661 /* Clean up. */
4662 if (id->debug_map)
4663 {
4664 delete id->debug_map;
4665 id->debug_map = dst;
4666 }
4667 delete id->decl_map;
4668 id->decl_map = st;
4669
4670 /* Unlink the call's virtual operands before replacing it. */
4671 unlink_stmt_vdef (stmt);
4672 if (gimple_vdef (stmt)
4673 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4674 release_ssa_name (gimple_vdef (stmt));
4675
4676 /* If the inlined function returns a result that we care about,
4677 substitute the GIMPLE_CALL with an assignment of the return
4678 variable to the LHS of the call. That is, if STMT was
4679 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4680 if (use_retvar && gimple_call_lhs (stmt))
4681 {
4682 gimple old_stmt = stmt;
4683 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4684 gsi_replace (&stmt_gsi, stmt, false);
4685 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4686
4687 /* Copy bounds if we copy a structure with bounds. */
4688 if (chkp_function_instrumented_p (id->dst_fn)
4689 && !BOUNDED_P (use_retvar)
4690 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4691 id->assign_stmts.safe_push (stmt);
4692 }
4693 else
4694 {
4695 /* Handle the case of inlining a function with no return
4696 statement, which causes the return value to become undefined. */
4697 if (gimple_call_lhs (stmt)
4698 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4699 {
4700 tree name = gimple_call_lhs (stmt);
4701 tree var = SSA_NAME_VAR (name);
4702 tree def = ssa_default_def (cfun, var);
4703
4704 if (def)
4705 {
4706 /* If the variable is used undefined, make this name
4707 undefined via a move. */
4708 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4709 gsi_replace (&stmt_gsi, stmt, true);
4710 }
4711 else
4712 {
4713 /* Otherwise make this variable undefined. */
4714 gsi_remove (&stmt_gsi, true);
4715 set_ssa_default_def (cfun, var, name);
4716 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4717 }
4718 }
4719 else
4720 gsi_remove (&stmt_gsi, true);
4721 }
4722
4723 /* Put returned bounds into the correct place if required. */
4724 if (return_bounds)
4725 {
4726 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4727 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4728 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4729 unlink_stmt_vdef (old_stmt);
4730 gsi_replace (&bnd_gsi, new_stmt, false);
4731 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4732 cgraph_update_edges_for_call_stmt (old_stmt,
4733 gimple_call_fndecl (old_stmt),
4734 new_stmt);
4735 }
4736
4737 if (purge_dead_abnormal_edges)
4738 {
4739 gimple_purge_dead_eh_edges (return_block);
4740 gimple_purge_dead_abnormal_call_edges (return_block);
4741 }
4742
4743 /* If the value of the new expression is ignored, that's OK. We
4744 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4745 the equivalent inlined version either. */
4746 if (is_gimple_assign (stmt))
4747 {
4748 gcc_assert (gimple_assign_single_p (stmt)
4749 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4750 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4751 }
4752
4753 /* Copy bounds for all generated assigns that need it. */
4754 for (i = 0; i < id->assign_stmts.length (); i++)
4755 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4756 id->assign_stmts.release ();
4757
4758 /* Output the inlining info for this abstract function, since it has been
4759 inlined. If we don't do this now, we can lose the information about the
4760 variables in the function when the blocks get blown away as soon as we
4761 remove the cgraph node. */
4762 if (gimple_block (stmt))
4763 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4764
4765 /* Update callgraph if needed. */
4766 cg_edge->callee->remove ();
4767
4768 id->block = NULL_TREE;
4769 successfully_inlined = TRUE;
4770
4771 egress:
4772 input_location = saved_location;
4773 return successfully_inlined;
4774 }
4775
4776 /* Expand call statements reachable from STMT_P.
4777 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4778 in a MODIFY_EXPR. */
4779
4780 static bool
4781 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4782 {
4783 gimple_stmt_iterator gsi;
4784 bool inlined = false;
4785
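/* Walk the statements backwards and step the iterator past the call
 before a possible inline expansion, since expand_call_inline splits
 BB at the call and moves the call and the following statements to a
 new block. */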
4786 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4787 {
4788 gimple stmt = gsi_stmt (gsi);
4789 gsi_prev (&gsi);
4790
4791 if (is_gimple_call (stmt)
4792 && !gimple_call_internal_p (stmt))
4793 inlined |= expand_call_inline (bb, stmt, id);
4794 }
4795
4796 return inlined;
4797 }
4798
4799
4800 /* Walk all basic blocks created after FIRST and try to fold every statement
4801 in the STATEMENTS pointer set. */
4802
4803 static void
4804 fold_marked_statements (int first, hash_set<gimple> *statements)
4805 {
4806 for (; first < n_basic_blocks_for_fn (cfun); first++)
4807 if (BASIC_BLOCK_FOR_FN (cfun, first))
4808 {
4809 gimple_stmt_iterator gsi;
4810
4811 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4812 !gsi_end_p (gsi);
4813 gsi_next (&gsi))
4814 if (statements->contains (gsi_stmt (gsi)))
4815 {
4816 gimple old_stmt = gsi_stmt (gsi);
4817 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4818
4819 if (old_decl && DECL_BUILT_IN (old_decl))
4820 {
4821 /* Folding builtins can create multiple instructions;
4822 we need to look at all of them. */
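/* E.g. a __builtin_memcpy with a small constant length may be folded
 into a direct aggregate assignment plus a separate assignment of the
 destination pointer to the call's LHS. */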
4823 gimple_stmt_iterator i2 = gsi;
4824 gsi_prev (&i2);
4825 if (fold_stmt (&gsi))
4826 {
4827 gimple new_stmt;
4828 /* If a builtin at the end of a bb folded into nothing,
4829 the following loop won't work. */
4830 if (gsi_end_p (gsi))
4831 {
4832 cgraph_update_edges_for_call_stmt (old_stmt,
4833 old_decl, NULL);
4834 break;
4835 }
4836 if (gsi_end_p (i2))
4837 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4838 else
4839 gsi_next (&i2);
4840 while (1)
4841 {
4842 new_stmt = gsi_stmt (i2);
4843 update_stmt (new_stmt);
4844 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4845 new_stmt);
4846
4847 if (new_stmt == gsi_stmt (gsi))
4848 {
4849 /* It is okay to check only for the very last
4850 of these statements. If it is a throwing
4851 statement nothing will change. If it isn't,
4852 this can remove EH edges. The only way that
4853 would be incorrect is if some intermediate
4854 stmts threw, but not the last one. That would
4855 mean we'd have to split the block, which we
4856 can't do here and we'd lose anyway. And as
4857 builtins probably never throw, this all
4858 is moot anyway. */
4859 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4860 new_stmt))
4861 gimple_purge_dead_eh_edges (
4862 BASIC_BLOCK_FOR_FN (cfun, first));
4863 break;
4864 }
4865 gsi_next (&i2);
4866 }
4867 }
4868 }
4869 else if (fold_stmt (&gsi))
4870 {
4871 /* Re-read the statement from GSI as fold_stmt() may
4872 have changed it. */
4873 gimple new_stmt = gsi_stmt (gsi);
4874 update_stmt (new_stmt);
4875
4876 if (is_gimple_call (old_stmt)
4877 || is_gimple_call (new_stmt))
4878 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4879 new_stmt);
4880
4881 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4882 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4883 first));
4884 }
4885 }
4886 }
4887 }
4888
4889 /* Expand calls to inline functions in the body of FN. */
4890
4891 unsigned int
4892 optimize_inline_calls (tree fn)
4893 {
4894 copy_body_data id;
4895 basic_block bb;
4896 int last = n_basic_blocks_for_fn (cfun);
4897 bool inlined_p = false;
4898
4899 /* Clear out ID. */
4900 memset (&id, 0, sizeof (id));
4901
4902 id.src_node = id.dst_node = cgraph_node::get (fn);
4903 gcc_assert (id.dst_node->definition);
4904 id.dst_fn = fn;
4905 /* Or any functions that aren't finished yet. */
4906 if (current_function_decl)
4907 id.dst_fn = current_function_decl;
4908
4909 id.copy_decl = copy_decl_maybe_to_var;
4910 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4911 id.transform_new_cfg = false;
4912 id.transform_return_to_modify = true;
4913 id.transform_parameter = true;
4914 id.transform_lang_insert_block = NULL;
4915 id.statements_to_fold = new hash_set<gimple>;
4916
4917 push_gimplify_context ();
4918
4919 /* We make no attempts to keep dominance info up-to-date. */
4920 free_dominance_info (CDI_DOMINATORS);
4921 free_dominance_info (CDI_POST_DOMINATORS);
4922
4923 /* Register specific gimple functions. */
4924 gimple_register_cfg_hooks ();
4925
4926 /* Reach the trees by walking over the CFG, and note the
4927 enclosing basic-blocks in the call edges. */
4928 /* We walk the blocks going forward, because inlined function bodies
4929 will split id->current_basic_block, and the new blocks will
4930 follow it; we'll trudge through them, processing their CALL_EXPRs
4931 along the way. */
4932 FOR_EACH_BB_FN (bb, cfun)
4933 inlined_p |= gimple_expand_calls_inline (bb, &id);
4934
4935 pop_gimplify_context (NULL);
4936
4937 #ifdef ENABLE_CHECKING
4938 {
4939 struct cgraph_edge *e;
4940
4941 id.dst_node->verify ();
4942
4943 /* Double check that we inlined everything we are supposed to inline. */
4944 for (e = id.dst_node->callees; e; e = e->next_callee)
4945 gcc_assert (e->inline_failed);
4946 }
4947 #endif
4948
4949 /* Fold queued statements. */
4950 fold_marked_statements (last, id.statements_to_fold);
4951 delete id.statements_to_fold;
4952
4953 gcc_assert (!id.debug_stmts.exists ());
4954
4955 /* If we didn't inline into the function there is nothing to do. */
4956 if (!inlined_p)
4957 return 0;
4958
4959 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4960 number_blocks (fn);
4961
4962 delete_unreachable_blocks_update_callgraph (&id);
4963 #ifdef ENABLE_CHECKING
4964 id.dst_node->verify ();
4965 #endif
4966
4967 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4968 not possible yet - the IPA passes might make various functions not
4969 throw and they don't care to proactively update local EH info. This is
4970 done later in the fixup_cfg pass, which also executes the verification. */
4971 return (TODO_update_ssa
4972 | TODO_cleanup_cfg
4973 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4974 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4975 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4976 ? TODO_rebuild_frequencies : 0));
4977 }
4978
4979 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4980
4981 tree
4982 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4983 {
4984 enum tree_code code = TREE_CODE (*tp);
4985 enum tree_code_class cl = TREE_CODE_CLASS (code);
4986
4987 /* We make copies of most nodes. */
4988 if (IS_EXPR_CODE_CLASS (cl)
4989 || code == TREE_LIST
4990 || code == TREE_VEC
4991 || code == TYPE_DECL
4992 || code == OMP_CLAUSE)
4993 {
4994 /* Because the chain gets clobbered when we make a copy, we save it
4995 here. */
4996 tree chain = NULL_TREE, new_tree;
4997
4998 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4999 chain = TREE_CHAIN (*tp);
5000
5001 /* Copy the node. */
5002 new_tree = copy_node (*tp);
5003
5004 *tp = new_tree;
5005
5006 /* Now, restore the chain, if appropriate. That will cause
5007 walk_tree to walk into the chain as well. */
5008 if (code == PARM_DECL
5009 || code == TREE_LIST
5010 || code == OMP_CLAUSE)
5011 TREE_CHAIN (*tp) = chain;
5012
5013 /* For now, we don't update BLOCKs when we make copies. So, we
5014 have to nullify all BIND_EXPRs. */
5015 if (TREE_CODE (*tp) == BIND_EXPR)
5016 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5017 }
5018 else if (code == CONSTRUCTOR)
5019 {
5020 /* CONSTRUCTOR nodes need special handling because
5021 we need to duplicate the vector of elements. */
5022 tree new_tree;
5023
5024 new_tree = copy_node (*tp);
5025 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5026 *tp = new_tree;
5027 }
5028 else if (code == STATEMENT_LIST)
5029 /* We used to just abort on STATEMENT_LIST, but we can run into them
5030 with statement-expressions (c++/40975). */
5031 copy_statement_list (tp);
5032 else if (TREE_CODE_CLASS (code) == tcc_type)
5033 *walk_subtrees = 0;
5034 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5035 *walk_subtrees = 0;
5036 else if (TREE_CODE_CLASS (code) == tcc_constant)
5037 *walk_subtrees = 0;
5038 return NULL_TREE;
5039 }
5040
5041 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5042 information indicating to what new SAVE_EXPR this one should be mapped,
5043 use that one. Otherwise, create a new node and enter it in ST. FN is
5044 the function into which the copy will be placed. */
5045
5046 static void
5047 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5048 {
5049 tree *n;
5050 tree t;
5051
5052 /* See if we already encountered this SAVE_EXPR. */
5053 n = st->get (*tp);
5054
5055 /* If we didn't already remap this SAVE_EXPR, do so now. */
5056 if (!n)
5057 {
5058 t = copy_node (*tp);
5059
5060 /* Remember this SAVE_EXPR. */
5061 st->put (*tp, t);
5062 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5063 st->put (t, t);
5064 }
5065 else
5066 {
5067 /* We've already walked into this SAVE_EXPR; don't do it again. */
5068 *walk_subtrees = 0;
5069 t = *n;
5070 }
5071
5072 /* Replace this SAVE_EXPR with the copy. */
5073 *tp = t;
5074 }
5075
5076 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5077 label, copies the declaration and enters it in the decl map in DATA (which
5078 is really a 'copy_body_data *'). */
5079
5080 static tree
5081 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5082 bool *handled_ops_p ATTRIBUTE_UNUSED,
5083 struct walk_stmt_info *wi)
5084 {
5085 copy_body_data *id = (copy_body_data *) wi->info;
5086 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5087
5088 if (stmt)
5089 {
5090 tree decl = gimple_label_label (stmt);
5091
5092 /* Copy the decl and remember the copy. */
5093 insert_decl_map (id, decl, id->copy_decl (decl, id));
5094 }
5095
5096 return NULL_TREE;
5097 }
5098
5099
5100 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5101 Using the decl map pointed to by ST (which is really a `hash_map'),
5102 remaps all local declarations to appropriate replacements in gimple
5103 operands. */
5104
5105 static tree
5106 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5107 {
5108 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5109 copy_body_data *id = (copy_body_data *) wi->info;
5110 hash_map<tree, tree> *st = id->decl_map;
5111 tree *n;
5112 tree expr = *tp;
5113
5114 /* Only a local declaration (variable or label). */
5115 if ((TREE_CODE (expr) == VAR_DECL
5116 && !TREE_STATIC (expr))
5117 || TREE_CODE (expr) == LABEL_DECL)
5118 {
5119 /* Lookup the declaration. */
5120 n = st->get (expr);
5121
5122 /* If it's there, remap it. */
5123 if (n)
5124 *tp = *n;
5125 *walk_subtrees = 0;
5126 }
5127 else if (TREE_CODE (expr) == STATEMENT_LIST
5128 || TREE_CODE (expr) == BIND_EXPR
5129 || TREE_CODE (expr) == SAVE_EXPR)
5130 gcc_unreachable ();
5131 else if (TREE_CODE (expr) == TARGET_EXPR)
5132 {
5133 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5134 It's OK for this to happen if it was part of a subtree that
5135 isn't immediately expanded, such as operand 2 of another
5136 TARGET_EXPR. */
5137 if (!TREE_OPERAND (expr, 1))
5138 {
5139 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5140 TREE_OPERAND (expr, 3) = NULL_TREE;
5141 }
5142 }
5143
5144 /* Keep iterating. */
5145 return NULL_TREE;
5146 }
5147
5148
5149 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5150 Using the decl map in the copy_body_data pointed to by WI->info,
5151 remaps all local declarations to appropriate replacements in gimple
5152 statements. */
5153
5154 static tree
5155 replace_locals_stmt (gimple_stmt_iterator *gsip,
5156 bool *handled_ops_p ATTRIBUTE_UNUSED,
5157 struct walk_stmt_info *wi)
5158 {
5159 copy_body_data *id = (copy_body_data *) wi->info;
5160 gimple gs = gsi_stmt (*gsip);
5161
5162 if (gbind *stmt = dyn_cast <gbind *> (gs))
5163 {
5164 tree block = gimple_bind_block (stmt);
5165
5166 if (block)
5167 {
5168 remap_block (&block, id);
5169 gimple_bind_set_block (stmt, block);
5170 }
5171
5172 /* This will remap a lot of the same decls again, but this should be
5173 harmless. */
5174 if (gimple_bind_vars (stmt))
5175 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5176 NULL, id));
5177 }
5178
5179 /* Keep iterating. */
5180 return NULL_TREE;
5181 }
5182
5183
5184 /* Copies everything in SEQ and replaces variables and labels local to
5185 current_function_decl. */
5186
5187 gimple_seq
5188 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5189 {
5190 copy_body_data id;
5191 struct walk_stmt_info wi;
5192 gimple_seq copy;
5193
5194 /* There's nothing to do for NULL_TREE. */
5195 if (seq == NULL)
5196 return seq;
5197
5198 /* Set up ID. */
5199 memset (&id, 0, sizeof (id));
5200 id.src_fn = current_function_decl;
5201 id.dst_fn = current_function_decl;
5202 id.decl_map = new hash_map<tree, tree>;
5203 id.debug_map = NULL;
5204
5205 id.copy_decl = copy_decl_no_change;
5206 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5207 id.transform_new_cfg = false;
5208 id.transform_return_to_modify = false;
5209 id.transform_parameter = false;
5210 id.transform_lang_insert_block = NULL;
5211
5212 /* Walk the tree once to find local labels. */
5213 memset (&wi, 0, sizeof (wi));
5214 hash_set<tree> visited;
5215 wi.info = &id;
5216 wi.pset = &visited;
5217 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5218
5219 copy = gimple_seq_copy (seq);
5220
5221 /* Walk the copy, remapping decls. */
5222 memset (&wi, 0, sizeof (wi));
5223 wi.info = &id;
5224 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5225
5226 /* Clean up. */
5227 delete id.decl_map;
5228 if (id.debug_map)
5229 delete id.debug_map;
5230 if (id.dependence_map)
5231 {
5232 delete id.dependence_map;
5233 id.dependence_map = NULL;
5234 }
5235
5236 return copy;
5237 }
5238
5239
5240 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5241
5242 static tree
5243 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5244 {
5245 if (*tp == data)
5246 return (tree) data;
5247 else
5248 return NULL;
5249 }
5250
5251 DEBUG_FUNCTION bool
5252 debug_find_tree (tree top, tree search)
5253 {
5254 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5255 }
5256
5257
5258 /* Declare the variables created by the inliner. Add all the variables in
5259 VARS to BLOCK. */
5260
5261 static void
5262 declare_inline_vars (tree block, tree vars)
5263 {
5264 tree t;
5265 for (t = vars; t; t = DECL_CHAIN (t))
5266 {
5267 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5268 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5269 add_local_decl (cfun, t);
5270 }
5271
5272 if (block)
5273 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5274 }
5275
5276 /* Finish up COPY, a duplicate of DECL made while inlining or versioning:
5277 propagate the debug-related flags, record DECL as the abstract origin,
5278 clear any stale RTL and set the appropriate DECL_CONTEXT. */
5279
5280 static tree
5281 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5282 {
5283 /* Don't generate debug information for the copy if we wouldn't have
5284 generated it for the original either. */
5285 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5286 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5287
5288 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5289 declaration inspired this copy. */
5290 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5291
5292 /* The new variable/label has no RTL, yet. */
5293 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5294 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5295 SET_DECL_RTL (copy, 0);
5296
5297 /* These args would always appear unused, if not for this. */
5298 TREE_USED (copy) = 1;
5299
5300 /* Set the context for the new declaration. */
5301 if (!DECL_CONTEXT (decl))
5302 /* Globals stay global. */
5303 ;
5304 else if (DECL_CONTEXT (decl) != id->src_fn)
5305 /* Things that weren't in the scope of the function we're inlining
5306 from aren't in the scope we're inlining to, either. */
5307 ;
5308 else if (TREE_STATIC (decl))
5309 /* Function-scoped static variables should stay in the original
5310 function. */
5311 ;
5312 else
5313 /* Ordinary automatic local variables are now in the scope of the
5314 new function. */
5315 DECL_CONTEXT (copy) = id->dst_fn;
5316
5317 return copy;
5318 }
5319
5320 static tree
5321 copy_decl_to_var (tree decl, copy_body_data *id)
5322 {
5323 tree copy, type;
5324
5325 gcc_assert (TREE_CODE (decl) == PARM_DECL
5326 || TREE_CODE (decl) == RESULT_DECL);
5327
5328 type = TREE_TYPE (decl);
5329
5330 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5331 VAR_DECL, DECL_NAME (decl), type);
5332 if (DECL_PT_UID_SET_P (decl))
5333 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5334 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5335 TREE_READONLY (copy) = TREE_READONLY (decl);
5336 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5337 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5338
5339 return copy_decl_for_dup_finish (id, decl, copy);
5340 }
5341
5342 /* Like copy_decl_to_var, but create a return slot object instead of a
5343 pointer variable for return by invisible reference. */
5344
5345 static tree
5346 copy_result_decl_to_var (tree decl, copy_body_data *id)
5347 {
5348 tree copy, type;
5349
5350 gcc_assert (TREE_CODE (decl) == PARM_DECL
5351 || TREE_CODE (decl) == RESULT_DECL);
5352
5353 type = TREE_TYPE (decl);
5354 if (DECL_BY_REFERENCE (decl))
5355 type = TREE_TYPE (type);
5356
5357 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5358 VAR_DECL, DECL_NAME (decl), type);
5359 if (DECL_PT_UID_SET_P (decl))
5360 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5361 TREE_READONLY (copy) = TREE_READONLY (decl);
5362 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5363 if (!DECL_BY_REFERENCE (decl))
5364 {
5365 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5366 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5367 }
5368
5369 return copy_decl_for_dup_finish (id, decl, copy);
5370 }
5371
5372 tree
5373 copy_decl_no_change (tree decl, copy_body_data *id)
5374 {
5375 tree copy;
5376
5377 copy = copy_node (decl);
5378
5379 /* The COPY is not abstract; it will be generated in DST_FN. */
5380 DECL_ABSTRACT_P (copy) = false;
5381 lang_hooks.dup_lang_specific_decl (copy);
5382
5383 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5384 been taken; it's for internal bookkeeping in expand_goto_internal. */
5385 if (TREE_CODE (copy) == LABEL_DECL)
5386 {
5387 TREE_ADDRESSABLE (copy) = 0;
5388 LABEL_DECL_UID (copy) = -1;
5389 }
5390
5391 return copy_decl_for_dup_finish (id, decl, copy);
5392 }
5393
5394 static tree
5395 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5396 {
5397 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5398 return copy_decl_to_var (decl, id);
5399 else
5400 return copy_decl_no_change (decl, id);
5401 }
5402
5403 /* Return a copy of the function's argument list, skipping ARGS_TO_SKIP. */
5404 static tree
5405 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5406 bitmap args_to_skip, tree *vars)
5407 {
5408 tree arg, *parg;
5409 tree new_parm = NULL;
5410 int i = 0;
5411
5412 parg = &new_parm;
5413
5414 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5415 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5416 {
5417 tree new_tree = remap_decl (arg, id);
5418 if (TREE_CODE (new_tree) != PARM_DECL)
5419 new_tree = id->copy_decl (arg, id);
5420 lang_hooks.dup_lang_specific_decl (new_tree);
5421 *parg = new_tree;
5422 parg = &DECL_CHAIN (new_tree);
5423 }
5424 else if (!id->decl_map->get (arg))
5425 {
5426 /* Make an equivalent VAR_DECL. If the argument was used
5427 as a temporary variable later in the function, its uses will be
5428 replaced by this local variable. */
5429 tree var = copy_decl_to_var (arg, id);
5430 insert_decl_map (id, arg, var);
5431 /* Declare this new variable. */
5432 DECL_CHAIN (var) = *vars;
5433 *vars = var;
5434 }
5435 return new_parm;
5436 }
5437
5438 /* Return a copy of the function's static chain. */
5439 static tree
5440 copy_static_chain (tree static_chain, copy_body_data * id)
5441 {
5442 tree *chain_copy, *pvar;
5443
5444 chain_copy = &static_chain;
5445 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5446 {
5447 tree new_tree = remap_decl (*pvar, id);
5448 lang_hooks.dup_lang_specific_decl (new_tree);
5449 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5450 *pvar = new_tree;
5451 }
5452 return static_chain;
5453 }
5454
5455 /* Return true if the function is allowed to be versioned.
5456 This is a guard for the versioning functionality. */
5457
5458 bool
5459 tree_versionable_function_p (tree fndecl)
5460 {
5461 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5462 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5463 }
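
/* Illustrative consequence (a sketch): a function declared as

       int f (int) __attribute__ ((noclone));

   is rejected here because of the "noclone" attribute; functions for
   which copy_forbidden reports a reason are rejected as well.  */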
5464
5465 /* Delete all unreachable basic blocks and update the callgraph.
5466 Doing so is somewhat nontrivial because we need to update all clones and
5467 remove inline functions that become unreachable. */
5468
5469 static bool
5470 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5471 {
5472 bool changed = false;
5473 basic_block b, next_bb;
5474
5475 find_unreachable_blocks ();
5476
5477 /* Delete all unreachable basic blocks. */
5478
5479 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5480 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5481 {
5482 next_bb = b->next_bb;
5483
5484 if (!(b->flags & BB_REACHABLE))
5485 {
5486 gimple_stmt_iterator bsi;
5487
5488 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5489 {
5490 struct cgraph_edge *e;
5491 struct cgraph_node *node;
5492
5493 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5494
5495 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5496 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5497 {
5498 if (!e->inline_failed)
5499 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5500 else
5501 e->remove ();
5502 }
5503 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5504 && id->dst_node->clones)
5505 for (node = id->dst_node->clones; node != id->dst_node;)
5506 {
5507 node->remove_stmt_references (gsi_stmt (bsi));
5508 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5509 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5510 {
5511 if (!e->inline_failed)
5512 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5513 else
5514 e->remove ();
5515 }
5516
5517 if (node->clones)
5518 node = node->clones;
5519 else if (node->next_sibling_clone)
5520 node = node->next_sibling_clone;
5521 else
5522 {
5523 while (node != id->dst_node && !node->next_sibling_clone)
5524 node = node->clone_of;
5525 if (node != id->dst_node)
5526 node = node->next_sibling_clone;
5527 }
5528 }
5529 }
5530 delete_basic_block (b);
5531 changed = true;
5532 }
5533 }
5534
5535 return changed;
5536 }
5537
5538 /* Update clone info after duplication. */
5539
5540 static void
5541 update_clone_info (copy_body_data * id)
5542 {
5543 struct cgraph_node *node;
5544 if (!id->dst_node->clones)
5545 return;
5546 for (node = id->dst_node->clones; node != id->dst_node;)
5547 {
5548 /* First update replace maps to match the new body. */
5549 if (node->clone.tree_map)
5550 {
5551 unsigned int i;
5552 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5553 {
5554 struct ipa_replace_map *replace_info;
5555 replace_info = (*node->clone.tree_map)[i];
5556 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5557 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5558 }
5559 }
5560 if (node->clones)
5561 node = node->clones;
5562 else if (node->next_sibling_clone)
5563 node = node->next_sibling_clone;
5564 else
5565 {
5566 while (node != id->dst_node && !node->next_sibling_clone)
5567 node = node->clone_of;
5568 if (node != id->dst_node)
5569 node = node->next_sibling_clone;
5570 }
5571 }
5572 }
5573
5574 /* Create a copy of a function's tree.
5575 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5576 of the original function and the new copied function
5577 respectively. In case we want to replace a DECL
5578 tree with another tree while duplicating the function's
5579 body, TREE_MAP represents the mapping between these
5580 trees. If UPDATE_CLONES is set, the call_stmt fields
5581 of edges of clones of the function will be updated.
5582
5583 If non-NULL, ARGS_TO_SKIP determines which function parameters to
5584 remove from the new version.
5585 If SKIP_RETURN is true, the new version will return void.
5586 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5587 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5588 */
5589 void
5590 tree_function_versioning (tree old_decl, tree new_decl,
5591 vec<ipa_replace_map *, va_gc> *tree_map,
5592 bool update_clones, bitmap args_to_skip,
5593 bool skip_return, bitmap blocks_to_copy,
5594 basic_block new_entry)
5595 {
5596 struct cgraph_node *old_version_node;
5597 struct cgraph_node *new_version_node;
5598 copy_body_data id;
5599 tree p;
5600 unsigned i;
5601 struct ipa_replace_map *replace_info;
5602 basic_block old_entry_block, bb;
5603 auto_vec<gimple, 10> init_stmts;
5604 tree vars = NULL_TREE;
5605
5606 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5607 && TREE_CODE (new_decl) == FUNCTION_DECL);
5608 DECL_POSSIBLY_INLINED (old_decl) = 1;
5609
5610 old_version_node = cgraph_node::get (old_decl);
5611 gcc_checking_assert (old_version_node);
5612 new_version_node = cgraph_node::get (new_decl);
5613 gcc_checking_assert (new_version_node);
5614
5615 /* Copy over debug args. */
5616 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5617 {
5618 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5619 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5620 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5621 old_debug_args = decl_debug_args_lookup (old_decl);
5622 if (old_debug_args)
5623 {
5624 new_debug_args = decl_debug_args_insert (new_decl);
5625 *new_debug_args = vec_safe_copy (*old_debug_args);
5626 }
5627 }
5628
5629 /* Output the inlining info for this abstract function, since it has been
5630 inlined. If we don't do this now, we can lose the information about the
5631 variables in the function when the blocks get blown away as soon as we
5632 remove the cgraph node. */
5633 (*debug_hooks->outlining_inline_function) (old_decl);
5634
5635 DECL_ARTIFICIAL (new_decl) = 1;
5636 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5637 if (DECL_ORIGIN (old_decl) == old_decl)
5638 old_version_node->used_as_abstract_origin = true;
5639 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5640
5641 /* Prepare the data structures for the tree copy. */
5642 memset (&id, 0, sizeof (id));
5643
5644 /* Prepare the set of statements that will need folding after copying. */
5645 id.statements_to_fold = new hash_set<gimple>;
5646
5647 id.decl_map = new hash_map<tree, tree>;
5648 id.debug_map = NULL;
5649 id.src_fn = old_decl;
5650 id.dst_fn = new_decl;
5651 id.src_node = old_version_node;
5652 id.dst_node = new_version_node;
5653 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5654 id.blocks_to_copy = blocks_to_copy;
5655
5656 id.copy_decl = copy_decl_no_change;
5657 id.transform_call_graph_edges
5658 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5659 id.transform_new_cfg = true;
5660 id.transform_return_to_modify = false;
5661 id.transform_parameter = false;
5662 id.transform_lang_insert_block = NULL;
5663
5664 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5665 (DECL_STRUCT_FUNCTION (old_decl));
5666 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5667 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5668 initialize_cfun (new_decl, old_decl,
5669 old_entry_block->count);
5670 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5671 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5672 = id.src_cfun->gimple_df->ipa_pta;
5673
5674 /* Copy the function's static chain. */
5675 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5676 if (p)
5677 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5678 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5679 &id);
5680
5681 /* If there's a tree_map, prepare for substitution. */
5682 if (tree_map)
5683 for (i = 0; i < tree_map->length (); i++)
5684 {
5685 gimple init;
5686 replace_info = (*tree_map)[i];
5687 if (replace_info->replace_p)
5688 {
5689 if (!replace_info->old_tree)
5690 {
5691 int i = replace_info->parm_num;
5692 tree parm;
5693 tree req_type;
5694
5695 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5696 i --;
5697 replace_info->old_tree = parm;
5698 req_type = TREE_TYPE (parm);
5699 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5700 {
5701 if (fold_convertible_p (req_type, replace_info->new_tree))
5702 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5703 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5704 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5705 else
5706 {
5707 if (dump_file)
5708 {
5709 fprintf (dump_file, " const ");
5710 print_generic_expr (dump_file, replace_info->new_tree, 0);
5711 fprintf (dump_file, " can't be converted to param ");
5712 print_generic_expr (dump_file, parm, 0);
5713 fprintf (dump_file, "\n");
5714 }
5715 replace_info->old_tree = NULL;
5716 }
5717 }
5718 }
5719 else
5720 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5721 if (replace_info->old_tree)
5722 {
5723 init = setup_one_parameter (&id, replace_info->old_tree,
5724 replace_info->new_tree, id.src_fn,
5725 NULL,
5726 &vars);
5727 if (init)
5728 init_stmts.safe_push (init);
5729 }
5730 }
5731 }
5732 /* Copy the function's arguments. */
5733 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5734 DECL_ARGUMENTS (new_decl) =
5735 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5736 args_to_skip, &vars);
5737
5738 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5739 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5740
5741 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5742
5743 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5744 /* Add local vars. */
5745 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5746
5747 if (DECL_RESULT (old_decl) == NULL_TREE)
5748 ;
5749 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5750 {
5751 DECL_RESULT (new_decl)
5752 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5753 RESULT_DECL, NULL_TREE, void_type_node);
5754 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5755 cfun->returns_struct = 0;
5756 cfun->returns_pcc_struct = 0;
5757 }
5758 else
5759 {
5760 tree old_name;
5761 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5762 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5763 if (gimple_in_ssa_p (id.src_cfun)
5764 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5765 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5766 {
5767 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5768 insert_decl_map (&id, old_name, new_name);
5769 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5770 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5771 }
5772 }
5773
5774 /* Set up the destination function's loop tree. */
5775 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5776 {
5777 cfun->curr_properties &= ~PROP_loops;
5778 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5779 cfun->curr_properties |= PROP_loops;
5780 }
5781
5782 /* Copy the function's body. */
5783 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5784 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5785 new_entry);
5786
5787 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5788 number_blocks (new_decl);
5789
5790 /* We want to create the BB unconditionally, so that the addition of
5791 debug stmts doesn't affect BB count, which may in the end cause
5792 codegen differences. */
5793 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5794 while (init_stmts.length ())
5795 insert_init_stmt (&id, bb, init_stmts.pop ());
5796 update_clone_info (&id);
5797
5798 /* Remap the nonlocal_goto_save_area, if any. */
5799 if (cfun->nonlocal_goto_save_area)
5800 {
5801 struct walk_stmt_info wi;
5802
5803 memset (&wi, 0, sizeof (wi));
5804 wi.info = &id;
5805 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5806 }
5807
5808 /* Clean up. */
5809 delete id.decl_map;
5810 if (id.debug_map)
5811 delete id.debug_map;
5812 free_dominance_info (CDI_DOMINATORS);
5813 free_dominance_info (CDI_POST_DOMINATORS);
5814
5815 fold_marked_statements (0, id.statements_to_fold);
5816 delete id.statements_to_fold;
5817 fold_cond_expr_cond ();
5818 delete_unreachable_blocks_update_callgraph (&id);
5819 if (id.dst_node->definition)
5820 cgraph_edge::rebuild_references ();
5821 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5822 {
5823 calculate_dominance_info (CDI_DOMINATORS);
5824 fix_loop_structure (NULL);
5825 }
5826 update_ssa (TODO_update_ssa);
5827
5828 /* After partial cloning we need to rescale frequencies, so they are
5829 within the proper range in the cloned function. */
5830 if (new_entry)
5831 {
5832 struct cgraph_edge *e;
5833 rebuild_frequencies ();
5834
5835 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5836 for (e = new_version_node->callees; e; e = e->next_callee)
5837 {
5838 basic_block bb = gimple_bb (e->call_stmt);
5839 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5840 bb);
5841 e->count = bb->count;
5842 }
5843 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5844 {
5845 basic_block bb = gimple_bb (e->call_stmt);
5846 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5847 bb);
5848 e->count = bb->count;
5849 }
5850 }
5851
5852 free_dominance_info (CDI_DOMINATORS);
5853 free_dominance_info (CDI_POST_DOMINATORS);
5854
5855 gcc_assert (!id.debug_stmts.exists ());
5856 pop_cfun ();
5857 return;
5858 }
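
/* Illustrative call (a sketch only; the real callers live in the cgraph
   cloning machinery, and both decls are assumed to already have cgraph
   nodes): a plain copy of OLD_DECL into NEW_DECL with no parameter or
   return-value changes amounts to

       tree_function_versioning (old_decl, new_decl, NULL, false,
                                 NULL, false, NULL, NULL);

   Supplying a bitmap for ARGS_TO_SKIP instead drops the marked
   parameters from the copy's DECL_ARGUMENTS and turns them into local
   VAR_DECLs, and SKIP_RETURN = true makes the copy return void.  */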
5859
5860 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5861 the callee and return the inlined body on success. */
5862
5863 tree
5864 maybe_inline_call_in_expr (tree exp)
5865 {
5866 tree fn = get_callee_fndecl (exp);
5867
5868 /* We can only try to inline "const" functions. */
5869 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5870 {
5871 call_expr_arg_iterator iter;
5872 copy_body_data id;
5873 tree param, arg, t;
5874 hash_map<tree, tree> decl_map;
5875
5876 /* Remap the parameters. */
5877 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5878 param;
5879 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5880 decl_map.put (param, arg);
5881
5882 memset (&id, 0, sizeof (id));
5883 id.src_fn = fn;
5884 id.dst_fn = current_function_decl;
5885 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5886 id.decl_map = &decl_map;
5887
5888 id.copy_decl = copy_decl_no_change;
5889 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5890 id.transform_new_cfg = false;
5891 id.transform_return_to_modify = true;
5892 id.transform_parameter = true;
5893 id.transform_lang_insert_block = NULL;
5894
5895 /* Make sure not to unshare trees behind the front-end's back
5896 since front-end specific mechanisms may rely on sharing. */
5897 id.regimplify = false;
5898 id.do_not_unshare = true;
5899
5900 /* We're not inside any EH region. */
5901 id.eh_lp_nr = 0;
5902
5903 t = copy_tree_body (&id);
5904
5905 /* We can only return something suitable for use in a GENERIC
5906 expression tree. */
5907 if (TREE_CODE (t) == MODIFY_EXPR)
5908 return TREE_OPERAND (t, 1);
5909 }
5910
5911 return NULL_TREE;
5912 }
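
/* Illustrative use (a sketch; FNDECL and ARG are assumed values): given a
   GENERIC call such as

       tree call = build_call_expr (fndecl, 1, arg);
       tree body = maybe_inline_call_in_expr (call);

   BODY is the value computed by the inlined callee when FNDECL is a
   "const" function with a saved GENERIC body, and NULL_TREE otherwise,
   in which case the caller keeps using the CALL_EXPR.  */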
5913
5914 /* Duplicate a type, fields and all. */
5915
5916 tree
5917 build_duplicate_type (tree type)
5918 {
5919 struct copy_body_data id;
5920
5921 memset (&id, 0, sizeof (id));
5922 id.src_fn = current_function_decl;
5923 id.dst_fn = current_function_decl;
5924 id.src_cfun = cfun;
5925 id.decl_map = new hash_map<tree, tree>;
5926 id.debug_map = NULL;
5927 id.copy_decl = copy_decl_no_change;
5928
5929 type = remap_type_1 (type, &id);
5930
5931 delete id.decl_map;
5932 if (id.debug_map)
5933 delete id.debug_map;
5934
5935 TYPE_CANONICAL (type) = type;
5936
5937 return type;
5938 }
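
/* Illustrative use (a sketch; ORIG_TYPE is an assumed type): a front end
   that needs a variant it can modify independently can write

       tree dup = build_duplicate_type (orig_type);

   DUP gets its own copies of the fields and is its own TYPE_CANONICAL,
   so later changes to it do not affect ORIG_TYPE.  */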
5939
5940 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
5941 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
5942 evaluation. */
5943
5944 tree
5945 copy_fn (tree fn, tree& parms, tree& result)
5946 {
5947 copy_body_data id;
5948 tree param;
5949 hash_map<tree, tree> decl_map;
5950
5951 tree *p = &parms;
5952 *p = NULL_TREE;
5953
5954 memset (&id, 0, sizeof (id));
5955 id.src_fn = fn;
5956 id.dst_fn = current_function_decl;
5957 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5958 id.decl_map = &decl_map;
5959
5960 id.copy_decl = copy_decl_no_change;
5961 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5962 id.transform_new_cfg = false;
5963 id.transform_return_to_modify = false;
5964 id.transform_parameter = true;
5965 id.transform_lang_insert_block = NULL;
5966
5967 /* Make sure not to unshare trees behind the front-end's back
5968 since front-end specific mechanisms may rely on sharing. */
5969 id.regimplify = false;
5970 id.do_not_unshare = true;
5971
5972 /* We're not inside any EH region. */
5973 id.eh_lp_nr = 0;
5974
5975 /* Remap the parameters and result and return them to the caller. */
5976 for (param = DECL_ARGUMENTS (fn);
5977 param;
5978 param = DECL_CHAIN (param))
5979 {
5980 *p = remap_decl (param, &id);
5981 p = &DECL_CHAIN (*p);
5982 }
5983
5984 if (DECL_RESULT (fn))
5985 result = remap_decl (DECL_RESULT (fn), &id);
5986 else
5987 result = NULL_TREE;
5988
5989 return copy_tree_body (&id);
5990 }
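
/* Illustrative use (a sketch of the constexpr-evaluation pattern described
   in the comment above copy_fn; FNDECL is an assumed FUNCTION_DECL):

       tree parms, result;
       tree body = copy_fn (fndecl, parms, result);

   The evaluator can then bind actual argument values to the parameter
   copies chained on PARMS, evaluate BODY, and read the computed value
   back through RESULT.  */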