Set PROP_gimple_lva for functions without IFN_VA_ARG
gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "flags.h"
41 #include "params.h"
42 #include "insn-config.h"
43 #include "hashtab.h"
44 #include "langhooks.h"
45 #include "predict.h"
46 #include "hard-reg-set.h"
47 #include "function.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "cfganal.h"
51 #include "basic-block.h"
52 #include "tree-iterator.h"
53 #include "intl.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "gimple-fold.h"
57 #include "tree-eh.h"
58 #include "gimple-expr.h"
59 #include "is-a.h"
60 #include "gimple.h"
61 #include "gimplify.h"
62 #include "gimple-iterator.h"
63 #include "gimplify-me.h"
64 #include "gimple-walk.h"
65 #include "gimple-ssa.h"
66 #include "tree-cfg.h"
67 #include "tree-phinodes.h"
68 #include "ssa-iterators.h"
69 #include "stringpool.h"
70 #include "tree-ssanames.h"
71 #include "tree-into-ssa.h"
72 #include "rtl.h"
73 #include "statistics.h"
74 #include "real.h"
75 #include "fixed-value.h"
76 #include "expmed.h"
77 #include "dojump.h"
78 #include "explow.h"
79 #include "emit-rtl.h"
80 #include "varasm.h"
81 #include "stmt.h"
82 #include "expr.h"
83 #include "tree-dfa.h"
84 #include "tree-ssa.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "hash-map.h"
89 #include "plugin-api.h"
90 #include "ipa-ref.h"
91 #include "cgraph.h"
92 #include "alloc-pool.h"
93 #include "symbol-summary.h"
94 #include "ipa-prop.h"
95 #include "value-prof.h"
96 #include "tree-pass.h"
97 #include "target.h"
98 #include "cfgloop.h"
99 #include "builtins.h"
100 #include "tree-chkp.h"
101
102 #include "rtl.h" /* FIXME: For asm_str_count. */
103
104 /* I'm not real happy about this, but we need to handle gimple and
105 non-gimple trees. */
106
107 /* Inlining, Cloning, Versioning, Parallelization
108
109 Inlining: a function body is duplicated, but the PARM_DECLs are
110 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
111 MODIFY_EXPRs that store to a dedicated returned-value variable.
112 The duplicated eh_region info of the copy will later be appended
113 to the info for the caller; the eh_region info in copied throwing
114 statements and RESX statements are adjusted accordingly.
115
116 Cloning: (only in C++) We have one body for a con/de/structor, and
117 multiple function decls, each with a unique parameter list.
118 Duplicate the body, using the given splay tree; some parameters
119 will become constants (like 0 or 1).
120
121 Versioning: a function body is duplicated and the result is a new
122 function rather than into blocks of an existing function as with
123 inlining. Some parameters will become constants.
124
125 Parallelization: a region of a function is duplicated resulting in
126 a new function. Variables may be replaced with complex expressions
127 to enable shared variable semantics.
128
129 All of these will simultaneously lookup any callgraph edges. If
130 we're going to inline the duplicated function body, and the given
131 function has some cloned callgraph nodes (one for each place this
132 function will be inlined) those callgraph edges will be duplicated.
133 If we're cloning the body, those callgraph edges will be
134 updated to point into the new body. (Note that the original
135 callgraph node and edge list will not be altered.)
136
137 See the CALL_EXPR handling case in copy_tree_body_r (). */
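
/* Illustrative sketch (not taken from this file): inlining

     int inc (int x) { return x + 1; }

   into "y = inc (3);" roughly yields

     x.1 = 3; retval.2 = x.1 + 1; y = retval.2;

   where x.1 is the VAR_DECL remapped from the PARM_DECL x, and the
   RETURN_EXPR has become an assignment to the dedicated return
   variable.  The names x.1 and retval.2 are hypothetical stand-ins
   for the compiler-generated temporaries.  */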
138
139 /* To Do:
140
141 o In order to make inlining-on-trees work, we pessimized
142 function-local static constants. In particular, they are now
143 always output, even when not addressed. Fix this by treating
144 function-local static constants just like global static
145 constants; the back-end already knows not to output them if they
146 are not needed.
147
148 o Provide heuristics to clamp inlining of recursive template
149 calls? */
150
151
152 /* Weights that estimate_num_insns uses to estimate the size of the
153 produced code. */
154
155 eni_weights eni_size_weights;
156
157 /* Weights that estimate_num_insns uses to estimate the time necessary
158 to execute the produced code. */
159
160 eni_weights eni_time_weights;
161
162 /* Prototypes. */
163
164 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
165 basic_block);
166 static void remap_block (tree *, copy_body_data *);
167 static void copy_bind_expr (tree *, int *, copy_body_data *);
168 static void declare_inline_vars (tree, tree);
169 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
170 static void prepend_lexical_block (tree current_block, tree new_block);
171 static tree copy_decl_to_var (tree, copy_body_data *);
172 static tree copy_result_decl_to_var (tree, copy_body_data *);
173 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
174 static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
175 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
176 static void insert_init_stmt (copy_body_data *, basic_block, gimple);
177
178 /* Insert a tree->tree mapping for ID.  Although the name suggests
179    that the trees should be variables, it is used for more than that.  */
180
181 void
182 insert_decl_map (copy_body_data *id, tree key, tree value)
183 {
184 id->decl_map->put (key, value);
185
186 /* Always insert an identity map as well. If we see this same new
187 node again, we won't want to duplicate it a second time. */
188 if (key != value)
189 id->decl_map->put (value, value);
190 }
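
/* For example (illustrative): after mapping old_var -> new_var we also
   record new_var -> new_var, so a later walk that reaches the
   already-copied new_var resolves it to itself instead of duplicating
   it a second time.  */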
191
192 /* Insert a tree->tree mapping for ID. This is only used for
193 variables. */
194
195 static void
196 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
197 {
198 if (!gimple_in_ssa_p (id->src_cfun))
199 return;
200
201 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
202 return;
203
204 if (!target_for_debug_bind (key))
205 return;
206
207 gcc_assert (TREE_CODE (key) == PARM_DECL);
208 gcc_assert (TREE_CODE (value) == VAR_DECL);
209
210 if (!id->debug_map)
211 id->debug_map = new hash_map<tree, tree>;
212
213 id->debug_map->put (key, value);
214 }
215
216 /* If nonzero, we're remapping the contents of inlined debug
217 statements. If negative, an error has occurred, such as a
218 reference to a variable that isn't available in the inlined
219 context. */
220 static int processing_debug_stmt = 0;
221
222 /* Construct new SSA name for old NAME. ID is the inline context. */
223
224 static tree
225 remap_ssa_name (tree name, copy_body_data *id)
226 {
227 tree new_tree, var;
228 tree *n;
229
230 gcc_assert (TREE_CODE (name) == SSA_NAME);
231
232 n = id->decl_map->get (name);
233 if (n)
234 return unshare_expr (*n);
235
236 if (processing_debug_stmt)
237 {
238 if (SSA_NAME_IS_DEFAULT_DEF (name)
239 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
240 && id->entry_bb == NULL
241 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
242 {
243 tree vexpr = make_node (DEBUG_EXPR_DECL);
244 gimple def_temp;
245 gimple_stmt_iterator gsi;
246 tree val = SSA_NAME_VAR (name);
247
248 n = id->decl_map->get (val);
249 if (n != NULL)
250 val = *n;
251 if (TREE_CODE (val) != PARM_DECL)
252 {
253 processing_debug_stmt = -1;
254 return name;
255 }
256 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
257 DECL_ARTIFICIAL (vexpr) = 1;
258 TREE_TYPE (vexpr) = TREE_TYPE (name);
259 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
260 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
261 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
262 return vexpr;
263 }
264
265 processing_debug_stmt = -1;
266 return name;
267 }
268
269 /* Remap anonymous SSA names or SSA names of anonymous decls. */
270 var = SSA_NAME_VAR (name);
271 if (!var
272 || (!SSA_NAME_IS_DEFAULT_DEF (name)
273 && TREE_CODE (var) == VAR_DECL
274 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
275 && DECL_ARTIFICIAL (var)
276 && DECL_IGNORED_P (var)
277 && !DECL_NAME (var)))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
281 if (!var && SSA_NAME_IDENTIFIER (name))
282 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
283 insert_decl_map (id, name, new_tree);
284 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
285 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
286 /* At least IPA points-to info can be directly transferred. */
287 if (id->src_cfun->gimple_df
288 && id->src_cfun->gimple_df->ipa_pta
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 return new_tree;
296 }
297
298   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
299      that in copy_bb.  */
300 new_tree = remap_decl (var, id);
301
302   /* We might've substituted a constant or another SSA_NAME for
303      the variable.
304
305      Replace the SSA name representing the RESULT_DECL by the variable
306      during inlining: this saves us from the need to introduce a PHI node
307      in the case the return value is only partly initialized.  */
308 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
309 && (!SSA_NAME_VAR (name)
310 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
311 || !id->transform_return_to_modify))
312 {
313 struct ptr_info_def *pi;
314 new_tree = make_ssa_name (new_tree);
315 insert_decl_map (id, name, new_tree);
316 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
317 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
318 /* At least IPA points-to info can be directly transferred. */
319 if (id->src_cfun->gimple_df
320 && id->src_cfun->gimple_df->ipa_pta
321 && (pi = SSA_NAME_PTR_INFO (name))
322 && !pi->pt.anything)
323 {
324 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
325 new_pi->pt = pi->pt;
326 }
327 if (SSA_NAME_IS_DEFAULT_DEF (name))
328 {
329 	  /* By inlining a function having an uninitialized variable, we might
330 	     extend its lifetime (the variable might get reused).  This causes
331 	     an ICE in the case we end up extending the lifetime of an SSA name
332 	     across an abnormal edge, and it also increases register pressure.
333
334 	     We simply initialize all uninitialized vars to 0, except for the
335 	     case we are inlining into the very first BB.  We could avoid this
336 	     for all BBs that are not inside strongly connected regions of the
337 	     CFG, but that is expensive to test.  */
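	  /* Hypothetical sketch of the hazard: if the callee's
	     uninitialized i_2(D) ends up live across an abnormal edge
	     of the caller after inlining, out-of-SSA coalescing can
	     fail (an ICE); seeding i = 0 in the entry block sidesteps
	     this at the cost of one store.  */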
338 if (id->entry_bb
339 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
340 && (!SSA_NAME_VAR (name)
341 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
342 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
343 0)->dest
344 || EDGE_COUNT (id->entry_bb->preds) != 1))
345 {
346 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
347 gimple init_stmt;
348 tree zero = build_zero_cst (TREE_TYPE (new_tree));
349
350 init_stmt = gimple_build_assign (new_tree, zero);
351 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
352 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
353 }
354 else
355 {
356 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
357 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
358 }
359 }
360 }
361 else
362 insert_decl_map (id, name, new_tree);
363 return new_tree;
364 }
365
366 /* Remap DECL during the copying of the BLOCK tree for the function. */
367
368 tree
369 remap_decl (tree decl, copy_body_data *id)
370 {
371 tree *n;
372
373 /* We only remap local variables in the current function. */
374
375 /* See if we have remapped this declaration. */
376
377 n = id->decl_map->get (decl);
378
379 if (!n && processing_debug_stmt)
380 {
381 processing_debug_stmt = -1;
382 return decl;
383 }
384
385 /* If we didn't already have an equivalent for this declaration,
386 create one now. */
387 if (!n)
388 {
389 /* Make a copy of the variable or label. */
390 tree t = id->copy_decl (decl, id);
391
392 /* Remember it, so that if we encounter this local entity again
393 we can reuse this copy. Do this early because remap_type may
394 need this decl for TYPE_STUB_DECL. */
395 insert_decl_map (id, decl, t);
396
397 if (!DECL_P (t))
398 return t;
399
400 /* Remap types, if necessary. */
401 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
402 if (TREE_CODE (t) == TYPE_DECL)
403 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404
405 /* Remap sizes as necessary. */
406 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
407 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
408
409 /* If fields, do likewise for offset and qualifier. */
410 if (TREE_CODE (t) == FIELD_DECL)
411 {
412 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
413 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
414 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
415 }
416
417 return t;
418 }
419
420 if (id->do_not_unshare)
421 return *n;
422 else
423 return unshare_expr (*n);
424 }
425
426 static tree
427 remap_type_1 (tree type, copy_body_data *id)
428 {
429 tree new_tree, t;
430
431   /* We do need a copy.  Build and register it now.  If this is a pointer or
432 reference type, remap the designated type and make a new pointer or
433 reference type. */
434 if (TREE_CODE (type) == POINTER_TYPE)
435 {
436 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
437 TYPE_MODE (type),
438 TYPE_REF_CAN_ALIAS_ALL (type));
439 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
440 new_tree = build_type_attribute_qual_variant (new_tree,
441 TYPE_ATTRIBUTES (type),
442 TYPE_QUALS (type));
443 insert_decl_map (id, type, new_tree);
444 return new_tree;
445 }
446 else if (TREE_CODE (type) == REFERENCE_TYPE)
447 {
448 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
457 }
458 else
459 new_tree = copy_node (type);
460
461 insert_decl_map (id, type, new_tree);
462
463 /* This is a new type, not a copy of an old type. Need to reassociate
464 variants. We can handle everything except the main variant lazily. */
465 t = TYPE_MAIN_VARIANT (type);
466 if (type != t)
467 {
468 t = remap_type (t, id);
469 TYPE_MAIN_VARIANT (new_tree) = t;
470 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
471 TYPE_NEXT_VARIANT (t) = new_tree;
472 }
473 else
474 {
475 TYPE_MAIN_VARIANT (new_tree) = new_tree;
476 TYPE_NEXT_VARIANT (new_tree) = NULL;
477 }
478
479 if (TYPE_STUB_DECL (type))
480 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
481
482 /* Lazily create pointer and reference types. */
483 TYPE_POINTER_TO (new_tree) = NULL;
484 TYPE_REFERENCE_TO (new_tree) = NULL;
485
486   /* Copy all types that may contain references to local variables; be sure
487      to preserve sharing between the type and its main variant when possible.  */
488 switch (TREE_CODE (new_tree))
489 {
490 case INTEGER_TYPE:
491 case REAL_TYPE:
492 case FIXED_POINT_TYPE:
493 case ENUMERAL_TYPE:
494 case BOOLEAN_TYPE:
495 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
496 {
497 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
498 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
499
500 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
502 }
503 else
504 {
505 t = TYPE_MIN_VALUE (new_tree);
506 if (t && TREE_CODE (t) != INTEGER_CST)
507 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
508
509 t = TYPE_MAX_VALUE (new_tree);
510 if (t && TREE_CODE (t) != INTEGER_CST)
511 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
512 }
513 return new_tree;
514
515 case FUNCTION_TYPE:
516 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
518 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
519 else
520 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
521 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
523 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
524 else
525 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
526 return new_tree;
527
528 case ARRAY_TYPE:
529 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532 else
533 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534
535 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
536 {
537 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
538 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
539 }
540 else
541 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
542 break;
543
544 case RECORD_TYPE:
545 case UNION_TYPE:
546 case QUAL_UNION_TYPE:
547 if (TYPE_MAIN_VARIANT (type) != type
548 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
549 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
550 else
551 {
552 tree f, nf = NULL;
553
554 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
555 {
556 t = remap_decl (f, id);
557 DECL_CONTEXT (t) = new_tree;
558 DECL_CHAIN (t) = nf;
559 nf = t;
560 }
561 TYPE_FIELDS (new_tree) = nreverse (nf);
562 }
563 break;
564
565 case OFFSET_TYPE:
566 default:
567 /* Shouldn't have been thought variable sized. */
568 gcc_unreachable ();
569 }
570
571   /* All variants of the type share the same size, so use the already remapped data.  */
572 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
573 {
574 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
575 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
576
577 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
578 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
579 }
580 else
581 {
582 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
583 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
584 }
585
586 return new_tree;
587 }
588
589 tree
590 remap_type (tree type, copy_body_data *id)
591 {
592 tree *node;
593 tree tmp;
594
595 if (type == NULL)
596 return type;
597
598 /* See if we have remapped this type. */
599 node = id->decl_map->get (type);
600 if (node)
601 return *node;
602
603 /* The type only needs remapping if it's variably modified. */
604 if (! variably_modified_type_p (type, id->src_fn))
605 {
606 insert_decl_map (id, type, type);
607 return type;
608 }
609
610 id->remapping_type_depth++;
611 tmp = remap_type_1 (type, id);
612 id->remapping_type_depth--;
613
614 return tmp;
615 }
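
/* Illustrative example: in

     void f (int n) { char buf[n]; use (buf); }

   the type of buf is variably modified (its size depends on the
   PARM_DECL n), so copying f's body must remap it; a fixed-size
   char[16] is returned unchanged via the identity mapping above.
   (use () is a hypothetical callee, present only for the example.)  */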
616
617 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
618
619 static bool
620 can_be_nonlocal (tree decl, copy_body_data *id)
621 {
622   /* We cannot duplicate function decls.  */
623 if (TREE_CODE (decl) == FUNCTION_DECL)
624 return true;
625
626 /* Local static vars must be non-local or we get multiple declaration
627 problems. */
628 if (TREE_CODE (decl) == VAR_DECL
629 && !auto_var_in_fn_p (decl, id->src_fn))
630 return true;
631
632 return false;
633 }
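
/* Example (illustrative): in

     int f (void) { static int cnt; return ++cnt; }

   cnt must remain one shared decl no matter how many times f is
   inlined, so it is treated as nonlocal rather than remapped.  */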
634
635 static tree
636 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
637 copy_body_data *id)
638 {
639 tree old_var;
640 tree new_decls = NULL_TREE;
641
642 /* Remap its variables. */
643 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
644 {
645 tree new_var;
646
647 if (can_be_nonlocal (old_var, id))
648 {
649 /* We need to add this variable to the local decls as otherwise
650 nothing else will do so. */
651 if (TREE_CODE (old_var) == VAR_DECL
652 && ! DECL_EXTERNAL (old_var))
653 add_local_decl (cfun, old_var);
654 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
655 && !DECL_IGNORED_P (old_var)
656 && nonlocalized_list)
657 vec_safe_push (*nonlocalized_list, old_var);
658 continue;
659 }
660
661 /* Remap the variable. */
662 new_var = remap_decl (old_var, id);
663
664 /* If we didn't remap this variable, we can't mess with its
665 TREE_CHAIN. If we remapped this variable to the return slot, it's
666 already declared somewhere else, so don't declare it here. */
667
668 if (new_var == id->retvar)
669 ;
670 else if (!new_var)
671 {
672 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
673 && !DECL_IGNORED_P (old_var)
674 && nonlocalized_list)
675 vec_safe_push (*nonlocalized_list, old_var);
676 }
677 else
678 {
679 gcc_assert (DECL_P (new_var));
680 DECL_CHAIN (new_var) = new_decls;
681 new_decls = new_var;
682
683 /* Also copy value-expressions. */
684 if (TREE_CODE (new_var) == VAR_DECL
685 && DECL_HAS_VALUE_EXPR_P (new_var))
686 {
687 tree tem = DECL_VALUE_EXPR (new_var);
688 bool old_regimplify = id->regimplify;
689 id->remapping_type_depth++;
690 walk_tree (&tem, copy_tree_body_r, id, NULL);
691 id->remapping_type_depth--;
692 id->regimplify = old_regimplify;
693 SET_DECL_VALUE_EXPR (new_var, tem);
694 }
695 }
696 }
697
698 return nreverse (new_decls);
699 }
700
701 /* Copy the BLOCK to contain remapped versions of the variables
702 therein. And hook the new block into the block-tree. */
703
704 static void
705 remap_block (tree *block, copy_body_data *id)
706 {
707 tree old_block;
708 tree new_block;
709
710 /* Make the new block. */
711 old_block = *block;
712 new_block = make_node (BLOCK);
713 TREE_USED (new_block) = TREE_USED (old_block);
714 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
715 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
716 BLOCK_NONLOCALIZED_VARS (new_block)
717 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
718 *block = new_block;
719
720 /* Remap its variables. */
721 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
722 &BLOCK_NONLOCALIZED_VARS (new_block),
723 id);
724
725 if (id->transform_lang_insert_block)
726 id->transform_lang_insert_block (new_block);
727
728 /* Remember the remapped block. */
729 insert_decl_map (id, old_block, new_block);
730 }
731
732 /* Copy the whole block tree and root it in id->block. */
733 static tree
734 remap_blocks (tree block, copy_body_data *id)
735 {
736 tree t;
737 tree new_tree = block;
738
739 if (!block)
740 return NULL;
741
742 remap_block (&new_tree, id);
743 gcc_assert (new_tree != block);
744 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
745 prepend_lexical_block (new_tree, remap_blocks (t, id));
746   /* Blocks are in arbitrary order, but to make things slightly prettier,
747      do not swap the order when producing a copy.  */
748 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
749 return new_tree;
750 }
751
752 /* Remap the block tree rooted at BLOCK to nothing. */
753 static void
754 remap_blocks_to_null (tree block, copy_body_data *id)
755 {
756 tree t;
757 insert_decl_map (id, block, NULL_TREE);
758 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
759 remap_blocks_to_null (t, id);
760 }
761
762 static void
763 copy_statement_list (tree *tp)
764 {
765 tree_stmt_iterator oi, ni;
766 tree new_tree;
767
768 new_tree = alloc_stmt_list ();
769 ni = tsi_start (new_tree);
770 oi = tsi_start (*tp);
771 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
772 *tp = new_tree;
773
774 for (; !tsi_end_p (oi); tsi_next (&oi))
775 {
776 tree stmt = tsi_stmt (oi);
777 if (TREE_CODE (stmt) == STATEMENT_LIST)
778 /* This copy is not redundant; tsi_link_after will smash this
779 STATEMENT_LIST into the end of the one we're building, and we
780 don't want to do that with the original. */
781 copy_statement_list (&stmt);
782 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
783 }
784 }
785
786 static void
787 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
788 {
789 tree block = BIND_EXPR_BLOCK (*tp);
790 /* Copy (and replace) the statement. */
791 copy_tree_r (tp, walk_subtrees, NULL);
792 if (block)
793 {
794 remap_block (&block, id);
795 BIND_EXPR_BLOCK (*tp) = block;
796 }
797
798 if (BIND_EXPR_VARS (*tp))
799 /* This will remap a lot of the same decls again, but this should be
800 harmless. */
801 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
802 }
803
804
805 /* Create a new gimple_seq by remapping all the statements in BODY
806 using the inlining information in ID. */
807
808 static gimple_seq
809 remap_gimple_seq (gimple_seq body, copy_body_data *id)
810 {
811 gimple_stmt_iterator si;
812 gimple_seq new_body = NULL;
813
814 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
815 {
816 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
817 gimple_seq_add_seq (&new_body, new_stmts);
818 }
819
820 return new_body;
821 }
822
823
824 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
825 block using the mapping information in ID. */
826
827 static gimple
828 copy_gimple_bind (gbind *stmt, copy_body_data *id)
829 {
830 gimple new_bind;
831 tree new_block, new_vars;
832 gimple_seq body, new_body;
833
834 /* Copy the statement. Note that we purposely don't use copy_stmt
835 here because we need to remap statements as we copy. */
836 body = gimple_bind_body (stmt);
837 new_body = remap_gimple_seq (body, id);
838
839 new_block = gimple_bind_block (stmt);
840 if (new_block)
841 remap_block (&new_block, id);
842
843 /* This will remap a lot of the same decls again, but this should be
844 harmless. */
845 new_vars = gimple_bind_vars (stmt);
846 if (new_vars)
847 new_vars = remap_decls (new_vars, NULL, id);
848
849 new_bind = gimple_build_bind (new_vars, new_body, new_block);
850
851 return new_bind;
852 }
853
854 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
855
856 static bool
857 is_parm (tree decl)
858 {
859 if (TREE_CODE (decl) == SSA_NAME)
860 {
861 decl = SSA_NAME_VAR (decl);
862 if (!decl)
863 return false;
864 }
865
866 return (TREE_CODE (decl) == PARM_DECL);
867 }
868
869 /* Remap the dependence CLIQUE from the source to the destination function
870 as specified in ID. */
871
872 static unsigned short
873 remap_dependence_clique (copy_body_data *id, unsigned short clique)
874 {
875 if (clique == 0)
876 return 0;
877 if (!id->dependence_map)
878 id->dependence_map
879 = new hash_map<unsigned short, unsigned short, dependence_hasher>;
880 bool existed;
881 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
882 if (!existed)
883 newc = ++cfun->last_clique;
884 return newc;
885 }
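
/* Note: cliques are small per-function identifiers, so reusing the
   source function's numbers could collide with cliques already in use
   in the destination; drawing fresh numbers from cfun->last_clique
   keeps the copied MEM_REF dependence info disjoint.  */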
886
887 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
888 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
889    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
890 recursing into the children nodes of *TP. */
891
892 static tree
893 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
894 {
895 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
896 copy_body_data *id = (copy_body_data *) wi_p->info;
897 tree fn = id->src_fn;
898
899 if (TREE_CODE (*tp) == SSA_NAME)
900 {
901 *tp = remap_ssa_name (*tp, id);
902 *walk_subtrees = 0;
903 return NULL;
904 }
905 else if (auto_var_in_fn_p (*tp, fn))
906 {
907 /* Local variables and labels need to be replaced by equivalent
908 variables. We don't want to copy static variables; there's
909 only one of those, no matter how many times we inline the
910 containing function. Similarly for globals from an outer
911 function. */
912 tree new_decl;
913
914 /* Remap the declaration. */
915 new_decl = remap_decl (*tp, id);
916 gcc_assert (new_decl);
917 /* Replace this variable with the copy. */
918 STRIP_TYPE_NOPS (new_decl);
919 /* ??? The C++ frontend uses void * pointer zero to initialize
920 any other type. This confuses the middle-end type verification.
921 	 As cloned bodies do not go through gimplification again, the fixup
922 	 there doesn't trigger.  */
923 if (TREE_CODE (new_decl) == INTEGER_CST
924 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
925 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
926 *tp = new_decl;
927 *walk_subtrees = 0;
928 }
929 else if (TREE_CODE (*tp) == STATEMENT_LIST)
930 gcc_unreachable ();
931 else if (TREE_CODE (*tp) == SAVE_EXPR)
932 gcc_unreachable ();
933 else if (TREE_CODE (*tp) == LABEL_DECL
934 && (!DECL_CONTEXT (*tp)
935 || decl_function_context (*tp) == id->src_fn))
936 /* These may need to be remapped for EH handling. */
937 *tp = remap_decl (*tp, id);
938 else if (TREE_CODE (*tp) == FIELD_DECL)
939 {
940 /* If the enclosing record type is variably_modified_type_p, the field
941 has already been remapped. Otherwise, it need not be. */
942 tree *n = id->decl_map->get (*tp);
943 if (n)
944 *tp = *n;
945 *walk_subtrees = 0;
946 }
947 else if (TYPE_P (*tp))
948 /* Types may need remapping as well. */
949 *tp = remap_type (*tp, id);
950 else if (CONSTANT_CLASS_P (*tp))
951 {
952 /* If this is a constant, we have to copy the node iff the type
953 will be remapped. copy_tree_r will not copy a constant. */
954 tree new_type = remap_type (TREE_TYPE (*tp), id);
955
956 if (new_type == TREE_TYPE (*tp))
957 *walk_subtrees = 0;
958
959 else if (TREE_CODE (*tp) == INTEGER_CST)
960 *tp = wide_int_to_tree (new_type, *tp);
961 else
962 {
963 *tp = copy_node (*tp);
964 TREE_TYPE (*tp) = new_type;
965 }
966 }
967 else
968 {
969 /* Otherwise, just copy the node. Note that copy_tree_r already
970 knows not to copy VAR_DECLs, etc., so this is safe. */
971
972 if (TREE_CODE (*tp) == MEM_REF)
973 {
974 /* We need to re-canonicalize MEM_REFs from inline substitutions
975 that can happen when a pointer argument is an ADDR_EXPR.
976 Recurse here manually to allow that. */
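	  /* Sketch (hypothetical): when inlining substitutes &a for the
	     pointer SSA name p_1, MEM[(int *)p_1] becomes MEM[&a, 0], which
	     the fold_build2 below can canonicalize to a direct access to a,
	     keeping the IL in its expected canonical form.  */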
977 tree ptr = TREE_OPERAND (*tp, 0);
978 tree type = remap_type (TREE_TYPE (*tp), id);
979 tree old = *tp;
980 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
981 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
982 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
983 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
984 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
985 if (MR_DEPENDENCE_CLIQUE (old) != 0)
986 {
987 MR_DEPENDENCE_CLIQUE (*tp)
988 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
989 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
990 }
991 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
992 remapped a parameter as the property might be valid only
993 for the parameter itself. */
994 if (TREE_THIS_NOTRAP (old)
995 && (!is_parm (TREE_OPERAND (old, 0))
996 || (!id->transform_parameter && is_parm (ptr))))
997 TREE_THIS_NOTRAP (*tp) = 1;
998 *walk_subtrees = 0;
999 return NULL;
1000 }
1001
1002 /* Here is the "usual case". Copy this tree node, and then
1003 tweak some special cases. */
1004 copy_tree_r (tp, walk_subtrees, NULL);
1005
1006 if (TREE_CODE (*tp) != OMP_CLAUSE)
1007 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1008
1009 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1010 {
1011 /* The copied TARGET_EXPR has never been expanded, even if the
1012 original node was expanded already. */
1013 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1014 TREE_OPERAND (*tp, 3) = NULL_TREE;
1015 }
1016 else if (TREE_CODE (*tp) == ADDR_EXPR)
1017 {
1018 /* Variable substitution need not be simple. In particular,
1019 the MEM_REF substitution above. Make sure that
1020 TREE_CONSTANT and friends are up-to-date. */
1021 int invariant = is_gimple_min_invariant (*tp);
1022 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1023 recompute_tree_invariant_for_addr_expr (*tp);
1024
1025 /* If this used to be invariant, but is not any longer,
1026 then regimplification is probably needed. */
1027 if (invariant && !is_gimple_min_invariant (*tp))
1028 id->regimplify = true;
1029
1030 *walk_subtrees = 0;
1031 }
1032 }
1033
1034 /* Update the TREE_BLOCK for the cloned expr. */
1035 if (EXPR_P (*tp))
1036 {
1037 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1038 tree old_block = TREE_BLOCK (*tp);
1039 if (old_block)
1040 {
1041 tree *n;
1042 n = id->decl_map->get (TREE_BLOCK (*tp));
1043 if (n)
1044 new_block = *n;
1045 }
1046 TREE_SET_BLOCK (*tp, new_block);
1047 }
1048
1049 /* Keep iterating. */
1050 return NULL_TREE;
1051 }
1052
1053
1054 /* Called from copy_body_id via walk_tree. DATA is really a
1055 `copy_body_data *'. */
1056
1057 tree
1058 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1059 {
1060 copy_body_data *id = (copy_body_data *) data;
1061 tree fn = id->src_fn;
1062 tree new_block;
1063
1064 /* Begin by recognizing trees that we'll completely rewrite for the
1065 inlining context. Our output for these trees is completely
1066      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1067 into an edge). Further down, we'll handle trees that get
1068 duplicated and/or tweaked. */
1069
1070 /* When requested, RETURN_EXPRs should be transformed to just the
1071 contained MODIFY_EXPR. The branch semantics of the return will
1072 be handled elsewhere by manipulating the CFG rather than a statement. */
1073 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1074 {
1075 tree assignment = TREE_OPERAND (*tp, 0);
1076
1077 /* If we're returning something, just turn that into an
1078 assignment into the equivalent of the original RESULT_DECL.
1079 If the "assignment" is just the result decl, the result
1080 decl has already been set (e.g. a recent "foo (&result_decl,
1081 ...)"); just toss the entire RETURN_EXPR. */
1082 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1083 {
1084 /* Replace the RETURN_EXPR with (a copy of) the
1085 MODIFY_EXPR hanging underneath. */
1086 *tp = copy_node (assignment);
1087 }
1088 else /* Else the RETURN_EXPR returns no value. */
1089 {
1090 *tp = NULL;
1091 return (tree) (void *)1;
1092 }
1093 }
1094 else if (TREE_CODE (*tp) == SSA_NAME)
1095 {
1096 *tp = remap_ssa_name (*tp, id);
1097 *walk_subtrees = 0;
1098 return NULL;
1099 }
1100
1101 /* Local variables and labels need to be replaced by equivalent
1102 variables. We don't want to copy static variables; there's only
1103 one of those, no matter how many times we inline the containing
1104 function. Similarly for globals from an outer function. */
1105 else if (auto_var_in_fn_p (*tp, fn))
1106 {
1107 tree new_decl;
1108
1109 /* Remap the declaration. */
1110 new_decl = remap_decl (*tp, id);
1111 gcc_assert (new_decl);
1112 /* Replace this variable with the copy. */
1113 STRIP_TYPE_NOPS (new_decl);
1114 *tp = new_decl;
1115 *walk_subtrees = 0;
1116 }
1117 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1118 copy_statement_list (tp);
1119 else if (TREE_CODE (*tp) == SAVE_EXPR
1120 || TREE_CODE (*tp) == TARGET_EXPR)
1121 remap_save_expr (tp, id->decl_map, walk_subtrees);
1122 else if (TREE_CODE (*tp) == LABEL_DECL
1123 && (! DECL_CONTEXT (*tp)
1124 || decl_function_context (*tp) == id->src_fn))
1125 /* These may need to be remapped for EH handling. */
1126 *tp = remap_decl (*tp, id);
1127 else if (TREE_CODE (*tp) == BIND_EXPR)
1128 copy_bind_expr (tp, walk_subtrees, id);
1129 /* Types may need remapping as well. */
1130 else if (TYPE_P (*tp))
1131 *tp = remap_type (*tp, id);
1132
1133 /* If this is a constant, we have to copy the node iff the type will be
1134 remapped. copy_tree_r will not copy a constant. */
1135 else if (CONSTANT_CLASS_P (*tp))
1136 {
1137 tree new_type = remap_type (TREE_TYPE (*tp), id);
1138
1139 if (new_type == TREE_TYPE (*tp))
1140 *walk_subtrees = 0;
1141
1142 else if (TREE_CODE (*tp) == INTEGER_CST)
1143 *tp = wide_int_to_tree (new_type, *tp);
1144 else
1145 {
1146 *tp = copy_node (*tp);
1147 TREE_TYPE (*tp) = new_type;
1148 }
1149 }
1150
1151 /* Otherwise, just copy the node. Note that copy_tree_r already
1152 knows not to copy VAR_DECLs, etc., so this is safe. */
1153 else
1154 {
1155 /* Here we handle trees that are not completely rewritten.
1156 First we detect some inlining-induced bogosities for
1157 discarding. */
1158 if (TREE_CODE (*tp) == MODIFY_EXPR
1159 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1160 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1161 {
1162 /* Some assignments VAR = VAR; don't generate any rtl code
1163 and thus don't count as variable modification. Avoid
1164 keeping bogosities like 0 = 0. */
1165 tree decl = TREE_OPERAND (*tp, 0), value;
1166 tree *n;
1167
1168 n = id->decl_map->get (decl);
1169 if (n)
1170 {
1171 value = *n;
1172 STRIP_TYPE_NOPS (value);
1173 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1174 {
1175 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1176 return copy_tree_body_r (tp, walk_subtrees, data);
1177 }
1178 }
1179 }
1180 else if (TREE_CODE (*tp) == INDIRECT_REF)
1181 {
1182 /* Get rid of *& from inline substitutions that can happen when a
1183 pointer argument is an ADDR_EXPR. */
1184 tree decl = TREE_OPERAND (*tp, 0);
1185 tree *n = id->decl_map->get (decl);
1186 if (n)
1187 {
1188 /* If we happen to get an ADDR_EXPR in n->value, strip
1189 it manually here as we'll eventually get ADDR_EXPRs
1190 which lie about their types pointed to. In this case
1191 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1192 but we absolutely rely on that. As fold_indirect_ref
1193 does other useful transformations, try that first, though. */
1194 tree type = TREE_TYPE (*tp);
1195 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1196 tree old = *tp;
1197 *tp = gimple_fold_indirect_ref (ptr);
1198 if (! *tp)
1199 {
1200 if (TREE_CODE (ptr) == ADDR_EXPR)
1201 {
1202 *tp
1203 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1204 /* ??? We should either assert here or build
1205 a VIEW_CONVERT_EXPR instead of blindly leaking
1206 incompatible types to our IL. */
1207 if (! *tp)
1208 *tp = TREE_OPERAND (ptr, 0);
1209 }
1210 else
1211 {
1212 *tp = build1 (INDIRECT_REF, type, ptr);
1213 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1214 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1215 TREE_READONLY (*tp) = TREE_READONLY (old);
1216 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1217 have remapped a parameter as the property might be
1218 valid only for the parameter itself. */
1219 if (TREE_THIS_NOTRAP (old)
1220 && (!is_parm (TREE_OPERAND (old, 0))
1221 || (!id->transform_parameter && is_parm (ptr))))
1222 TREE_THIS_NOTRAP (*tp) = 1;
1223 }
1224 }
1225 *walk_subtrees = 0;
1226 return NULL;
1227 }
1228 }
1229 else if (TREE_CODE (*tp) == MEM_REF)
1230 {
1231 /* We need to re-canonicalize MEM_REFs from inline substitutions
1232 that can happen when a pointer argument is an ADDR_EXPR.
1233 Recurse here manually to allow that. */
1234 tree ptr = TREE_OPERAND (*tp, 0);
1235 tree type = remap_type (TREE_TYPE (*tp), id);
1236 tree old = *tp;
1237 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1238 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1239 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1240 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1241 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1242 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1243 {
1244 MR_DEPENDENCE_CLIQUE (*tp)
1245 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1246 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1247 }
1248 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1249 remapped a parameter as the property might be valid only
1250 for the parameter itself. */
1251 if (TREE_THIS_NOTRAP (old)
1252 && (!is_parm (TREE_OPERAND (old, 0))
1253 || (!id->transform_parameter && is_parm (ptr))))
1254 TREE_THIS_NOTRAP (*tp) = 1;
1255 *walk_subtrees = 0;
1256 return NULL;
1257 }
1258
1259 /* Here is the "usual case". Copy this tree node, and then
1260 tweak some special cases. */
1261 copy_tree_r (tp, walk_subtrees, NULL);
1262
1263 /* If EXPR has block defined, map it to newly constructed block.
1264 When inlining we want EXPRs without block appear in the block
1265 of function call if we are not remapping a type. */
1266 if (EXPR_P (*tp))
1267 {
1268 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1269 if (TREE_BLOCK (*tp))
1270 {
1271 tree *n;
1272 n = id->decl_map->get (TREE_BLOCK (*tp));
1273 if (n)
1274 new_block = *n;
1275 }
1276 TREE_SET_BLOCK (*tp, new_block);
1277 }
1278
1279 if (TREE_CODE (*tp) != OMP_CLAUSE)
1280 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1281
1282 /* The copied TARGET_EXPR has never been expanded, even if the
1283 original node was expanded already. */
1284 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1285 {
1286 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1287 TREE_OPERAND (*tp, 3) = NULL_TREE;
1288 }
1289
1290 /* Variable substitution need not be simple. In particular, the
1291 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1292 and friends are up-to-date. */
1293 else if (TREE_CODE (*tp) == ADDR_EXPR)
1294 {
1295 int invariant = is_gimple_min_invariant (*tp);
1296 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1297
1298 /* Handle the case where we substituted an INDIRECT_REF
1299 into the operand of the ADDR_EXPR. */
1300 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1301 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1302 else
1303 recompute_tree_invariant_for_addr_expr (*tp);
1304
1305 /* If this used to be invariant, but is not any longer,
1306 then regimplification is probably needed. */
1307 if (invariant && !is_gimple_min_invariant (*tp))
1308 id->regimplify = true;
1309
1310 *walk_subtrees = 0;
1311 }
1312 }
1313
1314 /* Keep iterating. */
1315 return NULL_TREE;
1316 }
1317
1318 /* Helper for remap_gimple_stmt. Given an EH region number for the
1319 source function, map that to the duplicate EH region number in
1320 the destination function. */
1321
1322 static int
1323 remap_eh_region_nr (int old_nr, copy_body_data *id)
1324 {
1325 eh_region old_r, new_r;
1326
1327 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1328 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1329
1330 return new_r->index;
1331 }
1332
1333 /* Similar, but operate on INTEGER_CSTs. */
1334
1335 static tree
1336 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1337 {
1338 int old_nr, new_nr;
1339
1340 old_nr = tree_to_shwi (old_t_nr);
1341 new_nr = remap_eh_region_nr (old_nr, id);
1342
1343 return build_int_cst (integer_type_node, new_nr);
1344 }
1345
1346 /* Helper for copy_bb. Remap statement STMT using the inlining
1347 information in ID. Return the new statement copy. */
1348
1349 static gimple_seq
1350 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1351 {
1352 gimple copy = NULL;
1353 struct walk_stmt_info wi;
1354 bool skip_first = false;
1355 gimple_seq stmts = NULL;
1356
1357 if (is_gimple_debug (stmt)
1358 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1359 return stmts;
1360
1361 /* Begin by recognizing trees that we'll completely rewrite for the
1362 inlining context. Our output for these trees is completely
1363      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1364 into an edge). Further down, we'll handle trees that get
1365 duplicated and/or tweaked. */
1366
1367 /* When requested, GIMPLE_RETURNs should be transformed to just the
1368 contained GIMPLE_ASSIGN. The branch semantics of the return will
1369 be handled elsewhere by manipulating the CFG rather than the
1370 statement. */
1371 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1372 {
1373 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1374 tree retbnd = gimple_return_retbnd (stmt);
1375 tree bndslot = id->retbnd;
1376
1377 if (retbnd && bndslot)
1378 {
1379 gimple bndcopy = gimple_build_assign (bndslot, retbnd);
1380 memset (&wi, 0, sizeof (wi));
1381 wi.info = id;
1382 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1383 gimple_seq_add_stmt (&stmts, bndcopy);
1384 }
1385
1386 /* If we're returning something, just turn that into an
1387 assignment into the equivalent of the original RESULT_DECL.
1388 If RETVAL is just the result decl, the result decl has
1389 already been set (e.g. a recent "foo (&result_decl, ...)");
1390 just toss the entire GIMPLE_RETURN. */
1391 if (retval
1392 && (TREE_CODE (retval) != RESULT_DECL
1393 && (TREE_CODE (retval) != SSA_NAME
1394 || ! SSA_NAME_VAR (retval)
1395 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1396 {
1397 copy = gimple_build_assign (id->do_not_unshare
1398 ? id->retvar : unshare_expr (id->retvar),
1399 retval);
1400 /* id->retvar is already substituted. Skip it on later remapping. */
1401 skip_first = true;
1402
1403 	  /* We need to copy bounds if we return a structure with pointers
1404 	     in an instrumented function.  */
1405 if (chkp_function_instrumented_p (id->dst_fn)
1406 && !bndslot
1407 && !BOUNDED_P (id->retvar)
1408 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1409 id->assign_stmts.safe_push (copy);
1410
1411 }
1412 else
1413 return stmts;
1414 }
1415 else if (gimple_has_substatements (stmt))
1416 {
1417 gimple_seq s1, s2;
1418
1419 /* When cloning bodies from the C++ front end, we will be handed bodies
1420 in High GIMPLE form. Handle here all the High GIMPLE statements that
1421 have embedded statements. */
1422 switch (gimple_code (stmt))
1423 {
1424 case GIMPLE_BIND:
1425 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1426 break;
1427
1428 case GIMPLE_CATCH:
1429 {
1430 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1431 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1432 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1433 }
1434 break;
1435
1436 case GIMPLE_EH_FILTER:
1437 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1438 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1439 break;
1440
1441 case GIMPLE_TRY:
1442 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1443 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1444 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1445 break;
1446
1447 case GIMPLE_WITH_CLEANUP_EXPR:
1448 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1449 copy = gimple_build_wce (s1);
1450 break;
1451
1452 case GIMPLE_OMP_PARALLEL:
1453 {
1454 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1455 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1456 copy = gimple_build_omp_parallel
1457 (s1,
1458 gimple_omp_parallel_clauses (omp_par_stmt),
1459 gimple_omp_parallel_child_fn (omp_par_stmt),
1460 gimple_omp_parallel_data_arg (omp_par_stmt));
1461 }
1462 break;
1463
1464 case GIMPLE_OMP_TASK:
1465 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1466 copy = gimple_build_omp_task
1467 (s1,
1468 gimple_omp_task_clauses (stmt),
1469 gimple_omp_task_child_fn (stmt),
1470 gimple_omp_task_data_arg (stmt),
1471 gimple_omp_task_copy_fn (stmt),
1472 gimple_omp_task_arg_size (stmt),
1473 gimple_omp_task_arg_align (stmt));
1474 break;
1475
1476 case GIMPLE_OMP_FOR:
1477 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1478 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1479 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1480 gimple_omp_for_clauses (stmt),
1481 gimple_omp_for_collapse (stmt), s2);
1482 {
1483 size_t i;
1484 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1485 {
1486 gimple_omp_for_set_index (copy, i,
1487 gimple_omp_for_index (stmt, i));
1488 gimple_omp_for_set_initial (copy, i,
1489 gimple_omp_for_initial (stmt, i));
1490 gimple_omp_for_set_final (copy, i,
1491 gimple_omp_for_final (stmt, i));
1492 gimple_omp_for_set_incr (copy, i,
1493 gimple_omp_for_incr (stmt, i));
1494 gimple_omp_for_set_cond (copy, i,
1495 gimple_omp_for_cond (stmt, i));
1496 }
1497 }
1498 break;
1499
1500 case GIMPLE_OMP_MASTER:
1501 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1502 copy = gimple_build_omp_master (s1);
1503 break;
1504
1505 case GIMPLE_OMP_TASKGROUP:
1506 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1507 copy = gimple_build_omp_taskgroup (s1);
1508 break;
1509
1510 case GIMPLE_OMP_ORDERED:
1511 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1512 copy = gimple_build_omp_ordered (s1);
1513 break;
1514
1515 case GIMPLE_OMP_SECTION:
1516 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1517 copy = gimple_build_omp_section (s1);
1518 break;
1519
1520 case GIMPLE_OMP_SECTIONS:
1521 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1522 copy = gimple_build_omp_sections
1523 (s1, gimple_omp_sections_clauses (stmt));
1524 break;
1525
1526 case GIMPLE_OMP_SINGLE:
1527 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1528 copy = gimple_build_omp_single
1529 (s1, gimple_omp_single_clauses (stmt));
1530 break;
1531
1532 case GIMPLE_OMP_TARGET:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_target
1535 (s1, gimple_omp_target_kind (stmt),
1536 gimple_omp_target_clauses (stmt));
1537 break;
1538
1539 case GIMPLE_OMP_TEAMS:
1540 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1541 copy = gimple_build_omp_teams
1542 (s1, gimple_omp_teams_clauses (stmt));
1543 break;
1544
1545 case GIMPLE_OMP_CRITICAL:
1546 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1547 copy = gimple_build_omp_critical (s1,
1548 gimple_omp_critical_name (
1549 as_a <gomp_critical *> (stmt)));
1550 break;
1551
1552 case GIMPLE_TRANSACTION:
1553 {
1554 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1555 gtransaction *new_trans_stmt;
1556 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1557 id);
1558 copy = new_trans_stmt
1559 = gimple_build_transaction (
1560 s1,
1561 gimple_transaction_label (old_trans_stmt));
1562 gimple_transaction_set_subcode (
1563 new_trans_stmt,
1564 gimple_transaction_subcode (old_trans_stmt));
1565 }
1566 break;
1567
1568 default:
1569 gcc_unreachable ();
1570 }
1571 }
1572 else
1573 {
1574 if (gimple_assign_copy_p (stmt)
1575 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1576 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1577 {
1578 /* Here we handle statements that are not completely rewritten.
1579 First we detect some inlining-induced bogosities for
1580 discarding. */
1581
1582 /* Some assignments VAR = VAR; don't generate any rtl code
1583 and thus don't count as variable modification. Avoid
1584 keeping bogosities like 0 = 0. */
1585 tree decl = gimple_assign_lhs (stmt), value;
1586 tree *n;
1587
1588 n = id->decl_map->get (decl);
1589 if (n)
1590 {
1591 value = *n;
1592 STRIP_TYPE_NOPS (value);
1593 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1594 return NULL;
1595 }
1596 }
1597
1598       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1599 in a block that we aren't copying during tree_function_versioning,
1600 just drop the clobber stmt. */
1601 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1602 {
1603 tree lhs = gimple_assign_lhs (stmt);
1604 if (TREE_CODE (lhs) == MEM_REF
1605 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1606 {
1607 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1608 if (gimple_bb (def_stmt)
1609 && !bitmap_bit_p (id->blocks_to_copy,
1610 gimple_bb (def_stmt)->index))
1611 return NULL;
1612 }
1613 }
1614
1615 if (gimple_debug_bind_p (stmt))
1616 {
1617 gdebug *copy
1618 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1619 gimple_debug_bind_get_value (stmt),
1620 stmt);
1621 id->debug_stmts.safe_push (copy);
1622 gimple_seq_add_stmt (&stmts, copy);
1623 return stmts;
1624 }
1625 if (gimple_debug_source_bind_p (stmt))
1626 {
1627 gdebug *copy = gimple_build_debug_source_bind
1628 (gimple_debug_source_bind_get_var (stmt),
1629 gimple_debug_source_bind_get_value (stmt),
1630 stmt);
1631 id->debug_stmts.safe_push (copy);
1632 gimple_seq_add_stmt (&stmts, copy);
1633 return stmts;
1634 }
1635
1636 /* Create a new deep copy of the statement. */
1637 copy = gimple_copy (stmt);
1638
1639 /* Clear flags that need revisiting. */
1640 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1641 {
1642 if (gimple_call_tail_p (call_stmt))
1643 gimple_call_set_tail (call_stmt, false);
1644 if (gimple_call_from_thunk_p (call_stmt))
1645 gimple_call_set_from_thunk (call_stmt, false);
1646 }
1647
1648 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1649 RESX and EH_DISPATCH. */
1650 if (id->eh_map)
1651 switch (gimple_code (copy))
1652 {
1653 case GIMPLE_CALL:
1654 {
1655 tree r, fndecl = gimple_call_fndecl (copy);
1656 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1657 switch (DECL_FUNCTION_CODE (fndecl))
1658 {
1659 case BUILT_IN_EH_COPY_VALUES:
1660 r = gimple_call_arg (copy, 1);
1661 r = remap_eh_region_tree_nr (r, id);
1662 gimple_call_set_arg (copy, 1, r);
1663 /* FALLTHRU */
1664
1665 case BUILT_IN_EH_POINTER:
1666 case BUILT_IN_EH_FILTER:
1667 r = gimple_call_arg (copy, 0);
1668 r = remap_eh_region_tree_nr (r, id);
1669 gimple_call_set_arg (copy, 0, r);
1670 break;
1671
1672 default:
1673 break;
1674 }
1675
1676 /* Reset alias info if we didn't apply measures to
1677 keep it valid over inlining by setting DECL_PT_UID. */
1678 if (!id->src_cfun->gimple_df
1679 || !id->src_cfun->gimple_df->ipa_pta)
1680 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1681 }
1682 break;
1683
1684 case GIMPLE_RESX:
1685 {
1686 gresx *resx_stmt = as_a <gresx *> (copy);
1687 int r = gimple_resx_region (resx_stmt);
1688 r = remap_eh_region_nr (r, id);
1689 gimple_resx_set_region (resx_stmt, r);
1690 }
1691 break;
1692
1693 case GIMPLE_EH_DISPATCH:
1694 {
1695 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1696 int r = gimple_eh_dispatch_region (eh_dispatch);
1697 r = remap_eh_region_nr (r, id);
1698 gimple_eh_dispatch_set_region (eh_dispatch, r);
1699 }
1700 break;
1701
1702 default:
1703 break;
1704 }
1705 }
1706
1707 /* If STMT has a block defined, map it to the newly constructed
1708 block. */
1709 if (gimple_block (copy))
1710 {
1711 tree *n;
1712 n = id->decl_map->get (gimple_block (copy));
1713 gcc_assert (n);
1714 gimple_set_block (copy, *n);
1715 }
1716
1717 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1718 {
1719 gimple_seq_add_stmt (&stmts, copy);
1720 return stmts;
1721 }
1722
1723 /* Remap all the operands in COPY. */
1724 memset (&wi, 0, sizeof (wi));
1725 wi.info = id;
1726 if (skip_first)
1727 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1728 else
1729 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1730
1731 /* Clear the copied virtual operands. We are not remapping them here
1732 but are going to recreate them from scratch. */
1733 if (gimple_has_mem_ops (copy))
1734 {
1735 gimple_set_vdef (copy, NULL_TREE);
1736 gimple_set_vuse (copy, NULL_TREE);
1737 }
1738
1739 gimple_seq_add_stmt (&stmts, copy);
1740 return stmts;
1741 }
1742
1743
1744 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1745    later.  */
1746
1747 static basic_block
1748 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1749 gcov_type count_scale)
1750 {
1751 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1752 basic_block copy_basic_block;
1753 tree decl;
1754 gcov_type freq;
1755 basic_block prev;
1756
1757 /* Search for previous copied basic block. */
1758 prev = bb->prev_bb;
1759 while (!prev->aux)
1760 prev = prev->prev_bb;
1761
1762 /* create_basic_block() will append every new block to
1763 basic_block_info automatically. */
1764 copy_basic_block = create_basic_block (NULL, (void *) 0,
1765 (basic_block) prev->aux);
1766 copy_basic_block->count = apply_scale (bb->count, count_scale);
1767
1768   /* We are going to rebuild frequencies from scratch.  These values
1769      have only minor importance for driving canonicalize_loop_headers.  */
1770 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1771
1772 /* We recompute frequencies after inlining, so this is quite safe. */
1773 if (freq > BB_FREQ_MAX)
1774 freq = BB_FREQ_MAX;
1775 copy_basic_block->frequency = freq;
1776
1777 copy_gsi = gsi_start_bb (copy_basic_block);
1778
1779 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1780 {
1781 gimple_seq stmts;
1782 gimple stmt = gsi_stmt (gsi);
1783 gimple orig_stmt = stmt;
1784 gimple_stmt_iterator stmts_gsi;
1785 bool stmt_added = false;
1786
1787 id->regimplify = false;
1788 stmts = remap_gimple_stmt (stmt, id);
1789
1790 if (gimple_seq_empty_p (stmts))
1791 continue;
1792
1793 seq_gsi = copy_gsi;
1794
1795 for (stmts_gsi = gsi_start (stmts);
1796 !gsi_end_p (stmts_gsi); )
1797 {
1798 stmt = gsi_stmt (stmts_gsi);
1799
1800 /* Advance iterator now before stmt is moved to seq_gsi. */
1801 gsi_next (&stmts_gsi);
1802
1803 if (gimple_nop_p (stmt))
1804 continue;
1805
1806 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1807 orig_stmt);
1808
1809 /* With return slot optimization we can end up with
1810 non-gimple (foo *)&this->m; fix that here. */
1811 if (is_gimple_assign (stmt)
1812 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1813 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1814 {
1815 tree new_rhs;
1816 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1817 gimple_assign_rhs1 (stmt),
1818 true, NULL, false,
1819 GSI_CONTINUE_LINKING);
1820 gimple_assign_set_rhs1 (stmt, new_rhs);
1821 id->regimplify = false;
1822 }
1823
1824 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1825
1826 if (id->regimplify)
1827 gimple_regimplify_operands (stmt, &seq_gsi);
1828
1829 stmt_added = true;
1830 }
1831
1832 if (!stmt_added)
1833 continue;
1834
1835 /* If copy_basic_block was empty at the start of this iteration,
1836 call gsi_start_bb again to get at the newly added statements. */
1837 if (gsi_end_p (copy_gsi))
1838 copy_gsi = gsi_start_bb (copy_basic_block);
1839 else
1840 gsi_next (&copy_gsi);
1841
1842 /* Process the new statement. The call to gimple_regimplify_operands
1843 possibly turned the statement into multiple statements; we
1844 need to process all of them. */
1845 do
1846 {
1847 tree fn;
1848 gcall *call_stmt;
1849
1850 stmt = gsi_stmt (copy_gsi);
1851 call_stmt = dyn_cast <gcall *> (stmt);
1852 if (call_stmt
1853 && gimple_call_va_arg_pack_p (call_stmt)
1854 && id->call_stmt)
1855 {
1856 /* __builtin_va_arg_pack () should be replaced by
1857 all arguments corresponding to ... in the caller. */
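/* A hedged source-level sketch of the transformation (hypothetical
   example, not from any testcase):

     static inline __attribute__ ((always_inline)) int
     logmsg (int level, const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   Inlining the call logmsg (1, "%s:%d", file, line) rewrites the
   inner fprintf call into fprintf (stderr, fmt, file, line): the
   two arguments matching the caller's "..." are spliced in place
   of the builtin, which is what the code below implements. */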
1858 tree p;
1859 gcall *new_call;
1860 vec<tree> argarray;
1861 size_t nargs = gimple_call_num_args (id->call_stmt);
1862 size_t n, i, nargs_to_copy;
1863 bool remove_bounds = false;
1864
1865 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1866 nargs--;
1867
1868 /* Bounds should be removed from the arg pack when we
1869 handle a non-instrumented call in an instrumented
1870 function. */
1871 nargs_to_copy = nargs;
1872 if (gimple_call_with_bounds_p (id->call_stmt)
1873 && !gimple_call_with_bounds_p (stmt))
1874 {
1875 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1876 i < gimple_call_num_args (id->call_stmt);
1877 i++)
1878 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1879 nargs_to_copy--;
1880 remove_bounds = true;
1881 }
1882
1883 /* Create the new array of arguments. */
1884 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1885 argarray.create (n);
1886 argarray.safe_grow_cleared (n);
1887
1888 /* Copy all the arguments before '...' */
1889 memcpy (argarray.address (),
1890 gimple_call_arg_ptr (call_stmt, 0),
1891 gimple_call_num_args (call_stmt) * sizeof (tree));
1892
1893 if (remove_bounds)
1894 {
1895 /* Append the rest of the arguments, removing bounds. */
1896 unsigned cur = gimple_call_num_args (call_stmt);
1898 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1899 i < gimple_call_num_args (id->call_stmt);
1900 i++)
1901 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1902 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1903 gcc_assert (cur == n);
1904 }
1905 else
1906 {
1907 /* Append the arguments passed in '...' */
1908 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1909 gimple_call_arg_ptr (id->call_stmt, 0)
1910 + (gimple_call_num_args (id->call_stmt) - nargs),
1911 nargs * sizeof (tree));
1912 }
1913
1914 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1915 argarray);
1916
1917 argarray.release ();
1918
1919 /* Copy all GIMPLE_CALL flags, location and block, except
1920 GF_CALL_VA_ARG_PACK. */
1921 gimple_call_copy_flags (new_call, call_stmt);
1922 gimple_call_set_va_arg_pack (new_call, false);
1923 gimple_set_location (new_call, gimple_location (stmt));
1924 gimple_set_block (new_call, gimple_block (stmt));
1925 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1926
1927 gsi_replace (&copy_gsi, new_call, false);
1928 stmt = new_call;
1929 }
1930 else if (call_stmt
1931 && id->call_stmt
1932 && (decl = gimple_call_fndecl (stmt))
1933 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1934 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1935 {
1936 /* __builtin_va_arg_pack_len () should be replaced by
1937 the number of anonymous arguments. */
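/* Continuing the sketch above the va_arg_pack handling: for the
   call logmsg (1, "%s:%d", file, line), __builtin_va_arg_pack_len ()
   folds to the integer constant 2, the number of arguments passed
   for the "..." part. */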
1938 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1939 tree count, p;
1940 gimple new_stmt;
1941
1942 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1943 nargs--;
1944
1945 /* For instrumented calls we should ignore bounds. */
1946 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1947 i < gimple_call_num_args (id->call_stmt);
1948 i++)
1949 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1950 nargs--;
1951
1952 count = build_int_cst (integer_type_node, nargs);
1953 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1954 gsi_replace (&copy_gsi, new_stmt, false);
1955 stmt = new_stmt;
1956 }
1957 else if (call_stmt
1958 && id->call_stmt
1959 && gimple_call_internal_p (stmt)
1960 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1961 {
1962 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1963 gsi_remove (&copy_gsi, false);
1964 continue;
1965 }
1966
1967 /* Statements produced by inlining can be unfolded, especially
1968 when we have constant propagated some operands. We can't fold
1969 them right now for two reasons:
1970 1) folding requires SSA_NAME_DEF_STMTs to be correct;
1971 2) we can't change function calls to builtins.
1972 So we just mark the statement for later folding. We mark
1973 all new statements, instead of just the statements that have
1974 changed by some nontrivial substitution, so that even statements
1975 made foldable indirectly are updated. If this turns out to be
1976 expensive, copy_body can be told to watch for nontrivial
1977 changes. */
1978 if (id->statements_to_fold)
1979 id->statements_to_fold->add (stmt);
1980
1981 /* We're duplicating a CALL_EXPR. Find any corresponding
1982 callgraph edges and update or duplicate them. */
1983 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1984 {
1985 struct cgraph_edge *edge;
1986
1987 switch (id->transform_call_graph_edges)
1988 {
1989 case CB_CGE_DUPLICATE:
1990 edge = id->src_node->get_edge (orig_stmt);
1991 if (edge)
1992 {
1993 int edge_freq = edge->frequency;
1994 int new_freq;
1995 struct cgraph_edge *old_edge = edge;
1996 edge = edge->clone (id->dst_node, call_stmt,
1997 gimple_uid (stmt),
1998 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1999 true);
2000 /* We could also just rescale the frequency, but
2001 doing so would introduce roundoff errors and make
2002 the verifier unhappy. */
2003 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
2004 copy_basic_block);
2005
2006 /* Speculative calls consist of two edges - direct and indirect.
2007 Duplicate the whole thing and distribute frequencies accordingly. */
2008 if (edge->speculative)
2009 {
2010 struct cgraph_edge *direct, *indirect;
2011 struct ipa_ref *ref;
2012
2013 gcc_assert (!edge->indirect_unknown_callee);
2014 old_edge->speculative_call_info (direct, indirect, ref);
2015 indirect = indirect->clone (id->dst_node, call_stmt,
2016 gimple_uid (stmt),
2017 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2018 true);
2019 if (old_edge->frequency + indirect->frequency)
2020 {
2021 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2022 (old_edge->frequency + indirect->frequency)),
2023 CGRAPH_FREQ_MAX);
2024 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2025 (old_edge->frequency + indirect->frequency)),
2026 CGRAPH_FREQ_MAX);
2027 }
2028 id->dst_node->clone_reference (ref, stmt);
2029 }
2030 else
2031 {
2032 edge->frequency = new_freq;
2033 if (dump_file
2034 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2035 && (edge_freq > edge->frequency + 10
2036 || edge_freq < edge->frequency - 10))
2037 {
2038 fprintf (dump_file, "Edge frequency estimated by "
2039 "cgraph %i diverge from inliner's estimate %i\n",
2040 edge_freq,
2041 edge->frequency);
2042 fprintf (dump_file,
2043 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2044 bb->index,
2045 bb->frequency,
2046 copy_basic_block->frequency);
2047 }
2048 }
2049 }
2050 break;
2051
2052 case CB_CGE_MOVE_CLONES:
2053 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2054 call_stmt);
2055 edge = id->dst_node->get_edge (stmt);
2056 break;
2057
2058 case CB_CGE_MOVE:
2059 edge = id->dst_node->get_edge (orig_stmt);
2060 if (edge)
2061 edge->set_call_stmt (call_stmt);
2062 break;
2063
2064 default:
2065 gcc_unreachable ();
2066 }
2067
2068 /* Constant propagation on an argument done during inlining
2069 may create a new direct call. Produce an edge for it. */
2070 if ((!edge
2071 || (edge->indirect_inlining_edge
2072 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2073 && id->dst_node->definition
2074 && (fn = gimple_call_fndecl (stmt)) != NULL)
2075 {
2076 struct cgraph_node *dest = cgraph_node::get (fn);
2077
2078 /* We have a missing edge in the callgraph. This can happen
2079 when previous inlining turned an indirect call into a
2080 direct call by constant propagating arguments, or when we
2081 are producing a dead clone (for further cloning). In all
2082 other cases we hit a bug (incorrect node sharing is the
2083 most common reason for missing edges). */
2084 gcc_assert (!dest->definition
2085 || dest->address_taken
2086 || !id->src_node->definition
2087 || !id->dst_node->definition);
2088 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2089 id->dst_node->create_edge_including_clones
2090 (dest, orig_stmt, call_stmt, bb->count,
2091 compute_call_stmt_bb_frequency (id->dst_node->decl,
2092 copy_basic_block),
2093 CIF_ORIGINALLY_INDIRECT_CALL);
2094 else
2095 id->dst_node->create_edge (dest, call_stmt,
2096 bb->count,
2097 compute_call_stmt_bb_frequency
2098 (id->dst_node->decl,
2099 copy_basic_block))->inline_failed
2100 = CIF_ORIGINALLY_INDIRECT_CALL;
2101 if (dump_file)
2102 {
2103 fprintf (dump_file, "Created new direct edge to %s\n",
2104 dest->name ());
2105 }
2106 }
2107
2108 notice_special_calls (as_a <gcall *> (stmt));
2109 }
2110
2111 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2112 id->eh_map, id->eh_lp_nr);
2113
2114 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2115 {
2116 ssa_op_iter i;
2117 tree def;
2118
2119 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2120 if (TREE_CODE (def) == SSA_NAME)
2121 SSA_NAME_DEF_STMT (def) = stmt;
2122 }
2123
2124 gsi_next (&copy_gsi);
2125 }
2126 while (!gsi_end_p (copy_gsi));
2127
2128 copy_gsi = gsi_last_bb (copy_basic_block);
2129 }
2130
2131 return copy_basic_block;
2132 }
2133
2134 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2135 form is quite easy, since the dominator relationship for the old basic
2136 blocks does not change.
2137
2138 There is, however, an exception: inlining might change the dominator
2139 relation across EH edges from basic blocks within the inlined function
2140 to landing pads in the function we inline into.
2141
2142 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2143 to gimple regs. Otherwise, it marks the PHI_RESULT of such
2144 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2145 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2146 set, which means that there will be no overlapping live ranges
2147 for the underlying symbol.
2148
2149 This might change in the future if we allow redirecting of EH edges;
2150 we might then want to change the way the CFG is built pre-inlining
2151 to include all the possible edges. */
2152 static void
2153 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2154 bool can_throw, bool nonlocal_goto)
2155 {
2156 edge e;
2157 edge_iterator ei;
2158
2159 FOR_EACH_EDGE (e, ei, bb->succs)
2160 if (!e->dest->aux
2161 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2162 {
2163 gphi *phi;
2164 gphi_iterator si;
2165
2166 if (!nonlocal_goto)
2167 gcc_assert (e->flags & EDGE_EH);
2168
2169 if (!can_throw)
2170 gcc_assert (!(e->flags & EDGE_EH));
2171
2172 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2173 {
2174 edge re;
2175
2176 phi = si.phi ();
2177
2178 /* For abnormal goto/call edges the receiver can be the
2179 ENTRY_BLOCK. Do not assert this cannot happen. */
2180
2181 gcc_assert ((e->flags & EDGE_EH)
2182 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2183
2184 re = find_edge (ret_bb, e->dest);
2185 gcc_checking_assert (re);
2186 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2187 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2188
2189 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2190 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2191 }
2192 }
2193 }
2194
2195
2196 /* Copy the edges of BB into its copy constructed earlier, scaling the
2197 profile accordingly. Assume that the aux pointers point to the
2198 copies of each BB. Return true if any
2199 debug stmts are left after a statement that must end the basic block. */
2200
2201 static bool
2202 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2203 basic_block abnormal_goto_dest)
2204 {
2205 basic_block new_bb = (basic_block) bb->aux;
2206 edge_iterator ei;
2207 edge old_edge;
2208 gimple_stmt_iterator si;
2209 int flags;
2210 bool need_debug_cleanup = false;
2211
2212 /* Use the indices from the original blocks to create edges for the
2213 new ones. */
2214 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2215 if (!(old_edge->flags & EDGE_EH))
2216 {
2217 edge new_edge;
2218
2219 flags = old_edge->flags;
2220
2221 /* Return edges do get a FALLTHRU flag when they get inlined. */
2222 if (old_edge->dest->index == EXIT_BLOCK
2223 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2224 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2225 flags |= EDGE_FALLTHRU;
2226 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2227 new_edge->count = apply_scale (old_edge->count, count_scale);
2228 new_edge->probability = old_edge->probability;
2229 }
2230
2231 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2232 return false;
2233
2234 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2235 {
2236 gimple copy_stmt;
2237 bool can_throw, nonlocal_goto;
2238
2239 copy_stmt = gsi_stmt (si);
2240 if (!is_gimple_debug (copy_stmt))
2241 update_stmt (copy_stmt);
2242
2243 /* Do this before the possible split_block. */
2244 gsi_next (&si);
2245
2246 /* If this tree could throw an exception, there are two
2247 cases where we need to add abnormal edge(s): the
2248 tree wasn't in a region and there is a "current
2249 region" in the caller; or the original tree had
2250 EH edges. In both cases split the block after the tree,
2251 and add abnormal edge(s) as needed; we need both
2252 those from the callee and the caller.
2253 We check whether the copy can throw, because the const
2254 propagation can change an INDIRECT_REF which throws
2255 into a COMPONENT_REF which doesn't. If the copy
2256 can throw, the original could also throw. */
2257 can_throw = stmt_can_throw_internal (copy_stmt);
2258 nonlocal_goto
2259 = (stmt_can_make_abnormal_goto (copy_stmt)
2260 && !computed_goto_p (copy_stmt));
2261
2262 if (can_throw || nonlocal_goto)
2263 {
2264 if (!gsi_end_p (si))
2265 {
2266 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2267 gsi_next (&si);
2268 if (gsi_end_p (si))
2269 need_debug_cleanup = true;
2270 }
2271 if (!gsi_end_p (si))
2272 /* Note that bb's predecessor edges aren't necessarily
2273 right at this point; split_block doesn't care. */
2274 {
2275 edge e = split_block (new_bb, copy_stmt);
2276
2277 new_bb = e->dest;
2278 new_bb->aux = e->src->aux;
2279 si = gsi_start_bb (new_bb);
2280 }
2281 }
2282
2283 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2284 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2285 else if (can_throw)
2286 make_eh_edges (copy_stmt);
2287
2288 /* If the call we inline cannot make an abnormal goto, do not add
2289 additional abnormal edges; only retain those already present
2290 in the original function body. */
2291 if (abnormal_goto_dest == NULL)
2292 nonlocal_goto = false;
2293 if (nonlocal_goto)
2294 {
2295 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2296
2297 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2298 nonlocal_goto = false;
2299 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2300 in OpenMP regions which aren't allowed to be left abnormally.
2301 So, there is no need to add an abnormal edge in that case. */
2302 else if (is_gimple_call (copy_stmt)
2303 && gimple_call_internal_p (copy_stmt)
2304 && (gimple_call_internal_fn (copy_stmt)
2305 == IFN_ABNORMAL_DISPATCHER)
2306 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2307 nonlocal_goto = false;
2308 else
2309 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2310 }
2311
2312 if ((can_throw || nonlocal_goto)
2313 && gimple_in_ssa_p (cfun))
2314 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2315 can_throw, nonlocal_goto);
2316 }
2317 return need_debug_cleanup;
2318 }
2319
2320 /* Copy the PHIs. All blocks and edges are copied, some blocks
2321 were possibly split, and new outgoing EH edges inserted.
2322 BB points to the block of the original function and AUX pointers link
2323 the original and newly copied blocks. */
2324
2325 static void
2326 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2327 {
2328 basic_block const new_bb = (basic_block) bb->aux;
2329 edge_iterator ei;
2330 gphi *phi;
2331 gphi_iterator si;
2332 edge new_edge;
2333 bool inserted = false;
2334
2335 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2336 {
2337 tree res, new_res;
2338 gphi *new_phi;
2339
2340 phi = si.phi ();
2341 res = PHI_RESULT (phi);
2342 new_res = res;
2343 if (!virtual_operand_p (res))
2344 {
2345 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2346 new_phi = create_phi_node (new_res, new_bb);
2347 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2348 {
2349 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2350 tree arg;
2351 tree new_arg;
2352 edge_iterator ei2;
2353 location_t locus;
2354
2355 /* When doing partial cloning, we allow PHIs on the entry block
2356 as long as all the arguments are the same. Find any input
2357 edge to see which argument to copy. */
2358 if (!old_edge)
2359 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2360 if (!old_edge->src->aux)
2361 break;
2362
2363 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2364 new_arg = arg;
2365 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2366 gcc_assert (new_arg);
2367 /* With return slot optimization we can end up with
2368 non-gimple (foo *)&this->m, fix that here. */
2369 if (TREE_CODE (new_arg) != SSA_NAME
2370 && TREE_CODE (new_arg) != FUNCTION_DECL
2371 && !is_gimple_val (new_arg))
2372 {
2373 gimple_seq stmts = NULL;
2374 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2375 gsi_insert_seq_on_edge (new_edge, stmts);
2376 inserted = true;
2377 }
2378 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2379 if (LOCATION_BLOCK (locus))
2380 {
2381 tree *n;
2382 n = id->decl_map->get (LOCATION_BLOCK (locus));
2383 gcc_assert (n);
2384 if (*n)
2385 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2386 else
2387 locus = LOCATION_LOCUS (locus);
2388 }
2389 else
2390 locus = LOCATION_LOCUS (locus);
2391
2392 add_phi_arg (new_phi, new_arg, new_edge, locus);
2393 }
2394 }
2395 }
2396
2397 /* Commit the delayed edge insertions. */
2398 if (inserted)
2399 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2400 gsi_commit_one_edge_insert (new_edge, NULL);
2401 }
2402
2403
2404 /* Wrapper for remap_decl so it can be used as a callback. */
2405
2406 static tree
2407 remap_decl_1 (tree decl, void *data)
2408 {
2409 return remap_decl (decl, (copy_body_data *) data);
2410 }
2411
2412 /* Build the struct function and associated data structures for the new
2413 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. This
2414 function changes cfun to that of new_fndecl (and current_function_decl too). */
2415
2416 static void
2417 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2418 {
2419 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2420 gcov_type count_scale;
2421
2422 if (!DECL_ARGUMENTS (new_fndecl))
2423 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2424 if (!DECL_RESULT (new_fndecl))
2425 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2426
2427 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2428 count_scale
2429 = GCOV_COMPUTE_SCALE (count,
2430 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2431 else
2432 count_scale = REG_BR_PROB_BASE;
2433
2434 /* Register specific tree functions. */
2435 gimple_register_cfg_hooks ();
2436
2437 /* Get clean struct function. */
2438 push_struct_function (new_fndecl);
2439
2440 /* We will rebuild these, so just sanity check that they are empty. */
2441 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2442 gcc_assert (cfun->local_decls == NULL);
2443 gcc_assert (cfun->cfg == NULL);
2444 gcc_assert (cfun->decl == new_fndecl);
2445
2446 /* Copy items we preserve during cloning. */
2447 cfun->static_chain_decl = src_cfun->static_chain_decl;
2448 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2449 cfun->function_end_locus = src_cfun->function_end_locus;
2450 cfun->curr_properties = src_cfun->curr_properties;
2451 cfun->last_verified = src_cfun->last_verified;
2452 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2453 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2454 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2455 cfun->stdarg = src_cfun->stdarg;
2456 cfun->after_inlining = src_cfun->after_inlining;
2457 cfun->can_throw_non_call_exceptions
2458 = src_cfun->can_throw_non_call_exceptions;
2459 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2460 cfun->returns_struct = src_cfun->returns_struct;
2461 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2462
2463 init_empty_tree_cfg ();
2464
2465 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2466 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2467 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2468 REG_BR_PROB_BASE);
2469 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2470 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2471 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2472 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2473 REG_BR_PROB_BASE);
2474 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2475 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2476 if (src_cfun->eh)
2477 init_eh_for_function ();
2478
2479 if (src_cfun->gimple_df)
2480 {
2481 init_tree_ssa (cfun);
2482 cfun->gimple_df->in_ssa_p = true;
2483 init_ssa_operands (cfun);
2484 }
2485 }
2486
2487 /* Helper function for copy_cfg_body. Move debug stmts from the end
2488 of NEW_BB to the beginning of successor basic blocks when needed. If a
2489 successor has multiple predecessors, reset the debug stmts' values;
2490 otherwise keep them. */
2491
2492 static void
2493 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2494 {
2495 edge e;
2496 edge_iterator ei;
2497 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2498
2499 if (gsi_end_p (si)
2500 || gsi_one_before_end_p (si)
2501 || !(stmt_can_throw_internal (gsi_stmt (si))
2502 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2503 return;
2504
2505 FOR_EACH_EDGE (e, ei, new_bb->succs)
2506 {
2507 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2508 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2509 while (is_gimple_debug (gsi_stmt (ssi)))
2510 {
2511 gimple stmt = gsi_stmt (ssi);
2512 gdebug *new_stmt;
2513 tree var;
2514 tree value;
2515
2516 /* For the last edge move the debug stmts instead of copying
2517 them. */
2518 if (ei_one_before_end_p (ei))
2519 {
2520 si = ssi;
2521 gsi_prev (&ssi);
2522 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2523 gimple_debug_bind_reset_value (stmt);
2524 gsi_remove (&si, false);
2525 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2526 continue;
2527 }
2528
2529 if (gimple_debug_bind_p (stmt))
2530 {
2531 var = gimple_debug_bind_get_var (stmt);
2532 if (single_pred_p (e->dest))
2533 {
2534 value = gimple_debug_bind_get_value (stmt);
2535 value = unshare_expr (value);
2536 }
2537 else
2538 value = NULL_TREE;
2539 new_stmt = gimple_build_debug_bind (var, value, stmt);
2540 }
2541 else if (gimple_debug_source_bind_p (stmt))
2542 {
2543 var = gimple_debug_source_bind_get_var (stmt);
2544 value = gimple_debug_source_bind_get_value (stmt);
2545 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2546 }
2547 else
2548 gcc_unreachable ();
2549 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2550 id->debug_stmts.safe_push (new_stmt);
2551 gsi_prev (&ssi);
2552 }
2553 }
2554 }
2555
2556 /* Make a copy of the sub-loops of SRC_PARENT and place them
2557 as sub-loops of DEST_PARENT. */
2558
2559 static void
2560 copy_loops (copy_body_data *id,
2561 struct loop *dest_parent, struct loop *src_parent)
2562 {
2563 struct loop *src_loop = src_parent->inner;
2564 while (src_loop)
2565 {
2566 if (!id->blocks_to_copy
2567 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2568 {
2569 struct loop *dest_loop = alloc_loop ();
2570
2571 /* Assign the new loop its header and latch and associate
2572 those with the new loop. */
2573 dest_loop->header = (basic_block)src_loop->header->aux;
2574 dest_loop->header->loop_father = dest_loop;
2575 if (src_loop->latch != NULL)
2576 {
2577 dest_loop->latch = (basic_block)src_loop->latch->aux;
2578 dest_loop->latch->loop_father = dest_loop;
2579 }
2580
2581 /* Copy loop meta-data. */
2582 copy_loop_info (src_loop, dest_loop);
2583
2584 /* Finally place it into the loop array and the loop tree. */
2585 place_new_loop (cfun, dest_loop);
2586 flow_loop_tree_node_add (dest_parent, dest_loop);
2587
2588 dest_loop->safelen = src_loop->safelen;
2589 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2590 if (src_loop->force_vectorize)
2591 {
2592 dest_loop->force_vectorize = true;
2593 cfun->has_force_vectorize_loops = true;
2594 }
2595 if (src_loop->simduid)
2596 {
2597 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2598 cfun->has_simduid_loops = true;
2599 }
2600
2601 /* Recurse. */
2602 copy_loops (id, dest_loop, src_loop);
2603 }
2604 src_loop = src_loop->next;
2605 }
2606 }
2607
2608 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2609
2610 void
2611 redirect_all_calls (copy_body_data * id, basic_block bb)
2612 {
2613 gimple_stmt_iterator si;
2614 gimple last = last_stmt (bb);
2615 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2616 {
2617 gimple stmt = gsi_stmt (si);
2618 if (is_gimple_call (stmt))
2619 {
2620 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2621 if (edge)
2622 {
2623 edge->redirect_call_stmt_to_callee ();
2624 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2625 gimple_purge_dead_eh_edges (bb);
2626 }
2627 }
2628 }
2629 }
2630
2631 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2632 with each bb's frequency. Used when NODE has a 0-weight entry
2633 but we are about to inline it into a non-zero count call bb.
2634 See the comments for handle_missing_profiles() in predict.c for
2635 when this can happen for COMDATs. */
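/* A hedged arithmetic sketch: with COUNT = 1000 and a block whose
   frequency is BB_FREQ_MAX / 4, the loop below computes
   bb->count = apply_scale (1000, GCOV_COMPUTE_SCALE (BB_FREQ_MAX / 4,
   BB_FREQ_MAX)), i.e. 1000 * 1/4 = 250. */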
2636
2637 void
2638 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2639 {
2640 basic_block bb;
2641 edge_iterator ei;
2642 edge e;
2643 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2644
2645 FOR_ALL_BB_FN (bb, fn)
2646 {
2647 bb->count = apply_scale (count,
2648 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2649 FOR_EACH_EDGE (e, ei, bb->succs)
2650 e->count = apply_probability (e->src->count, e->probability);
2651 }
2652 }
2653
2654 /* Make a copy of the body of FN so that it can be inserted inline in
2655 another function. Walks FN via CFG, returns new fndecl. */
2656
2657 static tree
2658 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2659 basic_block entry_block_map, basic_block exit_block_map,
2660 basic_block new_entry)
2661 {
2662 tree callee_fndecl = id->src_fn;
2663 /* Original cfun for the callee, doesn't change. */
2664 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2665 struct function *cfun_to_copy;
2666 basic_block bb;
2667 tree new_fndecl = NULL;
2668 bool need_debug_cleanup = false;
2669 gcov_type count_scale;
2670 int last;
2671 int incoming_frequency = 0;
2672 gcov_type incoming_count = 0;
2673
2674 /* This can happen for COMDAT routines that end up with 0 counts
2675 despite being called (see the comments for handle_missing_profiles()
2676 in predict.c as to why). Apply counts to the blocks in the callee
2677 before inlining, using the guessed edge frequencies, so that we don't
2678 end up with a 0-count inline body which can confuse downstream
2679 optimizations such as function splitting. */
2680 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2681 {
2682 /* Apply the larger of the call bb count and the total incoming
2683 call edge count to the callee. */
2684 gcov_type in_count = 0;
2685 struct cgraph_edge *in_edge;
2686 for (in_edge = id->src_node->callers; in_edge;
2687 in_edge = in_edge->next_caller)
2688 in_count += in_edge->count;
2689 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2690 }
2691
2692 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2693 count_scale
2694 = GCOV_COMPUTE_SCALE (count,
2695 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2696 else
2697 count_scale = REG_BR_PROB_BASE;
2698
2699 /* Register specific tree functions. */
2700 gimple_register_cfg_hooks ();
2701
2702 /* If we are inlining just a region of the function, make sure to connect
2703 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2704 be part of a loop, we must compute the frequency and probability of
2705 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2706 probabilities of edges incoming from the nonduplicated region. */
2707 if (new_entry)
2708 {
2709 edge e;
2710 edge_iterator ei;
2711
2712 FOR_EACH_EDGE (e, ei, new_entry->preds)
2713 if (!e->src->aux)
2714 {
2715 incoming_frequency += EDGE_FREQUENCY (e);
2716 incoming_count += e->count;
2717 }
2718 incoming_count = apply_scale (incoming_count, count_scale);
2719 incoming_frequency
2720 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2721 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2722 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2723 }
2724
2725 /* Must have a CFG here at this point. */
2726 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2727 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2728
2729 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2730
2731 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2732 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2733 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2734 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2735
2736 /* Duplicate any exception-handling regions. */
2737 if (cfun->eh)
2738 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2739 remap_decl_1, id);
2740
2741 /* Use aux pointers to link the original blocks to their copies. */
2742 FOR_EACH_BB_FN (bb, cfun_to_copy)
2743 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2744 {
2745 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2746 bb->aux = new_bb;
2747 new_bb->aux = bb;
2748 new_bb->loop_father = entry_block_map->loop_father;
2749 }
2750
2751 last = last_basic_block_for_fn (cfun);
2752
2753 /* Now that we've duplicated the blocks, duplicate their edges. */
2754 basic_block abnormal_goto_dest = NULL;
2755 if (id->call_stmt
2756 && stmt_can_make_abnormal_goto (id->call_stmt))
2757 {
2758 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2759
2760 bb = gimple_bb (id->call_stmt);
2761 gsi_next (&gsi);
2762 if (gsi_end_p (gsi))
2763 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2764 }
2765 FOR_ALL_BB_FN (bb, cfun_to_copy)
2766 if (!id->blocks_to_copy
2767 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2768 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2769 abnormal_goto_dest);
2770
2771 if (new_entry)
2772 {
2773 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2774 e->probability = REG_BR_PROB_BASE;
2775 e->count = incoming_count;
2776 }
2777
2778 /* Duplicate the loop tree, if available and wanted. */
2779 if (loops_for_fn (src_cfun) != NULL
2780 && current_loops != NULL)
2781 {
2782 copy_loops (id, entry_block_map->loop_father,
2783 get_loop (src_cfun, 0));
2784 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2785 loops_state_set (LOOPS_NEED_FIXUP);
2786 }
2787
2788 /* If the loop tree in the source function needed fixup, mark the
2789 destination loop tree for fixup, too. */
2790 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2791 loops_state_set (LOOPS_NEED_FIXUP);
2792
2793 if (gimple_in_ssa_p (cfun))
2794 FOR_ALL_BB_FN (bb, cfun_to_copy)
2795 if (!id->blocks_to_copy
2796 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2797 copy_phis_for_bb (bb, id);
2798
2799 FOR_ALL_BB_FN (bb, cfun_to_copy)
2800 if (bb->aux)
2801 {
2802 if (need_debug_cleanup
2803 && bb->index != ENTRY_BLOCK
2804 && bb->index != EXIT_BLOCK)
2805 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2806 /* Update call edge destinations. This cannot be done before loop
2807 info is updated, because we may split basic blocks. */
2808 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2809 && bb->index != ENTRY_BLOCK
2810 && bb->index != EXIT_BLOCK)
2811 redirect_all_calls (id, (basic_block)bb->aux);
2812 ((basic_block)bb->aux)->aux = NULL;
2813 bb->aux = NULL;
2814 }
2815
2816 /* Zero out the AUX fields of blocks newly created during EH edge
2817 insertion. */
2818 for (; last < last_basic_block_for_fn (cfun); last++)
2819 {
2820 if (need_debug_cleanup)
2821 maybe_move_debug_stmts_to_successors (id,
2822 BASIC_BLOCK_FOR_FN (cfun, last));
2823 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2824 /* Update call edge destinations. This cannot be done before loop
2825 info is updated, because we may split basic blocks. */
2826 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2827 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2828 }
2829 entry_block_map->aux = NULL;
2830 exit_block_map->aux = NULL;
2831
2832 if (id->eh_map)
2833 {
2834 delete id->eh_map;
2835 id->eh_map = NULL;
2836 }
2837 if (id->dependence_map)
2838 {
2839 delete id->dependence_map;
2840 id->dependence_map = NULL;
2841 }
2842
2843 return new_fndecl;
2844 }
2845
2846 /* Copy the debug STMT using ID. We deal with these statements in a
2847 special way: if any variable in their VALUE expression wasn't
2848 remapped yet, we won't remap it, because that would get decl uids
2849 out of sync, causing codegen differences between -g and -g0. If
2850 this arises, we drop the VALUE expression altogether. */
2851
2852 static void
2853 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2854 {
2855 tree t, *n;
2856 struct walk_stmt_info wi;
2857
2858 if (gimple_block (stmt))
2859 {
2860 n = id->decl_map->get (gimple_block (stmt));
2861 gimple_set_block (stmt, n ? *n : id->block);
2862 }
2863
2864 /* Remap all the operands of STMT. */
2865 memset (&wi, 0, sizeof (wi));
2866 wi.info = id;
2867
2868 processing_debug_stmt = 1;
2869
2870 if (gimple_debug_source_bind_p (stmt))
2871 t = gimple_debug_source_bind_get_var (stmt);
2872 else
2873 t = gimple_debug_bind_get_var (stmt);
2874
2875 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2876 && (n = id->debug_map->get (t)))
2877 {
2878 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2879 t = *n;
2880 }
2881 else if (TREE_CODE (t) == VAR_DECL
2882 && !is_global_var (t)
2883 && !id->decl_map->get (t))
2884 /* T is a non-localized variable. */;
2885 else
2886 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2887
2888 if (gimple_debug_bind_p (stmt))
2889 {
2890 gimple_debug_bind_set_var (stmt, t);
2891
2892 if (gimple_debug_bind_has_value_p (stmt))
2893 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2894 remap_gimple_op_r, &wi, NULL);
2895
2896 /* Punt if any decl couldn't be remapped. */
2897 if (processing_debug_stmt < 0)
2898 gimple_debug_bind_reset_value (stmt);
2899 }
2900 else if (gimple_debug_source_bind_p (stmt))
2901 {
2902 gimple_debug_source_bind_set_var (stmt, t);
2903 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2904 remap_gimple_op_r, &wi, NULL);
2905 /* When inlining, if the source bind refers to one of the optimized
2906 away parameters, change the source bind into a normal debug bind
2907 referring to the corresponding DEBUG_EXPR_DECL that should have
2908 been bound before the call stmt. */
2909 t = gimple_debug_source_bind_get_value (stmt);
2910 if (t != NULL_TREE
2911 && TREE_CODE (t) == PARM_DECL
2912 && id->call_stmt)
2913 {
2914 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2915 unsigned int i;
2916 if (debug_args != NULL)
2917 {
2918 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2919 if ((**debug_args)[i] == DECL_ORIGIN (t)
2920 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2921 {
2922 t = (**debug_args)[i + 1];
2923 stmt->subcode = GIMPLE_DEBUG_BIND;
2924 gimple_debug_bind_set_value (stmt, t);
2925 break;
2926 }
2927 }
2928 }
2929 }
2930
2931 processing_debug_stmt = 0;
2932
2933 update_stmt (stmt);
2934 }
2935
2936 /* Process deferred debug stmts. In order to give values better odds
2937 of being successfully remapped, we delay the processing of debug
2938 stmts until all other stmts that might require remapping are
2939 processed. */
2940
2941 static void
2942 copy_debug_stmts (copy_body_data *id)
2943 {
2944 size_t i;
2945 gdebug *stmt;
2946
2947 if (!id->debug_stmts.exists ())
2948 return;
2949
2950 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2951 copy_debug_stmt (stmt, id);
2952
2953 id->debug_stmts.release ();
2954 }
2955
2956 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2957 another function. */
2958
2959 static tree
2960 copy_tree_body (copy_body_data *id)
2961 {
2962 tree fndecl = id->src_fn;
2963 tree body = DECL_SAVED_TREE (fndecl);
2964
2965 walk_tree (&body, copy_tree_body_r, id, NULL);
2966
2967 return body;
2968 }
2969
2970 /* Make a copy of the body of FN so that it can be inserted inline in
2971 another function. */
2972
2973 static tree
2974 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2975 basic_block entry_block_map, basic_block exit_block_map,
2976 basic_block new_entry)
2977 {
2978 tree fndecl = id->src_fn;
2979 tree body;
2980
2981 /* If this body has a CFG, walk CFG and copy. */
2982 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2983 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2984 new_entry);
2985 copy_debug_stmts (id);
2986
2987 return body;
2988 }
2989
2990 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2991 defined in function FN, or of a data member thereof. */
2992
2993 static bool
2994 self_inlining_addr_expr (tree value, tree fn)
2995 {
2996 tree var;
2997
2998 if (TREE_CODE (value) != ADDR_EXPR)
2999 return false;
3000
3001 var = get_base_address (TREE_OPERAND (value, 0));
3002
3003 return var && auto_var_in_fn_p (var, fn);
3004 }
3005
3006 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3007 lexical block and line number information from base_stmt, if given,
3008 or from the last stmt of the block otherwise. */
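/* In gimple dumps the annotation built below shows up roughly as
   "# DEBUG var => value"; a hedged sketch of the dump form, not a
   guaranteed format. */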
3009
3010 static gimple
3011 insert_init_debug_bind (copy_body_data *id,
3012 basic_block bb, tree var, tree value,
3013 gimple base_stmt)
3014 {
3015 gimple note;
3016 gimple_stmt_iterator gsi;
3017 tree tracked_var;
3018
3019 if (!gimple_in_ssa_p (id->src_cfun))
3020 return NULL;
3021
3022 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3023 return NULL;
3024
3025 tracked_var = target_for_debug_bind (var);
3026 if (!tracked_var)
3027 return NULL;
3028
3029 if (bb)
3030 {
3031 gsi = gsi_last_bb (bb);
3032 if (!base_stmt && !gsi_end_p (gsi))
3033 base_stmt = gsi_stmt (gsi);
3034 }
3035
3036 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
3037
3038 if (bb)
3039 {
3040 if (!gsi_end_p (gsi))
3041 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3042 else
3043 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3044 }
3045
3046 return note;
3047 }
3048
3049 static void
3050 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3051 {
3052 /* If VAR represents a zero-sized variable, it's possible that the
3053 assignment statement may result in no gimple statements. */
3054 if (init_stmt)
3055 {
3056 gimple_stmt_iterator si = gsi_last_bb (bb);
3057
3058 /* We can end up with init statements that store to a non-register
3059 from a rhs with a conversion. Handle that here by forcing the
3060 rhs into a temporary. gimple_regimplify_operands is not
3061 prepared to do this for us. */
3062 if (!is_gimple_debug (init_stmt)
3063 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3064 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3065 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3066 {
3067 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3068 gimple_expr_type (init_stmt),
3069 gimple_assign_rhs1 (init_stmt));
3070 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3071 GSI_NEW_STMT);
3072 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3073 gimple_assign_set_rhs1 (init_stmt, rhs);
3074 }
3075 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3076 gimple_regimplify_operands (init_stmt, &si);
3077
3078 if (!is_gimple_debug (init_stmt))
3079 {
3080 tree def = gimple_assign_lhs (init_stmt);
3081 insert_init_debug_bind (id, bb, def, def, init_stmt);
3082 }
3083 }
3084 }
3085
3086 /* Initialize parameter P with VALUE. If needed, produce an init statement
3087 at the end of BB. When BB is NULL, we return the init statement to be
3088 output later. */
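/* A hedged sketch of the effect: when inlining
   int f (int p) { return p + 1; } at a call f (a), the PARM_DECL p
   is replaced by a new VAR_DECL, and unless one of the shortcuts
   below applies (invariant value, or direct SSA name replacement),
   an init statement assigning a to that variable is emitted at the
   end of BB. */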
3089 static gimple
3090 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3091 basic_block bb, tree *vars)
3092 {
3093 gimple init_stmt = NULL;
3094 tree var;
3095 tree rhs = value;
3096 tree def = (gimple_in_ssa_p (cfun)
3097 ? ssa_default_def (id->src_cfun, p) : NULL);
3098
3099 if (value
3100 && value != error_mark_node
3101 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3102 {
3103 /* If we can match up types by promotion/demotion do so. */
3104 if (fold_convertible_p (TREE_TYPE (p), value))
3105 rhs = fold_convert (TREE_TYPE (p), value);
3106 else
3107 {
3108 /* ??? For valid programs we should not end up here.
3109 Still, if we end up with truly mismatched types here, fall back
3110 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3111 GIMPLE to the following passes. */
3112 if (!is_gimple_reg_type (TREE_TYPE (value))
3113 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3114 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3115 else
3116 rhs = build_zero_cst (TREE_TYPE (p));
3117 }
3118 }
3119
3120 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3121 here since the type of this decl must be visible to the calling
3122 function. */
3123 var = copy_decl_to_var (p, id);
3124
3125 /* Declare this new variable. */
3126 DECL_CHAIN (var) = *vars;
3127 *vars = var;
3128
3129 /* Make gimplifier happy about this variable. */
3130 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3131
3132 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3133 we would not need to create a new variable here at all, if it
3134 weren't for debug info. Still, we can just use the argument
3135 value. */
3136 if (TREE_READONLY (p)
3137 && !TREE_ADDRESSABLE (p)
3138 && value && !TREE_SIDE_EFFECTS (value)
3139 && !def)
3140 {
3141 /* We may produce non-gimple trees by adding NOPs or introduce
3142 invalid sharing when the operand is not really constant.
3143 It is no big deal to prohibit constant propagation here, as
3144 we will constant propagate in the DOM1 pass anyway. */
3145 if (is_gimple_min_invariant (value)
3146 && useless_type_conversion_p (TREE_TYPE (p),
3147 TREE_TYPE (value))
3148 /* We have to be very careful about ADDR_EXPR. Make sure
3149 the base variable isn't a local variable of the inlined
3150 function, e.g., when doing recursive inlining, direct or
3151 mutually-recursive or whatever, which is why we don't
3152 just test whether fn == current_function_decl. */
3153 && ! self_inlining_addr_expr (value, fn))
3154 {
3155 insert_decl_map (id, p, value);
3156 insert_debug_decl_map (id, p, var);
3157 return insert_init_debug_bind (id, bb, var, value, NULL);
3158 }
3159 }
3160
3161 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3162 that way, when the PARM_DECL is encountered, it will be
3163 automatically replaced by the VAR_DECL. */
3164 insert_decl_map (id, p, var);
3165
3166 /* Even if P was TREE_READONLY, the new VAR should not be.
3167 In the original code, we would have constructed a
3168 temporary, and then the function body would have never
3169 changed the value of P. However, now, we will be
3170 constructing VAR directly. The constructor body may
3171 change its value multiple times as it is being
3172 constructed. Therefore, it must not be TREE_READONLY;
3173 the back-end assumes that TREE_READONLY variable is
3174 assigned to only once. */
3175 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3176 TREE_READONLY (var) = 0;
3177
3178 /* If there is no setup required and we are in SSA, take the easy route
3179 replacing all SSA names representing the function parameter by the
3180 SSA name passed to the function.
3181
3182 We need to construct a map for the variable anyway, as it might be
3183 used in different SSA names when the parameter is set in the function.
3184
3185 Do the replacement at -O0 for const arguments replaced by a constant.
3186 This is important for builtin_constant_p and other constructs requiring
3187 the constant argument to be visible in the inlined function body. */
3188 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3189 && (optimize
3190 || (TREE_READONLY (p)
3191 && is_gimple_min_invariant (rhs)))
3192 && (TREE_CODE (rhs) == SSA_NAME
3193 || is_gimple_min_invariant (rhs))
3194 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3195 {
3196 insert_decl_map (id, def, rhs);
3197 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3198 }
3199
3200 /* If the value of the argument is never used, don't bother initializing
3201 it. */
3202 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3203 {
3204 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3205 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3206 }
3207
3208 /* Initialize this VAR_DECL from the equivalent argument. Convert
3209 the argument to the proper type in case it was promoted. */
3210 if (value)
3211 {
3212 if (rhs == error_mark_node)
3213 {
3214 insert_decl_map (id, p, var);
3215 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3216 }
3217
3218 STRIP_USELESS_TYPE_CONVERSION (rhs);
3219
3220 /* If we are in SSA form, properly remap the default definition
3221 or assign to a dummy SSA name if the parameter is unused and
3222 we are not optimizing. */
3223 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3224 {
3225 if (def)
3226 {
3227 def = remap_ssa_name (def, id);
3228 init_stmt = gimple_build_assign (def, rhs);
3229 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3230 set_ssa_default_def (cfun, var, NULL);
3231 }
3232 else if (!optimize)
3233 {
3234 def = make_ssa_name (var);
3235 init_stmt = gimple_build_assign (def, rhs);
3236 }
3237 }
3238 else
3239 init_stmt = gimple_build_assign (var, rhs);
3240
3241 if (bb && init_stmt)
3242 insert_init_stmt (id, bb, init_stmt);
3243 }
3244 return init_stmt;
3245 }
3246
3247 /* Generate code to initialize the parameters of the function at the
3248 top of the stack in ID from the GIMPLE_CALL STMT. */
3249
3250 static void
3251 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3252 tree fn, basic_block bb)
3253 {
3254 tree parms;
3255 size_t i;
3256 tree p;
3257 tree vars = NULL_TREE;
3258 tree static_chain = gimple_call_chain (stmt);
3259
3260 /* Figure out what the parameters are. */
3261 parms = DECL_ARGUMENTS (fn);
3262
3263 /* Loop through the parameter declarations, replacing each with an
3264 equivalent VAR_DECL, appropriately initialized. */
3265 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3266 {
3267 tree val;
3268 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3269 setup_one_parameter (id, p, val, fn, bb, &vars);
3270 }
3271 /* After remapping the parameters, remap their types. This has to be done
3272 in a second loop over all parameters to appropriately remap
3273 variable sized arrays when the size is specified in a
3274 parameter following the array. */
3275 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3276 {
3277 tree *varp = id->decl_map->get (p);
3278 if (varp
3279 && TREE_CODE (*varp) == VAR_DECL)
3280 {
3281 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3282 ? ssa_default_def (id->src_cfun, p) : NULL);
3283 tree var = *varp;
3284 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3285 /* Also remap the default definition if it was remapped
3286 to the default definition of the parameter's replacement
3287 by the parameter setup. */
3288 if (def)
3289 {
3290 tree *defp = id->decl_map->get (def);
3291 if (defp
3292 && TREE_CODE (*defp) == SSA_NAME
3293 && SSA_NAME_VAR (*defp) == var)
3294 TREE_TYPE (*defp) = TREE_TYPE (var);
3295 }
3296 }
3297 }
3298
3299 /* Initialize the static chain. */
3300 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3301 gcc_assert (fn != current_function_decl);
3302 if (p)
3303 {
3304 /* No static chain? Seems like a bug in tree-nested.c. */
3305 gcc_assert (static_chain);
3306
3307 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3308 }
3309
3310 declare_inline_vars (id->block, vars);
3311 }
3312
3313
3314 /* Declare a return variable to replace the RESULT_DECL for the
3315 function we are calling. An appropriate DECL_STMT is returned.
3316 The USE_STMT is filled to contain a use of the declaration to
3317 indicate the return value of the function.
3318
3319 RETURN_SLOT, if non-null, is the place where the result is stored. It
3320 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3321 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3322
3323 RETURN_BOUNDS holds a destination for returned bounds.
3324
3325 The return value is a (possibly null) value that holds the result
3326 as seen by the caller. */
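/* A hedged sketch: for a call t = f () where f returns int,
   MODIFY_DEST is t; when t can safely be reused, the callee's
   RESULT_DECL is mapped directly to t, otherwise a new temporary
   variable is declared and t is later assigned from it. */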
3327
3328 static tree
3329 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3330 tree return_bounds, basic_block entry_bb)
3331 {
3332 tree callee = id->src_fn;
3333 tree result = DECL_RESULT (callee);
3334 tree callee_type = TREE_TYPE (result);
3335 tree caller_type;
3336 tree var, use;
3337
3338 /* Handle type-mismatches in the function declaration return type
3339 vs. the call expression. */
3340 if (modify_dest)
3341 caller_type = TREE_TYPE (modify_dest);
3342 else
3343 caller_type = TREE_TYPE (TREE_TYPE (callee));
3344
3345 /* We don't need to do anything for functions that don't return anything. */
3346 if (VOID_TYPE_P (callee_type))
3347 return NULL_TREE;
3348
3349 /* If there was a return slot, then the return value is the
3350 dereferenced address of that object. */
3351 if (return_slot)
3352 {
3353 /* The front end shouldn't have used both return_slot and
3354 a modify expression. */
3355 gcc_assert (!modify_dest);
3356 if (DECL_BY_REFERENCE (result))
3357 {
3358 tree return_slot_addr = build_fold_addr_expr (return_slot);
3359 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3360
3361 /* We are going to construct *&return_slot and we can't do that
3362 for variables believed to be non-addressable.
3363
3364 FIXME: This check can possibly match, because values returned
3365 via the return slot optimization are not believed to have their
3366 address taken by alias analysis. */
3367 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3368 var = return_slot_addr;
3369 }
3370 else
3371 {
3372 var = return_slot;
3373 gcc_assert (TREE_CODE (var) != SSA_NAME);
3374 if (TREE_ADDRESSABLE (result))
3375 mark_addressable (var);
3376 }
3377 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3378 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3379 && !DECL_GIMPLE_REG_P (result)
3380 && DECL_P (var))
3381 DECL_GIMPLE_REG_P (var) = 0;
3382 use = NULL;
3383 goto done;
3384 }
3385
3386 /* All types requiring non-trivial constructors should have been handled. */
3387 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3388
3389 /* Attempt to avoid creating a new temporary variable. */
3390 if (modify_dest
3391 && TREE_CODE (modify_dest) != SSA_NAME)
3392 {
3393 bool use_it = false;
3394
3395 /* We can't use MODIFY_DEST if there's type promotion involved. */
3396 if (!useless_type_conversion_p (callee_type, caller_type))
3397 use_it = false;
3398
3399 /* ??? If we're assigning to a variable sized type, then we must
3400 reuse the destination variable, because we've no good way to
3401 create variable sized temporaries at this point. */
3402 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3403 use_it = true;
3404
3405 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3406 reuse it as the result of the call directly. Don't do this if
3407 it would promote MODIFY_DEST to addressable. */
3408 else if (TREE_ADDRESSABLE (result))
3409 use_it = false;
3410 else
3411 {
3412 tree base_m = get_base_address (modify_dest);
3413
3414 /* If the base isn't a decl, then it's a pointer, and we don't
3415 know where that's going to go. */
3416 if (!DECL_P (base_m))
3417 use_it = false;
3418 else if (is_global_var (base_m))
3419 use_it = false;
3420 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3421 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3422 && !DECL_GIMPLE_REG_P (result)
3423 && DECL_GIMPLE_REG_P (base_m))
3424 use_it = false;
3425 else if (!TREE_ADDRESSABLE (base_m))
3426 use_it = true;
3427 }
3428
3429 if (use_it)
3430 {
3431 var = modify_dest;
3432 use = NULL;
3433 goto done;
3434 }
3435 }
3436
3437 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3438
3439 var = copy_result_decl_to_var (result, id);
3440 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3441
3442 /* Do not have the rest of GCC warn about this variable as it should
3443 not be visible to the user. */
3444 TREE_NO_WARNING (var) = 1;
3445
3446 declare_inline_vars (id->block, var);
3447
3448 /* Build the use expr. If the return type of the function was
3449 promoted, convert it back to the expected type. */
3450 use = var;
3451 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3452 {
3453 /* If we can match up types by promotion/demotion do so. */
3454 if (fold_convertible_p (caller_type, var))
3455 use = fold_convert (caller_type, var);
3456 else
3457 {
3458 /* ??? For valid programs we should not end up here.
3459 Still if we end up with truly mismatched types here, fall back
3460 to using a MEM_REF to not leak invalid GIMPLE to the following
3461 passes. */
3462 /* Prevent var from being written into SSA form. */
3463 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3464 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3465 DECL_GIMPLE_REG_P (var) = false;
3466 else if (is_gimple_reg_type (TREE_TYPE (var)))
3467 TREE_ADDRESSABLE (var) = true;
3468 use = fold_build2 (MEM_REF, caller_type,
3469 build_fold_addr_expr (var),
3470 build_int_cst (ptr_type_node, 0));
3471 }
3472 }
3473
3474 STRIP_USELESS_TYPE_CONVERSION (use);
3475
3476 if (DECL_BY_REFERENCE (result))
3477 {
3478 TREE_ADDRESSABLE (var) = 1;
3479 var = build_fold_addr_expr (var);
3480 }
3481
3482 done:
3483 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3484 way, when the RESULT_DECL is encountered, it will be
3485 automatically replaced by the VAR_DECL.
3486
3487 When returning by reference, ensure that RESULT_DECL remaps to
3488 gimple_val. */
3489 if (DECL_BY_REFERENCE (result)
3490 && !is_gimple_val (var))
3491 {
3492 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3493 insert_decl_map (id, result, temp);
3494 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3495 its default_def SSA_NAME. */
3496 if (gimple_in_ssa_p (id->src_cfun)
3497 && is_gimple_reg (result))
3498 {
3499 temp = make_ssa_name (temp);
3500 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3501 }
3502 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3503 }
3504 else
3505 insert_decl_map (id, result, var);
3506
3507 /* Remember this so we can ignore it in remap_decls. */
3508 id->retvar = var;
3509
3510 /* If returned bounds are used, then make a var for them. */
3511 if (return_bounds)
3512 {
3513 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3514 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3515 TREE_NO_WARNING (bndtemp) = 1;
3516 declare_inline_vars (id->block, bndtemp);
3517
3518 id->retbnd = bndtemp;
3519 insert_init_stmt (id, entry_bb,
3520 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3521 }
3522
3523 return use;
3524 }
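/* An illustrative sketch of the conversion path above (hypothetical
   types, not from a real testcase): if the callee was declared with a
   promoted return type, say "int", while the caller expects "short"
   from the call, the VAR_DECL created by copy_result_decl_to_var has
   the callee's type and the returned use becomes
   fold_convert (caller_type, var); only when even fold_convertible_p
   fails does the MEM_REF punning above kick in, keeping the GIMPLE
   valid at the cost of possibly making VAR addressable.  */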
3525
3526 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3527 to a local label. */
3528
3529 static tree
3530 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3531 {
3532 tree node = *nodep;
3533 tree fn = (tree) fnp;
3534
3535 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3536 return node;
3537
3538 if (TYPE_P (node))
3539 *walk_subtrees = 0;
3540
3541 return NULL_TREE;
3542 }
3543
3544 /* Determine if the function can be copied. If so return NULL. If
3545 not return a string describing the reason for failure. */
3546
3547 const char *
3548 copy_forbidden (struct function *fun, tree fndecl)
3549 {
3550 const char *reason = fun->cannot_be_copied_reason;
3551 tree decl;
3552 unsigned ix;
3553
3554 /* Only examine the function once. */
3555 if (fun->cannot_be_copied_set)
3556 return reason;
3557
3558 /* We cannot copy a function that receives a non-local goto
3559 because we cannot remap the destination label used in the
3560 function that is performing the non-local goto. */
3561 /* ??? Actually, this should be possible, if we work at it.
3562 No doubt there's just a handful of places that simply
3563 assume it doesn't happen and don't substitute properly. */
3564 if (fun->has_nonlocal_label)
3565 {
3566 reason = G_("function %q+F can never be copied "
3567 "because it receives a non-local goto");
3568 goto fail;
3569 }
3570
3571 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3572 if (TREE_CODE (decl) == VAR_DECL
3573 && TREE_STATIC (decl)
3574 && !DECL_EXTERNAL (decl)
3575 && DECL_INITIAL (decl)
3576 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3577 has_label_address_in_static_1,
3578 fndecl))
3579 {
3580 reason = G_("function %q+F can never be copied because it saves "
3581 "address of local label in a static variable");
3582 goto fail;
3583 }
3584
3585 fail:
3586 fun->cannot_be_copied_reason = reason;
3587 fun->cannot_be_copied_set = true;
3588 return reason;
3589 }
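/* For instance (a minimal sketch of user code this rejects): the
   address of a local label stored into a static variable's
   initializer makes the function uncopyable:

     void *
     f (void)
     {
      l:
       {
         static void *p = &&l;
         return p;
       }
     }

   Here walk_tree_without_duplicates finds the LABEL_DECL for "l" with
   DECL_CONTEXT == f inside DECL_INITIAL (p), so copy_forbidden sets
   the "saves address of local label" reason above.  */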
3590
3591
3592 static const char *inline_forbidden_reason;
3593
3594 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3595 iff a function cannot be inlined. Also sets the reason why. */
3596
3597 static tree
3598 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3599 struct walk_stmt_info *wip)
3600 {
3601 tree fn = (tree) wip->info;
3602 tree t;
3603 gimple stmt = gsi_stmt (*gsi);
3604
3605 switch (gimple_code (stmt))
3606 {
3607 case GIMPLE_CALL:
3608 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3609 this may change the program's memory overhead drastically when the
3610 function using alloca is called in a loop. In the GCC present in
3611 SPEC2000, inlining into schedule_block caused it to require 2GB of
3612 RAM instead of 256MB. Don't do so for alloca calls emitted for
3613 VLA objects as those can't cause unbounded growth (they're always
3614 wrapped inside stack_save/stack_restore regions). */
3615 if (gimple_alloca_call_p (stmt)
3616 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3617 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3618 {
3619 inline_forbidden_reason
3620 = G_("function %q+F can never be inlined because it uses "
3621 "alloca (override using the always_inline attribute)");
3622 *handled_ops_p = true;
3623 return fn;
3624 }
3625
3626 t = gimple_call_fndecl (stmt);
3627 if (t == NULL_TREE)
3628 break;
3629
3630 /* We cannot inline functions that call setjmp. */
3631 if (setjmp_call_p (t))
3632 {
3633 inline_forbidden_reason
3634 = G_("function %q+F can never be inlined because it uses setjmp");
3635 *handled_ops_p = true;
3636 return t;
3637 }
3638
3639 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3640 switch (DECL_FUNCTION_CODE (t))
3641 {
3642 /* We cannot inline functions that take a variable number of
3643 arguments. */
3644 case BUILT_IN_VA_START:
3645 case BUILT_IN_NEXT_ARG:
3646 case BUILT_IN_VA_END:
3647 inline_forbidden_reason
3648 = G_("function %q+F can never be inlined because it "
3649 "uses variable argument lists");
3650 *handled_ops_p = true;
3651 return t;
3652
3653 case BUILT_IN_LONGJMP:
3654 /* We can't inline functions that call __builtin_longjmp at
3655 all. The non-local goto machinery really requires the
3656 destination be in a different function. If we allow the
3657 function calling __builtin_longjmp to be inlined into the
3658 function calling __builtin_setjmp, Things will Go Awry. */
3659 inline_forbidden_reason
3660 = G_("function %q+F can never be inlined because "
3661 "it uses setjmp-longjmp exception handling");
3662 *handled_ops_p = true;
3663 return t;
3664
3665 case BUILT_IN_NONLOCAL_GOTO:
3666 /* Similarly. */
3667 inline_forbidden_reason
3668 = G_("function %q+F can never be inlined because "
3669 "it uses non-local goto");
3670 *handled_ops_p = true;
3671 return t;
3672
3673 case BUILT_IN_RETURN:
3674 case BUILT_IN_APPLY_ARGS:
3675 /* If a __builtin_apply_args caller would be inlined,
3676 it would be saving arguments of the function it has
3677 been inlined into. Similarly __builtin_return would
3678 return from the function the inline has been inlined into. */
3679 inline_forbidden_reason
3680 = G_("function %q+F can never be inlined because "
3681 "it uses __builtin_return or __builtin_apply_args");
3682 *handled_ops_p = true;
3683 return t;
3684
3685 default:
3686 break;
3687 }
3688 break;
3689
3690 case GIMPLE_GOTO:
3691 t = gimple_goto_dest (stmt);
3692
3693 /* We will not inline a function which uses computed goto. The
3694 addresses of its local labels, which may be tucked into
3695 global storage, are of course not constant across
3696 instantiations, which causes unexpected behavior. */
3697 if (TREE_CODE (t) != LABEL_DECL)
3698 {
3699 inline_forbidden_reason
3700 = G_("function %q+F can never be inlined "
3701 "because it contains a computed goto");
3702 *handled_ops_p = true;
3703 return t;
3704 }
3705 break;
3706
3707 default:
3708 break;
3709 }
3710
3711 *handled_ops_p = false;
3712 return NULL_TREE;
3713 }
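/* A minimal sketch of a function this callback refuses to inline
   (hypothetical user code; "use" is an assumed external function):

     void f (unsigned n)
     {
       char *buf = __builtin_alloca (n);
       use (buf);
     }

   The GIMPLE_CALL case above matches the bare alloca, sets
   inline_forbidden_reason, and returns non-NULL; marking f with
   __attribute__ ((always_inline)) bypasses this particular check.  */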
3714
3715 /* Return true if FNDECL is a function that cannot be inlined into
3716 another one. */
3717
3718 static bool
3719 inline_forbidden_p (tree fndecl)
3720 {
3721 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3722 struct walk_stmt_info wi;
3723 basic_block bb;
3724 bool forbidden_p = false;
3725
3726 /* First check for shared reasons not to copy the code. */
3727 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3728 if (inline_forbidden_reason != NULL)
3729 return true;
3730
3731 /* Next, walk the statements of the function looking for
3732 constructs we can't handle, or that are non-optimal for inlining. */
3733 hash_set<tree> visited_nodes;
3734 memset (&wi, 0, sizeof (wi));
3735 wi.info = (void *) fndecl;
3736 wi.pset = &visited_nodes;
3737
3738 FOR_EACH_BB_FN (bb, fun)
3739 {
3740 gimple ret;
3741 gimple_seq seq = bb_seq (bb);
3742 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3743 forbidden_p = (ret != NULL);
3744 if (forbidden_p)
3745 break;
3746 }
3747
3748 return forbidden_p;
3749 }
3750 \f
3751 /* Return false if the function FNDECL cannot be inlined on account of its
3752 attributes, true otherwise. */
3753 static bool
3754 function_attribute_inlinable_p (const_tree fndecl)
3755 {
3756 if (targetm.attribute_table)
3757 {
3758 const_tree a;
3759
3760 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3761 {
3762 const_tree name = TREE_PURPOSE (a);
3763 int i;
3764
3765 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3766 if (is_attribute_p (targetm.attribute_table[i].name, name))
3767 return targetm.function_attribute_inlinable_p (fndecl);
3768 }
3769 }
3770
3771 return true;
3772 }
3773
3774 /* Returns nonzero if FN is a function that does not have any
3775 fundamental inline blocking properties. */
3776
3777 bool
3778 tree_inlinable_function_p (tree fn)
3779 {
3780 bool inlinable = true;
3781 bool do_warning;
3782 tree always_inline;
3783
3784 /* If we've already decided this function shouldn't be inlined,
3785 there's no need to check again. */
3786 if (DECL_UNINLINABLE (fn))
3787 return false;
3788
3789 /* We only warn for functions declared `inline' by the user. */
3790 do_warning = (warn_inline
3791 && DECL_DECLARED_INLINE_P (fn)
3792 && !DECL_NO_INLINE_WARNING_P (fn)
3793 && !DECL_IN_SYSTEM_HEADER (fn));
3794
3795 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3796
3797 if (flag_no_inline
3798 && always_inline == NULL)
3799 {
3800 if (do_warning)
3801 warning (OPT_Winline, "function %q+F can never be inlined because it "
3802 "is suppressed using -fno-inline", fn);
3803 inlinable = false;
3804 }
3805
3806 else if (!function_attribute_inlinable_p (fn))
3807 {
3808 if (do_warning)
3809 warning (OPT_Winline, "function %q+F can never be inlined because it "
3810 "uses attributes conflicting with inlining", fn);
3811 inlinable = false;
3812 }
3813
3814 else if (inline_forbidden_p (fn))
3815 {
3816 /* See if we should warn about uninlinable functions. Previously,
3817 some of these warnings would be issued while trying to expand
3818 the function inline, but that would cause multiple warnings
3819 about functions that would for example call alloca. But since
3820 this is a property of the function, just one warning is enough.
3821 As a bonus we can now give more details about the reason why a
3822 function is not inlinable. */
3823 if (always_inline)
3824 error (inline_forbidden_reason, fn);
3825 else if (do_warning)
3826 warning (OPT_Winline, inline_forbidden_reason, fn);
3827
3828 inlinable = false;
3829 }
3830
3831 /* Squirrel away the result so that we don't have to check again. */
3832 DECL_UNINLINABLE (fn) = !inlinable;
3833
3834 return inlinable;
3835 }
3836
3837 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3838 word size, take a possible memcpy call into account, and return a
3839 cost based on whether we optimize for size or speed according to SPEED_P. */
3840
3841 int
3842 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3843 {
3844 HOST_WIDE_INT size;
3845
3846 gcc_assert (!VOID_TYPE_P (type));
3847
3848 if (TREE_CODE (type) == VECTOR_TYPE)
3849 {
3850 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3851 machine_mode simd
3852 = targetm.vectorize.preferred_simd_mode (inner);
3853 int simd_mode_size = GET_MODE_SIZE (simd);
3854 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3855 / simd_mode_size);
3856 }
3857
3858 size = int_size_in_bytes (type);
3859
3860 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3861 /* Cost of a memcpy call, 3 arguments and the call. */
3862 return 4;
3863 else
3864 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3865 }
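/* A worked sketch under assumed target parameters (the real values
   are machine-dependent): with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed_p) == 4, a 24-byte struct move costs
   (24 + 8 - 1) / 8 == 3, while a 64-byte struct exceeds the
   8 * 4 == 32 byte threshold and is charged the flat memcpy cost
   of 4.  */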
3866
3867 /* Returns the cost of operation CODE, according to WEIGHTS. */
3868
3869 static int
3870 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3871 tree op1 ATTRIBUTE_UNUSED, tree op2)
3872 {
3873 switch (code)
3874 {
3875 /* These are "free" conversions, or their presumed cost
3876 is folded into other operations. */
3877 case RANGE_EXPR:
3878 CASE_CONVERT:
3879 case COMPLEX_EXPR:
3880 case PAREN_EXPR:
3881 case VIEW_CONVERT_EXPR:
3882 return 0;
3883
3884 /* Assign cost of 1 to usual operations.
3885 ??? We may consider mapping RTL costs to this. */
3886 case COND_EXPR:
3887 case VEC_COND_EXPR:
3888 case VEC_PERM_EXPR:
3889
3890 case PLUS_EXPR:
3891 case POINTER_PLUS_EXPR:
3892 case MINUS_EXPR:
3893 case MULT_EXPR:
3894 case MULT_HIGHPART_EXPR:
3895 case FMA_EXPR:
3896
3897 case ADDR_SPACE_CONVERT_EXPR:
3898 case FIXED_CONVERT_EXPR:
3899 case FIX_TRUNC_EXPR:
3900
3901 case NEGATE_EXPR:
3902 case FLOAT_EXPR:
3903 case MIN_EXPR:
3904 case MAX_EXPR:
3905 case ABS_EXPR:
3906
3907 case LSHIFT_EXPR:
3908 case RSHIFT_EXPR:
3909 case LROTATE_EXPR:
3910 case RROTATE_EXPR:
3911
3912 case BIT_IOR_EXPR:
3913 case BIT_XOR_EXPR:
3914 case BIT_AND_EXPR:
3915 case BIT_NOT_EXPR:
3916
3917 case TRUTH_ANDIF_EXPR:
3918 case TRUTH_ORIF_EXPR:
3919 case TRUTH_AND_EXPR:
3920 case TRUTH_OR_EXPR:
3921 case TRUTH_XOR_EXPR:
3922 case TRUTH_NOT_EXPR:
3923
3924 case LT_EXPR:
3925 case LE_EXPR:
3926 case GT_EXPR:
3927 case GE_EXPR:
3928 case EQ_EXPR:
3929 case NE_EXPR:
3930 case ORDERED_EXPR:
3931 case UNORDERED_EXPR:
3932
3933 case UNLT_EXPR:
3934 case UNLE_EXPR:
3935 case UNGT_EXPR:
3936 case UNGE_EXPR:
3937 case UNEQ_EXPR:
3938 case LTGT_EXPR:
3939
3940 case CONJ_EXPR:
3941
3942 case PREDECREMENT_EXPR:
3943 case PREINCREMENT_EXPR:
3944 case POSTDECREMENT_EXPR:
3945 case POSTINCREMENT_EXPR:
3946
3947 case REALIGN_LOAD_EXPR:
3948
3949 case REDUC_MAX_EXPR:
3950 case REDUC_MIN_EXPR:
3951 case REDUC_PLUS_EXPR:
3952 case WIDEN_SUM_EXPR:
3953 case WIDEN_MULT_EXPR:
3954 case DOT_PROD_EXPR:
3955 case SAD_EXPR:
3956 case WIDEN_MULT_PLUS_EXPR:
3957 case WIDEN_MULT_MINUS_EXPR:
3958 case WIDEN_LSHIFT_EXPR:
3959
3960 case VEC_WIDEN_MULT_HI_EXPR:
3961 case VEC_WIDEN_MULT_LO_EXPR:
3962 case VEC_WIDEN_MULT_EVEN_EXPR:
3963 case VEC_WIDEN_MULT_ODD_EXPR:
3964 case VEC_UNPACK_HI_EXPR:
3965 case VEC_UNPACK_LO_EXPR:
3966 case VEC_UNPACK_FLOAT_HI_EXPR:
3967 case VEC_UNPACK_FLOAT_LO_EXPR:
3968 case VEC_PACK_TRUNC_EXPR:
3969 case VEC_PACK_SAT_EXPR:
3970 case VEC_PACK_FIX_TRUNC_EXPR:
3971 case VEC_WIDEN_LSHIFT_HI_EXPR:
3972 case VEC_WIDEN_LSHIFT_LO_EXPR:
3973
3974 return 1;
3975
3976 /* A few special cases of expensive operations. This is useful
3977 to avoid inlining functions having too many of these. */
3978 case TRUNC_DIV_EXPR:
3979 case CEIL_DIV_EXPR:
3980 case FLOOR_DIV_EXPR:
3981 case ROUND_DIV_EXPR:
3982 case EXACT_DIV_EXPR:
3983 case TRUNC_MOD_EXPR:
3984 case CEIL_MOD_EXPR:
3985 case FLOOR_MOD_EXPR:
3986 case ROUND_MOD_EXPR:
3987 case RDIV_EXPR:
3988 if (TREE_CODE (op2) != INTEGER_CST)
3989 return weights->div_mod_cost;
3990 return 1;
3991
3992 default:
3993 /* We expect a copy assignment with no operator. */
3994 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3995 return 0;
3996 }
3997 }
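/* For example, "x / 7" has an INTEGER_CST divisor and is charged 1,
   on the expectation that expansion strength-reduces it, while
   "x / y" with a variable divisor is charged weights->div_mod_cost
   (see init_inline_once below for the concrete weights).  */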
3998
3999
4000 /* Estimate number of instructions that will be created by expanding
4001 the statements in the statement sequence STMTS.
4002 WEIGHTS contains weights attributed to various constructs. */
4003
4004 static
4005 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4006 {
4007 int cost;
4008 gimple_stmt_iterator gsi;
4009
4010 cost = 0;
4011 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4012 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4013
4014 return cost;
4015 }
4016
4017
4018 /* Estimate number of instructions that will be created by expanding STMT.
4019 WEIGHTS contains weights attributed to various constructs. */
4020
4021 int
4022 estimate_num_insns (gimple stmt, eni_weights *weights)
4023 {
4024 unsigned cost, i;
4025 enum gimple_code code = gimple_code (stmt);
4026 tree lhs;
4027 tree rhs;
4028
4029 switch (code)
4030 {
4031 case GIMPLE_ASSIGN:
4032 /* Try to estimate the cost of assignments. We have two cases to
4033 deal with:
4034 1) Simple assignments to registers;
4035 2) Stores to things that must live in memory. This includes
4036 "normal" stores to scalars, but also assignments of large
4037 structures, or constructors of big arrays;
4038
4039 Let us look at these two cases, assuming we have "a = b + C":
4040 <GIMPLE_ASSIGN <var_decl "a">
4041 <plus_expr <var_decl "b"> <constant C>>
4042 If "a" is a GIMPLE register, the assignment to it is free on almost
4043 any target, because "a" usually ends up in a real register. Hence
4044 the only cost of this expression comes from the PLUS_EXPR, and we
4045 can ignore the GIMPLE_ASSIGN.
4046 If "a" is not a GIMPLE register, the assignment to "a" will most
4047 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4048 of moving something into "a", which we compute using the function
4049 estimate_move_cost. */
4050 if (gimple_clobber_p (stmt))
4051 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4052
4053 lhs = gimple_assign_lhs (stmt);
4054 rhs = gimple_assign_rhs1 (stmt);
4055
4056 cost = 0;
4057
4058 /* Account for the cost of moving to / from memory. */
4059 if (gimple_store_p (stmt))
4060 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4061 if (gimple_assign_load_p (stmt))
4062 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4063
4064 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4065 gimple_assign_rhs1 (stmt),
4066 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4067 == GIMPLE_BINARY_RHS
4068 ? gimple_assign_rhs2 (stmt) : NULL);
4069 break;
4070
4071 case GIMPLE_COND:
4072 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4073 gimple_op (stmt, 0),
4074 gimple_op (stmt, 1));
4075 break;
4076
4077 case GIMPLE_SWITCH:
4078 {
4079 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4080 /* Take into account cost of the switch + guess 2 conditional jumps for
4081 each case label.
4082
4083 TODO: once the switch expansion logic is sufficiently separated, we can
4084 do a better job of estimating the cost of the switch. */
4085 if (weights->time_based)
4086 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4087 else
4088 cost = gimple_switch_num_labels (switch_stmt) * 2;
4089 }
4090 break;
4091
4092 case GIMPLE_CALL:
4093 {
4094 tree decl;
4095
4096 if (gimple_call_internal_p (stmt))
4097 return 0;
4098 else if ((decl = gimple_call_fndecl (stmt))
4099 && DECL_BUILT_IN (decl))
4100 {
4101 /* Do not special case builtins where we see the body.
4102 This just confuses the inliner. */
4103 struct cgraph_node *node;
4104 if (!(node = cgraph_node::get (decl))
4105 || node->definition)
4106 ;
4107 /* For builtins that are likely expanded to nothing or
4108 inlined do not account operand costs. */
4109 else if (is_simple_builtin (decl))
4110 return 0;
4111 else if (is_inexpensive_builtin (decl))
4112 return weights->target_builtin_call_cost;
4113 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4114 {
4115 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4116 specialize the cheap expansion we do here.
4117 ??? This asks for a more general solution. */
4118 switch (DECL_FUNCTION_CODE (decl))
4119 {
4120 case BUILT_IN_POW:
4121 case BUILT_IN_POWF:
4122 case BUILT_IN_POWL:
4123 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4124 && REAL_VALUES_EQUAL
4125 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4126 return estimate_operator_cost
4127 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4128 gimple_call_arg (stmt, 0));
4129 break;
4130
4131 default:
4132 break;
4133 }
4134 }
4135 }
4136
4137 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4138 if (gimple_call_lhs (stmt))
4139 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4140 weights->time_based);
4141 for (i = 0; i < gimple_call_num_args (stmt); i++)
4142 {
4143 tree arg = gimple_call_arg (stmt, i);
4144 cost += estimate_move_cost (TREE_TYPE (arg),
4145 weights->time_based);
4146 }
4147 break;
4148 }
4149
4150 case GIMPLE_RETURN:
4151 return weights->return_cost;
4152
4153 case GIMPLE_GOTO:
4154 case GIMPLE_LABEL:
4155 case GIMPLE_NOP:
4156 case GIMPLE_PHI:
4157 case GIMPLE_PREDICT:
4158 case GIMPLE_DEBUG:
4159 return 0;
4160
4161 case GIMPLE_ASM:
4162 {
4163 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4164 /* 1000 means infinity. This avoids overflows later
4165 with very long asm statements. */
4166 if (count > 1000)
4167 count = 1000;
4168 return count;
4169 }
4170
4171 case GIMPLE_RESX:
4172 /* This is either going to be an external function call with one
4173 argument, or two register copy statements plus a goto. */
4174 return 2;
4175
4176 case GIMPLE_EH_DISPATCH:
4177 /* ??? This is going to turn into a switch statement. Ideally
4178 we'd have a look at the eh region and estimate the number of
4179 edges involved. */
4180 return 10;
4181
4182 case GIMPLE_BIND:
4183 return estimate_num_insns_seq (
4184 gimple_bind_body (as_a <gbind *> (stmt)),
4185 weights);
4186
4187 case GIMPLE_EH_FILTER:
4188 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4189
4190 case GIMPLE_CATCH:
4191 return estimate_num_insns_seq (gimple_catch_handler (
4192 as_a <gcatch *> (stmt)),
4193 weights);
4194
4195 case GIMPLE_TRY:
4196 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4197 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4198
4199 /* OMP directives are generally very expensive. */
4200
4201 case GIMPLE_OMP_RETURN:
4202 case GIMPLE_OMP_SECTIONS_SWITCH:
4203 case GIMPLE_OMP_ATOMIC_STORE:
4204 case GIMPLE_OMP_CONTINUE:
4205 /* ...except these, which are cheap. */
4206 return 0;
4207
4208 case GIMPLE_OMP_ATOMIC_LOAD:
4209 return weights->omp_cost;
4210
4211 case GIMPLE_OMP_FOR:
4212 return (weights->omp_cost
4213 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4214 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4215
4216 case GIMPLE_OMP_PARALLEL:
4217 case GIMPLE_OMP_TASK:
4218 case GIMPLE_OMP_CRITICAL:
4219 case GIMPLE_OMP_MASTER:
4220 case GIMPLE_OMP_TASKGROUP:
4221 case GIMPLE_OMP_ORDERED:
4222 case GIMPLE_OMP_SECTION:
4223 case GIMPLE_OMP_SECTIONS:
4224 case GIMPLE_OMP_SINGLE:
4225 case GIMPLE_OMP_TARGET:
4226 case GIMPLE_OMP_TEAMS:
4227 return (weights->omp_cost
4228 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4229
4230 case GIMPLE_TRANSACTION:
4231 return (weights->tm_cost
4232 + estimate_num_insns_seq (gimple_transaction_body (
4233 as_a <gtransaction *> (stmt)),
4234 weights));
4235
4236 default:
4237 gcc_unreachable ();
4238 }
4239
4240 return cost;
4241 }
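/* A worked sketch for the GIMPLE_SWITCH case above: a switch with 10
   labels is charged 10 * 2 == 20 under the size weights, but only
   floor_log2 (10) * 2 == 6 under the time weights, reflecting the
   guess that expansion emits a balanced decision tree.  */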
4242
4243 /* Estimate number of instructions that will be created by expanding
4244 function FNDECL. WEIGHTS contains weights attributed to various
4245 constructs. */
4246
4247 int
4248 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4249 {
4250 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4251 gimple_stmt_iterator bsi;
4252 basic_block bb;
4253 int n = 0;
4254
4255 gcc_assert (my_function && my_function->cfg);
4256 FOR_EACH_BB_FN (bb, my_function)
4257 {
4258 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4259 n += estimate_num_insns (gsi_stmt (bsi), weights);
4260 }
4261
4262 return n;
4263 }
4264
4265
4266 /* Initializes weights used by estimate_num_insns. */
4267
4268 void
4269 init_inline_once (void)
4270 {
4271 eni_size_weights.call_cost = 1;
4272 eni_size_weights.indirect_call_cost = 3;
4273 eni_size_weights.target_builtin_call_cost = 1;
4274 eni_size_weights.div_mod_cost = 1;
4275 eni_size_weights.omp_cost = 40;
4276 eni_size_weights.tm_cost = 10;
4277 eni_size_weights.time_based = false;
4278 eni_size_weights.return_cost = 1;
4279
4280 /* Estimating time for call is difficult, since we have no idea what the
4281 called function does. In the current uses of eni_time_weights,
4282 underestimating the cost does less harm than overestimating it, so
4283 we choose a rather small value here. */
4284 eni_time_weights.call_cost = 10;
4285 eni_time_weights.indirect_call_cost = 15;
4286 eni_time_weights.target_builtin_call_cost = 1;
4287 eni_time_weights.div_mod_cost = 10;
4288 eni_time_weights.omp_cost = 40;
4289 eni_time_weights.tm_cost = 40;
4290 eni_time_weights.time_based = true;
4291 eni_time_weights.return_cost = 2;
4292 }
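/* Combining these weights with estimate_num_insns, an assignment like
   "*p = x / y" (a real store fed by a variable division) is charged
   div_mod_cost plus the move cost of the store: 10 plus the move cost
   under eni_time_weights, but only 1 plus the move cost under
   eni_size_weights.  */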
4293
4294 /* Estimate the number of instructions in a gimple_seq. */
4295
4296 int
4297 count_insns_seq (gimple_seq seq, eni_weights *weights)
4298 {
4299 gimple_stmt_iterator gsi;
4300 int n = 0;
4301 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4302 n += estimate_num_insns (gsi_stmt (gsi), weights);
4303
4304 return n;
4305 }
4306
4307
4308 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4309
4310 static void
4311 prepend_lexical_block (tree current_block, tree new_block)
4312 {
4313 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4314 BLOCK_SUBBLOCKS (current_block) = new_block;
4315 BLOCK_SUPERCONTEXT (new_block) = current_block;
4316 }
4317
4318 /* Add local variables from CALLEE to CALLER. */
4319
4320 static inline void
4321 add_local_variables (struct function *callee, struct function *caller,
4322 copy_body_data *id)
4323 {
4324 tree var;
4325 unsigned ix;
4326
4327 FOR_EACH_LOCAL_DECL (callee, ix, var)
4328 if (!can_be_nonlocal (var, id))
4329 {
4330 tree new_var = remap_decl (var, id);
4331
4332 /* Remap debug-expressions. */
4333 if (TREE_CODE (new_var) == VAR_DECL
4334 && DECL_HAS_DEBUG_EXPR_P (var)
4335 && new_var != var)
4336 {
4337 tree tem = DECL_DEBUG_EXPR (var);
4338 bool old_regimplify = id->regimplify;
4339 id->remapping_type_depth++;
4340 walk_tree (&tem, copy_tree_body_r, id, NULL);
4341 id->remapping_type_depth--;
4342 id->regimplify = old_regimplify;
4343 SET_DECL_DEBUG_EXPR (new_var, tem);
4344 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4345 }
4346 add_local_decl (caller, new_var);
4347 }
4348 }
4349
4350 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4351
4352 static bool
4353 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4354 {
4355 tree use_retvar;
4356 tree fn;
4357 hash_map<tree, tree> *dst;
4358 hash_map<tree, tree> *st = NULL;
4359 tree return_slot;
4360 tree modify_dest;
4361 tree return_bounds = NULL;
4362 location_t saved_location;
4363 struct cgraph_edge *cg_edge;
4364 cgraph_inline_failed_t reason;
4365 basic_block return_block;
4366 edge e;
4367 gimple_stmt_iterator gsi, stmt_gsi;
4368 bool successfully_inlined = FALSE;
4369 bool purge_dead_abnormal_edges;
4370 gcall *call_stmt;
4371 unsigned int i;
4372
4373 /* Set input_location here so we get the right instantiation context
4374 if we call instantiate_decl from inlinable_function_p. */
4375 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4376 saved_location = input_location;
4377 input_location = gimple_location (stmt);
4378
4379 /* From here on, we're only interested in CALL_EXPRs. */
4380 call_stmt = dyn_cast <gcall *> (stmt);
4381 if (!call_stmt)
4382 goto egress;
4383
4384 cg_edge = id->dst_node->get_edge (stmt);
4385 gcc_checking_assert (cg_edge);
4386 /* First, see if we can figure out what function is being called.
4387 If we cannot, then there is no hope of inlining the function. */
4388 if (cg_edge->indirect_unknown_callee)
4389 goto egress;
4390 fn = cg_edge->callee->decl;
4391 gcc_checking_assert (fn);
4392
4393 /* If FN is a declaration of a function in a nested scope that was
4394 globally declared inline, we don't set its DECL_INITIAL.
4395 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4396 C++ front-end uses it for cdtors to refer to their internal
4397 declarations, that are not real functions. Fortunately those
4398 don't have trees to be saved, so we can tell by checking their
4399 gimple_body. */
4400 if (!DECL_INITIAL (fn)
4401 && DECL_ABSTRACT_ORIGIN (fn)
4402 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4403 fn = DECL_ABSTRACT_ORIGIN (fn);
4404
4405 /* Don't try to inline functions that are not well-suited to inlining. */
4406 if (cg_edge->inline_failed)
4407 {
4408 reason = cg_edge->inline_failed;
4409 /* If this call was originally indirect, we do not want to emit any
4410 inlining related warnings or sorry messages because there are no
4411 guarantees regarding those. */
4412 if (cg_edge->indirect_inlining_edge)
4413 goto egress;
4414
4415 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4416 /* For extern inline functions that get redefined we have always
4417 silently ignored the always_inline flag. Better behaviour would
4418 be to keep both bodies and use the extern inline body
4419 for inlining, but we can't do that because frontends overwrite
4420 the body. */
4421 && !cg_edge->callee->local.redefined_extern_inline
4422 /* During early inline pass, report only when optimization is
4423 not turned on. */
4424 && (symtab->global_info_ready
4425 || !optimize
4426 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4427 /* PR 20090218-1_0.c. Body can be provided by another module. */
4428 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4429 {
4430 error ("inlining failed in call to always_inline %q+F: %s", fn,
4431 cgraph_inline_failed_string (reason));
4432 error ("called from here");
4433 }
4434 else if (warn_inline
4435 && DECL_DECLARED_INLINE_P (fn)
4436 && !DECL_NO_INLINE_WARNING_P (fn)
4437 && !DECL_IN_SYSTEM_HEADER (fn)
4438 && reason != CIF_UNSPECIFIED
4439 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4440 /* Do not warn about not inlined recursive calls. */
4441 && !cg_edge->recursive_p ()
4442 /* Avoid warnings during early inline pass. */
4443 && symtab->global_info_ready)
4444 {
4445 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4446 fn, _(cgraph_inline_failed_string (reason)));
4447 warning (OPT_Winline, "called from here");
4448 }
4449 goto egress;
4450 }
4451 fn = cg_edge->callee->decl;
4452 cg_edge->callee->get_untransformed_body ();
4453
4454 #ifdef ENABLE_CHECKING
4455 if (cg_edge->callee->decl != id->dst_node->decl)
4456 cg_edge->callee->verify ();
4457 #endif
4458
4459 /* We will be inlining this callee. */
4460 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4461 id->assign_stmts.create (0);
4462
4463 /* Update the caller's EH personality. */
4464 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4465 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4466 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4467
4468 /* Split the block holding the GIMPLE_CALL. */
4469 e = split_block (bb, stmt);
4470 bb = e->src;
4471 return_block = e->dest;
4472 remove_edge (e);
4473
4474 /* split_block splits after the statement; work around this by
4475 moving the call into the second block manually. Not pretty,
4476 but seems easier than doing the CFG manipulation by hand
4477 when the GIMPLE_CALL is in the last statement of BB. */
4478 stmt_gsi = gsi_last_bb (bb);
4479 gsi_remove (&stmt_gsi, false);
4480
4481 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4482 been the source of abnormal edges. In this case, schedule
4483 the removal of dead abnormal edges. */
4484 gsi = gsi_start_bb (return_block);
4485 if (gsi_end_p (gsi))
4486 {
4487 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4488 purge_dead_abnormal_edges = true;
4489 }
4490 else
4491 {
4492 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4493 purge_dead_abnormal_edges = false;
4494 }
4495
4496 stmt_gsi = gsi_start_bb (return_block);
4497
4498 /* Build a block containing code to initialize the arguments, the
4499 actual inline expansion of the body, and a label for the return
4500 statements within the function to jump to. The type of the
4501 statement expression is the return type of the function call.
4502 ??? If the call does not have an associated block then we will
4503 remap all callee blocks to NULL, effectively dropping most of
4504 its debug information. This should only happen for calls to
4505 artificial decls inserted by the compiler itself. We need to
4506 either link the inlined blocks into the caller block tree or
4507 not refer to them in any way to not break GC for locations. */
4508 if (gimple_block (stmt))
4509 {
4510 id->block = make_node (BLOCK);
4511 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4512 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4513 prepend_lexical_block (gimple_block (stmt), id->block);
4514 }
4515
4516 /* Local declarations will be replaced by their equivalents in this
4517 map. */
4518 st = id->decl_map;
4519 id->decl_map = new hash_map<tree, tree>;
4520 dst = id->debug_map;
4521 id->debug_map = NULL;
4522
4523 /* Record the function we are about to inline. */
4524 id->src_fn = fn;
4525 id->src_node = cg_edge->callee;
4526 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4527 id->call_stmt = stmt;
4528
4529 /* If the src function contains an IFN_VA_ARG, then so will the dst
4530 function after inlining. */
4531 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4532 {
4533 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4534 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4535 }
4536
4537 gcc_assert (!id->src_cfun->after_inlining);
4538
4539 id->entry_bb = bb;
4540 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4541 {
4542 gimple_stmt_iterator si = gsi_last_bb (bb);
4543 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4544 NOT_TAKEN),
4545 GSI_NEW_STMT);
4546 }
4547 initialize_inlined_parameters (id, stmt, fn, bb);
4548
4549 if (DECL_INITIAL (fn))
4550 {
4551 if (gimple_block (stmt))
4552 {
4553 tree *var;
4554
4555 prepend_lexical_block (id->block,
4556 remap_blocks (DECL_INITIAL (fn), id));
4557 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4558 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4559 == NULL_TREE));
4560 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4561 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4562 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4563 under it. The parameters can be then evaluated in the debugger,
4564 but don't show in backtraces. */
4565 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4566 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4567 {
4568 tree v = *var;
4569 *var = TREE_CHAIN (v);
4570 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4571 BLOCK_VARS (id->block) = v;
4572 }
4573 else
4574 var = &TREE_CHAIN (*var);
4575 }
4576 else
4577 remap_blocks_to_null (DECL_INITIAL (fn), id);
4578 }
4579
4580 /* Return statements in the function body will be replaced by jumps
4581 to the RET_LABEL. */
4582 gcc_assert (DECL_INITIAL (fn));
4583 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4584
4585 /* Find the LHS to which the result of this call is assigned. */
4586 return_slot = NULL;
4587 if (gimple_call_lhs (stmt))
4588 {
4589 modify_dest = gimple_call_lhs (stmt);
4590
4591 /* Remember where to copy returned bounds. */
4592 if (gimple_call_with_bounds_p (stmt)
4593 && TREE_CODE (modify_dest) == SSA_NAME)
4594 {
4595 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4596 if (retbnd)
4597 {
4598 return_bounds = gimple_call_lhs (retbnd);
4599 /* If returned bounds are not used then just
4600 remove unused call. */
4601 if (!return_bounds)
4602 {
4603 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4604 gsi_remove (&iter, true);
4605 }
4606 }
4607 }
4608
4609 /* The function which we are inlining might not return a value,
4610 in which case we should issue a warning that the function
4611 does not return a value. In that case the optimizers will
4612 see that the variable to which the value is assigned was not
4613 initialized. We do not want to issue a warning about that
4614 uninitialized variable. */
4615 if (DECL_P (modify_dest))
4616 TREE_NO_WARNING (modify_dest) = 1;
4617
4618 if (gimple_call_return_slot_opt_p (call_stmt))
4619 {
4620 return_slot = modify_dest;
4621 modify_dest = NULL;
4622 }
4623 }
4624 else
4625 modify_dest = NULL;
4626
4627 /* If we are inlining a call to the C++ operator new, we don't want
4628 to use type based alias analysis on the return value. Otherwise
4629 we may get confused if the compiler sees that the inlined new
4630 function returns a pointer which was just deleted. See bug
4631 33407. */
4632 if (DECL_IS_OPERATOR_NEW (fn))
4633 {
4634 return_slot = NULL;
4635 modify_dest = NULL;
4636 }
4637
4638 /* Declare the return variable for the function. */
4639 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4640 return_bounds, bb);
4641
4642 /* Add local vars in this inlined callee to caller. */
4643 add_local_variables (id->src_cfun, cfun, id);
4644
4645 if (dump_file && (dump_flags & TDF_DETAILS))
4646 {
4647 fprintf (dump_file, "Inlining ");
4648 print_generic_expr (dump_file, id->src_fn, 0);
4649 fprintf (dump_file, " to ");
4650 print_generic_expr (dump_file, id->dst_fn, 0);
4651 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4652 }
4653
4654 /* This is it. Duplicate the callee body. Assume callee is
4655 pre-gimplified. Note that we must not alter the caller
4656 function in any way before this point, as this CALL_EXPR may be
4657 a self-referential call; if we're calling ourselves, we need to
4658 duplicate our body before altering anything. */
4659 copy_body (id, cg_edge->callee->count,
4660 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4661 bb, return_block, NULL);
4662
4663 /* Reset the escaped solution. */
4664 if (cfun->gimple_df)
4665 pt_solution_reset (&cfun->gimple_df->escaped);
4666
4667 /* Clean up. */
4668 if (id->debug_map)
4669 {
4670 delete id->debug_map;
4671 id->debug_map = dst;
4672 }
4673 delete id->decl_map;
4674 id->decl_map = st;
4675
4676 /* Unlink the call's virtual operands before replacing it. */
4677 unlink_stmt_vdef (stmt);
4678 if (gimple_vdef (stmt)
4679 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4680 release_ssa_name (gimple_vdef (stmt));
4681
4682 /* If the inlined function returns a result that we care about,
4683 substitute the GIMPLE_CALL with an assignment of the return
4684 variable to the LHS of the call. That is, if STMT was
4685 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4686 if (use_retvar && gimple_call_lhs (stmt))
4687 {
4688 gimple old_stmt = stmt;
4689 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4690 gsi_replace (&stmt_gsi, stmt, false);
4691 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4692
4693 /* Copy bounds if we copy structure with bounds. */
4694 if (chkp_function_instrumented_p (id->dst_fn)
4695 && !BOUNDED_P (use_retvar)
4696 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4697 id->assign_stmts.safe_push (stmt);
4698 }
4699 else
4700 {
4701 /* Handle the case of inlining a function with no return
4702 statement, which causes the return value to become undefined. */
4703 if (gimple_call_lhs (stmt)
4704 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4705 {
4706 tree name = gimple_call_lhs (stmt);
4707 tree var = SSA_NAME_VAR (name);
4708 tree def = ssa_default_def (cfun, var);
4709
4710 if (def)
4711 {
4712 /* If the variable is used undefined, make this name
4713 undefined via a move. */
4714 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4715 gsi_replace (&stmt_gsi, stmt, true);
4716 }
4717 else
4718 {
4719 /* Otherwise make this variable undefined. */
4720 gsi_remove (&stmt_gsi, true);
4721 set_ssa_default_def (cfun, var, name);
4722 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4723 }
4724 }
4725 else
4726 gsi_remove (&stmt_gsi, true);
4727 }
4728
4729 /* Put returned bounds into the correct place if required. */
4730 if (return_bounds)
4731 {
4732 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4733 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4734 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4735 unlink_stmt_vdef (old_stmt);
4736 gsi_replace (&bnd_gsi, new_stmt, false);
4737 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4738 cgraph_update_edges_for_call_stmt (old_stmt,
4739 gimple_call_fndecl (old_stmt),
4740 new_stmt);
4741 }
4742
4743 if (purge_dead_abnormal_edges)
4744 {
4745 gimple_purge_dead_eh_edges (return_block);
4746 gimple_purge_dead_abnormal_call_edges (return_block);
4747 }
4748
4749 /* If the value of the new expression is ignored, that's OK. We
4750 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4751 the equivalent inlined version either. */
4752 if (is_gimple_assign (stmt))
4753 {
4754 gcc_assert (gimple_assign_single_p (stmt)
4755 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4756 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4757 }
4758
4759 /* Copy bounds for all generated assigns that need it. */
4760 for (i = 0; i < id->assign_stmts.length (); i++)
4761 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4762 id->assign_stmts.release ();
4763
4764 /* Output the inlining info for this abstract function, since it has been
4765 inlined. If we don't do this now, we can lose the information about the
4766 variables in the function when the blocks get blown away as soon as we
4767 remove the cgraph node. */
4768 if (gimple_block (stmt))
4769 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4770
4771 /* Update callgraph if needed. */
4772 cg_edge->callee->remove ();
4773
4774 id->block = NULL_TREE;
4775 successfully_inlined = TRUE;
4776
4777 egress:
4778 input_location = saved_location;
4779 return successfully_inlined;
4780 }
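/* A before/after sketch of the transformation above (illustrative
   GIMPLE with invented names): inlining "a = foo (b)" where foo
   returns its argument plus one turns

     a = foo (b);

   into roughly

     x = b;              initialize_inlined_parameters
     retval.1 = x + 1;   copied body; the return became an assignment
     a = retval.1;       use_retvar substituted for the GIMPLE_CALL

   with the copied blocks spliced between the two halves of the split
   BB and the callee's edge removed from the callgraph.  */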
4781
4782 /* Expand call statements reachable from STMT_P.
4783 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4784 in a MODIFY_EXPR. */
4785
4786 static bool
4787 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4788 {
4789 gimple_stmt_iterator gsi;
4790 bool inlined = false;
4791
4792 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4793 {
4794 gimple stmt = gsi_stmt (gsi);
4795 gsi_prev (&gsi);
4796
4797 if (is_gimple_call (stmt)
4798 && !gimple_call_internal_p (stmt))
4799 inlined |= expand_call_inline (bb, stmt, id);
4800 }
4801
4802 return inlined;
4803 }
4804
4805
4806 /* Walk all basic blocks created after FIRST and try to fold every statement
4807 in the STATEMENTS pointer set. */
4808
4809 static void
4810 fold_marked_statements (int first, hash_set<gimple> *statements)
4811 {
4812 for (; first < n_basic_blocks_for_fn (cfun); first++)
4813 if (BASIC_BLOCK_FOR_FN (cfun, first))
4814 {
4815 gimple_stmt_iterator gsi;
4816
4817 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4818 !gsi_end_p (gsi);
4819 gsi_next (&gsi))
4820 if (statements->contains (gsi_stmt (gsi)))
4821 {
4822 gimple old_stmt = gsi_stmt (gsi);
4823 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4824
4825 if (old_decl && DECL_BUILT_IN (old_decl))
4826 {
4827 /* Folding builtins can create multiple instructions,
4828 we need to look at all of them. */
4829 gimple_stmt_iterator i2 = gsi;
4830 gsi_prev (&i2);
4831 if (fold_stmt (&gsi))
4832 {
4833 gimple new_stmt;
4834 /* If a builtin at the end of a bb folded into nothing,
4835 the following loop won't work. */
4836 if (gsi_end_p (gsi))
4837 {
4838 cgraph_update_edges_for_call_stmt (old_stmt,
4839 old_decl, NULL);
4840 break;
4841 }
4842 if (gsi_end_p (i2))
4843 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4844 else
4845 gsi_next (&i2);
4846 while (1)
4847 {
4848 new_stmt = gsi_stmt (i2);
4849 update_stmt (new_stmt);
4850 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4851 new_stmt);
4852
4853 if (new_stmt == gsi_stmt (gsi))
4854 {
4855 /* It is okay to check only for the very last
4856 of these statements. If it is a throwing
4857 statement nothing will change. If it isn't,
4858 this can remove EH edges. That would only
4859 be wrong if some intermediate stmts could
4860 throw while the last one doesn't; then we
4861 would have to split the block, which we
4862 can't do here, and we'd lose anyway. And
4863 as builtins probably never throw, this
4864 all is moot anyway. */
4865 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4866 new_stmt))
4867 gimple_purge_dead_eh_edges (
4868 BASIC_BLOCK_FOR_FN (cfun, first));
4869 break;
4870 }
4871 gsi_next (&i2);
4872 }
4873 }
4874 }
4875 else if (fold_stmt (&gsi))
4876 {
4877 /* Re-read the statement from GSI as fold_stmt() may
4878 have changed it. */
4879 gimple new_stmt = gsi_stmt (gsi);
4880 update_stmt (new_stmt);
4881
4882 if (is_gimple_call (old_stmt)
4883 || is_gimple_call (new_stmt))
4884 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4885 new_stmt);
4886
4887 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4888 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4889 first));
4890 }
4891 }
4892 }
4893 }
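/* As an illustrative example of the builtin branch above: if
   fold_stmt turns a call such as "memcpy (&a, &b, 4)" into one or
   more plain assignments, the loop over I2 revisits each statement
   the fold produced, updating its call-graph edges and, for the last
   one, purging EH edges that the folded call no longer needs.  */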
4894
4895 /* Expand calls to inline functions in the body of FN. */
4896
4897 unsigned int
4898 optimize_inline_calls (tree fn)
4899 {
4900 copy_body_data id;
4901 basic_block bb;
4902 int last = n_basic_blocks_for_fn (cfun);
4903 bool inlined_p = false;
4904
4905 /* Clear out ID. */
4906 memset (&id, 0, sizeof (id));
4907
4908 id.src_node = id.dst_node = cgraph_node::get (fn);
4909 gcc_assert (id.dst_node->definition);
4910 id.dst_fn = fn;
4911 /* Or any functions that aren't finished yet. */
4912 if (current_function_decl)
4913 id.dst_fn = current_function_decl;
4914
4915 id.copy_decl = copy_decl_maybe_to_var;
4916 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4917 id.transform_new_cfg = false;
4918 id.transform_return_to_modify = true;
4919 id.transform_parameter = true;
4920 id.transform_lang_insert_block = NULL;
4921 id.statements_to_fold = new hash_set<gimple>;
4922
4923 push_gimplify_context ();
4924
4925 /* We make no attempts to keep dominance info up-to-date. */
4926 free_dominance_info (CDI_DOMINATORS);
4927 free_dominance_info (CDI_POST_DOMINATORS);
4928
4929 /* Register specific gimple functions. */
4930 gimple_register_cfg_hooks ();
4931
4932 /* Reach the trees by walking over the CFG, and note the
4933 enclosing basic-blocks in the call edges. */
4934 /* We walk the blocks going forward, because inlined function bodies
4935 will split id->current_basic_block, and the new blocks will
4936 follow it; we'll trudge through them, processing their CALL_EXPRs
4937 along the way. */
4938 FOR_EACH_BB_FN (bb, cfun)
4939 inlined_p |= gimple_expand_calls_inline (bb, &id);
4940
4941 pop_gimplify_context (NULL);
4942
4943 #ifdef ENABLE_CHECKING
4944 {
4945 struct cgraph_edge *e;
4946
4947 id.dst_node->verify ();
4948
4949 /* Double check that we inlined everything we are supposed to inline. */
4950 for (e = id.dst_node->callees; e; e = e->next_callee)
4951 gcc_assert (e->inline_failed);
4952 }
4953 #endif
4954
4955 /* Fold queued statements. */
4956 fold_marked_statements (last, id.statements_to_fold);
4957 delete id.statements_to_fold;
4958
4959 gcc_assert (!id.debug_stmts.exists ());
4960
4961 /* If we didn't inline into the function there is nothing to do. */
4962 if (!inlined_p)
4963 return 0;
4964
4965 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4966 number_blocks (fn);
4967
4968 delete_unreachable_blocks_update_callgraph (&id);
4969 #ifdef ENABLE_CHECKING
4970 id.dst_node->verify ();
4971 #endif
4972
4973 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4974 not possible yet - the IPA passes might make various functions not
4975 throw and they don't care to proactively update local EH info. This is
4976 done later in the fixup_cfg pass, which also executes the verification. */
4977 return (TODO_update_ssa
4978 | TODO_cleanup_cfg
4979 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4980 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4981 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4982 ? TODO_rebuild_frequencies : 0));
4983 }
4984
4985 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4986
4987 tree
4988 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4989 {
4990 enum tree_code code = TREE_CODE (*tp);
4991 enum tree_code_class cl = TREE_CODE_CLASS (code);
4992
4993 /* We make copies of most nodes. */
4994 if (IS_EXPR_CODE_CLASS (cl)
4995 || code == TREE_LIST
4996 || code == TREE_VEC
4997 || code == TYPE_DECL
4998 || code == OMP_CLAUSE)
4999 {
5000 /* Because the chain gets clobbered when we make a copy, we save it
5001 here. */
5002 tree chain = NULL_TREE, new_tree;
5003
5004 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5005 chain = TREE_CHAIN (*tp);
5006
5007 /* Copy the node. */
5008 new_tree = copy_node (*tp);
5009
5010 *tp = new_tree;
5011
5012 /* Now, restore the chain, if appropriate. That will cause
5013 walk_tree to walk into the chain as well. */
5014 if (code == PARM_DECL
5015 || code == TREE_LIST
5016 || code == OMP_CLAUSE)
5017 TREE_CHAIN (*tp) = chain;
5018
5019 /* For now, we don't update BLOCKs when we make copies. So, we
5020 have to nullify all BIND_EXPRs. */
5021 if (TREE_CODE (*tp) == BIND_EXPR)
5022 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5023 }
5024 else if (code == CONSTRUCTOR)
5025 {
5026 /* CONSTRUCTOR nodes need special handling because
5027 we need to duplicate the vector of elements. */
5028 tree new_tree;
5029
5030 new_tree = copy_node (*tp);
5031 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5032 *tp = new_tree;
5033 }
5034 else if (code == STATEMENT_LIST)
5035 /* We used to just abort on STATEMENT_LIST, but we can run into them
5036 with statement-expressions (c++/40975). */
5037 copy_statement_list (tp);
5038 else if (TREE_CODE_CLASS (code) == tcc_type)
5039 *walk_subtrees = 0;
5040 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5041 *walk_subtrees = 0;
5042 else if (TREE_CODE_CLASS (code) == tcc_constant)
5043 *walk_subtrees = 0;
5044 return NULL_TREE;
5045 }
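/* A minimal usage sketch (assuming some tree EXPR to duplicate):

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   After the walk, COPY shares no expression nodes with EXPR, while
   most declarations, types and constants remain shared because the
   tcc_* cases above clear *WALK_SUBTREES for them.  */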
5046
5047 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5048 information indicating to what new SAVE_EXPR this one should be mapped,
5049 use that one. Otherwise, create a new node and enter it in ST. FN is
5050 the function into which the copy will be placed. */
5051
5052 static void
5053 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5054 {
5055 tree *n;
5056 tree t;
5057
5058 /* See if we already encountered this SAVE_EXPR. */
5059 n = st->get (*tp);
5060
5061 /* If we didn't already remap this SAVE_EXPR, do so now. */
5062 if (!n)
5063 {
5064 t = copy_node (*tp);
5065
5066 /* Remember this SAVE_EXPR. */
5067 st->put (*tp, t);
5068 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5069 st->put (t, t);
5070 }
5071 else
5072 {
5073 /* We've already walked into this SAVE_EXPR; don't do it again. */
5074 *walk_subtrees = 0;
5075 t = *n;
5076 }
5077
5078 /* Replace this SAVE_EXPR with the copy. */
5079 *tp = t;
5080 }
5081
5082 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5083 label, copies the declaration and enters it in the decl map in DATA (which
5084 is really a 'copy_body_data *'). */
5085
5086 static tree
5087 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5088 bool *handled_ops_p ATTRIBUTE_UNUSED,
5089 struct walk_stmt_info *wi)
5090 {
5091 copy_body_data *id = (copy_body_data *) wi->info;
5092 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5093
5094 if (stmt)
5095 {
5096 tree decl = gimple_label_label (stmt);
5097
5098 /* Copy the decl and remember the copy. */
5099 insert_decl_map (id, decl, id->copy_decl (decl, id));
5100 }
5101
5102 return NULL_TREE;
5103 }
5104
5105
5106 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5107 Using the decl_map hash_map of the copy_body_data passed through DATA,
5108 remaps all local declarations to appropriate replacements in gimple
5109 operands. */
5110
5111 static tree
5112 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5113 {
5114 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5115 copy_body_data *id = (copy_body_data *) wi->info;
5116 hash_map<tree, tree> *st = id->decl_map;
5117 tree *n;
5118 tree expr = *tp;
5119
5120 /* Only a local declaration (variable or label). */
5121 if ((TREE_CODE (expr) == VAR_DECL
5122 && !TREE_STATIC (expr))
5123 || TREE_CODE (expr) == LABEL_DECL)
5124 {
5125 /* Lookup the declaration. */
5126 n = st->get (expr);
5127
5128 /* If it's there, remap it. */
5129 if (n)
5130 *tp = *n;
5131 *walk_subtrees = 0;
5132 }
5133 else if (TREE_CODE (expr) == STATEMENT_LIST
5134 || TREE_CODE (expr) == BIND_EXPR
5135 || TREE_CODE (expr) == SAVE_EXPR)
5136 gcc_unreachable ();
5137 else if (TREE_CODE (expr) == TARGET_EXPR)
5138 {
5139 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5140 It's OK for this to happen if it was part of a subtree that
5141 isn't immediately expanded, such as operand 2 of another
5142 TARGET_EXPR. */
5143 if (!TREE_OPERAND (expr, 1))
5144 {
5145 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5146 TREE_OPERAND (expr, 3) = NULL_TREE;
5147 }
5148 }
5149
5150 /* Keep iterating. */
5151 return NULL_TREE;
5152 }
5153
5154
5155 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5156 Using the decl_map hash_map of the copy_body_data passed through WI,
5157 remaps all local declarations to appropriate replacements in gimple
5158 statements. */
5159
5160 static tree
5161 replace_locals_stmt (gimple_stmt_iterator *gsip,
5162 bool *handled_ops_p ATTRIBUTE_UNUSED,
5163 struct walk_stmt_info *wi)
5164 {
5165 copy_body_data *id = (copy_body_data *) wi->info;
5166 gimple gs = gsi_stmt (*gsip);
5167
5168 if (gbind *stmt = dyn_cast <gbind *> (gs))
5169 {
5170 tree block = gimple_bind_block (stmt);
5171
5172 if (block)
5173 {
5174 remap_block (&block, id);
5175 gimple_bind_set_block (stmt, block);
5176 }
5177
5178 /* This will remap a lot of the same decls again, but this should be
5179 harmless. */
5180 if (gimple_bind_vars (stmt))
5181 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5182 NULL, id));
5183 }
5184
5185 /* Keep iterating. */
5186 return NULL_TREE;
5187 }
5188
5189
5190 /* Copies everything in SEQ and replaces variables and labels local to
5191 current_function_decl. */
5192
5193 gimple_seq
5194 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5195 {
5196 copy_body_data id;
5197 struct walk_stmt_info wi;
5198 gimple_seq copy;
5199
5200 /* There's nothing to do for NULL_TREE. */
5201 if (seq == NULL)
5202 return seq;
5203
5204 /* Set up ID. */
5205 memset (&id, 0, sizeof (id));
5206 id.src_fn = current_function_decl;
5207 id.dst_fn = current_function_decl;
5208 id.decl_map = new hash_map<tree, tree>;
5209 id.debug_map = NULL;
5210
5211 id.copy_decl = copy_decl_no_change;
5212 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5213 id.transform_new_cfg = false;
5214 id.transform_return_to_modify = false;
5215 id.transform_parameter = false;
5216 id.transform_lang_insert_block = NULL;
5217
5218 /* Walk the tree once to find local labels. */
5219 memset (&wi, 0, sizeof (wi));
5220 hash_set<tree> visited;
5221 wi.info = &id;
5222 wi.pset = &visited;
5223 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5224
5225 copy = gimple_seq_copy (seq);
5226
5227 /* Walk the copy, remapping decls. */
5228 memset (&wi, 0, sizeof (wi));
5229 wi.info = &id;
5230 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5231
5232 /* Clean up. */
5233 delete id.decl_map;
5234 if (id.debug_map)
5235 delete id.debug_map;
5236 if (id.dependence_map)
5237 {
5238 delete id.dependence_map;
5239 id.dependence_map = NULL;
5240 }
5241
5242 return copy;
5243 }
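/* A hedged usage sketch: a pass needing a private copy of a sequence
   SEQ from the current function (say, to duplicate a region body) can
   write

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   and then modify DUP freely; labels and the automatic locals declared
   in SEQ's binds are re-created, while statics and decls from other
   functions stay shared.  */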
5244
5245
5246 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5247
5248 static tree
5249 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5250 {
5251 if (*tp == data)
5252 return (tree) data;
5253 else
5254 return NULL;
5255 }
5256
5257 DEBUG_FUNCTION bool
5258 debug_find_tree (tree top, tree search)
5259 {
5260 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5261 }
5262
5263
5264 /* Declare the variables created by the inliner. Add all the variables in
5265 VARS to BLOCK. */
5266
5267 static void
5268 declare_inline_vars (tree block, tree vars)
5269 {
5270 tree t;
5271 for (t = vars; t; t = DECL_CHAIN (t))
5272 {
5273 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5274 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5275 add_local_decl (cfun, t);
5276 }
5277
5278 if (block)
5279 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5280 }
5281
5282 /* Finish setting up the copy COPY of DECL. The DECL originally was in
5283 ID->src_fn, but the copy will be in ID->dst_fn; adjust its flags,
5284 debug information and DECL_CONTEXT accordingly. */
5285
5286 static tree
5287 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5288 {
5289 /* Don't generate debug information for the copy if we wouldn't have
5290 generated it for the original either. */
5291 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5292 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5293
5294 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5295 declaration inspired this copy. */
5296 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5297
5298 /* The new variable/label has no RTL, yet. */
5299 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5300 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5301 SET_DECL_RTL (copy, 0);
5302
5303 /* These args would always appear unused, if not for this. */
5304 TREE_USED (copy) = 1;
5305
5306 /* Set the context for the new declaration. */
5307 if (!DECL_CONTEXT (decl))
5308 /* Globals stay global. */
5309 ;
5310 else if (DECL_CONTEXT (decl) != id->src_fn)
5311 /* Things that weren't in the scope of the function we're inlining
5312 from aren't in the scope we're inlining to, either. */
5313 ;
5314 else if (TREE_STATIC (decl))
5315 /* Function-scoped static variables should stay in the original
5316 function. */
5317 ;
5318 else
5319 /* Ordinary automatic local variables are now in the scope of the
5320 new function. */
5321 DECL_CONTEXT (copy) = id->dst_fn;
5322
5323 return copy;
5324 }
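
/* As an illustration of the context rules above: when inlining

     static int counter;
     int f (void) { int x = 0; return x + counter++; }

   into some caller G, the automatic X gets DECL_CONTEXT set to G,
   while the function-scoped static COUNTER keeps F as its context;
   the single static instance is shared rather than duplicated.  */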
5325
5326 static tree
5327 copy_decl_to_var (tree decl, copy_body_data *id)
5328 {
5329 tree copy, type;
5330
5331 gcc_assert (TREE_CODE (decl) == PARM_DECL
5332 || TREE_CODE (decl) == RESULT_DECL);
5333
5334 type = TREE_TYPE (decl);
5335
5336 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5337 VAR_DECL, DECL_NAME (decl), type);
5338 if (DECL_PT_UID_SET_P (decl))
5339 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5340 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5341 TREE_READONLY (copy) = TREE_READONLY (decl);
5342 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5343 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5344
5345 return copy_decl_for_dup_finish (id, decl, copy);
5346 }
5347
5348 /* Like copy_decl_to_var, but create a return slot object instead of a
5349 pointer variable for return by invisible reference. */
5350
5351 static tree
5352 copy_result_decl_to_var (tree decl, copy_body_data *id)
5353 {
5354 tree copy, type;
5355
5356 gcc_assert (TREE_CODE (decl) == PARM_DECL
5357 || TREE_CODE (decl) == RESULT_DECL);
5358
5359 type = TREE_TYPE (decl);
5360 if (DECL_BY_REFERENCE (decl))
5361 type = TREE_TYPE (type);
5362
5363 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5364 VAR_DECL, DECL_NAME (decl), type);
5365 if (DECL_PT_UID_SET_P (decl))
5366 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5367 TREE_READONLY (copy) = TREE_READONLY (decl);
5368 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5369 if (!DECL_BY_REFERENCE (decl))
5370 {
5371 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5372 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5373 }
5374
5375 return copy_decl_for_dup_finish (id, decl, copy);
5376 }
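
/* For instance, given a function that returns a large aggregate by
   invisible reference,

     struct big f (void);

   the RESULT_DECL has DECL_BY_REFERENCE set and pointer type
   'struct big *'; stripping one level of TREE_TYPE above makes the
   copy a 'struct big' return slot rather than a pointer variable.
   (Illustrative example.)  */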
5377
5378 tree
5379 copy_decl_no_change (tree decl, copy_body_data *id)
5380 {
5381 tree copy;
5382
5383 copy = copy_node (decl);
5384
5385 /* The COPY is not abstract; it will be generated in DST_FN. */
5386 DECL_ABSTRACT_P (copy) = false;
5387 lang_hooks.dup_lang_specific_decl (copy);
5388
5389 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5390 been taken; it's for internal bookkeeping in expand_goto_internal. */
5391 if (TREE_CODE (copy) == LABEL_DECL)
5392 {
5393 TREE_ADDRESSABLE (copy) = 0;
5394 LABEL_DECL_UID (copy) = -1;
5395 }
5396
5397 return copy_decl_for_dup_finish (id, decl, copy);
5398 }
5399
5400 static tree
5401 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5402 {
5403 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5404 return copy_decl_to_var (decl, id);
5405 else
5406 return copy_decl_no_change (decl, id);
5407 }
5408
5409 /* Return a copy of the function's argument tree, omitting the arguments named in ARGS_TO_SKIP. */
5410 static tree
5411 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5412 bitmap args_to_skip, tree *vars)
5413 {
5414 tree arg, *parg;
5415 tree new_parm = NULL;
5416 int i = 0;
5417
5418 parg = &new_parm;
5419
5420 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5421 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5422 {
5423 tree new_tree = remap_decl (arg, id);
5424 if (TREE_CODE (new_tree) != PARM_DECL)
5425 new_tree = id->copy_decl (arg, id);
5426 lang_hooks.dup_lang_specific_decl (new_tree);
5427 *parg = new_tree;
5428 parg = &DECL_CHAIN (new_tree);
5429 }
5430 else if (!id->decl_map->get (arg))
5431 {
5432 /* Make an equivalent VAR_DECL. If the argument was used
5433 as a temporary variable later in the function, its uses will be
5434 replaced by this local variable. */
5435 tree var = copy_decl_to_var (arg, id);
5436 insert_decl_map (id, arg, var);
5437 /* Declare this new variable. */
5438 DECL_CHAIN (var) = *vars;
5439 *vars = var;
5440 }
5441 return new_parm;
5442 }
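
/* Hypothetical example: versioning

     int f (int a, int b, int c);

   with bit 1 set in ARGS_TO_SKIP yields a clone whose DECL_ARGUMENTS
   chain is just (a, c); B is instead mapped to an equivalent VAR_DECL
   chained onto *VARS, so any uses of B remaining in the copied body
   stay valid.  */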
5443
5444 /* Return a copy of the function's static chain. */
5445 static tree
5446 copy_static_chain (tree static_chain, copy_body_data * id)
5447 {
5448 tree *chain_copy, *pvar;
5449
5450 chain_copy = &static_chain;
5451 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5452 {
5453 tree new_tree = remap_decl (*pvar, id);
5454 lang_hooks.dup_lang_specific_decl (new_tree);
5455 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5456 *pvar = new_tree;
5457 }
5458 return static_chain;
5459 }
5460
5461 /* Return true if the function is allowed to be versioned.
5462 This is a guard for the versioning functionality. */
5463
5464 bool
5465 tree_versionable_function_p (tree fndecl)
5466 {
5467 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5468 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5469 }
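
/* So, for example, a function declared

     __attribute__ ((noclone)) int f (int);

   is never versioned, and neither is any function for which
   copy_forbidden finds a reason its body cannot be duplicated.  */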
5470
5471 /* Delete all unreachable basic blocks and update the callgraph.
5472 Doing so is somewhat nontrivial because we need to update all clones and
5473 remove inline functions that become unreachable. */
5474
5475 static bool
5476 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5477 {
5478 bool changed = false;
5479 basic_block b, next_bb;
5480
5481 find_unreachable_blocks ();
5482
5483 /* Delete all unreachable basic blocks. */
5484
5485 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5486 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5487 {
5488 next_bb = b->next_bb;
5489
5490 if (!(b->flags & BB_REACHABLE))
5491 {
5492 gimple_stmt_iterator bsi;
5493
5494 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5495 {
5496 struct cgraph_edge *e;
5497 struct cgraph_node *node;
5498
5499 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5500
5501 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5502 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5503 {
5504 if (!e->inline_failed)
5505 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5506 else
5507 e->remove ();
5508 }
5509 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5510 && id->dst_node->clones)
5511 for (node = id->dst_node->clones; node != id->dst_node;)
5512 {
5513 node->remove_stmt_references (gsi_stmt (bsi));
5514 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5515 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5516 {
5517 if (!e->inline_failed)
5518 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5519 else
5520 e->remove ();
5521 }
5522
5523 if (node->clones)
5524 node = node->clones;
5525 else if (node->next_sibling_clone)
5526 node = node->next_sibling_clone;
5527 else
5528 {
5529 while (node != id->dst_node && !node->next_sibling_clone)
5530 node = node->clone_of;
5531 if (node != id->dst_node)
5532 node = node->next_sibling_clone;
5533 }
5534 }
5535 }
5536 delete_basic_block (b);
5537 changed = true;
5538 }
5539 }
5540
5541 return changed;
5542 }
5543
5544 /* Update clone info after duplication. */
5545
5546 static void
5547 update_clone_info (copy_body_data * id)
5548 {
5549 struct cgraph_node *node;
5550 if (!id->dst_node->clones)
5551 return;
5552 for (node = id->dst_node->clones; node != id->dst_node;)
5553 {
5554 /* First update replace maps to match the new body. */
5555 if (node->clone.tree_map)
5556 {
5557 unsigned int i;
5558 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5559 {
5560 struct ipa_replace_map *replace_info;
5561 replace_info = (*node->clone.tree_map)[i];
5562 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5563 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5564 }
5565 }
5566 if (node->clones)
5567 node = node->clones;
5568 else if (node->next_sibling_clone)
5569 node = node->next_sibling_clone;
5570 else
5571 {
5572 while (node != id->dst_node && !node->next_sibling_clone)
5573 node = node->clone_of;
5574 if (node != id->dst_node)
5575 node = node->next_sibling_clone;
5576 }
5577 }
5578 }
5579
5580 /* Create a copy of a function's tree.
5581 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5582 of the original function and the new copied function
5583 respectively. In case we want to replace a DECL
5584 tree with another tree while duplicating the function's
5585 body, TREE_MAP represents the mapping between these
5586 trees. If UPDATE_CLONES is set, the call_stmt fields
5587 of edges of clones of the function will be updated.
5588
5589 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5590 from the new version.
5591 If SKIP_RETURN is true, the new version will return void.
5592 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5593 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5594 */
5595 void
5596 tree_function_versioning (tree old_decl, tree new_decl,
5597 vec<ipa_replace_map *, va_gc> *tree_map,
5598 bool update_clones, bitmap args_to_skip,
5599 bool skip_return, bitmap blocks_to_copy,
5600 basic_block new_entry)
5601 {
5602 struct cgraph_node *old_version_node;
5603 struct cgraph_node *new_version_node;
5604 copy_body_data id;
5605 tree p;
5606 unsigned i;
5607 struct ipa_replace_map *replace_info;
5608 basic_block old_entry_block, bb;
5609 auto_vec<gimple, 10> init_stmts;
5610 tree vars = NULL_TREE;
5611
5612 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5613 && TREE_CODE (new_decl) == FUNCTION_DECL);
5614 DECL_POSSIBLY_INLINED (old_decl) = 1;
5615
5616 old_version_node = cgraph_node::get (old_decl);
5617 gcc_checking_assert (old_version_node);
5618 new_version_node = cgraph_node::get (new_decl);
5619 gcc_checking_assert (new_version_node);
5620
5621 /* Copy over debug args. */
5622 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5623 {
5624 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5625 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5626 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5627 old_debug_args = decl_debug_args_lookup (old_decl);
5628 if (old_debug_args)
5629 {
5630 new_debug_args = decl_debug_args_insert (new_decl);
5631 *new_debug_args = vec_safe_copy (*old_debug_args);
5632 }
5633 }
5634
5635 /* Output the inlining info for this abstract function, since it has been
5636 inlined. If we don't do this now, we can lose the information about the
5637 variables in the function when the blocks get blown away as soon as we
5638 remove the cgraph node. */
5639 (*debug_hooks->outlining_inline_function) (old_decl);
5640
5641 DECL_ARTIFICIAL (new_decl) = 1;
5642 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5643 if (DECL_ORIGIN (old_decl) == old_decl)
5644 old_version_node->used_as_abstract_origin = true;
5645 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5646
5647 /* Prepare the data structures for the tree copy. */
5648 memset (&id, 0, sizeof (id));
5649
5650 /* Set up the set of statements to fold after copying the body. */
5651 id.statements_to_fold = new hash_set<gimple>;
5652
5653 id.decl_map = new hash_map<tree, tree>;
5654 id.debug_map = NULL;
5655 id.src_fn = old_decl;
5656 id.dst_fn = new_decl;
5657 id.src_node = old_version_node;
5658 id.dst_node = new_version_node;
5659 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5660 id.blocks_to_copy = blocks_to_copy;
5661
5662 id.copy_decl = copy_decl_no_change;
5663 id.transform_call_graph_edges
5664 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5665 id.transform_new_cfg = true;
5666 id.transform_return_to_modify = false;
5667 id.transform_parameter = false;
5668 id.transform_lang_insert_block = NULL;
5669
5670 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5671 (DECL_STRUCT_FUNCTION (old_decl));
5672 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5673 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5674 initialize_cfun (new_decl, old_decl,
5675 old_entry_block->count);
5676 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5677 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5678 = id.src_cfun->gimple_df->ipa_pta;
5679
5680 /* Copy the function's static chain. */
5681 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5682 if (p)
5683 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5684 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5685 &id);
5686
5687 /* If there's a tree_map, prepare for substitution. */
5688 if (tree_map)
5689 for (i = 0; i < tree_map->length (); i++)
5690 {
5691 gimple init;
5692 replace_info = (*tree_map)[i];
5693 if (replace_info->replace_p)
5694 {
5695 if (!replace_info->old_tree)
5696 {
5697 int i = replace_info->parm_num;
5698 tree parm;
5699 tree req_type;
5700
5701 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5702 i--;
5703 replace_info->old_tree = parm;
5704 req_type = TREE_TYPE (parm);
5705 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5706 {
5707 if (fold_convertible_p (req_type, replace_info->new_tree))
5708 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5709 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5710 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5711 else
5712 {
5713 if (dump_file)
5714 {
5715 fprintf (dump_file, " const ");
5716 print_generic_expr (dump_file, replace_info->new_tree, 0);
5717 fprintf (dump_file, " can't be converted to param ");
5718 print_generic_expr (dump_file, parm, 0);
5719 fprintf (dump_file, "\n");
5720 }
5721 replace_info->old_tree = NULL;
5722 }
5723 }
5724 }
5725 else
5726 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5727 if (replace_info->old_tree)
5728 {
5729 init = setup_one_parameter (&id, replace_info->old_tree,
5730 replace_info->new_tree, id.src_fn,
5731 NULL,
5732 &vars);
5733 if (init)
5734 init_stmts.safe_push (init);
5735 }
5736 }
5737 }
5738 /* Copy the function's arguments. */
5739 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5740 DECL_ARGUMENTS (new_decl) =
5741 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5742 args_to_skip, &vars);
5743
5744 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5745 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5746
5747 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5748
5749 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5750 /* Add local vars. */
5751 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5752
5753 if (DECL_RESULT (old_decl) == NULL_TREE)
5754 ;
5755 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5756 {
5757 DECL_RESULT (new_decl)
5758 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5759 RESULT_DECL, NULL_TREE, void_type_node);
5760 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5761 cfun->returns_struct = 0;
5762 cfun->returns_pcc_struct = 0;
5763 }
5764 else
5765 {
5766 tree old_name;
5767 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5768 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5769 if (gimple_in_ssa_p (id.src_cfun)
5770 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5771 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5772 {
5773 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5774 insert_decl_map (&id, old_name, new_name);
5775 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5776 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5777 }
5778 }
5779
5780 /* Set up the destination function's loop tree. */
5781 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5782 {
5783 cfun->curr_properties &= ~PROP_loops;
5784 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5785 cfun->curr_properties |= PROP_loops;
5786 }
5787
5788 /* Copy the function's body. */
5789 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5790 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5791 new_entry);
5792
5793 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5794 number_blocks (new_decl);
5795
5796 /* We want to create the BB unconditionally, so that the addition of
5797 debug stmts doesn't affect BB count, which may in the end cause
5798 codegen differences. */
5799 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5800 while (init_stmts.length ())
5801 insert_init_stmt (&id, bb, init_stmts.pop ());
5802 update_clone_info (&id);
5803
5804 /* Remap the nonlocal_goto_save_area, if any. */
5805 if (cfun->nonlocal_goto_save_area)
5806 {
5807 struct walk_stmt_info wi;
5808
5809 memset (&wi, 0, sizeof (wi));
5810 wi.info = &id;
5811 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5812 }
5813
5814 /* Clean up. */
5815 delete id.decl_map;
5816 if (id.debug_map)
5817 delete id.debug_map;
5818 free_dominance_info (CDI_DOMINATORS);
5819 free_dominance_info (CDI_POST_DOMINATORS);
5820
5821 fold_marked_statements (0, id.statements_to_fold);
5822 delete id.statements_to_fold;
5823 fold_cond_expr_cond ();
5824 delete_unreachable_blocks_update_callgraph (&id);
5825 if (id.dst_node->definition)
5826 cgraph_edge::rebuild_references ();
5827 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5828 {
5829 calculate_dominance_info (CDI_DOMINATORS);
5830 fix_loop_structure (NULL);
5831 }
5832 update_ssa (TODO_update_ssa);
5833
5834 /* After partial cloning we need to rescale frequencies, so they are
5835 within proper range in the cloned function. */
5836 if (new_entry)
5837 {
5838 struct cgraph_edge *e;
5839 rebuild_frequencies ();
5840
5841 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5842 for (e = new_version_node->callees; e; e = e->next_callee)
5843 {
5844 basic_block bb = gimple_bb (e->call_stmt);
5845 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5846 bb);
5847 e->count = bb->count;
5848 }
5849 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5850 {
5851 basic_block bb = gimple_bb (e->call_stmt);
5852 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5853 bb);
5854 e->count = bb->count;
5855 }
5856 }
5857
5858 free_dominance_info (CDI_DOMINATORS);
5859 free_dominance_info (CDI_POST_DOMINATORS);
5860
5861 gcc_assert (!id.debug_stmts.exists ());
5862 pop_cfun ();
5863 return;
5864 }
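
/* In rough outline (a simplified sketch; see
   cgraph_node::create_version_clone_with_body in cgraphclones.c),
   a typical caller proceeds as

     tree new_decl = copy_node (old_decl);
     ... allocate struct function and cgraph node for NEW_DECL ...
     tree_function_versioning (old_decl, new_decl, tree_map,
			       false, args_to_skip, skip_return,
			       bbs_to_copy, new_entry);

   after which NEW_DECL has a complete body independent of OLD_DECL.  */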
5865
5866 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5867 the callee and return the inlined body on success. */
5868
5869 tree
5870 maybe_inline_call_in_expr (tree exp)
5871 {
5872 tree fn = get_callee_fndecl (exp);
5873
5874 /* We can only try to inline "const" functions. */
5875 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5876 {
5877 call_expr_arg_iterator iter;
5878 copy_body_data id;
5879 tree param, arg, t;
5880 hash_map<tree, tree> decl_map;
5881
5882 /* Remap the parameters. */
5883 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5884 param;
5885 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5886 decl_map.put (param, arg);
5887
5888 memset (&id, 0, sizeof (id));
5889 id.src_fn = fn;
5890 id.dst_fn = current_function_decl;
5891 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5892 id.decl_map = &decl_map;
5893
5894 id.copy_decl = copy_decl_no_change;
5895 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5896 id.transform_new_cfg = false;
5897 id.transform_return_to_modify = true;
5898 id.transform_parameter = true;
5899 id.transform_lang_insert_block = NULL;
5900
5901 /* Make sure not to unshare trees behind the front-end's back
5902 since front-end specific mechanisms may rely on sharing. */
5903 id.regimplify = false;
5904 id.do_not_unshare = true;
5905
5906 /* We're not inside any EH region. */
5907 id.eh_lp_nr = 0;
5908
5909 t = copy_tree_body (&id);
5910
5911 /* We can only return something suitable for use in a GENERIC
5912 expression tree. */
5913 if (TREE_CODE (t) == MODIFY_EXPR)
5914 return TREE_OPERAND (t, 1);
5915 }
5916
5917 return NULL_TREE;
5918 }
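
/* Illustrative example: given the "const" function

     static int sq (int i) { return i * i; }

   a front end simplifying the GENERIC call sq (x) can use this
   routine to obtain the expression I * I with I remapped to X, or
   NULL_TREE when the callee cannot be inlined this way.  */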
5919
5920 /* Duplicate a type, fields and all. */
5921
5922 tree
5923 build_duplicate_type (tree type)
5924 {
5925 struct copy_body_data id;
5926
5927 memset (&id, 0, sizeof (id));
5928 id.src_fn = current_function_decl;
5929 id.dst_fn = current_function_decl;
5930 id.src_cfun = cfun;
5931 id.decl_map = new hash_map<tree, tree>;
5932 id.debug_map = NULL;
5933 id.copy_decl = copy_decl_no_change;
5934
5935 type = remap_type_1 (type, &id);
5936
5937 delete id.decl_map;
5938 if (id.debug_map)
5939 delete id.debug_map;
5940
5941 TYPE_CANONICAL (type) = type;
5942
5943 return type;
5944 }
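
/* Sketch of use (hypothetical caller):

     tree dup = build_duplicate_type (orig);

   DUP has freshly copied fields and is made its own TYPE_CANONICAL,
   so canonical-type comparisons treat it as distinct from ORIG.  */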
5945
5946 /* Unshare the entire DECL_SAVED_TREE of FN, returning the copied body;
5947 store the remapped parameters and RESULT_DECL in PARMS and RESULT.
5948 Used by C++ constexpr evaluation. */
5949
5950 tree
5951 copy_fn (tree fn, tree& parms, tree& result)
5952 {
5953 copy_body_data id;
5954 tree param;
5955 hash_map<tree, tree> decl_map;
5956
5957 tree *p = &parms;
5958 *p = NULL_TREE;
5959
5960 memset (&id, 0, sizeof (id));
5961 id.src_fn = fn;
5962 id.dst_fn = current_function_decl;
5963 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5964 id.decl_map = &decl_map;
5965
5966 id.copy_decl = copy_decl_no_change;
5967 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5968 id.transform_new_cfg = false;
5969 id.transform_return_to_modify = false;
5970 id.transform_parameter = true;
5971 id.transform_lang_insert_block = NULL;
5972
5973 /* Make sure not to unshare trees behind the front-end's back
5974 since front-end specific mechanisms may rely on sharing. */
5975 id.regimplify = false;
5976 id.do_not_unshare = true;
5977
5978 /* We're not inside any EH region. */
5979 id.eh_lp_nr = 0;
5980
5981 /* Remap the parameters and result and return them to the caller. */
5982 for (param = DECL_ARGUMENTS (fn);
5983 param;
5984 param = DECL_CHAIN (param))
5985 {
5986 *p = remap_decl (param, &id);
5987 p = &DECL_CHAIN (*p);
5988 }
5989
5990 if (DECL_RESULT (fn))
5991 result = remap_decl (DECL_RESULT (fn), &id);
5992 else
5993 result = NULL_TREE;
5994
5995 return copy_tree_body (&id);
5996 }
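
/* For example, the C++ constexpr evaluation mentioned above, when
   reducing a call to

     constexpr int add (int a, int b) { return a + b; }

   can use copy_fn to obtain an unshared copy of the body together
   with the remapped PARMS and RESULT, bind the copies of A and B to
   2 and 3 in its value map, and evaluate the copied body to 5.
   (Illustrative sketch of the intended flow.)  */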