/* Tree inlining.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-chkp.h"

#include "rtl.h" /* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
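
/* As a rough illustrative sketch of the inlining transformation (the
   ".N" suffixes below are made-up names, not output of this file): given
   a callee

     int callee (int x) { return x + 1; }

   inlined at a call "y = callee (a);", the PARM_DECL X is remapped to a
   new local VAR_DECL initialized from A, and the RETURN_EXPR becomes
   approximately

     retval.0 = x.1 + 1;

   where RETVAL.0 is the returned-value variable set up by
   declare_return_variable, and Y is assigned from it at the call site.  */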

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
                                     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this mapping is used for more
   than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
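
/* For example, remapping PARM_DECL P to its replacement VAR_DECL P'
   records both P -> P' and the identity P' -> P', so a later walk that
   encounters the new node P' maps it to itself instead of copying it
   again.  */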

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
          && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
          gimple_stmt_iterator gsi;
          tree val = SSA_NAME_VAR (name);

          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
          if (TREE_CODE (val) != PARM_DECL)
            {
              processing_debug_stmt = -1;
              return name;
            }
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (var) == VAR_DECL
          && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
          && DECL_ARTIFICIAL (var)
          && DECL_IGNORED_P (var)
          && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
        SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      return new_tree;
    }
  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from needing to introduce a PHI node in case
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
          || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining a function with an uninitialized variable, we might
             extend its lifetime (the variable might get reused).  This causes
             an ICE if we end up extending the lifetime of an SSA name across
             an abnormal edge, and it also increases register pressure.

             We simply initialize all uninitialized vars to 0, except when we
             are inlining into the very first BB.  We could avoid this for all
             BBs that are not inside strongly connected regions of the CFG,
             but that is expensive to test.  */
          if (id->entry_bb
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                                             0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;
              tree zero = build_zero_cst (TREE_TYPE (new_tree));

              init_stmt = gimple_build_assign (new_tree, zero);
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
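
/* To illustrate the default-def handling above: if the callee reads an
   uninitialized local, say "tmp_3(D)" (an illustrative name), and we are
   not inlining into the very first BB, the remapped name receives an
   explicit "= 0" assignment at the end of id->entry_bb instead of staying
   a default definition, which avoids extending an undefined value's
   lifetime across abnormal edges.  */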

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
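
/* Usage-wise, remap_decl is idempotent per declaration: the first call
   creates the copy via id->copy_decl and records it in id->decl_map;
   subsequent calls for the same DECL return the recorded copy (unshared
   unless id->do_not_unshare is set).  */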

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type),
                                                          id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type),
                                                            id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure
     to preserve sharing between the type and its main variant when
     possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_MIN_VALUE (type)
                               == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
          gcc_checking_assert (TYPE_MAX_VALUE (type)
                               == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

          TYPE_MIN_VALUE (new_tree)
            = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
          TYPE_MAX_VALUE (new_tree)
            = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        {
          t = TYPE_MIN_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

          t = TYPE_MAX_VALUE (new_tree);
          if (t && TREE_CODE (t) != INTEGER_CST)
            walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
        }
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
        TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
        walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
          && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
        TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
        TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
          gcc_checking_assert (TYPE_DOMAIN (type)
                               == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
      else
        TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
          && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
        TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
        {
          tree f, nf = NULL;

          for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
            {
              t = remap_decl (f, id);
              DECL_CONTEXT (t) = new_tree;
              DECL_CHAIN (t) = nf;
              nf = t;
            }
          TYPE_FIELDS (new_tree) = nreverse (nf);
        }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      gcc_checking_assert (TYPE_SIZE (type)
                           == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
      gcc_checking_assert (TYPE_SIZE_UNIT (type)
                           == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));

      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
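
/* For example, in

     void f (int n) { int a[n]; ... }

   the type of A is variably modified: its domain depends on a value
   local to F, so copies of the body must remap the array type, whereas
   an ordinary type such as plain "int" is simply mapped to itself by
   the fast path above.  */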

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
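
/* E.g. a "static int counter;" declared inside the callee is not
   duplicated when the body is copied; remap_decls below instead records
   it in the nonlocalized list so debug info can still refer to the one
   original variable.  */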

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
             copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          /* We need to add this variable to the local decls as otherwise
             nothing else will do so.  */
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            vec_safe_push (*nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        /* This copy is not redundant; tsi_link_after will smash this
           STATEMENT_LIST into the end of the one we're building, and we
           don't want to do that with the original.  */
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
        return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0)
    return 0;
  if (!id->dependence_map)
    id->dependence_map
      = new hash_map<unsigned short, unsigned short, dependence_hasher>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}
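
/* For instance, if several MEM_REFs in the source function share
   dependence clique 2 (an illustrative number), they all receive the
   same fresh clique number (allocated from cfun->last_clique) in the
   destination function, so the dependence information encoded by
   clique/base pairs survives the copy without colliding with the
   destination's existing cliques.  */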

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp),
                                         TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
         has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, *tp);
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, remap_gimple_op_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  */
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
        {
          tree *n;
          n = id->decl_map->get (TREE_BLOCK (*tp));
          if (n)
            new_block = *n;
        }
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
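
/* A typical MEM_REF re-canonicalization handled above (with illustrative
   names): if the caller passes "&v" for pointer parameter P, the callee's
   "MEM[(int *)p]" becomes "MEM[(int *)&v]" after substitution, which the
   fold_build2 call folds back into a direct reference to V.  */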


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = wide_int_to_tree (new_type, *tp);
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n = id->decl_map->get (decl);
          if (n)
            {
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
              *tp = gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
                  if (TREE_CODE (ptr) == ADDR_EXPR)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (ptr, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, ptr);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                      TREE_READONLY (*tp) = TREE_READONLY (old);
                      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
                         have remapped a parameter as the property might be
                         valid only for the parameter itself.  */
                      if (TREE_THIS_NOTRAP (old)
                          && (!is_parm (TREE_OPERAND (old, 0))
                              || (!id->transform_parameter && is_parm (ptr))))
                        TREE_THIS_NOTRAP (*tp) = 1;
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
             Recurse here manually to allow that.  */
          tree ptr = TREE_OPERAND (*tp, 0);
          tree type = remap_type (TREE_TYPE (*tp), id);
          tree old = *tp;
          walk_tree (&ptr, copy_tree_body_r, data, NULL);
          *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
          TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
          TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
          TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
          if (MR_DEPENDENCE_CLIQUE (old) != 0)
            {
              MR_DEPENDENCE_CLIQUE (*tp)
                = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
              MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
            }
          /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
             remapped a parameter as the property might be valid only
             for the parameter itself.  */
          if (TREE_THIS_NOTRAP (old)
              && (!is_parm (TREE_OPERAND (old, 0))
                  || (!id->transform_parameter && is_parm (ptr))))
            TREE_THIS_NOTRAP (*tp) = 1;
          *walk_subtrees = 0;
          return NULL;
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = id->decl_map->get (TREE_BLOCK (*tp));
              if (n)
                new_block = *n;
            }
          TREE_SET_BLOCK (*tp, new_block);
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
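
/* For example, if a copied statement contains "__builtin_eh_pointer (5)"
   and EH region 5 of the source function was duplicated as region 7 in
   the destination (illustrative numbers), the INTEGER_CST variant above
   rewrites the argument to 7.  */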

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
        {
          gimple bndcopy = gimple_build_assign (bndslot, retbnd);
          memset (&wi, 0, sizeof (wi));
          wi.info = id;
          walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
          gimple_seq_add_stmt (&stmts, bndcopy);
        }

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval
          && (TREE_CODE (retval) != RESULT_DECL
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
        {
          copy = gimple_build_assign (id->do_not_unshare
                                      ? id->retvar
                                      : unshare_expr (id->retvar),
                                      retval);
          /* id->retvar is already substituted.  Skip it on later
             remapping.  */
          skip_first = true;

          /* We need to copy bounds when returning a structure with
             pointers from an instrumented function.  */
          if (chkp_function_instrumented_p (id->dst_fn)
              && !bndslot
              && !BOUNDED_P (id->retvar)
              && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
            id->assign_stmts.safe_push (copy);

        }
      else
        return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
          break;

        case GIMPLE_CATCH:
          {
            gcatch *catch_stmt = as_a <gcatch *> (stmt);
            s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
            copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
          }
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          {
            gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
            s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
            copy = gimple_build_omp_parallel
                     (s1,
                      gimple_omp_parallel_clauses (omp_par_stmt),
                      gimple_omp_parallel_child_fn (omp_par_stmt),
                      gimple_omp_parallel_data_arg (omp_par_stmt));
          }
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
                                       gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_taskgroup (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_TARGET:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_target
                   (s1, gimple_omp_target_kind (stmt),
                    gimple_omp_target_clauses (stmt));
          break;

        case GIMPLE_OMP_TEAMS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_teams
                   (s1, gimple_omp_teams_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_critical (s1,
                                            gimple_omp_critical_name (
                                              as_a <gomp_critical *> (stmt)));
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
            gtransaction *new_trans_stmt;
            s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
                                   id);
            copy = new_trans_stmt
              = gimple_build_transaction (
                  s1,
                  gimple_transaction_label (old_trans_stmt));
            gimple_transaction_set_subcode (
              new_trans_stmt,
              gimple_transaction_subcode (old_trans_stmt));
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = id->decl_map->get (decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return NULL;
            }
        }

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
         in a block that we aren't copying during tree_function_versioning,
         just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == MEM_REF
              && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
              if (gimple_bb (def_stmt)
                  && !bitmap_bit_p (id->blocks_to_copy,
                                    gimple_bb (def_stmt)->index))
                return NULL;
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          gdebug *copy
            = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                       gimple_debug_bind_get_value (stmt),
                                       stmt);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }
      if (gimple_debug_source_bind_p (stmt))
        {
          gdebug *copy = gimple_build_debug_source_bind
                           (gimple_debug_source_bind_get_var (stmt),
                            gimple_debug_source_bind_get_value (stmt),
                            stmt);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
        {
          if (gimple_call_tail_p (call_stmt))
            gimple_call_set_tail (call_stmt, false);
          if (gimple_call_from_thunk_p (call_stmt))
            gimple_call_set_from_thunk (call_stmt, false);
        }

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (as_a <gcall *> (copy));
            }
            break;

          case GIMPLE_RESX:
            {
              gresx *resx_stmt = as_a <gresx *> (copy);
              int r = gimple_resx_region (resx_stmt);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (resx_stmt, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
              int r = gimple_eh_dispatch_region (eh_dispatch);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (eh_dispatch, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
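
/* Note on the SKIP_FIRST path above: when the GIMPLE_RETURN was turned
   into an assignment to id->retvar, operand 0 of the new statement is
   already expressed in terms of the destination function, so only
   operand 1 is walked with remap_gimple_op_r.  */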
1742
1743
1744 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1745 later */
1746
1747 static basic_block
1748 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1749 gcov_type count_scale)
1750 {
1751 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1752 basic_block copy_basic_block;
1753 tree decl;
1754 gcov_type freq;
1755 basic_block prev;
1756
1757 /* Search for previous copied basic block. */
1758 prev = bb->prev_bb;
1759 while (!prev->aux)
1760 prev = prev->prev_bb;
1761
1762 /* create_basic_block() will append every new block to
1763 basic_block_info automatically. */
1764 copy_basic_block = create_basic_block (NULL, (void *) 0,
1765 (basic_block) prev->aux);
1766 copy_basic_block->count = apply_scale (bb->count, count_scale);
1767
1768 /* We are going to rebuild frequencies from scratch. These values
1769 have just small importance to drive canonicalize_loop_headers. */
1770 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1771
1772 /* We recompute frequencies after inlining, so this is quite safe. */
1773 if (freq > BB_FREQ_MAX)
1774 freq = BB_FREQ_MAX;
1775 copy_basic_block->frequency = freq;
1776
1777 copy_gsi = gsi_start_bb (copy_basic_block);
1778
1779 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1780 {
1781 gimple_seq stmts;
1782 gimple stmt = gsi_stmt (gsi);
1783 gimple orig_stmt = stmt;
1784 gimple_stmt_iterator stmts_gsi;
1785 bool stmt_added = false;
1786
1787 id->regimplify = false;
1788 stmts = remap_gimple_stmt (stmt, id);
1789
1790 if (gimple_seq_empty_p (stmts))
1791 continue;
1792
1793 seq_gsi = copy_gsi;
1794
1795 for (stmts_gsi = gsi_start (stmts);
1796 !gsi_end_p (stmts_gsi); )
1797 {
1798 stmt = gsi_stmt (stmts_gsi);
1799
1800 /* Advance iterator now before stmt is moved to seq_gsi. */
1801 gsi_next (&stmts_gsi);
1802
1803 if (gimple_nop_p (stmt))
1804 continue;
1805
1806 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1807 orig_stmt);
1808
1809 /* With return slot optimization we can end up with
1810 non-gimple (foo *)&this->m, fix that here. */
1811 if (is_gimple_assign (stmt)
1812 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1813 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1814 {
1815 tree new_rhs;
1816 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1817 gimple_assign_rhs1 (stmt),
1818 true, NULL, false,
1819 GSI_CONTINUE_LINKING);
1820 gimple_assign_set_rhs1 (stmt, new_rhs);
1821 id->regimplify = false;
1822 }
1823
1824 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1825
1826 if (id->regimplify)
1827 gimple_regimplify_operands (stmt, &seq_gsi);
1828
1829 stmt_added = true;
1830 }
1831
1832 if (!stmt_added)
1833 continue;
1834
1835       /* If copy_basic_block was empty at the start of this iteration,
1836 call gsi_start_bb again to get at the newly added statements. */
1837 if (gsi_end_p (copy_gsi))
1838 copy_gsi = gsi_start_bb (copy_basic_block);
1839 else
1840 gsi_next (&copy_gsi);
1841
1842 /* Process the new statement. The call to gimple_regimplify_operands
1843 	 possibly turned the statement into multiple statements; we
1844 need to process all of them. */
1845 do
1846 {
1847 tree fn;
1848 gcall *call_stmt;
1849
1850 stmt = gsi_stmt (copy_gsi);
1851 call_stmt = dyn_cast <gcall *> (stmt);
1852 if (call_stmt
1853 && gimple_call_va_arg_pack_p (call_stmt)
1854 && id->call_stmt)
1855 {
1856 /* __builtin_va_arg_pack () should be replaced by
1857 all arguments corresponding to ... in the caller. */
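	      /* For example, when inlining
		   void f (int x, ...) { g (x, __builtin_va_arg_pack ()); }
		 at a call f (1, a, b), the pack expands to the anonymous
		 arguments A and B, so the inner call becomes g (x, a, b).  */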
1858 tree p;
1859 gcall *new_call;
1860 vec<tree> argarray;
1861 size_t nargs = gimple_call_num_args (id->call_stmt);
1862 size_t n, i, nargs_to_copy;
1863 bool remove_bounds = false;
1864
1865 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1866 nargs--;
1867
1868 	      /* Bounds should be removed from the arg pack when a
1869 		 non-instrumented call is handled in an instrumented
1870 		 function.  */
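	      /* (With Pointer Bounds Checker instrumentation a call carries
		 additional bound arguments for its pointer arguments; those
		 must not be forwarded into a non-instrumented call.)  */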
1871 nargs_to_copy = nargs;
1872 if (gimple_call_with_bounds_p (id->call_stmt)
1873 && !gimple_call_with_bounds_p (stmt))
1874 {
1875 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1876 i < gimple_call_num_args (id->call_stmt);
1877 i++)
1878 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1879 nargs_to_copy--;
1880 remove_bounds = true;
1881 }
1882
1883 /* Create the new array of arguments. */
1884 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1885 argarray.create (n);
1886 argarray.safe_grow_cleared (n);
1887
1888 /* Copy all the arguments before '...' */
1889 memcpy (argarray.address (),
1890 gimple_call_arg_ptr (call_stmt, 0),
1891 gimple_call_num_args (call_stmt) * sizeof (tree));
1892
1893 if (remove_bounds)
1894 {
1895 		  /* Append the rest of the arguments, removing bounds.
1896 		     The redundant pre-initialization of I is dropped; the
1897 		     following loop sets it.  */
1898 		  unsigned cur = gimple_call_num_args (call_stmt);
1898 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1899 i < gimple_call_num_args (id->call_stmt);
1900 i++)
1901 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1902 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1903 gcc_assert (cur == n);
1904 }
1905 else
1906 {
1907 /* Append the arguments passed in '...' */
1908 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1909 gimple_call_arg_ptr (id->call_stmt, 0)
1910 + (gimple_call_num_args (id->call_stmt) - nargs),
1911 nargs * sizeof (tree));
1912 }
1913
1914 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1915 argarray);
1916
1917 argarray.release ();
1918
1919 /* Copy all GIMPLE_CALL flags, location and block, except
1920 GF_CALL_VA_ARG_PACK. */
1921 gimple_call_copy_flags (new_call, call_stmt);
1922 gimple_call_set_va_arg_pack (new_call, false);
1923 gimple_set_location (new_call, gimple_location (stmt));
1924 gimple_set_block (new_call, gimple_block (stmt));
1925 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1926
1927 gsi_replace (&copy_gsi, new_call, false);
1928 stmt = new_call;
1929 }
1930 else if (call_stmt
1931 && id->call_stmt
1932 && (decl = gimple_call_fndecl (stmt))
1933 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1934 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1935 {
1936 /* __builtin_va_arg_pack_len () should be replaced by
1937 the number of anonymous arguments. */
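	      /* E.g. at a call f (1, a, b) to the F above, which has one
		 named parameter, __builtin_va_arg_pack_len () folds to 2.  */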
1938 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1939 tree count, p;
1940 gimple new_stmt;
1941
1942 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1943 nargs--;
1944
1945 /* For instrumented calls we should ignore bounds. */
1946 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1947 i < gimple_call_num_args (id->call_stmt);
1948 i++)
1949 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1950 nargs--;
1951
1952 count = build_int_cst (integer_type_node, nargs);
1953 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1954 gsi_replace (&copy_gsi, new_stmt, false);
1955 stmt = new_stmt;
1956 }
1957 else if (call_stmt
1958 && id->call_stmt
1959 && gimple_call_internal_p (stmt)
1960 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1961 {
1962 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1963 gsi_remove (&copy_gsi, false);
1964 continue;
1965 }
1966
1967 	  /* Statements produced by inlining can be unfolded, especially
1968 	     when we have constant propagated some operands.  We can't fold
1969 	     them right now for two reasons:
1970 	     1) folding requires SSA_NAME_DEF_STMTs to be correct
1971 	     2) we can't change function calls to builtins.
1972 	     So we just mark the statement for later folding.  We mark
1973 	     all new statements, instead of just the statements changed
1974 	     by some nontrivial substitution, so that even statements made
1975 	     foldable indirectly are updated.  If this turns out to be
1976 	     expensive, copy_body can be told to watch for nontrivial
1977 	     changes.  */
1978 if (id->statements_to_fold)
1979 id->statements_to_fold->add (stmt);
1980
1981 /* We're duplicating a CALL_EXPR. Find any corresponding
1982 callgraph edges and update or duplicate them. */
1983 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1984 {
1985 struct cgraph_edge *edge;
1986
1987 switch (id->transform_call_graph_edges)
1988 {
1989 case CB_CGE_DUPLICATE:
1990 edge = id->src_node->get_edge (orig_stmt);
1991 if (edge)
1992 {
1993 int edge_freq = edge->frequency;
1994 int new_freq;
1995 struct cgraph_edge *old_edge = edge;
1996 edge = edge->clone (id->dst_node, call_stmt,
1997 gimple_uid (stmt),
1998 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1999 true);
2000 /* We could also just rescale the frequency, but
2001 doing so would introduce roundoff errors and make
2002 			 the verifier unhappy.  */
2003 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
2004 copy_basic_block);
2005
2006 /* Speculative calls consist of two edges - direct and indirect.
2007 Duplicate the whole thing and distribute frequencies accordingly. */
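		      /* E.g. if the old direct/indirect frequency split was
			 3:1, NEW_FREQ is distributed 3:1 between the two
			 cloned edges, capped at CGRAPH_FREQ_MAX.  */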
2008 if (edge->speculative)
2009 {
2010 struct cgraph_edge *direct, *indirect;
2011 struct ipa_ref *ref;
2012
2013 gcc_assert (!edge->indirect_unknown_callee);
2014 old_edge->speculative_call_info (direct, indirect, ref);
2015 indirect = indirect->clone (id->dst_node, call_stmt,
2016 gimple_uid (stmt),
2017 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2018 true);
2019 if (old_edge->frequency + indirect->frequency)
2020 {
2021 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2022 (old_edge->frequency + indirect->frequency)),
2023 CGRAPH_FREQ_MAX);
2024 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2025 (old_edge->frequency + indirect->frequency)),
2026 CGRAPH_FREQ_MAX);
2027 }
2028 id->dst_node->clone_reference (ref, stmt);
2029 }
2030 else
2031 {
2032 edge->frequency = new_freq;
2033 if (dump_file
2034 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2035 && (edge_freq > edge->frequency + 10
2036 || edge_freq < edge->frequency - 10))
2037 {
2038 fprintf (dump_file, "Edge frequency estimated by "
2039 				 "cgraph %i diverges from inliner's estimate %i\n",
2040 edge_freq,
2041 edge->frequency);
2042 fprintf (dump_file,
2043 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2044 bb->index,
2045 bb->frequency,
2046 copy_basic_block->frequency);
2047 }
2048 }
2049 }
2050 break;
2051
2052 case CB_CGE_MOVE_CLONES:
2053 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2054 call_stmt);
2055 edge = id->dst_node->get_edge (stmt);
2056 break;
2057
2058 case CB_CGE_MOVE:
2059 edge = id->dst_node->get_edge (orig_stmt);
2060 if (edge)
2061 edge->set_call_stmt (call_stmt);
2062 break;
2063
2064 default:
2065 gcc_unreachable ();
2066 }
2067
2068 	      /* Constant propagation on an argument done during inlining
2069 		 may create a new direct call.  Produce an edge for it.  */
2070 if ((!edge
2071 || (edge->indirect_inlining_edge
2072 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2073 && id->dst_node->definition
2074 && (fn = gimple_call_fndecl (stmt)) != NULL)
2075 {
2076 struct cgraph_node *dest = cgraph_node::get (fn);
2077
2078 		  /* We have a missing edge in the callgraph.  This can happen
2079 		     when previous inlining turned an indirect call into a
2080 		     direct call by constant propagating arguments, or when we
2081 		     are producing a dead clone (for further cloning).  In all
2082 		     other cases we hit a bug (incorrect node sharing is the
2083 		     most common reason for missing edges).  */
2084 gcc_assert (!dest->definition
2085 || dest->address_taken
2086 || !id->src_node->definition
2087 || !id->dst_node->definition);
2088 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2089 id->dst_node->create_edge_including_clones
2090 (dest, orig_stmt, call_stmt, bb->count,
2091 compute_call_stmt_bb_frequency (id->dst_node->decl,
2092 copy_basic_block),
2093 CIF_ORIGINALLY_INDIRECT_CALL);
2094 else
2095 id->dst_node->create_edge (dest, call_stmt,
2096 bb->count,
2097 compute_call_stmt_bb_frequency
2098 (id->dst_node->decl,
2099 copy_basic_block))->inline_failed
2100 = CIF_ORIGINALLY_INDIRECT_CALL;
2101 if (dump_file)
2102 {
2103 fprintf (dump_file, "Created new direct edge to %s\n",
2104 dest->name ());
2105 }
2106 }
2107
2108 notice_special_calls (as_a <gcall *> (stmt));
2109 }
2110
2111 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2112 id->eh_map, id->eh_lp_nr);
2113
2114 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2115 {
2116 ssa_op_iter i;
2117 tree def;
2118
2119 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2120 if (TREE_CODE (def) == SSA_NAME)
2121 SSA_NAME_DEF_STMT (def) = stmt;
2122 }
2123
2124 gsi_next (&copy_gsi);
2125 }
2126 while (!gsi_end_p (copy_gsi));
2127
2128 copy_gsi = gsi_last_bb (copy_basic_block);
2129 }
2130
2131 return copy_basic_block;
2132 }
2133
2134 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2135    SSA form is quite easy, since the dominator relationship for the old
2136    basic blocks does not change.
2137
2138    There is, however, an exception: inlining might change the dominator
2139    relation across EH edges from basic blocks within the inlined function
2140    to landing pads in the function we inline into.
2141
2142    The function fills in PHI_RESULTs of such PHI nodes if they refer
2143    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2144    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2145    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2146    set, which means that there will be no overlapping live ranges
2147    for the underlying symbol.
2148
2149    This might change in the future if we allow redirecting of EH edges;
2150    we might then want to change the way we build the CFG pre-inlining
2151    to include all the possible edges.  */
2152 static void
2153 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2154 bool can_throw, bool nonlocal_goto)
2155 {
2156 edge e;
2157 edge_iterator ei;
2158
2159 FOR_EACH_EDGE (e, ei, bb->succs)
2160 if (!e->dest->aux
2161 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2162 {
2163 gphi *phi;
2164 gphi_iterator si;
2165
2166 if (!nonlocal_goto)
2167 gcc_assert (e->flags & EDGE_EH);
2168
2169 if (!can_throw)
2170 gcc_assert (!(e->flags & EDGE_EH));
2171
2172 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2173 {
2174 edge re;
2175
2176 phi = si.phi ();
2177
2178 /* For abnormal goto/call edges the receiver can be the
2179 ENTRY_BLOCK. Do not assert this cannot happen. */
2180
2181 gcc_assert ((e->flags & EDGE_EH)
2182 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2183
2184 re = find_edge (ret_bb, e->dest);
2185 gcc_checking_assert (re);
2186 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2187 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2188
2189 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2190 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2191 }
2192 }
2193 }
2194
2195
2196 /* Copy edges from BB into its copy constructed earlier, scale profile
2197    accordingly.  Assume aux pointers point to the copies of each BB.
2198    Return true if any debug stmts are left after a statement that
2199    must end the basic block.  */
2200
2201 static bool
2202 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2203 basic_block abnormal_goto_dest)
2204 {
2205 basic_block new_bb = (basic_block) bb->aux;
2206 edge_iterator ei;
2207 edge old_edge;
2208 gimple_stmt_iterator si;
2209 int flags;
2210 bool need_debug_cleanup = false;
2211
2212 /* Use the indices from the original blocks to create edges for the
2213 new ones. */
2214 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2215 if (!(old_edge->flags & EDGE_EH))
2216 {
2217 edge new_edge;
2218
2219 flags = old_edge->flags;
2220
2221       /* Return edges do get a FALLTHRU flag when they get inlined.  */
2222 if (old_edge->dest->index == EXIT_BLOCK
2223 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2224 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2225 flags |= EDGE_FALLTHRU;
2226 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2227 new_edge->count = apply_scale (old_edge->count, count_scale);
2228 new_edge->probability = old_edge->probability;
2229 }
2230
2231 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2232 return false;
2233
2234 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2235 {
2236 gimple copy_stmt;
2237 bool can_throw, nonlocal_goto;
2238
2239 copy_stmt = gsi_stmt (si);
2240 if (!is_gimple_debug (copy_stmt))
2241 update_stmt (copy_stmt);
2242
2243 /* Do this before the possible split_block. */
2244 gsi_next (&si);
2245
2246 /* If this tree could throw an exception, there are two
2247 cases where we need to add abnormal edge(s): the
2248 tree wasn't in a region and there is a "current
2249 region" in the caller; or the original tree had
2250 EH edges. In both cases split the block after the tree,
2251 and add abnormal edge(s) as needed; we need both
2252 those from the callee and the caller.
2253 We check whether the copy can throw, because the const
2254 propagation can change an INDIRECT_REF which throws
2255 into a COMPONENT_REF which doesn't. If the copy
2256 can throw, the original could also throw. */
2257 can_throw = stmt_can_throw_internal (copy_stmt);
2258 nonlocal_goto
2259 = (stmt_can_make_abnormal_goto (copy_stmt)
2260 && !computed_goto_p (copy_stmt));
2261
2262 if (can_throw || nonlocal_goto)
2263 {
2264 if (!gsi_end_p (si))
2265 {
2266 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2267 gsi_next (&si);
2268 if (gsi_end_p (si))
2269 need_debug_cleanup = true;
2270 }
2271 if (!gsi_end_p (si))
2272 /* Note that bb's predecessor edges aren't necessarily
2273 right at this point; split_block doesn't care. */
2274 {
2275 edge e = split_block (new_bb, copy_stmt);
2276
2277 new_bb = e->dest;
2278 new_bb->aux = e->src->aux;
2279 si = gsi_start_bb (new_bb);
2280 }
2281 }
2282
2283 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2284 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2285 else if (can_throw)
2286 make_eh_edges (copy_stmt);
2287
2288       /* If the call we inline cannot make an abnormal goto, do not add
2289 	 additional abnormal edges but only retain those already present
2290 	 in the original function body.  */
2291 if (abnormal_goto_dest == NULL)
2292 nonlocal_goto = false;
2293 if (nonlocal_goto)
2294 {
2295 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2296
2297 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2298 nonlocal_goto = false;
2299 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2300 in OpenMP regions which aren't allowed to be left abnormally.
2301 So, no need to add abnormal edge in that case. */
2302 else if (is_gimple_call (copy_stmt)
2303 && gimple_call_internal_p (copy_stmt)
2304 && (gimple_call_internal_fn (copy_stmt)
2305 == IFN_ABNORMAL_DISPATCHER)
2306 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2307 nonlocal_goto = false;
2308 else
2309 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2310 }
2311
2312 if ((can_throw || nonlocal_goto)
2313 && gimple_in_ssa_p (cfun))
2314 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2315 can_throw, nonlocal_goto);
2316 }
2317 return need_debug_cleanup;
2318 }
2319
2320 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2321    were possibly split and new outgoing EH edges inserted.
2322    BB points to the block of the original function and AUX pointers
2323    link the original and newly copied blocks.  */
2324
2325 static void
2326 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2327 {
2328 basic_block const new_bb = (basic_block) bb->aux;
2329 edge_iterator ei;
2330 gphi *phi;
2331 gphi_iterator si;
2332 edge new_edge;
2333 bool inserted = false;
2334
2335 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2336 {
2337 tree res, new_res;
2338 gphi *new_phi;
2339
2340 phi = si.phi ();
2341 res = PHI_RESULT (phi);
2342 new_res = res;
2343 if (!virtual_operand_p (res))
2344 {
2345 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2346 new_phi = create_phi_node (new_res, new_bb);
2347 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2348 {
2349 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2350 tree arg;
2351 tree new_arg;
2352 edge_iterator ei2;
2353 location_t locus;
2354
2355 /* When doing partial cloning, we allow PHIs on the entry block
2356 as long as all the arguments are the same. Find any input
2357 	       edge to see which argument to copy.  */
2358 if (!old_edge)
2359 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2360 if (!old_edge->src->aux)
2361 break;
2362
2363 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2364 new_arg = arg;
2365 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2366 gcc_assert (new_arg);
2367 /* With return slot optimization we can end up with
2368 non-gimple (foo *)&this->m, fix that here. */
2369 if (TREE_CODE (new_arg) != SSA_NAME
2370 && TREE_CODE (new_arg) != FUNCTION_DECL
2371 && !is_gimple_val (new_arg))
2372 {
2373 gimple_seq stmts = NULL;
2374 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2375 gsi_insert_seq_on_edge (new_edge, stmts);
2376 inserted = true;
2377 }
2378 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2379 if (LOCATION_BLOCK (locus))
2380 {
2381 tree *n;
2382 n = id->decl_map->get (LOCATION_BLOCK (locus));
2383 gcc_assert (n);
2384 if (*n)
2385 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2386 else
2387 locus = LOCATION_LOCUS (locus);
2388 }
2389 else
2390 locus = LOCATION_LOCUS (locus);
2391
2392 add_phi_arg (new_phi, new_arg, new_edge, locus);
2393 }
2394 }
2395 }
2396
2397 /* Commit the delayed edge insertions. */
2398 if (inserted)
2399 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2400 gsi_commit_one_edge_insert (new_edge, NULL);
2401 }
2402
2403
2404 /* Wrapper for remap_decl so it can be used as a callback. */
2405
2406 static tree
2407 remap_decl_1 (tree decl, void *data)
2408 {
2409 return remap_decl (decl, (copy_body_data *) data);
2410 }
2411
2412 /* Build struct function and associated data structures for the new clone
2413    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2414    changes cfun to the function of NEW_FNDECL (and current_function_decl too).  */
2415
2416 static void
2417 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2418 {
2419 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2420 gcov_type count_scale;
2421
2422 if (!DECL_ARGUMENTS (new_fndecl))
2423 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2424 if (!DECL_RESULT (new_fndecl))
2425 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2426
2427 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2428 count_scale
2429 = GCOV_COMPUTE_SCALE (count,
2430 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2431 else
2432 count_scale = REG_BR_PROB_BASE;
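  /* COUNT_SCALE is a fixed-point ratio in which REG_BR_PROB_BASE stands
     for 1.0; e.g. a clone entered with COUNT equal to half the original
     entry count gets a scale of REG_BR_PROB_BASE / 2.  */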
2433
2434 /* Register specific tree functions. */
2435 gimple_register_cfg_hooks ();
2436
2437 /* Get clean struct function. */
2438 push_struct_function (new_fndecl);
2439
2440 /* We will rebuild these, so just sanity check that they are empty. */
2441 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2442 gcc_assert (cfun->local_decls == NULL);
2443 gcc_assert (cfun->cfg == NULL);
2444 gcc_assert (cfun->decl == new_fndecl);
2445
2446 /* Copy items we preserve during cloning. */
2447 cfun->static_chain_decl = src_cfun->static_chain_decl;
2448 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2449 cfun->function_end_locus = src_cfun->function_end_locus;
2450 cfun->curr_properties = src_cfun->curr_properties;
2451 cfun->last_verified = src_cfun->last_verified;
2452 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2453 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2454 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2455 cfun->stdarg = src_cfun->stdarg;
2456 cfun->after_inlining = src_cfun->after_inlining;
2457 cfun->can_throw_non_call_exceptions
2458 = src_cfun->can_throw_non_call_exceptions;
2459 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2460 cfun->returns_struct = src_cfun->returns_struct;
2461 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2462
2463 init_empty_tree_cfg ();
2464
2465 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2466 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2467 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2468 REG_BR_PROB_BASE);
2469 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2470 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2471 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2472 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2473 REG_BR_PROB_BASE);
2474 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2475 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2476 if (src_cfun->eh)
2477 init_eh_for_function ();
2478
2479 if (src_cfun->gimple_df)
2480 {
2481 init_tree_ssa (cfun);
2482 cfun->gimple_df->in_ssa_p = true;
2483 init_ssa_operands (cfun);
2484 }
2485 }
2486
2487 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2488    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2489    successor has multiple predecessors, reset the debug stmts' values;
2490    otherwise keep them.  */
2491
2492 static void
2493 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2494 {
2495 edge e;
2496 edge_iterator ei;
2497 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2498
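  /* There is nothing to do unless the last non-debug stmt can end the
     block early (it can throw or make an abnormal goto) and debug stmts
     follow it.  */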
2499 if (gsi_end_p (si)
2500 || gsi_one_before_end_p (si)
2501 || !(stmt_can_throw_internal (gsi_stmt (si))
2502 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2503 return;
2504
2505 FOR_EACH_EDGE (e, ei, new_bb->succs)
2506 {
2507 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2508 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2509 while (is_gimple_debug (gsi_stmt (ssi)))
2510 {
2511 gimple stmt = gsi_stmt (ssi);
2512 gdebug *new_stmt;
2513 tree var;
2514 tree value;
2515
2516 /* For the last edge move the debug stmts instead of copying
2517 them. */
2518 if (ei_one_before_end_p (ei))
2519 {
2520 si = ssi;
2521 gsi_prev (&ssi);
2522 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2523 gimple_debug_bind_reset_value (stmt);
2524 gsi_remove (&si, false);
2525 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2526 continue;
2527 }
2528
2529 if (gimple_debug_bind_p (stmt))
2530 {
2531 var = gimple_debug_bind_get_var (stmt);
2532 if (single_pred_p (e->dest))
2533 {
2534 value = gimple_debug_bind_get_value (stmt);
2535 value = unshare_expr (value);
2536 }
2537 else
2538 value = NULL_TREE;
2539 new_stmt = gimple_build_debug_bind (var, value, stmt);
2540 }
2541 else if (gimple_debug_source_bind_p (stmt))
2542 {
2543 var = gimple_debug_source_bind_get_var (stmt);
2544 value = gimple_debug_source_bind_get_value (stmt);
2545 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2546 }
2547 else
2548 gcc_unreachable ();
2549 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2550 id->debug_stmts.safe_push (new_stmt);
2551 gsi_prev (&ssi);
2552 }
2553 }
2554 }
2555
2556 /* Make a copy of the sub-loops of SRC_PARENT and place them
2557    as sub-loops of DEST_PARENT.  */
2558
2559 static void
2560 copy_loops (copy_body_data *id,
2561 struct loop *dest_parent, struct loop *src_parent)
2562 {
2563 struct loop *src_loop = src_parent->inner;
2564 while (src_loop)
2565 {
2566 if (!id->blocks_to_copy
2567 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2568 {
2569 struct loop *dest_loop = alloc_loop ();
2570
2571 /* Assign the new loop its header and latch and associate
2572 those with the new loop. */
2573 dest_loop->header = (basic_block)src_loop->header->aux;
2574 dest_loop->header->loop_father = dest_loop;
2575 if (src_loop->latch != NULL)
2576 {
2577 dest_loop->latch = (basic_block)src_loop->latch->aux;
2578 dest_loop->latch->loop_father = dest_loop;
2579 }
2580
2581 /* Copy loop meta-data. */
2582 copy_loop_info (src_loop, dest_loop);
2583
2584 /* Finally place it into the loop array and the loop tree. */
2585 place_new_loop (cfun, dest_loop);
2586 flow_loop_tree_node_add (dest_parent, dest_loop);
2587
2588 dest_loop->safelen = src_loop->safelen;
2589 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2590 if (src_loop->force_vectorize)
2591 {
2592 dest_loop->force_vectorize = true;
2593 cfun->has_force_vectorize_loops = true;
2594 }
2595 if (src_loop->simduid)
2596 {
2597 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2598 cfun->has_simduid_loops = true;
2599 }
2600
2601 /* Recurse. */
2602 copy_loops (id, dest_loop, src_loop);
2603 }
2604 src_loop = src_loop->next;
2605 }
2606 }
2607
2608 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB.  */
2609
2610 void
2611 redirect_all_calls (copy_body_data * id, basic_block bb)
2612 {
2613 gimple_stmt_iterator si;
2614 gimple last = last_stmt (bb);
2615 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2616 {
2617 gimple stmt = gsi_stmt (si);
2618 if (is_gimple_call (stmt))
2619 {
2620 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2621 if (edge)
2622 {
2623 edge->redirect_call_stmt_to_callee ();
2624 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2625 gimple_purge_dead_eh_edges (bb);
2626 }
2627 }
2628 }
2629 }
2630
2631 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2632 with each bb's frequency. Used when NODE has a 0-weight entry
2633 but we are about to inline it into a non-zero count call bb.
2634 See the comments for handle_missing_profiles() in predict.c for
2635 when this can happen for COMDATs. */
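/* E.g. with COUNT = 1000 and a basic block frequency of BB_FREQ_MAX / 2,
   the block receives a count of 500.  */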
2636
2637 void
2638 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2639 {
2640 basic_block bb;
2641 edge_iterator ei;
2642 edge e;
2643 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2644
2645   FOR_ALL_BB_FN (bb, fn)
2646 {
2647 bb->count = apply_scale (count,
2648 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2649 FOR_EACH_EDGE (e, ei, bb->succs)
2650 e->count = apply_probability (e->src->count, e->probability);
2651 }
2652 }
2653
2654 /* Make a copy of the body of FN so that it can be inserted inline in
2655 another function. Walks FN via CFG, returns new fndecl. */
2656
2657 static tree
2658 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2659 basic_block entry_block_map, basic_block exit_block_map,
2660 basic_block new_entry)
2661 {
2662 tree callee_fndecl = id->src_fn;
2663 /* Original cfun for the callee, doesn't change. */
2664 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2665 struct function *cfun_to_copy;
2666 basic_block bb;
2667 tree new_fndecl = NULL;
2668 bool need_debug_cleanup = false;
2669 gcov_type count_scale;
2670 int last;
2671 int incoming_frequency = 0;
2672 gcov_type incoming_count = 0;
2673
2674 /* This can happen for COMDAT routines that end up with 0 counts
2675 despite being called (see the comments for handle_missing_profiles()
2676 in predict.c as to why). Apply counts to the blocks in the callee
2677 before inlining, using the guessed edge frequencies, so that we don't
2678 end up with a 0-count inline body which can confuse downstream
2679 optimizations such as function splitting. */
2680 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2681 {
2682 /* Apply the larger of the call bb count and the total incoming
2683 call edge count to the callee. */
2684 gcov_type in_count = 0;
2685 struct cgraph_edge *in_edge;
2686 for (in_edge = id->src_node->callers; in_edge;
2687 in_edge = in_edge->next_caller)
2688 in_count += in_edge->count;
2689 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2690 }
2691
2692 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2693 count_scale
2694 = GCOV_COMPUTE_SCALE (count,
2695 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2696 else
2697 count_scale = REG_BR_PROB_BASE;
2698
2699 /* Register specific tree functions. */
2700 gimple_register_cfg_hooks ();
2701
2702   /* If we are inlining just a region of the function, make sure to connect
2703      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2704      be part of a loop, we must compute the frequency and probability of
2705      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2706      probabilities of edges incoming from the nonduplicated region.  */
2707 if (new_entry)
2708 {
2709 edge e;
2710 edge_iterator ei;
2711
2712 FOR_EACH_EDGE (e, ei, new_entry->preds)
2713 if (!e->src->aux)
2714 {
2715 incoming_frequency += EDGE_FREQUENCY (e);
2716 incoming_count += e->count;
2717 }
2718 incoming_count = apply_scale (incoming_count, count_scale);
2719 incoming_frequency
2720 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2721 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2722 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2723 }
2724
2725 /* Must have a CFG here at this point. */
2726 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2727 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2728
2729 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2730
2731 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2732 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2733 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2734 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2735
2736 /* Duplicate any exception-handling regions. */
2737 if (cfun->eh)
2738 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2739 remap_decl_1, id);
2740
2741   /* Use aux pointers to map the original blocks to their copies.  */
2742 FOR_EACH_BB_FN (bb, cfun_to_copy)
2743 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2744 {
2745 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2746 bb->aux = new_bb;
2747 new_bb->aux = bb;
2748 new_bb->loop_father = entry_block_map->loop_father;
2749 }
2750
2751 last = last_basic_block_for_fn (cfun);
2752
2753 /* Now that we've duplicated the blocks, duplicate their edges. */
2754 basic_block abnormal_goto_dest = NULL;
2755 if (id->call_stmt
2756 && stmt_can_make_abnormal_goto (id->call_stmt))
2757 {
2758 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2759
2760 bb = gimple_bb (id->call_stmt);
2761 gsi_next (&gsi);
2762 if (gsi_end_p (gsi))
2763 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2764 }
2765 FOR_ALL_BB_FN (bb, cfun_to_copy)
2766 if (!id->blocks_to_copy
2767 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2768 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2769 abnormal_goto_dest);
2770
2771 if (new_entry)
2772 {
2773 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2774 e->probability = REG_BR_PROB_BASE;
2775 e->count = incoming_count;
2776 }
2777
2778 /* Duplicate the loop tree, if available and wanted. */
2779 if (loops_for_fn (src_cfun) != NULL
2780 && current_loops != NULL)
2781 {
2782 copy_loops (id, entry_block_map->loop_father,
2783 get_loop (src_cfun, 0));
2784 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2785 loops_state_set (LOOPS_NEED_FIXUP);
2786 }
2787
2788 /* If the loop tree in the source function needed fixup, mark the
2789 destination loop tree for fixup, too. */
2790 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2791 loops_state_set (LOOPS_NEED_FIXUP);
2792
2793 if (gimple_in_ssa_p (cfun))
2794 FOR_ALL_BB_FN (bb, cfun_to_copy)
2795 if (!id->blocks_to_copy
2796 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2797 copy_phis_for_bb (bb, id);
2798
2799 FOR_ALL_BB_FN (bb, cfun_to_copy)
2800 if (bb->aux)
2801 {
2802 if (need_debug_cleanup
2803 && bb->index != ENTRY_BLOCK
2804 && bb->index != EXIT_BLOCK)
2805 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2806 	/* Update call edge destinations.  This cannot be done before loop
2807 	   info is updated, because we may split basic blocks.  */
2808 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2809 redirect_all_calls (id, (basic_block)bb->aux);
2810 ((basic_block)bb->aux)->aux = NULL;
2811 bb->aux = NULL;
2812 }
2813
2814 /* Zero out AUX fields of blocks newly created during EH edge
2815    insertion.  */
2816 for (; last < last_basic_block_for_fn (cfun); last++)
2817 {
2818 if (need_debug_cleanup)
2819 maybe_move_debug_stmts_to_successors (id,
2820 BASIC_BLOCK_FOR_FN (cfun, last));
2821 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2822       /* Update call edge destinations.  This cannot be done before loop
2823 	 info is updated, because we may split basic blocks.  */
2824 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2825 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2826 }
2827 entry_block_map->aux = NULL;
2828 exit_block_map->aux = NULL;
2829
2830 if (id->eh_map)
2831 {
2832 delete id->eh_map;
2833 id->eh_map = NULL;
2834 }
2835 if (id->dependence_map)
2836 {
2837 delete id->dependence_map;
2838 id->dependence_map = NULL;
2839 }
2840
2841 return new_fndecl;
2842 }
2843
2844 /* Copy the debug STMT using ID. We deal with these statements in a
2845 special way: if any variable in their VALUE expression wasn't
2846 remapped yet, we won't remap it, because that would get decl uids
2847 out of sync, causing codegen differences between -g and -g0. If
2848 this arises, we drop the VALUE expression altogether. */
2849
2850 static void
2851 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2852 {
2853 tree t, *n;
2854 struct walk_stmt_info wi;
2855
2856 if (gimple_block (stmt))
2857 {
2858 n = id->decl_map->get (gimple_block (stmt));
2859 gimple_set_block (stmt, n ? *n : id->block);
2860 }
2861
2862   /* Remap all the operands in STMT.  */
2863 memset (&wi, 0, sizeof (wi));
2864 wi.info = id;
2865
2866 processing_debug_stmt = 1;
2867
2868 if (gimple_debug_source_bind_p (stmt))
2869 t = gimple_debug_source_bind_get_var (stmt);
2870 else
2871 t = gimple_debug_bind_get_var (stmt);
2872
2873 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2874 && (n = id->debug_map->get (t)))
2875 {
2876 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2877 t = *n;
2878 }
2879 else if (TREE_CODE (t) == VAR_DECL
2880 && !is_global_var (t)
2881 && !id->decl_map->get (t))
2882 /* T is a non-localized variable. */;
2883 else
2884 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2885
2886 if (gimple_debug_bind_p (stmt))
2887 {
2888 gimple_debug_bind_set_var (stmt, t);
2889
2890 if (gimple_debug_bind_has_value_p (stmt))
2891 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2892 remap_gimple_op_r, &wi, NULL);
2893
2894 /* Punt if any decl couldn't be remapped. */
2895 if (processing_debug_stmt < 0)
2896 gimple_debug_bind_reset_value (stmt);
2897 }
2898 else if (gimple_debug_source_bind_p (stmt))
2899 {
2900 gimple_debug_source_bind_set_var (stmt, t);
2901 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2902 remap_gimple_op_r, &wi, NULL);
2903   /* When inlining, if the source bind refers to one of the optimized
2904      away parameters, change the source bind into a normal debug bind
2905      referring to the corresponding DEBUG_EXPR_DECL that should have
2906      been bound before the call stmt.  */
2907 t = gimple_debug_source_bind_get_value (stmt);
2908 if (t != NULL_TREE
2909 && TREE_CODE (t) == PARM_DECL
2910 && id->call_stmt)
2911 {
2912 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2913 unsigned int i;
2914 if (debug_args != NULL)
2915 {
2916 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2917 if ((**debug_args)[i] == DECL_ORIGIN (t)
2918 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2919 {
2920 t = (**debug_args)[i + 1];
2921 stmt->subcode = GIMPLE_DEBUG_BIND;
2922 gimple_debug_bind_set_value (stmt, t);
2923 break;
2924 }
2925 }
2926 }
2927 }
2928
2929 processing_debug_stmt = 0;
2930
2931 update_stmt (stmt);
2932 }
2933
2934 /* Process deferred debug stmts. In order to give values better odds
2935 of being successfully remapped, we delay the processing of debug
2936 stmts until all other stmts that might require remapping are
2937 processed. */
2938
2939 static void
2940 copy_debug_stmts (copy_body_data *id)
2941 {
2942 size_t i;
2943 gdebug *stmt;
2944
2945 if (!id->debug_stmts.exists ())
2946 return;
2947
2948 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2949 copy_debug_stmt (stmt, id);
2950
2951 id->debug_stmts.release ();
2952 }
2953
2954 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2955 another function. */
2956
2957 static tree
2958 copy_tree_body (copy_body_data *id)
2959 {
2960 tree fndecl = id->src_fn;
2961 tree body = DECL_SAVED_TREE (fndecl);
2962
2963 walk_tree (&body, copy_tree_body_r, id, NULL);
2964
2965 return body;
2966 }
2967
2968 /* Make a copy of the body of FN so that it can be inserted inline in
2969 another function. */
2970
2971 static tree
2972 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2973 basic_block entry_block_map, basic_block exit_block_map,
2974 basic_block new_entry)
2975 {
2976 tree fndecl = id->src_fn;
2977 tree body;
2978
2979 /* If this body has a CFG, walk CFG and copy. */
2980 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2981 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2982 new_entry);
2983 copy_debug_stmts (id);
2984
2985 return body;
2986 }
2987
2988 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2989 defined in function FN, or of a data member thereof. */
2990
2991 static bool
2992 self_inlining_addr_expr (tree value, tree fn)
2993 {
2994 tree var;
2995
2996 if (TREE_CODE (value) != ADDR_EXPR)
2997 return false;
2998
2999 var = get_base_address (TREE_OPERAND (value, 0));
3000
3001 return var && auto_var_in_fn_p (var, fn);
3002 }
3003
3004 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3005 lexical block and line number information from base_stmt, if given,
3006 or from the last stmt of the block otherwise. */
3007
3008 static gimple
3009 insert_init_debug_bind (copy_body_data *id,
3010 basic_block bb, tree var, tree value,
3011 gimple base_stmt)
3012 {
3013 gimple note;
3014 gimple_stmt_iterator gsi;
3015 tree tracked_var;
3016
3017 if (!gimple_in_ssa_p (id->src_cfun))
3018 return NULL;
3019
3020 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3021 return NULL;
3022
3023 tracked_var = target_for_debug_bind (var);
3024 if (!tracked_var)
3025 return NULL;
3026
3027 if (bb)
3028 {
3029 gsi = gsi_last_bb (bb);
3030 if (!base_stmt && !gsi_end_p (gsi))
3031 base_stmt = gsi_stmt (gsi);
3032 }
3033
3034 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
3035
3036 if (bb)
3037 {
3038 if (!gsi_end_p (gsi))
3039 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3040 else
3041 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3042 }
3043
3044 return note;
3045 }
3046
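/* Insert INIT_STMT at the end of BB, regimplifying its operands and
   emitting a matching debug bind when appropriate.  */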
3047 static void
3048 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3049 {
3050   /* If the variable being initialized is zero-sized, it's possible that
3051      the assignment statement results in no gimple statements.  */
3052 if (init_stmt)
3053 {
3054 gimple_stmt_iterator si = gsi_last_bb (bb);
3055
3056 /* We can end up with init statements that store to a non-register
3057 from a rhs with a conversion. Handle that here by forcing the
3058 rhs into a temporary. gimple_regimplify_operands is not
3059 prepared to do this for us. */
3060 if (!is_gimple_debug (init_stmt)
3061 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3062 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3063 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3064 {
3065 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3066 gimple_expr_type (init_stmt),
3067 gimple_assign_rhs1 (init_stmt));
3068 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3069 GSI_NEW_STMT);
3070 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3071 gimple_assign_set_rhs1 (init_stmt, rhs);
3072 }
3073 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3074 gimple_regimplify_operands (init_stmt, &si);
3075
3076 if (!is_gimple_debug (init_stmt))
3077 {
3078 tree def = gimple_assign_lhs (init_stmt);
3079 insert_init_debug_bind (id, bb, def, def, init_stmt);
3080 }
3081 }
3082 }
3083
3084 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3085    at the end of BB.  When BB is NULL, we return the init statement to be
3086    output later.  */
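/* For instance, inlining void f (int p) at a call f (a + 1) typically
   emits an assignment initializing the VAR_DECL that replaces P from
   A + 1; in SSA form the parameter's default definition may instead be
   mapped directly to an existing SSA name or constant, avoiding the
   copy entirely.  */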
3087 static gimple
3088 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3089 basic_block bb, tree *vars)
3090 {
3091 gimple init_stmt = NULL;
3092 tree var;
3093 tree rhs = value;
3094 tree def = (gimple_in_ssa_p (cfun)
3095 ? ssa_default_def (id->src_cfun, p) : NULL);
3096
3097 if (value
3098 && value != error_mark_node
3099 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3100 {
3101 /* If we can match up types by promotion/demotion do so. */
3102 if (fold_convertible_p (TREE_TYPE (p), value))
3103 rhs = fold_convert (TREE_TYPE (p), value);
3104 else
3105 {
3106 /* ??? For valid programs we should not end up here.
3107 Still if we end up with truly mismatched types here, fall back
3108 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3109 GIMPLE to the following passes. */
3110 if (!is_gimple_reg_type (TREE_TYPE (value))
3111 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3112 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3113 else
3114 rhs = build_zero_cst (TREE_TYPE (p));
3115 }
3116 }
3117
3118 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3119 here since the type of this decl must be visible to the calling
3120 function. */
3121 var = copy_decl_to_var (p, id);
3122
3123 /* Declare this new variable. */
3124 DECL_CHAIN (var) = *vars;
3125 *vars = var;
3126
3127 /* Make gimplifier happy about this variable. */
3128 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3129
3130   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3131      we would not need to create a new variable here at all, were it
3132      not for debug info.  Still, we can just use the argument
3133      value.  */
3134 if (TREE_READONLY (p)
3135 && !TREE_ADDRESSABLE (p)
3136 && value && !TREE_SIDE_EFFECTS (value)
3137 && !def)
3138 {
3139       /* We may produce non-gimple trees by adding NOPs or introduce
3140 	 invalid sharing when the operand is not really constant.
3141 	 It is no big deal to prohibit constant propagation here, as
3142 	 we will constant propagate in the DOM1 pass anyway.  */
3143 if (is_gimple_min_invariant (value)
3144 && useless_type_conversion_p (TREE_TYPE (p),
3145 TREE_TYPE (value))
3146 /* We have to be very careful about ADDR_EXPR. Make sure
3147 the base variable isn't a local variable of the inlined
3148 function, e.g., when doing recursive inlining, direct or
3149 mutually-recursive or whatever, which is why we don't
3150 just test whether fn == current_function_decl. */
3151 && ! self_inlining_addr_expr (value, fn))
3152 {
3153 insert_decl_map (id, p, value);
3154 insert_debug_decl_map (id, p, var);
3155 return insert_init_debug_bind (id, bb, var, value, NULL);
3156 }
3157 }
3158
3159 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3160 that way, when the PARM_DECL is encountered, it will be
3161 automatically replaced by the VAR_DECL. */
3162 insert_decl_map (id, p, var);
3163
3164 /* Even if P was TREE_READONLY, the new VAR should not be.
3165 In the original code, we would have constructed a
3166 temporary, and then the function body would have never
3167 changed the value of P. However, now, we will be
3168 constructing VAR directly. The constructor body may
3169 change its value multiple times as it is being
3170 constructed. Therefore, it must not be TREE_READONLY;
3171      the back-end assumes that a TREE_READONLY variable is
3172 assigned to only once. */
3173 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3174 TREE_READONLY (var) = 0;
3175
3176   /* If there is no setup required and we are in SSA, take the easy route
3177      replacing all SSA names representing the function parameter by the
3178      SSA name passed to the function.
3179
3180      We need to construct a map for the variable anyway, as it might be
3181      used in different SSA names when the parameter is set in the function.
3182
3183      Do the replacement at -O0 for const arguments replaced by constants.
3184      This is important for builtin_constant_p and other constructs requiring
3185      a constant argument to be visible in the inlined function body.  */
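  /* E.g. given
       static inline int f (int x) { return __builtin_constant_p (x); }
     mapping the default definition of X directly to the constant at a
     call f (42) lets the builtin fold to 1 even at -O0.  */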
3186 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3187 && (optimize
3188 || (TREE_READONLY (p)
3189 && is_gimple_min_invariant (rhs)))
3190 && (TREE_CODE (rhs) == SSA_NAME
3191 || is_gimple_min_invariant (rhs))
3192 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3193 {
3194 insert_decl_map (id, def, rhs);
3195 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3196 }
3197
3198   /* If the value of the argument is never used, don't bother initializing
3199      it.  */
3200 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3201 {
3202 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3203 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3204 }
3205
3206 /* Initialize this VAR_DECL from the equivalent argument. Convert
3207 the argument to the proper type in case it was promoted. */
3208 if (value)
3209 {
3210 if (rhs == error_mark_node)
3211 {
3212 insert_decl_map (id, p, var);
3213 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3214 }
3215
3216 STRIP_USELESS_TYPE_CONVERSION (rhs);
3217
3218 /* If we are in SSA form properly remap the default definition
3219 or assign to a dummy SSA name if the parameter is unused and
3220 we are not optimizing. */
3221 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3222 {
3223 if (def)
3224 {
3225 def = remap_ssa_name (def, id);
3226 init_stmt = gimple_build_assign (def, rhs);
3227 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3228 set_ssa_default_def (cfun, var, NULL);
3229 }
3230 else if (!optimize)
3231 {
3232 def = make_ssa_name (var);
3233 init_stmt = gimple_build_assign (def, rhs);
3234 }
3235 }
3236 else
3237 init_stmt = gimple_build_assign (var, rhs);
3238
3239 if (bb && init_stmt)
3240 insert_init_stmt (id, bb, init_stmt);
3241 }
3242 return init_stmt;
3243 }
3244
3245 /* Generate code to initialize the parameters of the function at the
3246 top of the stack in ID from the GIMPLE_CALL STMT. */
3247
3248 static void
3249 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3250 tree fn, basic_block bb)
3251 {
3252 tree parms;
3253 size_t i;
3254 tree p;
3255 tree vars = NULL_TREE;
3256 tree static_chain = gimple_call_chain (stmt);
3257
3258 /* Figure out what the parameters are. */
3259 parms = DECL_ARGUMENTS (fn);
3260
3261 /* Loop through the parameter declarations, replacing each with an
3262 equivalent VAR_DECL, appropriately initialized. */
3263 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3264 {
3265 tree val;
3266 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3267 setup_one_parameter (id, p, val, fn, bb, &vars);
3268 }
3269 /* After remapping parameters remap their types. This has to be done
3270 in a second loop over all parameters to appropriately remap
3271 variable sized arrays when the size is specified in a
3272 parameter following the array. */
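  /* E.g. for an array parameter whose type is char[LEN], the replacement's
     type can only be remapped once LEN's replacement is available.  */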
3273 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3274 {
3275 tree *varp = id->decl_map->get (p);
3276 if (varp
3277 && TREE_CODE (*varp) == VAR_DECL)
3278 {
3279 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3280 ? ssa_default_def (id->src_cfun, p) : NULL);
3281 tree var = *varp;
3282 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3283 /* Also remap the default definition if it was remapped
3284 to the default definition of the parameter replacement
3285 by the parameter setup. */
3286 if (def)
3287 {
3288 tree *defp = id->decl_map->get (def);
3289 if (defp
3290 && TREE_CODE (*defp) == SSA_NAME
3291 && SSA_NAME_VAR (*defp) == var)
3292 TREE_TYPE (*defp) = TREE_TYPE (var);
3293 }
3294 }
3295 }
3296
3297 /* Initialize the static chain. */
3298 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3299 gcc_assert (fn != current_function_decl);
3300 if (p)
3301 {
3302 /* No static chain? Seems like a bug in tree-nested.c. */
3303 gcc_assert (static_chain);
3304
3305 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3306 }
3307
3308 declare_inline_vars (id->block, vars);
3309 }
3310
3311
3312 /* Declare a return variable to replace the RESULT_DECL for the
3313 function we are calling. An appropriate DECL_STMT is returned.
3314 The USE_STMT is filled to contain a use of the declaration to
3315 indicate the return value of the function.
3316
3317    RETURN_SLOT, if non-null, is the place where to store the result.  It
3318 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3319 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3320
3321 RETURN_BOUNDS holds a destination for returned bounds.
3322
3323 The return value is a (possibly null) value that holds the result
3324 as seen by the caller. */
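/* For instance, for a call
     x = f ();
   MODIFY_DEST is X, and when it is safe to do so the inlined body is made
   to store its result directly into X instead of a fresh temporary.  */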
3325
3326 static tree
3327 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3328 tree return_bounds, basic_block entry_bb)
3329 {
3330 tree callee = id->src_fn;
3331 tree result = DECL_RESULT (callee);
3332 tree callee_type = TREE_TYPE (result);
3333 tree caller_type;
3334 tree var, use;
3335
3336 /* Handle type-mismatches in the function declaration return type
3337 vs. the call expression. */
3338 if (modify_dest)
3339 caller_type = TREE_TYPE (modify_dest);
3340 else
3341 caller_type = TREE_TYPE (TREE_TYPE (callee));
3342
3343 /* We don't need to do anything for functions that don't return anything. */
3344 if (VOID_TYPE_P (callee_type))
3345 return NULL_TREE;
3346
3347 /* If there was a return slot, then the return value is the
3348 dereferenced address of that object. */
3349 if (return_slot)
3350 {
3351 /* The front end shouldn't have used both return_slot and
3352 a modify expression. */
3353 gcc_assert (!modify_dest);
3354 if (DECL_BY_REFERENCE (result))
3355 {
3356 tree return_slot_addr = build_fold_addr_expr (return_slot);
3357 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3358
3359 /* We are going to construct *&return_slot and we can't do that
3360 for variables believed to be not addressable.
3361
3362 	     FIXME: This check can possibly match, because values returned
3363 	     via the return slot optimization are not believed to have their
3364 	     address taken by alias analysis.  */
3365 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3366 var = return_slot_addr;
3367 }
3368 else
3369 {
3370 var = return_slot;
3371 gcc_assert (TREE_CODE (var) != SSA_NAME);
3372 if (TREE_ADDRESSABLE (result))
3373 mark_addressable (var);
3374 }
3375 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3376 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3377 && !DECL_GIMPLE_REG_P (result)
3378 && DECL_P (var))
3379 DECL_GIMPLE_REG_P (var) = 0;
3380 use = NULL;
3381 goto done;
3382 }
3383
3384 /* All types requiring non-trivial constructors should have been handled. */
3385 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3386
3387 /* Attempt to avoid creating a new temporary variable. */
3388 if (modify_dest
3389 && TREE_CODE (modify_dest) != SSA_NAME)
3390 {
3391 bool use_it = false;
3392
3393 /* We can't use MODIFY_DEST if there's type promotion involved. */
3394 if (!useless_type_conversion_p (callee_type, caller_type))
3395 use_it = false;
3396
3397 /* ??? If we're assigning to a variable sized type, then we must
3398 reuse the destination variable, because we've no good way to
3399 create variable sized temporaries at this point. */
3400 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3401 use_it = true;
3402
3403 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3404 reuse it as the result of the call directly. Don't do this if
3405 it would promote MODIFY_DEST to addressable. */
3406 else if (TREE_ADDRESSABLE (result))
3407 use_it = false;
3408 else
3409 {
3410 tree base_m = get_base_address (modify_dest);
3411
3412 /* If the base isn't a decl, then it's a pointer, and we don't
3413 know where that's going to go. */
3414 if (!DECL_P (base_m))
3415 use_it = false;
3416 else if (is_global_var (base_m))
3417 use_it = false;
3418 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3419 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3420 && !DECL_GIMPLE_REG_P (result)
3421 && DECL_GIMPLE_REG_P (base_m))
3422 use_it = false;
3423 else if (!TREE_ADDRESSABLE (base_m))
3424 use_it = true;
3425 }
3426
3427 if (use_it)
3428 {
3429 var = modify_dest;
3430 use = NULL;
3431 goto done;
3432 }
3433 }
3434
3435 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3436
3437 var = copy_result_decl_to_var (result, id);
3438 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3439
3440 /* Do not have the rest of GCC warn about this variable as it should
3441 not be visible to the user. */
3442 TREE_NO_WARNING (var) = 1;
3443
3444 declare_inline_vars (id->block, var);
3445
3446 /* Build the use expr. If the return type of the function was
3447 promoted, convert it back to the expected type. */
3448 use = var;
3449 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3450 {
3451 /* If we can match up types by promotion/demotion do so. */
3452 if (fold_convertible_p (caller_type, var))
3453 use = fold_convert (caller_type, var);
3454 else
3455 {
3456 /* ??? For valid programs we should not end up here.
3457 Still if we end up with truly mismatched types here, fall back
3458 to using a MEM_REF to not leak invalid GIMPLE to the following
3459 passes. */
3460 /* Prevent var from being written into SSA form. */
3461 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3462 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3463 DECL_GIMPLE_REG_P (var) = false;
3464 else if (is_gimple_reg_type (TREE_TYPE (var)))
3465 TREE_ADDRESSABLE (var) = true;
3466 use = fold_build2 (MEM_REF, caller_type,
3467 build_fold_addr_expr (var),
3468 build_int_cst (ptr_type_node, 0));
3469 }
3470 }
3471
3472 STRIP_USELESS_TYPE_CONVERSION (use);
3473
3474 if (DECL_BY_REFERENCE (result))
3475 {
3476 TREE_ADDRESSABLE (var) = 1;
3477 var = build_fold_addr_expr (var);
3478 }
3479
3480 done:
3481 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3482 way, when the RESULT_DECL is encountered, it will be
3483 automatically replaced by the VAR_DECL.
3484
3485 When returning by reference, ensure that RESULT_DECL remaps to
3486 gimple_val. */
3487 if (DECL_BY_REFERENCE (result)
3488 && !is_gimple_val (var))
3489 {
3490 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3491 insert_decl_map (id, result, temp);
3492 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3493	        its default_def SSA_NAME. */
3494 if (gimple_in_ssa_p (id->src_cfun)
3495 && is_gimple_reg (result))
3496 {
3497 temp = make_ssa_name (temp);
3498 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3499 }
3500 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3501 }
3502 else
3503 insert_decl_map (id, result, var);
3504
3505 /* Remember this so we can ignore it in remap_decls. */
3506 id->retvar = var;
3507
3508 /* If returned bounds are used, then make var for them. */
3509 if (return_bounds)
3510 {
3511 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3512 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3513 TREE_NO_WARNING (bndtemp) = 1;
3514 declare_inline_vars (id->block, bndtemp);
3515
3516 id->retbnd = bndtemp;
3517 insert_init_stmt (id, entry_bb,
3518 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3519 }
3520
3521 return use;
3522 }
3523
3524 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3525 to a local label. */
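/* An illustrative (made-up) example of what this looks for:

     void f (void)
     {
      lab:;
       static void *p = &&lab;
       ...
     }

   DECL_INITIAL of 'p' then refers to the local label 'lab', so no copy
   of 'f' could remap the stored address. */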
3526
3527 static tree
3528 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3529 {
3530 tree node = *nodep;
3531 tree fn = (tree) fnp;
3532
3533 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3534 return node;
3535
3536 if (TYPE_P (node))
3537 *walk_subtrees = 0;
3538
3539 return NULL_TREE;
3540 }
3541
3542 /* Determine if the function can be copied. If so return NULL. If
3543	   not return a string describing the reason for failure. */
3544
3545 const char *
3546 copy_forbidden (struct function *fun, tree fndecl)
3547 {
3548 const char *reason = fun->cannot_be_copied_reason;
3549 tree decl;
3550 unsigned ix;
3551
3552 /* Only examine the function once. */
3553 if (fun->cannot_be_copied_set)
3554 return reason;
3555
3556 /* We cannot copy a function that receives a non-local goto
3557 because we cannot remap the destination label used in the
3558 function that is performing the non-local goto. */
3559 /* ??? Actually, this should be possible, if we work at it.
3560 No doubt there's just a handful of places that simply
3561 assume it doesn't happen and don't substitute properly. */
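  /* Sketch of the problematic shape, using the GNU C nested-function
     extension (hypothetical example; 'invoke' is made up):

       void f (void)
       {
         __label__ out;
         void g (void) { goto out; }
         invoke (g);
       out:;
       }

     Here 'f' receives a non-local goto: label 'out' lives in 'f' but is
     jumped to from 'g', and a copy of 'f' could not remap it. */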
3562 if (fun->has_nonlocal_label)
3563 {
3564 reason = G_("function %q+F can never be copied "
3565 "because it receives a non-local goto");
3566 goto fail;
3567 }
3568
3569 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3570 if (TREE_CODE (decl) == VAR_DECL
3571 && TREE_STATIC (decl)
3572 && !DECL_EXTERNAL (decl)
3573 && DECL_INITIAL (decl)
3574 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3575 has_label_address_in_static_1,
3576 fndecl))
3577 {
3578 reason = G_("function %q+F can never be copied because it saves "
3579 "address of local label in a static variable");
3580 goto fail;
3581 }
3582
3583 fail:
3584 fun->cannot_be_copied_reason = reason;
3585 fun->cannot_be_copied_set = true;
3586 return reason;
3587 }
3588
3589
3590 static const char *inline_forbidden_reason;
3591
3592 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3593	   iff a function cannot be inlined. Also sets the reason why. */
3594
3595 static tree
3596 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3597 struct walk_stmt_info *wip)
3598 {
3599 tree fn = (tree) wip->info;
3600 tree t;
3601 gimple stmt = gsi_stmt (*gsi);
3602
3603 switch (gimple_code (stmt))
3604 {
3605 case GIMPLE_CALL:
3606	      /* Refuse to inline an alloca call unless the user explicitly forced it,
3607	         as this may change the program's memory overhead drastically when the
3608	         function using alloca is called in a loop. For the GCC present in
3609	         SPEC2000, inlining into schedule_block caused it to require 2GB of
3610	         RAM instead of 256MB. Don't do so for alloca calls emitted for
3611	         VLA objects, as those can't cause unbounded growth (they're always
3612	         wrapped inside stack_save/stack_restore regions). */
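      /* E.g. (illustrative only; 'consume' is hypothetical):

	   void f (size_t n) { char *buf = alloca (n); consume (buf); }

	 Inlined into a loop body, each iteration would grow the caller's
	 stack frame with no intervening release. */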
3613 if (gimple_alloca_call_p (stmt)
3614 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3615 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3616 {
3617 inline_forbidden_reason
3618 = G_("function %q+F can never be inlined because it uses "
3619 "alloca (override using the always_inline attribute)");
3620 *handled_ops_p = true;
3621 return fn;
3622 }
3623
3624 t = gimple_call_fndecl (stmt);
3625 if (t == NULL_TREE)
3626 break;
3627
3628 /* We cannot inline functions that call setjmp. */
3629 if (setjmp_call_p (t))
3630 {
3631 inline_forbidden_reason
3632 = G_("function %q+F can never be inlined because it uses setjmp");
3633 *handled_ops_p = true;
3634 return t;
3635 }
3636
3637 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3638 switch (DECL_FUNCTION_CODE (t))
3639 {
3640 /* We cannot inline functions that take a variable number of
3641 arguments. */
3642 case BUILT_IN_VA_START:
3643 case BUILT_IN_NEXT_ARG:
3644 case BUILT_IN_VA_END:
3645 inline_forbidden_reason
3646 = G_("function %q+F can never be inlined because it "
3647 "uses variable argument lists");
3648 *handled_ops_p = true;
3649 return t;
3650
3651 case BUILT_IN_LONGJMP:
3652 /* We can't inline functions that call __builtin_longjmp at
3653 all. The non-local goto machinery really requires the
3654 destination be in a different function. If we allow the
3655 function calling __builtin_longjmp to be inlined into the
3656 function calling __builtin_setjmp, Things will Go Awry. */
3657 inline_forbidden_reason
3658 = G_("function %q+F can never be inlined because "
3659 "it uses setjmp-longjmp exception handling");
3660 *handled_ops_p = true;
3661 return t;
3662
3663 case BUILT_IN_NONLOCAL_GOTO:
3664 /* Similarly. */
3665 inline_forbidden_reason
3666 = G_("function %q+F can never be inlined because "
3667 "it uses non-local goto");
3668 *handled_ops_p = true;
3669 return t;
3670
3671 case BUILT_IN_RETURN:
3672 case BUILT_IN_APPLY_ARGS:
3673 /* If a __builtin_apply_args caller would be inlined,
3674 it would be saving arguments of the function it has
3675 been inlined into. Similarly __builtin_return would
3676	       return from the function it has been inlined into. */
3677 inline_forbidden_reason
3678 = G_("function %q+F can never be inlined because "
3679 "it uses __builtin_return or __builtin_apply_args");
3680 *handled_ops_p = true;
3681 return t;
3682
3683 default:
3684 break;
3685 }
3686 break;
3687
3688 case GIMPLE_GOTO:
3689 t = gimple_goto_dest (stmt);
3690
3691 /* We will not inline a function which uses computed goto. The
3692 addresses of its local labels, which may be tucked into
3693 global storage, are of course not constant across
3694 instantiations, which causes unexpected behavior. */
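      /* E.g. a body containing (illustrative only):

	   void *tbl[] = { &&a, &&b };
	   goto *tbl[i];

	 cannot be inlined: the stored label addresses refer to the
	 original body, not to any inlined copy of it. */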
3695 if (TREE_CODE (t) != LABEL_DECL)
3696 {
3697 inline_forbidden_reason
3698 = G_("function %q+F can never be inlined "
3699 "because it contains a computed goto");
3700 *handled_ops_p = true;
3701 return t;
3702 }
3703 break;
3704
3705 default:
3706 break;
3707 }
3708
3709 *handled_ops_p = false;
3710 return NULL_TREE;
3711 }
3712
3713 /* Return true if FNDECL is a function that cannot be inlined into
3714 another one. */
3715
3716 static bool
3717 inline_forbidden_p (tree fndecl)
3718 {
3719 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3720 struct walk_stmt_info wi;
3721 basic_block bb;
3722 bool forbidden_p = false;
3723
3724 /* First check for shared reasons not to copy the code. */
3725 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3726 if (inline_forbidden_reason != NULL)
3727 return true;
3728
3729 /* Next, walk the statements of the function looking for
3730	     constructs we can't handle, or that are non-optimal for inlining. */
3731 hash_set<tree> visited_nodes;
3732 memset (&wi, 0, sizeof (wi));
3733 wi.info = (void *) fndecl;
3734 wi.pset = &visited_nodes;
3735
3736 FOR_EACH_BB_FN (bb, fun)
3737 {
3738 gimple ret;
3739 gimple_seq seq = bb_seq (bb);
3740 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3741 forbidden_p = (ret != NULL);
3742 if (forbidden_p)
3743 break;
3744 }
3745
3746 return forbidden_p;
3747 }
3748 \f
3749 /* Return false if the function FNDECL cannot be inlined on account of its
3750 attributes, true otherwise. */
3751 static bool
3752 function_attribute_inlinable_p (const_tree fndecl)
3753 {
3754 if (targetm.attribute_table)
3755 {
3756 const_tree a;
3757
3758 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3759 {
3760 const_tree name = TREE_PURPOSE (a);
3761 int i;
3762
3763 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3764 if (is_attribute_p (targetm.attribute_table[i].name, name))
3765 return targetm.function_attribute_inlinable_p (fndecl);
3766 }
3767 }
3768
3769 return true;
3770 }
3771
3772 /* Returns nonzero if FN is a function that does not have any
3773 fundamental inline blocking properties. */
3774
3775 bool
3776 tree_inlinable_function_p (tree fn)
3777 {
3778 bool inlinable = true;
3779 bool do_warning;
3780 tree always_inline;
3781
3782 /* If we've already decided this function shouldn't be inlined,
3783 there's no need to check again. */
3784 if (DECL_UNINLINABLE (fn))
3785 return false;
3786
3787 /* We only warn for functions declared `inline' by the user. */
3788 do_warning = (warn_inline
3789 && DECL_DECLARED_INLINE_P (fn)
3790 && !DECL_NO_INLINE_WARNING_P (fn)
3791 && !DECL_IN_SYSTEM_HEADER (fn));
3792
3793 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3794
3795 if (flag_no_inline
3796 && always_inline == NULL)
3797 {
3798 if (do_warning)
3799 warning (OPT_Winline, "function %q+F can never be inlined because it "
3800 "is suppressed using -fno-inline", fn);
3801 inlinable = false;
3802 }
3803
3804 else if (!function_attribute_inlinable_p (fn))
3805 {
3806 if (do_warning)
3807 warning (OPT_Winline, "function %q+F can never be inlined because it "
3808 "uses attributes conflicting with inlining", fn);
3809 inlinable = false;
3810 }
3811
3812 else if (inline_forbidden_p (fn))
3813 {
3814 /* See if we should warn about uninlinable functions. Previously,
3815 some of these warnings would be issued while trying to expand
3816 the function inline, but that would cause multiple warnings
3817 about functions that would for example call alloca. But since
3818	 this is a property of the function, just one warning is enough.
3819 As a bonus we can now give more details about the reason why a
3820 function is not inlinable. */
3821 if (always_inline)
3822 error (inline_forbidden_reason, fn);
3823 else if (do_warning)
3824 warning (OPT_Winline, inline_forbidden_reason, fn);
3825
3826 inlinable = false;
3827 }
3828
3829 /* Squirrel away the result so that we don't have to check again. */
3830 DECL_UNINLINABLE (fn) = !inlinable;
3831
3832 return inlinable;
3833 }
3834
3835	/* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3836	   word size, take a possible memcpy call into account, and return the
3837	   cost based on whether we optimize for size or speed according to SPEED_P. */
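/* For instance, on a hypothetical target with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed_p) == 4, a 24-byte struct costs
   (24 + 8 - 1) / 8 == 3 units below, while a 64-byte struct exceeds the
   8 * 4 == 32 byte threshold and costs 4, the assumed price of a memcpy
   call. */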
3838
3839 int
3840 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3841 {
3842 HOST_WIDE_INT size;
3843
3844 gcc_assert (!VOID_TYPE_P (type));
3845
3846 if (TREE_CODE (type) == VECTOR_TYPE)
3847 {
3848 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3849 machine_mode simd
3850 = targetm.vectorize.preferred_simd_mode (inner);
3851 int simd_mode_size = GET_MODE_SIZE (simd);
3852 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3853 / simd_mode_size);
3854 }
3855
3856 size = int_size_in_bytes (type);
3857
3858 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3859 /* Cost of a memcpy call, 3 arguments and the call. */
3860 return 4;
3861 else
3862 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3863 }
3864
3865	/* Returns the cost of operation CODE, according to WEIGHTS. */
3866
3867 static int
3868 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3869 tree op1 ATTRIBUTE_UNUSED, tree op2)
3870 {
3871 switch (code)
3872 {
3873 /* These are "free" conversions, or their presumed cost
3874 is folded into other operations. */
3875 case RANGE_EXPR:
3876 CASE_CONVERT:
3877 case COMPLEX_EXPR:
3878 case PAREN_EXPR:
3879 case VIEW_CONVERT_EXPR:
3880 return 0;
3881
3882 /* Assign cost of 1 to usual operations.
3883 ??? We may consider mapping RTL costs to this. */
3884 case COND_EXPR:
3885 case VEC_COND_EXPR:
3886 case VEC_PERM_EXPR:
3887
3888 case PLUS_EXPR:
3889 case POINTER_PLUS_EXPR:
3890 case MINUS_EXPR:
3891 case MULT_EXPR:
3892 case MULT_HIGHPART_EXPR:
3893 case FMA_EXPR:
3894
3895 case ADDR_SPACE_CONVERT_EXPR:
3896 case FIXED_CONVERT_EXPR:
3897 case FIX_TRUNC_EXPR:
3898
3899 case NEGATE_EXPR:
3900 case FLOAT_EXPR:
3901 case MIN_EXPR:
3902 case MAX_EXPR:
3903 case ABS_EXPR:
3904
3905 case LSHIFT_EXPR:
3906 case RSHIFT_EXPR:
3907 case LROTATE_EXPR:
3908 case RROTATE_EXPR:
3909
3910 case BIT_IOR_EXPR:
3911 case BIT_XOR_EXPR:
3912 case BIT_AND_EXPR:
3913 case BIT_NOT_EXPR:
3914
3915 case TRUTH_ANDIF_EXPR:
3916 case TRUTH_ORIF_EXPR:
3917 case TRUTH_AND_EXPR:
3918 case TRUTH_OR_EXPR:
3919 case TRUTH_XOR_EXPR:
3920 case TRUTH_NOT_EXPR:
3921
3922 case LT_EXPR:
3923 case LE_EXPR:
3924 case GT_EXPR:
3925 case GE_EXPR:
3926 case EQ_EXPR:
3927 case NE_EXPR:
3928 case ORDERED_EXPR:
3929 case UNORDERED_EXPR:
3930
3931 case UNLT_EXPR:
3932 case UNLE_EXPR:
3933 case UNGT_EXPR:
3934 case UNGE_EXPR:
3935 case UNEQ_EXPR:
3936 case LTGT_EXPR:
3937
3938 case CONJ_EXPR:
3939
3940 case PREDECREMENT_EXPR:
3941 case PREINCREMENT_EXPR:
3942 case POSTDECREMENT_EXPR:
3943 case POSTINCREMENT_EXPR:
3944
3945 case REALIGN_LOAD_EXPR:
3946
3947 case REDUC_MAX_EXPR:
3948 case REDUC_MIN_EXPR:
3949 case REDUC_PLUS_EXPR:
3950 case WIDEN_SUM_EXPR:
3951 case WIDEN_MULT_EXPR:
3952 case DOT_PROD_EXPR:
3953 case SAD_EXPR:
3954 case WIDEN_MULT_PLUS_EXPR:
3955 case WIDEN_MULT_MINUS_EXPR:
3956 case WIDEN_LSHIFT_EXPR:
3957
3958 case VEC_WIDEN_MULT_HI_EXPR:
3959 case VEC_WIDEN_MULT_LO_EXPR:
3960 case VEC_WIDEN_MULT_EVEN_EXPR:
3961 case VEC_WIDEN_MULT_ODD_EXPR:
3962 case VEC_UNPACK_HI_EXPR:
3963 case VEC_UNPACK_LO_EXPR:
3964 case VEC_UNPACK_FLOAT_HI_EXPR:
3965 case VEC_UNPACK_FLOAT_LO_EXPR:
3966 case VEC_PACK_TRUNC_EXPR:
3967 case VEC_PACK_SAT_EXPR:
3968 case VEC_PACK_FIX_TRUNC_EXPR:
3969 case VEC_WIDEN_LSHIFT_HI_EXPR:
3970 case VEC_WIDEN_LSHIFT_LO_EXPR:
3971
3972 return 1;
3973
3974	    /* A few special cases of expensive operations. This is useful
3975	       to avoid inlining functions having too many of these. */
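    /* E.g. 'x / y' with a non-constant 'y' is charged div_mod_cost
       below, while 'x / 16' is charged 1, as division by a constant is
       typically strength-reduced to multiplies and shifts. */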
3976 case TRUNC_DIV_EXPR:
3977 case CEIL_DIV_EXPR:
3978 case FLOOR_DIV_EXPR:
3979 case ROUND_DIV_EXPR:
3980 case EXACT_DIV_EXPR:
3981 case TRUNC_MOD_EXPR:
3982 case CEIL_MOD_EXPR:
3983 case FLOOR_MOD_EXPR:
3984 case ROUND_MOD_EXPR:
3985 case RDIV_EXPR:
3986 if (TREE_CODE (op2) != INTEGER_CST)
3987 return weights->div_mod_cost;
3988 return 1;
3989
3990 default:
3991 /* We expect a copy assignment with no operator. */
3992 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3993 return 0;
3994 }
3995 }
3996
3997
3998 /* Estimate number of instructions that will be created by expanding
3999 the statements in the statement sequence STMTS.
4000 WEIGHTS contains weights attributed to various constructs. */
4001
4002	static int
4003	estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4004 {
4005 int cost;
4006 gimple_stmt_iterator gsi;
4007
4008 cost = 0;
4009 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4010 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4011
4012 return cost;
4013 }
4014
4015
4016 /* Estimate number of instructions that will be created by expanding STMT.
4017 WEIGHTS contains weights attributed to various constructs. */
4018
4019 int
4020 estimate_num_insns (gimple stmt, eni_weights *weights)
4021 {
4022 unsigned cost, i;
4023 enum gimple_code code = gimple_code (stmt);
4024 tree lhs;
4025 tree rhs;
4026
4027 switch (code)
4028 {
4029 case GIMPLE_ASSIGN:
4030	      /* Try to estimate the cost of assignments. We have two cases to
4031 deal with:
4032 1) Simple assignments to registers;
4033 2) Stores to things that must live in memory. This includes
4034 "normal" stores to scalars, but also assignments of large
4035	    structures, or constructors of big arrays.
4036
4037	 Let us look at these cases, assuming we have "a = b + C":
4038 <GIMPLE_ASSIGN <var_decl "a">
4039 <plus_expr <var_decl "b"> <constant C>>
4040 If "a" is a GIMPLE register, the assignment to it is free on almost
4041 any target, because "a" usually ends up in a real register. Hence
4042 the only cost of this expression comes from the PLUS_EXPR, and we
4043 can ignore the GIMPLE_ASSIGN.
4044 If "a" is not a GIMPLE register, the assignment to "a" will most
4045 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4046 of moving something into "a", which we compute using the function
4047 estimate_move_cost. */
4048 if (gimple_clobber_p (stmt))
4049 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4050
4051 lhs = gimple_assign_lhs (stmt);
4052 rhs = gimple_assign_rhs1 (stmt);
4053
4054 cost = 0;
4055
4056 /* Account for the cost of moving to / from memory. */
4057 if (gimple_store_p (stmt))
4058 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4059 if (gimple_assign_load_p (stmt))
4060 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4061
4062 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4063 gimple_assign_rhs1 (stmt),
4064 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4065 == GIMPLE_BINARY_RHS
4066 ? gimple_assign_rhs2 (stmt) : NULL);
4067 break;
4068
4069 case GIMPLE_COND:
4070 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4071 gimple_op (stmt, 0),
4072 gimple_op (stmt, 1));
4073 break;
4074
4075 case GIMPLE_SWITCH:
4076 {
4077 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4078 /* Take into account cost of the switch + guess 2 conditional jumps for
4079 each case label.
4080
4081 TODO: once the switch expansion logic is sufficiently separated, we can
4082	           do a better job of estimating the cost of the switch. */
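	/* E.g. a switch with 8 labels is charged floor_log2 (8) * 2 == 6
	   when estimating time (roughly a balanced decision tree) and
	   8 * 2 == 16 when estimating size. */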
4083 if (weights->time_based)
4084 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4085 else
4086 cost = gimple_switch_num_labels (switch_stmt) * 2;
4087 }
4088 break;
4089
4090 case GIMPLE_CALL:
4091 {
4092 tree decl;
4093
4094 if (gimple_call_internal_p (stmt))
4095 return 0;
4096 else if ((decl = gimple_call_fndecl (stmt))
4097 && DECL_BUILT_IN (decl))
4098 {
4099 /* Do not special case builtins where we see the body.
4100	       This just confuses the inliner. */
4101 struct cgraph_node *node;
4102 if (!(node = cgraph_node::get (decl))
4103 || node->definition)
4104 ;
4105	    /* For builtins that are likely expanded to nothing or
4106 inlined do not account operand costs. */
4107 else if (is_simple_builtin (decl))
4108 return 0;
4109 else if (is_inexpensive_builtin (decl))
4110 return weights->target_builtin_call_cost;
4111 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4112 {
4113 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4114 specialize the cheap expansion we do here.
4115 ??? This asks for a more general solution. */
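	       /* E.g. a call pow (d, 2.0) is charged like the multiply
		  d * d it is expanded into, rather than like a full call. */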
4116 switch (DECL_FUNCTION_CODE (decl))
4117 {
4118 case BUILT_IN_POW:
4119 case BUILT_IN_POWF:
4120 case BUILT_IN_POWL:
4121 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4122 && REAL_VALUES_EQUAL
4123 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4124 return estimate_operator_cost
4125 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4126 gimple_call_arg (stmt, 0));
4127 break;
4128
4129 default:
4130 break;
4131 }
4132 }
4133 }
4134
4135 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4136 if (gimple_call_lhs (stmt))
4137 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4138 weights->time_based);
4139 for (i = 0; i < gimple_call_num_args (stmt); i++)
4140 {
4141 tree arg = gimple_call_arg (stmt, i);
4142 cost += estimate_move_cost (TREE_TYPE (arg),
4143 weights->time_based);
4144 }
4145 break;
4146 }
4147
4148 case GIMPLE_RETURN:
4149 return weights->return_cost;
4150
4151 case GIMPLE_GOTO:
4152 case GIMPLE_LABEL:
4153 case GIMPLE_NOP:
4154 case GIMPLE_PHI:
4155 case GIMPLE_PREDICT:
4156 case GIMPLE_DEBUG:
4157 return 0;
4158
4159 case GIMPLE_ASM:
4160 {
4161 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4162 /* 1000 means infinity. This avoids overflows later
4163 with very long asm statements. */
4164 if (count > 1000)
4165 count = 1000;
4166 return count;
4167 }
4168
4169 case GIMPLE_RESX:
4170 /* This is either going to be an external function call with one
4171 argument, or two register copy statements plus a goto. */
4172 return 2;
4173
4174 case GIMPLE_EH_DISPATCH:
4175 /* ??? This is going to turn into a switch statement. Ideally
4176 we'd have a look at the eh region and estimate the number of
4177 edges involved. */
4178 return 10;
4179
4180 case GIMPLE_BIND:
4181 return estimate_num_insns_seq (
4182 gimple_bind_body (as_a <gbind *> (stmt)),
4183 weights);
4184
4185 case GIMPLE_EH_FILTER:
4186 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4187
4188 case GIMPLE_CATCH:
4189 return estimate_num_insns_seq (gimple_catch_handler (
4190 as_a <gcatch *> (stmt)),
4191 weights);
4192
4193 case GIMPLE_TRY:
4194 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4195 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4196
4197 /* OMP directives are generally very expensive. */
4198
4199 case GIMPLE_OMP_RETURN:
4200 case GIMPLE_OMP_SECTIONS_SWITCH:
4201 case GIMPLE_OMP_ATOMIC_STORE:
4202 case GIMPLE_OMP_CONTINUE:
4203 /* ...except these, which are cheap. */
4204 return 0;
4205
4206 case GIMPLE_OMP_ATOMIC_LOAD:
4207 return weights->omp_cost;
4208
4209 case GIMPLE_OMP_FOR:
4210 return (weights->omp_cost
4211 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4212 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4213
4214 case GIMPLE_OMP_PARALLEL:
4215 case GIMPLE_OMP_TASK:
4216 case GIMPLE_OMP_CRITICAL:
4217 case GIMPLE_OMP_MASTER:
4218 case GIMPLE_OMP_TASKGROUP:
4219 case GIMPLE_OMP_ORDERED:
4220 case GIMPLE_OMP_SECTION:
4221 case GIMPLE_OMP_SECTIONS:
4222 case GIMPLE_OMP_SINGLE:
4223 case GIMPLE_OMP_TARGET:
4224 case GIMPLE_OMP_TEAMS:
4225 return (weights->omp_cost
4226 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4227
4228 case GIMPLE_TRANSACTION:
4229 return (weights->tm_cost
4230 + estimate_num_insns_seq (gimple_transaction_body (
4231 as_a <gtransaction *> (stmt)),
4232 weights));
4233
4234 default:
4235 gcc_unreachable ();
4236 }
4237
4238 return cost;
4239 }
4240
4241 /* Estimate number of instructions that will be created by expanding
4242 function FNDECL. WEIGHTS contains weights attributed to various
4243 constructs. */
4244
4245 int
4246 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4247 {
4248 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4249 gimple_stmt_iterator bsi;
4250 basic_block bb;
4251 int n = 0;
4252
4253 gcc_assert (my_function && my_function->cfg);
4254 FOR_EACH_BB_FN (bb, my_function)
4255 {
4256 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4257 n += estimate_num_insns (gsi_stmt (bsi), weights);
4258 }
4259
4260 return n;
4261 }
4262
4263
4264 /* Initializes weights used by estimate_num_insns. */
4265
4266 void
4267 init_inline_once (void)
4268 {
4269 eni_size_weights.call_cost = 1;
4270 eni_size_weights.indirect_call_cost = 3;
4271 eni_size_weights.target_builtin_call_cost = 1;
4272 eni_size_weights.div_mod_cost = 1;
4273 eni_size_weights.omp_cost = 40;
4274 eni_size_weights.tm_cost = 10;
4275 eni_size_weights.time_based = false;
4276 eni_size_weights.return_cost = 1;
4277
4278 /* Estimating time for call is difficult, since we have no idea what the
4279 called function does. In the current uses of eni_time_weights,
4280 underestimating the cost does less harm than overestimating it, so
4281 we choose a rather small value here. */
4282 eni_time_weights.call_cost = 10;
4283 eni_time_weights.indirect_call_cost = 15;
4284 eni_time_weights.target_builtin_call_cost = 1;
4285 eni_time_weights.div_mod_cost = 10;
4286 eni_time_weights.omp_cost = 40;
4287 eni_time_weights.tm_cost = 40;
4288 eni_time_weights.time_based = true;
4289 eni_time_weights.return_cost = 2;
4290 }
4291
4292 /* Estimate the number of instructions in a gimple_seq. */
4293
4294 int
4295 count_insns_seq (gimple_seq seq, eni_weights *weights)
4296 {
4297 gimple_stmt_iterator gsi;
4298 int n = 0;
4299 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4300 n += estimate_num_insns (gsi_stmt (gsi), weights);
4301
4302 return n;
4303 }
4304
4305
4306 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4307
4308 static void
4309 prepend_lexical_block (tree current_block, tree new_block)
4310 {
4311 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4312 BLOCK_SUBBLOCKS (current_block) = new_block;
4313 BLOCK_SUPERCONTEXT (new_block) = current_block;
4314 }
4315
4316 /* Add local variables from CALLEE to CALLER. */
4317
4318 static inline void
4319 add_local_variables (struct function *callee, struct function *caller,
4320 copy_body_data *id)
4321 {
4322 tree var;
4323 unsigned ix;
4324
4325 FOR_EACH_LOCAL_DECL (callee, ix, var)
4326 if (!can_be_nonlocal (var, id))
4327 {
4328 tree new_var = remap_decl (var, id);
4329
4330 /* Remap debug-expressions. */
4331 if (TREE_CODE (new_var) == VAR_DECL
4332 && DECL_HAS_DEBUG_EXPR_P (var)
4333 && new_var != var)
4334 {
4335 tree tem = DECL_DEBUG_EXPR (var);
4336 bool old_regimplify = id->regimplify;
4337 id->remapping_type_depth++;
4338 walk_tree (&tem, copy_tree_body_r, id, NULL);
4339 id->remapping_type_depth--;
4340 id->regimplify = old_regimplify;
4341 SET_DECL_DEBUG_EXPR (new_var, tem);
4342 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4343 }
4344 add_local_decl (caller, new_var);
4345 }
4346 }
4347
4348 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4349
4350 static bool
4351 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4352 {
4353 tree use_retvar;
4354 tree fn;
4355 hash_map<tree, tree> *dst;
4356 hash_map<tree, tree> *st = NULL;
4357 tree return_slot;
4358 tree modify_dest;
4359 tree return_bounds = NULL;
4360 location_t saved_location;
4361 struct cgraph_edge *cg_edge;
4362 cgraph_inline_failed_t reason;
4363 basic_block return_block;
4364 edge e;
4365 gimple_stmt_iterator gsi, stmt_gsi;
4366 bool successfully_inlined = FALSE;
4367 bool purge_dead_abnormal_edges;
4368 gcall *call_stmt;
4369 unsigned int i;
4370
4371 /* Set input_location here so we get the right instantiation context
4372 if we call instantiate_decl from inlinable_function_p. */
4373 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4374 saved_location = input_location;
4375 input_location = gimple_location (stmt);
4376
4377 /* From here on, we're only interested in CALL_EXPRs. */
4378 call_stmt = dyn_cast <gcall *> (stmt);
4379 if (!call_stmt)
4380 goto egress;
4381
4382 cg_edge = id->dst_node->get_edge (stmt);
4383 gcc_checking_assert (cg_edge);
4384 /* First, see if we can figure out what function is being called.
4385 If we cannot, then there is no hope of inlining the function. */
4386 if (cg_edge->indirect_unknown_callee)
4387 goto egress;
4388 fn = cg_edge->callee->decl;
4389 gcc_checking_assert (fn);
4390
4391 /* If FN is a declaration of a function in a nested scope that was
4392 globally declared inline, we don't set its DECL_INITIAL.
4393 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4394 C++ front-end uses it for cdtors to refer to their internal
4395 declarations, that are not real functions. Fortunately those
4396 don't have trees to be saved, so we can tell by checking their
4397 gimple_body. */
4398 if (!DECL_INITIAL (fn)
4399 && DECL_ABSTRACT_ORIGIN (fn)
4400 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4401 fn = DECL_ABSTRACT_ORIGIN (fn);
4402
4403 /* Don't try to inline functions that are not well-suited to inlining. */
4404 if (cg_edge->inline_failed)
4405 {
4406 reason = cg_edge->inline_failed;
4407 /* If this call was originally indirect, we do not want to emit any
4408 inlining related warnings or sorry messages because there are no
4409 guarantees regarding those. */
4410 if (cg_edge->indirect_inlining_edge)
4411 goto egress;
4412
4413 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4414	      /* For extern inline functions that get redefined we have always
4415	         silently ignored the always_inline flag. Better behaviour would
4416 be to be able to keep both bodies and use extern inline body
4417 for inlining, but we can't do that because frontends overwrite
4418 the body. */
4419 && !cg_edge->callee->local.redefined_extern_inline
4420 /* During early inline pass, report only when optimization is
4421 not turned on. */
4422 && (symtab->global_info_ready
4423 || !optimize
4424 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4425 /* PR 20090218-1_0.c. Body can be provided by another module. */
4426 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4427 {
4428 error ("inlining failed in call to always_inline %q+F: %s", fn,
4429 cgraph_inline_failed_string (reason));
4430 error ("called from here");
4431 }
4432 else if (warn_inline
4433 && DECL_DECLARED_INLINE_P (fn)
4434 && !DECL_NO_INLINE_WARNING_P (fn)
4435 && !DECL_IN_SYSTEM_HEADER (fn)
4436 && reason != CIF_UNSPECIFIED
4437 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4438 /* Do not warn about not inlined recursive calls. */
4439 && !cg_edge->recursive_p ()
4440 /* Avoid warnings during early inline pass. */
4441 && symtab->global_info_ready)
4442 {
4443 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4444 fn, _(cgraph_inline_failed_string (reason)));
4445 warning (OPT_Winline, "called from here");
4446 }
4447 goto egress;
4448 }
4449 fn = cg_edge->callee->decl;
4450 cg_edge->callee->get_untransformed_body ();
4451
4452 #ifdef ENABLE_CHECKING
4453 if (cg_edge->callee->decl != id->dst_node->decl)
4454 cg_edge->callee->verify ();
4455 #endif
4456
4457 /* We will be inlining this callee. */
4458 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4459 id->assign_stmts.create (0);
4460
4461	  /* Update the caller's EH personality. */
4462 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4463 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4464 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4465
4466 /* Split the block holding the GIMPLE_CALL. */
4467 e = split_block (bb, stmt);
4468 bb = e->src;
4469 return_block = e->dest;
4470 remove_edge (e);
4471
4472 /* split_block splits after the statement; work around this by
4473 moving the call into the second block manually. Not pretty,
4474 but seems easier than doing the CFG manipulation by hand
4475 when the GIMPLE_CALL is in the last statement of BB. */
4476 stmt_gsi = gsi_last_bb (bb);
4477 gsi_remove (&stmt_gsi, false);
4478
4479 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4480 been the source of abnormal edges. In this case, schedule
4481 the removal of dead abnormal edges. */
4482 gsi = gsi_start_bb (return_block);
4483 if (gsi_end_p (gsi))
4484 {
4485 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4486 purge_dead_abnormal_edges = true;
4487 }
4488 else
4489 {
4490 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4491 purge_dead_abnormal_edges = false;
4492 }
4493
4494 stmt_gsi = gsi_start_bb (return_block);
4495
4496 /* Build a block containing code to initialize the arguments, the
4497 actual inline expansion of the body, and a label for the return
4498 statements within the function to jump to. The type of the
4499 statement expression is the return type of the function call.
4500 ??? If the call does not have an associated block then we will
4501 remap all callee blocks to NULL, effectively dropping most of
4502 its debug information. This should only happen for calls to
4503 artificial decls inserted by the compiler itself. We need to
4504 either link the inlined blocks into the caller block tree or
4505	     not refer to them at all, so as not to break GC for locations. */
4506 if (gimple_block (stmt))
4507 {
4508 id->block = make_node (BLOCK);
4509 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4510 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4511 prepend_lexical_block (gimple_block (stmt), id->block);
4512 }
4513
4514 /* Local declarations will be replaced by their equivalents in this
4515 map. */
4516 st = id->decl_map;
4517 id->decl_map = new hash_map<tree, tree>;
4518 dst = id->debug_map;
4519 id->debug_map = NULL;
4520
4521 /* Record the function we are about to inline. */
4522 id->src_fn = fn;
4523 id->src_node = cg_edge->callee;
4524 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4525 id->call_stmt = stmt;
4526
4527 gcc_assert (!id->src_cfun->after_inlining);
4528
4529 id->entry_bb = bb;
4530 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4531 {
4532 gimple_stmt_iterator si = gsi_last_bb (bb);
4533 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4534 NOT_TAKEN),
4535 GSI_NEW_STMT);
4536 }
4537 initialize_inlined_parameters (id, stmt, fn, bb);
4538
4539 if (DECL_INITIAL (fn))
4540 {
4541 if (gimple_block (stmt))
4542 {
4543 tree *var;
4544
4545 prepend_lexical_block (id->block,
4546 remap_blocks (DECL_INITIAL (fn), id));
4547 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4548 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4549 == NULL_TREE));
4550	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4551	     otherwise, for DWARF, DW_TAG_formal_parameter will not be children of
4552	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4553	     under it. The parameters can then be evaluated in the debugger,
4554	     but don't show up in backtraces. */
4555 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4556 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4557 {
4558 tree v = *var;
4559 *var = TREE_CHAIN (v);
4560 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4561 BLOCK_VARS (id->block) = v;
4562 }
4563 else
4564 var = &TREE_CHAIN (*var);
4565 }
4566 else
4567 remap_blocks_to_null (DECL_INITIAL (fn), id);
4568 }
4569
4570 /* Return statements in the function body will be replaced by jumps
4571 to the RET_LABEL. */
4572 gcc_assert (DECL_INITIAL (fn));
4573 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4574
4575 /* Find the LHS to which the result of this call is assigned. */
4576 return_slot = NULL;
4577 if (gimple_call_lhs (stmt))
4578 {
4579 modify_dest = gimple_call_lhs (stmt);
4580
4581 /* Remember where to copy returned bounds. */
4582 if (gimple_call_with_bounds_p (stmt)
4583 && TREE_CODE (modify_dest) == SSA_NAME)
4584 {
4585 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4586 if (retbnd)
4587 {
4588 return_bounds = gimple_call_lhs (retbnd);
4589 /* If returned bounds are not used then just
4590	         remove the unused call. */
4591 if (!return_bounds)
4592 {
4593 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4594 gsi_remove (&iter, true);
4595 }
4596 }
4597 }
4598
4599 /* The function which we are inlining might not return a value,
4600 in which case we should issue a warning that the function
4601 does not return a value. In that case the optimizers will
4602 see that the variable to which the value is assigned was not
4603 initialized. We do not want to issue a warning about that
4604 uninitialized variable. */
4605 if (DECL_P (modify_dest))
4606 TREE_NO_WARNING (modify_dest) = 1;
4607
4608 if (gimple_call_return_slot_opt_p (call_stmt))
4609 {
4610 return_slot = modify_dest;
4611 modify_dest = NULL;
4612 }
4613 }
4614 else
4615 modify_dest = NULL;
4616
4617 /* If we are inlining a call to the C++ operator new, we don't want
4618 to use type based alias analysis on the return value. Otherwise
4619 we may get confused if the compiler sees that the inlined new
4620 function returns a pointer which was just deleted. See bug
4621 33407. */
4622 if (DECL_IS_OPERATOR_NEW (fn))
4623 {
4624 return_slot = NULL;
4625 modify_dest = NULL;
4626 }
4627
4628 /* Declare the return variable for the function. */
4629 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4630 return_bounds, bb);
4631
4632 /* Add local vars in this inlined callee to caller. */
4633 add_local_variables (id->src_cfun, cfun, id);
4634
4635 if (dump_file && (dump_flags & TDF_DETAILS))
4636 {
4637 fprintf (dump_file, "Inlining ");
4638 print_generic_expr (dump_file, id->src_fn, 0);
4639 fprintf (dump_file, " to ");
4640 print_generic_expr (dump_file, id->dst_fn, 0);
4641 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4642 }
4643
4644 /* This is it. Duplicate the callee body. Assume callee is
4645 pre-gimplified. Note that we must not alter the caller
4646 function in any way before this point, as this CALL_EXPR may be
4647 a self-referential call; if we're calling ourselves, we need to
4648 duplicate our body before altering anything. */
4649 copy_body (id, cg_edge->callee->count,
4650 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4651 bb, return_block, NULL);
4652
4653 /* Reset the escaped solution. */
4654 if (cfun->gimple_df)
4655 pt_solution_reset (&cfun->gimple_df->escaped);
4656
4657 /* Clean up. */
4658 if (id->debug_map)
4659 {
4660 delete id->debug_map;
4661 id->debug_map = dst;
4662 }
4663 delete id->decl_map;
4664 id->decl_map = st;
4665
4666	  /* Unlink the call's virtual operands before replacing it. */
4667 unlink_stmt_vdef (stmt);
4668 if (gimple_vdef (stmt)
4669 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4670 release_ssa_name (gimple_vdef (stmt));
4671
4672 /* If the inlined function returns a result that we care about,
4673 substitute the GIMPLE_CALL with an assignment of the return
4674 variable to the LHS of the call. That is, if STMT was
4675 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4676 if (use_retvar && gimple_call_lhs (stmt))
4677 {
4678 gimple old_stmt = stmt;
4679 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4680 gsi_replace (&stmt_gsi, stmt, false);
4681 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4682
4683 /* Copy bounds if we copy structure with bounds. */
4684 if (chkp_function_instrumented_p (id->dst_fn)
4685 && !BOUNDED_P (use_retvar)
4686 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4687 id->assign_stmts.safe_push (stmt);
4688 }
4689 else
4690 {
4691 /* Handle the case of inlining a function with no return
4692 statement, which causes the return value to become undefined. */
4693 if (gimple_call_lhs (stmt)
4694 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4695 {
4696 tree name = gimple_call_lhs (stmt);
4697 tree var = SSA_NAME_VAR (name);
4698 tree def = ssa_default_def (cfun, var);
4699
4700 if (def)
4701 {
4702 /* If the variable is used undefined, make this name
4703 undefined via a move. */
4704 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4705 gsi_replace (&stmt_gsi, stmt, true);
4706 }
4707 else
4708 {
4709 /* Otherwise make this variable undefined. */
4710 gsi_remove (&stmt_gsi, true);
4711 set_ssa_default_def (cfun, var, name);
4712 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4713 }
4714 }
4715 else
4716 gsi_remove (&stmt_gsi, true);
4717 }
4718
4719 /* Put returned bounds into the correct place if required. */
4720 if (return_bounds)
4721 {
4722 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4723 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4724 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4725 unlink_stmt_vdef (old_stmt);
4726 gsi_replace (&bnd_gsi, new_stmt, false);
4727 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4728 cgraph_update_edges_for_call_stmt (old_stmt,
4729 gimple_call_fndecl (old_stmt),
4730 new_stmt);
4731 }
4732
4733 if (purge_dead_abnormal_edges)
4734 {
4735 gimple_purge_dead_eh_edges (return_block);
4736 gimple_purge_dead_abnormal_call_edges (return_block);
4737 }
4738
4739 /* If the value of the new expression is ignored, that's OK. We
4740 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4741 the equivalent inlined version either. */
4742 if (is_gimple_assign (stmt))
4743 {
4744 gcc_assert (gimple_assign_single_p (stmt)
4745 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4746 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4747 }
4748
4749 /* Copy bounds for all generated assigns that need it. */
4750 for (i = 0; i < id->assign_stmts.length (); i++)
4751 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4752 id->assign_stmts.release ();
4753
4754 /* Output the inlining info for this abstract function, since it has been
4755 inlined. If we don't do this now, we can lose the information about the
4756 variables in the function when the blocks get blown away as soon as we
4757 remove the cgraph node. */
4758 if (gimple_block (stmt))
4759 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4760
4761 /* Update callgraph if needed. */
4762 cg_edge->callee->remove ();
4763
4764 id->block = NULL_TREE;
4765 successfully_inlined = TRUE;
4766
4767 egress:
4768 input_location = saved_location;
4769 return successfully_inlined;
4770 }
4771
4772 /* Expand call statements reachable from STMT_P.
4773 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4774 in a MODIFY_EXPR. */
4775
4776 static bool
4777 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4778 {
4779 gimple_stmt_iterator gsi;
4780
4781 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4782 {
4783 gimple stmt = gsi_stmt (gsi);
4784
4785 if (is_gimple_call (stmt)
4786 && !gimple_call_internal_p (stmt)
4787 && expand_call_inline (bb, stmt, id))
4788 return true;
4789 }
4790
4791 return false;
4792 }
4793
4794
4795 /* Walk all basic blocks created after FIRST and try to fold every statement
4796 in the STATEMENTS pointer set. */
4797
4798 static void
4799 fold_marked_statements (int first, hash_set<gimple> *statements)
4800 {
4801 for (; first < n_basic_blocks_for_fn (cfun); first++)
4802 if (BASIC_BLOCK_FOR_FN (cfun, first))
4803 {
4804 gimple_stmt_iterator gsi;
4805
4806 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4807 !gsi_end_p (gsi);
4808 gsi_next (&gsi))
4809 if (statements->contains (gsi_stmt (gsi)))
4810 {
4811 gimple old_stmt = gsi_stmt (gsi);
4812 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4813
4814 if (old_decl && DECL_BUILT_IN (old_decl))
4815 {
4816 /* Folding builtins can create multiple instructions,
4817	       so we need to look at all of them. */
4818 gimple_stmt_iterator i2 = gsi;
4819 gsi_prev (&i2);
4820 if (fold_stmt (&gsi))
4821 {
4822 gimple new_stmt;
4823 /* If a builtin at the end of a bb folded into nothing,
4824 the following loop won't work. */
4825 if (gsi_end_p (gsi))
4826 {
4827 cgraph_update_edges_for_call_stmt (old_stmt,
4828 old_decl, NULL);
4829 break;
4830 }
4831 if (gsi_end_p (i2))
4832 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4833 else
4834 gsi_next (&i2);
4835 while (1)
4836 {
4837 new_stmt = gsi_stmt (i2);
4838 update_stmt (new_stmt);
4839 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4840 new_stmt);
4841
4842 if (new_stmt == gsi_stmt (gsi))
4843 {
4844 /* It is okay to check only for the very last
4845 of these statements. If it is a throwing
4846 statement nothing will change. If it isn't
4847	             this can remove EH edges. The only way this
4848	             could go wrong is if some intermediate stmts
4849	             threw, but not the last one. That would mean
4850	             we'd have to split the block, which we can't
4851	             do here and we'd lose anyway. And as builtins
4852	             probably never throw, this all
4853	             is moot anyway. */
4854 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4855 new_stmt))
4856 gimple_purge_dead_eh_edges (
4857 BASIC_BLOCK_FOR_FN (cfun, first));
4858 break;
4859 }
4860 gsi_next (&i2);
4861 }
4862 }
4863 }
4864 else if (fold_stmt (&gsi))
4865 {
4866 /* Re-read the statement from GSI as fold_stmt() may
4867 have changed it. */
4868 gimple new_stmt = gsi_stmt (gsi);
4869 update_stmt (new_stmt);
4870
4871 if (is_gimple_call (old_stmt)
4872 || is_gimple_call (new_stmt))
4873 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4874 new_stmt);
4875
4876 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4877 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4878 first));
4879 }
4880 }
4881 }
4882 }
4883
4884 /* Expand calls to inline functions in the body of FN. */
4885
4886 unsigned int
4887 optimize_inline_calls (tree fn)
4888 {
4889 copy_body_data id;
4890 basic_block bb;
4891 int last = n_basic_blocks_for_fn (cfun);
4892 bool inlined_p = false;
4893
4894 /* Clear out ID. */
4895 memset (&id, 0, sizeof (id));
4896
4897 id.src_node = id.dst_node = cgraph_node::get (fn);
4898 gcc_assert (id.dst_node->definition);
4899 id.dst_fn = fn;
4900 /* Or any functions that aren't finished yet. */
4901 if (current_function_decl)
4902 id.dst_fn = current_function_decl;
4903
4904 id.copy_decl = copy_decl_maybe_to_var;
4905 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4906 id.transform_new_cfg = false;
4907 id.transform_return_to_modify = true;
4908 id.transform_parameter = true;
4909 id.transform_lang_insert_block = NULL;
4910 id.statements_to_fold = new hash_set<gimple>;
4911
4912 push_gimplify_context ();
4913
4914 /* We make no attempts to keep dominance info up-to-date. */
4915 free_dominance_info (CDI_DOMINATORS);
4916 free_dominance_info (CDI_POST_DOMINATORS);
4917
4918 /* Register specific gimple functions. */
4919 gimple_register_cfg_hooks ();
4920
4921 /* Reach the trees by walking over the CFG, and note the
4922 enclosing basic-blocks in the call edges. */
4923 /* We walk the blocks going forward, because inlined function bodies
4924 will split id->current_basic_block, and the new blocks will
4925 follow it; we'll trudge through them, processing their CALL_EXPRs
4926 along the way. */
4927 FOR_EACH_BB_FN (bb, cfun)
4928 inlined_p |= gimple_expand_calls_inline (bb, &id);
4929
4930 pop_gimplify_context (NULL);
4931
4932 #ifdef ENABLE_CHECKING
4933 {
4934 struct cgraph_edge *e;
4935
4936 id.dst_node->verify ();
4937
4938 /* Double check that we inlined everything we are supposed to inline. */
4939 for (e = id.dst_node->callees; e; e = e->next_callee)
4940 gcc_assert (e->inline_failed);
4941 }
4942 #endif
4943
4944 /* Fold queued statements. */
4945 fold_marked_statements (last, id.statements_to_fold);
4946 delete id.statements_to_fold;
4947
4948 gcc_assert (!id.debug_stmts.exists ());
4949
4950 /* If we didn't inline into the function there is nothing to do. */
4951 if (!inlined_p)
4952 return 0;
4953
4954 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4955 number_blocks (fn);
4956
4957 delete_unreachable_blocks_update_callgraph (&id);
4958 #ifdef ENABLE_CHECKING
4959 id.dst_node->verify ();
4960 #endif
4961
4962 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4963	   not possible yet - the IPA passes might make various functions not
4964	   throw and they don't care to proactively update local EH info. This is
4965	   done later in the fixup_cfg pass, which also executes the verification. */
4966 return (TODO_update_ssa
4967 | TODO_cleanup_cfg
4968 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4969 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4970 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4971 ? TODO_rebuild_frequencies : 0));
4972 }
4973
4974 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4975
4976 tree
4977 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4978 {
4979 enum tree_code code = TREE_CODE (*tp);
4980 enum tree_code_class cl = TREE_CODE_CLASS (code);
4981
4982 /* We make copies of most nodes. */
4983 if (IS_EXPR_CODE_CLASS (cl)
4984 || code == TREE_LIST
4985 || code == TREE_VEC
4986 || code == TYPE_DECL
4987 || code == OMP_CLAUSE)
4988 {
4989 /* Because the chain gets clobbered when we make a copy, we save it
4990 here. */
4991 tree chain = NULL_TREE, new_tree;
4992
4993 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4994 chain = TREE_CHAIN (*tp);
4995
4996 /* Copy the node. */
4997 new_tree = copy_node (*tp);
4998
4999 *tp = new_tree;
5000
5001 /* Now, restore the chain, if appropriate. That will cause
5002 walk_tree to walk into the chain as well. */
5003 if (code == PARM_DECL
5004 || code == TREE_LIST
5005 || code == OMP_CLAUSE)
5006 TREE_CHAIN (*tp) = chain;
5007
5008 /* For now, we don't update BLOCKs when we make copies. So, we
5009 have to nullify all BIND_EXPRs. */
5010 if (TREE_CODE (*tp) == BIND_EXPR)
5011 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5012 }
5013 else if (code == CONSTRUCTOR)
5014 {
5015 /* CONSTRUCTOR nodes need special handling because
5016 we need to duplicate the vector of elements. */
5017 tree new_tree;
5018
5019 new_tree = copy_node (*tp);
5020 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5021 *tp = new_tree;
5022 }
5023 else if (code == STATEMENT_LIST)
5024 /* We used to just abort on STATEMENT_LIST, but we can run into them
5025 with statement-expressions (c++/40975). */
5026 copy_statement_list (tp);
5027 else if (TREE_CODE_CLASS (code) == tcc_type)
5028 *walk_subtrees = 0;
5029 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5030 *walk_subtrees = 0;
5031 else if (TREE_CODE_CLASS (code) == tcc_constant)
5032 *walk_subtrees = 0;
5033 return NULL_TREE;
5034 }
5035
5036 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5037 information indicating to what new SAVE_EXPR this one should be mapped,
5038 use that one. Otherwise, create a new node and enter it in ST. FN is
5039 the function into which the copy will be placed. */
5040
5041 static void
5042 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5043 {
5044 tree *n;
5045 tree t;
5046
5047 /* See if we already encountered this SAVE_EXPR. */
5048 n = st->get (*tp);
5049
5050 /* If we didn't already remap this SAVE_EXPR, do so now. */
5051 if (!n)
5052 {
5053 t = copy_node (*tp);
5054
5055 /* Remember this SAVE_EXPR. */
5056 st->put (*tp, t);
5057 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5058 st->put (t, t);
5059 }
5060 else
5061 {
5062 /* We've already walked into this SAVE_EXPR; don't do it again. */
5063 *walk_subtrees = 0;
5064 t = *n;
5065 }
5066
5067 /* Replace this SAVE_EXPR with the copy. */
5068 *tp = t;
5069 }
5070
5071 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5072	   label, copies the declaration and enters it in the decl map in DATA (which
5073	   is really a 'copy_body_data *'). */
5074
5075 static tree
5076 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5077 bool *handled_ops_p ATTRIBUTE_UNUSED,
5078 struct walk_stmt_info *wi)
5079 {
5080 copy_body_data *id = (copy_body_data *) wi->info;
5081 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5082
5083 if (stmt)
5084 {
5085 tree decl = gimple_label_label (stmt);
5086
5087 /* Copy the decl and remember the copy. */
5088 insert_decl_map (id, decl, id->copy_decl (decl, id));
5089 }
5090
5091 return NULL_TREE;
5092 }
5093
5094
5095	/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5096	   Using the decl map pointed to by ST (really ID->decl_map),
5097 remaps all local declarations to appropriate replacements in gimple
5098 operands. */
5099
5100 static tree
5101 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5102 {
5103 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5104 copy_body_data *id = (copy_body_data *) wi->info;
5105 hash_map<tree, tree> *st = id->decl_map;
5106 tree *n;
5107 tree expr = *tp;
5108
5109 /* Only a local declaration (variable or label). */
5110 if ((TREE_CODE (expr) == VAR_DECL
5111 && !TREE_STATIC (expr))
5112 || TREE_CODE (expr) == LABEL_DECL)
5113 {
5114 /* Lookup the declaration. */
5115 n = st->get (expr);
5116
5117 /* If it's there, remap it. */
5118 if (n)
5119 *tp = *n;
5120 *walk_subtrees = 0;
5121 }
5122 else if (TREE_CODE (expr) == STATEMENT_LIST
5123 || TREE_CODE (expr) == BIND_EXPR
5124 || TREE_CODE (expr) == SAVE_EXPR)
5125 gcc_unreachable ();
5126 else if (TREE_CODE (expr) == TARGET_EXPR)
5127 {
5128 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5129 It's OK for this to happen if it was part of a subtree that
5130 isn't immediately expanded, such as operand 2 of another
5131 TARGET_EXPR. */
5132 if (!TREE_OPERAND (expr, 1))
5133 {
5134 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5135 TREE_OPERAND (expr, 3) = NULL_TREE;
5136 }
5137 }
5138
5139 /* Keep iterating. */
5140 return NULL_TREE;
5141 }
5142
5143
5144	/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5145	   Using the decl map in the copy_body_data pointed to by WI->INFO,
5146 remaps all local declarations to appropriate replacements in gimple
5147 statements. */
5148
5149 static tree
5150 replace_locals_stmt (gimple_stmt_iterator *gsip,
5151 bool *handled_ops_p ATTRIBUTE_UNUSED,
5152 struct walk_stmt_info *wi)
5153 {
5154 copy_body_data *id = (copy_body_data *) wi->info;
5155 gimple gs = gsi_stmt (*gsip);
5156
5157 if (gbind *stmt = dyn_cast <gbind *> (gs))
5158 {
5159 tree block = gimple_bind_block (stmt);
5160
5161 if (block)
5162 {
5163 remap_block (&block, id);
5164 gimple_bind_set_block (stmt, block);
5165 }
5166
5167 /* This will remap a lot of the same decls again, but this should be
5168 harmless. */
5169 if (gimple_bind_vars (stmt))
5170 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5171 NULL, id));
5172 }
5173
5174 /* Keep iterating. */
5175 return NULL_TREE;
5176 }
5177
5178
5179 /* Copies everything in SEQ and replaces variables and labels local to
5180 current_function_decl. */
5181
5182 gimple_seq
5183 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5184 {
5185 copy_body_data id;
5186 struct walk_stmt_info wi;
5187 gimple_seq copy;
5188
5189 /* There's nothing to do for NULL_TREE. */
5190 if (seq == NULL)
5191 return seq;
5192
5193 /* Set up ID. */
5194 memset (&id, 0, sizeof (id));
5195 id.src_fn = current_function_decl;
5196 id.dst_fn = current_function_decl;
5197 id.decl_map = new hash_map<tree, tree>;
5198 id.debug_map = NULL;
5199
5200 id.copy_decl = copy_decl_no_change;
5201 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5202 id.transform_new_cfg = false;
5203 id.transform_return_to_modify = false;
5204 id.transform_parameter = false;
5205 id.transform_lang_insert_block = NULL;
5206
5207 /* Walk the tree once to find local labels. */
5208 memset (&wi, 0, sizeof (wi));
5209 hash_set<tree> visited;
5210 wi.info = &id;
5211 wi.pset = &visited;
5212 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5213
5214 copy = gimple_seq_copy (seq);
5215
5216 /* Walk the copy, remapping decls. */
5217 memset (&wi, 0, sizeof (wi));
5218 wi.info = &id;
5219 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5220
5221 /* Clean up. */
5222 delete id.decl_map;
5223 if (id.debug_map)
5224 delete id.debug_map;
5225 if (id.dependence_map)
5226 {
5227 delete id.dependence_map;
5228 id.dependence_map = NULL;
5229 }
5230
5231 return copy;
5232 }
5233
5234
5235 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5236
5237 static tree
5238 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5239 {
5240 if (*tp == data)
5241 return (tree) data;
5242 else
5243 return NULL;
5244 }
5245
5246 DEBUG_FUNCTION bool
5247 debug_find_tree (tree top, tree search)
5248 {
5249 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5250 }
5251
5252
5253 /* Declare the variables created by the inliner. Add all the variables in
5254	   VARS to BLOCK. */
5255
5256 static void
5257 declare_inline_vars (tree block, tree vars)
5258 {
5259 tree t;
5260 for (t = vars; t; t = DECL_CHAIN (t))
5261 {
5262 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5263 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5264 add_local_decl (cfun, t);
5265 }
5266
5267 if (block)
5268 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5269 }
5270
5271	/* Finish up the copying of DECL into COPY. The DECL originally lived in
5272	   ID->src_fn, but the copy will live in ID->dst_fn; fix up the bits of the
5273	   copy that depend on that. */
5274
5275 static tree
5276 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5277 {
5278   /* Don't generate debug information for the copy if we wouldn't have
5279      generated it for the original either.  */
5280 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5281 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5282
5283 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5284 declaration inspired this copy. */
5285 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5286
5287 /* The new variable/label has no RTL, yet. */
5288 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5289 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5290 SET_DECL_RTL (copy, 0);
5291
5292 /* These args would always appear unused, if not for this. */
5293 TREE_USED (copy) = 1;
5294
5295 /* Set the context for the new declaration. */
5296 if (!DECL_CONTEXT (decl))
5297 /* Globals stay global. */
5298 ;
5299 else if (DECL_CONTEXT (decl) != id->src_fn)
5300 /* Things that weren't in the scope of the function we're inlining
5301 from aren't in the scope we're inlining to, either. */
5302 ;
5303 else if (TREE_STATIC (decl))
5304 /* Function-scoped static variables should stay in the original
5305 function. */
5306 ;
5307 else
5308 /* Ordinary automatic local variables are now in the scope of the
5309 new function. */
5310 DECL_CONTEXT (copy) = id->dst_fn;
5311
5312 return copy;
5313 }
5314
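/* Copy the PARM_DECL or RESULT_DECL DECL into a fresh VAR_DECL for
   ID->dst_fn, carrying over its type, addressability, constness,
   volatility and points-to information.  */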
5315 static tree
5316 copy_decl_to_var (tree decl, copy_body_data *id)
5317 {
5318 tree copy, type;
5319
5320 gcc_assert (TREE_CODE (decl) == PARM_DECL
5321 || TREE_CODE (decl) == RESULT_DECL);
5322
5323 type = TREE_TYPE (decl);
5324
5325 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5326 VAR_DECL, DECL_NAME (decl), type);
5327 if (DECL_PT_UID_SET_P (decl))
5328 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5329 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5330 TREE_READONLY (copy) = TREE_READONLY (decl);
5331 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5332 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5333
5334 return copy_decl_for_dup_finish (id, decl, copy);
5335 }
5336
5337 /* Like copy_decl_to_var, but create a return slot object instead of a
5338 pointer variable for return by invisible reference. */
5339
5340 static tree
5341 copy_result_decl_to_var (tree decl, copy_body_data *id)
5342 {
5343 tree copy, type;
5344
5345 gcc_assert (TREE_CODE (decl) == PARM_DECL
5346 || TREE_CODE (decl) == RESULT_DECL);
5347
5348 type = TREE_TYPE (decl);
5349 if (DECL_BY_REFERENCE (decl))
5350 type = TREE_TYPE (type);
5351
5352 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5353 VAR_DECL, DECL_NAME (decl), type);
5354 if (DECL_PT_UID_SET_P (decl))
5355 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5356 TREE_READONLY (copy) = TREE_READONLY (decl);
5357 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5358 if (!DECL_BY_REFERENCE (decl))
5359 {
5360 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5361 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5362 }
5363
5364 return copy_decl_for_dup_finish (id, decl, copy);
5365 }
5366
5367 tree
5368 copy_decl_no_change (tree decl, copy_body_data *id)
5369 {
5370 tree copy;
5371
5372 copy = copy_node (decl);
5373
5374 /* The COPY is not abstract; it will be generated in DST_FN. */
5375 DECL_ABSTRACT_P (copy) = false;
5376 lang_hooks.dup_lang_specific_decl (copy);
5377
5378 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5379 been taken; it's for internal bookkeeping in expand_goto_internal. */
5380 if (TREE_CODE (copy) == LABEL_DECL)
5381 {
5382 TREE_ADDRESSABLE (copy) = 0;
5383 LABEL_DECL_UID (copy) = -1;
5384 }
5385
5386 return copy_decl_for_dup_finish (id, decl, copy);
5387 }
5388
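/* Copy DECL for use in ID->dst_fn: PARM_DECLs and RESULT_DECLs are
   turned into VAR_DECLs, everything else is copied unchanged.  */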
5389 static tree
5390 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5391 {
5392 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5393 return copy_decl_to_var (decl, id);
5394 else
5395 return copy_decl_no_change (decl, id);
5396 }
5397
5398 /* Return a copy of the function's argument tree. */
5399 static tree
5400 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5401 bitmap args_to_skip, tree *vars)
5402 {
5403 tree arg, *parg;
5404 tree new_parm = NULL;
5405 int i = 0;
5406
5407 parg = &new_parm;
5408
5409 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5410 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5411 {
5412 tree new_tree = remap_decl (arg, id);
5413 if (TREE_CODE (new_tree) != PARM_DECL)
5414 new_tree = id->copy_decl (arg, id);
5415 lang_hooks.dup_lang_specific_decl (new_tree);
5416 *parg = new_tree;
5417 parg = &DECL_CHAIN (new_tree);
5418 }
5419 else if (!id->decl_map->get (arg))
5420 {
5421 	  /* Make an equivalent VAR_DECL.  If the argument was used
5422 	     as a temporary variable later in the function, its uses will
5423 	     be replaced by this local variable.  */
5424 tree var = copy_decl_to_var (arg, id);
5425 insert_decl_map (id, arg, var);
5426 /* Declare this new variable. */
5427 DECL_CHAIN (var) = *vars;
5428 *vars = var;
5429 }
5430 return new_parm;
5431 }
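
/* Illustrative setup of ARGS_TO_SKIP (hypothetical caller): to drop
   the first parameter when versioning, one could build

     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, 0);

   and pass it down; the skipped PARM_DECL is then remapped to a local
   VAR_DECL chained onto *VARS instead of appearing in the returned
   parameter list.  */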
5432
5433 /* Return a copy of the function's static chain. */
5434 static tree
5435 copy_static_chain (tree static_chain, copy_body_data * id)
5436 {
5437 tree *chain_copy, *pvar;
5438
5439 chain_copy = &static_chain;
5440 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5441 {
5442 tree new_tree = remap_decl (*pvar, id);
5443 lang_hooks.dup_lang_specific_decl (new_tree);
5444 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5445 *pvar = new_tree;
5446 }
5447 return static_chain;
5448 }
5449
5450 /* Return true if the function is allowed to be versioned.
5451 This is a guard for the versioning functionality. */
5452
5453 bool
5454 tree_versionable_function_p (tree fndecl)
5455 {
5456 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5457 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5458 }
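
/* For example, versioning of a particular function can be vetoed from
   user source (illustrative snippet):

     __attribute__ ((noclone)) int
     get_id (void)
     {
       return 42;
     }

   tree_versionable_function_p returns false for such a decl.  */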
5459
5460 /* Delete all unreachable basic blocks and update the callgraph.
5461    Doing so is somewhat nontrivial because we need to update all clones and
5462    remove inline functions that become unreachable.  */
5463
5464 static bool
5465 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5466 {
5467 bool changed = false;
5468 basic_block b, next_bb;
5469
5470 find_unreachable_blocks ();
5471
5472 /* Delete all unreachable basic blocks. */
5473
5474 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5475 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5476 {
5477 next_bb = b->next_bb;
5478
5479 if (!(b->flags & BB_REACHABLE))
5480 {
5481 gimple_stmt_iterator bsi;
5482
5483 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5484 {
5485 struct cgraph_edge *e;
5486 struct cgraph_node *node;
5487
5488 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5489
5490 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5491 	      && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5492 {
5493 if (!e->inline_failed)
5494 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5495 else
5496 e->remove ();
5497 }
5498 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5499 && id->dst_node->clones)
5500 for (node = id->dst_node->clones; node != id->dst_node;)
5501 {
5502 node->remove_stmt_references (gsi_stmt (bsi));
5503 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5504 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5505 {
5506 if (!e->inline_failed)
5507 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5508 else
5509 e->remove ();
5510 }
5511
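		  /* Advance NODE in a preorder walk of the clone tree:
		     children first, then siblings, then climb back up
		     to the nearest ancestor with an unvisited sibling.  */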
5512 if (node->clones)
5513 node = node->clones;
5514 else if (node->next_sibling_clone)
5515 node = node->next_sibling_clone;
5516 else
5517 {
5518 while (node != id->dst_node && !node->next_sibling_clone)
5519 node = node->clone_of;
5520 if (node != id->dst_node)
5521 node = node->next_sibling_clone;
5522 }
5523 }
5524 }
5525 delete_basic_block (b);
5526 changed = true;
5527 }
5528 }
5529
5530 return changed;
5531 }
5532
5533 /* Update clone info after duplication. */
5534
5535 static void
5536 update_clone_info (copy_body_data * id)
5537 {
5538 struct cgraph_node *node;
5539 if (!id->dst_node->clones)
5540 return;
5541 for (node = id->dst_node->clones; node != id->dst_node;)
5542 {
5543 /* First update replace maps to match the new body. */
5544 if (node->clone.tree_map)
5545 {
5546 unsigned int i;
5547 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5548 {
5549 struct ipa_replace_map *replace_info;
5550 replace_info = (*node->clone.tree_map)[i];
5551 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5552 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5553 }
5554 }
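      /* Advance NODE in a preorder walk of the clone tree, exactly as
	 in delete_unreachable_blocks_update_callgraph above.  */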
5555 if (node->clones)
5556 node = node->clones;
5557 else if (node->next_sibling_clone)
5558 node = node->next_sibling_clone;
5559 else
5560 {
5561 while (node != id->dst_node && !node->next_sibling_clone)
5562 node = node->clone_of;
5563 if (node != id->dst_node)
5564 node = node->next_sibling_clone;
5565 }
5566 }
5567 }
5568
5569 /* Create a copy of a function's tree.
5570 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5571 of the original function and the new copied function
5572 respectively. In case we want to replace a DECL
5573 tree with another tree while duplicating the function's
5574 body, TREE_MAP represents the mapping between these
5575 trees. If UPDATE_CLONES is set, the call_stmt fields
5576 of edges of clones of the function will be updated.
5577
5578    If non-NULL, ARGS_TO_SKIP determines which function parameters to
5579    remove from the new version.
5580    If SKIP_RETURN is true, the new version will return void.
5581    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5582    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
5584 void
5585 tree_function_versioning (tree old_decl, tree new_decl,
5586 vec<ipa_replace_map *, va_gc> *tree_map,
5587 bool update_clones, bitmap args_to_skip,
5588 bool skip_return, bitmap blocks_to_copy,
5589 basic_block new_entry)
5590 {
5591 struct cgraph_node *old_version_node;
5592 struct cgraph_node *new_version_node;
5593 copy_body_data id;
5594 tree p;
5595 unsigned i;
5596 struct ipa_replace_map *replace_info;
5597 basic_block old_entry_block, bb;
5598 auto_vec<gimple, 10> init_stmts;
5599 tree vars = NULL_TREE;
5600
5601 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5602 && TREE_CODE (new_decl) == FUNCTION_DECL);
5603 DECL_POSSIBLY_INLINED (old_decl) = 1;
5604
5605 old_version_node = cgraph_node::get (old_decl);
5606 gcc_checking_assert (old_version_node);
5607 new_version_node = cgraph_node::get (new_decl);
5608 gcc_checking_assert (new_version_node);
5609
5610 /* Copy over debug args. */
5611 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5612 {
5613 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5614 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5615 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5616 old_debug_args = decl_debug_args_lookup (old_decl);
5617 if (old_debug_args)
5618 {
5619 new_debug_args = decl_debug_args_insert (new_decl);
5620 *new_debug_args = vec_safe_copy (*old_debug_args);
5621 }
5622 }
5623
5624 /* Output the inlining info for this abstract function, since it has been
5625 inlined. If we don't do this now, we can lose the information about the
5626 variables in the function when the blocks get blown away as soon as we
5627 remove the cgraph node. */
5628 (*debug_hooks->outlining_inline_function) (old_decl);
5629
5630 DECL_ARTIFICIAL (new_decl) = 1;
5631 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5632 if (DECL_ORIGIN (old_decl) == old_decl)
5633 old_version_node->used_as_abstract_origin = true;
5634 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5635
5636 /* Prepare the data structures for the tree copy. */
5637 memset (&id, 0, sizeof (id));
5638
5639   /* Record statements that will need folding after the body is copied.  */
5640 id.statements_to_fold = new hash_set<gimple>;
5641
5642 id.decl_map = new hash_map<tree, tree>;
5643 id.debug_map = NULL;
5644 id.src_fn = old_decl;
5645 id.dst_fn = new_decl;
5646 id.src_node = old_version_node;
5647 id.dst_node = new_version_node;
5648 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5649 id.blocks_to_copy = blocks_to_copy;
5650
5651 id.copy_decl = copy_decl_no_change;
5652 id.transform_call_graph_edges
5653 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5654 id.transform_new_cfg = true;
5655 id.transform_return_to_modify = false;
5656 id.transform_parameter = false;
5657 id.transform_lang_insert_block = NULL;
5658
5659 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5660 (DECL_STRUCT_FUNCTION (old_decl));
5661 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5662 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5663 initialize_cfun (new_decl, old_decl,
5664 old_entry_block->count);
5665 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5666 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5667 = id.src_cfun->gimple_df->ipa_pta;
5668
5669 /* Copy the function's static chain. */
5670 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5671 if (p)
5672 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5673 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5674 &id);
5675
5676 /* If there's a tree_map, prepare for substitution. */
5677 if (tree_map)
5678 for (i = 0; i < tree_map->length (); i++)
5679 {
5680 gimple init;
5681 replace_info = (*tree_map)[i];
5682 if (replace_info->replace_p)
5683 {
5684 if (!replace_info->old_tree)
5685 {
5686 int i = replace_info->parm_num;
5687 tree parm;
5688 tree req_type;
5689
5690 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5691 		i--;
5692 replace_info->old_tree = parm;
5693 req_type = TREE_TYPE (parm);
5694 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5695 {
5696 if (fold_convertible_p (req_type, replace_info->new_tree))
5697 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5698 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5699 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5700 else
5701 {
5702 if (dump_file)
5703 {
5704 fprintf (dump_file, " const ");
5705 print_generic_expr (dump_file, replace_info->new_tree, 0);
5706 fprintf (dump_file, " can't be converted to param ");
5707 print_generic_expr (dump_file, parm, 0);
5708 fprintf (dump_file, "\n");
5709 }
5710 replace_info->old_tree = NULL;
5711 }
5712 }
5713 }
5714 else
5715 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5716 if (replace_info->old_tree)
5717 {
5718 init = setup_one_parameter (&id, replace_info->old_tree,
5719 replace_info->new_tree, id.src_fn,
5720 NULL,
5721 &vars);
5722 if (init)
5723 init_stmts.safe_push (init);
5724 }
5725 }
5726 }
5727 /* Copy the function's arguments. */
5728 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5729 DECL_ARGUMENTS (new_decl) =
5730 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5731 args_to_skip, &vars);
5732
5733 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5734 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5735
5736 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5737
5738 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5739 /* Add local vars. */
5740 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5741
5742 if (DECL_RESULT (old_decl) == NULL_TREE)
5743 ;
5744 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5745 {
5746 DECL_RESULT (new_decl)
5747 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5748 RESULT_DECL, NULL_TREE, void_type_node);
5749 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5750 cfun->returns_struct = 0;
5751 cfun->returns_pcc_struct = 0;
5752 }
5753 else
5754 {
5755 tree old_name;
5756 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5757 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5758 if (gimple_in_ssa_p (id.src_cfun)
5759 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5760 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5761 {
5762 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5763 insert_decl_map (&id, old_name, new_name);
5764 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5765 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5766 }
5767 }
5768
5769   /* Set up the destination function's loop tree.  */
5770 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5771 {
5772 cfun->curr_properties &= ~PROP_loops;
5773 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5774 cfun->curr_properties |= PROP_loops;
5775 }
5776
5777   /* Copy the function's body.  */
5778 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5779 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5780 new_entry);
5781
5782 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5783 number_blocks (new_decl);
5784
5785 /* We want to create the BB unconditionally, so that the addition of
5786 debug stmts doesn't affect BB count, which may in the end cause
5787 codegen differences. */
5788 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5789 while (init_stmts.length ())
5790 insert_init_stmt (&id, bb, init_stmts.pop ());
5791 update_clone_info (&id);
5792
5793 /* Remap the nonlocal_goto_save_area, if any. */
5794 if (cfun->nonlocal_goto_save_area)
5795 {
5796 struct walk_stmt_info wi;
5797
5798 memset (&wi, 0, sizeof (wi));
5799 wi.info = &id;
5800 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5801 }
5802
5803 /* Clean up. */
5804 delete id.decl_map;
5805 if (id.debug_map)
5806 delete id.debug_map;
5807 free_dominance_info (CDI_DOMINATORS);
5808 free_dominance_info (CDI_POST_DOMINATORS);
5809
5810 fold_marked_statements (0, id.statements_to_fold);
5811 delete id.statements_to_fold;
5812 fold_cond_expr_cond ();
5813 delete_unreachable_blocks_update_callgraph (&id);
5814 if (id.dst_node->definition)
5815 cgraph_edge::rebuild_references ();
5816 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5817 {
5818 calculate_dominance_info (CDI_DOMINATORS);
5819 fix_loop_structure (NULL);
5820 }
5821 update_ssa (TODO_update_ssa);
5822
5823 /* After partial cloning we need to rescale frequencies, so they are
5824 within proper range in the cloned function. */
5825 if (new_entry)
5826 {
5827 struct cgraph_edge *e;
5828 rebuild_frequencies ();
5829
5830 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5831 for (e = new_version_node->callees; e; e = e->next_callee)
5832 {
5833 basic_block bb = gimple_bb (e->call_stmt);
5834 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5835 bb);
5836 e->count = bb->count;
5837 }
5838 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5839 {
5840 basic_block bb = gimple_bb (e->call_stmt);
5841 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5842 bb);
5843 e->count = bb->count;
5844 }
5845 }
5846
5847 free_dominance_info (CDI_DOMINATORS);
5848 free_dominance_info (CDI_POST_DOMINATORS);
5849
5850 gcc_assert (!id.debug_stmts.exists ());
5851 pop_cfun ();
5852 return;
5853 }
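
/* A rough caller sketch (hypothetical; real callers sit in the IPA
   cloning machinery and perform considerably more cgraph bookkeeping):

     tree new_decl = copy_node (old_decl);
     cgraph_node::create (new_decl);
     tree_function_versioning (old_decl, new_decl,
			       NULL, false,	// no tree_map, no clones
			       NULL, false,	// keep all args and return
			       NULL, NULL);	// copy all blocks, old entry

   On return NEW_DECL carries a complete, independent copy of the body
   of OLD_DECL.  */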
5854
5855 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to
5856    integrate the callee and return the inlined body on success.  */
5857
5858 tree
5859 maybe_inline_call_in_expr (tree exp)
5860 {
5861 tree fn = get_callee_fndecl (exp);
5862
5863 /* We can only try to inline "const" functions. */
5864 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5865 {
5866 call_expr_arg_iterator iter;
5867 copy_body_data id;
5868 tree param, arg, t;
5869 hash_map<tree, tree> decl_map;
5870
5871 /* Remap the parameters. */
5872 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5873 param;
5874 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5875 decl_map.put (param, arg);
5876
5877 memset (&id, 0, sizeof (id));
5878 id.src_fn = fn;
5879 id.dst_fn = current_function_decl;
5880 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5881 id.decl_map = &decl_map;
5882
5883 id.copy_decl = copy_decl_no_change;
5884 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5885 id.transform_new_cfg = false;
5886 id.transform_return_to_modify = true;
5887 id.transform_parameter = true;
5888 id.transform_lang_insert_block = NULL;
5889
5890 /* Make sure not to unshare trees behind the front-end's back
5891 since front-end specific mechanisms may rely on sharing. */
5892 id.regimplify = false;
5893 id.do_not_unshare = true;
5894
5895 /* We're not inside any EH region. */
5896 id.eh_lp_nr = 0;
5897
5898 t = copy_tree_body (&id);
5899
5900 /* We can only return something suitable for use in a GENERIC
5901 expression tree. */
5902 if (TREE_CODE (t) == MODIFY_EXPR)
5903 return TREE_OPERAND (t, 1);
5904 }
5905
5906 return NULL_TREE;
5907 }
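
/* For instance, given the (illustrative) source

     __attribute__ ((const)) static int
     square (int x)
     {
       return x * x;
     }

   a GENERIC call tree for square (3) can be replaced by the copied
   body x * x with x remapped to 3, provided the front end still has
   DECL_SAVED_TREE for square.  */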
5908
5909 /* Duplicate a type, fields and all. */
5910
5911 tree
5912 build_duplicate_type (tree type)
5913 {
5914 struct copy_body_data id;
5915
5916 memset (&id, 0, sizeof (id));
5917 id.src_fn = current_function_decl;
5918 id.dst_fn = current_function_decl;
5919 id.src_cfun = cfun;
5920 id.decl_map = new hash_map<tree, tree>;
5921 id.debug_map = NULL;
5922 id.copy_decl = copy_decl_no_change;
5923
5924 type = remap_type_1 (type, &id);
5925
5926 delete id.decl_map;
5927 if (id.debug_map)
5928 delete id.debug_map;
5929
5930 TYPE_CANONICAL (type) = type;
5931
5932 return type;
5933 }
5934
5935 /* Unshare the entire DECL_SAVED_TREE of FN and return the copy; the
5936    remapped parameters and RESULT_DECL are returned through PARMS and
5937    RESULT.  Used by C++ constexpr evaluation.  */
5938
5939 tree
5940 copy_fn (tree fn, tree& parms, tree& result)
5941 {
5942 copy_body_data id;
5943 tree param;
5944 hash_map<tree, tree> decl_map;
5945
5946 tree *p = &parms;
5947 *p = NULL_TREE;
5948
5949 memset (&id, 0, sizeof (id));
5950 id.src_fn = fn;
5951 id.dst_fn = current_function_decl;
5952 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5953 id.decl_map = &decl_map;
5954
5955 id.copy_decl = copy_decl_no_change;
5956 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5957 id.transform_new_cfg = false;
5958 id.transform_return_to_modify = false;
5959 id.transform_parameter = true;
5960 id.transform_lang_insert_block = NULL;
5961
5962 /* Make sure not to unshare trees behind the front-end's back
5963 since front-end specific mechanisms may rely on sharing. */
5964 id.regimplify = false;
5965 id.do_not_unshare = true;
5966
5967 /* We're not inside any EH region. */
5968 id.eh_lp_nr = 0;
5969
5970 /* Remap the parameters and result and return them to the caller. */
5971 for (param = DECL_ARGUMENTS (fn);
5972 param;
5973 param = DECL_CHAIN (param))
5974 {
5975 *p = remap_decl (param, &id);
5976 p = &DECL_CHAIN (*p);
5977 }
5978
5979 if (DECL_RESULT (fn))
5980 result = remap_decl (DECL_RESULT (fn), &id);
5981 else
5982 result = NULL_TREE;
5983
5984 return copy_tree_body (&id);
5985 }
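
/* For example, while evaluating the (illustrative) C++ source

     constexpr int
     twice (int n)
     {
       return n + n;
     }

   at compile time, the front end calls copy_fn to obtain an unshared
   copy of the body of twice together with the remapped PARM_DECL for
   n and the RESULT_DECL, then evaluates the copy with n bound to the
   actual argument.  */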