tree-inline.c (remap_location): New function extracted from...
1 /* Tree inlining.
2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63
64 /* I'm not really happy about this, but we need to handle gimple and
65 non-gimple trees. */
66
67 /* Inlining, Cloning, Versioning, Parallelization
68
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements are adjusted accordingly.
75
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
80
81 Versioning: a function body is duplicated and the result is a new
82 function rather than into blocks of an existing function as with
83 inlining. Some parameters will become constants.
84
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
88
89 All of these will simultaneously look up any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined), those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
96
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
98
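/* A minimal sketch (not actual GCC code) of how a caller of these routines
   might seed the remapping state; the fields shown follow the copy_body_data
   uses visible in this file, and OLD_FNDECL, NEW_FNDECL, OLD_PARM and NEW_VAR
   are hypothetical placeholders:

     copy_body_data id;
     memset (&id, 0, sizeof (id));
     id.src_fn = old_fndecl;			   /* body being copied */
     id.dst_fn = new_fndecl;			   /* body receiving the copy */
     id.src_cfun = DECL_STRUCT_FUNCTION (old_fndecl);
     id.decl_map = new hash_map<tree, tree>;
     id.copy_decl = copy_decl_no_change;	   /* policy for unmapped decls */
     insert_decl_map (&id, old_parm, new_var);	   /* pre-seed one mapping */

   After this, remap_decl, remap_type, remap_ssa_name and friends consult and
   extend id.decl_map on demand while the body is walked.  */
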
99 /* To Do:
100
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
106 are not needed.
107
108 o Provide heuristics to clamp inlining of recursive template
109 calls? */
110
111
112 /* Weights that estimate_num_insns uses to estimate the size of the
113 produced code. */
114
115 eni_weights eni_size_weights;
116
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
119
120 eni_weights eni_time_weights;
121
122 /* Prototypes. */
123
124 static tree declare_return_variable (copy_body_data *, tree, tree,
125 basic_block);
126 static void remap_block (tree *, copy_body_data *);
127 static void copy_bind_expr (tree *, int *, copy_body_data *);
128 static void declare_inline_vars (tree, tree);
129 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
130 static void prepend_lexical_block (tree current_block, tree new_block);
131 static tree copy_decl_to_var (tree, copy_body_data *);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, this mapping is used for more than that. */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
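  /* E.g. after mapping some OLD_PARM to NEW_VAR we also record
     NEW_VAR -> NEW_VAR, so a second walk over the copied tree finds
     NEW_VAR already "remapped" to itself rather than copying it again.  */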
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
151
152 /* Insert a tree->tree mapping into ID's debug map. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
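      /* For a default definition of a PARM_DECL, when the destination has a
	 single clean entry block, synthesize a DEBUG_EXPR_DECL bound to the
	 (remapped) parameter with a debug source bind, so debug stmts can
	 still refer to the incoming parameter value.  */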
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL)
212 {
213 processing_debug_stmt = -1;
214 return name;
215 }
216 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
217 DECL_ARTIFICIAL (vexpr) = 1;
218 TREE_TYPE (vexpr) = TREE_TYPE (name);
219 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
220 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
221 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
222 return vexpr;
223 }
224
225 processing_debug_stmt = -1;
226 return name;
227 }
228
229 /* Remap anonymous SSA names or SSA names of anonymous decls. */
230 var = SSA_NAME_VAR (name);
231 if (!var
232 || (!SSA_NAME_IS_DEFAULT_DEF (name)
233 && VAR_P (var)
234 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
235 && DECL_ARTIFICIAL (var)
236 && DECL_IGNORED_P (var)
237 && !DECL_NAME (var)))
238 {
239 struct ptr_info_def *pi;
240 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
241 if (!var && SSA_NAME_IDENTIFIER (name))
242 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
243 insert_decl_map (id, name, new_tree);
244 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
245 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
246 /* At least IPA points-to info can be directly transferred. */
247 if (id->src_cfun->gimple_df
248 && id->src_cfun->gimple_df->ipa_pta
249 && POINTER_TYPE_P (TREE_TYPE (name))
250 && (pi = SSA_NAME_PTR_INFO (name))
251 && !pi->pt.anything)
252 {
253 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
254 new_pi->pt = pi->pt;
255 }
256 return new_tree;
257 }
258
259 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
260 in copy_bb. */
261 new_tree = remap_decl (var, id);
262
263 /* We might've substituted a constant or another SSA_NAME for
264 the variable.
265 
266 Replace the SSA name representing the RESULT_DECL by the variable during
267 inlining: this saves us from needing to introduce a PHI node in case the
268 return value is only partly initialized. */
269 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
270 && (!SSA_NAME_VAR (name)
271 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
272 || !id->transform_return_to_modify))
273 {
274 struct ptr_info_def *pi;
275 new_tree = make_ssa_name (new_tree);
276 insert_decl_map (id, name, new_tree);
277 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
278 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
279 /* At least IPA points-to info can be directly transferred. */
280 if (id->src_cfun->gimple_df
281 && id->src_cfun->gimple_df->ipa_pta
282 && POINTER_TYPE_P (TREE_TYPE (name))
283 && (pi = SSA_NAME_PTR_INFO (name))
284 && !pi->pt.anything)
285 {
286 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
287 new_pi->pt = pi->pt;
288 }
289 if (SSA_NAME_IS_DEFAULT_DEF (name))
290 {
291 /* By inlining a function having an uninitialized variable, we might
292 extend its lifetime (the variable might get reused). This causes an
293 ICE in case we end up extending the lifetime of an SSA name across an
294 abnormal edge, and it also increases register pressure.
295 
296 We simply initialize all uninitialized vars by 0, except
297 when we are inlining into the very first BB. We could avoid
298 this for all BBs that are not inside strongly connected
299 regions of the CFG, but this is expensive to test. */
300 if (id->entry_bb
301 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
302 && (!SSA_NAME_VAR (name)
303 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
304 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
305 0)->dest
306 || EDGE_COUNT (id->entry_bb->preds) != 1))
307 {
308 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
309 gimple *init_stmt;
310 tree zero = build_zero_cst (TREE_TYPE (new_tree));
311
312 init_stmt = gimple_build_assign (new_tree, zero);
313 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
314 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
315 }
316 else
317 {
318 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
319 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
320 }
321 }
322 }
323 else
324 insert_decl_map (id, name, new_tree);
325 return new_tree;
326 }
327
328 /* Remap DECL during the copying of the BLOCK tree for the function. */
329
330 tree
331 remap_decl (tree decl, copy_body_data *id)
332 {
333 tree *n;
334
335 /* We only remap local variables in the current function. */
336
337 /* See if we have remapped this declaration. */
338
339 n = id->decl_map->get (decl);
340
341 if (!n && processing_debug_stmt)
342 {
343 processing_debug_stmt = -1;
344 return decl;
345 }
346
347 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
348 necessary DECLs have already been remapped and we do not want to duplicate
349 a decl coming from outside of the sequence we are copying. */
350 if (!n
351 && id->prevent_decl_creation_for_types
352 && id->remapping_type_depth > 0
353 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
354 return decl;
355
356 /* If we didn't already have an equivalent for this declaration, create one
357 now. */
358 if (!n)
359 {
360 /* Make a copy of the variable or label. */
361 tree t = id->copy_decl (decl, id);
362
363 /* Remember it, so that if we encounter this local entity again
364 we can reuse this copy. Do this early because remap_type may
365 need this decl for TYPE_STUB_DECL. */
366 insert_decl_map (id, decl, t);
367
368 if (!DECL_P (t))
369 return t;
370
371 /* Remap types, if necessary. */
372 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
373 if (TREE_CODE (t) == TYPE_DECL)
374 {
375 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
376
377 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
378 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
379 is not set on the TYPE_DECL, for example in LTO mode. */
380 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
381 {
382 tree x = build_variant_type_copy (TREE_TYPE (t));
383 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
384 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
385 DECL_ORIGINAL_TYPE (t) = x;
386 }
387 }
388
389 /* Remap sizes as necessary. */
390 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
391 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
392
393 /* If fields, do likewise for offset and qualifier. */
394 if (TREE_CODE (t) == FIELD_DECL)
395 {
396 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
397 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
398 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
399 }
400
401 return t;
402 }
403
404 if (id->do_not_unshare)
405 return *n;
406 else
407 return unshare_expr (*n);
408 }
409
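/* Worker for remap_type.  Build a remapped copy of TYPE, register it in
   ID's decl map, and remap the parts (pointed-to type, fields, domain,
   sizes) that may refer to entities local to the source function.  */
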
410 static tree
411 remap_type_1 (tree type, copy_body_data *id)
412 {
413 tree new_tree, t;
414
415 /* We do need a copy. Build and register it now. If this is a pointer or
416 reference type, remap the designated type and make a new pointer or
417 reference type. */
418 if (TREE_CODE (type) == POINTER_TYPE)
419 {
420 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
421 TYPE_MODE (type),
422 TYPE_REF_CAN_ALIAS_ALL (type));
423 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
424 new_tree = build_type_attribute_qual_variant (new_tree,
425 TYPE_ATTRIBUTES (type),
426 TYPE_QUALS (type));
427 insert_decl_map (id, type, new_tree);
428 return new_tree;
429 }
430 else if (TREE_CODE (type) == REFERENCE_TYPE)
431 {
432 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
433 TYPE_MODE (type),
434 TYPE_REF_CAN_ALIAS_ALL (type));
435 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
436 new_tree = build_type_attribute_qual_variant (new_tree,
437 TYPE_ATTRIBUTES (type),
438 TYPE_QUALS (type));
439 insert_decl_map (id, type, new_tree);
440 return new_tree;
441 }
442 else
443 new_tree = copy_node (type);
444
445 insert_decl_map (id, type, new_tree);
446
447 /* This is a new type, not a copy of an old type. Need to reassociate
448 variants. We can handle everything except the main variant lazily. */
449 t = TYPE_MAIN_VARIANT (type);
450 if (type != t)
451 {
452 t = remap_type (t, id);
453 TYPE_MAIN_VARIANT (new_tree) = t;
454 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
455 TYPE_NEXT_VARIANT (t) = new_tree;
456 }
457 else
458 {
459 TYPE_MAIN_VARIANT (new_tree) = new_tree;
460 TYPE_NEXT_VARIANT (new_tree) = NULL;
461 }
462
463 if (TYPE_STUB_DECL (type))
464 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
465
466 /* Lazily create pointer and reference types. */
467 TYPE_POINTER_TO (new_tree) = NULL;
468 TYPE_REFERENCE_TO (new_tree) = NULL;
469
470 /* Copy all types that may contain references to local variables; be sure to
471 preserve sharing between the type and its main variant when possible. */
472 switch (TREE_CODE (new_tree))
473 {
474 case INTEGER_TYPE:
475 case REAL_TYPE:
476 case FIXED_POINT_TYPE:
477 case ENUMERAL_TYPE:
478 case BOOLEAN_TYPE:
479 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
480 {
481 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
482 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
483
484 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
485 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
486 }
487 else
488 {
489 t = TYPE_MIN_VALUE (new_tree);
490 if (t && TREE_CODE (t) != INTEGER_CST)
491 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
492
493 t = TYPE_MAX_VALUE (new_tree);
494 if (t && TREE_CODE (t) != INTEGER_CST)
495 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
496 }
497 return new_tree;
498
499 case FUNCTION_TYPE:
500 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
501 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
502 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
503 else
504 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
507 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
508 else
509 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
510 return new_tree;
511
512 case ARRAY_TYPE:
513 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
514 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
515 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
516 else
517 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
518
519 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
520 {
521 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
522 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
523 }
524 else
525 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
526 break;
527
528 case RECORD_TYPE:
529 case UNION_TYPE:
530 case QUAL_UNION_TYPE:
531 if (TYPE_MAIN_VARIANT (type) != type
532 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
533 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
534 else
535 {
536 tree f, nf = NULL;
537
538 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
539 {
540 t = remap_decl (f, id);
541 DECL_CONTEXT (t) = new_tree;
542 DECL_CHAIN (t) = nf;
543 nf = t;
544 }
545 TYPE_FIELDS (new_tree) = nreverse (nf);
546 }
547 break;
548
549 case OFFSET_TYPE:
550 default:
551 /* Shouldn't have been thought variable sized. */
552 gcc_unreachable ();
553 }
554
555 /* All variants of a type share the same size, so use the already remapped data. */
556 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
557 {
558 tree s = TYPE_SIZE (type);
559 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
560 tree su = TYPE_SIZE_UNIT (type);
561 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
562 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
563 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
564 || s == mvs);
565 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
566 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
567 || su == mvsu);
568 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
569 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
570 }
571 else
572 {
573 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
574 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
575 }
576
577 return new_tree;
578 }
579
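/* Remap TYPE through ID's decl map.  Only types that are variably
   modified in the source function get a real copy (via remap_type_1);
   all other types are mapped to themselves.  */
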
580 tree
581 remap_type (tree type, copy_body_data *id)
582 {
583 tree *node;
584 tree tmp;
585
586 if (type == NULL)
587 return type;
588
589 /* See if we have remapped this type. */
590 node = id->decl_map->get (type);
591 if (node)
592 return *node;
593
594 /* The type only needs remapping if it's variably modified. */
595 if (! variably_modified_type_p (type, id->src_fn))
596 {
597 insert_decl_map (id, type, type);
598 return type;
599 }
600
601 id->remapping_type_depth++;
602 tmp = remap_type_1 (type, id);
603 id->remapping_type_depth--;
604
605 return tmp;
606 }
607
608 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
609
610 static bool
611 can_be_nonlocal (tree decl, copy_body_data *id)
612 {
613 /* We cannot duplicate function decls. */
614 if (TREE_CODE (decl) == FUNCTION_DECL)
615 return true;
616
617 /* Local static vars must be non-local or we get multiple declaration
618 problems. */
619 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
620 return true;
621
622 return false;
623 }
624
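/* Remap the chain of declarations DECLS using ID.  Declarations that can
   stay non-local are not copied; when debug info wants them they are
   pushed onto *NONLOCALIZED_LIST instead.  Return the new chain.  */
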
625 static tree
626 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
627 copy_body_data *id)
628 {
629 tree old_var;
630 tree new_decls = NULL_TREE;
631
632 /* Remap its variables. */
633 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
634 {
635 tree new_var;
636
637 if (can_be_nonlocal (old_var, id))
638 {
639 /* We need to add this variable to the local decls as otherwise
640 nothing else will do so. */
641 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
642 add_local_decl (cfun, old_var);
643 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
644 && !DECL_IGNORED_P (old_var)
645 && nonlocalized_list)
646 vec_safe_push (*nonlocalized_list, old_var);
647 continue;
648 }
649
650 /* Remap the variable. */
651 new_var = remap_decl (old_var, id);
652
653 /* If we didn't remap this variable, we can't mess with its
654 TREE_CHAIN. If we remapped this variable to the return slot, it's
655 already declared somewhere else, so don't declare it here. */
656
657 if (new_var == id->retvar)
658 ;
659 else if (!new_var)
660 {
661 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
662 && !DECL_IGNORED_P (old_var)
663 && nonlocalized_list)
664 vec_safe_push (*nonlocalized_list, old_var);
665 }
666 else
667 {
668 gcc_assert (DECL_P (new_var));
669 DECL_CHAIN (new_var) = new_decls;
670 new_decls = new_var;
671
672 /* Also copy value-expressions. */
673 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
674 {
675 tree tem = DECL_VALUE_EXPR (new_var);
676 bool old_regimplify = id->regimplify;
677 id->remapping_type_depth++;
678 walk_tree (&tem, copy_tree_body_r, id, NULL);
679 id->remapping_type_depth--;
680 id->regimplify = old_regimplify;
681 SET_DECL_VALUE_EXPR (new_var, tem);
682 }
683 }
684 }
685
686 return nreverse (new_decls);
687 }
688
689 /* Copy the BLOCK to contain remapped versions of the variables
690 therein, and hook the new block into the block-tree. */
691
692 static void
693 remap_block (tree *block, copy_body_data *id)
694 {
695 tree old_block;
696 tree new_block;
697
698 /* Make the new block. */
699 old_block = *block;
700 new_block = make_node (BLOCK);
701 TREE_USED (new_block) = TREE_USED (old_block);
702 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
703 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
704 BLOCK_NONLOCALIZED_VARS (new_block)
705 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
706 *block = new_block;
707
708 /* Remap its variables. */
709 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
710 &BLOCK_NONLOCALIZED_VARS (new_block),
711 id);
712
713 if (id->transform_lang_insert_block)
714 id->transform_lang_insert_block (new_block);
715
716 /* Remember the remapped block. */
717 insert_decl_map (id, old_block, new_block);
718 }
719
720 /* Copy the whole block tree and root it in id->block. */
721
722 static tree
723 remap_blocks (tree block, copy_body_data *id)
724 {
725 tree t;
726 tree new_tree = block;
727
728 if (!block)
729 return NULL;
730
731 remap_block (&new_tree, id);
732 gcc_assert (new_tree != block);
733 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
734 prepend_lexical_block (new_tree, remap_blocks (t, id));
735 /* Blocks are in arbitrary order, but to make things slightly prettier, do
736 not swap their order when producing a copy. */
737 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
738 return new_tree;
739 }
740
741 /* Remap the block tree rooted at BLOCK to nothing. */
742
743 static void
744 remap_blocks_to_null (tree block, copy_body_data *id)
745 {
746 tree t;
747 insert_decl_map (id, block, NULL_TREE);
748 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
749 remap_blocks_to_null (t, id);
750 }
751
752 /* Remap the location info pointed to by LOCUS. */
753
754 static location_t
755 remap_location (location_t locus, copy_body_data *id)
756 {
757 if (LOCATION_BLOCK (locus))
758 {
759 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
760 gcc_assert (n);
761 if (*n)
762 return set_block (locus, *n);
763 }
764
765 locus = LOCATION_LOCUS (locus);
766
767 if (locus != UNKNOWN_LOCATION && id->block)
768 return set_block (locus, id->block);
769
770 return locus;
771 }
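/* In other words: a location carrying the source function's BLOCK is
   rewritten to carry the corresponding copied BLOCK, and a location with
   no usable block mapping falls back to ID->block (the block of the
   inline call site) when one is set.  */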
772
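/* Replace the STATEMENT_LIST at *TP with a deep copy of itself,
   recursing into nested STATEMENT_LISTs.  */
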
773 static void
774 copy_statement_list (tree *tp)
775 {
776 tree_stmt_iterator oi, ni;
777 tree new_tree;
778
779 new_tree = alloc_stmt_list ();
780 ni = tsi_start (new_tree);
781 oi = tsi_start (*tp);
782 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
783 *tp = new_tree;
784
785 for (; !tsi_end_p (oi); tsi_next (&oi))
786 {
787 tree stmt = tsi_stmt (oi);
788 if (TREE_CODE (stmt) == STATEMENT_LIST)
789 /* This copy is not redundant; tsi_link_after will smash this
790 STATEMENT_LIST into the end of the one we're building, and we
791 don't want to do that with the original. */
792 copy_statement_list (&stmt);
793 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
794 }
795 }
796
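/* Copy the BIND_EXPR at *TP (replacing it in place), remapping its
   BLOCK and its variables using ID.  */
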
797 static void
798 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
799 {
800 tree block = BIND_EXPR_BLOCK (*tp);
801 /* Copy (and replace) the statement. */
802 copy_tree_r (tp, walk_subtrees, NULL);
803 if (block)
804 {
805 remap_block (&block, id);
806 BIND_EXPR_BLOCK (*tp) = block;
807 }
808
809 if (BIND_EXPR_VARS (*tp))
810 /* This will remap a lot of the same decls again, but this should be
811 harmless. */
812 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
813 }
814
815
816 /* Create a new gimple_seq by remapping all the statements in BODY
817 using the inlining information in ID. */
818
819 static gimple_seq
820 remap_gimple_seq (gimple_seq body, copy_body_data *id)
821 {
822 gimple_stmt_iterator si;
823 gimple_seq new_body = NULL;
824
825 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
826 {
827 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
828 gimple_seq_add_seq (&new_body, new_stmts);
829 }
830
831 return new_body;
832 }
833
834
835 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
836 block using the mapping information in ID. */
837
838 static gimple *
839 copy_gimple_bind (gbind *stmt, copy_body_data *id)
840 {
841 gimple *new_bind;
842 tree new_block, new_vars;
843 gimple_seq body, new_body;
844
845 /* Copy the statement. Note that we purposely don't use copy_stmt
846 here because we need to remap statements as we copy. */
847 body = gimple_bind_body (stmt);
848 new_body = remap_gimple_seq (body, id);
849
850 new_block = gimple_bind_block (stmt);
851 if (new_block)
852 remap_block (&new_block, id);
853
854 /* This will remap a lot of the same decls again, but this should be
855 harmless. */
856 new_vars = gimple_bind_vars (stmt);
857 if (new_vars)
858 new_vars = remap_decls (new_vars, NULL, id);
859
860 new_bind = gimple_build_bind (new_vars, new_body, new_block);
861
862 return new_bind;
863 }
864
865 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
866
867 static bool
868 is_parm (tree decl)
869 {
870 if (TREE_CODE (decl) == SSA_NAME)
871 {
872 decl = SSA_NAME_VAR (decl);
873 if (!decl)
874 return false;
875 }
876
877 return (TREE_CODE (decl) == PARM_DECL);
878 }
879
880 /* Remap the dependence CLIQUE from the source to the destination function
881 as specified in ID. */
882
883 static unsigned short
884 remap_dependence_clique (copy_body_data *id, unsigned short clique)
885 {
886 if (clique == 0 || processing_debug_stmt)
887 return 0;
888 if (!id->dependence_map)
889 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
890 bool existed;
891 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
892 if (!existed)
893 newc = ++cfun->last_clique;
894 return newc;
895 }
896
897 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
898 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
899 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
900 recursing into the child nodes of *TP. */
901
902 static tree
903 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
904 {
905 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
906 copy_body_data *id = (copy_body_data *) wi_p->info;
907 tree fn = id->src_fn;
908
909 /* For recursive invocations this is no longer the LHS itself. */
910 bool is_lhs = wi_p->is_lhs;
911 wi_p->is_lhs = false;
912
913 if (TREE_CODE (*tp) == SSA_NAME)
914 {
915 *tp = remap_ssa_name (*tp, id);
916 *walk_subtrees = 0;
917 if (is_lhs)
918 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
919 return NULL;
920 }
921 else if (auto_var_in_fn_p (*tp, fn))
922 {
923 /* Local variables and labels need to be replaced by equivalent
924 variables. We don't want to copy static variables; there's
925 only one of those, no matter how many times we inline the
926 containing function. Similarly for globals from an outer
927 function. */
928 tree new_decl;
929
930 /* Remap the declaration. */
931 new_decl = remap_decl (*tp, id);
932 gcc_assert (new_decl);
933 /* Replace this variable with the copy. */
934 STRIP_TYPE_NOPS (new_decl);
935 /* ??? The C++ frontend uses void * pointer zero to initialize
936 any other type. This confuses the middle-end type verification.
937 As cloned bodies do not go through gimplification again, the fixup
938 there doesn't trigger. */
939 if (TREE_CODE (new_decl) == INTEGER_CST
940 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
941 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
942 *tp = new_decl;
943 *walk_subtrees = 0;
944 }
945 else if (TREE_CODE (*tp) == STATEMENT_LIST)
946 gcc_unreachable ();
947 else if (TREE_CODE (*tp) == SAVE_EXPR)
948 gcc_unreachable ();
949 else if (TREE_CODE (*tp) == LABEL_DECL
950 && (!DECL_CONTEXT (*tp)
951 || decl_function_context (*tp) == id->src_fn))
952 /* These may need to be remapped for EH handling. */
953 *tp = remap_decl (*tp, id);
954 else if (TREE_CODE (*tp) == FIELD_DECL)
955 {
956 /* If the enclosing record type is variably_modified_type_p, the field
957 has already been remapped. Otherwise, it need not be. */
958 tree *n = id->decl_map->get (*tp);
959 if (n)
960 *tp = *n;
961 *walk_subtrees = 0;
962 }
963 else if (TYPE_P (*tp))
964 /* Types may need remapping as well. */
965 *tp = remap_type (*tp, id);
966 else if (CONSTANT_CLASS_P (*tp))
967 {
968 /* If this is a constant, we have to copy the node iff the type
969 will be remapped. copy_tree_r will not copy a constant. */
970 tree new_type = remap_type (TREE_TYPE (*tp), id);
971
972 if (new_type == TREE_TYPE (*tp))
973 *walk_subtrees = 0;
974
975 else if (TREE_CODE (*tp) == INTEGER_CST)
976 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
977 else
978 {
979 *tp = copy_node (*tp);
980 TREE_TYPE (*tp) = new_type;
981 }
982 }
983 else
984 {
985 /* Otherwise, just copy the node. Note that copy_tree_r already
986 knows not to copy VAR_DECLs, etc., so this is safe. */
987
988 if (TREE_CODE (*tp) == MEM_REF)
989 {
990 /* We need to re-canonicalize MEM_REFs from inline substitutions
991 that can happen when a pointer argument is an ADDR_EXPR.
992 Recurse here manually to allow that. */
993 tree ptr = TREE_OPERAND (*tp, 0);
994 tree type = remap_type (TREE_TYPE (*tp), id);
995 tree old = *tp;
996 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
997 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
998 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
999 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1000 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1001 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1002 {
1003 MR_DEPENDENCE_CLIQUE (*tp)
1004 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1005 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1006 }
1007 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1008 remapped a parameter as the property might be valid only
1009 for the parameter itself. */
1010 if (TREE_THIS_NOTRAP (old)
1011 && (!is_parm (TREE_OPERAND (old, 0))
1012 || (!id->transform_parameter && is_parm (ptr))))
1013 TREE_THIS_NOTRAP (*tp) = 1;
1014 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1015 *walk_subtrees = 0;
1016 return NULL;
1017 }
1018
1019 /* Here is the "usual case". Copy this tree node, and then
1020 tweak some special cases. */
1021 copy_tree_r (tp, walk_subtrees, NULL);
1022
1023 if (TREE_CODE (*tp) != OMP_CLAUSE)
1024 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1025
1026 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1027 {
1028 /* The copied TARGET_EXPR has never been expanded, even if the
1029 original node was expanded already. */
1030 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1031 TREE_OPERAND (*tp, 3) = NULL_TREE;
1032 }
1033 else if (TREE_CODE (*tp) == ADDR_EXPR)
1034 {
1035 /* Variable substitution need not be simple; consider, in particular,
1036 the MEM_REF substitution above. Make sure that
1037 TREE_CONSTANT and friends are up-to-date. */
1038 int invariant = is_gimple_min_invariant (*tp);
1039 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1040 recompute_tree_invariant_for_addr_expr (*tp);
1041
1042 /* If this used to be invariant, but is not any longer,
1043 then regimplification is probably needed. */
1044 if (invariant && !is_gimple_min_invariant (*tp))
1045 id->regimplify = true;
1046
1047 *walk_subtrees = 0;
1048 }
1049 }
1050
1051 /* Update the TREE_BLOCK for the cloned expr. */
1052 if (EXPR_P (*tp))
1053 {
1054 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1055 tree old_block = TREE_BLOCK (*tp);
1056 if (old_block)
1057 {
1058 tree *n;
1059 n = id->decl_map->get (TREE_BLOCK (*tp));
1060 if (n)
1061 new_block = *n;
1062 }
1063 TREE_SET_BLOCK (*tp, new_block);
1064 }
1065
1066 /* Keep iterating. */
1067 return NULL_TREE;
1068 }
1069
1070
1071 /* Called via walk_tree when copying a function body. DATA is really a
1072 `copy_body_data *'. */
1073
1074 tree
1075 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1076 {
1077 copy_body_data *id = (copy_body_data *) data;
1078 tree fn = id->src_fn;
1079 tree new_block;
1080
1081 /* Begin by recognizing trees that we'll completely rewrite for the
1082 inlining context. Our output for these trees is completely
1083 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1084 into an edge). Further down, we'll handle trees that get
1085 duplicated and/or tweaked. */
1086
1087 /* When requested, RETURN_EXPRs should be transformed to just the
1088 contained MODIFY_EXPR. The branch semantics of the return will
1089 be handled elsewhere by manipulating the CFG rather than a statement. */
1090 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1091 {
1092 tree assignment = TREE_OPERAND (*tp, 0);
1093
1094 /* If we're returning something, just turn that into an
1095 assignment into the equivalent of the original RESULT_DECL.
1096 If the "assignment" is just the result decl, the result
1097 decl has already been set (e.g. a recent "foo (&result_decl,
1098 ...)"); just toss the entire RETURN_EXPR. */
1099 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1100 {
1101 /* Replace the RETURN_EXPR with (a copy of) the
1102 MODIFY_EXPR hanging underneath. */
1103 *tp = copy_node (assignment);
1104 }
1105 else /* Else the RETURN_EXPR returns no value. */
1106 {
1107 *tp = NULL;
1108 return (tree) (void *)1;
1109 }
1110 }
1111 else if (TREE_CODE (*tp) == SSA_NAME)
1112 {
1113 *tp = remap_ssa_name (*tp, id);
1114 *walk_subtrees = 0;
1115 return NULL;
1116 }
1117
1118 /* Local variables and labels need to be replaced by equivalent
1119 variables. We don't want to copy static variables; there's only
1120 one of those, no matter how many times we inline the containing
1121 function. Similarly for globals from an outer function. */
1122 else if (auto_var_in_fn_p (*tp, fn))
1123 {
1124 tree new_decl;
1125
1126 /* Remap the declaration. */
1127 new_decl = remap_decl (*tp, id);
1128 gcc_assert (new_decl);
1129 /* Replace this variable with the copy. */
1130 STRIP_TYPE_NOPS (new_decl);
1131 *tp = new_decl;
1132 *walk_subtrees = 0;
1133 }
1134 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1135 copy_statement_list (tp);
1136 else if (TREE_CODE (*tp) == SAVE_EXPR
1137 || TREE_CODE (*tp) == TARGET_EXPR)
1138 remap_save_expr (tp, id->decl_map, walk_subtrees);
1139 else if (TREE_CODE (*tp) == LABEL_DECL
1140 && (! DECL_CONTEXT (*tp)
1141 || decl_function_context (*tp) == id->src_fn))
1142 /* These may need to be remapped for EH handling. */
1143 *tp = remap_decl (*tp, id);
1144 else if (TREE_CODE (*tp) == BIND_EXPR)
1145 copy_bind_expr (tp, walk_subtrees, id);
1146 /* Types may need remapping as well. */
1147 else if (TYPE_P (*tp))
1148 *tp = remap_type (*tp, id);
1149
1150 /* If this is a constant, we have to copy the node iff the type will be
1151 remapped. copy_tree_r will not copy a constant. */
1152 else if (CONSTANT_CLASS_P (*tp))
1153 {
1154 tree new_type = remap_type (TREE_TYPE (*tp), id);
1155
1156 if (new_type == TREE_TYPE (*tp))
1157 *walk_subtrees = 0;
1158
1159 else if (TREE_CODE (*tp) == INTEGER_CST)
1160 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1161 else
1162 {
1163 *tp = copy_node (*tp);
1164 TREE_TYPE (*tp) = new_type;
1165 }
1166 }
1167
1168 /* Otherwise, just copy the node. Note that copy_tree_r already
1169 knows not to copy VAR_DECLs, etc., so this is safe. */
1170 else
1171 {
1172 /* Here we handle trees that are not completely rewritten.
1173 First we detect some inlining-induced bogosities for
1174 discarding. */
1175 if (TREE_CODE (*tp) == MODIFY_EXPR
1176 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1177 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1178 {
1179 /* Some assignments VAR = VAR; don't generate any rtl code
1180 and thus don't count as variable modification. Avoid
1181 keeping bogosities like 0 = 0. */
1182 tree decl = TREE_OPERAND (*tp, 0), value;
1183 tree *n;
1184
1185 n = id->decl_map->get (decl);
1186 if (n)
1187 {
1188 value = *n;
1189 STRIP_TYPE_NOPS (value);
1190 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1191 {
1192 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1193 return copy_tree_body_r (tp, walk_subtrees, data);
1194 }
1195 }
1196 }
1197 else if (TREE_CODE (*tp) == INDIRECT_REF)
1198 {
1199 /* Get rid of *& from inline substitutions that can happen when a
1200 pointer argument is an ADDR_EXPR. */
1201 tree decl = TREE_OPERAND (*tp, 0);
1202 tree *n = id->decl_map->get (decl);
1203 if (n)
1204 {
1205 /* If we happen to get an ADDR_EXPR in n->value, strip
1206 it manually here as we'll eventually get ADDR_EXPRs
1207 which lie about their types pointed to. In this case
1208 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1209 but we absolutely rely on that. As fold_indirect_ref
1210 does other useful transformations, try that first, though. */
1211 tree type = TREE_TYPE (*tp);
1212 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1213 tree old = *tp;
1214 *tp = gimple_fold_indirect_ref (ptr);
1215 if (! *tp)
1216 {
1217 type = remap_type (type, id);
1218 if (TREE_CODE (ptr) == ADDR_EXPR)
1219 {
1220 *tp
1221 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1222 /* ??? We should either assert here or build
1223 a VIEW_CONVERT_EXPR instead of blindly leaking
1224 incompatible types to our IL. */
1225 if (! *tp)
1226 *tp = TREE_OPERAND (ptr, 0);
1227 }
1228 else
1229 {
1230 *tp = build1 (INDIRECT_REF, type, ptr);
1231 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1232 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1233 TREE_READONLY (*tp) = TREE_READONLY (old);
1234 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1235 have remapped a parameter as the property might be
1236 valid only for the parameter itself. */
1237 if (TREE_THIS_NOTRAP (old)
1238 && (!is_parm (TREE_OPERAND (old, 0))
1239 || (!id->transform_parameter && is_parm (ptr))))
1240 TREE_THIS_NOTRAP (*tp) = 1;
1241 }
1242 }
1243 *walk_subtrees = 0;
1244 return NULL;
1245 }
1246 }
1247 else if (TREE_CODE (*tp) == MEM_REF)
1248 {
1249 /* We need to re-canonicalize MEM_REFs from inline substitutions
1250 that can happen when a pointer argument is an ADDR_EXPR.
1251 Recurse here manually to allow that. */
1252 tree ptr = TREE_OPERAND (*tp, 0);
1253 tree type = remap_type (TREE_TYPE (*tp), id);
1254 tree old = *tp;
1255 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1256 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1257 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1258 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1259 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1260 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1261 {
1262 MR_DEPENDENCE_CLIQUE (*tp)
1263 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1264 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1265 }
1266 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1267 remapped a parameter as the property might be valid only
1268 for the parameter itself. */
1269 if (TREE_THIS_NOTRAP (old)
1270 && (!is_parm (TREE_OPERAND (old, 0))
1271 || (!id->transform_parameter && is_parm (ptr))))
1272 TREE_THIS_NOTRAP (*tp) = 1;
1273 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1274 *walk_subtrees = 0;
1275 return NULL;
1276 }
1277
1278 /* Here is the "usual case". Copy this tree node, and then
1279 tweak some special cases. */
1280 copy_tree_r (tp, walk_subtrees, NULL);
1281
1282 /* If EXPR has a block defined, map it to the newly constructed block.
1283 When inlining we want EXPRs without a block to appear in the block
1284 of the function call if we are not remapping a type. */
1285 if (EXPR_P (*tp))
1286 {
1287 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1288 if (TREE_BLOCK (*tp))
1289 {
1290 tree *n;
1291 n = id->decl_map->get (TREE_BLOCK (*tp));
1292 if (n)
1293 new_block = *n;
1294 }
1295 TREE_SET_BLOCK (*tp, new_block);
1296 }
1297
1298 if (TREE_CODE (*tp) != OMP_CLAUSE)
1299 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1300
1301 /* The copied TARGET_EXPR has never been expanded, even if the
1302 original node was expanded already. */
1303 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1304 {
1305 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1306 TREE_OPERAND (*tp, 3) = NULL_TREE;
1307 }
1308
1309 /* Variable substitution need not be simple; consider, in particular,
1310 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1311 and friends are up-to-date. */
1312 else if (TREE_CODE (*tp) == ADDR_EXPR)
1313 {
1314 int invariant = is_gimple_min_invariant (*tp);
1315 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1316
1317 /* Handle the case where we substituted an INDIRECT_REF
1318 into the operand of the ADDR_EXPR. */
1319 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1320 {
1321 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1322 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1323 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1324 *tp = t;
1325 }
1326 else
1327 recompute_tree_invariant_for_addr_expr (*tp);
1328
1329 /* If this used to be invariant, but is not any longer,
1330 then regimplification is probably needed. */
1331 if (invariant && !is_gimple_min_invariant (*tp))
1332 id->regimplify = true;
1333
1334 *walk_subtrees = 0;
1335 }
1336 }
1337
1338 /* Keep iterating. */
1339 return NULL_TREE;
1340 }
1341
1342 /* Helper for remap_gimple_stmt. Given an EH region number for the
1343 source function, map that to the duplicate EH region number in
1344 the destination function. */
1345
1346 static int
1347 remap_eh_region_nr (int old_nr, copy_body_data *id)
1348 {
1349 eh_region old_r, new_r;
1350
1351 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1352 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1353
1354 return new_r->index;
1355 }
1356
1357 /* Similar, but operate on INTEGER_CSTs. */
1358
1359 static tree
1360 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1361 {
1362 int old_nr, new_nr;
1363
1364 old_nr = tree_to_shwi (old_t_nr);
1365 new_nr = remap_eh_region_nr (old_nr, id);
1366
1367 return build_int_cst (integer_type_node, new_nr);
1368 }
1369
1370 /* Helper for copy_bb. Remap statement STMT using the inlining
1371 information in ID. Return the sequence of statements that replaces it. */
1372
1373 static gimple_seq
1374 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1375 {
1376 gimple *copy = NULL;
1377 struct walk_stmt_info wi;
1378 bool skip_first = false;
1379 gimple_seq stmts = NULL;
1380
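   /* Drop debug stmts the destination function will not use: nonbind
      markers when it does not emit them, and other debug stmts when
      -fvar-tracking-assignments is disabled for it.  */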
1381 if (is_gimple_debug (stmt)
1382 && (gimple_debug_nonbind_marker_p (stmt)
1383 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1384 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1385 return stmts;
1386
1387 /* Begin by recognizing trees that we'll completely rewrite for the
1388 inlining context. Our output for these trees is completely
1389 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1390 into an edge). Further down, we'll handle trees that get
1391 duplicated and/or tweaked. */
1392
1393 /* When requested, GIMPLE_RETURNs should be transformed to just the
1394 contained GIMPLE_ASSIGN. The branch semantics of the return will
1395 be handled elsewhere by manipulating the CFG rather than the
1396 statement. */
1397 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1398 {
1399 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1400 tree retbnd = gimple_return_retbnd (stmt);
1401 tree bndslot = id->retbnd;
1402
1403 if (retbnd && bndslot)
1404 {
1405 gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1406 memset (&wi, 0, sizeof (wi));
1407 wi.info = id;
1408 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1409 gimple_seq_add_stmt (&stmts, bndcopy);
1410 }
1411
1412 /* If we're returning something, just turn that into an
1413 assignment into the equivalent of the original RESULT_DECL.
1414 If RETVAL is just the result decl, the result decl has
1415 already been set (e.g. a recent "foo (&result_decl, ...)");
1416 just toss the entire GIMPLE_RETURN. */
1417 if (retval
1418 && (TREE_CODE (retval) != RESULT_DECL
1419 && (TREE_CODE (retval) != SSA_NAME
1420 || ! SSA_NAME_VAR (retval)
1421 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1422 {
1423 copy = gimple_build_assign (id->do_not_unshare
1424 ? id->retvar : unshare_expr (id->retvar),
1425 retval);
1426 /* id->retvar is already substituted. Skip it on later remapping. */
1427 skip_first = true;
1428 }
1429 else
1430 return stmts;
1431 }
1432 else if (gimple_has_substatements (stmt))
1433 {
1434 gimple_seq s1, s2;
1435
1436 /* When cloning bodies from the C++ front end, we will be handed bodies
1437 in High GIMPLE form. Handle here all the High GIMPLE statements that
1438 have embedded statements. */
1439 switch (gimple_code (stmt))
1440 {
1441 case GIMPLE_BIND:
1442 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1443 break;
1444
1445 case GIMPLE_CATCH:
1446 {
1447 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1448 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1449 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1450 }
1451 break;
1452
1453 case GIMPLE_EH_FILTER:
1454 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1455 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1456 break;
1457
1458 case GIMPLE_TRY:
1459 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1460 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1461 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1462 break;
1463
1464 case GIMPLE_WITH_CLEANUP_EXPR:
1465 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1466 copy = gimple_build_wce (s1);
1467 break;
1468
1469 case GIMPLE_OMP_PARALLEL:
1470 {
1471 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1472 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1473 copy = gimple_build_omp_parallel
1474 (s1,
1475 gimple_omp_parallel_clauses (omp_par_stmt),
1476 gimple_omp_parallel_child_fn (omp_par_stmt),
1477 gimple_omp_parallel_data_arg (omp_par_stmt));
1478 }
1479 break;
1480
1481 case GIMPLE_OMP_TASK:
1482 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1483 copy = gimple_build_omp_task
1484 (s1,
1485 gimple_omp_task_clauses (stmt),
1486 gimple_omp_task_child_fn (stmt),
1487 gimple_omp_task_data_arg (stmt),
1488 gimple_omp_task_copy_fn (stmt),
1489 gimple_omp_task_arg_size (stmt),
1490 gimple_omp_task_arg_align (stmt));
1491 break;
1492
1493 case GIMPLE_OMP_FOR:
1494 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1495 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1496 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1497 gimple_omp_for_clauses (stmt),
1498 gimple_omp_for_collapse (stmt), s2);
1499 {
1500 size_t i;
1501 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1502 {
1503 gimple_omp_for_set_index (copy, i,
1504 gimple_omp_for_index (stmt, i));
1505 gimple_omp_for_set_initial (copy, i,
1506 gimple_omp_for_initial (stmt, i));
1507 gimple_omp_for_set_final (copy, i,
1508 gimple_omp_for_final (stmt, i));
1509 gimple_omp_for_set_incr (copy, i,
1510 gimple_omp_for_incr (stmt, i));
1511 gimple_omp_for_set_cond (copy, i,
1512 gimple_omp_for_cond (stmt, i));
1513 }
1514 }
1515 break;
1516
1517 case GIMPLE_OMP_MASTER:
1518 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1519 copy = gimple_build_omp_master (s1);
1520 break;
1521
1522 case GIMPLE_OMP_TASKGROUP:
1523 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1524 copy = gimple_build_omp_taskgroup (s1);
1525 break;
1526
1527 case GIMPLE_OMP_ORDERED:
1528 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 copy = gimple_build_omp_ordered
1530 (s1,
1531 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1532 break;
1533
1534 case GIMPLE_OMP_SECTION:
1535 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1536 copy = gimple_build_omp_section (s1);
1537 break;
1538
1539 case GIMPLE_OMP_SECTIONS:
1540 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1541 copy = gimple_build_omp_sections
1542 (s1, gimple_omp_sections_clauses (stmt));
1543 break;
1544
1545 case GIMPLE_OMP_SINGLE:
1546 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1547 copy = gimple_build_omp_single
1548 (s1, gimple_omp_single_clauses (stmt));
1549 break;
1550
1551 case GIMPLE_OMP_TARGET:
1552 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1553 copy = gimple_build_omp_target
1554 (s1, gimple_omp_target_kind (stmt),
1555 gimple_omp_target_clauses (stmt));
1556 break;
1557
1558 case GIMPLE_OMP_TEAMS:
1559 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1560 copy = gimple_build_omp_teams
1561 (s1, gimple_omp_teams_clauses (stmt));
1562 break;
1563
1564 case GIMPLE_OMP_CRITICAL:
1565 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1566 copy = gimple_build_omp_critical (s1,
1567 gimple_omp_critical_name
1568 (as_a <gomp_critical *> (stmt)),
1569 gimple_omp_critical_clauses
1570 (as_a <gomp_critical *> (stmt)));
1571 break;
1572
1573 case GIMPLE_TRANSACTION:
1574 {
1575 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1576 gtransaction *new_trans_stmt;
1577 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1578 id);
1579 copy = new_trans_stmt = gimple_build_transaction (s1);
1580 gimple_transaction_set_subcode (new_trans_stmt,
1581 gimple_transaction_subcode (old_trans_stmt));
1582 gimple_transaction_set_label_norm (new_trans_stmt,
1583 gimple_transaction_label_norm (old_trans_stmt));
1584 gimple_transaction_set_label_uninst (new_trans_stmt,
1585 gimple_transaction_label_uninst (old_trans_stmt));
1586 gimple_transaction_set_label_over (new_trans_stmt,
1587 gimple_transaction_label_over (old_trans_stmt));
1588 }
1589 break;
1590
1591 default:
1592 gcc_unreachable ();
1593 }
1594 }
1595 else
1596 {
1597 if (gimple_assign_copy_p (stmt)
1598 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1599 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1600 {
1601 /* Here we handle statements that are not completely rewritten.
1602 First we detect some inlining-induced bogosities for
1603 discarding. */
1604
1605 /* Some assignments VAR = VAR; don't generate any rtl code
1606 and thus don't count as variable modification. Avoid
1607 keeping bogosities like 0 = 0. */
1608 tree decl = gimple_assign_lhs (stmt), value;
1609 tree *n;
1610
1611 n = id->decl_map->get (decl);
1612 if (n)
1613 {
1614 value = *n;
1615 STRIP_TYPE_NOPS (value);
1616 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1617 return NULL;
1618 }
1619 }
1620
1621 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1622 in a block that we aren't copying during tree_function_versioning,
1623 just drop the clobber stmt. */
1624 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1625 {
1626 tree lhs = gimple_assign_lhs (stmt);
1627 if (TREE_CODE (lhs) == MEM_REF
1628 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1629 {
1630 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1631 if (gimple_bb (def_stmt)
1632 && !bitmap_bit_p (id->blocks_to_copy,
1633 gimple_bb (def_stmt)->index))
1634 return NULL;
1635 }
1636 }
1637
1638 if (gimple_debug_bind_p (stmt))
1639 {
1640 gdebug *copy
1641 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1642 gimple_debug_bind_get_value (stmt),
1643 stmt);
1644 id->debug_stmts.safe_push (copy);
1645 gimple_seq_add_stmt (&stmts, copy);
1646 return stmts;
1647 }
1648 if (gimple_debug_source_bind_p (stmt))
1649 {
1650 gdebug *copy = gimple_build_debug_source_bind
1651 (gimple_debug_source_bind_get_var (stmt),
1652 gimple_debug_source_bind_get_value (stmt),
1653 stmt);
1654 id->debug_stmts.safe_push (copy);
1655 gimple_seq_add_stmt (&stmts, copy);
1656 return stmts;
1657 }
1658 if (gimple_debug_nonbind_marker_p (stmt))
1659 {
1660 /* If the inlined function has too many debug markers,
1661 don't copy them. */
1662 if (id->src_cfun->debug_marker_count
1663 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1664 return stmts;
1665
1666 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1667 id->debug_stmts.safe_push (copy);
1668 gimple_seq_add_stmt (&stmts, copy);
1669 return stmts;
1670 }
1671 gcc_checking_assert (!is_gimple_debug (stmt));
1672
1673 /* Create a new deep copy of the statement. */
1674 copy = gimple_copy (stmt);
1675
1676 /* Clear flags that need revisiting. */
1677 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1678 {
1679 if (gimple_call_tail_p (call_stmt))
1680 gimple_call_set_tail (call_stmt, false);
1681 if (gimple_call_from_thunk_p (call_stmt))
1682 gimple_call_set_from_thunk (call_stmt, false);
1683 if (gimple_call_internal_p (call_stmt))
1684 switch (gimple_call_internal_fn (call_stmt))
1685 {
1686 case IFN_GOMP_SIMD_LANE:
1687 case IFN_GOMP_SIMD_VF:
1688 case IFN_GOMP_SIMD_LAST_LANE:
1689 case IFN_GOMP_SIMD_ORDERED_START:
1690 case IFN_GOMP_SIMD_ORDERED_END:
1691 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1692 break;
1693 default:
1694 break;
1695 }
1696 }
1697
1698 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1699 RESX and EH_DISPATCH. */
1700 if (id->eh_map)
1701 switch (gimple_code (copy))
1702 {
1703 case GIMPLE_CALL:
1704 {
1705 tree r, fndecl = gimple_call_fndecl (copy);
1706 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1707 switch (DECL_FUNCTION_CODE (fndecl))
1708 {
1709 case BUILT_IN_EH_COPY_VALUES:
1710 r = gimple_call_arg (copy, 1);
1711 r = remap_eh_region_tree_nr (r, id);
1712 gimple_call_set_arg (copy, 1, r);
1713 /* FALLTHRU */
1714
1715 case BUILT_IN_EH_POINTER:
1716 case BUILT_IN_EH_FILTER:
1717 r = gimple_call_arg (copy, 0);
1718 r = remap_eh_region_tree_nr (r, id);
1719 gimple_call_set_arg (copy, 0, r);
1720 break;
1721
1722 default:
1723 break;
1724 }
1725
1726 /* Reset alias info if we didn't apply measures to
1727 keep it valid over inlining by setting DECL_PT_UID. */
1728 if (!id->src_cfun->gimple_df
1729 || !id->src_cfun->gimple_df->ipa_pta)
1730 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1731 }
1732 break;
1733
1734 case GIMPLE_RESX:
1735 {
1736 gresx *resx_stmt = as_a <gresx *> (copy);
1737 int r = gimple_resx_region (resx_stmt);
1738 r = remap_eh_region_nr (r, id);
1739 gimple_resx_set_region (resx_stmt, r);
1740 }
1741 break;
1742
1743 case GIMPLE_EH_DISPATCH:
1744 {
1745 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1746 int r = gimple_eh_dispatch_region (eh_dispatch);
1747 r = remap_eh_region_nr (r, id);
1748 gimple_eh_dispatch_set_region (eh_dispatch, r);
1749 }
1750 break;
1751
1752 default:
1753 break;
1754 }
1755 }
1756
1757 /* If STMT has a block defined, map it to the newly constructed
1758 block. */
1759 if (gimple_block (copy))
1760 {
1761 tree *n;
1762 n = id->decl_map->get (gimple_block (copy));
1763 gcc_assert (n);
1764 gimple_set_block (copy, *n);
1765 }
1766
1767 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
1768 || gimple_debug_nonbind_marker_p (copy))
1769 {
1770 gimple_seq_add_stmt (&stmts, copy);
1771 return stmts;
1772 }
1773
1774 /* Remap all the operands in COPY. */
1775 memset (&wi, 0, sizeof (wi));
1776 wi.info = id;
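/* SKIP_FIRST is set when the destination operand of COPY has already been
substituted (e.g. the return variable of the inlined call); in that case
remap only operand 1. */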
1777 if (skip_first)
1778 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1779 else
1780 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1781
1782 /* Clear the copied virtual operands. We are not remapping them here
1783 but are going to recreate them from scratch. */
1784 if (gimple_has_mem_ops (copy))
1785 {
1786 gimple_set_vdef (copy, NULL_TREE);
1787 gimple_set_vuse (copy, NULL_TREE);
1788 }
1789
1790 gimple_seq_add_stmt (&stmts, copy);
1791 return stmts;
1792 }
1793
1794
1795 /* Copy a basic block and scale its profile accordingly. Edges will be taken
1796 care of later. */
1797
1798 static basic_block
1799 copy_bb (copy_body_data *id, basic_block bb,
1800 profile_count num, profile_count den)
1801 {
1802 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1803 basic_block copy_basic_block;
1804 tree decl;
1805 basic_block prev;
1806
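/* Normalize NUM and DEN so the ratio used below to scale block counts is
well defined. */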
1807 profile_count::adjust_for_ipa_scaling (&num, &den);
1808
1809 /* Search for previous copied basic block. */
1810 prev = bb->prev_bb;
1811 while (!prev->aux)
1812 prev = prev->prev_bb;
1813
1814 /* create_basic_block() will append every new block to
1815 basic_block_info automatically. */
1816 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1817 copy_basic_block->count = bb->count.apply_scale (num, den);
1818
1819 copy_gsi = gsi_start_bb (copy_basic_block);
1820
1821 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1822 {
1823 gimple_seq stmts;
1824 gimple *stmt = gsi_stmt (gsi);
1825 gimple *orig_stmt = stmt;
1826 gimple_stmt_iterator stmts_gsi;
1827 bool stmt_added = false;
1828
1829 id->regimplify = false;
1830 stmts = remap_gimple_stmt (stmt, id);
1831
1832 if (gimple_seq_empty_p (stmts))
1833 continue;
1834
1835 seq_gsi = copy_gsi;
1836
1837 for (stmts_gsi = gsi_start (stmts);
1838 !gsi_end_p (stmts_gsi); )
1839 {
1840 stmt = gsi_stmt (stmts_gsi);
1841
1842 /* Advance iterator now before stmt is moved to seq_gsi. */
1843 gsi_next (&stmts_gsi);
1844
1845 if (gimple_nop_p (stmt))
1846 continue;
1847
1848 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1849 orig_stmt);
1850
1851 /* With return slot optimization we can end up with
1852 non-gimple (foo *)&this->m, fix that here. */
1853 if (is_gimple_assign (stmt)
1854 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1855 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1856 {
1857 tree new_rhs;
1858 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1859 gimple_assign_rhs1 (stmt),
1860 true, NULL, false,
1861 GSI_CONTINUE_LINKING);
1862 gimple_assign_set_rhs1 (stmt, new_rhs);
1863 id->regimplify = false;
1864 }
1865
1866 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1867
1868 if (id->regimplify)
1869 gimple_regimplify_operands (stmt, &seq_gsi);
1870
1871 stmt_added = true;
1872 }
1873
1874 if (!stmt_added)
1875 continue;
1876
1877 /* If copy_basic_block was empty at the start of this iteration,
1878 call gsi_start_bb again to get at the newly added statements. */
1879 if (gsi_end_p (copy_gsi))
1880 copy_gsi = gsi_start_bb (copy_basic_block);
1881 else
1882 gsi_next (&copy_gsi);
1883
1884 /* Process the new statement. The call to gimple_regimplify_operands
1885 possibly turned the statement into multiple statements; we
1886 need to process all of them. */
1887 do
1888 {
1889 tree fn;
1890 gcall *call_stmt;
1891
1892 stmt = gsi_stmt (copy_gsi);
1893 call_stmt = dyn_cast <gcall *> (stmt);
1894 if (call_stmt
1895 && gimple_call_va_arg_pack_p (call_stmt)
1896 && id->call_stmt
1897 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1898 {
1899 /* __builtin_va_arg_pack () should be replaced by
1900 all arguments corresponding to ... in the caller. */
1901 tree p;
1902 gcall *new_call;
1903 vec<tree> argarray;
1904 size_t nargs = gimple_call_num_args (id->call_stmt);
1905 size_t n;
1906
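/* NARGS starts as the total number of arguments the caller passed; after
subtracting the callee's named parameters it is the number of arguments
that correspond to the '...' part. */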
1907 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1908 nargs--;
1909
1910 /* Create the new array of arguments. */
1911 n = nargs + gimple_call_num_args (call_stmt);
1912 argarray.create (n);
1913 argarray.safe_grow_cleared (n);
1914
1915 /* Copy all the arguments before '...' */
1916 memcpy (argarray.address (),
1917 gimple_call_arg_ptr (call_stmt, 0),
1918 gimple_call_num_args (call_stmt) * sizeof (tree));
1919
1920 /* Append the arguments passed in '...' */
1921 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1922 gimple_call_arg_ptr (id->call_stmt, 0)
1923 + (gimple_call_num_args (id->call_stmt) - nargs),
1924 nargs * sizeof (tree));
1925
1926 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1927 argarray);
1928
1929 argarray.release ();
1930
1931 /* Copy all GIMPLE_CALL flags, location and block, except
1932 GF_CALL_VA_ARG_PACK. */
1933 gimple_call_copy_flags (new_call, call_stmt);
1934 gimple_call_set_va_arg_pack (new_call, false);
1935 gimple_set_location (new_call, gimple_location (stmt));
1936 gimple_set_block (new_call, gimple_block (stmt));
1937 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1938
1939 gsi_replace (&copy_gsi, new_call, false);
1940 stmt = new_call;
1941 }
1942 else if (call_stmt
1943 && id->call_stmt
1944 && (decl = gimple_call_fndecl (stmt))
1945 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1946 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1947 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1948 {
1949 /* __builtin_va_arg_pack_len () should be replaced by
1950 the number of anonymous arguments. */
1951 size_t nargs = gimple_call_num_args (id->call_stmt);
1952 tree count, p;
1953 gimple *new_stmt;
1954
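/* As above, subtract the callee's named parameters so NARGS is the number
of arguments passed for the '...' part of the call. */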
1955 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1956 nargs--;
1957
1958 count = build_int_cst (integer_type_node, nargs);
1959 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1960 gsi_replace (&copy_gsi, new_stmt, false);
1961 stmt = new_stmt;
1962 }
1963 else if (call_stmt
1964 && id->call_stmt
1965 && gimple_call_internal_p (stmt)
1966 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1967 {
1968 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1969 gsi_remove (&copy_gsi, false);
1970 continue;
1971 }
1972
1973 /* Statements produced by inlining can be left unfolded, especially
1974 when we have constant propagated some operands. We can't fold
1975 them right now for two reasons:
1976 1) folding requires SSA_NAME_DEF_STMTs to be correct
1977 2) we can't change function calls to builtins.
1978 So we just mark the statement for later folding. We mark
1979 all new statements, instead of just those that have changed
1980 by some nontrivial substitution, so that even statements made
1981 foldable indirectly are updated. If this turns out to be
1982 expensive, copy_body can be told to watch for nontrivial
1983 changes. */
1984 if (id->statements_to_fold)
1985 id->statements_to_fold->add (stmt);
1986
1987 /* We're duplicating a CALL_EXPR. Find any corresponding
1988 callgraph edges and update or duplicate them. */
1989 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1990 {
1991 struct cgraph_edge *edge;
1992
1993 switch (id->transform_call_graph_edges)
1994 {
1995 case CB_CGE_DUPLICATE:
1996 edge = id->src_node->get_edge (orig_stmt);
1997 if (edge)
1998 {
1999 struct cgraph_edge *old_edge = edge;
2000 profile_count old_cnt = edge->count;
2001 edge = edge->clone (id->dst_node, call_stmt,
2002 gimple_uid (stmt),
2003 num, den,
2004 true);
2005
2006 /* Speculative calls consist of two edges - direct and
2007 indirect. Duplicate the whole thing and distribute
2008 frequencies accordingly. */
2009 if (edge->speculative)
2010 {
2011 struct cgraph_edge *direct, *indirect;
2012 struct ipa_ref *ref;
2013
2014 gcc_assert (!edge->indirect_unknown_callee);
2015 old_edge->speculative_call_info (direct, indirect, ref);
2016
2017 profile_count indir_cnt = indirect->count;
2018 indirect = indirect->clone (id->dst_node, call_stmt,
2019 gimple_uid (stmt),
2020 num, den,
2021 true);
2022
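/* Split the count of the copied block between the direct and the indirect
edge in the same proportion the two edges had in the original call. */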
2023 profile_probability prob
2024 = indir_cnt.probability_in (old_cnt + indir_cnt);
2025 indirect->count
2026 = copy_basic_block->count.apply_probability (prob);
2027 edge->count = copy_basic_block->count - indirect->count;
2028 id->dst_node->clone_reference (ref, stmt);
2029 }
2030 else
2031 edge->count = copy_basic_block->count;
2032 }
2033 break;
2034
2035 case CB_CGE_MOVE_CLONES:
2036 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2037 call_stmt);
2038 edge = id->dst_node->get_edge (stmt);
2039 break;
2040
2041 case CB_CGE_MOVE:
2042 edge = id->dst_node->get_edge (orig_stmt);
2043 if (edge)
2044 edge->set_call_stmt (call_stmt);
2045 break;
2046
2047 default:
2048 gcc_unreachable ();
2049 }
2050
2051 /* Constant propagation on arguments done during inlining
2052 may create a new direct call. Produce an edge for it. */
2053 if ((!edge
2054 || (edge->indirect_inlining_edge
2055 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2056 && id->dst_node->definition
2057 && (fn = gimple_call_fndecl (stmt)) != NULL)
2058 {
2059 struct cgraph_node *dest = cgraph_node::get_create (fn);
2060
2061 /* We have a missing edge in the callgraph. This can happen
2062 when previous inlining turned an indirect call into a
2063 direct call by constant propagating arguments, or when we are
2064 producing a dead clone (for further cloning). In all
2065 other cases we hit a bug (incorrect node sharing is the
2066 most common reason for missing edges). */
2067 gcc_assert (!dest->definition
2068 || dest->address_taken
2069 || !id->src_node->definition
2070 || !id->dst_node->definition);
2071 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2072 id->dst_node->create_edge_including_clones
2073 (dest, orig_stmt, call_stmt, bb->count,
2074 CIF_ORIGINALLY_INDIRECT_CALL);
2075 else
2076 id->dst_node->create_edge (dest, call_stmt,
2077 bb->count)->inline_failed
2078 = CIF_ORIGINALLY_INDIRECT_CALL;
2079 if (dump_file)
2080 {
2081 fprintf (dump_file, "Created new direct edge to %s\n",
2082 dest->name ());
2083 }
2084 }
2085
2086 notice_special_calls (as_a <gcall *> (stmt));
2087 }
2088
2089 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2090 id->eh_map, id->eh_lp_nr);
2091
2092 gsi_next (&copy_gsi);
2093 }
2094 while (!gsi_end_p (copy_gsi));
2095
2096 copy_gsi = gsi_last_bb (copy_basic_block);
2097 }
2098
2099 return copy_basic_block;
2100 }
2101
2102 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2103 form is quite easy, since the dominator relationship for the old basic
2104 blocks does not change.
2105
2106 There is, however, an exception: inlining might change the dominator
2107 relation across EH edges from basic blocks within the inlined function
2108 to landing pads in the function we inline into.
2109
2110 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2111 to gimple regs. Otherwise, it marks the PHI_RESULT of such
2112 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2113 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2114 set, which means that there will be no overlapping live ranges
2115 for the underlying symbol.
2116
2117 This might change in the future if we allow redirecting of EH edges;
2118 we might then want to change the way the CFG is built pre-inlining
2119 to include all the possible edges. */
2120 static void
2121 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2122 bool can_throw, bool nonlocal_goto)
2123 {
2124 edge e;
2125 edge_iterator ei;
2126
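/* Look only at edges that leave the copied region: their destination either
has no AUX pointer (it belongs to the function we inline into) or maps
back to the entry block. */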
2127 FOR_EACH_EDGE (e, ei, bb->succs)
2128 if (!e->dest->aux
2129 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2130 {
2131 gphi *phi;
2132 gphi_iterator si;
2133
2134 if (!nonlocal_goto)
2135 gcc_assert (e->flags & EDGE_EH);
2136
2137 if (!can_throw)
2138 gcc_assert (!(e->flags & EDGE_EH));
2139
2140 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2141 {
2142 edge re;
2143
2144 phi = si.phi ();
2145
2146 /* For abnormal goto/call edges the receiver can be the
2147 ENTRY_BLOCK. Do not assert this cannot happen. */
2148
2149 gcc_assert ((e->flags & EDGE_EH)
2150 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2151
2152 re = find_edge (ret_bb, e->dest);
2153 gcc_checking_assert (re);
2154 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2155 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2156
2157 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2158 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2159 }
2160 }
2161 }
2162
2163
2164 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2165 accordingly. Assume the aux pointers point to the copies of each BB.
2166 Return true if any debug stmts are left after a statement that must end
2167 the basic block. */
2168
2169 static bool
2170 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2171 basic_block ret_bb, basic_block abnormal_goto_dest,
2172 copy_body_data *id)
2173 {
2174 basic_block new_bb = (basic_block) bb->aux;
2175 edge_iterator ei;
2176 edge old_edge;
2177 gimple_stmt_iterator si;
2178 bool need_debug_cleanup = false;
2179
2180 /* Use the indices from the original blocks to create edges for the
2181 new ones. */
2182 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2183 if (!(old_edge->flags & EDGE_EH))
2184 {
2185 edge new_edge;
2186 int flags = old_edge->flags;
2187 location_t locus = old_edge->goto_locus;
2188
2189 /* Return edges do get a FALLTHRU flag when they get inlined. */
2190 if (old_edge->dest->index == EXIT_BLOCK
2191 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2192 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2193 flags |= EDGE_FALLTHRU;
2194
2195 new_edge
2196 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2197 new_edge->probability = old_edge->probability;
2198 new_edge->goto_locus = remap_location (locus, id);
2199 }
2200
2201 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2202 return false;
2203
2204 /* When doing function splitting, we must decrease the count of the return
2205 block that was previously reachable from a block we did not copy. */
2206 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2207 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2208 if (old_edge->src->index != ENTRY_BLOCK
2209 && !old_edge->src->aux)
2210 new_bb->count -= old_edge->count ().apply_scale (num, den);
2211
2212 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2213 {
2214 gimple *copy_stmt;
2215 bool can_throw, nonlocal_goto;
2216
2217 copy_stmt = gsi_stmt (si);
2218 if (!is_gimple_debug (copy_stmt))
2219 update_stmt (copy_stmt);
2220
2221 /* Do this before the possible split_block. */
2222 gsi_next (&si);
2223
2224 /* If this tree could throw an exception, there are two
2225 cases where we need to add abnormal edge(s): the
2226 tree wasn't in a region and there is a "current
2227 region" in the caller; or the original tree had
2228 EH edges. In both cases split the block after the tree,
2229 and add abnormal edge(s) as needed; we need both
2230 those from the callee and the caller.
2231 We check whether the copy can throw, because the const
2232 propagation can change an INDIRECT_REF which throws
2233 into a COMPONENT_REF which doesn't. If the copy
2234 can throw, the original could also throw. */
2235 can_throw = stmt_can_throw_internal (copy_stmt);
2236 nonlocal_goto
2237 = (stmt_can_make_abnormal_goto (copy_stmt)
2238 && !computed_goto_p (copy_stmt));
2239
2240 if (can_throw || nonlocal_goto)
2241 {
2242 if (!gsi_end_p (si))
2243 {
2244 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2245 gsi_next (&si);
2246 if (gsi_end_p (si))
2247 need_debug_cleanup = true;
2248 }
2249 if (!gsi_end_p (si))
2250 /* Note that bb's predecessor edges aren't necessarily
2251 right at this point; split_block doesn't care. */
2252 {
2253 edge e = split_block (new_bb, copy_stmt);
2254
2255 new_bb = e->dest;
2256 new_bb->aux = e->src->aux;
2257 si = gsi_start_bb (new_bb);
2258 }
2259 }
2260
2261 bool update_probs = false;
2262
2263 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2264 {
2265 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2266 update_probs = true;
2267 }
2268 else if (can_throw)
2269 {
2270 make_eh_edges (copy_stmt);
2271 update_probs = true;
2272 }
2273
2274 /* EH edges may not match old edges. Copy as much as possible. */
2275 if (update_probs)
2276 {
2277 edge e;
2278 edge_iterator ei;
2279 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2280
2281 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2282 if ((old_edge->flags & EDGE_EH)
2283 && (e = find_edge (copy_stmt_bb,
2284 (basic_block) old_edge->dest->aux))
2285 && (e->flags & EDGE_EH))
2286 e->probability = old_edge->probability;
2287
2288 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2289 if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2290 e->probability = profile_probability::never ();
2291 }
2292
2293
2294 /* If the call we inline cannot make an abnormal goto, do not add
2295 additional abnormal edges but only retain those already present
2296 in the original function body. */
2297 if (abnormal_goto_dest == NULL)
2298 nonlocal_goto = false;
2299 if (nonlocal_goto)
2300 {
2301 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2302
2303 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2304 nonlocal_goto = false;
2305 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2306 in OpenMP regions which aren't allowed to be left abnormally.
2307 So, no need to add an abnormal edge in that case. */
2308 else if (is_gimple_call (copy_stmt)
2309 && gimple_call_internal_p (copy_stmt)
2310 && (gimple_call_internal_fn (copy_stmt)
2311 == IFN_ABNORMAL_DISPATCHER)
2312 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2313 nonlocal_goto = false;
2314 else
2315 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2316 EDGE_ABNORMAL);
2317 }
2318
2319 if ((can_throw || nonlocal_goto)
2320 && gimple_in_ssa_p (cfun))
2321 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2322 can_throw, nonlocal_goto);
2323 }
2324 return need_debug_cleanup;
2325 }
2326
2327 /* Copy the PHIs. All blocks and edges are copied, some blocks
2328 were possibly split and new outgoing EH edges inserted.
2329 BB points to the block of the original function and AUX pointers link
2330 the original and newly copied blocks. */
2331
2332 static void
2333 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2334 {
2335 basic_block const new_bb = (basic_block) bb->aux;
2336 edge_iterator ei;
2337 gphi *phi;
2338 gphi_iterator si;
2339 edge new_edge;
2340 bool inserted = false;
2341
2342 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2343 {
2344 tree res, new_res;
2345 gphi *new_phi;
2346
2347 phi = si.phi ();
2348 res = PHI_RESULT (phi);
2349 new_res = res;
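/* Only copy PHIs for real operands; the virtual operand web is recreated
from scratch after the body is copied. */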
2350 if (!virtual_operand_p (res))
2351 {
2352 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2353 if (EDGE_COUNT (new_bb->preds) == 0)
2354 {
2355 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2356 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2357 }
2358 else
2359 {
2360 new_phi = create_phi_node (new_res, new_bb);
2361 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2362 {
2363 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2364 bb);
2365 tree arg;
2366 tree new_arg;
2367 edge_iterator ei2;
2368 location_t locus;
2369
2370 /* When doing partial cloning, we allow PHIs on the entry
2371 block as long as all the arguments are the same.
2372 Find any input edge to determine the argument to copy. */
2373 if (!old_edge)
2374 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2375 if (!old_edge->src->aux)
2376 break;
2377
2378 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2379 new_arg = arg;
2380 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2381 gcc_assert (new_arg);
2382 /* With return slot optimization we can end up with
2383 non-gimple (foo *)&this->m, fix that here. */
2384 if (TREE_CODE (new_arg) != SSA_NAME
2385 && TREE_CODE (new_arg) != FUNCTION_DECL
2386 && !is_gimple_val (new_arg))
2387 {
2388 gimple_seq stmts = NULL;
2389 new_arg = force_gimple_operand (new_arg, &stmts, true,
2390 NULL);
2391 gsi_insert_seq_on_edge (new_edge, stmts);
2392 inserted = true;
2393 }
2394 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2395 locus = remap_location (locus, id);
2396 add_phi_arg (new_phi, new_arg, new_edge, locus);
2397 }
2398 }
2399 }
2400 }
2401
2402 /* Commit the delayed edge insertions. */
2403 if (inserted)
2404 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2405 gsi_commit_one_edge_insert (new_edge, NULL);
2406 }
2407
2408
2409 /* Wrapper for remap_decl so it can be used as a callback. */
2410
2411 static tree
2412 remap_decl_1 (tree decl, void *data)
2413 {
2414 return remap_decl (decl, (copy_body_data *) data);
2415 }
2416
2417 /* Build the struct function and associated data structures for the new clone
2418 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2419 cfun to the function of new_fndecl (and current_function_decl too). */
2420
2421 static void
2422 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2423 {
2424 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2425
2426 if (!DECL_ARGUMENTS (new_fndecl))
2427 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2428 if (!DECL_RESULT (new_fndecl))
2429 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2430
2431 /* Register specific tree functions. */
2432 gimple_register_cfg_hooks ();
2433
2434 /* Get clean struct function. */
2435 push_struct_function (new_fndecl);
2436
2437 /* We will rebuild these, so just sanity check that they are empty. */
2438 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2439 gcc_assert (cfun->local_decls == NULL);
2440 gcc_assert (cfun->cfg == NULL);
2441 gcc_assert (cfun->decl == new_fndecl);
2442
2443 /* Copy items we preserve during cloning. */
2444 cfun->static_chain_decl = src_cfun->static_chain_decl;
2445 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2446 cfun->function_end_locus = src_cfun->function_end_locus;
2447 cfun->curr_properties = src_cfun->curr_properties;
2448 cfun->last_verified = src_cfun->last_verified;
2449 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2450 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2451 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2452 cfun->stdarg = src_cfun->stdarg;
2453 cfun->after_inlining = src_cfun->after_inlining;
2454 cfun->can_throw_non_call_exceptions
2455 = src_cfun->can_throw_non_call_exceptions;
2456 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2457 cfun->returns_struct = src_cfun->returns_struct;
2458 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2459
2460 init_empty_tree_cfg ();
2461
2462 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2463
2464 profile_count num = count;
2465 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2466 profile_count::adjust_for_ipa_scaling (&num, &den);
2467
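/* Scale the entry and exit block counts of the new function by the ratio of
the requested COUNT to the entry count of the source function. */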
2468 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2469 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2470 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2471 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2472 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2473 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2474 if (src_cfun->eh)
2475 init_eh_for_function ();
2476
2477 if (src_cfun->gimple_df)
2478 {
2479 init_tree_ssa (cfun);
2480 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2481 if (cfun->gimple_df->in_ssa_p)
2482 init_ssa_operands (cfun);
2483 }
2484 }
2485
2486 /* Helper function for copy_cfg_body. Move debug stmts from the end
2487 of NEW_BB to the beginning of successor basic blocks when needed. If the
2488 successor has multiple predecessors, reset the values of the debug stmts;
2489 otherwise keep them. */
2490
2491 static void
2492 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2493 {
2494 edge e;
2495 edge_iterator ei;
2496 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2497
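/* There is nothing to do unless the block ends in trailing debug stmts that
follow a statement which may throw or make an abnormal goto. */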
2498 if (gsi_end_p (si)
2499 || gsi_one_before_end_p (si)
2500 || !(stmt_can_throw_internal (gsi_stmt (si))
2501 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2502 return;
2503
2504 FOR_EACH_EDGE (e, ei, new_bb->succs)
2505 {
2506 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2507 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2508 while (is_gimple_debug (gsi_stmt (ssi)))
2509 {
2510 gimple *stmt = gsi_stmt (ssi);
2511 gdebug *new_stmt;
2512 tree var;
2513 tree value;
2514
2515 /* For the last edge move the debug stmts instead of copying
2516 them. */
2517 if (ei_one_before_end_p (ei))
2518 {
2519 si = ssi;
2520 gsi_prev (&ssi);
2521 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2522 {
2523 gimple_debug_bind_reset_value (stmt);
2524 gimple_set_location (stmt, UNKNOWN_LOCATION);
2525 }
2526 gsi_remove (&si, false);
2527 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2528 continue;
2529 }
2530
2531 if (gimple_debug_bind_p (stmt))
2532 {
2533 var = gimple_debug_bind_get_var (stmt);
2534 if (single_pred_p (e->dest))
2535 {
2536 value = gimple_debug_bind_get_value (stmt);
2537 value = unshare_expr (value);
2538 new_stmt = gimple_build_debug_bind (var, value, stmt);
2539 }
2540 else
2541 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2542 }
2543 else if (gimple_debug_source_bind_p (stmt))
2544 {
2545 var = gimple_debug_source_bind_get_var (stmt);
2546 value = gimple_debug_source_bind_get_value (stmt);
2547 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2548 }
2549 else if (gimple_debug_nonbind_marker_p (stmt))
2550 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2551 else
2552 gcc_unreachable ();
2553 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2554 id->debug_stmts.safe_push (new_stmt);
2555 gsi_prev (&ssi);
2556 }
2557 }
2558 }
2559
2560 /* Make a copy of the sub-loops of SRC_PARENT and place them
2561 as children of DEST_PARENT. */
2562
2563 static void
2564 copy_loops (copy_body_data *id,
2565 struct loop *dest_parent, struct loop *src_parent)
2566 {
2567 struct loop *src_loop = src_parent->inner;
2568 while (src_loop)
2569 {
2570 if (!id->blocks_to_copy
2571 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2572 {
2573 struct loop *dest_loop = alloc_loop ();
2574
2575 /* Assign the new loop its header and latch and associate
2576 those with the new loop. */
2577 dest_loop->header = (basic_block)src_loop->header->aux;
2578 dest_loop->header->loop_father = dest_loop;
2579 if (src_loop->latch != NULL)
2580 {
2581 dest_loop->latch = (basic_block)src_loop->latch->aux;
2582 dest_loop->latch->loop_father = dest_loop;
2583 }
2584
2585 /* Copy loop meta-data. */
2586 copy_loop_info (src_loop, dest_loop);
2587
2588 /* Finally place it into the loop array and the loop tree. */
2589 place_new_loop (cfun, dest_loop);
2590 flow_loop_tree_node_add (dest_parent, dest_loop);
2591
2592 dest_loop->safelen = src_loop->safelen;
2593 if (src_loop->unroll)
2594 {
2595 dest_loop->unroll = src_loop->unroll;
2596 cfun->has_unroll = true;
2597 }
2598 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2599 if (src_loop->force_vectorize)
2600 {
2601 dest_loop->force_vectorize = true;
2602 cfun->has_force_vectorize_loops = true;
2603 }
2604 if (src_loop->simduid)
2605 {
2606 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2607 cfun->has_simduid_loops = true;
2608 }
2609
2610 /* Recurse. */
2611 copy_loops (id, dest_loop, src_loop);
2612 }
2613 src_loop = src_loop->next;
2614 }
2615 }
2616
2617 /* Call cgraph_edge::redirect_call_stmt_to_callee on all calls in BB. */
2618
2619 void
2620 redirect_all_calls (copy_body_data * id, basic_block bb)
2621 {
2622 gimple_stmt_iterator si;
2623 gimple *last = last_stmt (bb);
2624 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2625 {
2626 gimple *stmt = gsi_stmt (si);
2627 if (is_gimple_call (stmt))
2628 {
2629 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2630 if (edge)
2631 {
2632 edge->redirect_call_stmt_to_callee ();
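/* If, after redirection, the last call of the block can no longer throw,
drop its EH region and remove the now-dead EH edges. */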
2633 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2634 gimple_purge_dead_eh_edges (bb);
2635 }
2636 }
2637 }
2638 }
2639
2640 /* Make a copy of the body of FN so that it can be inserted inline in
2641 another function. Walks FN via CFG, returns new fndecl. */
2642
2643 static tree
2644 copy_cfg_body (copy_body_data * id,
2645 basic_block entry_block_map, basic_block exit_block_map,
2646 basic_block new_entry)
2647 {
2648 tree callee_fndecl = id->src_fn;
2649 /* Original cfun for the callee, doesn't change. */
2650 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2651 struct function *cfun_to_copy;
2652 basic_block bb;
2653 tree new_fndecl = NULL;
2654 bool need_debug_cleanup = false;
2655 int last;
2656 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2657 profile_count num = entry_block_map->count;
2658
2659 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2660
2661 /* Register specific tree functions. */
2662 gimple_register_cfg_hooks ();
2663
2664 /* If we are inlining just a region of the function, make sure to connect
2665 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2666 part of a loop, we must compute the frequency and probability of
2667 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2668 probabilities of edges incoming from the nonduplicated region. */
2669 if (new_entry)
2670 {
2671 edge e;
2672 edge_iterator ei;
2673 den = profile_count::zero ();
2674
2675 FOR_EACH_EDGE (e, ei, new_entry->preds)
2676 if (!e->src->aux)
2677 den += e->count ();
2678 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2679 }
2680
2681 profile_count::adjust_for_ipa_scaling (&num, &den);
2682
2683 /* Must have a CFG here at this point. */
2684 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2685 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2686
2687
2688 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2689 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2690 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2691 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2692
2693 /* Duplicate any exception-handling regions. */
2694 if (cfun->eh)
2695 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2696 remap_decl_1, id);
2697
2698 /* Use aux pointers to map the original blocks to their copies. */
2699 FOR_EACH_BB_FN (bb, cfun_to_copy)
2700 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2701 {
2702 basic_block new_bb = copy_bb (id, bb, num, den);
2703 bb->aux = new_bb;
2704 new_bb->aux = bb;
2705 new_bb->loop_father = entry_block_map->loop_father;
2706 }
2707
2708 last = last_basic_block_for_fn (cfun);
2709
2710 /* Now that we've duplicated the blocks, duplicate their edges. */
2711 basic_block abnormal_goto_dest = NULL;
2712 if (id->call_stmt
2713 && stmt_can_make_abnormal_goto (id->call_stmt))
2714 {
2715 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2716
2717 bb = gimple_bb (id->call_stmt);
2718 gsi_next (&gsi);
2719 if (gsi_end_p (gsi))
2720 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2721 }
2722 FOR_ALL_BB_FN (bb, cfun_to_copy)
2723 if (!id->blocks_to_copy
2724 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2725 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2726 abnormal_goto_dest, id);
2727
2728 if (new_entry)
2729 {
2730 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2731 EDGE_FALLTHRU);
2732 e->probability = profile_probability::always ();
2733 }
2734
2735 /* Duplicate the loop tree, if available and wanted. */
2736 if (loops_for_fn (src_cfun) != NULL
2737 && current_loops != NULL)
2738 {
2739 copy_loops (id, entry_block_map->loop_father,
2740 get_loop (src_cfun, 0));
2741 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2742 loops_state_set (LOOPS_NEED_FIXUP);
2743 }
2744
2745 /* If the loop tree in the source function needed fixup, mark the
2746 destination loop tree for fixup, too. */
2747 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2748 loops_state_set (LOOPS_NEED_FIXUP);
2749
2750 if (gimple_in_ssa_p (cfun))
2751 FOR_ALL_BB_FN (bb, cfun_to_copy)
2752 if (!id->blocks_to_copy
2753 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2754 copy_phis_for_bb (bb, id);
2755
2756 FOR_ALL_BB_FN (bb, cfun_to_copy)
2757 if (bb->aux)
2758 {
2759 if (need_debug_cleanup
2760 && bb->index != ENTRY_BLOCK
2761 && bb->index != EXIT_BLOCK)
2762 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2763 /* Update call edge destinations. This cannot be done before loop
2764 info is updated, because we may split basic blocks. */
2765 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2766 && bb->index != ENTRY_BLOCK
2767 && bb->index != EXIT_BLOCK)
2768 redirect_all_calls (id, (basic_block)bb->aux);
2769 ((basic_block)bb->aux)->aux = NULL;
2770 bb->aux = NULL;
2771 }
2772
2773 /* Zero out AUX fields of newly created blocks during EH edge
2774 insertion. */
2775 for (; last < last_basic_block_for_fn (cfun); last++)
2776 {
2777 if (need_debug_cleanup)
2778 maybe_move_debug_stmts_to_successors (id,
2779 BASIC_BLOCK_FOR_FN (cfun, last));
2780 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2781 /* Update call edge destinations. This cannot be done before loop
2782 info is updated, because we may split basic blocks. */
2783 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2784 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2785 }
2786 entry_block_map->aux = NULL;
2787 exit_block_map->aux = NULL;
2788
2789 if (id->eh_map)
2790 {
2791 delete id->eh_map;
2792 id->eh_map = NULL;
2793 }
2794 if (id->dependence_map)
2795 {
2796 delete id->dependence_map;
2797 id->dependence_map = NULL;
2798 }
2799
2800 return new_fndecl;
2801 }
2802
2803 /* Copy the debug STMT using ID. We deal with these statements in a
2804 special way: if any variable in their VALUE expression wasn't
2805 remapped yet, we won't remap it, because that would get decl uids
2806 out of sync, causing codegen differences between -g and -g0. If
2807 this arises, we drop the VALUE expression altogether. */
2808
2809 static void
2810 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2811 {
2812 tree t, *n;
2813 struct walk_stmt_info wi;
2814
2815 if (gimple_block (stmt))
2816 {
2817 n = id->decl_map->get (gimple_block (stmt));
2818 gimple_set_block (stmt, n ? *n : id->block);
2819 }
2820
2821 if (gimple_debug_nonbind_marker_p (stmt))
2822 return;
2823
2824 /* Remap all the operands in COPY. */
2825 memset (&wi, 0, sizeof (wi));
2826 wi.info = id;
2827
2828 processing_debug_stmt = 1;
2829
2830 if (gimple_debug_source_bind_p (stmt))
2831 t = gimple_debug_source_bind_get_var (stmt);
2832 else if (gimple_debug_bind_p (stmt))
2833 t = gimple_debug_bind_get_var (stmt);
2834 else
2835 gcc_unreachable ();
2836
2837 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2838 && (n = id->debug_map->get (t)))
2839 {
2840 gcc_assert (VAR_P (*n));
2841 t = *n;
2842 }
2843 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2844 /* T is a non-localized variable. */;
2845 else
2846 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2847
2848 if (gimple_debug_bind_p (stmt))
2849 {
2850 gimple_debug_bind_set_var (stmt, t);
2851
2852 if (gimple_debug_bind_has_value_p (stmt))
2853 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2854 remap_gimple_op_r, &wi, NULL);
2855
2856 /* Punt if any decl couldn't be remapped. */
2857 if (processing_debug_stmt < 0)
2858 gimple_debug_bind_reset_value (stmt);
2859 }
2860 else if (gimple_debug_source_bind_p (stmt))
2861 {
2862 gimple_debug_source_bind_set_var (stmt, t);
2863 /* When inlining, if the source bind refers to one of the optimized
2864 away parameters, change the source bind into a normal debug bind
2865 referring to the corresponding DEBUG_EXPR_DECL that should have
2866 been bound before the call stmt. */
2867 t = gimple_debug_source_bind_get_value (stmt);
2868 if (t != NULL_TREE
2869 && TREE_CODE (t) == PARM_DECL
2870 && id->call_stmt)
2871 {
2872 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2873 unsigned int i;
2874 if (debug_args != NULL)
2875 {
2876 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2877 if ((**debug_args)[i] == DECL_ORIGIN (t)
2878 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2879 {
2880 t = (**debug_args)[i + 1];
2881 stmt->subcode = GIMPLE_DEBUG_BIND;
2882 gimple_debug_bind_set_value (stmt, t);
2883 break;
2884 }
2885 }
2886 }
2887 if (gimple_debug_source_bind_p (stmt))
2888 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2889 remap_gimple_op_r, &wi, NULL);
2890 }
2891
2892 processing_debug_stmt = 0;
2893
2894 update_stmt (stmt);
2895 }
2896
2897 /* Process deferred debug stmts. In order to give values better odds
2898 of being successfully remapped, we delay the processing of debug
2899 stmts until all other stmts that might require remapping are
2900 processed. */
2901
2902 static void
2903 copy_debug_stmts (copy_body_data *id)
2904 {
2905 size_t i;
2906 gdebug *stmt;
2907
2908 if (!id->debug_stmts.exists ())
2909 return;
2910
2911 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2912 copy_debug_stmt (stmt, id);
2913
2914 id->debug_stmts.release ();
2915 }
2916
2917 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2918 another function. */
2919
2920 static tree
2921 copy_tree_body (copy_body_data *id)
2922 {
2923 tree fndecl = id->src_fn;
2924 tree body = DECL_SAVED_TREE (fndecl);
2925
2926 walk_tree (&body, copy_tree_body_r, id, NULL);
2927
2928 return body;
2929 }
2930
2931 /* Make a copy of the body of FN so that it can be inserted inline in
2932 another function. */
2933
2934 static tree
2935 copy_body (copy_body_data *id,
2936 basic_block entry_block_map, basic_block exit_block_map,
2937 basic_block new_entry)
2938 {
2939 tree fndecl = id->src_fn;
2940 tree body;
2941
2942 /* If this body has a CFG, walk CFG and copy. */
2943 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2944 body = copy_cfg_body (id, entry_block_map, exit_block_map,
2945 new_entry);
2946 copy_debug_stmts (id);
2947
2948 return body;
2949 }
2950
2951 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2952 defined in function FN, or of a data member thereof. */
2953
2954 static bool
2955 self_inlining_addr_expr (tree value, tree fn)
2956 {
2957 tree var;
2958
2959 if (TREE_CODE (value) != ADDR_EXPR)
2960 return false;
2961
2962 var = get_base_address (TREE_OPERAND (value, 0));
2963
2964 return var && auto_var_in_fn_p (var, fn);
2965 }
2966
2967 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2968 lexical block and line number information from base_stmt, if given,
2969 or from the last stmt of the block otherwise. */
2970
2971 static gimple *
2972 insert_init_debug_bind (copy_body_data *id,
2973 basic_block bb, tree var, tree value,
2974 gimple *base_stmt)
2975 {
2976 gimple *note;
2977 gimple_stmt_iterator gsi;
2978 tree tracked_var;
2979
2980 if (!gimple_in_ssa_p (id->src_cfun))
2981 return NULL;
2982
2983 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
2984 return NULL;
2985
2986 tracked_var = target_for_debug_bind (var);
2987 if (!tracked_var)
2988 return NULL;
2989
2990 if (bb)
2991 {
2992 gsi = gsi_last_bb (bb);
2993 if (!base_stmt && !gsi_end_p (gsi))
2994 base_stmt = gsi_stmt (gsi);
2995 }
2996
2997 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
2998
2999 if (bb)
3000 {
3001 if (!gsi_end_p (gsi))
3002 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3003 else
3004 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3005 }
3006
3007 return note;
3008 }
3009
3010 static void
3011 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3012 {
3013 /* If VAR represents a zero-sized variable, the assignment statement
3014 may result in no gimple statements. */
3015 if (init_stmt)
3016 {
3017 gimple_stmt_iterator si = gsi_last_bb (bb);
3018
3019 /* We can end up with init statements that store to a non-register
3020 from a rhs with a conversion. Handle that here by forcing the
3021 rhs into a temporary. gimple_regimplify_operands is not
3022 prepared to do this for us. */
3023 if (!is_gimple_debug (init_stmt)
3024 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3025 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3026 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3027 {
3028 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3029 gimple_expr_type (init_stmt),
3030 gimple_assign_rhs1 (init_stmt));
3031 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3032 GSI_NEW_STMT);
3033 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3034 gimple_assign_set_rhs1 (init_stmt, rhs);
3035 }
3036 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3037 gimple_regimplify_operands (init_stmt, &si);
3038
3039 if (!is_gimple_debug (init_stmt))
3040 {
3041 tree def = gimple_assign_lhs (init_stmt);
3042 insert_init_debug_bind (id, bb, def, def, init_stmt);
3043 }
3044 }
3045 }
3046
3047 /* Initialize parameter P with VALUE. If needed, produce an init statement
3048 at the end of BB. When BB is NULL, we return the init statement to be
3049 output later. */
3050 static gimple *
3051 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3052 basic_block bb, tree *vars)
3053 {
3054 gimple *init_stmt = NULL;
3055 tree var;
3056 tree rhs = value;
3057 tree def = (gimple_in_ssa_p (cfun)
3058 ? ssa_default_def (id->src_cfun, p) : NULL);
3059
3060 if (value
3061 && value != error_mark_node
3062 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3063 {
3064 /* If we can match up types by promotion/demotion do so. */
3065 if (fold_convertible_p (TREE_TYPE (p), value))
3066 rhs = fold_convert (TREE_TYPE (p), value);
3067 else
3068 {
3069 /* ??? For valid programs we should not end up here.
3070 Still if we end up with truly mismatched types here, fall back
3071 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3072 GIMPLE to the following passes. */
3073 if (!is_gimple_reg_type (TREE_TYPE (value))
3074 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3075 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3076 else
3077 rhs = build_zero_cst (TREE_TYPE (p));
3078 }
3079 }
3080
3081 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3082 here since the type of this decl must be visible to the calling
3083 function. */
3084 var = copy_decl_to_var (p, id);
3085
3086 /* Declare this new variable. */
3087 DECL_CHAIN (var) = *vars;
3088 *vars = var;
3089
3090 /* Make gimplifier happy about this variable. */
3091 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3092
3093 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3094 we would not need to create a new variable here at all, if it
3095 weren't for debug info. Still, we can just use the argument
3096 value. */
3097 if (TREE_READONLY (p)
3098 && !TREE_ADDRESSABLE (p)
3099 && value && !TREE_SIDE_EFFECTS (value)
3100 && !def)
3101 {
3102 /* We may produce non-gimple trees by adding NOPs or introduce
3103 invalid sharing when the operand is not really constant.
3104 It is not a big deal to prohibit constant propagation here as
3105 we will constant propagate in the DOM1 pass anyway. */
3106 if (is_gimple_min_invariant (value)
3107 && useless_type_conversion_p (TREE_TYPE (p),
3108 TREE_TYPE (value))
3109 /* We have to be very careful about ADDR_EXPR. Make sure
3110 the base variable isn't a local variable of the inlined
3111 function, e.g., when doing recursive inlining, direct or
3112 mutually-recursive or whatever, which is why we don't
3113 just test whether fn == current_function_decl. */
3114 && ! self_inlining_addr_expr (value, fn))
3115 {
3116 insert_decl_map (id, p, value);
3117 insert_debug_decl_map (id, p, var);
3118 return insert_init_debug_bind (id, bb, var, value, NULL);
3119 }
3120 }
3121
3122 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3123 that way, when the PARM_DECL is encountered, it will be
3124 automatically replaced by the VAR_DECL. */
3125 insert_decl_map (id, p, var);
3126
3127 /* Even if P was TREE_READONLY, the new VAR should not be.
3128 In the original code, we would have constructed a
3129 temporary, and then the function body would have never
3130 changed the value of P. However, now, we will be
3131 constructing VAR directly. The constructor body may
3132 change its value multiple times as it is being
3133 constructed. Therefore, it must not be TREE_READONLY;
3134 the back-end assumes that a TREE_READONLY variable is
3135 assigned to only once. */
3136 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3137 TREE_READONLY (var) = 0;
3138
3139 /* If there is no setup required and we are in SSA, take the easy route
3140 replacing all SSA names representing the function parameter by the
3141 SSA name passed to the function.
3142
3143 We need to construct a map for the variable anyway as it might be used
3144 in different SSA names when the parameter is assigned to in the function.
3145
3146 Do the replacement at -O0 for const arguments replaced by a constant.
3147 This is important for builtin_constant_p and other constructs requiring
3148 a constant argument to be visible in the inlined function body. */
3149 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3150 && (optimize
3151 || (TREE_READONLY (p)
3152 && is_gimple_min_invariant (rhs)))
3153 && (TREE_CODE (rhs) == SSA_NAME
3154 || is_gimple_min_invariant (rhs))
3155 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3156 {
3157 insert_decl_map (id, def, rhs);
3158 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3159 }
3160
3161 /* If the value of the argument is never used, do not bother initializing
3162 it. */
3163 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3164 {
3165 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3166 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3167 }
3168
3169 /* Initialize this VAR_DECL from the equivalent argument. Convert
3170 the argument to the proper type in case it was promoted. */
3171 if (value)
3172 {
3173 if (rhs == error_mark_node)
3174 {
3175 insert_decl_map (id, p, var);
3176 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3177 }
3178
3179 STRIP_USELESS_TYPE_CONVERSION (rhs);
3180
3181 /* If we are in SSA form properly remap the default definition
3182 or assign to a dummy SSA name if the parameter is unused and
3183 we are not optimizing. */
3184 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3185 {
3186 if (def)
3187 {
3188 def = remap_ssa_name (def, id);
3189 init_stmt = gimple_build_assign (def, rhs);
3190 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3191 set_ssa_default_def (cfun, var, NULL);
3192 }
3193 else if (!optimize)
3194 {
3195 def = make_ssa_name (var);
3196 init_stmt = gimple_build_assign (def, rhs);
3197 }
3198 }
3199 else
3200 init_stmt = gimple_build_assign (var, rhs);
3201
3202 if (bb && init_stmt)
3203 insert_init_stmt (id, bb, init_stmt);
3204 }
3205 return init_stmt;
3206 }
3207
3208 /* Generate code to initialize the parameters of the function at the
3209 top of the stack in ID from the GIMPLE_CALL STMT. */
3210
3211 static void
3212 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3213 tree fn, basic_block bb)
3214 {
3215 tree parms;
3216 size_t i;
3217 tree p;
3218 tree vars = NULL_TREE;
3219 tree static_chain = gimple_call_chain (stmt);
3220
3221 /* Figure out what the parameters are. */
3222 parms = DECL_ARGUMENTS (fn);
3223
3224 /* Loop through the parameter declarations, replacing each with an
3225 equivalent VAR_DECL, appropriately initialized. */
3226 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3227 {
3228 tree val;
3229 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3230 setup_one_parameter (id, p, val, fn, bb, &vars);
3231 }
3232 /* After remapping parameters remap their types. This has to be done
3233 in a second loop over all parameters to appropriately remap
3234 variable sized arrays when the size is specified in a
3235 parameter following the array. */
3236 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3237 {
3238 tree *varp = id->decl_map->get (p);
3239 if (varp && VAR_P (*varp))
3240 {
3241 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3242 ? ssa_default_def (id->src_cfun, p) : NULL);
3243 tree var = *varp;
3244 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3245 /* Also remap the default definition if it was remapped
3246 to the default definition of the parameter replacement
3247 by the parameter setup. */
3248 if (def)
3249 {
3250 tree *defp = id->decl_map->get (def);
3251 if (defp
3252 && TREE_CODE (*defp) == SSA_NAME
3253 && SSA_NAME_VAR (*defp) == var)
3254 TREE_TYPE (*defp) = TREE_TYPE (var);
3255 }
3256 }
3257 }
3258
3259 /* Initialize the static chain. */
3260 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3261 gcc_assert (fn != current_function_decl);
3262 if (p)
3263 {
3264 /* No static chain? Seems like a bug in tree-nested.c. */
3265 gcc_assert (static_chain);
3266
3267 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3268 }
3269
3270 declare_inline_vars (id->block, vars);
3271 }
3272
3273
3274 /* Declare a return variable to replace the RESULT_DECL for the
3275 function we are calling. An appropriate DECL_STMT is returned.
3276 The USE_STMT is filled to contain a use of the declaration to
3277 indicate the return value of the function.
3278
3279 RETURN_SLOT, if non-null, is the place where the result is stored. It
3280 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3281 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3282
3283 The return value is a (possibly null) value that holds the result
3284 as seen by the caller. */
3285
3286 static tree
3287 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3288 basic_block entry_bb)
3289 {
3290 tree callee = id->src_fn;
3291 tree result = DECL_RESULT (callee);
3292 tree callee_type = TREE_TYPE (result);
3293 tree caller_type;
3294 tree var, use;
3295
3296 /* Handle type-mismatches in the function declaration return type
3297 vs. the call expression. */
3298 if (modify_dest)
3299 caller_type = TREE_TYPE (modify_dest);
3300 else
3301 caller_type = TREE_TYPE (TREE_TYPE (callee));
3302
3303 /* We don't need to do anything for functions that don't return anything. */
3304 if (VOID_TYPE_P (callee_type))
3305 return NULL_TREE;
3306
3307 /* If there was a return slot, then the return value is the
3308 dereferenced address of that object. */
3309 if (return_slot)
3310 {
3311 /* The front end shouldn't have used both return_slot and
3312 a modify expression. */
3313 gcc_assert (!modify_dest);
3314 if (DECL_BY_REFERENCE (result))
3315 {
3316 tree return_slot_addr = build_fold_addr_expr (return_slot);
3317 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3318
3319 /* We are going to construct *&return_slot and we can't do that
3320 for variables not believed to be addressable.
3321
3322 FIXME: This check can possibly trigger, because values returned
3323 via return slot optimization are not believed to have their address
3324 taken by alias analysis. */
3325 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3326 var = return_slot_addr;
3327 }
3328 else
3329 {
3330 var = return_slot;
3331 gcc_assert (TREE_CODE (var) != SSA_NAME);
3332 if (TREE_ADDRESSABLE (result))
3333 mark_addressable (var);
3334 }
3335 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3336 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3337 && !DECL_GIMPLE_REG_P (result)
3338 && DECL_P (var))
3339 DECL_GIMPLE_REG_P (var) = 0;
3340 use = NULL;
3341 goto done;
3342 }
3343
3344 /* All types requiring non-trivial constructors should have been handled. */
3345 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3346
3347 /* Attempt to avoid creating a new temporary variable. */
3348 if (modify_dest
3349 && TREE_CODE (modify_dest) != SSA_NAME)
3350 {
3351 bool use_it = false;
3352
3353 /* We can't use MODIFY_DEST if there's type promotion involved. */
3354 if (!useless_type_conversion_p (callee_type, caller_type))
3355 use_it = false;
3356
3357 /* ??? If we're assigning to a variable sized type, then we must
3358 reuse the destination variable, because we've no good way to
3359 create variable sized temporaries at this point. */
3360 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3361 use_it = true;
3362
3363 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3364 reuse it as the result of the call directly. Don't do this if
3365 it would promote MODIFY_DEST to addressable. */
3366 else if (TREE_ADDRESSABLE (result))
3367 use_it = false;
3368 else
3369 {
3370 tree base_m = get_base_address (modify_dest);
3371
3372 /* If the base isn't a decl, then it's a pointer, and we don't
3373 know where that's going to go. */
3374 if (!DECL_P (base_m))
3375 use_it = false;
3376 else if (is_global_var (base_m))
3377 use_it = false;
3378 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3379 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3380 && !DECL_GIMPLE_REG_P (result)
3381 && DECL_GIMPLE_REG_P (base_m))
3382 use_it = false;
3383 else if (!TREE_ADDRESSABLE (base_m))
3384 use_it = true;
3385 }
3386
3387 if (use_it)
3388 {
3389 var = modify_dest;
3390 use = NULL;
3391 goto done;
3392 }
3393 }
3394
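/* A new temporary must be created here, which is only possible for
fixed-size types; variable-sized results were handled above by reusing
MODIFY_DEST. */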
3395 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3396
3397 var = copy_result_decl_to_var (result, id);
3398 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3399
3400 /* Do not have the rest of GCC warn about this variable as it should
3401 not be visible to the user. */
3402 TREE_NO_WARNING (var) = 1;
3403
3404 declare_inline_vars (id->block, var);
3405
3406 /* Build the use expr. If the return type of the function was
3407 promoted, convert it back to the expected type. */
3408 use = var;
3409 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3410 {
3411 /* If we can match up types by promotion/demotion do so. */
3412 if (fold_convertible_p (caller_type, var))
3413 use = fold_convert (caller_type, var);
3414 else
3415 {
3416 /* ??? For valid programs we should not end up here.
3417 Still if we end up with truly mismatched types here, fall back
3418 to using a MEM_REF to not leak invalid GIMPLE to the following
3419 passes. */
3420 /* Prevent var from being written into SSA form. */
3421 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3422 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3423 DECL_GIMPLE_REG_P (var) = false;
3424 else if (is_gimple_reg_type (TREE_TYPE (var)))
3425 TREE_ADDRESSABLE (var) = true;
3426 use = fold_build2 (MEM_REF, caller_type,
3427 build_fold_addr_expr (var),
3428 build_int_cst (ptr_type_node, 0));
3429 }
3430 }
3431
3432 STRIP_USELESS_TYPE_CONVERSION (use);
3433
3434 if (DECL_BY_REFERENCE (result))
3435 {
3436 TREE_ADDRESSABLE (var) = 1;
3437 var = build_fold_addr_expr (var);
3438 }
3439
3440 done:
3441 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3442 way, when the RESULT_DECL is encountered, it will be
3443 automatically replaced by the VAR_DECL.
3444
3445 When returning by reference, ensure that RESULT_DECL remaps to
3446 gimple_val. */
3447 if (DECL_BY_REFERENCE (result)
3448 && !is_gimple_val (var))
3449 {
3450 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3451 insert_decl_map (id, result, temp);
3452 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3453 its default_def SSA_NAME. */
3454 if (gimple_in_ssa_p (id->src_cfun)
3455 && is_gimple_reg (result))
3456 {
3457 temp = make_ssa_name (temp);
3458 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3459 }
3460 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3461 }
3462 else
3463 insert_decl_map (id, result, var);
3464
3465 /* Remember this so we can ignore it in remap_decls. */
3466 id->retvar = var;
3467 return use;
3468 }
3469
3470 /* Determine if the function can be copied. If so return NULL. If
3471 not return a string describing the reason for failure. */
3472
3473 const char *
3474 copy_forbidden (struct function *fun)
3475 {
3476 const char *reason = fun->cannot_be_copied_reason;
3477
3478 /* Only examine the function once. */
3479 if (fun->cannot_be_copied_set)
3480 return reason;
3481
3482 /* We cannot copy a function that receives a non-local goto
3483 because we cannot remap the destination label used in the
3484 function that is performing the non-local goto. */
3485 /* ??? Actually, this should be possible, if we work at it.
3486 No doubt there's just a handful of places that simply
3487 assume it doesn't happen and don't substitute properly. */
3488 if (fun->has_nonlocal_label)
3489 {
3490 reason = G_("function %q+F can never be copied "
3491 "because it receives a non-local goto");
3492 goto fail;
3493 }
3494
3495 if (fun->has_forced_label_in_static)
3496 {
3497 reason = G_("function %q+F can never be copied because it saves "
3498 "address of local label in a static variable");
3499 goto fail;
3500 }
3501
3502 fail:
3503 fun->cannot_be_copied_reason = reason;
3504 fun->cannot_be_copied_set = true;
3505 return reason;
3506 }
3507
3508
3509 static const char *inline_forbidden_reason;
3510
3511 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3512 iff a function cannot be inlined. Also sets the reason why. */
3513
3514 static tree
3515 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3516 struct walk_stmt_info *wip)
3517 {
3518 tree fn = (tree) wip->info;
3519 tree t;
3520 gimple *stmt = gsi_stmt (*gsi);
3521
3522 switch (gimple_code (stmt))
3523 {
3524 case GIMPLE_CALL:
3525 /* Refuse to inline calls to alloca unless the user explicitly forced
3526 it, as this may change the program's memory overhead drastically
3527 when the function using alloca is called in a loop. In the GCC
3528 sources included in SPEC2000, inlining into schedule_block caused it
3529 to require 2GB of RAM instead of 256MB. Don't do so for alloca calls
3530 emitted for VLA objects, as those can't cause unbounded growth
3531 (they're always wrapped inside stack_save/stack_restore regions). */
3532 if (gimple_maybe_alloca_call_p (stmt)
3533 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3534 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3535 {
3536 inline_forbidden_reason
3537 = G_("function %q+F can never be inlined because it uses "
3538 "alloca (override using the always_inline attribute)");
3539 *handled_ops_p = true;
3540 return fn;
3541 }
3542
3543 t = gimple_call_fndecl (stmt);
3544 if (t == NULL_TREE)
3545 break;
3546
3547 /* We cannot inline functions that call setjmp. */
3548 if (setjmp_call_p (t))
3549 {
3550 inline_forbidden_reason
3551 = G_("function %q+F can never be inlined because it uses setjmp");
3552 *handled_ops_p = true;
3553 return t;
3554 }
3555
3556 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3557 switch (DECL_FUNCTION_CODE (t))
3558 {
3559 /* We cannot inline functions that take a variable number of
3560 arguments. */
3561 case BUILT_IN_VA_START:
3562 case BUILT_IN_NEXT_ARG:
3563 case BUILT_IN_VA_END:
3564 inline_forbidden_reason
3565 = G_("function %q+F can never be inlined because it "
3566 "uses variable argument lists");
3567 *handled_ops_p = true;
3568 return t;
3569
3570 case BUILT_IN_LONGJMP:
3571 /* We can't inline functions that call __builtin_longjmp at
3572 all. The non-local goto machinery really requires the
3573 destination be in a different function. If we allow the
3574 function calling __builtin_longjmp to be inlined into the
3575 function calling __builtin_setjmp, Things will Go Awry. */
3576 inline_forbidden_reason
3577 = G_("function %q+F can never be inlined because "
3578 "it uses setjmp-longjmp exception handling");
3579 *handled_ops_p = true;
3580 return t;
3581
3582 case BUILT_IN_NONLOCAL_GOTO:
3583 /* Similarly. */
3584 inline_forbidden_reason
3585 = G_("function %q+F can never be inlined because "
3586 "it uses non-local goto");
3587 *handled_ops_p = true;
3588 return t;
3589
3590 case BUILT_IN_RETURN:
3591 case BUILT_IN_APPLY_ARGS:
3592 /* If a __builtin_apply_args caller would be inlined,
3593 it would be saving arguments of the function it has
3594 been inlined into. Similarly __builtin_return would return
3595 from the function into which it has been inlined. */
3596 inline_forbidden_reason
3597 = G_("function %q+F can never be inlined because "
3598 "it uses __builtin_return or __builtin_apply_args");
3599 *handled_ops_p = true;
3600 return t;
3601
3602 default:
3603 break;
3604 }
3605 break;
3606
3607 case GIMPLE_GOTO:
3608 t = gimple_goto_dest (stmt);
3609
3610 /* We will not inline a function which uses computed goto. The
3611 addresses of its local labels, which may be tucked into
3612 global storage, are of course not constant across
3613 instantiations, which causes unexpected behavior. */
3614 if (TREE_CODE (t) != LABEL_DECL)
3615 {
3616 inline_forbidden_reason
3617 = G_("function %q+F can never be inlined "
3618 "because it contains a computed goto");
3619 *handled_ops_p = true;
3620 return t;
3621 }
3622 break;
3623
3624 default:
3625 break;
3626 }
3627
3628 *handled_ops_p = false;
3629 return NULL_TREE;
3630 }
3631
3632 /* Return true if FNDECL is a function that cannot be inlined into
3633 another one. */
3634
3635 static bool
3636 inline_forbidden_p (tree fndecl)
3637 {
3638 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3639 struct walk_stmt_info wi;
3640 basic_block bb;
3641 bool forbidden_p = false;
3642
3643 /* First check for shared reasons not to copy the code. */
3644 inline_forbidden_reason = copy_forbidden (fun);
3645 if (inline_forbidden_reason != NULL)
3646 return true;
3647
3648 /* Next, walk the statements of the function looking for
3649 constructs we can't handle, or that are non-optimal for inlining. */
3650 hash_set<tree> visited_nodes;
3651 memset (&wi, 0, sizeof (wi));
3652 wi.info = (void *) fndecl;
3653 wi.pset = &visited_nodes;
3654
3655 FOR_EACH_BB_FN (bb, fun)
3656 {
3657 gimple *ret;
3658 gimple_seq seq = bb_seq (bb);
3659 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3660 forbidden_p = (ret != NULL);
3661 if (forbidden_p)
3662 break;
3663 }
3664
3665 return forbidden_p;
3666 }
3667 \f
3668 /* Return false if the function FNDECL cannot be inlined on account of its
3669 attributes, true otherwise. */
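/* Note that the target hook is consulted only if FNDECL carries at least
one attribute recognized by the target's attribute table; a function with
only target-independent attributes is considered inlinable here. */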
3670 static bool
3671 function_attribute_inlinable_p (const_tree fndecl)
3672 {
3673 if (targetm.attribute_table)
3674 {
3675 const_tree a;
3676
3677 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3678 {
3679 const_tree name = TREE_PURPOSE (a);
3680 int i;
3681
3682 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3683 if (is_attribute_p (targetm.attribute_table[i].name, name))
3684 return targetm.function_attribute_inlinable_p (fndecl);
3685 }
3686 }
3687
3688 return true;
3689 }
3690
3691 /* Returns nonzero if FN is a function that does not have any
3692 fundamental inline blocking properties. */
3693
3694 bool
3695 tree_inlinable_function_p (tree fn)
3696 {
3697 bool inlinable = true;
3698 bool do_warning;
3699 tree always_inline;
3700
3701 /* If we've already decided this function shouldn't be inlined,
3702 there's no need to check again. */
3703 if (DECL_UNINLINABLE (fn))
3704 return false;
3705
3706 /* We only warn for functions declared `inline' by the user. */
3707 do_warning = (warn_inline
3708 && DECL_DECLARED_INLINE_P (fn)
3709 && !DECL_NO_INLINE_WARNING_P (fn)
3710 && !DECL_IN_SYSTEM_HEADER (fn));
3711
3712 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3713
3714 if (flag_no_inline
3715 && always_inline == NULL)
3716 {
3717 if (do_warning)
3718 warning (OPT_Winline, "function %q+F can never be inlined because it "
3719 "is suppressed using -fno-inline", fn);
3720 inlinable = false;
3721 }
3722
3723 else if (!function_attribute_inlinable_p (fn))
3724 {
3725 if (do_warning)
3726 warning (OPT_Winline, "function %q+F can never be inlined because it "
3727 "uses attributes conflicting with inlining", fn);
3728 inlinable = false;
3729 }
3730
3731 else if (inline_forbidden_p (fn))
3732 {
3733 /* See if we should warn about uninlinable functions. Previously,
3734 some of these warnings would be issued while trying to expand
3735 the function inline, but that would cause multiple warnings
3736 about functions that would for example call alloca. But since
3737 this is a property of the function, just one warning is enough.
3738 As a bonus we can now give more details about the reason why a
3739 function is not inlinable. */
3740 if (always_inline)
3741 error (inline_forbidden_reason, fn);
3742 else if (do_warning)
3743 warning (OPT_Winline, inline_forbidden_reason, fn);
3744
3745 inlinable = false;
3746 }
3747
3748 /* Squirrel away the result so that we don't have to check again. */
3749 DECL_UNINLINABLE (fn) = !inlinable;
3750
3751 return inlinable;
3752 }
3753
3754 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3755 word size, take a possible memcpy call into account, and return a cost
3756 based on whether we are optimizing for size or speed according to SPEED_P. */
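/* For example, on a hypothetical target where MOVE_MAX_PIECES is 8 and
MOVE_RATIO (speed) is 4, a 24-byte aggregate is costed as
(24 + 7) / 8 == 3 moves, while a 64-byte aggregate exceeds the 32-byte
threshold and is costed as a memcpy call, i.e. 4. */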
3757
3758 int
3759 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3760 {
3761 HOST_WIDE_INT size;
3762
3763 gcc_assert (!VOID_TYPE_P (type));
3764
3765 if (TREE_CODE (type) == VECTOR_TYPE)
3766 {
3767 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3768 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3769 int orig_mode_size
3770 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3771 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3772 return ((orig_mode_size + simd_mode_size - 1)
3773 / simd_mode_size);
3774 }
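/* For example, assuming the preferred SIMD mode for the element type is
128 bits wide, moving a 256-bit vector above is costed as
(32 + 16 - 1) / 16 == 2 SIMD-register moves. */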
3775
3776 size = int_size_in_bytes (type);
3777
3778 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3779 /* Cost of a memcpy call, 3 arguments and the call. */
3780 return 4;
3781 else
3782 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3783 }
3784
3785 /* Returns the cost of operation CODE, according to WEIGHTS. */
3786
3787 static int
3788 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3789 tree op1 ATTRIBUTE_UNUSED, tree op2)
3790 {
3791 switch (code)
3792 {
3793 /* These are "free" conversions, or their presumed cost
3794 is folded into other operations. */
3795 case RANGE_EXPR:
3796 CASE_CONVERT:
3797 case COMPLEX_EXPR:
3798 case PAREN_EXPR:
3799 case VIEW_CONVERT_EXPR:
3800 return 0;
3801
3802 /* Assign cost of 1 to usual operations.
3803 ??? We may consider mapping RTL costs to this. */
3804 case COND_EXPR:
3805 case VEC_COND_EXPR:
3806 case VEC_PERM_EXPR:
3807
3808 case PLUS_EXPR:
3809 case POINTER_PLUS_EXPR:
3810 case POINTER_DIFF_EXPR:
3811 case MINUS_EXPR:
3812 case MULT_EXPR:
3813 case MULT_HIGHPART_EXPR:
3814
3815 case ADDR_SPACE_CONVERT_EXPR:
3816 case FIXED_CONVERT_EXPR:
3817 case FIX_TRUNC_EXPR:
3818
3819 case NEGATE_EXPR:
3820 case FLOAT_EXPR:
3821 case MIN_EXPR:
3822 case MAX_EXPR:
3823 case ABS_EXPR:
3824 case ABSU_EXPR:
3825
3826 case LSHIFT_EXPR:
3827 case RSHIFT_EXPR:
3828 case LROTATE_EXPR:
3829 case RROTATE_EXPR:
3830
3831 case BIT_IOR_EXPR:
3832 case BIT_XOR_EXPR:
3833 case BIT_AND_EXPR:
3834 case BIT_NOT_EXPR:
3835
3836 case TRUTH_ANDIF_EXPR:
3837 case TRUTH_ORIF_EXPR:
3838 case TRUTH_AND_EXPR:
3839 case TRUTH_OR_EXPR:
3840 case TRUTH_XOR_EXPR:
3841 case TRUTH_NOT_EXPR:
3842
3843 case LT_EXPR:
3844 case LE_EXPR:
3845 case GT_EXPR:
3846 case GE_EXPR:
3847 case EQ_EXPR:
3848 case NE_EXPR:
3849 case ORDERED_EXPR:
3850 case UNORDERED_EXPR:
3851
3852 case UNLT_EXPR:
3853 case UNLE_EXPR:
3854 case UNGT_EXPR:
3855 case UNGE_EXPR:
3856 case UNEQ_EXPR:
3857 case LTGT_EXPR:
3858
3859 case CONJ_EXPR:
3860
3861 case PREDECREMENT_EXPR:
3862 case PREINCREMENT_EXPR:
3863 case POSTDECREMENT_EXPR:
3864 case POSTINCREMENT_EXPR:
3865
3866 case REALIGN_LOAD_EXPR:
3867
3868 case WIDEN_SUM_EXPR:
3869 case WIDEN_MULT_EXPR:
3870 case DOT_PROD_EXPR:
3871 case SAD_EXPR:
3872 case WIDEN_MULT_PLUS_EXPR:
3873 case WIDEN_MULT_MINUS_EXPR:
3874 case WIDEN_LSHIFT_EXPR:
3875
3876 case VEC_WIDEN_MULT_HI_EXPR:
3877 case VEC_WIDEN_MULT_LO_EXPR:
3878 case VEC_WIDEN_MULT_EVEN_EXPR:
3879 case VEC_WIDEN_MULT_ODD_EXPR:
3880 case VEC_UNPACK_HI_EXPR:
3881 case VEC_UNPACK_LO_EXPR:
3882 case VEC_UNPACK_FLOAT_HI_EXPR:
3883 case VEC_UNPACK_FLOAT_LO_EXPR:
3884 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3885 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3886 case VEC_PACK_TRUNC_EXPR:
3887 case VEC_PACK_SAT_EXPR:
3888 case VEC_PACK_FIX_TRUNC_EXPR:
3889 case VEC_PACK_FLOAT_EXPR:
3890 case VEC_WIDEN_LSHIFT_HI_EXPR:
3891 case VEC_WIDEN_LSHIFT_LO_EXPR:
3892 case VEC_DUPLICATE_EXPR:
3893 case VEC_SERIES_EXPR:
3894
3895 return 1;
3896
3897 /* A few special cases of expensive operations. This is useful
3898 for avoiding the inlining of functions that have too many of these. */
3899 case TRUNC_DIV_EXPR:
3900 case CEIL_DIV_EXPR:
3901 case FLOOR_DIV_EXPR:
3902 case ROUND_DIV_EXPR:
3903 case EXACT_DIV_EXPR:
3904 case TRUNC_MOD_EXPR:
3905 case CEIL_MOD_EXPR:
3906 case FLOOR_MOD_EXPR:
3907 case ROUND_MOD_EXPR:
3908 case RDIV_EXPR:
3909 if (TREE_CODE (op2) != INTEGER_CST)
3910 return weights->div_mod_cost;
3911 return 1;
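/* Division or modulo by an INTEGER_CST gets the unit cost above on the
assumption that it is strength-reduced to cheaper multiply/shift
sequences during expansion; only division by a non-constant is charged
div_mod_cost. */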
3912
3913 /* Bit-field insertion needs several shift and mask operations. */
3914 case BIT_INSERT_EXPR:
3915 return 3;
3916
3917 default:
3918 /* We expect a copy assignment with no operator. */
3919 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3920 return 0;
3921 }
3922 }
3923
3924
3925 /* Estimate number of instructions that will be created by expanding
3926 the statements in the statement sequence STMTS.
3927 WEIGHTS contains weights attributed to various constructs. */
3928
3929 int
3930 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3931 {
3932 int cost;
3933 gimple_stmt_iterator gsi;
3934
3935 cost = 0;
3936 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3937 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3938
3939 return cost;
3940 }
3941
3942
3943 /* Estimate number of instructions that will be created by expanding STMT.
3944 WEIGHTS contains weights attributed to various constructs. */
3945
3946 int
3947 estimate_num_insns (gimple *stmt, eni_weights *weights)
3948 {
3949 unsigned cost, i;
3950 enum gimple_code code = gimple_code (stmt);
3951 tree lhs;
3952 tree rhs;
3953
3954 switch (code)
3955 {
3956 case GIMPLE_ASSIGN:
3957 /* Try to estimate the cost of assignments. We have two cases to
3958 deal with:
3959 1) Simple assignments to registers;
3960 2) Stores to things that must live in memory. This includes
3961 "normal" stores to scalars, but also assignments of large
3962 structures, or constructors of big arrays;
3963
3964 Let us look at these two cases, assuming we have "a = b + C":
3965 <GIMPLE_ASSIGN <var_decl "a">
3966 <plus_expr <var_decl "b"> <constant C>>
3967 If "a" is a GIMPLE register, the assignment to it is free on almost
3968 any target, because "a" usually ends up in a real register. Hence
3969 the only cost of this expression comes from the PLUS_EXPR, and we
3970 can ignore the GIMPLE_ASSIGN.
3971 If "a" is not a GIMPLE register, the assignment to "a" will most
3972 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3973 of moving something into "a", which we compute using the function
3974 estimate_move_cost. */
3975 if (gimple_clobber_p (stmt))
3976 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3977
3978 lhs = gimple_assign_lhs (stmt);
3979 rhs = gimple_assign_rhs1 (stmt);
3980
3981 cost = 0;
3982
3983 /* Account for the cost of moving to / from memory. */
3984 if (gimple_store_p (stmt))
3985 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3986 if (gimple_assign_load_p (stmt))
3987 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3988
3989 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3990 gimple_assign_rhs1 (stmt),
3991 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3992 == GIMPLE_BINARY_RHS
3993 ? gimple_assign_rhs2 (stmt) : NULL);
3994 break;
3995
3996 case GIMPLE_COND:
3997 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3998 gimple_op (stmt, 0),
3999 gimple_op (stmt, 1));
4000 break;
4001
4002 case GIMPLE_SWITCH:
4003 {
4004 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4005 /* Take into account cost of the switch + guess 2 conditional jumps for
4006 each case label.
4007
4008 TODO: once the switch expansion logic is sufficiently separated, we can
4009 do a better job of estimating the cost of the switch. */
4010 if (weights->time_based)
4011 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4012 else
4013 cost = gimple_switch_num_labels (switch_stmt) * 2;
4014 }
4015 break;
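/* For example, a switch with 16 labels is costed as floor_log2 (16) * 2 == 8
when estimating time, but as 16 * 2 == 32 when estimating size. */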
4016
4017 case GIMPLE_CALL:
4018 {
4019 tree decl;
4020
4021 if (gimple_call_internal_p (stmt))
4022 return 0;
4023 else if ((decl = gimple_call_fndecl (stmt))
4024 && DECL_BUILT_IN (decl))
4025 {
4026 /* Do not special case builtins where we see the body.
4027 This just confuses the inliner. */
4028 struct cgraph_node *node;
4029 if (!(node = cgraph_node::get (decl))
4030 || node->definition)
4031 ;
4032 /* For builtins that are likely expanded to nothing or
4033 inlined, do not account for operand costs. */
4034 else if (is_simple_builtin (decl))
4035 return 0;
4036 else if (is_inexpensive_builtin (decl))
4037 return weights->target_builtin_call_cost;
4038 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4039 {
4040 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4041 specialize the cheap expansion we do here.
4042 ??? This asks for a more general solution. */
4043 switch (DECL_FUNCTION_CODE (decl))
4044 {
4045 case BUILT_IN_POW:
4046 case BUILT_IN_POWF:
4047 case BUILT_IN_POWL:
4048 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4049 && (real_equal
4050 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4051 &dconst2)))
4052 return estimate_operator_cost
4053 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4054 gimple_call_arg (stmt, 0));
4055 break;
4056
4057 default:
4058 break;
4059 }
4060 }
4061 }
4062
4063 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4064 if (gimple_call_lhs (stmt))
4065 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4066 weights->time_based);
4067 for (i = 0; i < gimple_call_num_args (stmt); i++)
4068 {
4069 tree arg = gimple_call_arg (stmt, i);
4070 cost += estimate_move_cost (TREE_TYPE (arg),
4071 weights->time_based);
4072 }
4073 break;
4074 }
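/* As an illustration (assuming the time-based weights set up in
init_inline_once below, and operand types small enough that each move
above costs one unit), a direct call 'r = f (a, b)' is costed roughly as
10 + 1 + 1 + 1 == 13, while the same call through a function pointer
starts from 15 instead of 10. */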
4075
4076 case GIMPLE_RETURN:
4077 return weights->return_cost;
4078
4079 case GIMPLE_GOTO:
4080 case GIMPLE_LABEL:
4081 case GIMPLE_NOP:
4082 case GIMPLE_PHI:
4083 case GIMPLE_PREDICT:
4084 case GIMPLE_DEBUG:
4085 return 0;
4086
4087 case GIMPLE_ASM:
4088 {
4089 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4090 /* 1000 means infinity. This avoids overflows later
4091 with very long asm statements. */
4092 if (count > 1000)
4093 count = 1000;
4094 return MAX (1, count);
4095 }
4096
4097 case GIMPLE_RESX:
4098 /* This is either going to be an external function call with one
4099 argument, or two register copy statements plus a goto. */
4100 return 2;
4101
4102 case GIMPLE_EH_DISPATCH:
4103 /* ??? This is going to turn into a switch statement. Ideally
4104 we'd have a look at the eh region and estimate the number of
4105 edges involved. */
4106 return 10;
4107
4108 case GIMPLE_BIND:
4109 return estimate_num_insns_seq (
4110 gimple_bind_body (as_a <gbind *> (stmt)),
4111 weights);
4112
4113 case GIMPLE_EH_FILTER:
4114 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4115
4116 case GIMPLE_CATCH:
4117 return estimate_num_insns_seq (gimple_catch_handler (
4118 as_a <gcatch *> (stmt)),
4119 weights);
4120
4121 case GIMPLE_TRY:
4122 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4123 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4124
4125 /* OMP directives are generally very expensive. */
4126
4127 case GIMPLE_OMP_RETURN:
4128 case GIMPLE_OMP_SECTIONS_SWITCH:
4129 case GIMPLE_OMP_ATOMIC_STORE:
4130 case GIMPLE_OMP_CONTINUE:
4131 /* ...except these, which are cheap. */
4132 return 0;
4133
4134 case GIMPLE_OMP_ATOMIC_LOAD:
4135 return weights->omp_cost;
4136
4137 case GIMPLE_OMP_FOR:
4138 return (weights->omp_cost
4139 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4140 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4141
4142 case GIMPLE_OMP_PARALLEL:
4143 case GIMPLE_OMP_TASK:
4144 case GIMPLE_OMP_CRITICAL:
4145 case GIMPLE_OMP_MASTER:
4146 case GIMPLE_OMP_TASKGROUP:
4147 case GIMPLE_OMP_ORDERED:
4148 case GIMPLE_OMP_SECTION:
4149 case GIMPLE_OMP_SECTIONS:
4150 case GIMPLE_OMP_SINGLE:
4151 case GIMPLE_OMP_TARGET:
4152 case GIMPLE_OMP_TEAMS:
4153 return (weights->omp_cost
4154 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4155
4156 case GIMPLE_TRANSACTION:
4157 return (weights->tm_cost
4158 + estimate_num_insns_seq (gimple_transaction_body (
4159 as_a <gtransaction *> (stmt)),
4160 weights));
4161
4162 default:
4163 gcc_unreachable ();
4164 }
4165
4166 return cost;
4167 }
4168
4169 /* Estimate number of instructions that will be created by expanding
4170 function FNDECL. WEIGHTS contains weights attributed to various
4171 constructs. */
4172
4173 int
4174 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4175 {
4176 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4177 gimple_stmt_iterator bsi;
4178 basic_block bb;
4179 int n = 0;
4180
4181 gcc_assert (my_function && my_function->cfg);
4182 FOR_EACH_BB_FN (bb, my_function)
4183 {
4184 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4185 n += estimate_num_insns (gsi_stmt (bsi), weights);
4186 }
4187
4188 return n;
4189 }
4190
4191
4192 /* Initializes weights used by estimate_num_insns. */
4193
4194 void
4195 init_inline_once (void)
4196 {
4197 eni_size_weights.call_cost = 1;
4198 eni_size_weights.indirect_call_cost = 3;
4199 eni_size_weights.target_builtin_call_cost = 1;
4200 eni_size_weights.div_mod_cost = 1;
4201 eni_size_weights.omp_cost = 40;
4202 eni_size_weights.tm_cost = 10;
4203 eni_size_weights.time_based = false;
4204 eni_size_weights.return_cost = 1;
4205
4206 /* Estimating time for call is difficult, since we have no idea what the
4207 called function does. In the current uses of eni_time_weights,
4208 underestimating the cost does less harm than overestimating it, so
4209 we choose a rather small value here. */
4210 eni_time_weights.call_cost = 10;
4211 eni_time_weights.indirect_call_cost = 15;
4212 eni_time_weights.target_builtin_call_cost = 1;
4213 eni_time_weights.div_mod_cost = 10;
4214 eni_time_weights.omp_cost = 40;
4215 eni_time_weights.tm_cost = 40;
4216 eni_time_weights.time_based = true;
4217 eni_time_weights.return_cost = 2;
4218 }
4219
4220
4221 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4222
4223 static void
4224 prepend_lexical_block (tree current_block, tree new_block)
4225 {
4226 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4227 BLOCK_SUBBLOCKS (current_block) = new_block;
4228 BLOCK_SUPERCONTEXT (new_block) = current_block;
4229 }
4230
4231 /* Add local variables from CALLEE to CALLER. */
4232
4233 static inline void
4234 add_local_variables (struct function *callee, struct function *caller,
4235 copy_body_data *id)
4236 {
4237 tree var;
4238 unsigned ix;
4239
4240 FOR_EACH_LOCAL_DECL (callee, ix, var)
4241 if (!can_be_nonlocal (var, id))
4242 {
4243 tree new_var = remap_decl (var, id);
4244
4245 /* Remap debug-expressions. */
4246 if (VAR_P (new_var)
4247 && DECL_HAS_DEBUG_EXPR_P (var)
4248 && new_var != var)
4249 {
4250 tree tem = DECL_DEBUG_EXPR (var);
4251 bool old_regimplify = id->regimplify;
4252 id->remapping_type_depth++;
4253 walk_tree (&tem, copy_tree_body_r, id, NULL);
4254 id->remapping_type_depth--;
4255 id->regimplify = old_regimplify;
4256 SET_DECL_DEBUG_EXPR (new_var, tem);
4257 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4258 }
4259 add_local_decl (caller, new_var);
4260 }
4261 }
4262
4263 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4264 have brought in or introduced any debug stmts for SRCVAR. */
4265
4266 static inline void
4267 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4268 {
4269 tree *remappedvarp = id->decl_map->get (srcvar);
4270
4271 if (!remappedvarp)
4272 return;
4273
4274 if (!VAR_P (*remappedvarp))
4275 return;
4276
4277 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4278 return;
4279
4280 tree tvar = target_for_debug_bind (*remappedvarp);
4281 if (!tvar)
4282 return;
4283
4284 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4285 id->call_stmt);
4286 gimple_seq_add_stmt (bindings, stmt);
4287 }
4288
4289 /* For each inlined variable for which we may have debug bind stmts,
4290 add before GSI a final debug stmt resetting it, marking the end of
4291 its life, so that var-tracking knows it doesn't have to compute
4292 further locations for it. */
4293
4294 static inline void
4295 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4296 {
4297 tree var;
4298 unsigned ix;
4299 gimple_seq bindings = NULL;
4300
4301 if (!gimple_in_ssa_p (id->src_cfun))
4302 return;
4303
4304 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4305 return;
4306
4307 for (var = DECL_ARGUMENTS (id->src_fn);
4308 var; var = DECL_CHAIN (var))
4309 reset_debug_binding (id, var, &bindings);
4310
4311 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4312 reset_debug_binding (id, var, &bindings);
4313
4314 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4315 }
4316
4317 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4318
4319 static bool
4320 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4321 {
4322 tree use_retvar;
4323 tree fn;
4324 hash_map<tree, tree> *dst;
4325 hash_map<tree, tree> *st = NULL;
4326 tree return_slot;
4327 tree modify_dest;
4328 struct cgraph_edge *cg_edge;
4329 cgraph_inline_failed_t reason;
4330 basic_block return_block;
4331 edge e;
4332 gimple_stmt_iterator gsi, stmt_gsi;
4333 bool successfully_inlined = false;
4334 bool purge_dead_abnormal_edges;
4335 gcall *call_stmt;
4336 unsigned int prop_mask, src_properties;
4337 struct function *dst_cfun;
4338 tree simduid;
4339 use_operand_p use;
4340 gimple *simtenter_stmt = NULL;
4341 vec<tree> *simtvars_save;
4342
4343 /* The gimplifier uses input_location in too many places, such as
4344 internal_get_tmp_var (). */
4345 location_t saved_location = input_location;
4346 input_location = gimple_location (stmt);
4347
4348 /* From here on, we're only interested in CALL_EXPRs. */
4349 call_stmt = dyn_cast <gcall *> (stmt);
4350 if (!call_stmt)
4351 goto egress;
4352
4353 cg_edge = id->dst_node->get_edge (stmt);
4354 gcc_checking_assert (cg_edge);
4355 /* First, see if we can figure out what function is being called.
4356 If we cannot, then there is no hope of inlining the function. */
4357 if (cg_edge->indirect_unknown_callee)
4358 goto egress;
4359 fn = cg_edge->callee->decl;
4360 gcc_checking_assert (fn);
4361
4362 /* If FN is a declaration of a function in a nested scope that was
4363 globally declared inline, we don't set its DECL_INITIAL.
4364 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4365 C++ front-end uses it for cdtors to refer to their internal
4366 declarations, which are not real functions. Fortunately those
4367 don't have trees to be saved, so we can tell by checking their
4368 gimple_body. */
4369 if (!DECL_INITIAL (fn)
4370 && DECL_ABSTRACT_ORIGIN (fn)
4371 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4372 fn = DECL_ABSTRACT_ORIGIN (fn);
4373
4374 /* Don't try to inline functions that are not well-suited to inlining. */
4375 if (cg_edge->inline_failed)
4376 {
4377 reason = cg_edge->inline_failed;
4378 /* If this call was originally indirect, we do not want to emit any
4379 inlining related warnings or sorry messages because there are no
4380 guarantees regarding those. */
4381 if (cg_edge->indirect_inlining_edge)
4382 goto egress;
4383
4384 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4385 /* For extern inline functions that get redefined we have always
4386 silently ignored the always_inline flag. Better behavior would
4387 be to be able to keep both bodies and use extern inline body
4388 for inlining, but we can't do that because frontends overwrite
4389 the body. */
4390 && !cg_edge->callee->local.redefined_extern_inline
4391 /* During early inline pass, report only when optimization is
4392 not turned on. */
4393 && (symtab->global_info_ready
4394 || !optimize
4395 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4396 /* PR 20090218-1_0.c. Body can be provided by another module. */
4397 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4398 {
4399 error ("inlining failed in call to always_inline %q+F: %s", fn,
4400 cgraph_inline_failed_string (reason));
4401 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4402 inform (gimple_location (stmt), "called from here");
4403 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4404 inform (DECL_SOURCE_LOCATION (cfun->decl),
4405 "called from this function");
4406 }
4407 else if (warn_inline
4408 && DECL_DECLARED_INLINE_P (fn)
4409 && !DECL_NO_INLINE_WARNING_P (fn)
4410 && !DECL_IN_SYSTEM_HEADER (fn)
4411 && reason != CIF_UNSPECIFIED
4412 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4413 /* Do not warn about not inlined recursive calls. */
4414 && !cg_edge->recursive_p ()
4415 /* Avoid warnings during early inline pass. */
4416 && symtab->global_info_ready)
4417 {
4418 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4419 fn, _(cgraph_inline_failed_string (reason))))
4420 {
4421 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4422 inform (gimple_location (stmt), "called from here");
4423 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4424 inform (DECL_SOURCE_LOCATION (cfun->decl),
4425 "called from this function");
4426 }
4427 }
4428 goto egress;
4429 }
4430 id->src_node = cg_edge->callee;
4431
4432 /* If the callee is a thunk, all we need to do is adjust the THIS
4433 pointer and redirect to the function being thunked. */
4434 if (id->src_node->thunk.thunk_p)
4435 {
4436 cgraph_edge *edge;
4437 tree virtual_offset = NULL;
4438 profile_count count = cg_edge->count;
4439 tree op;
4440 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4441
4442 cg_edge->remove ();
4443 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4444 gimple_uid (stmt),
4445 profile_count::one (),
4446 profile_count::one (),
4447 true);
4448 edge->count = count;
4449 if (id->src_node->thunk.virtual_offset_p)
4450 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4451 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4452 NULL);
4453 gsi_insert_before (&iter, gimple_build_assign (op,
4454 gimple_call_arg (stmt, 0)),
4455 GSI_NEW_STMT);
4456 gcc_assert (id->src_node->thunk.this_adjusting);
4457 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4458 virtual_offset);
4459
4460 gimple_call_set_arg (stmt, 0, op);
4461 gimple_call_set_fndecl (stmt, edge->callee->decl);
4462 update_stmt (stmt);
4463 id->src_node->remove ();
4464 expand_call_inline (bb, stmt, id);
4465 maybe_remove_unused_call_args (cfun, stmt);
4466 return true;
4467 }
4468 fn = cg_edge->callee->decl;
4469 cg_edge->callee->get_untransformed_body ();
4470
4471 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4472 cg_edge->callee->verify ();
4473
4474 /* We will be inlining this callee. */
4475 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4476 id->assign_stmts.create (0);
4477
4478 /* Update the caller's EH personality. */
4479 if (DECL_FUNCTION_PERSONALITY (fn))
4480 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4481 = DECL_FUNCTION_PERSONALITY (fn);
4482
4483 /* Split the block before the GIMPLE_CALL. */
4484 stmt_gsi = gsi_for_stmt (stmt);
4485 gsi_prev (&stmt_gsi);
4486 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4487 bb = e->src;
4488 return_block = e->dest;
4489 remove_edge (e);
4490
4491 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4492 been the source of abnormal edges. In this case, schedule
4493 the removal of dead abnormal edges. */
4494 gsi = gsi_start_bb (return_block);
4495 gsi_next (&gsi);
4496 purge_dead_abnormal_edges = gsi_end_p (gsi);
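/* I.e., the flag is set when the call is the only statement left in
RETURN_BLOCK, which is the case when it ended the original BB. */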
4497
4498 stmt_gsi = gsi_start_bb (return_block);
4499
4500 /* Build a block containing code to initialize the arguments, the
4501 actual inline expansion of the body, and a label for the return
4502 statements within the function to jump to. The type of the
4503 statement expression is the return type of the function call.
4504 ??? If the call does not have an associated block then we will
4505 remap all callee blocks to NULL, effectively dropping most of
4506 its debug information. This should only happen for calls to
4507 artificial decls inserted by the compiler itself. We need to
4508 either link the inlined blocks into the caller block tree or
4509 not refer to them in any way to not break GC for locations. */
4510 if (gimple_block (stmt))
4511 {
4512 id->block = make_node (BLOCK);
4513 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4514 BLOCK_SOURCE_LOCATION (id->block)
4515 = LOCATION_LOCUS (gimple_location (stmt));
4516 prepend_lexical_block (gimple_block (stmt), id->block);
4517 }
4518
4519 /* Local declarations will be replaced by their equivalents in this
4520 map. */
4521 st = id->decl_map;
4522 id->decl_map = new hash_map<tree, tree>;
4523 dst = id->debug_map;
4524 id->debug_map = NULL;
4525
4526 /* Record the function we are about to inline. */
4527 id->src_fn = fn;
4528 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4529 id->call_stmt = call_stmt;
4530
4531 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4532 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4533 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4534 simtvars_save = id->dst_simt_vars;
4535 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4536 && (simduid = bb->loop_father->simduid) != NULL_TREE
4537 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4538 && single_imm_use (simduid, &use, &simtenter_stmt)
4539 && is_gimple_call (simtenter_stmt)
4540 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4541 vec_alloc (id->dst_simt_vars, 0);
4542 else
4543 id->dst_simt_vars = NULL;
4544
4545 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4546 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4547
4548 /* If the src function contains an IFN_VA_ARG, then so will the dst
4549 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4550 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4551 src_properties = id->src_cfun->curr_properties & prop_mask;
4552 if (src_properties != prop_mask)
4553 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4554
4555 gcc_assert (!id->src_cfun->after_inlining);
4556
4557 id->entry_bb = bb;
4558 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4559 {
4560 gimple_stmt_iterator si = gsi_last_bb (bb);
4561 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4562 NOT_TAKEN),
4563 GSI_NEW_STMT);
4564 }
4565 initialize_inlined_parameters (id, stmt, fn, bb);
4566 if (debug_nonbind_markers_p && debug_inline_points && id->block
4567 && inlined_function_outer_scope_p (id->block))
4568 {
4569 gimple_stmt_iterator si = gsi_last_bb (bb);
4570 gsi_insert_after (&si, gimple_build_debug_inline_entry
4571 (id->block, input_location), GSI_NEW_STMT);
4572 }
4573
4574 if (DECL_INITIAL (fn))
4575 {
4576 if (gimple_block (stmt))
4577 {
4578 tree *var;
4579
4580 prepend_lexical_block (id->block,
4581 remap_blocks (DECL_INITIAL (fn), id));
4582 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4583 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4584 == NULL_TREE));
4585 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4586 otherwise, in DWARF, DW_TAG_formal_parameter will not be a child of
4587 DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block under it.
4588 The parameters can then be evaluated in the debugger, but they
4589 don't show up in backtraces. */
4590 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4591 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4592 {
4593 tree v = *var;
4594 *var = TREE_CHAIN (v);
4595 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4596 BLOCK_VARS (id->block) = v;
4597 }
4598 else
4599 var = &TREE_CHAIN (*var);
4600 }
4601 else
4602 remap_blocks_to_null (DECL_INITIAL (fn), id);
4603 }
4604
4605 /* Return statements in the function body will be replaced by jumps
4606 to the RET_LABEL. */
4607 gcc_assert (DECL_INITIAL (fn));
4608 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4609
4610 /* Find the LHS to which the result of this call is assigned. */
4611 return_slot = NULL;
4612 if (gimple_call_lhs (stmt))
4613 {
4614 modify_dest = gimple_call_lhs (stmt);
4615
4616 /* The function which we are inlining might not return a value,
4617 in which case we should issue a warning that the function
4618 does not return a value. In that case the optimizers will
4619 see that the variable to which the value is assigned was not
4620 initialized. We do not want to issue a warning about that
4621 uninitialized variable. */
4622 if (DECL_P (modify_dest))
4623 TREE_NO_WARNING (modify_dest) = 1;
4624
4625 if (gimple_call_return_slot_opt_p (call_stmt))
4626 {
4627 return_slot = modify_dest;
4628 modify_dest = NULL;
4629 }
4630 }
4631 else
4632 modify_dest = NULL;
4633
4634 /* If we are inlining a call to the C++ operator new, we don't want
4635 to use type based alias analysis on the return value. Otherwise
4636 we may get confused if the compiler sees that the inlined new
4637 function returns a pointer which was just deleted. See bug
4638 33407. */
4639 if (DECL_IS_OPERATOR_NEW (fn))
4640 {
4641 return_slot = NULL;
4642 modify_dest = NULL;
4643 }
4644
4645 /* Declare the return variable for the function. */
4646 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4647
4648 /* Add local vars in this inlined callee to caller. */
4649 add_local_variables (id->src_cfun, cfun, id);
4650
4651 if (dump_file && (dump_flags & TDF_DETAILS))
4652 {
4653 fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4654 id->src_node->dump_name (),
4655 id->dst_node->dump_name (),
4656 cg_edge->sreal_frequency ().to_double ());
4657 id->src_node->dump (dump_file);
4658 id->dst_node->dump (dump_file);
4659 }
4660
4661 /* This is it. Duplicate the callee body. Assume callee is
4662 pre-gimplified. Note that we must not alter the caller
4663 function in any way before this point, as this CALL_EXPR may be
4664 a self-referential call; if we're calling ourselves, we need to
4665 duplicate our body before altering anything. */
4666 copy_body (id, bb, return_block, NULL);
4667
4668 reset_debug_bindings (id, stmt_gsi);
4669
4670 if (flag_stack_reuse != SR_NONE)
4671 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4672 if (!TREE_THIS_VOLATILE (p))
4673 {
4674 tree *varp = id->decl_map->get (p);
4675 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4676 {
4677 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4678 gimple *clobber_stmt;
4679 TREE_THIS_VOLATILE (clobber) = 1;
4680 clobber_stmt = gimple_build_assign (*varp, clobber);
4681 gimple_set_location (clobber_stmt, gimple_location (stmt));
4682 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4683 }
4684 }
4685
4686 /* Reset the escaped solution. */
4687 if (cfun->gimple_df)
4688 pt_solution_reset (&cfun->gimple_df->escaped);
4689
4690 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4691 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4692 {
4693 size_t nargs = gimple_call_num_args (simtenter_stmt);
4694 vec<tree> *vars = id->dst_simt_vars;
4695 auto_vec<tree> newargs (nargs + vars->length ());
4696 for (size_t i = 0; i < nargs; i++)
4697 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4698 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4699 {
4700 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4701 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4702 }
4703 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4704 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4705 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4706 gsi_replace (&gsi, g, false);
4707 }
4708 vec_free (id->dst_simt_vars);
4709 id->dst_simt_vars = simtvars_save;
4710
4711 /* Clean up. */
4712 if (id->debug_map)
4713 {
4714 delete id->debug_map;
4715 id->debug_map = dst;
4716 }
4717 delete id->decl_map;
4718 id->decl_map = st;
4719
4720 /* Unlink the call's virtual operands before replacing it. */
4721 unlink_stmt_vdef (stmt);
4722 if (gimple_vdef (stmt)
4723 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4724 release_ssa_name (gimple_vdef (stmt));
4725
4726 /* If the inlined function returns a result that we care about,
4727 substitute the GIMPLE_CALL with an assignment of the return
4728 variable to the LHS of the call. That is, if STMT was
4729 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4730 if (use_retvar && gimple_call_lhs (stmt))
4731 {
4732 gimple *old_stmt = stmt;
4733 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4734 gimple_set_location (stmt, gimple_location (old_stmt));
4735 gsi_replace (&stmt_gsi, stmt, false);
4736 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4737 /* Append a clobber for id->retvar if easily possible. */
4738 if (flag_stack_reuse != SR_NONE
4739 && id->retvar
4740 && VAR_P (id->retvar)
4741 && id->retvar != return_slot
4742 && id->retvar != modify_dest
4743 && !TREE_THIS_VOLATILE (id->retvar)
4744 && !is_gimple_reg (id->retvar)
4745 && !stmt_ends_bb_p (stmt))
4746 {
4747 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4748 gimple *clobber_stmt;
4749 TREE_THIS_VOLATILE (clobber) = 1;
4750 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4751 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4752 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4753 }
4754 }
4755 else
4756 {
4757 /* Handle the case of inlining a function with no return
4758 statement, which causes the return value to become undefined. */
4759 if (gimple_call_lhs (stmt)
4760 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4761 {
4762 tree name = gimple_call_lhs (stmt);
4763 tree var = SSA_NAME_VAR (name);
4764 tree def = var ? ssa_default_def (cfun, var) : NULL;
4765
4766 if (def)
4767 {
4768 /* If the variable is used undefined, make this name
4769 undefined via a move. */
4770 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4771 gsi_replace (&stmt_gsi, stmt, true);
4772 }
4773 else
4774 {
4775 if (!var)
4776 {
4777 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4778 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4779 }
4780 /* Otherwise make this variable undefined. */
4781 gsi_remove (&stmt_gsi, true);
4782 set_ssa_default_def (cfun, var, name);
4783 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4784 }
4785 }
4786 /* Replace with a clobber for id->retvar. */
4787 else if (flag_stack_reuse != SR_NONE
4788 && id->retvar
4789 && VAR_P (id->retvar)
4790 && id->retvar != return_slot
4791 && id->retvar != modify_dest
4792 && !TREE_THIS_VOLATILE (id->retvar)
4793 && !is_gimple_reg (id->retvar))
4794 {
4795 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4796 gimple *clobber_stmt;
4797 TREE_THIS_VOLATILE (clobber) = 1;
4798 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4799 gimple_set_location (clobber_stmt, gimple_location (stmt));
4800 gsi_replace (&stmt_gsi, clobber_stmt, false);
4801 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4802 }
4803 else
4804 gsi_remove (&stmt_gsi, true);
4805 }
4806
4807 if (purge_dead_abnormal_edges)
4808 {
4809 gimple_purge_dead_eh_edges (return_block);
4810 gimple_purge_dead_abnormal_call_edges (return_block);
4811 }
4812
4813 /* If the value of the new expression is ignored, that's OK. We
4814 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4815 the equivalent inlined version either. */
4816 if (is_gimple_assign (stmt))
4817 {
4818 gcc_assert (gimple_assign_single_p (stmt)
4819 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4820 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4821 }
4822
4823 id->assign_stmts.release ();
4824
4825 /* Output the inlining info for this abstract function, since it has been
4826 inlined. If we don't do this now, we can lose the information about the
4827 variables in the function when the blocks get blown away as soon as we
4828 remove the cgraph node. */
4829 if (gimple_block (stmt))
4830 (*debug_hooks->outlining_inline_function) (fn);
4831
4832 /* Update callgraph if needed. */
4833 cg_edge->callee->remove ();
4834
4835 id->block = NULL_TREE;
4836 id->retvar = NULL_TREE;
4837 id->retbnd = NULL_TREE;
4838 successfully_inlined = true;
4839
4840 egress:
4841 input_location = saved_location;
4842 return successfully_inlined;
4843 }
4844
4845 /* Expand call statements found in basic block BB.
4846 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4847 in a MODIFY_EXPR. */
4848
4849 static bool
4850 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4851 {
4852 gimple_stmt_iterator gsi;
4853 bool inlined = false;
4854
4855 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4856 {
4857 gimple *stmt = gsi_stmt (gsi);
4858 gsi_prev (&gsi);
4859
4860 if (is_gimple_call (stmt)
4861 && !gimple_call_internal_p (stmt))
4862 inlined |= expand_call_inline (bb, stmt, id);
4863 }
4864
4865 return inlined;
4866 }
4867
4868
4869 /* Walk all basic blocks created after FIRST and try to fold every statement
4870 in the STATEMENTS pointer set. */
4871
4872 static void
4873 fold_marked_statements (int first, hash_set<gimple *> *statements)
4874 {
4875 for (; first < n_basic_blocks_for_fn (cfun); first++)
4876 if (BASIC_BLOCK_FOR_FN (cfun, first))
4877 {
4878 gimple_stmt_iterator gsi;
4879
4880 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4881 !gsi_end_p (gsi);
4882 gsi_next (&gsi))
4883 if (statements->contains (gsi_stmt (gsi)))
4884 {
4885 gimple *old_stmt = gsi_stmt (gsi);
4886 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4887
4888 if (old_decl && DECL_BUILT_IN (old_decl))
4889 {
4890 /* Folding builtins can create multiple instructions;
4891 we need to look at all of them. */
4892 gimple_stmt_iterator i2 = gsi;
4893 gsi_prev (&i2);
4894 if (fold_stmt (&gsi))
4895 {
4896 gimple *new_stmt;
4897 /* If a builtin at the end of a bb folded into nothing,
4898 the following loop won't work. */
4899 if (gsi_end_p (gsi))
4900 {
4901 cgraph_update_edges_for_call_stmt (old_stmt,
4902 old_decl, NULL);
4903 break;
4904 }
4905 if (gsi_end_p (i2))
4906 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4907 else
4908 gsi_next (&i2);
4909 while (1)
4910 {
4911 new_stmt = gsi_stmt (i2);
4912 update_stmt (new_stmt);
4913 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4914 new_stmt);
4915
4916 if (new_stmt == gsi_stmt (gsi))
4917 {
4918 /* It is okay to check only for the very last
4919 of these statements. If it is a throwing
4920 statement nothing will change. If it isn't,
4921 this can remove EH edges. The only case where
4922 that would be wrong is if some intermediate
4923 stmt throws but the last one doesn't; then
4924 we'd have to split the block, which we can't
4925 do here and would lose anyway. And as builtins
4926 probably never throw, this is all
4927 moot anyway. */
4928 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4929 new_stmt))
4930 gimple_purge_dead_eh_edges (
4931 BASIC_BLOCK_FOR_FN (cfun, first));
4932 break;
4933 }
4934 gsi_next (&i2);
4935 }
4936 }
4937 }
4938 else if (fold_stmt (&gsi))
4939 {
4940 /* Re-read the statement from GSI as fold_stmt() may
4941 have changed it. */
4942 gimple *new_stmt = gsi_stmt (gsi);
4943 update_stmt (new_stmt);
4944
4945 if (is_gimple_call (old_stmt)
4946 || is_gimple_call (new_stmt))
4947 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4948 new_stmt);
4949
4950 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4951 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4952 first));
4953 }
4954 }
4955 }
4956 }
4957
4958 /* Expand calls to inline functions in the body of FN. */
4959
4960 unsigned int
4961 optimize_inline_calls (tree fn)
4962 {
4963 copy_body_data id;
4964 basic_block bb;
4965 int last = n_basic_blocks_for_fn (cfun);
4966 bool inlined_p = false;
4967
4968 /* Clear out ID. */
4969 memset (&id, 0, sizeof (id));
4970
4971 id.src_node = id.dst_node = cgraph_node::get (fn);
4972 gcc_assert (id.dst_node->definition);
4973 id.dst_fn = fn;
4974 /* Or any functions that aren't finished yet. */
4975 if (current_function_decl)
4976 id.dst_fn = current_function_decl;
4977
4978 id.copy_decl = copy_decl_maybe_to_var;
4979 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4980 id.transform_new_cfg = false;
4981 id.transform_return_to_modify = true;
4982 id.transform_parameter = true;
4983 id.transform_lang_insert_block = NULL;
4984 id.statements_to_fold = new hash_set<gimple *>;
4985
4986 push_gimplify_context ();
4987
4988 /* We make no attempts to keep dominance info up-to-date. */
4989 free_dominance_info (CDI_DOMINATORS);
4990 free_dominance_info (CDI_POST_DOMINATORS);
4991
4992 /* Register specific gimple functions. */
4993 gimple_register_cfg_hooks ();
4994
4995 /* Reach the trees by walking over the CFG, and note the
4996 enclosing basic-blocks in the call edges. */
4997 /* We walk the blocks going forward, because inlined function bodies
4998 will split id->current_basic_block, and the new blocks will
4999 follow it; we'll trudge through them, processing their CALL_EXPRs
5000 along the way. */
5001 FOR_EACH_BB_FN (bb, cfun)
5002 inlined_p |= gimple_expand_calls_inline (bb, &id);
5003
5004 pop_gimplify_context (NULL);
5005
5006 if (flag_checking)
5007 {
5008 struct cgraph_edge *e;
5009
5010 id.dst_node->verify ();
5011
5012 /* Double check that we inlined everything we are supposed to inline. */
5013 for (e = id.dst_node->callees; e; e = e->next_callee)
5014 gcc_assert (e->inline_failed);
5015 }
5016
5017 /* Fold queued statements. */
5018 update_max_bb_count ();
5019 fold_marked_statements (last, id.statements_to_fold);
5020 delete id.statements_to_fold;
5021
5022 gcc_assert (!id.debug_stmts.exists ());
5023
5024 /* If we didn't inline into the function there is nothing to do. */
5025 if (!inlined_p)
5026 return 0;
5027
5028 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5029 number_blocks (fn);
5030
5031 delete_unreachable_blocks_update_callgraph (&id);
5032 if (flag_checking)
5033 id.dst_node->verify ();
5034
5035 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5036 not possible yet - the IPA passes might make various functions not
5037 throw and they don't care to proactively update local EH info. This is
5038 done later in the fixup_cfg pass, which also executes the verification. */
5039 return (TODO_update_ssa
5040 | TODO_cleanup_cfg
5041 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5042 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5043 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5044 ? TODO_rebuild_frequencies : 0));
5045 }
5046
5047 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5048
5049 tree
5050 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5051 {
5052 enum tree_code code = TREE_CODE (*tp);
5053 enum tree_code_class cl = TREE_CODE_CLASS (code);
5054
5055 /* We make copies of most nodes. */
5056 if (IS_EXPR_CODE_CLASS (cl)
5057 || code == TREE_LIST
5058 || code == TREE_VEC
5059 || code == TYPE_DECL
5060 || code == OMP_CLAUSE)
5061 {
5062 /* Because the chain gets clobbered when we make a copy, we save it
5063 here. */
5064 tree chain = NULL_TREE, new_tree;
5065
5066 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5067 chain = TREE_CHAIN (*tp);
5068
5069 /* Copy the node. */
5070 new_tree = copy_node (*tp);
5071
5072 *tp = new_tree;
5073
5074 /* Now, restore the chain, if appropriate. That will cause
5075 walk_tree to walk into the chain as well. */
5076 if (code == PARM_DECL
5077 || code == TREE_LIST
5078 || code == OMP_CLAUSE)
5079 TREE_CHAIN (*tp) = chain;
5080
5081 /* For now, we don't update BLOCKs when we make copies. So, we
5082 have to nullify all BIND_EXPRs. */
5083 if (TREE_CODE (*tp) == BIND_EXPR)
5084 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5085 }
5086 else if (code == CONSTRUCTOR)
5087 {
5088 /* CONSTRUCTOR nodes need special handling because
5089 we need to duplicate the vector of elements. */
5090 tree new_tree;
5091
5092 new_tree = copy_node (*tp);
5093 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5094 *tp = new_tree;
5095 }
5096 else if (code == STATEMENT_LIST)
5097 /* We used to just abort on STATEMENT_LIST, but we can run into them
5098 with statement-expressions (c++/40975). */
5099 copy_statement_list (tp);
5100 else if (TREE_CODE_CLASS (code) == tcc_type)
5101 *walk_subtrees = 0;
5102 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5103 *walk_subtrees = 0;
5104 else if (TREE_CODE_CLASS (code) == tcc_constant)
5105 *walk_subtrees = 0;
5106 return NULL_TREE;
5107 }
5108
5109 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5110 information indicating to what new SAVE_EXPR this one should be mapped,
5111 use that one. Otherwise, create a new node and enter it in ST. FN is
5112 the function into which the copy will be placed. */
5113
5114 static void
5115 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5116 {
5117 tree *n;
5118 tree t;
5119
5120 /* See if we already encountered this SAVE_EXPR. */
5121 n = st->get (*tp);
5122
5123 /* If we didn't already remap this SAVE_EXPR, do so now. */
5124 if (!n)
5125 {
5126 t = copy_node (*tp);
5127
5128 /* Remember this SAVE_EXPR. */
5129 st->put (*tp, t);
5130 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5131 st->put (t, t);
5132 }
5133 else
5134 {
5135 /* We've already walked into this SAVE_EXPR; don't do it again. */
5136 *walk_subtrees = 0;
5137 t = *n;
5138 }
5139
5140 /* Replace this SAVE_EXPR with the copy. */
5141 *tp = t;
5142 }
5143
5144 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5145 label, copies the declaration and enters it in the decl_map of the
5146 copy_body_data pointed to by WI->info. */
5147
5148 static tree
5149 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5150 bool *handled_ops_p ATTRIBUTE_UNUSED,
5151 struct walk_stmt_info *wi)
5152 {
5153 copy_body_data *id = (copy_body_data *) wi->info;
5154 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5155
5156 if (stmt)
5157 {
5158 tree decl = gimple_label_label (stmt);
5159
5160 /* Copy the decl and remember the copy. */
5161 insert_decl_map (id, decl, id->copy_decl (decl, id));
5162 }
5163
5164 return NULL_TREE;
5165 }
5166
5167 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5168 struct walk_stmt_info *wi);
5169
5170 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5171 Using the decl_map of the copy_body_data pointed to by WI->info,
5172 remaps all local declarations to appropriate replacements in gimple
5173 operands. */
5174
5175 static tree
5176 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5177 {
5178 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5179 copy_body_data *id = (copy_body_data *) wi->info;
5180 hash_map<tree, tree> *st = id->decl_map;
5181 tree *n;
5182 tree expr = *tp;
5183
5184 /* For recursive invocations this is no longer the LHS itself. */
5185 bool is_lhs = wi->is_lhs;
5186 wi->is_lhs = false;
5187
5188 if (TREE_CODE (expr) == SSA_NAME)
5189 {
5190 *tp = remap_ssa_name (*tp, id);
5191 *walk_subtrees = 0;
5192 if (is_lhs)
5193 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5194 }
5195 /* Only a local declaration (variable or label). */
5196 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5197 || TREE_CODE (expr) == LABEL_DECL)
5198 {
5199 /* Lookup the declaration. */
5200 n = st->get (expr);
5201
5202 /* If it's there, remap it. */
5203 if (n)
5204 *tp = *n;
5205 *walk_subtrees = 0;
5206 }
5207 else if (TREE_CODE (expr) == STATEMENT_LIST
5208 || TREE_CODE (expr) == BIND_EXPR
5209 || TREE_CODE (expr) == SAVE_EXPR)
5210 gcc_unreachable ();
5211 else if (TREE_CODE (expr) == TARGET_EXPR)
5212 {
5213 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5214 It's OK for this to happen if it was part of a subtree that
5215 isn't immediately expanded, such as operand 2 of another
5216 TARGET_EXPR. */
5217 if (!TREE_OPERAND (expr, 1))
5218 {
5219 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5220 TREE_OPERAND (expr, 3) = NULL_TREE;
5221 }
5222 }
5223 else if (TREE_CODE (expr) == OMP_CLAUSE)
5224 {
5225 /* Before the omplower pass completes, some OMP clauses can contain
5226 sequences that are neither copied by gimple_seq_copy nor walked by
5227 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5228 in those situations, we have to copy and process them explicitly. */
5229
5230 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5231 {
5232 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5233 seq = duplicate_remap_omp_clause_seq (seq, wi);
5234 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5235 }
5236 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5237 {
5238 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5239 seq = duplicate_remap_omp_clause_seq (seq, wi);
5240 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5241 }
5242 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5243 {
5244 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5245 seq = duplicate_remap_omp_clause_seq (seq, wi);
5246 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5247 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5248 seq = duplicate_remap_omp_clause_seq (seq, wi);
5249 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5250 }
5251 }
5252
5253 /* Keep iterating. */
5254 return NULL_TREE;
5255 }
5256
5257
5258 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5259 Using the decl map of the copy_body_data pointed to by WI->info,
5260 remaps all local declarations to appropriate replacements in gimple
5261 statements. */
5262
5263 static tree
5264 replace_locals_stmt (gimple_stmt_iterator *gsip,
5265 bool *handled_ops_p ATTRIBUTE_UNUSED,
5266 struct walk_stmt_info *wi)
5267 {
5268 copy_body_data *id = (copy_body_data *) wi->info;
5269 gimple *gs = gsi_stmt (*gsip);
5270
5271 if (gbind *stmt = dyn_cast <gbind *> (gs))
5272 {
5273 tree block = gimple_bind_block (stmt);
5274
5275 if (block)
5276 {
5277 remap_block (&block, id);
5278 gimple_bind_set_block (stmt, block);
5279 }
5280
5281 /* This will remap a lot of the same decls again, but this should be
5282 harmless. */
5283 if (gimple_bind_vars (stmt))
5284 {
5285 tree old_var, decls = gimple_bind_vars (stmt);
5286
5287 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5288 if (!can_be_nonlocal (old_var, id)
5289 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5290 remap_decl (old_var, id);
5291
5292 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5293 id->prevent_decl_creation_for_types = true;
5294 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5295 id->prevent_decl_creation_for_types = false;
5296 }
5297 }
5298
5299 /* Keep iterating. */
5300 return NULL_TREE;
5301 }
5302
5303 /* Create a copy of SEQ and remap all decls in it. */
5304
5305 static gimple_seq
5306 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5307 {
5308 if (!seq)
5309 return NULL;
5310
5311 /* Any labels in OMP sequences can only be referred to within the sequence
5312 itself, so we can both mark the local labels and remap them here. */
5313 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5314 gimple_seq copy = gimple_seq_copy (seq);
5315 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5316 return copy;
5317 }
5318
5319 /* Copies everything in SEQ and replaces variables and labels local to
5320 current_function_decl. */
5321
5322 gimple_seq
5323 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5324 {
5325 copy_body_data id;
5326 struct walk_stmt_info wi;
5327 gimple_seq copy;
5328
5329 /* There's nothing to do for an empty sequence. */
5330 if (seq == NULL)
5331 return seq;
5332
5333 /* Set up ID. */
5334 memset (&id, 0, sizeof (id));
5335 id.src_fn = current_function_decl;
5336 id.dst_fn = current_function_decl;
5337 id.src_cfun = cfun;
5338 id.decl_map = new hash_map<tree, tree>;
5339 id.debug_map = NULL;
5340
5341 id.copy_decl = copy_decl_no_change;
5342 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5343 id.transform_new_cfg = false;
5344 id.transform_return_to_modify = false;
5345 id.transform_parameter = false;
5346 id.transform_lang_insert_block = NULL;
5347
5348 /* Walk the tree once to find local labels. */
5349 memset (&wi, 0, sizeof (wi));
5350 hash_set<tree> visited;
5351 wi.info = &id;
5352 wi.pset = &visited;
5353 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5354
5355 copy = gimple_seq_copy (seq);
5356
5357 /* Walk the copy, remapping decls. */
5358 memset (&wi, 0, sizeof (wi));
5359 wi.info = &id;
5360 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5361
5362 /* Clean up. */
5363 delete id.decl_map;
5364 if (id.debug_map)
5365 delete id.debug_map;
5366 if (id.dependence_map)
5367 {
5368 delete id.dependence_map;
5369 id.dependence_map = NULL;
5370 }
5371
5372 return copy;
5373 }
5374
5375
5376 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5377
5378 static tree
5379 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5380 {
5381 if (*tp == data)
5382 return (tree) data;
5383 else
5384 return NULL;
5385 }
5386
5387 DEBUG_FUNCTION bool
5388 debug_find_tree (tree top, tree search)
5389 {
5390 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5391 }
5392
5393
5394 /* Declare the variables created by the inliner. Add all the variables in
5395 VARS to BLOCK. */
5396
5397 static void
5398 declare_inline_vars (tree block, tree vars)
5399 {
5400 tree t;
5401 for (t = vars; t; t = DECL_CHAIN (t))
5402 {
5403 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5404 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5405 add_local_decl (cfun, t);
5406 }
5407
5408 if (block)
5409 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5410 }
5411
5412 /* Finish up the copy COPY of DECL: copy the debug-related flags, set
5413 DECL_ABSTRACT_ORIGIN, reset the RTL and set the context appropriately
5414 for ID->dst_fn, the function into which the copy will be placed. */
5415
5416 tree
5417 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5418 {
5419 /* Don't generate debug information for the copy if we wouldn't have
5420 generated it for the original either. */
5421 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5422 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5423
5424 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5425 declaration inspired this copy. */
5426 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5427
5428 /* The new variable/label has no RTL, yet. */
5429 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5430 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5431 SET_DECL_RTL (copy, 0);
5432
5433 /* These args would always appear unused, if not for this. */
5434 TREE_USED (copy) = 1;
5435
5436 /* Set the context for the new declaration. */
5437 if (!DECL_CONTEXT (decl))
5438 /* Globals stay global. */
5439 ;
5440 else if (DECL_CONTEXT (decl) != id->src_fn)
5441 /* Things that weren't in the scope of the function we're inlining
5442 from aren't in the scope we're inlining to, either. */
5443 ;
5444 else if (TREE_STATIC (decl))
5445 /* Function-scoped static variables should stay in the original
5446 function. */
5447 ;
5448 else
5449 {
5450 /* Ordinary automatic local variables are now in the scope of the
5451 new function. */
5452 DECL_CONTEXT (copy) = id->dst_fn;
5453 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5454 {
5455 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5456 DECL_ATTRIBUTES (copy)
5457 = tree_cons (get_identifier ("omp simt private"), NULL,
5458 DECL_ATTRIBUTES (copy));
5459 id->dst_simt_vars->safe_push (copy);
5460 }
5461 }
5462
5463 return copy;
5464 }
5465
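/* Create a VAR_DECL copy of the PARM_DECL or RESULT_DECL DECL, keeping its
name, type and relevant flags, for use in ID->dst_fn. */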
5466 static tree
5467 copy_decl_to_var (tree decl, copy_body_data *id)
5468 {
5469 tree copy, type;
5470
5471 gcc_assert (TREE_CODE (decl) == PARM_DECL
5472 || TREE_CODE (decl) == RESULT_DECL);
5473
5474 type = TREE_TYPE (decl);
5475
5476 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5477 VAR_DECL, DECL_NAME (decl), type);
5478 if (DECL_PT_UID_SET_P (decl))
5479 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5480 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5481 TREE_READONLY (copy) = TREE_READONLY (decl);
5482 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5483 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5484
5485 return copy_decl_for_dup_finish (id, decl, copy);
5486 }
5487
5488 /* Like copy_decl_to_var, but create a return slot object instead of a
5489 pointer variable for return by invisible reference. */
5490
5491 static tree
5492 copy_result_decl_to_var (tree decl, copy_body_data *id)
5493 {
5494 tree copy, type;
5495
5496 gcc_assert (TREE_CODE (decl) == PARM_DECL
5497 || TREE_CODE (decl) == RESULT_DECL);
5498
5499 type = TREE_TYPE (decl);
5500 if (DECL_BY_REFERENCE (decl))
5501 type = TREE_TYPE (type);
5502
5503 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5504 VAR_DECL, DECL_NAME (decl), type);
5505 if (DECL_PT_UID_SET_P (decl))
5506 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5507 TREE_READONLY (copy) = TREE_READONLY (decl);
5508 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5509 if (!DECL_BY_REFERENCE (decl))
5510 {
5511 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5512 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5513 }
5514
5515 return copy_decl_for_dup_finish (id, decl, copy);
5516 }
5517
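/* Copy DECL without changing its TREE_CODE, making it suitable for use in
ID->dst_fn; used as the copy_decl callback when no PARM_DECL to VAR_DECL
translation is wanted. */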
5518 tree
5519 copy_decl_no_change (tree decl, copy_body_data *id)
5520 {
5521 tree copy;
5522
5523 copy = copy_node (decl);
5524
5525 /* The COPY is not abstract; it will be generated in DST_FN. */
5526 DECL_ABSTRACT_P (copy) = false;
5527 lang_hooks.dup_lang_specific_decl (copy);
5528
5529 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5530 been taken; it's for internal bookkeeping in expand_goto_internal. */
5531 if (TREE_CODE (copy) == LABEL_DECL)
5532 {
5533 TREE_ADDRESSABLE (copy) = 0;
5534 LABEL_DECL_UID (copy) = -1;
5535 }
5536
5537 return copy_decl_for_dup_finish (id, decl, copy);
5538 }
5539
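/* Copy DECL for ID, turning PARM_DECLs and RESULT_DECLs into VAR_DECLs and
copying all other decls unchanged. */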
5540 static tree
5541 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5542 {
5543 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5544 return copy_decl_to_var (decl, id);
5545 else
5546 return copy_decl_no_change (decl, id);
5547 }
5548
5549 /* Return a copy of the function's argument tree. */
5550 static tree
5551 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5552 bitmap args_to_skip, tree *vars)
5553 {
5554 tree arg, *parg;
5555 tree new_parm = NULL;
5556 int i = 0;
5557
5558 parg = &new_parm;
5559
5560 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5561 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5562 {
5563 tree new_tree = remap_decl (arg, id);
5564 if (TREE_CODE (new_tree) != PARM_DECL)
5565 new_tree = id->copy_decl (arg, id);
5566 lang_hooks.dup_lang_specific_decl (new_tree);
5567 *parg = new_tree;
5568 parg = &DECL_CHAIN (new_tree);
5569 }
5570 else if (!id->decl_map->get (arg))
5571 {
5572 /* Make an equivalent VAR_DECL. If the argument was used
5573 as a temporary variable later in the function, its uses will
5574 be replaced by the local variable. */
5575 tree var = copy_decl_to_var (arg, id);
5576 insert_decl_map (id, arg, var);
5577 /* Declare this new variable. */
5578 DECL_CHAIN (var) = *vars;
5579 *vars = var;
5580 }
5581 return new_parm;
5582 }
5583
5584 /* Return a copy of the function's static chain. */
5585 static tree
5586 copy_static_chain (tree static_chain, copy_body_data * id)
5587 {
5588 tree *chain_copy, *pvar;
5589
5590 chain_copy = &static_chain;
5591 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5592 {
5593 tree new_tree = remap_decl (*pvar, id);
5594 lang_hooks.dup_lang_specific_decl (new_tree);
5595 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5596 *pvar = new_tree;
5597 }
5598 return static_chain;
5599 }
5600
5601 /* Return true if the function is allowed to be versioned.
5602 This is a guard for the versioning functionality. */
5603
5604 bool
5605 tree_versionable_function_p (tree fndecl)
5606 {
5607 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5608 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5609 }
5610
5611 /* Delete all unreachable basic blocks and update the callgraph.
5612 Doing so is somewhat nontrivial because we need to update all clones and
5613 remove inline functions that become unreachable. */
5614
5615 static bool
5616 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5617 {
5618 bool changed = false;
5619 basic_block b, next_bb;
5620
5621 find_unreachable_blocks ();
5622
5623 /* Delete all unreachable basic blocks. */
5624
5625 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
5626 b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5627 {
5628 next_bb = b->next_bb;
5629
5630 if (!(b->flags & BB_REACHABLE))
5631 {
5632 gimple_stmt_iterator bsi;
5633
5634 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5635 {
5636 struct cgraph_edge *e;
5637 struct cgraph_node *node;
5638
5639 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5640
5641 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5642 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5643 {
5644 if (!e->inline_failed)
5645 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5646 else
5647 e->remove ();
5648 }
5649 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5650 && id->dst_node->clones)
5651 for (node = id->dst_node->clones; node != id->dst_node;)
5652 {
5653 node->remove_stmt_references (gsi_stmt (bsi));
5654 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5655 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5656 {
5657 if (!e->inline_failed)
5658 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5659 else
5660 e->remove ();
5661 }
5662
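/* Advance in a pre-order walk of the clone tree: descend into child
clones first, then go to the next sibling, climbing back up to an
ancestor's next sibling when a subtree is exhausted. */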
5663 if (node->clones)
5664 node = node->clones;
5665 else if (node->next_sibling_clone)
5666 node = node->next_sibling_clone;
5667 else
5668 {
5669 while (node != id->dst_node && !node->next_sibling_clone)
5670 node = node->clone_of;
5671 if (node != id->dst_node)
5672 node = node->next_sibling_clone;
5673 }
5674 }
5675 }
5676 delete_basic_block (b);
5677 changed = true;
5678 }
5679 }
5680
5681 return changed;
5682 }
5683
5684 /* Update clone info after duplication. */
5685
5686 static void
5687 update_clone_info (copy_body_data * id)
5688 {
5689 struct cgraph_node *node;
5690 if (!id->dst_node->clones)
5691 return;
5692 for (node = id->dst_node->clones; node != id->dst_node;)
5693 {
5694 /* First update replace maps to match the new body. */
5695 if (node->clone.tree_map)
5696 {
5697 unsigned int i;
5698 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5699 {
5700 struct ipa_replace_map *replace_info;
5701 replace_info = (*node->clone.tree_map)[i];
5702 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5703 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5704 }
5705 }
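/* Advance to the next clone in a pre-order walk of the clone tree. */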
5706 if (node->clones)
5707 node = node->clones;
5708 else if (node->next_sibling_clone)
5709 node = node->next_sibling_clone;
5710 else
5711 {
5712 while (node != id->dst_node && !node->next_sibling_clone)
5713 node = node->clone_of;
5714 if (node != id->dst_node)
5715 node = node->next_sibling_clone;
5716 }
5717 }
5718 }
5719
5720 /* Create a copy of a function's tree.
5721 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5722 of the original function and the new copied function
5723 respectively. In case we want to replace a DECL
5724 tree with another tree while duplicating the function's
5725 body, TREE_MAP represents the mapping between these
5726 trees. If UPDATE_CLONES is set, the call_stmt fields
5727 of edges of clones of the function will be updated.
5728
5729 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5730 from the new version.
5731 If SKIP_RETURN is true, the new version will return void.
5732 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5733 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5734 */
5735 void
5736 tree_function_versioning (tree old_decl, tree new_decl,
5737 vec<ipa_replace_map *, va_gc> *tree_map,
5738 bool update_clones, bitmap args_to_skip,
5739 bool skip_return, bitmap blocks_to_copy,
5740 basic_block new_entry)
5741 {
5742 struct cgraph_node *old_version_node;
5743 struct cgraph_node *new_version_node;
5744 copy_body_data id;
5745 tree p;
5746 unsigned i;
5747 struct ipa_replace_map *replace_info;
5748 basic_block old_entry_block, bb;
5749 auto_vec<gimple *, 10> init_stmts;
5750 tree vars = NULL_TREE;
5751 bitmap debug_args_to_skip = args_to_skip;
5752
5753 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5754 && TREE_CODE (new_decl) == FUNCTION_DECL);
5755 DECL_POSSIBLY_INLINED (old_decl) = 1;
5756
5757 old_version_node = cgraph_node::get (old_decl);
5758 gcc_checking_assert (old_version_node);
5759 new_version_node = cgraph_node::get (new_decl);
5760 gcc_checking_assert (new_version_node);
5761
5762 /* Copy over debug args. */
5763 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5764 {
5765 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5766 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5767 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5768 old_debug_args = decl_debug_args_lookup (old_decl);
5769 if (old_debug_args)
5770 {
5771 new_debug_args = decl_debug_args_insert (new_decl);
5772 *new_debug_args = vec_safe_copy (*old_debug_args);
5773 }
5774 }
5775
5776 /* Output the inlining info for this abstract function, since it has been
5777 inlined. If we don't do this now, we can lose the information about the
5778 variables in the function when the blocks get blown away as soon as we
5779 remove the cgraph node. */
5780 (*debug_hooks->outlining_inline_function) (old_decl);
5781
5782 DECL_ARTIFICIAL (new_decl) = 1;
5783 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5784 if (DECL_ORIGIN (old_decl) == old_decl)
5785 old_version_node->used_as_abstract_origin = true;
5786 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5787
5788 /* Prepare the data structures for the tree copy. */
5789 memset (&id, 0, sizeof (id));
5790
5791 /* Generate a new name for the new version. */
5792 id.statements_to_fold = new hash_set<gimple *>;
5793
5794 id.decl_map = new hash_map<tree, tree>;
5795 id.debug_map = NULL;
5796 id.src_fn = old_decl;
5797 id.dst_fn = new_decl;
5798 id.src_node = old_version_node;
5799 id.dst_node = new_version_node;
5800 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5801 id.blocks_to_copy = blocks_to_copy;
5802
5803 id.copy_decl = copy_decl_no_change;
5804 id.transform_call_graph_edges
5805 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5806 id.transform_new_cfg = true;
5807 id.transform_return_to_modify = false;
5808 id.transform_parameter = false;
5809 id.transform_lang_insert_block = NULL;
5810
5811 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5812 (DECL_STRUCT_FUNCTION (old_decl));
5813 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5814 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5815 initialize_cfun (new_decl, old_decl,
5816 new_entry ? new_entry->count : old_entry_block->count);
5817 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5818 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5819 = id.src_cfun->gimple_df->ipa_pta;
5820
5821 /* Copy the function's static chain. */
5822 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5823 if (p)
5824 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5825 = copy_static_chain (p, &id);
5826
5827 /* If there's a tree_map, prepare for substitution. */
5828 if (tree_map)
5829 for (i = 0; i < tree_map->length (); i++)
5830 {
5831 gimple *init;
5832 replace_info = (*tree_map)[i];
5833 if (replace_info->replace_p)
5834 {
5835 int parm_num = -1;
5836 if (!replace_info->old_tree)
5837 {
5838 int p = replace_info->parm_num;
5839 tree parm;
5840 tree req_type, new_type;
5841
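/* The replacement is identified only by its index; walk to the
PARM_NUM-th argument of the old function. */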
5842 for (parm = DECL_ARGUMENTS (old_decl); p;
5843 parm = DECL_CHAIN (parm))
5844 p--;
5845 replace_info->old_tree = parm;
5846 parm_num = replace_info->parm_num;
5847 req_type = TREE_TYPE (parm);
5848 new_type = TREE_TYPE (replace_info->new_tree);
5849 if (!useless_type_conversion_p (req_type, new_type))
5850 {
5851 if (fold_convertible_p (req_type, replace_info->new_tree))
5852 replace_info->new_tree
5853 = fold_build1 (NOP_EXPR, req_type,
5854 replace_info->new_tree);
5855 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5856 replace_info->new_tree
5857 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5858 replace_info->new_tree);
5859 else
5860 {
5861 if (dump_file)
5862 {
5863 fprintf (dump_file, " const ");
5864 print_generic_expr (dump_file,
5865 replace_info->new_tree);
5866 fprintf (dump_file,
5867 " can't be converted to param ");
5868 print_generic_expr (dump_file, parm);
5869 fprintf (dump_file, "\n");
5870 }
5871 replace_info->old_tree = NULL;
5872 }
5873 }
5874 }
5875 else
5876 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5877 if (replace_info->old_tree)
5878 {
5879 init = setup_one_parameter (&id, replace_info->old_tree,
5880 replace_info->new_tree, id.src_fn,
5881 NULL,
5882 &vars);
5883 if (init)
5884 init_stmts.safe_push (init);
5885 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5886 {
5887 if (parm_num == -1)
5888 {
5889 tree parm;
5890 int p;
5891 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5892 parm = DECL_CHAIN (parm), p++)
5893 if (parm == replace_info->old_tree)
5894 {
5895 parm_num = p;
5896 break;
5897 }
5898 }
5899 if (parm_num != -1)
5900 {
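/* Make a private copy of the skip bitmap before modifying it for
debug purposes, so the caller's ARGS_TO_SKIP stays untouched. */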
5901 if (debug_args_to_skip == args_to_skip)
5902 {
5903 debug_args_to_skip = BITMAP_ALLOC (NULL);
5904 bitmap_copy (debug_args_to_skip, args_to_skip);
5905 }
5906 bitmap_clear_bit (debug_args_to_skip, parm_num);
5907 }
5908 }
5909 }
5910 }
5911 }
5912 /* Copy the function's arguments. */
5913 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5914 DECL_ARGUMENTS (new_decl)
5915 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5916 args_to_skip, &vars);
5917
5918 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5919 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5920
5921 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5922
5923 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5924 /* Add local vars. */
5925 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5926
5927 if (DECL_RESULT (old_decl) == NULL_TREE)
5928 ;
5929 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5930 {
5931 DECL_RESULT (new_decl)
5932 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5933 RESULT_DECL, NULL_TREE, void_type_node);
5934 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5935 cfun->returns_struct = 0;
5936 cfun->returns_pcc_struct = 0;
5937 }
5938 else
5939 {
5940 tree old_name;
5941 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5942 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5943 if (gimple_in_ssa_p (id.src_cfun)
5944 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5945 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5946 {
5947 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5948 insert_decl_map (&id, old_name, new_name);
5949 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5950 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5951 }
5952 }
5953
5954 /* Set up the destination function's loop tree. */
5955 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5956 {
5957 cfun->curr_properties &= ~PROP_loops;
5958 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5959 cfun->curr_properties |= PROP_loops;
5960 }
5961
5962 /* Copy the function's body. */
5963 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5964 new_entry);
5965
5966 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5967 number_blocks (new_decl);
5968
5969 /* We want to create the BB unconditionally, so that the addition of
5970 debug stmts doesn't affect BB count, which may in the end cause
5971 codegen differences. */
5972 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5973 while (init_stmts.length ())
5974 insert_init_stmt (&id, bb, init_stmts.pop ());
5975 update_clone_info (&id);
5976
5977 /* Remap the nonlocal_goto_save_area, if any. */
5978 if (cfun->nonlocal_goto_save_area)
5979 {
5980 struct walk_stmt_info wi;
5981
5982 memset (&wi, 0, sizeof (wi));
5983 wi.info = &id;
5984 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5985 }
5986
5987 /* Clean up. */
5988 delete id.decl_map;
5989 if (id.debug_map)
5990 delete id.debug_map;
5991 free_dominance_info (CDI_DOMINATORS);
5992 free_dominance_info (CDI_POST_DOMINATORS);
5993
5994 update_max_bb_count ();
5995 fold_marked_statements (0, id.statements_to_fold);
5996 delete id.statements_to_fold;
5997 delete_unreachable_blocks_update_callgraph (&id);
5998 if (id.dst_node->definition)
5999 cgraph_edge::rebuild_references ();
6000 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6001 {
6002 calculate_dominance_info (CDI_DOMINATORS);
6003 fix_loop_structure (NULL);
6004 }
6005 update_ssa (TODO_update_ssa);
6006
6007 /* After partial cloning we need to rescale frequencies, so they are
6008 within proper range in the cloned function. */
6009 if (new_entry)
6010 {
6011 struct cgraph_edge *e;
6012 rebuild_frequencies ();
6013
6014 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6015 for (e = new_version_node->callees; e; e = e->next_callee)
6016 {
6017 basic_block bb = gimple_bb (e->call_stmt);
6018 e->count = bb->count;
6019 }
6020 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6021 {
6022 basic_block bb = gimple_bb (e->call_stmt);
6023 e->count = bb->count;
6024 }
6025 }
6026
6027 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6028 {
6029 tree parm;
6030 vec<tree, va_gc> **debug_args = NULL;
6031 unsigned int len = 0;
6032 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6033 parm; parm = DECL_CHAIN (parm), i++)
6034 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6035 {
6036 tree ddecl;
6037
6038 if (debug_args == NULL)
6039 {
6040 debug_args = decl_debug_args_insert (new_decl);
6041 len = vec_safe_length (*debug_args);
6042 }
6043 ddecl = make_node (DEBUG_EXPR_DECL);
6044 DECL_ARTIFICIAL (ddecl) = 1;
6045 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6046 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6047 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6048 vec_safe_push (*debug_args, ddecl);
6049 }
6050 if (debug_args != NULL)
6051 {
6052 /* On the callee side, add
6053 DEBUG D#Y s=> parm
6054 DEBUG var => D#Y
6055 stmts to the first bb where var is a VAR_DECL created for the
6056 optimized away parameter in DECL_INITIAL block. This hints
6057 in the debug info that var (whose DECL_ORIGIN is the parm
6058 PARM_DECL) is optimized away, but could be looked up at the
6059 call site as value of D#X there. */
6060 tree var = vars, vexpr;
6061 gimple_stmt_iterator cgsi
6062 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6063 gimple *def_temp;
6064 var = vars;
6065 i = vec_safe_length (*debug_args);
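/* *DEBUG_ARGS holds (origin PARM_DECL, DEBUG_EXPR_DECL) pairs; walk the
newly added pairs from the end and emit the binding stmts for each. */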
6066 do
6067 {
6068 i -= 2;
6069 while (var != NULL_TREE
6070 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6071 var = TREE_CHAIN (var);
6072 if (var == NULL_TREE)
6073 break;
6074 vexpr = make_node (DEBUG_EXPR_DECL);
6075 parm = (**debug_args)[i];
6076 DECL_ARTIFICIAL (vexpr) = 1;
6077 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6078 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6079 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6080 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6081 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6082 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6083 }
6084 while (i > len);
6085 }
6086 }
6087
6088 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6089 BITMAP_FREE (debug_args_to_skip);
6090 free_dominance_info (CDI_DOMINATORS);
6091 free_dominance_info (CDI_POST_DOMINATORS);
6092
6093 gcc_assert (!id.debug_stmts.exists ());
6094 pop_cfun ();
6095 return;
6096 }
6097
6098 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6099 the callee and return the inlined body on success. */
6100
6101 tree
6102 maybe_inline_call_in_expr (tree exp)
6103 {
6104 tree fn = get_callee_fndecl (exp);
6105
6106 /* We can only try to inline "const" functions. */
6107 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6108 {
6109 call_expr_arg_iterator iter;
6110 copy_body_data id;
6111 tree param, arg, t;
6112 hash_map<tree, tree> decl_map;
6113
6114 /* Remap the parameters. */
6115 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6116 param;
6117 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6118 decl_map.put (param, arg);
6119
6120 memset (&id, 0, sizeof (id));
6121 id.src_fn = fn;
6122 id.dst_fn = current_function_decl;
6123 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6124 id.decl_map = &decl_map;
6125
6126 id.copy_decl = copy_decl_no_change;
6127 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6128 id.transform_new_cfg = false;
6129 id.transform_return_to_modify = true;
6130 id.transform_parameter = true;
6131 id.transform_lang_insert_block = NULL;
6132
6133 /* Make sure not to unshare trees behind the front-end's back
6134 since front-end specific mechanisms may rely on sharing. */
6135 id.regimplify = false;
6136 id.do_not_unshare = true;
6137
6138 /* We're not inside any EH region. */
6139 id.eh_lp_nr = 0;
6140
6141 t = copy_tree_body (&id);
6142
6143 /* We can only return something suitable for use in a GENERIC
6144 expression tree. */
6145 if (TREE_CODE (t) == MODIFY_EXPR)
6146 return TREE_OPERAND (t, 1);
6147 }
6148
6149 return NULL_TREE;
6150 }
6151
6152 /* Duplicate a type, fields and all. */
6153
6154 tree
6155 build_duplicate_type (tree type)
6156 {
6157 struct copy_body_data id;
6158
6159 memset (&id, 0, sizeof (id));
6160 id.src_fn = current_function_decl;
6161 id.dst_fn = current_function_decl;
6162 id.src_cfun = cfun;
6163 id.decl_map = new hash_map<tree, tree>;
6164 id.debug_map = NULL;
6165 id.copy_decl = copy_decl_no_change;
6166
6167 type = remap_type_1 (type, &id);
6168
6169 delete id.decl_map;
6170 if (id.debug_map)
6171 delete id.debug_map;
6172
6173 TYPE_CANONICAL (type) = type;
6174
6175 return type;
6176 }
6177
6178 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6179 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6180 evaluation. */
6181
6182 tree
6183 copy_fn (tree fn, tree& parms, tree& result)
6184 {
6185 copy_body_data id;
6186 tree param;
6187 hash_map<tree, tree> decl_map;
6188
6189 tree *p = &parms;
6190 *p = NULL_TREE;
6191
6192 memset (&id, 0, sizeof (id));
6193 id.src_fn = fn;
6194 id.dst_fn = current_function_decl;
6195 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6196 id.decl_map = &decl_map;
6197
6198 id.copy_decl = copy_decl_no_change;
6199 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6200 id.transform_new_cfg = false;
6201 id.transform_return_to_modify = false;
6202 id.transform_parameter = true;
6203 id.transform_lang_insert_block = NULL;
6204
6205 /* Make sure not to unshare trees behind the front-end's back
6206 since front-end specific mechanisms may rely on sharing. */
6207 id.regimplify = false;
6208 id.do_not_unshare = true;
6209
6210 /* We're not inside any EH region. */
6211 id.eh_lp_nr = 0;
6212
6213 /* Remap the parameters and result and return them to the caller. */
6214 for (param = DECL_ARGUMENTS (fn);
6215 param;
6216 param = DECL_CHAIN (param))
6217 {
6218 *p = remap_decl (param, &id);
6219 p = &DECL_CHAIN (*p);
6220 }
6221
6222 if (DECL_RESULT (fn))
6223 result = remap_decl (DECL_RESULT (fn), &id);
6224 else
6225 result = NULL_TREE;
6226
6227 return copy_tree_body (&id);
6228 }