re PR tree-optimization/86214 (Strongly increased stack usage)
gcc.git: gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63
64 /* I'm not real happy about this, but we need to handle gimple and
65 non-gimple trees. */
66
67 /* Inlining, Cloning, Versioning, Parallelization
68
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements is adjusted accordingly.
75
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
80
81 Versioning: a function body is duplicated and the result is a new
82 function, rather than being inlined into blocks of an existing
83 function as with inlining. Some parameters will become constants.
84
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
88
89 All of these will simultaneously look up any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined) those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
96
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
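
/* Illustrative sketch only, not part of the implementation: at the
   source level the inlining transformation described above roughly
   turns

     int square (int x) { return x * x; }
     int caller (int a) { return square (a + 1); }

   into something like

     int caller (int a)
     {
       int x = a + 1;         // PARM_DECL remapped to a VAR_DECL
       int retval = x * x;    // RETURN_EXPR became a MODIFY_EXPR
       return retval;         // storing to the return variable
     }

   The names square, caller, x and retval are made up for this sketch;
   the real transformation happens on GIMPLE, with EH regions and
   callgraph edges duplicated as described above.  */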
98
99 /* To Do:
100
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
106 are not needed.
107
108 o Provide heuristics to clamp inlining of recursive template
109 calls? */
110
111
112 /* Weights that estimate_num_insns uses to estimate the size of the
113 produced code. */
114
115 eni_weights eni_size_weights;
116
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
119
120 eni_weights eni_time_weights;
121
122 /* Prototypes. */
123
124 static tree declare_return_variable (copy_body_data *, tree, tree,
125 basic_block);
126 static void remap_block (tree *, copy_body_data *);
127 static void copy_bind_expr (tree *, int *, copy_body_data *);
128 static void declare_inline_vars (tree, tree);
129 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
130 static void prepend_lexical_block (tree current_block, tree new_block);
131 static tree copy_decl_to_var (tree, copy_body_data *);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, it is used for more than that. */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
151
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 n = id->decl_map->get (val);
218 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 return *n;
220 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 DECL_ARTIFICIAL (vexpr) = 1;
222 TREE_TYPE (vexpr) = TREE_TYPE (name);
223 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 insert_decl_map (id, val, vexpr);
227 return vexpr;
228 }
229
230 processing_debug_stmt = -1;
231 return name;
232 }
233
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var = SSA_NAME_VAR (name);
236 if (!var
237 || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 && VAR_P (var)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 && DECL_ARTIFICIAL (var)
241 && DECL_IGNORED_P (var)
242 && !DECL_NAME (var)))
243 {
244 struct ptr_info_def *pi;
245 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246 if (!var && SSA_NAME_IDENTIFIER (name))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248 insert_decl_map (id, name, new_tree);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id->src_cfun->gimple_df
253 && id->src_cfun->gimple_df->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name))
255 && (pi = SSA_NAME_PTR_INFO (name))
256 && !pi->pt.anything)
257 {
258 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 new_pi->pt = pi->pt;
260 }
261 return new_tree;
262 }
263
264 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
265 in copy_bb. */
266 new_tree = remap_decl (var, id);
267
268 /* We might've substituted a constant or another SSA_NAME for
269 the variable.
270
271 Replace the SSA name representing RESULT_DECL by the variable during
272 inlining: this saves us from needing to introduce a PHI node in case
273 the return value is only partly initialized. */
274 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
275 && (!SSA_NAME_VAR (name)
276 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
277 || !id->transform_return_to_modify))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (new_tree);
281 insert_decl_map (id, name, new_tree);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id->src_cfun->gimple_df
286 && id->src_cfun->gimple_df->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name))
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296 /* By inlining a function having an uninitialized variable, we might
297 extend its lifetime (the variable might get reused). This causes an
298 ICE in the case we end up extending the lifetime of an SSA name
299 across an abnormal edge, but it also increases register pressure.
300
301 We simply initialize all uninitialized vars by 0, except for the
302 case where we are inlining to the very first BB. We could avoid
303 this for all BBs that are not inside strongly connected regions
304 of the CFG, but this is expensive to test. */
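/* For illustration only (hypothetical names): if the inlined body
   contains

     int tmp;
     use (tmp);

   and tmp is never initialized on some path while its SSA name appears
   in an abnormal PHI, the test below arranges for an explicit
   "tmp_N = 0" at the end of id->entry_bb instead of keeping a default
   definition in the caller.  */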
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple *init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = id->decl_map->get (decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
355 if (!n
356 && id->prevent_decl_creation_for_types
357 && id->remapping_type_depth > 0
358 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
359 return decl;
360
361 /* If we didn't already have an equivalent for this declaration, create one
362 now. */
363 if (!n)
364 {
365 /* Make a copy of the variable or label. */
366 tree t = id->copy_decl (decl, id);
367
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id, decl, t);
372
373 if (!DECL_P (t))
374 return t;
375
376 /* Remap types, if necessary. */
377 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
378 if (TREE_CODE (t) == TYPE_DECL)
379 {
380 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
381
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
386 {
387 tree x = build_variant_type_copy (TREE_TYPE (t));
388 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
389 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
390 DECL_ORIGINAL_TYPE (t) = x;
391 }
392 }
393
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
396 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
397
398 /* If fields, do likewise for offset and qualifier. */
399 if (TREE_CODE (t) == FIELD_DECL)
400 {
401 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
402 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
403 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
404 }
405
406 return t;
407 }
408
409 if (id->do_not_unshare)
410 return *n;
411 else
412 return unshare_expr (*n);
413 }
414
415 static tree
416 remap_type_1 (tree type, copy_body_data *id)
417 {
418 tree new_tree, t;
419
420 /* We do need a copy. Build and register it now. If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
422 reference type. */
423 if (TREE_CODE (type) == POINTER_TYPE)
424 {
425 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
426 TYPE_MODE (type),
427 TYPE_REF_CAN_ALIAS_ALL (type));
428 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
429 new_tree = build_type_attribute_qual_variant (new_tree,
430 TYPE_ATTRIBUTES (type),
431 TYPE_QUALS (type));
432 insert_decl_map (id, type, new_tree);
433 return new_tree;
434 }
435 else if (TREE_CODE (type) == REFERENCE_TYPE)
436 {
437 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
438 TYPE_MODE (type),
439 TYPE_REF_CAN_ALIAS_ALL (type));
440 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
441 new_tree = build_type_attribute_qual_variant (new_tree,
442 TYPE_ATTRIBUTES (type),
443 TYPE_QUALS (type));
444 insert_decl_map (id, type, new_tree);
445 return new_tree;
446 }
447 else
448 new_tree = copy_node (type);
449
450 insert_decl_map (id, type, new_tree);
451
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t = TYPE_MAIN_VARIANT (type);
455 if (type != t)
456 {
457 t = remap_type (t, id);
458 TYPE_MAIN_VARIANT (new_tree) = t;
459 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
460 TYPE_NEXT_VARIANT (t) = new_tree;
461 }
462 else
463 {
464 TYPE_MAIN_VARIANT (new_tree) = new_tree;
465 TYPE_NEXT_VARIANT (new_tree) = NULL;
466 }
467
468 if (TYPE_STUB_DECL (type))
469 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
470
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree) = NULL;
473 TYPE_REFERENCE_TO (new_tree) = NULL;
474
475 /* Copy all types that may contain references to local variables; be sure to
476 preserve sharing between the type and its main variant when possible. */
477 switch (TREE_CODE (new_tree))
478 {
479 case INTEGER_TYPE:
480 case REAL_TYPE:
481 case FIXED_POINT_TYPE:
482 case ENUMERAL_TYPE:
483 case BOOLEAN_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
485 {
486 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
488
489 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
490 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 }
492 else
493 {
494 t = TYPE_MIN_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
497
498 t = TYPE_MAX_VALUE (new_tree);
499 if (t && TREE_CODE (t) != INTEGER_CST)
500 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 }
502 return new_tree;
503
504 case FUNCTION_TYPE:
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
507 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
508 else
509 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
511 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
512 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
513 else
514 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
515 return new_tree;
516
517 case ARRAY_TYPE:
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
519 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
520 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
521 else
522 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
523
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
525 {
526 gcc_checking_assert (TYPE_DOMAIN (type)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
528 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
529 }
530 else
531 {
532 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
533 /* For array bounds where we have decided not to copy over the bounds
534 variable which isn't used in OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
537 && id->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
539 {
540 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
541 DECL_ATTRIBUTES (v)
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
543 DECL_ATTRIBUTES (v));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
545 }
546 }
547 break;
548
549 case RECORD_TYPE:
550 case UNION_TYPE:
551 case QUAL_UNION_TYPE:
552 if (TYPE_MAIN_VARIANT (type) != type
553 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
554 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
555 else
556 {
557 tree f, nf = NULL;
558
559 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
560 {
561 t = remap_decl (f, id);
562 DECL_CONTEXT (t) = new_tree;
563 DECL_CHAIN (t) = nf;
564 nf = t;
565 }
566 TYPE_FIELDS (new_tree) = nreverse (nf);
567 }
568 break;
569
570 case OFFSET_TYPE:
571 default:
572 /* Shouldn't have been thought variable sized. */
573 gcc_unreachable ();
574 }
575
576 /* All variants of a type share the same size, so use the already remapped data. */
577 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
578 {
579 tree s = TYPE_SIZE (type);
580 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
581 tree su = TYPE_SIZE_UNIT (type);
582 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
583 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
585 || s == mvs);
586 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
588 || su == mvsu);
589 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
590 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
591 }
592 else
593 {
594 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
596 }
597
598 return new_tree;
599 }
600
601 tree
602 remap_type (tree type, copy_body_data *id)
603 {
604 tree *node;
605 tree tmp;
606
607 if (type == NULL)
608 return type;
609
610 /* See if we have remapped this type. */
611 node = id->decl_map->get (type);
612 if (node)
613 return *node;
614
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type, id->src_fn))
617 {
618 insert_decl_map (id, type, type);
619 return type;
620 }
621
622 id->remapping_type_depth++;
623 tmp = remap_type_1 (type, id);
624 id->remapping_type_depth--;
625
626 return tmp;
627 }
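
/* Example for illustration only (names are made up): in

     void f (int n)
     {
       int a[n];
     }

   the type of a is variably modified because its bound depends on the
   PARM_DECL n, so when f's body is copied remap_type rebuilds the array
   type so that its size expressions refer to the remapped copy of n.
   Types that are not variably modified are simply mapped to themselves.  */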
628
629 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS. */
630
631 static bool
632 can_be_nonlocal (tree decl, copy_body_data *id)
633 {
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl) == FUNCTION_DECL)
636 return true;
637
638 /* Local static vars must be non-local or we get multiple declaration
639 problems. */
640 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
641 return true;
642
643 return false;
644 }
645
646 static tree
647 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
648 copy_body_data *id)
649 {
650 tree old_var;
651 tree new_decls = NULL_TREE;
652
653 /* Remap its variables. */
654 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
655 {
656 tree new_var;
657
658 if (can_be_nonlocal (old_var, id))
659 {
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
663 add_local_decl (cfun, old_var);
664 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
665 && !DECL_IGNORED_P (old_var)
666 && nonlocalized_list)
667 vec_safe_push (*nonlocalized_list, old_var);
668 continue;
669 }
670
671 /* Remap the variable. */
672 new_var = remap_decl (old_var, id);
673
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
677
678 if (new_var == id->retvar)
679 ;
680 else if (!new_var)
681 {
682 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
683 && !DECL_IGNORED_P (old_var)
684 && nonlocalized_list)
685 vec_safe_push (*nonlocalized_list, old_var);
686 }
687 else
688 {
689 gcc_assert (DECL_P (new_var));
690 DECL_CHAIN (new_var) = new_decls;
691 new_decls = new_var;
692
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
695 {
696 tree tem = DECL_VALUE_EXPR (new_var);
697 bool old_regimplify = id->regimplify;
698 id->remapping_type_depth++;
699 walk_tree (&tem, copy_tree_body_r, id, NULL);
700 id->remapping_type_depth--;
701 id->regimplify = old_regimplify;
702 SET_DECL_VALUE_EXPR (new_var, tem);
703 }
704 }
705 }
706
707 return nreverse (new_decls);
708 }
709
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein, and hook the new block into the block-tree. */
712
713 static void
714 remap_block (tree *block, copy_body_data *id)
715 {
716 tree old_block;
717 tree new_block;
718
719 /* Make the new block. */
720 old_block = *block;
721 new_block = make_node (BLOCK);
722 TREE_USED (new_block) = TREE_USED (old_block);
723 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
724 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
725 BLOCK_NONLOCALIZED_VARS (new_block)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
727 *block = new_block;
728
729 /* Remap its variables. */
730 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
731 &BLOCK_NONLOCALIZED_VARS (new_block),
732 id);
733
734 if (id->transform_lang_insert_block)
735 id->transform_lang_insert_block (new_block);
736
737 /* Remember the remapped block. */
738 insert_decl_map (id, old_block, new_block);
739 }
740
741 /* Copy the whole block tree and root it in id->block. */
742
743 static tree
744 remap_blocks (tree block, copy_body_data *id)
745 {
746 tree t;
747 tree new_tree = block;
748
749 if (!block)
750 return NULL;
751
752 remap_block (&new_tree, id);
753 gcc_assert (new_tree != block);
754 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
755 prepend_lexical_block (new_tree, remap_blocks (t, id));
756 /* Blocks are in arbitrary order, but make things slightly prettier and do
757 not swap order when producing a copy. */
758 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
759 return new_tree;
760 }
761
762 /* Remap the block tree rooted at BLOCK to nothing. */
763
764 static void
765 remap_blocks_to_null (tree block, copy_body_data *id)
766 {
767 tree t;
768 insert_decl_map (id, block, NULL_TREE);
769 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
770 remap_blocks_to_null (t, id);
771 }
772
773 /* Remap the location info pointed to by LOCUS. */
774
775 static location_t
776 remap_location (location_t locus, copy_body_data *id)
777 {
778 if (LOCATION_BLOCK (locus))
779 {
780 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
781 gcc_assert (n);
782 if (*n)
783 return set_block (locus, *n);
784 }
785
786 locus = LOCATION_LOCUS (locus);
787
788 if (locus != UNKNOWN_LOCATION && id->block)
789 return set_block (locus, id->block);
790
791 return locus;
792 }
793
794 static void
795 copy_statement_list (tree *tp)
796 {
797 tree_stmt_iterator oi, ni;
798 tree new_tree;
799
800 new_tree = alloc_stmt_list ();
801 ni = tsi_start (new_tree);
802 oi = tsi_start (*tp);
803 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
804 *tp = new_tree;
805
806 for (; !tsi_end_p (oi); tsi_next (&oi))
807 {
808 tree stmt = tsi_stmt (oi);
809 if (TREE_CODE (stmt) == STATEMENT_LIST)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt);
814 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
815 }
816 }
817
818 static void
819 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
820 {
821 tree block = BIND_EXPR_BLOCK (*tp);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp, walk_subtrees, NULL);
824 if (block)
825 {
826 remap_block (&block, id);
827 BIND_EXPR_BLOCK (*tp) = block;
828 }
829
830 if (BIND_EXPR_VARS (*tp))
831 /* This will remap a lot of the same decls again, but this should be
832 harmless. */
833 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
834 }
835
836
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
839
840 static gimple_seq
841 remap_gimple_seq (gimple_seq body, copy_body_data *id)
842 {
843 gimple_stmt_iterator si;
844 gimple_seq new_body = NULL;
845
846 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
847 {
848 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
849 gimple_seq_add_seq (&new_body, new_stmts);
850 }
851
852 return new_body;
853 }
854
855
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
858
859 static gimple *
860 copy_gimple_bind (gbind *stmt, copy_body_data *id)
861 {
862 gimple *new_bind;
863 tree new_block, new_vars;
864 gimple_seq body, new_body;
865
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body = gimple_bind_body (stmt);
869 new_body = remap_gimple_seq (body, id);
870
871 new_block = gimple_bind_block (stmt);
872 if (new_block)
873 remap_block (&new_block, id);
874
875 /* This will remap a lot of the same decls again, but this should be
876 harmless. */
877 new_vars = gimple_bind_vars (stmt);
878 if (new_vars)
879 new_vars = remap_decls (new_vars, NULL, id);
880
881 new_bind = gimple_build_bind (new_vars, new_body, new_block);
882
883 return new_bind;
884 }
885
886 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
887
888 static bool
889 is_parm (tree decl)
890 {
891 if (TREE_CODE (decl) == SSA_NAME)
892 {
893 decl = SSA_NAME_VAR (decl);
894 if (!decl)
895 return false;
896 }
897
898 return (TREE_CODE (decl) == PARM_DECL);
899 }
900
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
903
904 static unsigned short
905 remap_dependence_clique (copy_body_data *id, unsigned short clique)
906 {
907 if (clique == 0 || processing_debug_stmt)
908 return 0;
909 if (!id->dependence_map)
910 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
911 bool existed;
912 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
913 if (!existed)
914 newc = ++cfun->last_clique;
915 return newc;
916 }
917
918 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
919 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
920 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
921 recursing into the child nodes of *TP. */
922
923 static tree
924 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
925 {
926 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
927 copy_body_data *id = (copy_body_data *) wi_p->info;
928 tree fn = id->src_fn;
929
930 /* For recursive invocations this is no longer the LHS itself. */
931 bool is_lhs = wi_p->is_lhs;
932 wi_p->is_lhs = false;
933
934 if (TREE_CODE (*tp) == SSA_NAME)
935 {
936 *tp = remap_ssa_name (*tp, id);
937 *walk_subtrees = 0;
938 if (is_lhs)
939 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
940 return NULL;
941 }
942 else if (auto_var_in_fn_p (*tp, fn))
943 {
944 /* Local variables and labels need to be replaced by equivalent
945 variables. We don't want to copy static variables; there's
946 only one of those, no matter how many times we inline the
947 containing function. Similarly for globals from an outer
948 function. */
949 tree new_decl;
950
951 /* Remap the declaration. */
952 new_decl = remap_decl (*tp, id);
953 gcc_assert (new_decl);
954 /* Replace this variable with the copy. */
955 STRIP_TYPE_NOPS (new_decl);
956 /* ??? The C++ frontend uses void * pointer zero to initialize
957 any other type. This confuses the middle-end type verification.
958 As cloned bodies do not go through gimplification again the fixup
959 there doesn't trigger. */
960 if (TREE_CODE (new_decl) == INTEGER_CST
961 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
962 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
963 *tp = new_decl;
964 *walk_subtrees = 0;
965 }
966 else if (TREE_CODE (*tp) == STATEMENT_LIST)
967 gcc_unreachable ();
968 else if (TREE_CODE (*tp) == SAVE_EXPR)
969 gcc_unreachable ();
970 else if (TREE_CODE (*tp) == LABEL_DECL
971 && (!DECL_CONTEXT (*tp)
972 || decl_function_context (*tp) == id->src_fn))
973 /* These may need to be remapped for EH handling. */
974 *tp = remap_decl (*tp, id);
975 else if (TREE_CODE (*tp) == FIELD_DECL)
976 {
977 /* If the enclosing record type is variably_modified_type_p, the field
978 has already been remapped. Otherwise, it need not be. */
979 tree *n = id->decl_map->get (*tp);
980 if (n)
981 *tp = *n;
982 *walk_subtrees = 0;
983 }
984 else if (TYPE_P (*tp))
985 /* Types may need remapping as well. */
986 *tp = remap_type (*tp, id);
987 else if (CONSTANT_CLASS_P (*tp))
988 {
989 /* If this is a constant, we have to copy the node iff the type
990 will be remapped. copy_tree_r will not copy a constant. */
991 tree new_type = remap_type (TREE_TYPE (*tp), id);
992
993 if (new_type == TREE_TYPE (*tp))
994 *walk_subtrees = 0;
995
996 else if (TREE_CODE (*tp) == INTEGER_CST)
997 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
998 else
999 {
1000 *tp = copy_node (*tp);
1001 TREE_TYPE (*tp) = new_type;
1002 }
1003 }
1004 else
1005 {
1006 /* Otherwise, just copy the node. Note that copy_tree_r already
1007 knows not to copy VAR_DECLs, etc., so this is safe. */
1008
1009 if (TREE_CODE (*tp) == MEM_REF)
1010 {
1011 /* We need to re-canonicalize MEM_REFs from inline substitutions
1012 that can happen when a pointer argument is an ADDR_EXPR.
1013 Recurse here manually to allow that. */
1014 tree ptr = TREE_OPERAND (*tp, 0);
1015 tree type = remap_type (TREE_TYPE (*tp), id);
1016 tree old = *tp;
1017 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1018 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1019 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1020 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1021 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1022 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1023 {
1024 MR_DEPENDENCE_CLIQUE (*tp)
1025 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1026 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1027 }
1028 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1029 remapped a parameter as the property might be valid only
1030 for the parameter itself. */
1031 if (TREE_THIS_NOTRAP (old)
1032 && (!is_parm (TREE_OPERAND (old, 0))
1033 || (!id->transform_parameter && is_parm (ptr))))
1034 TREE_THIS_NOTRAP (*tp) = 1;
1035 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1036 *walk_subtrees = 0;
1037 return NULL;
1038 }
1039
1040 /* Here is the "usual case". Copy this tree node, and then
1041 tweak some special cases. */
1042 copy_tree_r (tp, walk_subtrees, NULL);
1043
1044 if (TREE_CODE (*tp) != OMP_CLAUSE)
1045 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1046
1047 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1048 {
1049 /* The copied TARGET_EXPR has never been expanded, even if the
1050 original node was expanded already. */
1051 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1052 TREE_OPERAND (*tp, 3) = NULL_TREE;
1053 }
1054 else if (TREE_CODE (*tp) == ADDR_EXPR)
1055 {
1056 /* Variable substitution need not be simple. In particular,
1057 the MEM_REF substitution above. Make sure that
1058 TREE_CONSTANT and friends are up-to-date. */
1059 int invariant = is_gimple_min_invariant (*tp);
1060 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1061 recompute_tree_invariant_for_addr_expr (*tp);
1062
1063 /* If this used to be invariant, but is not any longer,
1064 then regimplification is probably needed. */
1065 if (invariant && !is_gimple_min_invariant (*tp))
1066 id->regimplify = true;
1067
1068 *walk_subtrees = 0;
1069 }
1070 }
1071
1072 /* Update the TREE_BLOCK for the cloned expr. */
1073 if (EXPR_P (*tp))
1074 {
1075 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1076 tree old_block = TREE_BLOCK (*tp);
1077 if (old_block)
1078 {
1079 tree *n;
1080 n = id->decl_map->get (TREE_BLOCK (*tp));
1081 if (n)
1082 new_block = *n;
1083 }
1084 TREE_SET_BLOCK (*tp, new_block);
1085 }
1086
1087 /* Keep iterating. */
1088 return NULL_TREE;
1089 }
1090
1091
1092 /* Called via walk_tree from copy_tree_body and related walkers. DATA
1093 is really a `copy_body_data *'. */
1094
1095 tree
1096 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1097 {
1098 copy_body_data *id = (copy_body_data *) data;
1099 tree fn = id->src_fn;
1100 tree new_block;
1101
1102 /* Begin by recognizing trees that we'll completely rewrite for the
1103 inlining context. Our output for these trees is completely
1104 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1105 into an edge). Further down, we'll handle trees that get
1106 duplicated and/or tweaked. */
1107
1108 /* When requested, RETURN_EXPRs should be transformed to just the
1109 contained MODIFY_EXPR. The branch semantics of the return will
1110 be handled elsewhere by manipulating the CFG rather than a statement. */
1111 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1112 {
1113 tree assignment = TREE_OPERAND (*tp, 0);
1114
1115 /* If we're returning something, just turn that into an
1116 assignment into the equivalent of the original RESULT_DECL.
1117 If the "assignment" is just the result decl, the result
1118 decl has already been set (e.g. a recent "foo (&result_decl,
1119 ...)"); just toss the entire RETURN_EXPR. */
1120 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1121 {
1122 /* Replace the RETURN_EXPR with (a copy of) the
1123 MODIFY_EXPR hanging underneath. */
1124 *tp = copy_node (assignment);
1125 }
1126 else /* Else the RETURN_EXPR returns no value. */
1127 {
1128 *tp = NULL;
1129 return (tree) (void *)1;
1130 }
1131 }
1132 else if (TREE_CODE (*tp) == SSA_NAME)
1133 {
1134 *tp = remap_ssa_name (*tp, id);
1135 *walk_subtrees = 0;
1136 return NULL;
1137 }
1138
1139 /* Local variables and labels need to be replaced by equivalent
1140 variables. We don't want to copy static variables; there's only
1141 one of those, no matter how many times we inline the containing
1142 function. Similarly for globals from an outer function. */
1143 else if (auto_var_in_fn_p (*tp, fn))
1144 {
1145 tree new_decl;
1146
1147 /* Remap the declaration. */
1148 new_decl = remap_decl (*tp, id);
1149 gcc_assert (new_decl);
1150 /* Replace this variable with the copy. */
1151 STRIP_TYPE_NOPS (new_decl);
1152 *tp = new_decl;
1153 *walk_subtrees = 0;
1154 }
1155 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1156 copy_statement_list (tp);
1157 else if (TREE_CODE (*tp) == SAVE_EXPR
1158 || TREE_CODE (*tp) == TARGET_EXPR)
1159 remap_save_expr (tp, id->decl_map, walk_subtrees);
1160 else if (TREE_CODE (*tp) == LABEL_DECL
1161 && (! DECL_CONTEXT (*tp)
1162 || decl_function_context (*tp) == id->src_fn))
1163 /* These may need to be remapped for EH handling. */
1164 *tp = remap_decl (*tp, id);
1165 else if (TREE_CODE (*tp) == BIND_EXPR)
1166 copy_bind_expr (tp, walk_subtrees, id);
1167 /* Types may need remapping as well. */
1168 else if (TYPE_P (*tp))
1169 *tp = remap_type (*tp, id);
1170
1171 /* If this is a constant, we have to copy the node iff the type will be
1172 remapped. copy_tree_r will not copy a constant. */
1173 else if (CONSTANT_CLASS_P (*tp))
1174 {
1175 tree new_type = remap_type (TREE_TYPE (*tp), id);
1176
1177 if (new_type == TREE_TYPE (*tp))
1178 *walk_subtrees = 0;
1179
1180 else if (TREE_CODE (*tp) == INTEGER_CST)
1181 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1182 else
1183 {
1184 *tp = copy_node (*tp);
1185 TREE_TYPE (*tp) = new_type;
1186 }
1187 }
1188
1189 /* Otherwise, just copy the node. Note that copy_tree_r already
1190 knows not to copy VAR_DECLs, etc., so this is safe. */
1191 else
1192 {
1193 /* Here we handle trees that are not completely rewritten.
1194 First we detect some inlining-induced bogosities for
1195 discarding. */
1196 if (TREE_CODE (*tp) == MODIFY_EXPR
1197 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1198 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1199 {
1200 /* Some assignments VAR = VAR; don't generate any rtl code
1201 and thus don't count as variable modification. Avoid
1202 keeping bogosities like 0 = 0. */
1203 tree decl = TREE_OPERAND (*tp, 0), value;
1204 tree *n;
1205
1206 n = id->decl_map->get (decl);
1207 if (n)
1208 {
1209 value = *n;
1210 STRIP_TYPE_NOPS (value);
1211 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1212 {
1213 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1214 return copy_tree_body_r (tp, walk_subtrees, data);
1215 }
1216 }
1217 }
1218 else if (TREE_CODE (*tp) == INDIRECT_REF)
1219 {
1220 /* Get rid of *& from inline substitutions that can happen when a
1221 pointer argument is an ADDR_EXPR. */
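/* Illustrative sketch (hypothetical names): when inlining

     void g (int *p) { *p = 1; }

   at a call "g (&x)", the parameter p is mapped to "&x", so "*p" would
   become "*&x"; the folding below turns that back into plain "x".  */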
1222 tree decl = TREE_OPERAND (*tp, 0);
1223 tree *n = id->decl_map->get (decl);
1224 if (n)
1225 {
1226 /* If we happen to get an ADDR_EXPR in n->value, strip
1227 it manually here as we'll eventually get ADDR_EXPRs
1228 which lie about their types pointed to. In this case
1229 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1230 but we absolutely rely on that. As fold_indirect_ref
1231 does other useful transformations, try that first, though. */
1232 tree type = TREE_TYPE (*tp);
1233 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1234 tree old = *tp;
1235 *tp = gimple_fold_indirect_ref (ptr);
1236 if (! *tp)
1237 {
1238 type = remap_type (type, id);
1239 if (TREE_CODE (ptr) == ADDR_EXPR)
1240 {
1241 *tp
1242 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1243 /* ??? We should either assert here or build
1244 a VIEW_CONVERT_EXPR instead of blindly leaking
1245 incompatible types to our IL. */
1246 if (! *tp)
1247 *tp = TREE_OPERAND (ptr, 0);
1248 }
1249 else
1250 {
1251 *tp = build1 (INDIRECT_REF, type, ptr);
1252 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1253 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1254 TREE_READONLY (*tp) = TREE_READONLY (old);
1255 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1256 have remapped a parameter as the property might be
1257 valid only for the parameter itself. */
1258 if (TREE_THIS_NOTRAP (old)
1259 && (!is_parm (TREE_OPERAND (old, 0))
1260 || (!id->transform_parameter && is_parm (ptr))))
1261 TREE_THIS_NOTRAP (*tp) = 1;
1262 }
1263 }
1264 *walk_subtrees = 0;
1265 return NULL;
1266 }
1267 }
1268 else if (TREE_CODE (*tp) == MEM_REF)
1269 {
1270 /* We need to re-canonicalize MEM_REFs from inline substitutions
1271 that can happen when a pointer argument is an ADDR_EXPR.
1272 Recurse here manually to allow that. */
1273 tree ptr = TREE_OPERAND (*tp, 0);
1274 tree type = remap_type (TREE_TYPE (*tp), id);
1275 tree old = *tp;
1276 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1277 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1278 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1279 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1280 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1281 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1282 {
1283 MR_DEPENDENCE_CLIQUE (*tp)
1284 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1285 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1286 }
1287 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1288 remapped a parameter as the property might be valid only
1289 for the parameter itself. */
1290 if (TREE_THIS_NOTRAP (old)
1291 && (!is_parm (TREE_OPERAND (old, 0))
1292 || (!id->transform_parameter && is_parm (ptr))))
1293 TREE_THIS_NOTRAP (*tp) = 1;
1294 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1295 *walk_subtrees = 0;
1296 return NULL;
1297 }
1298
1299 /* Here is the "usual case". Copy this tree node, and then
1300 tweak some special cases. */
1301 copy_tree_r (tp, walk_subtrees, NULL);
1302
1303 /* If EXPR has a block defined, map it to the newly constructed block.
1304 When inlining we want EXPRs without a block to appear in the block
1305 of the function call if we are not remapping a type. */
1306 if (EXPR_P (*tp))
1307 {
1308 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1309 if (TREE_BLOCK (*tp))
1310 {
1311 tree *n;
1312 n = id->decl_map->get (TREE_BLOCK (*tp));
1313 if (n)
1314 new_block = *n;
1315 }
1316 TREE_SET_BLOCK (*tp, new_block);
1317 }
1318
1319 if (TREE_CODE (*tp) != OMP_CLAUSE)
1320 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1321
1322 /* The copied TARGET_EXPR has never been expanded, even if the
1323 original node was expanded already. */
1324 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1325 {
1326 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1327 TREE_OPERAND (*tp, 3) = NULL_TREE;
1328 }
1329
1330 /* Variable substitution need not be simple. In particular, the
1331 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1332 and friends are up-to-date. */
1333 else if (TREE_CODE (*tp) == ADDR_EXPR)
1334 {
1335 int invariant = is_gimple_min_invariant (*tp);
1336 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1337
1338 /* Handle the case where we substituted an INDIRECT_REF
1339 into the operand of the ADDR_EXPR. */
1340 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1341 {
1342 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1343 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1344 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1345 *tp = t;
1346 }
1347 else
1348 recompute_tree_invariant_for_addr_expr (*tp);
1349
1350 /* If this used to be invariant, but is not any longer,
1351 then regimplification is probably needed. */
1352 if (invariant && !is_gimple_min_invariant (*tp))
1353 id->regimplify = true;
1354
1355 *walk_subtrees = 0;
1356 }
1357 }
1358
1359 /* Keep iterating. */
1360 return NULL_TREE;
1361 }
1362
1363 /* Helper for remap_gimple_stmt. Given an EH region number for the
1364 source function, map that to the duplicate EH region number in
1365 the destination function. */
1366
1367 static int
1368 remap_eh_region_nr (int old_nr, copy_body_data *id)
1369 {
1370 eh_region old_r, new_r;
1371
1372 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1373 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1374
1375 return new_r->index;
1376 }
1377
1378 /* Similar, but operate on INTEGER_CSTs. */
1379
1380 static tree
1381 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1382 {
1383 int old_nr, new_nr;
1384
1385 old_nr = tree_to_shwi (old_t_nr);
1386 new_nr = remap_eh_region_nr (old_nr, id);
1387
1388 return build_int_cst (integer_type_node, new_nr);
1389 }
1390
1391 /* Helper for copy_bb. Remap statement STMT using the inlining
1392 information in ID. Return the new statement copy. */
1393
1394 static gimple_seq
1395 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1396 {
1397 gimple *copy = NULL;
1398 struct walk_stmt_info wi;
1399 bool skip_first = false;
1400 gimple_seq stmts = NULL;
1401
1402 if (is_gimple_debug (stmt)
1403 && (gimple_debug_nonbind_marker_p (stmt)
1404 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1405 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1406 return NULL;
1407
1408 /* Begin by recognizing trees that we'll completely rewrite for the
1409 inlining context. Our output for these trees is completely
1410 different from our input (e.g. RETURN_EXPR is deleted and morphs
1411 into an edge). Further down, we'll handle trees that get
1412 duplicated and/or tweaked. */
1413
1414 /* When requested, GIMPLE_RETURN should be transformed to just the
1415 contained GIMPLE_ASSIGN. The branch semantics of the return will
1416 be handled elsewhere by manipulating the CFG rather than the
1417 statement. */
1418 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1419 {
1420 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1421
1422 /* If we're returning something, just turn that into an
1423 assignment to the equivalent of the original RESULT_DECL.
1424 If RETVAL is just the result decl, the result decl has
1425 already been set (e.g. a recent "foo (&result_decl, ...)");
1426 just toss the entire GIMPLE_RETURN. */
1427 if (retval
1428 && (TREE_CODE (retval) != RESULT_DECL
1429 && (TREE_CODE (retval) != SSA_NAME
1430 || ! SSA_NAME_VAR (retval)
1431 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1432 {
1433 copy = gimple_build_assign (id->do_not_unshare
1434 ? id->retvar : unshare_expr (id->retvar),
1435 retval);
1436 /* id->retvar is already substituted. Skip it on later remapping. */
1437 skip_first = true;
1438 }
1439 else
1440 return NULL;
1441 }
1442 else if (gimple_has_substatements (stmt))
1443 {
1444 gimple_seq s1, s2;
1445
1446 /* When cloning bodies from the C++ front end, we will be handed bodies
1447 in High GIMPLE form. Handle here all the High GIMPLE statements that
1448 have embedded statements. */
1449 switch (gimple_code (stmt))
1450 {
1451 case GIMPLE_BIND:
1452 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1453 break;
1454
1455 case GIMPLE_CATCH:
1456 {
1457 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1458 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1459 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1460 }
1461 break;
1462
1463 case GIMPLE_EH_FILTER:
1464 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1465 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1466 break;
1467
1468 case GIMPLE_TRY:
1469 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1470 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1471 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1472 break;
1473
1474 case GIMPLE_WITH_CLEANUP_EXPR:
1475 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1476 copy = gimple_build_wce (s1);
1477 break;
1478
1479 case GIMPLE_OMP_PARALLEL:
1480 {
1481 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1482 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1483 copy = gimple_build_omp_parallel
1484 (s1,
1485 gimple_omp_parallel_clauses (omp_par_stmt),
1486 gimple_omp_parallel_child_fn (omp_par_stmt),
1487 gimple_omp_parallel_data_arg (omp_par_stmt));
1488 }
1489 break;
1490
1491 case GIMPLE_OMP_TASK:
1492 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1493 copy = gimple_build_omp_task
1494 (s1,
1495 gimple_omp_task_clauses (stmt),
1496 gimple_omp_task_child_fn (stmt),
1497 gimple_omp_task_data_arg (stmt),
1498 gimple_omp_task_copy_fn (stmt),
1499 gimple_omp_task_arg_size (stmt),
1500 gimple_omp_task_arg_align (stmt));
1501 break;
1502
1503 case GIMPLE_OMP_FOR:
1504 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1505 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1506 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1507 gimple_omp_for_clauses (stmt),
1508 gimple_omp_for_collapse (stmt), s2);
1509 {
1510 size_t i;
1511 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1512 {
1513 gimple_omp_for_set_index (copy, i,
1514 gimple_omp_for_index (stmt, i));
1515 gimple_omp_for_set_initial (copy, i,
1516 gimple_omp_for_initial (stmt, i));
1517 gimple_omp_for_set_final (copy, i,
1518 gimple_omp_for_final (stmt, i));
1519 gimple_omp_for_set_incr (copy, i,
1520 gimple_omp_for_incr (stmt, i));
1521 gimple_omp_for_set_cond (copy, i,
1522 gimple_omp_for_cond (stmt, i));
1523 }
1524 }
1525 break;
1526
1527 case GIMPLE_OMP_MASTER:
1528 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 copy = gimple_build_omp_master (s1);
1530 break;
1531
1532 case GIMPLE_OMP_TASKGROUP:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_taskgroup
1535 (s1, gimple_omp_taskgroup_clauses (stmt));
1536 break;
1537
1538 case GIMPLE_OMP_ORDERED:
1539 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1540 copy = gimple_build_omp_ordered
1541 (s1,
1542 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1543 break;
1544
1545 case GIMPLE_OMP_SECTION:
1546 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1547 copy = gimple_build_omp_section (s1);
1548 break;
1549
1550 case GIMPLE_OMP_SECTIONS:
1551 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1552 copy = gimple_build_omp_sections
1553 (s1, gimple_omp_sections_clauses (stmt));
1554 break;
1555
1556 case GIMPLE_OMP_SINGLE:
1557 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1558 copy = gimple_build_omp_single
1559 (s1, gimple_omp_single_clauses (stmt));
1560 break;
1561
1562 case GIMPLE_OMP_TARGET:
1563 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1564 copy = gimple_build_omp_target
1565 (s1, gimple_omp_target_kind (stmt),
1566 gimple_omp_target_clauses (stmt));
1567 break;
1568
1569 case GIMPLE_OMP_TEAMS:
1570 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1571 copy = gimple_build_omp_teams
1572 (s1, gimple_omp_teams_clauses (stmt));
1573 break;
1574
1575 case GIMPLE_OMP_CRITICAL:
1576 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1577 copy = gimple_build_omp_critical (s1,
1578 gimple_omp_critical_name
1579 (as_a <gomp_critical *> (stmt)),
1580 gimple_omp_critical_clauses
1581 (as_a <gomp_critical *> (stmt)));
1582 break;
1583
1584 case GIMPLE_TRANSACTION:
1585 {
1586 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1587 gtransaction *new_trans_stmt;
1588 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1589 id);
1590 copy = new_trans_stmt = gimple_build_transaction (s1);
1591 gimple_transaction_set_subcode (new_trans_stmt,
1592 gimple_transaction_subcode (old_trans_stmt));
1593 gimple_transaction_set_label_norm (new_trans_stmt,
1594 gimple_transaction_label_norm (old_trans_stmt));
1595 gimple_transaction_set_label_uninst (new_trans_stmt,
1596 gimple_transaction_label_uninst (old_trans_stmt));
1597 gimple_transaction_set_label_over (new_trans_stmt,
1598 gimple_transaction_label_over (old_trans_stmt));
1599 }
1600 break;
1601
1602 default:
1603 gcc_unreachable ();
1604 }
1605 }
1606 else
1607 {
1608 if (gimple_assign_copy_p (stmt)
1609 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1610 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1611 {
1612 /* Here we handle statements that are not completely rewritten.
1613 First we detect some inlining-induced bogosities for
1614 discarding. */
1615
1616 /* Some assignments VAR = VAR; don't generate any rtl code
1617 and thus don't count as variable modification. Avoid
1618 keeping bogosities like 0 = 0. */
1619 tree decl = gimple_assign_lhs (stmt), value;
1620 tree *n;
1621
1622 n = id->decl_map->get (decl);
1623 if (n)
1624 {
1625 value = *n;
1626 STRIP_TYPE_NOPS (value);
1627 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1628 return NULL;
1629 }
1630 }
1631
1632 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1633 in a block that we aren't copying during tree_function_versioning,
1634 just drop the clobber stmt. */
1635 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1636 {
1637 tree lhs = gimple_assign_lhs (stmt);
1638 if (TREE_CODE (lhs) == MEM_REF
1639 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1640 {
1641 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1642 if (gimple_bb (def_stmt)
1643 && !bitmap_bit_p (id->blocks_to_copy,
1644 gimple_bb (def_stmt)->index))
1645 return NULL;
1646 }
1647 }
1648
1649 if (gimple_debug_bind_p (stmt))
1650 {
1651 gdebug *copy
1652 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1653 gimple_debug_bind_get_value (stmt),
1654 stmt);
1655 if (id->reset_location)
1656 gimple_set_location (copy, input_location);
1657 id->debug_stmts.safe_push (copy);
1658 gimple_seq_add_stmt (&stmts, copy);
1659 return stmts;
1660 }
1661 if (gimple_debug_source_bind_p (stmt))
1662 {
1663 gdebug *copy = gimple_build_debug_source_bind
1664 (gimple_debug_source_bind_get_var (stmt),
1665 gimple_debug_source_bind_get_value (stmt),
1666 stmt);
1667 if (id->reset_location)
1668 gimple_set_location (copy, input_location);
1669 id->debug_stmts.safe_push (copy);
1670 gimple_seq_add_stmt (&stmts, copy);
1671 return stmts;
1672 }
1673 if (gimple_debug_nonbind_marker_p (stmt))
1674 {
1675 /* If the inlined function has too many debug markers,
1676 don't copy them. */
1677 if (id->src_cfun->debug_marker_count
1678 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1679 return stmts;
1680
1681 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1682 if (id->reset_location)
1683 gimple_set_location (copy, input_location);
1684 id->debug_stmts.safe_push (copy);
1685 gimple_seq_add_stmt (&stmts, copy);
1686 return stmts;
1687 }
1688
1689 /* Create a new deep copy of the statement. */
1690 copy = gimple_copy (stmt);
1691
1692 /* Clear flags that need revisiting. */
1693 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1694 {
1695 if (gimple_call_tail_p (call_stmt))
1696 gimple_call_set_tail (call_stmt, false);
1697 if (gimple_call_from_thunk_p (call_stmt))
1698 gimple_call_set_from_thunk (call_stmt, false);
1699 if (gimple_call_internal_p (call_stmt))
1700 switch (gimple_call_internal_fn (call_stmt))
1701 {
1702 case IFN_GOMP_SIMD_LANE:
1703 case IFN_GOMP_SIMD_VF:
1704 case IFN_GOMP_SIMD_LAST_LANE:
1705 case IFN_GOMP_SIMD_ORDERED_START:
1706 case IFN_GOMP_SIMD_ORDERED_END:
1707 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1708 break;
1709 default:
1710 break;
1711 }
1712 }
1713
1714 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1715 RESX and EH_DISPATCH. */
1716 if (id->eh_map)
1717 switch (gimple_code (copy))
1718 {
1719 case GIMPLE_CALL:
1720 {
1721 tree r, fndecl = gimple_call_fndecl (copy);
1722 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1723 switch (DECL_FUNCTION_CODE (fndecl))
1724 {
1725 case BUILT_IN_EH_COPY_VALUES:
1726 r = gimple_call_arg (copy, 1);
1727 r = remap_eh_region_tree_nr (r, id);
1728 gimple_call_set_arg (copy, 1, r);
1729 /* FALLTHRU */
1730
1731 case BUILT_IN_EH_POINTER:
1732 case BUILT_IN_EH_FILTER:
1733 r = gimple_call_arg (copy, 0);
1734 r = remap_eh_region_tree_nr (r, id);
1735 gimple_call_set_arg (copy, 0, r);
1736 break;
1737
1738 default:
1739 break;
1740 }
1741
1742 /* Reset alias info if we didn't apply measures to
1743 keep it valid over inlining by setting DECL_PT_UID. */
1744 if (!id->src_cfun->gimple_df
1745 || !id->src_cfun->gimple_df->ipa_pta)
1746 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1747 }
1748 break;
1749
1750 case GIMPLE_RESX:
1751 {
1752 gresx *resx_stmt = as_a <gresx *> (copy);
1753 int r = gimple_resx_region (resx_stmt);
1754 r = remap_eh_region_nr (r, id);
1755 gimple_resx_set_region (resx_stmt, r);
1756 }
1757 break;
1758
1759 case GIMPLE_EH_DISPATCH:
1760 {
1761 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1762 int r = gimple_eh_dispatch_region (eh_dispatch);
1763 r = remap_eh_region_nr (r, id);
1764 gimple_eh_dispatch_set_region (eh_dispatch, r);
1765 }
1766 break;
1767
1768 default:
1769 break;
1770 }
1771 }
1772
1773 /* If STMT has a block defined, map it to the newly constructed block. */
1774 if (gimple_block (copy))
1775 {
1776 tree *n;
1777 n = id->decl_map->get (gimple_block (copy));
1778 gcc_assert (n);
1779 gimple_set_block (copy, *n);
1780 }
1781
1782 if (id->reset_location)
1783 gimple_set_location (copy, input_location);
1784
1785 /* Debug statements ought to be rebuilt and not copied. */
1786 gcc_checking_assert (!is_gimple_debug (copy));
1787
1788 /* Remap all the operands in COPY. */
1789 memset (&wi, 0, sizeof (wi));
1790 wi.info = id;
1791 if (skip_first)
1792 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1793 else
1794 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1795
1796 /* Clear the copied virtual operands. We are not remapping them here
1797 but are going to recreate them from scratch. */
1798 if (gimple_has_mem_ops (copy))
1799 {
1800 gimple_set_vdef (copy, NULL_TREE);
1801 gimple_set_vuse (copy, NULL_TREE);
1802 }
1803
1804 gimple_seq_add_stmt (&stmts, copy);
1805 return stmts;
1806 }
1807
1808
1809 /* Copy basic block BB, scale its profile accordingly. Edges will be taken
1810 care of later. */
1811
1812 static basic_block
1813 copy_bb (copy_body_data *id, basic_block bb,
1814 profile_count num, profile_count den)
1815 {
1816 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1817 basic_block copy_basic_block;
1818 tree decl;
1819 basic_block prev;
1820
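/* Normalize the NUM/DEN scaling ratio applied to the callee's profile;
   this is expected to guard against a zero or uninitialized denominator
   zeroing out every copied count below.  */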
1821 profile_count::adjust_for_ipa_scaling (&num, &den);
1822
1823 /* Search for previous copied basic block. */
1824 prev = bb->prev_bb;
1825 while (!prev->aux)
1826 prev = prev->prev_bb;
1827
1828 /* create_basic_block() will append every new block to
1829 basic_block_info automatically. */
1830 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1831 copy_basic_block->count = bb->count.apply_scale (num, den);
1832
1833 copy_gsi = gsi_start_bb (copy_basic_block);
1834
1835 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1836 {
1837 gimple_seq stmts;
1838 gimple *stmt = gsi_stmt (gsi);
1839 gimple *orig_stmt = stmt;
1840 gimple_stmt_iterator stmts_gsi;
1841 bool stmt_added = false;
1842
1843 id->regimplify = false;
1844 stmts = remap_gimple_stmt (stmt, id);
1845
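/* remap_gimple_stmt returns a gimple_seq: the original statement may have
   been dropped entirely (e.g. a clobber whose base block is not being
   copied) or may have been expanded into more than one statement.  */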
1846 if (gimple_seq_empty_p (stmts))
1847 continue;
1848
1849 seq_gsi = copy_gsi;
1850
1851 for (stmts_gsi = gsi_start (stmts);
1852 !gsi_end_p (stmts_gsi); )
1853 {
1854 stmt = gsi_stmt (stmts_gsi);
1855
1856 /* Advance iterator now before stmt is moved to seq_gsi. */
1857 gsi_next (&stmts_gsi);
1858
1859 if (gimple_nop_p (stmt))
1860 continue;
1861
1862 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1863 orig_stmt);
1864
1865 /* With return slot optimization we can end up with
1866 non-gimple (foo *)&this->m; fix that here. */
1867 if (is_gimple_assign (stmt)
1868 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1869 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1870 {
1871 tree new_rhs;
1872 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1873 gimple_assign_rhs1 (stmt),
1874 true, NULL, false,
1875 GSI_CONTINUE_LINKING);
1876 gimple_assign_set_rhs1 (stmt, new_rhs);
1877 id->regimplify = false;
1878 }
1879
1880 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1881
1882 if (id->regimplify)
1883 gimple_regimplify_operands (stmt, &seq_gsi);
1884
1885 stmt_added = true;
1886 }
1887
1888 if (!stmt_added)
1889 continue;
1890
1891 /* If copy_basic_block was empty at the start of this iteration,
1892 call gsi_start_bb again to get at the newly added statements. */
1893 if (gsi_end_p (copy_gsi))
1894 copy_gsi = gsi_start_bb (copy_basic_block);
1895 else
1896 gsi_next (&copy_gsi);
1897
1898 /* Process the new statement. The call to gimple_regimplify_operands
1899 possibly turned the statement into multiple statements; we
1900 need to process all of them. */
1901 do
1902 {
1903 tree fn;
1904 gcall *call_stmt;
1905
1906 stmt = gsi_stmt (copy_gsi);
1907 call_stmt = dyn_cast <gcall *> (stmt);
1908 if (call_stmt
1909 && gimple_call_va_arg_pack_p (call_stmt)
1910 && id->call_stmt
1911 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1912 {
1913 /* __builtin_va_arg_pack () should be replaced by
1914 all arguments corresponding to ... in the caller. */
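/* For example, when inlining the call f (1, 2, 3) to int f (int x, ...),
   a call bar (__builtin_va_arg_pack ()) inside f becomes bar (2, 3)
   in the inlined copy (f and bar are illustrative names).  */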
1915 tree p;
1916 gcall *new_call;
1917 vec<tree> argarray;
1918 size_t nargs = gimple_call_num_args (id->call_stmt);
1919 size_t n;
1920
1921 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1922 nargs--;
1923
1924 /* Create the new array of arguments. */
1925 n = nargs + gimple_call_num_args (call_stmt);
1926 argarray.create (n);
1927 argarray.safe_grow_cleared (n);
1928
1929 /* Copy all the arguments before '...' */
1930 memcpy (argarray.address (),
1931 gimple_call_arg_ptr (call_stmt, 0),
1932 gimple_call_num_args (call_stmt) * sizeof (tree));
1933
1934 /* Append the arguments passed in '...' */
1935 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1936 gimple_call_arg_ptr (id->call_stmt, 0)
1937 + (gimple_call_num_args (id->call_stmt) - nargs),
1938 nargs * sizeof (tree));
1939
1940 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1941 argarray);
1942
1943 argarray.release ();
1944
1945 /* Copy all GIMPLE_CALL flags, location and block, except
1946 GF_CALL_VA_ARG_PACK. */
1947 gimple_call_copy_flags (new_call, call_stmt);
1948 gimple_call_set_va_arg_pack (new_call, false);
1949 gimple_set_location (new_call, gimple_location (stmt));
1950 gimple_set_block (new_call, gimple_block (stmt));
1951 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1952
1953 gsi_replace (&copy_gsi, new_call, false);
1954 stmt = new_call;
1955 }
1956 else if (call_stmt
1957 && id->call_stmt
1958 && (decl = gimple_call_fndecl (stmt))
1959 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
1960 {
1961 /* __builtin_va_arg_pack_len () should be replaced by
1962 the number of anonymous arguments. */
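/* Continuing the example above, __builtin_va_arg_pack_len () evaluates
   to 2 when f (1, 2, 3) is inlined into its caller.  */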
1963 size_t nargs = gimple_call_num_args (id->call_stmt);
1964 tree count, p;
1965 gimple *new_stmt;
1966
1967 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1968 nargs--;
1969
1970 if (!gimple_call_lhs (stmt))
1971 {
1972 /* Drop unused calls. */
1973 gsi_remove (&copy_gsi, false);
1974 continue;
1975 }
1976 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1977 {
1978 count = build_int_cst (integer_type_node, nargs);
1979 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1980 gsi_replace (&copy_gsi, new_stmt, false);
1981 stmt = new_stmt;
1982 }
1983 else if (nargs != 0)
1984 {
1985 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1986 count = build_int_cst (integer_type_node, nargs);
1987 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1988 PLUS_EXPR, newlhs, count);
1989 gimple_call_set_lhs (stmt, newlhs);
1990 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1991 }
1992 }
1993 else if (call_stmt
1994 && id->call_stmt
1995 && gimple_call_internal_p (stmt)
1996 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1997 {
1998 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1999 gsi_remove (&copy_gsi, false);
2000 continue;
2001 }
2002
2003 /* Statements produced by inlining can be unfolded, especially
2004 when we have constant propagated some operands. We can't fold
2005 them right now for two reasons:
2006 1) folding requires SSA_NAME_DEF_STMTs to be correct
2007 2) we can't change function calls to builtins.
2008 So we just mark the statement for later folding. We mark
2009 all new statements, instead of just the statements that have changed
2010 by some nontrivial substitution, so that even statements made
2011 foldable indirectly are updated. If this turns out to be
2012 expensive, copy_body can be told to watch for nontrivial
2013 changes. */
2014 if (id->statements_to_fold)
2015 id->statements_to_fold->add (stmt);
2016
2017 /* We're duplicating a CALL_EXPR. Find any corresponding
2018 callgraph edges and update or duplicate them. */
2019 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2020 {
2021 struct cgraph_edge *edge;
2022
2023 switch (id->transform_call_graph_edges)
2024 {
2025 case CB_CGE_DUPLICATE:
2026 edge = id->src_node->get_edge (orig_stmt);
2027 if (edge)
2028 {
2029 struct cgraph_edge *old_edge = edge;
2030 profile_count old_cnt = edge->count;
2031 edge = edge->clone (id->dst_node, call_stmt,
2032 gimple_uid (stmt),
2033 num, den,
2034 true);
2035
2036 /* Speculative calls consist of two edges - direct and
2037 indirect. Duplicate the whole thing and distribute
2038 frequencies accordingly. */
2039 if (edge->speculative)
2040 {
2041 struct cgraph_edge *direct, *indirect;
2042 struct ipa_ref *ref;
2043
2044 gcc_assert (!edge->indirect_unknown_callee);
2045 old_edge->speculative_call_info (direct, indirect, ref);
2046
2047 profile_count indir_cnt = indirect->count;
2048 indirect = indirect->clone (id->dst_node, call_stmt,
2049 gimple_uid (stmt),
2050 num, den,
2051 true);
2052
2053 profile_probability prob
2054 = indir_cnt.probability_in (old_cnt + indir_cnt);
2055 indirect->count
2056 = copy_basic_block->count.apply_probability (prob);
2057 edge->count = copy_basic_block->count - indirect->count;
2058 id->dst_node->clone_reference (ref, stmt);
2059 }
2060 else
2061 edge->count = copy_basic_block->count;
2062 }
2063 break;
2064
2065 case CB_CGE_MOVE_CLONES:
2066 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2067 call_stmt);
2068 edge = id->dst_node->get_edge (stmt);
2069 break;
2070
2071 case CB_CGE_MOVE:
2072 edge = id->dst_node->get_edge (orig_stmt);
2073 if (edge)
2074 edge->set_call_stmt (call_stmt);
2075 break;
2076
2077 default:
2078 gcc_unreachable ();
2079 }
2080
2081 /* Constant propagation on arguments done during inlining
2082 may create a new direct call. Produce an edge for it. */
2083 if ((!edge
2084 || (edge->indirect_inlining_edge
2085 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2086 && id->dst_node->definition
2087 && (fn = gimple_call_fndecl (stmt)) != NULL)
2088 {
2089 struct cgraph_node *dest = cgraph_node::get_create (fn);
2090
2091 /* We have a missing edge in the callgraph. This can happen
2092 when previous inlining turned an indirect call into a
2093 direct call by constant propagating arguments or when we are
2094 producing a dead clone (for further cloning). In all
2095 other cases we hit a bug (incorrect node sharing is the
2096 most common reason for missing edges). */
2097 gcc_assert (!dest->definition
2098 || dest->address_taken
2099 || !id->src_node->definition
2100 || !id->dst_node->definition);
2101 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2102 id->dst_node->create_edge_including_clones
2103 (dest, orig_stmt, call_stmt, bb->count,
2104 CIF_ORIGINALLY_INDIRECT_CALL);
2105 else
2106 id->dst_node->create_edge (dest, call_stmt,
2107 bb->count)->inline_failed
2108 = CIF_ORIGINALLY_INDIRECT_CALL;
2109 if (dump_file)
2110 {
2111 fprintf (dump_file, "Created new direct edge to %s\n",
2112 dest->name ());
2113 }
2114 }
2115
2116 notice_special_calls (as_a <gcall *> (stmt));
2117 }
2118
2119 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2120 id->eh_map, id->eh_lp_nr);
2121
2122 gsi_next (&copy_gsi);
2123 }
2124 while (!gsi_end_p (copy_gsi));
2125
2126 copy_gsi = gsi_last_bb (copy_basic_block);
2127 }
2128
2129 return copy_basic_block;
2130 }
2131
2132 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2133 SSA form is quite easy, since the dominator relationship for the old basic
2134 blocks does not change.
2135
2136 There is however an exception where inlining might change the dominator
2137 relation across EH edges from basic blocks within the inlined function
2138 destined to landing pads in the function we inline into.
2139
2140 The function fills in PHI_RESULTs of such PHI nodes if they refer
2141 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2142 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2143 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2144 set, and this means that there will be no overlapping live ranges
2145 for the underlying symbol.
2146
2147 This might change in the future if we allow redirecting of EH edges and
2148 we might then want to change the way we build the CFG pre-inlining to
2149 include all the possible edges. */
2150 static void
2151 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2152 bool can_throw, bool nonlocal_goto)
2153 {
2154 edge e;
2155 edge_iterator ei;
2156
2157 FOR_EACH_EDGE (e, ei, bb->succs)
2158 if (!e->dest->aux
2159 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2160 {
2161 gphi *phi;
2162 gphi_iterator si;
2163
2164 if (!nonlocal_goto)
2165 gcc_assert (e->flags & EDGE_EH);
2166
2167 if (!can_throw)
2168 gcc_assert (!(e->flags & EDGE_EH));
2169
2170 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2171 {
2172 edge re;
2173
2174 phi = si.phi ();
2175
2176 /* For abnormal goto/call edges the receiver can be the
2177 ENTRY_BLOCK. Do not assert this cannot happen. */
2178
2179 gcc_assert ((e->flags & EDGE_EH)
2180 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2181
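/* Copy the PHI argument already supplied on the corresponding edge
   from RET_BB to the new abnormal/EH edge E entering the same
   destination.  */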
2182 re = find_edge (ret_bb, e->dest);
2183 gcc_checking_assert (re);
2184 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2185 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2186
2187 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2188 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2189 }
2190 }
2191 }
2192
2193 /* Insert clobbers for automatic variables of the inlined function
2194 ID->src_fn at the start of basic block BB. */
2195
2196 static void
2197 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2198 {
2199 tree var;
2200 unsigned int i;
2201 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2202 if (VAR_P (var)
2203 && !DECL_HARD_REGISTER (var)
2204 && !TREE_THIS_VOLATILE (var)
2205 && !DECL_HAS_VALUE_EXPR_P (var)
2206 && !is_gimple_reg (var)
2207 && auto_var_in_fn_p (var, id->src_fn))
2208 {
2209 tree *t = id->decl_map->get (var);
2210 if (!t)
2211 continue;
2212 tree new_var = *t;
2213 if (VAR_P (new_var)
2214 && !DECL_HARD_REGISTER (new_var)
2215 && !TREE_THIS_VOLATILE (new_var)
2216 && !DECL_HAS_VALUE_EXPR_P (new_var)
2217 && !is_gimple_reg (new_var)
2218 && auto_var_in_fn_p (new_var, id->dst_fn))
2219 {
2220 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2221 tree clobber = build_clobber (TREE_TYPE (new_var));
2222 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2223 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2224 }
2225 }
2226 }
2227
2228 /* Copy edges from BB into its copy constructed earlier, scale profile
2229 accordingly. Assume aux pointers point to the copies of each BB.
2230 Return true if any debug stmts are left after a statement that must
2231 end the basic block. */
2232
2233 static bool
2234 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2235 basic_block ret_bb, basic_block abnormal_goto_dest,
2236 copy_body_data *id)
2237 {
2238 basic_block new_bb = (basic_block) bb->aux;
2239 edge_iterator ei;
2240 edge old_edge;
2241 gimple_stmt_iterator si;
2242 bool need_debug_cleanup = false;
2243
2244 /* Use the indices from the original blocks to create edges for the
2245 new ones. */
2246 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2247 if (!(old_edge->flags & EDGE_EH))
2248 {
2249 edge new_edge;
2250 int flags = old_edge->flags;
2251 location_t locus = old_edge->goto_locus;
2252
2253 /* Return edges do get a FALLTHRU flag when they get inlined. */
2254 if (old_edge->dest->index == EXIT_BLOCK
2255 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2256 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2257 flags |= EDGE_FALLTHRU;
2258
2259 new_edge
2260 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2261 new_edge->probability = old_edge->probability;
2262 if (!id->reset_location)
2263 new_edge->goto_locus = remap_location (locus, id);
2264 }
2265
2266 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2267 return false;
2268
2269 /* When doing function splitting, we must decrease the count of the return
2270 block which was previously reachable from blocks we did not copy. */
2271 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2272 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2273 if (old_edge->src->index != ENTRY_BLOCK
2274 && !old_edge->src->aux)
2275 new_bb->count -= old_edge->count ().apply_scale (num, den);
2276
2277 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2278 {
2279 gimple *copy_stmt;
2280 bool can_throw, nonlocal_goto;
2281
2282 copy_stmt = gsi_stmt (si);
2283 if (!is_gimple_debug (copy_stmt))
2284 update_stmt (copy_stmt);
2285
2286 /* Do this before the possible split_block. */
2287 gsi_next (&si);
2288
2289 /* If this tree could throw an exception, there are two
2290 cases where we need to add abnormal edge(s): the
2291 tree wasn't in a region and there is a "current
2292 region" in the caller; or the original tree had
2293 EH edges. In both cases split the block after the tree,
2294 and add abnormal edge(s) as needed; we need both
2295 those from the callee and the caller.
2296 We check whether the copy can throw, because the const
2297 propagation can change an INDIRECT_REF which throws
2298 into a COMPONENT_REF which doesn't. If the copy
2299 can throw, the original could also throw. */
2300 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2301 nonlocal_goto
2302 = (stmt_can_make_abnormal_goto (copy_stmt)
2303 && !computed_goto_p (copy_stmt));
2304
2305 if (can_throw || nonlocal_goto)
2306 {
2307 if (!gsi_end_p (si))
2308 {
2309 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2310 gsi_next (&si);
2311 if (gsi_end_p (si))
2312 need_debug_cleanup = true;
2313 }
2314 if (!gsi_end_p (si))
2315 /* Note that bb's predecessor edges aren't necessarily
2316 right at this point; split_block doesn't care. */
2317 {
2318 edge e = split_block (new_bb, copy_stmt);
2319
2320 new_bb = e->dest;
2321 new_bb->aux = e->src->aux;
2322 si = gsi_start_bb (new_bb);
2323 }
2324 }
2325
2326 bool update_probs = false;
2327
2328 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2329 {
2330 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2331 update_probs = true;
2332 }
2333 else if (can_throw)
2334 {
2335 make_eh_edges (copy_stmt);
2336 update_probs = true;
2337 }
2338
2339 /* EH edges may not match old edges. Copy as much as possible. */
2340 if (update_probs)
2341 {
2342 edge e;
2343 edge_iterator ei;
2344 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2345
2346 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2347 if ((old_edge->flags & EDGE_EH)
2348 && (e = find_edge (copy_stmt_bb,
2349 (basic_block) old_edge->dest->aux))
2350 && (e->flags & EDGE_EH))
2351 e->probability = old_edge->probability;
2352
2353 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2354 if (e->flags & EDGE_EH)
2355 {
2356 if (!e->probability.initialized_p ())
2357 e->probability = profile_probability::never ();
2358 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2359 {
2360 add_clobbers_to_eh_landing_pad (e->dest, id);
2361 id->add_clobbers_to_eh_landing_pads = 0;
2362 }
2363 }
2364 }
2365
2366
2367 /* If the call we inline cannot make an abnormal goto, do not add
2368 additional abnormal edges but only retain those already present
2369 in the original function body. */
2370 if (abnormal_goto_dest == NULL)
2371 nonlocal_goto = false;
2372 if (nonlocal_goto)
2373 {
2374 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2375
2376 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2377 nonlocal_goto = false;
2378 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2379 in OpenMP regions which aren't allowed to be left abnormally.
2380 So, no need to add abnormal edge in that case. */
2381 else if (is_gimple_call (copy_stmt)
2382 && gimple_call_internal_p (copy_stmt)
2383 && (gimple_call_internal_fn (copy_stmt)
2384 == IFN_ABNORMAL_DISPATCHER)
2385 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2386 nonlocal_goto = false;
2387 else
2388 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2389 EDGE_ABNORMAL);
2390 }
2391
2392 if ((can_throw || nonlocal_goto)
2393 && gimple_in_ssa_p (cfun))
2394 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2395 can_throw, nonlocal_goto);
2396 }
2397 return need_debug_cleanup;
2398 }
2399
2400 /* Copy the PHIs. All blocks and edges are copied, some blocks
2401 were possibly split and new outgoing EH edges inserted.
2402 BB points to the block of the original function and AUX pointers link
2403 the original and newly copied blocks. */
2404
2405 static void
2406 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2407 {
2408 basic_block const new_bb = (basic_block) bb->aux;
2409 edge_iterator ei;
2410 gphi *phi;
2411 gphi_iterator si;
2412 edge new_edge;
2413 bool inserted = false;
2414
2415 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2416 {
2417 tree res, new_res;
2418 gphi *new_phi;
2419
2420 phi = si.phi ();
2421 res = PHI_RESULT (phi);
2422 new_res = res;
2423 if (!virtual_operand_p (res))
2424 {
2425 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2426 if (EDGE_COUNT (new_bb->preds) == 0)
2427 {
2428 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2429 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2430 }
2431 else
2432 {
2433 new_phi = create_phi_node (new_res, new_bb);
2434 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2435 {
2436 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2437 bb);
2438 tree arg;
2439 tree new_arg;
2440 edge_iterator ei2;
2441 location_t locus;
2442
2443 /* When doing partial cloning, we allow PHIs on the entry
2444 block as long as all the arguments are the same.
2445 Find any input edge to get the argument to copy. */
2446 if (!old_edge)
2447 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2448 if (!old_edge->src->aux)
2449 break;
2450
2451 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2452 new_arg = arg;
2453 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2454 gcc_assert (new_arg);
2455 /* With return slot optimization we can end up with
2456 non-gimple (foo *)&this->m; fix that here. */
2457 if (TREE_CODE (new_arg) != SSA_NAME
2458 && TREE_CODE (new_arg) != FUNCTION_DECL
2459 && !is_gimple_val (new_arg))
2460 {
2461 gimple_seq stmts = NULL;
2462 new_arg = force_gimple_operand (new_arg, &stmts, true,
2463 NULL);
2464 gsi_insert_seq_on_edge (new_edge, stmts);
2465 inserted = true;
2466 }
2467 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2468 if (id->reset_location)
2469 locus = input_location;
2470 else
2471 locus = remap_location (locus, id);
2472 add_phi_arg (new_phi, new_arg, new_edge, locus);
2473 }
2474 }
2475 }
2476 }
2477
2478 /* Commit the delayed edge insertions. */
2479 if (inserted)
2480 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2481 gsi_commit_one_edge_insert (new_edge, NULL);
2482 }
2483
2484
2485 /* Wrapper for remap_decl so it can be used as a callback. */
2486
2487 static tree
2488 remap_decl_1 (tree decl, void *data)
2489 {
2490 return remap_decl (decl, (copy_body_data *) data);
2491 }
2492
2493 /* Build the struct function and associated data structures for the new
2494 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function
2495 changes cfun to the function of new_fndecl (and current_function_decl too). */
2496
2497 static void
2498 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2499 {
2500 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2501
2502 if (!DECL_ARGUMENTS (new_fndecl))
2503 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2504 if (!DECL_RESULT (new_fndecl))
2505 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2506
2507 /* Register specific tree functions. */
2508 gimple_register_cfg_hooks ();
2509
2510 /* Get clean struct function. */
2511 push_struct_function (new_fndecl);
2512
2513 /* We will rebuild these, so just sanity check that they are empty. */
2514 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2515 gcc_assert (cfun->local_decls == NULL);
2516 gcc_assert (cfun->cfg == NULL);
2517 gcc_assert (cfun->decl == new_fndecl);
2518
2519 /* Copy items we preserve during cloning. */
2520 cfun->static_chain_decl = src_cfun->static_chain_decl;
2521 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2522 cfun->function_end_locus = src_cfun->function_end_locus;
2523 cfun->curr_properties = src_cfun->curr_properties;
2524 cfun->last_verified = src_cfun->last_verified;
2525 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2526 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2527 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2528 cfun->stdarg = src_cfun->stdarg;
2529 cfun->after_inlining = src_cfun->after_inlining;
2530 cfun->can_throw_non_call_exceptions
2531 = src_cfun->can_throw_non_call_exceptions;
2532 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2533 cfun->returns_struct = src_cfun->returns_struct;
2534 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2535
2536 init_empty_tree_cfg ();
2537
2538 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2539
2540 profile_count num = count;
2541 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2542 profile_count::adjust_for_ipa_scaling (&num, &den);
2543
2544 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2545 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2546 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2547 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2548 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2549 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2550 if (src_cfun->eh)
2551 init_eh_for_function ();
2552
2553 if (src_cfun->gimple_df)
2554 {
2555 init_tree_ssa (cfun);
2556 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2557 if (cfun->gimple_df->in_ssa_p)
2558 init_ssa_operands (cfun);
2559 }
2560 }
2561
2562 /* Helper function for copy_cfg_body. Move debug stmts from the end
2563 of NEW_BB to the beginning of successor basic blocks when needed. If the
2564 successor has multiple predecessors, reset the debug stmts' values;
2565 otherwise keep them. */
2566
2567 static void
2568 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2569 {
2570 edge e;
2571 edge_iterator ei;
2572 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2573
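/* Nothing to do unless the block ends in a statement that can throw
   internally or make an abnormal goto, followed by at least one
   trailing debug stmt.  */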
2574 if (gsi_end_p (si)
2575 || gsi_one_before_end_p (si)
2576 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2577 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2578 return;
2579
2580 FOR_EACH_EDGE (e, ei, new_bb->succs)
2581 {
2582 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2583 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2584 while (is_gimple_debug (gsi_stmt (ssi)))
2585 {
2586 gimple *stmt = gsi_stmt (ssi);
2587 gdebug *new_stmt;
2588 tree var;
2589 tree value;
2590
2591 /* For the last edge move the debug stmts instead of copying
2592 them. */
2593 if (ei_one_before_end_p (ei))
2594 {
2595 si = ssi;
2596 gsi_prev (&ssi);
2597 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2598 {
2599 gimple_debug_bind_reset_value (stmt);
2600 gimple_set_location (stmt, UNKNOWN_LOCATION);
2601 }
2602 gsi_remove (&si, false);
2603 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2604 continue;
2605 }
2606
2607 if (gimple_debug_bind_p (stmt))
2608 {
2609 var = gimple_debug_bind_get_var (stmt);
2610 if (single_pred_p (e->dest))
2611 {
2612 value = gimple_debug_bind_get_value (stmt);
2613 value = unshare_expr (value);
2614 new_stmt = gimple_build_debug_bind (var, value, stmt);
2615 }
2616 else
2617 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2618 }
2619 else if (gimple_debug_source_bind_p (stmt))
2620 {
2621 var = gimple_debug_source_bind_get_var (stmt);
2622 value = gimple_debug_source_bind_get_value (stmt);
2623 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2624 }
2625 else if (gimple_debug_nonbind_marker_p (stmt))
2626 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2627 else
2628 gcc_unreachable ();
2629 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2630 id->debug_stmts.safe_push (new_stmt);
2631 gsi_prev (&ssi);
2632 }
2633 }
2634 }
2635
2636 /* Make a copy of the sub-loops of SRC_PARENT and place them
2637 as sub-loops of DEST_PARENT. */
2638
2639 static void
2640 copy_loops (copy_body_data *id,
2641 struct loop *dest_parent, struct loop *src_parent)
2642 {
2643 struct loop *src_loop = src_parent->inner;
2644 while (src_loop)
2645 {
2646 if (!id->blocks_to_copy
2647 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2648 {
2649 struct loop *dest_loop = alloc_loop ();
2650
2651 /* Assign the new loop its header and latch and associate
2652 those with the new loop. */
2653 dest_loop->header = (basic_block)src_loop->header->aux;
2654 dest_loop->header->loop_father = dest_loop;
2655 if (src_loop->latch != NULL)
2656 {
2657 dest_loop->latch = (basic_block)src_loop->latch->aux;
2658 dest_loop->latch->loop_father = dest_loop;
2659 }
2660
2661 /* Copy loop meta-data. */
2662 copy_loop_info (src_loop, dest_loop);
2663
2664 /* Finally place it into the loop array and the loop tree. */
2665 place_new_loop (cfun, dest_loop);
2666 flow_loop_tree_node_add (dest_parent, dest_loop);
2667
2668 dest_loop->safelen = src_loop->safelen;
2669 if (src_loop->unroll)
2670 {
2671 dest_loop->unroll = src_loop->unroll;
2672 cfun->has_unroll = true;
2673 }
2674 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2675 if (src_loop->force_vectorize)
2676 {
2677 dest_loop->force_vectorize = true;
2678 cfun->has_force_vectorize_loops = true;
2679 }
2680 if (src_loop->simduid)
2681 {
2682 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2683 cfun->has_simduid_loops = true;
2684 }
2685
2686 /* Recurse. */
2687 copy_loops (id, dest_loop, src_loop);
2688 }
2689 src_loop = src_loop->next;
2690 }
2691 }
2692
2693 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2694
2695 void
2696 redirect_all_calls (copy_body_data * id, basic_block bb)
2697 {
2698 gimple_stmt_iterator si;
2699 gimple *last = last_stmt (bb);
2700 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2701 {
2702 gimple *stmt = gsi_stmt (si);
2703 if (is_gimple_call (stmt))
2704 {
2705 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2706 if (edge)
2707 {
2708 edge->redirect_call_stmt_to_callee ();
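/* If the redirected call was the block's last statement and can no
   longer throw, drop its EH information and purge EH edges that have
   become dead.  */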
2709 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2710 gimple_purge_dead_eh_edges (bb);
2711 }
2712 }
2713 }
2714 }
2715
2716 /* Make a copy of the body of FN so that it can be inserted inline in
2717 another function. Walks FN via its CFG and returns the new fndecl. */
2718
2719 static tree
2720 copy_cfg_body (copy_body_data * id,
2721 basic_block entry_block_map, basic_block exit_block_map,
2722 basic_block new_entry)
2723 {
2724 tree callee_fndecl = id->src_fn;
2725 /* Original cfun for the callee, doesn't change. */
2726 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2727 struct function *cfun_to_copy;
2728 basic_block bb;
2729 tree new_fndecl = NULL;
2730 bool need_debug_cleanup = false;
2731 int last;
2732 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2733 profile_count num = entry_block_map->count;
2734
2735 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2736
2737 /* Register specific tree functions. */
2738 gimple_register_cfg_hooks ();
2739
2740 /* If we are inlining just a region of the function, make sure to connect
2741 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2742 be part of a loop, we must compute the frequency and probability of
2743 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2744 probabilities of edges incoming from the nonduplicated region. */
2745 if (new_entry)
2746 {
2747 edge e;
2748 edge_iterator ei;
2749 den = profile_count::zero ();
2750
2751 FOR_EACH_EDGE (e, ei, new_entry->preds)
2752 if (!e->src->aux)
2753 den += e->count ();
2754 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2755 }
2756
2757 profile_count::adjust_for_ipa_scaling (&num, &den);
2758
2759 /* Must have a CFG here at this point. */
2760 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2761 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2762
2763
2764 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2765 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2766 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2767 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2768
2769 /* Duplicate any exception-handling regions. */
2770 if (cfun->eh)
2771 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2772 remap_decl_1, id);
2773
2774 /* Use aux pointers to map the original blocks to copy. */
2775 FOR_EACH_BB_FN (bb, cfun_to_copy)
2776 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2777 {
2778 basic_block new_bb = copy_bb (id, bb, num, den);
2779 bb->aux = new_bb;
2780 new_bb->aux = bb;
2781 new_bb->loop_father = entry_block_map->loop_father;
2782 }
2783
2784 last = last_basic_block_for_fn (cfun);
2785
2786 /* Now that we've duplicated the blocks, duplicate their edges. */
2787 basic_block abnormal_goto_dest = NULL;
2788 if (id->call_stmt
2789 && stmt_can_make_abnormal_goto (id->call_stmt))
2790 {
2791 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2792
2793 bb = gimple_bb (id->call_stmt);
2794 gsi_next (&gsi);
2795 if (gsi_end_p (gsi))
2796 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2797 }
2798 FOR_ALL_BB_FN (bb, cfun_to_copy)
2799 if (!id->blocks_to_copy
2800 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2801 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2802 abnormal_goto_dest, id);
2803
2804 if (new_entry)
2805 {
2806 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2807 EDGE_FALLTHRU);
2808 e->probability = profile_probability::always ();
2809 }
2810
2811 /* Duplicate the loop tree, if available and wanted. */
2812 if (loops_for_fn (src_cfun) != NULL
2813 && current_loops != NULL)
2814 {
2815 copy_loops (id, entry_block_map->loop_father,
2816 get_loop (src_cfun, 0));
2817 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2818 loops_state_set (LOOPS_NEED_FIXUP);
2819 }
2820
2821 /* If the loop tree in the source function needed fixup, mark the
2822 destination loop tree for fixup, too. */
2823 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2824 loops_state_set (LOOPS_NEED_FIXUP);
2825
2826 if (gimple_in_ssa_p (cfun))
2827 FOR_ALL_BB_FN (bb, cfun_to_copy)
2828 if (!id->blocks_to_copy
2829 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2830 copy_phis_for_bb (bb, id);
2831
2832 FOR_ALL_BB_FN (bb, cfun_to_copy)
2833 if (bb->aux)
2834 {
2835 if (need_debug_cleanup
2836 && bb->index != ENTRY_BLOCK
2837 && bb->index != EXIT_BLOCK)
2838 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2839 /* Update call edge destinations. This cannot be done before loop
2840 info is updated, because we may split basic blocks. */
2841 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2842 && bb->index != ENTRY_BLOCK
2843 && bb->index != EXIT_BLOCK)
2844 redirect_all_calls (id, (basic_block)bb->aux);
2845 ((basic_block)bb->aux)->aux = NULL;
2846 bb->aux = NULL;
2847 }
2848
2849 /* Zero out AUX fields of newly created blocks during EH edge
2850 insertion. */
2851 for (; last < last_basic_block_for_fn (cfun); last++)
2852 {
2853 if (need_debug_cleanup)
2854 maybe_move_debug_stmts_to_successors (id,
2855 BASIC_BLOCK_FOR_FN (cfun, last));
2856 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2857 /* Update call edge destinations. This cannot be done before loop
2858 info is updated, because we may split basic blocks. */
2859 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2860 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2861 }
2862 entry_block_map->aux = NULL;
2863 exit_block_map->aux = NULL;
2864
2865 if (id->eh_map)
2866 {
2867 delete id->eh_map;
2868 id->eh_map = NULL;
2869 }
2870 if (id->dependence_map)
2871 {
2872 delete id->dependence_map;
2873 id->dependence_map = NULL;
2874 }
2875
2876 return new_fndecl;
2877 }
2878
2879 /* Copy the debug STMT using ID. We deal with these statements in a
2880 special way: if any variable in their VALUE expression wasn't
2881 remapped yet, we won't remap it, because that would get decl uids
2882 out of sync, causing codegen differences between -g and -g0. If
2883 this arises, we drop the VALUE expression altogether. */
2884
2885 static void
2886 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2887 {
2888 tree t, *n;
2889 struct walk_stmt_info wi;
2890
2891 if (gimple_block (stmt))
2892 {
2893 n = id->decl_map->get (gimple_block (stmt));
2894 gimple_set_block (stmt, n ? *n : id->block);
2895 }
2896
2897 if (gimple_debug_nonbind_marker_p (stmt))
2898 return;
2899
2900 /* Remap all the operands in STMT. */
2901 memset (&wi, 0, sizeof (wi));
2902 wi.info = id;
2903
2904 processing_debug_stmt = 1;
2905
2906 if (gimple_debug_source_bind_p (stmt))
2907 t = gimple_debug_source_bind_get_var (stmt);
2908 else if (gimple_debug_bind_p (stmt))
2909 t = gimple_debug_bind_get_var (stmt);
2910 else
2911 gcc_unreachable ();
2912
2913 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2914 && (n = id->debug_map->get (t)))
2915 {
2916 gcc_assert (VAR_P (*n));
2917 t = *n;
2918 }
2919 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2920 /* T is a non-localized variable. */;
2921 else
2922 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2923
2924 if (gimple_debug_bind_p (stmt))
2925 {
2926 gimple_debug_bind_set_var (stmt, t);
2927
2928 if (gimple_debug_bind_has_value_p (stmt))
2929 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2930 remap_gimple_op_r, &wi, NULL);
2931
2932 /* Punt if any decl couldn't be remapped. */
2933 if (processing_debug_stmt < 0)
2934 gimple_debug_bind_reset_value (stmt);
2935 }
2936 else if (gimple_debug_source_bind_p (stmt))
2937 {
2938 gimple_debug_source_bind_set_var (stmt, t);
2939 /* When inlining and source bind refers to one of the optimized
2940 away parameters, change the source bind into normal debug bind
2941 referring to the corresponding DEBUG_EXPR_DECL that should have
2942 been bound before the call stmt. */
2943 t = gimple_debug_source_bind_get_value (stmt);
2944 if (t != NULL_TREE
2945 && TREE_CODE (t) == PARM_DECL
2946 && id->call_stmt)
2947 {
2948 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2949 unsigned int i;
2950 if (debug_args != NULL)
2951 {
2952 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2953 if ((**debug_args)[i] == DECL_ORIGIN (t)
2954 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2955 {
2956 t = (**debug_args)[i + 1];
2957 stmt->subcode = GIMPLE_DEBUG_BIND;
2958 gimple_debug_bind_set_value (stmt, t);
2959 break;
2960 }
2961 }
2962 }
2963 if (gimple_debug_source_bind_p (stmt))
2964 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2965 remap_gimple_op_r, &wi, NULL);
2966 }
2967
2968 processing_debug_stmt = 0;
2969
2970 update_stmt (stmt);
2971 }
2972
2973 /* Process deferred debug stmts. In order to give values better odds
2974 of being successfully remapped, we delay the processing of debug
2975 stmts until all other stmts that might require remapping are
2976 processed. */
2977
2978 static void
2979 copy_debug_stmts (copy_body_data *id)
2980 {
2981 size_t i;
2982 gdebug *stmt;
2983
2984 if (!id->debug_stmts.exists ())
2985 return;
2986
2987 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2988 copy_debug_stmt (stmt, id);
2989
2990 id->debug_stmts.release ();
2991 }
2992
2993 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2994 another function. */
2995
2996 static tree
2997 copy_tree_body (copy_body_data *id)
2998 {
2999 tree fndecl = id->src_fn;
3000 tree body = DECL_SAVED_TREE (fndecl);
3001
3002 walk_tree (&body, copy_tree_body_r, id, NULL);
3003
3004 return body;
3005 }
3006
3007 /* Make a copy of the body of FN so that it can be inserted inline in
3008 another function. */
3009
3010 static tree
3011 copy_body (copy_body_data *id,
3012 basic_block entry_block_map, basic_block exit_block_map,
3013 basic_block new_entry)
3014 {
3015 tree fndecl = id->src_fn;
3016 tree body;
3017
3018 /* If this body has a CFG, walk CFG and copy. */
3019 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3020 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3021 new_entry);
3022 copy_debug_stmts (id);
3023
3024 return body;
3025 }
3026
3027 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3028 defined in function FN, or of a data member thereof. */
3029
3030 static bool
3031 self_inlining_addr_expr (tree value, tree fn)
3032 {
3033 tree var;
3034
3035 if (TREE_CODE (value) != ADDR_EXPR)
3036 return false;
3037
3038 var = get_base_address (TREE_OPERAND (value, 0));
3039
3040 return var && auto_var_in_fn_p (var, fn);
3041 }
3042
3043 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3044 lexical block and line number information from BASE_STMT, if given,
3045 or from the last stmt of the block otherwise. */
3046
3047 static gimple *
3048 insert_init_debug_bind (copy_body_data *id,
3049 basic_block bb, tree var, tree value,
3050 gimple *base_stmt)
3051 {
3052 gimple *note;
3053 gimple_stmt_iterator gsi;
3054 tree tracked_var;
3055
3056 if (!gimple_in_ssa_p (id->src_cfun))
3057 return NULL;
3058
3059 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3060 return NULL;
3061
3062 tracked_var = target_for_debug_bind (var);
3063 if (!tracked_var)
3064 return NULL;
3065
3066 if (bb)
3067 {
3068 gsi = gsi_last_bb (bb);
3069 if (!base_stmt && !gsi_end_p (gsi))
3070 base_stmt = gsi_stmt (gsi);
3071 }
3072
3073 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3074
3075 if (bb)
3076 {
3077 if (!gsi_end_p (gsi))
3078 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3079 else
3080 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3081 }
3082
3083 return note;
3084 }
3085
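/* Insert INIT_STMT, which initializes a copied parameter or result
   variable, at the end of basic block BB, regimplifying its operands
   and emitting a matching debug bind when appropriate.  */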
3086 static void
3087 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3088 {
3089 /* If VAR represents a zero-sized variable, it's possible that the
3090 assignment statement may result in no gimple statements. */
3091 if (init_stmt)
3092 {
3093 gimple_stmt_iterator si = gsi_last_bb (bb);
3094
3095 /* We can end up with init statements that store to a non-register
3096 from a rhs with a conversion. Handle that here by forcing the
3097 rhs into a temporary. gimple_regimplify_operands is not
3098 prepared to do this for us. */
3099 if (!is_gimple_debug (init_stmt)
3100 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3101 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3102 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3103 {
3104 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3105 gimple_expr_type (init_stmt),
3106 gimple_assign_rhs1 (init_stmt));
3107 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3108 GSI_NEW_STMT);
3109 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3110 gimple_assign_set_rhs1 (init_stmt, rhs);
3111 }
3112 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3113 gimple_regimplify_operands (init_stmt, &si);
3114
3115 if (!is_gimple_debug (init_stmt))
3116 {
3117 tree def = gimple_assign_lhs (init_stmt);
3118 insert_init_debug_bind (id, bb, def, def, init_stmt);
3119 }
3120 }
3121 }
3122
3123 /* Initialize parameter P with VALUE. If needed, produce an init statement
3124 at the end of BB. When BB is NULL, we return the init statement to be
3125 output later. */
3126 static gimple *
3127 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3128 basic_block bb, tree *vars)
3129 {
3130 gimple *init_stmt = NULL;
3131 tree var;
3132 tree rhs = value;
3133 tree def = (gimple_in_ssa_p (cfun)
3134 ? ssa_default_def (id->src_cfun, p) : NULL);
3135
3136 if (value
3137 && value != error_mark_node
3138 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3139 {
3140 /* If we can match up types by promotion/demotion do so. */
3141 if (fold_convertible_p (TREE_TYPE (p), value))
3142 rhs = fold_convert (TREE_TYPE (p), value);
3143 else
3144 {
3145 /* ??? For valid programs we should not end up here.
3146 Still, if we end up with truly mismatched types here, fall back
3147 to using a VIEW_CONVERT_EXPR or a literal zero to avoid leaking invalid
3148 GIMPLE to the following passes. */
3149 if (!is_gimple_reg_type (TREE_TYPE (value))
3150 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3151 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3152 else
3153 rhs = build_zero_cst (TREE_TYPE (p));
3154 }
3155 }
3156
3157 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3158 here since the type of this decl must be visible to the calling
3159 function. */
3160 var = copy_decl_to_var (p, id);
3161
3162 /* Declare this new variable. */
3163 DECL_CHAIN (var) = *vars;
3164 *vars = var;
3165
3166 /* Make gimplifier happy about this variable. */
3167 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3168
3169 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3170 we would not need to create a new variable here at all, if it
3171 weren't for debug info. Still, we can just use the argument
3172 value. */
3173 if (TREE_READONLY (p)
3174 && !TREE_ADDRESSABLE (p)
3175 && value && !TREE_SIDE_EFFECTS (value)
3176 && !def)
3177 {
3178 /* We may produce non-gimple trees by adding NOPs or introduce
3179 invalid sharing when the operand is not really constant.
3180 It is not a big deal to prohibit constant propagation here as
3181 we will constant propagate in the DOM1 pass anyway. */
3182 if (is_gimple_min_invariant (value)
3183 && useless_type_conversion_p (TREE_TYPE (p),
3184 TREE_TYPE (value))
3185 /* We have to be very careful about ADDR_EXPR. Make sure
3186 the base variable isn't a local variable of the inlined
3187 function, e.g., when doing recursive inlining, direct or
3188 mutually-recursive or whatever, which is why we don't
3189 just test whether fn == current_function_decl. */
3190 && ! self_inlining_addr_expr (value, fn))
3191 {
3192 insert_decl_map (id, p, value);
3193 insert_debug_decl_map (id, p, var);
3194 return insert_init_debug_bind (id, bb, var, value, NULL);
3195 }
3196 }
3197
3198 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3199 that way, when the PARM_DECL is encountered, it will be
3200 automatically replaced by the VAR_DECL. */
3201 insert_decl_map (id, p, var);
3202
3203 /* Even if P was TREE_READONLY, the new VAR should not be.
3204 In the original code, we would have constructed a
3205 temporary, and then the function body would have never
3206 changed the value of P. However, now, we will be
3207 constructing VAR directly. The constructor body may
3208 change its value multiple times as it is being
3209 constructed. Therefore, it must not be TREE_READONLY;
3210 the back-end assumes that a TREE_READONLY variable is
3211 assigned to only once. */
3212 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3213 TREE_READONLY (var) = 0;
3214
3215 /* If there is no setup required and we are in SSA, take the easy route
3216 replacing all SSA names representing the function parameter by the
3217 SSA name passed to the function.
3218
3219 We need to construct a map for the variable anyway as it might be used
3220 in different SSA names when the parameter is set in the function.
3221
3222 Do the replacement at -O0 for const arguments replaced by a constant.
3223 This is important for builtin_constant_p and other constructs requiring
3224 a constant argument to be visible in the inlined function body. */
3225 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3226 && (optimize
3227 || (TREE_READONLY (p)
3228 && is_gimple_min_invariant (rhs)))
3229 && (TREE_CODE (rhs) == SSA_NAME
3230 || is_gimple_min_invariant (rhs))
3231 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3232 {
3233 insert_decl_map (id, def, rhs);
3234 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3235 }
3236
3237 /* If the value of the argument is never used, don't bother initializing
3238 it. */
3239 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3240 {
3241 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3242 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3243 }
3244
3245 /* Initialize this VAR_DECL from the equivalent argument. Convert
3246 the argument to the proper type in case it was promoted. */
3247 if (value)
3248 {
3249 if (rhs == error_mark_node)
3250 {
3251 insert_decl_map (id, p, var);
3252 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3253 }
3254
3255 STRIP_USELESS_TYPE_CONVERSION (rhs);
3256
3257 /* If we are in SSA form, properly remap the default definition
3258 or assign to a dummy SSA name if the parameter is unused and
3259 we are not optimizing. */
3260 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3261 {
3262 if (def)
3263 {
3264 def = remap_ssa_name (def, id);
3265 init_stmt = gimple_build_assign (def, rhs);
3266 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3267 set_ssa_default_def (cfun, var, NULL);
3268 }
3269 else if (!optimize)
3270 {
3271 def = make_ssa_name (var);
3272 init_stmt = gimple_build_assign (def, rhs);
3273 }
3274 }
3275 else
3276 init_stmt = gimple_build_assign (var, rhs);
3277
3278 if (bb && init_stmt)
3279 insert_init_stmt (id, bb, init_stmt);
3280 }
3281 return init_stmt;
3282 }
3283
3284 /* Generate code to initialize the parameters of the function at the
3285 top of the stack in ID from the GIMPLE_CALL STMT. */
3286
3287 static void
3288 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3289 tree fn, basic_block bb)
3290 {
3291 tree parms;
3292 size_t i;
3293 tree p;
3294 tree vars = NULL_TREE;
3295 tree static_chain = gimple_call_chain (stmt);
3296
3297 /* Figure out what the parameters are. */
3298 parms = DECL_ARGUMENTS (fn);
3299
3300 /* Loop through the parameter declarations, replacing each with an
3301 equivalent VAR_DECL, appropriately initialized. */
3302 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3303 {
3304 tree val;
3305 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3306 setup_one_parameter (id, p, val, fn, bb, &vars);
3307 }
3308 /* After remapping the parameters, remap their types. This has to be done
3309 in a second loop over all parameters to appropriately remap
3310 variable sized arrays when the size is specified in a
3311 parameter following the array. */
3312 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3313 {
3314 tree *varp = id->decl_map->get (p);
3315 if (varp && VAR_P (*varp))
3316 {
3317 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3318 ? ssa_default_def (id->src_cfun, p) : NULL);
3319 tree var = *varp;
3320 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3321 /* Also remap the default definition if it was remapped
3322 to the default definition of the parameter replacement
3323 by the parameter setup. */
3324 if (def)
3325 {
3326 tree *defp = id->decl_map->get (def);
3327 if (defp
3328 && TREE_CODE (*defp) == SSA_NAME
3329 && SSA_NAME_VAR (*defp) == var)
3330 TREE_TYPE (*defp) = TREE_TYPE (var);
3331 }
3332 }
3333 }
3334
3335 /* Initialize the static chain. */
3336 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3337 gcc_assert (fn != current_function_decl);
3338 if (p)
3339 {
3340 /* No static chain? Seems like a bug in tree-nested.c. */
3341 gcc_assert (static_chain);
3342
3343 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3344 }
3345
3346 declare_inline_vars (id->block, vars);
3347 }
3348
3349
3350 /* Declare a return variable to replace the RESULT_DECL for the
3351 function we are calling. An appropriate DECL_STMT is returned.
3352 The USE_STMT is filled to contain a use of the declaration to
3353 indicate the return value of the function.
3354
3355 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
3356 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3357 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3358
3359 The return value is a (possibly null) value that holds the result
3360 as seen by the caller. */
3361
3362 static tree
3363 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3364 basic_block entry_bb)
3365 {
3366 tree callee = id->src_fn;
3367 tree result = DECL_RESULT (callee);
3368 tree callee_type = TREE_TYPE (result);
3369 tree caller_type;
3370 tree var, use;
3371
3372 /* Handle type-mismatches in the function declaration return type
3373 vs. the call expression. */
3374 if (modify_dest)
3375 caller_type = TREE_TYPE (modify_dest);
3376 else
3377 caller_type = TREE_TYPE (TREE_TYPE (callee));
3378
3379 /* We don't need to do anything for functions that don't return anything. */
3380 if (VOID_TYPE_P (callee_type))
3381 return NULL_TREE;
3382
3383 /* If there was a return slot, then the return value is the
3384 dereferenced address of that object. */
3385 if (return_slot)
3386 {
3387 /* The front end shouldn't have used both return_slot and
3388 a modify expression. */
3389 gcc_assert (!modify_dest);
3390 if (DECL_BY_REFERENCE (result))
3391 {
3392 tree return_slot_addr = build_fold_addr_expr (return_slot);
3393 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3394
3395 /* We are going to construct *&return_slot and we can't do that
3396 for variables not believed to be addressable.
3397
3398 FIXME: This check can possibly trigger, because values returned
3399 via return slot optimization are not believed to have their address
3400 taken by alias analysis. */
3401 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3402 var = return_slot_addr;
3403 }
3404 else
3405 {
3406 var = return_slot;
3407 gcc_assert (TREE_CODE (var) != SSA_NAME);
3408 if (TREE_ADDRESSABLE (result))
3409 mark_addressable (var);
3410 }
3411 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3412 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3413 && !DECL_GIMPLE_REG_P (result)
3414 && DECL_P (var))
3415 DECL_GIMPLE_REG_P (var) = 0;
3416 use = NULL;
3417 goto done;
3418 }
3419
3420 /* All types requiring non-trivial constructors should have been handled. */
3421 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3422
3423 /* Attempt to avoid creating a new temporary variable. */
3424 if (modify_dest
3425 && TREE_CODE (modify_dest) != SSA_NAME)
3426 {
3427 bool use_it = false;
3428
3429 /* We can't use MODIFY_DEST if there's type promotion involved. */
3430 if (!useless_type_conversion_p (callee_type, caller_type))
3431 use_it = false;
3432
3433 /* ??? If we're assigning to a variable sized type, then we must
3434 reuse the destination variable, because we've no good way to
3435 create variable sized temporaries at this point. */
3436 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3437 use_it = true;
3438
3439 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3440 reuse it as the result of the call directly. Don't do this if
3441 it would promote MODIFY_DEST to addressable. */
3442 else if (TREE_ADDRESSABLE (result))
3443 use_it = false;
3444 else
3445 {
3446 tree base_m = get_base_address (modify_dest);
3447
3448 /* If the base isn't a decl, then it's a pointer, and we don't
3449 know where that's going to go. */
3450 if (!DECL_P (base_m))
3451 use_it = false;
3452 else if (is_global_var (base_m))
3453 use_it = false;
3454 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3455 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3456 && !DECL_GIMPLE_REG_P (result)
3457 && DECL_GIMPLE_REG_P (base_m))
3458 use_it = false;
3459 else if (!TREE_ADDRESSABLE (base_m))
3460 use_it = true;
3461 }
3462
3463 if (use_it)
3464 {
3465 var = modify_dest;
3466 use = NULL;
3467 goto done;
3468 }
3469 }
3470
3471 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3472
3473 var = copy_result_decl_to_var (result, id);
3474 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3475
3476 /* Do not have the rest of GCC warn about this variable as it should
3477 not be visible to the user. */
3478 TREE_NO_WARNING (var) = 1;
3479
3480 declare_inline_vars (id->block, var);
3481
3482 /* Build the use expr. If the return type of the function was
3483 promoted, convert it back to the expected type. */
3484 use = var;
3485 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3486 {
3487 /* If we can match up types by promotion/demotion do so. */
3488 if (fold_convertible_p (caller_type, var))
3489 use = fold_convert (caller_type, var);
3490 else
3491 {
3492 /* ??? For valid programs we should not end up here.
3493 Still if we end up with truly mismatched types here, fall back
3494 to using a MEM_REF to not leak invalid GIMPLE to the following
3495 passes. */
3496 /* Prevent var from being written into SSA form. */
3497 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3498 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3499 DECL_GIMPLE_REG_P (var) = false;
3500 else if (is_gimple_reg_type (TREE_TYPE (var)))
3501 TREE_ADDRESSABLE (var) = true;
3502 use = fold_build2 (MEM_REF, caller_type,
3503 build_fold_addr_expr (var),
3504 build_int_cst (ptr_type_node, 0));
3505 }
3506 }
3507
3508 STRIP_USELESS_TYPE_CONVERSION (use);
3509
3510 if (DECL_BY_REFERENCE (result))
3511 {
3512 TREE_ADDRESSABLE (var) = 1;
3513 var = build_fold_addr_expr (var);
3514 }
3515
3516 done:
3517 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3518 way, when the RESULT_DECL is encountered, it will be
3519 automatically replaced by the VAR_DECL.
3520
3521 When returning by reference, ensure that RESULT_DECL remaps to
3522 gimple_val. */
3523 if (DECL_BY_REFERENCE (result)
3524 && !is_gimple_val (var))
3525 {
3526 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3527 insert_decl_map (id, result, temp);
3528 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3529 its default_def SSA_NAME. */
3530 if (gimple_in_ssa_p (id->src_cfun)
3531 && is_gimple_reg (result))
3532 {
3533 temp = make_ssa_name (temp);
3534 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3535 }
3536 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3537 }
3538 else
3539 insert_decl_map (id, result, var);
3540
3541 /* Remember this so we can ignore it in remap_decls. */
3542 id->retvar = var;
3543 return use;
3544 }
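
/* Illustrative sketch (not compiled): assuming a caller that does
   "a = foo ();" where foo returns an int by value, the function above
   creates a local, say "retval.0", to stand for foo's RESULT_DECL and
   returns it as the USE expression.  The copied body then stores into
   "retval.0" wherever foo returned a value, and expand_call_inline
   below replaces the original GIMPLE_CALL with roughly

       retval.0 = ...;   <- from the copied body of foo
       a = retval.0;     <- rewritten call statement

   The names "foo" and "retval.0" are invented for the example.  */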
3545
3546 /* Determine if the function can be copied. If so return NULL. If
3547 not return a string describing the reason for failure. */
3548
3549 const char *
3550 copy_forbidden (struct function *fun)
3551 {
3552 const char *reason = fun->cannot_be_copied_reason;
3553
3554 /* Only examine the function once. */
3555 if (fun->cannot_be_copied_set)
3556 return reason;
3557
3558 /* We cannot copy a function that receives a non-local goto
3559 because we cannot remap the destination label used in the
3560 function that is performing the non-local goto. */
3561 /* ??? Actually, this should be possible, if we work at it.
3562 No doubt there's just a handful of places that simply
3563 assume it doesn't happen and don't substitute properly. */
3564 if (fun->has_nonlocal_label)
3565 {
3566 reason = G_("function %q+F can never be copied "
3567 "because it receives a non-local goto");
3568 goto fail;
3569 }
3570
3571 if (fun->has_forced_label_in_static)
3572 {
3573 reason = G_("function %q+F can never be copied because it saves "
3574 "address of local label in a static variable");
3575 goto fail;
3576 }
3577
3578 fail:
3579 fun->cannot_be_copied_reason = reason;
3580 fun->cannot_be_copied_set = true;
3581 return reason;
3582 }
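
/* Illustrative sketch (not compiled): a function copy_forbidden rejects
   because it receives a non-local goto, using the GNU nested-function
   extension.  The label "out" in "outer" is the target of a goto from
   the nested function, so outer's body cannot be duplicated.  The
   function names are invented for the example.

       void outer (int n)
       {
         void inner (void) { if (n < 0) goto out; }
         inner ();
         return;
        out:
         __builtin_abort ();
       }
*/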
3583
3584
3585 static const char *inline_forbidden_reason;
3586
3587 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3588 iff a function cannot be inlined. Also sets the reason why. */
3589
3590 static tree
3591 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3592 struct walk_stmt_info *wip)
3593 {
3594 tree fn = (tree) wip->info;
3595 tree t;
3596 gimple *stmt = gsi_stmt (*gsi);
3597
3598 switch (gimple_code (stmt))
3599 {
3600 case GIMPLE_CALL:
3601 /* Refuse to inline an alloca call unless the user explicitly forced it,
3602 as this may change the program's memory overhead drastically when the
3603 function using alloca is called in a loop. In the GCC present in
3604 SPEC2000, inlining into schedule_block caused it to require 2GB of
3605 RAM instead of 256MB. Don't do so for alloca calls emitted for
3606 VLA objects, as those can't cause unbounded growth (they're always
3607 wrapped inside stack_save/stack_restore regions). */
3608 if (gimple_maybe_alloca_call_p (stmt)
3609 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3610 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3611 {
3612 inline_forbidden_reason
3613 = G_("function %q+F can never be inlined because it uses "
3614 "alloca (override using the always_inline attribute)");
3615 *handled_ops_p = true;
3616 return fn;
3617 }
3618
3619 t = gimple_call_fndecl (stmt);
3620 if (t == NULL_TREE)
3621 break;
3622
3623 /* We cannot inline functions that call setjmp. */
3624 if (setjmp_call_p (t))
3625 {
3626 inline_forbidden_reason
3627 = G_("function %q+F can never be inlined because it uses setjmp");
3628 *handled_ops_p = true;
3629 return t;
3630 }
3631
3632 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3633 switch (DECL_FUNCTION_CODE (t))
3634 {
3635 /* We cannot inline functions that take a variable number of
3636 arguments. */
3637 case BUILT_IN_VA_START:
3638 case BUILT_IN_NEXT_ARG:
3639 case BUILT_IN_VA_END:
3640 inline_forbidden_reason
3641 = G_("function %q+F can never be inlined because it "
3642 "uses variable argument lists");
3643 *handled_ops_p = true;
3644 return t;
3645
3646 case BUILT_IN_LONGJMP:
3647 /* We can't inline functions that call __builtin_longjmp at
3648 all. The non-local goto machinery really requires the
3649 destination be in a different function. If we allow the
3650 function calling __builtin_longjmp to be inlined into the
3651 function calling __builtin_setjmp, Things will Go Awry. */
3652 inline_forbidden_reason
3653 = G_("function %q+F can never be inlined because "
3654 "it uses setjmp-longjmp exception handling");
3655 *handled_ops_p = true;
3656 return t;
3657
3658 case BUILT_IN_NONLOCAL_GOTO:
3659 /* Similarly. */
3660 inline_forbidden_reason
3661 = G_("function %q+F can never be inlined because "
3662 "it uses non-local goto");
3663 *handled_ops_p = true;
3664 return t;
3665
3666 case BUILT_IN_RETURN:
3667 case BUILT_IN_APPLY_ARGS:
3668 /* If a __builtin_apply_args caller would be inlined,
3669 it would be saving arguments of the function it has
3670 been inlined into. Similarly __builtin_return would
3671 return from the function it has been inlined into. */
3672 inline_forbidden_reason
3673 = G_("function %q+F can never be inlined because "
3674 "it uses __builtin_return or __builtin_apply_args");
3675 *handled_ops_p = true;
3676 return t;
3677
3678 default:
3679 break;
3680 }
3681 break;
3682
3683 case GIMPLE_GOTO:
3684 t = gimple_goto_dest (stmt);
3685
3686 /* We will not inline a function which uses computed goto. The
3687 addresses of its local labels, which may be tucked into
3688 global storage, are of course not constant across
3689 instantiations, which causes unexpected behavior. */
3690 if (TREE_CODE (t) != LABEL_DECL)
3691 {
3692 inline_forbidden_reason
3693 = G_("function %q+F can never be inlined "
3694 "because it contains a computed goto");
3695 *handled_ops_p = true;
3696 return t;
3697 }
3698 break;
3699
3700 default:
3701 break;
3702 }
3703
3704 *handled_ops_p = false;
3705 return NULL_TREE;
3706 }
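
/* Illustrative sketch (not compiled): two functions the walker above
   refuses to inline, assuming neither carries the always_inline
   attribute.  The names are invented for the example.

       void *grab (unsigned long n)
       { return __builtin_alloca (n); }        <- bare alloca call

       void hop (void **labels, int i)
       { goto *labels[i]; }                    <- computed goto
*/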
3707
3708 /* Return true if FNDECL is a function that cannot be inlined into
3709 another one. */
3710
3711 static bool
3712 inline_forbidden_p (tree fndecl)
3713 {
3714 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3715 struct walk_stmt_info wi;
3716 basic_block bb;
3717 bool forbidden_p = false;
3718
3719 /* First check for shared reasons not to copy the code. */
3720 inline_forbidden_reason = copy_forbidden (fun);
3721 if (inline_forbidden_reason != NULL)
3722 return true;
3723
3724 /* Next, walk the statements of the function looking for
3725 constructs we can't handle, or that are non-optimal for inlining. */
3726 hash_set<tree> visited_nodes;
3727 memset (&wi, 0, sizeof (wi));
3728 wi.info = (void *) fndecl;
3729 wi.pset = &visited_nodes;
3730
3731 FOR_EACH_BB_FN (bb, fun)
3732 {
3733 gimple *ret;
3734 gimple_seq seq = bb_seq (bb);
3735 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3736 forbidden_p = (ret != NULL);
3737 if (forbidden_p)
3738 break;
3739 }
3740
3741 return forbidden_p;
3742 }
3743 \f
3744 /* Return false if the function FNDECL cannot be inlined on account of its
3745 attributes, true otherwise. */
3746 static bool
3747 function_attribute_inlinable_p (const_tree fndecl)
3748 {
3749 if (targetm.attribute_table)
3750 {
3751 const_tree a;
3752
3753 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3754 {
3755 const_tree name = TREE_PURPOSE (a);
3756 int i;
3757
3758 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3759 if (is_attribute_p (targetm.attribute_table[i].name, name))
3760 return targetm.function_attribute_inlinable_p (fndecl);
3761 }
3762 }
3763
3764 return true;
3765 }
3766
3767 /* Returns nonzero if FN is a function that does not have any
3768 fundamental inline blocking properties. */
3769
3770 bool
3771 tree_inlinable_function_p (tree fn)
3772 {
3773 bool inlinable = true;
3774 bool do_warning;
3775 tree always_inline;
3776
3777 /* If we've already decided this function shouldn't be inlined,
3778 there's no need to check again. */
3779 if (DECL_UNINLINABLE (fn))
3780 return false;
3781
3782 /* We only warn for functions declared `inline' by the user. */
3783 do_warning = (warn_inline
3784 && DECL_DECLARED_INLINE_P (fn)
3785 && !DECL_NO_INLINE_WARNING_P (fn)
3786 && !DECL_IN_SYSTEM_HEADER (fn));
3787
3788 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3789
3790 if (flag_no_inline
3791 && always_inline == NULL)
3792 {
3793 if (do_warning)
3794 warning (OPT_Winline, "function %q+F can never be inlined because it "
3795 "is suppressed using -fno-inline", fn);
3796 inlinable = false;
3797 }
3798
3799 else if (!function_attribute_inlinable_p (fn))
3800 {
3801 if (do_warning)
3802 warning (OPT_Winline, "function %q+F can never be inlined because it "
3803 "uses attributes conflicting with inlining", fn);
3804 inlinable = false;
3805 }
3806
3807 else if (inline_forbidden_p (fn))
3808 {
3809 /* See if we should warn about uninlinable functions. Previously,
3810 some of these warnings would be issued while trying to expand
3811 the function inline, but that would cause multiple warnings
3812 about functions that would for example call alloca. But since
3813 this is a property of the function, just one warning is enough.
3814 As a bonus we can now give more details about the reason why a
3815 function is not inlinable. */
3816 if (always_inline)
3817 error (inline_forbidden_reason, fn);
3818 else if (do_warning)
3819 warning (OPT_Winline, inline_forbidden_reason, fn);
3820
3821 inlinable = false;
3822 }
3823
3824 /* Squirrel away the result so that we don't have to check again. */
3825 DECL_UNINLINABLE (fn) = !inlinable;
3826
3827 return inlinable;
3828 }
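
/* Illustrative note: with -fno-inline only functions carrying the
   always_inline attribute get past the first check above, so a plain
   "inline" function compiled with -fno-inline -Winline draws the
   "is suppressed using -fno-inline" warning and is marked
   DECL_UNINLINABLE for the rest of the compilation.  */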
3829
3830 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3831 word size, take a possible memcpy call into account, and return the
3832 cost based on whether we optimize for size or speed according to SPEED_P. */
3833
3834 int
3835 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3836 {
3837 HOST_WIDE_INT size;
3838
3839 gcc_assert (!VOID_TYPE_P (type));
3840
3841 if (TREE_CODE (type) == VECTOR_TYPE)
3842 {
3843 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3844 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3845 int orig_mode_size
3846 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3847 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3848 return ((orig_mode_size + simd_mode_size - 1)
3849 / simd_mode_size);
3850 }
3851
3852 size = int_size_in_bytes (type);
3853
3854 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3855 /* Cost of a memcpy call, 3 arguments and the call. */
3856 return 4;
3857 else
3858 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3859 }
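
/* Illustrative arithmetic, assuming MOVE_MAX_PIECES == 16 and
   MOVE_RATIO (speed_p) == 4 (both are target-dependent): a 48-byte
   struct costs (48 + 16 - 1) / 16 == 3, while a 128-byte struct
   exceeds the 16 * 4 == 64 byte threshold and is charged 4, the
   assumed cost of a memcpy call (3 arguments plus the call).  */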
3860
3861 /* Returns the cost of operation CODE, according to WEIGHTS. */
3862
3863 static int
3864 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3865 tree op1 ATTRIBUTE_UNUSED, tree op2)
3866 {
3867 switch (code)
3868 {
3869 /* These are "free" conversions, or their presumed cost
3870 is folded into other operations. */
3871 case RANGE_EXPR:
3872 CASE_CONVERT:
3873 case COMPLEX_EXPR:
3874 case PAREN_EXPR:
3875 case VIEW_CONVERT_EXPR:
3876 return 0;
3877
3878 /* Assign cost of 1 to usual operations.
3879 ??? We may consider mapping RTL costs to this. */
3880 case COND_EXPR:
3881 case VEC_COND_EXPR:
3882 case VEC_PERM_EXPR:
3883
3884 case PLUS_EXPR:
3885 case POINTER_PLUS_EXPR:
3886 case POINTER_DIFF_EXPR:
3887 case MINUS_EXPR:
3888 case MULT_EXPR:
3889 case MULT_HIGHPART_EXPR:
3890
3891 case ADDR_SPACE_CONVERT_EXPR:
3892 case FIXED_CONVERT_EXPR:
3893 case FIX_TRUNC_EXPR:
3894
3895 case NEGATE_EXPR:
3896 case FLOAT_EXPR:
3897 case MIN_EXPR:
3898 case MAX_EXPR:
3899 case ABS_EXPR:
3900 case ABSU_EXPR:
3901
3902 case LSHIFT_EXPR:
3903 case RSHIFT_EXPR:
3904 case LROTATE_EXPR:
3905 case RROTATE_EXPR:
3906
3907 case BIT_IOR_EXPR:
3908 case BIT_XOR_EXPR:
3909 case BIT_AND_EXPR:
3910 case BIT_NOT_EXPR:
3911
3912 case TRUTH_ANDIF_EXPR:
3913 case TRUTH_ORIF_EXPR:
3914 case TRUTH_AND_EXPR:
3915 case TRUTH_OR_EXPR:
3916 case TRUTH_XOR_EXPR:
3917 case TRUTH_NOT_EXPR:
3918
3919 case LT_EXPR:
3920 case LE_EXPR:
3921 case GT_EXPR:
3922 case GE_EXPR:
3923 case EQ_EXPR:
3924 case NE_EXPR:
3925 case ORDERED_EXPR:
3926 case UNORDERED_EXPR:
3927
3928 case UNLT_EXPR:
3929 case UNLE_EXPR:
3930 case UNGT_EXPR:
3931 case UNGE_EXPR:
3932 case UNEQ_EXPR:
3933 case LTGT_EXPR:
3934
3935 case CONJ_EXPR:
3936
3937 case PREDECREMENT_EXPR:
3938 case PREINCREMENT_EXPR:
3939 case POSTDECREMENT_EXPR:
3940 case POSTINCREMENT_EXPR:
3941
3942 case REALIGN_LOAD_EXPR:
3943
3944 case WIDEN_SUM_EXPR:
3945 case WIDEN_MULT_EXPR:
3946 case DOT_PROD_EXPR:
3947 case SAD_EXPR:
3948 case WIDEN_MULT_PLUS_EXPR:
3949 case WIDEN_MULT_MINUS_EXPR:
3950 case WIDEN_LSHIFT_EXPR:
3951
3952 case VEC_WIDEN_MULT_HI_EXPR:
3953 case VEC_WIDEN_MULT_LO_EXPR:
3954 case VEC_WIDEN_MULT_EVEN_EXPR:
3955 case VEC_WIDEN_MULT_ODD_EXPR:
3956 case VEC_UNPACK_HI_EXPR:
3957 case VEC_UNPACK_LO_EXPR:
3958 case VEC_UNPACK_FLOAT_HI_EXPR:
3959 case VEC_UNPACK_FLOAT_LO_EXPR:
3960 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3961 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3962 case VEC_PACK_TRUNC_EXPR:
3963 case VEC_PACK_SAT_EXPR:
3964 case VEC_PACK_FIX_TRUNC_EXPR:
3965 case VEC_PACK_FLOAT_EXPR:
3966 case VEC_WIDEN_LSHIFT_HI_EXPR:
3967 case VEC_WIDEN_LSHIFT_LO_EXPR:
3968 case VEC_DUPLICATE_EXPR:
3969 case VEC_SERIES_EXPR:
3970
3971 return 1;
3972
3973 /* A few special cases of expensive operations. This is useful
3974 to avoid inlining functions having too many of these. */
3975 case TRUNC_DIV_EXPR:
3976 case CEIL_DIV_EXPR:
3977 case FLOOR_DIV_EXPR:
3978 case ROUND_DIV_EXPR:
3979 case EXACT_DIV_EXPR:
3980 case TRUNC_MOD_EXPR:
3981 case CEIL_MOD_EXPR:
3982 case FLOOR_MOD_EXPR:
3983 case ROUND_MOD_EXPR:
3984 case RDIV_EXPR:
3985 if (TREE_CODE (op2) != INTEGER_CST)
3986 return weights->div_mod_cost;
3987 return 1;
3988
3989 /* Bit-field insertion needs several shift and mask operations. */
3990 case BIT_INSERT_EXPR:
3991 return 3;
3992
3993 default:
3994 /* We expect a copy assignment with no operator. */
3995 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3996 return 0;
3997 }
3998 }
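
/* Illustrative note: a division or modulo by a value that is not an
   INTEGER_CST, e.g. "x / y", is charged WEIGHTS->div_mod_cost (10 in
   eni_time_weights below), whereas "x / 8" costs 1 like other simple
   arithmetic and a plain copy or NOP conversion costs 0.  */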
3999
4000
4001 /* Estimate number of instructions that will be created by expanding
4002 the statements in the statement sequence STMTS.
4003 WEIGHTS contains weights attributed to various constructs. */
4004
4005 int
4006 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4007 {
4008 int cost;
4009 gimple_stmt_iterator gsi;
4010
4011 cost = 0;
4012 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4013 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4014
4015 return cost;
4016 }
4017
4018
4019 /* Estimate number of instructions that will be created by expanding STMT.
4020 WEIGHTS contains weights attributed to various constructs. */
4021
4022 int
4023 estimate_num_insns (gimple *stmt, eni_weights *weights)
4024 {
4025 unsigned cost, i;
4026 enum gimple_code code = gimple_code (stmt);
4027 tree lhs;
4028 tree rhs;
4029
4030 switch (code)
4031 {
4032 case GIMPLE_ASSIGN:
4033 /* Try to estimate the cost of assignments. We have two cases to
4034 deal with:
4035 1) Simple assignments to registers;
4036 2) Stores to things that must live in memory. This includes
4037 "normal" stores to scalars, but also assignments of large
4038 structures, or constructors of big arrays;
4039
4040 Let us look at these two cases, assuming we have "a = b + C":
4041 <GIMPLE_ASSIGN <var_decl "a">
4042 <plus_expr <var_decl "b"> <constant C>>
4043 If "a" is a GIMPLE register, the assignment to it is free on almost
4044 any target, because "a" usually ends up in a real register. Hence
4045 the only cost of this expression comes from the PLUS_EXPR, and we
4046 can ignore the GIMPLE_ASSIGN.
4047 If "a" is not a GIMPLE register, the assignment to "a" will most
4048 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4049 of moving something into "a", which we compute using the function
4050 estimate_move_cost. */
4051 if (gimple_clobber_p (stmt))
4052 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4053
4054 lhs = gimple_assign_lhs (stmt);
4055 rhs = gimple_assign_rhs1 (stmt);
4056
4057 cost = 0;
4058
4059 /* Account for the cost of moving to / from memory. */
4060 if (gimple_store_p (stmt))
4061 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4062 if (gimple_assign_load_p (stmt))
4063 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4064
4065 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4066 gimple_assign_rhs1 (stmt),
4067 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4068 == GIMPLE_BINARY_RHS
4069 ? gimple_assign_rhs2 (stmt) : NULL);
4070 break;
4071
4072 case GIMPLE_COND:
4073 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4074 gimple_op (stmt, 0),
4075 gimple_op (stmt, 1));
4076 break;
4077
4078 case GIMPLE_SWITCH:
4079 {
4080 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4081 /* Take into account cost of the switch + guess 2 conditional jumps for
4082 each case label.
4083
4084 TODO: once the switch expansion logic is sufficiently separated, we can
4085 do a better job of estimating the cost of the switch. */
4086 if (weights->time_based)
4087 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4088 else
4089 cost = gimple_switch_num_labels (switch_stmt) * 2;
4090 }
4091 break;
4092
4093 case GIMPLE_CALL:
4094 {
4095 tree decl;
4096
4097 if (gimple_call_internal_p (stmt))
4098 return 0;
4099 else if ((decl = gimple_call_fndecl (stmt))
4100 && fndecl_built_in_p (decl))
4101 {
4102 /* Do not special case builtins where we see the body.
4103 This just confuses the inliner. */
4104 struct cgraph_node *node;
4105 if (!(node = cgraph_node::get (decl))
4106 || node->definition)
4107 ;
4108 /* For builtins that are likely expanded to nothing or
4109 inlined do not account operand costs. */
4110 else if (is_simple_builtin (decl))
4111 return 0;
4112 else if (is_inexpensive_builtin (decl))
4113 return weights->target_builtin_call_cost;
4114 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4115 {
4116 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4117 specialize the cheap expansion we do here.
4118 ??? This asks for a more general solution. */
4119 switch (DECL_FUNCTION_CODE (decl))
4120 {
4121 case BUILT_IN_POW:
4122 case BUILT_IN_POWF:
4123 case BUILT_IN_POWL:
4124 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4125 && (real_equal
4126 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4127 &dconst2)))
4128 return estimate_operator_cost
4129 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4130 gimple_call_arg (stmt, 0));
4131 break;
4132
4133 default:
4134 break;
4135 }
4136 }
4137 }
4138
4139 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4140 if (gimple_call_lhs (stmt))
4141 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4142 weights->time_based);
4143 for (i = 0; i < gimple_call_num_args (stmt); i++)
4144 {
4145 tree arg = gimple_call_arg (stmt, i);
4146 cost += estimate_move_cost (TREE_TYPE (arg),
4147 weights->time_based);
4148 }
4149 break;
4150 }
4151
4152 case GIMPLE_RETURN:
4153 return weights->return_cost;
4154
4155 case GIMPLE_GOTO:
4156 case GIMPLE_LABEL:
4157 case GIMPLE_NOP:
4158 case GIMPLE_PHI:
4159 case GIMPLE_PREDICT:
4160 case GIMPLE_DEBUG:
4161 return 0;
4162
4163 case GIMPLE_ASM:
4164 {
4165 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4166 /* 1000 means infinity. This avoids overflows later
4167 with very long asm statements. */
4168 if (count > 1000)
4169 count = 1000;
4170 /* If this asm is asm inline, count anything as minimum size. */
4171 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4172 count = MIN (1, count);
4173 return MAX (1, count);
4174 }
4175
4176 case GIMPLE_RESX:
4177 /* This is either going to be an external function call with one
4178 argument, or two register copy statements plus a goto. */
4179 return 2;
4180
4181 case GIMPLE_EH_DISPATCH:
4182 /* ??? This is going to turn into a switch statement. Ideally
4183 we'd have a look at the eh region and estimate the number of
4184 edges involved. */
4185 return 10;
4186
4187 case GIMPLE_BIND:
4188 return estimate_num_insns_seq (
4189 gimple_bind_body (as_a <gbind *> (stmt)),
4190 weights);
4191
4192 case GIMPLE_EH_FILTER:
4193 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4194
4195 case GIMPLE_CATCH:
4196 return estimate_num_insns_seq (gimple_catch_handler (
4197 as_a <gcatch *> (stmt)),
4198 weights);
4199
4200 case GIMPLE_TRY:
4201 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4202 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4203
4204 /* OMP directives are generally very expensive. */
4205
4206 case GIMPLE_OMP_RETURN:
4207 case GIMPLE_OMP_SECTIONS_SWITCH:
4208 case GIMPLE_OMP_ATOMIC_STORE:
4209 case GIMPLE_OMP_CONTINUE:
4210 /* ...except these, which are cheap. */
4211 return 0;
4212
4213 case GIMPLE_OMP_ATOMIC_LOAD:
4214 return weights->omp_cost;
4215
4216 case GIMPLE_OMP_FOR:
4217 return (weights->omp_cost
4218 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4219 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4220
4221 case GIMPLE_OMP_PARALLEL:
4222 case GIMPLE_OMP_TASK:
4223 case GIMPLE_OMP_CRITICAL:
4224 case GIMPLE_OMP_MASTER:
4225 case GIMPLE_OMP_TASKGROUP:
4226 case GIMPLE_OMP_ORDERED:
4227 case GIMPLE_OMP_SECTION:
4228 case GIMPLE_OMP_SECTIONS:
4229 case GIMPLE_OMP_SINGLE:
4230 case GIMPLE_OMP_TARGET:
4231 case GIMPLE_OMP_TEAMS:
4232 return (weights->omp_cost
4233 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4234
4235 case GIMPLE_TRANSACTION:
4236 return (weights->tm_cost
4237 + estimate_num_insns_seq (gimple_transaction_body (
4238 as_a <gtransaction *> (stmt)),
4239 weights));
4240
4241 default:
4242 gcc_unreachable ();
4243 }
4244
4245 return cost;
4246 }
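
/* Illustrative estimate for a single statement, assuming a 4-byte int
   and MOVE_MAX_PIECES >= 4: a store "*p_1 = a_2 + b_3" is charged
   estimate_move_cost (int) == 1 for the store plus 1 for the
   PLUS_EXPR, i.e. 2, while a register copy "c_4 = d_5" of the same
   type is charged 0.  */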
4247
4248 /* Estimate number of instructions that will be created by expanding
4249 function FNDECL. WEIGHTS contains weights attributed to various
4250 constructs. */
4251
4252 int
4253 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4254 {
4255 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4256 gimple_stmt_iterator bsi;
4257 basic_block bb;
4258 int n = 0;
4259
4260 gcc_assert (my_function && my_function->cfg);
4261 FOR_EACH_BB_FN (bb, my_function)
4262 {
4263 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4264 n += estimate_num_insns (gsi_stmt (bsi), weights);
4265 }
4266
4267 return n;
4268 }
4269
4270
4271 /* Initializes weights used by estimate_num_insns. */
4272
4273 void
4274 init_inline_once (void)
4275 {
4276 eni_size_weights.call_cost = 1;
4277 eni_size_weights.indirect_call_cost = 3;
4278 eni_size_weights.target_builtin_call_cost = 1;
4279 eni_size_weights.div_mod_cost = 1;
4280 eni_size_weights.omp_cost = 40;
4281 eni_size_weights.tm_cost = 10;
4282 eni_size_weights.time_based = false;
4283 eni_size_weights.return_cost = 1;
4284
4285 /* Estimating time for call is difficult, since we have no idea what the
4286 called function does. In the current uses of eni_time_weights,
4287 underestimating the cost does less harm than overestimating it, so
4288 we choose a rather small value here. */
4289 eni_time_weights.call_cost = 10;
4290 eni_time_weights.indirect_call_cost = 15;
4291 eni_time_weights.target_builtin_call_cost = 1;
4292 eni_time_weights.div_mod_cost = 10;
4293 eni_time_weights.omp_cost = 40;
4294 eni_time_weights.tm_cost = 40;
4295 eni_time_weights.time_based = true;
4296 eni_time_weights.return_cost = 2;
4297 }
4298
4299
4300 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4301
4302 static void
4303 prepend_lexical_block (tree current_block, tree new_block)
4304 {
4305 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4306 BLOCK_SUBBLOCKS (current_block) = new_block;
4307 BLOCK_SUPERCONTEXT (new_block) = current_block;
4308 }
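
/* Illustrative note: if CURRENT_BLOCK already has subblocks B1 -> B2,
   then after prepend_lexical_block (CURRENT_BLOCK, NEW) the chain is
   NEW -> B1 -> B2 and BLOCK_SUPERCONTEXT (NEW) == CURRENT_BLOCK.  */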
4309
4310 /* Add local variables from CALLEE to CALLER. */
4311
4312 static inline void
4313 add_local_variables (struct function *callee, struct function *caller,
4314 copy_body_data *id)
4315 {
4316 tree var;
4317 unsigned ix;
4318
4319 FOR_EACH_LOCAL_DECL (callee, ix, var)
4320 if (!can_be_nonlocal (var, id))
4321 {
4322 tree new_var = remap_decl (var, id);
4323
4324 /* Remap debug-expressions. */
4325 if (VAR_P (new_var)
4326 && DECL_HAS_DEBUG_EXPR_P (var)
4327 && new_var != var)
4328 {
4329 tree tem = DECL_DEBUG_EXPR (var);
4330 bool old_regimplify = id->regimplify;
4331 id->remapping_type_depth++;
4332 walk_tree (&tem, copy_tree_body_r, id, NULL);
4333 id->remapping_type_depth--;
4334 id->regimplify = old_regimplify;
4335 SET_DECL_DEBUG_EXPR (new_var, tem);
4336 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4337 }
4338 add_local_decl (caller, new_var);
4339 }
4340 }
4341
4342 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4343 have brought in or introduced any debug stmts for SRCVAR. */
4344
4345 static inline void
4346 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4347 {
4348 tree *remappedvarp = id->decl_map->get (srcvar);
4349
4350 if (!remappedvarp)
4351 return;
4352
4353 if (!VAR_P (*remappedvarp))
4354 return;
4355
4356 if (*remappedvarp == id->retvar)
4357 return;
4358
4359 tree tvar = target_for_debug_bind (*remappedvarp);
4360 if (!tvar)
4361 return;
4362
4363 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4364 id->call_stmt);
4365 gimple_seq_add_stmt (bindings, stmt);
4366 }
4367
4368 /* For each inlined variable for which we may have debug bind stmts,
4369 add before GSI a final debug stmt resetting it, marking the end of
4370 its life, so that var-tracking knows it doesn't have to compute
4371 further locations for it. */
4372
4373 static inline void
4374 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4375 {
4376 tree var;
4377 unsigned ix;
4378 gimple_seq bindings = NULL;
4379
4380 if (!gimple_in_ssa_p (id->src_cfun))
4381 return;
4382
4383 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4384 return;
4385
4386 for (var = DECL_ARGUMENTS (id->src_fn);
4387 var; var = DECL_CHAIN (var))
4388 reset_debug_binding (id, var, &bindings);
4389
4390 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4391 reset_debug_binding (id, var, &bindings);
4392
4393 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4394 }
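
/* Illustrative note: for an inlined variable "v" that may have debug
   bind stmts, this emits a stmt of the form "# DEBUG v => NULL" right
   before GSI (the statement following the inlined body), so
   var-tracking does not extend v's location range past the inlined
   call.  */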
4395
4396 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4397
4398 static bool
4399 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4400 {
4401 tree use_retvar;
4402 tree fn;
4403 hash_map<tree, tree> *dst;
4404 hash_map<tree, tree> *st = NULL;
4405 tree return_slot;
4406 tree modify_dest;
4407 struct cgraph_edge *cg_edge;
4408 cgraph_inline_failed_t reason;
4409 basic_block return_block;
4410 edge e;
4411 gimple_stmt_iterator gsi, stmt_gsi;
4412 bool successfully_inlined = false;
4413 bool purge_dead_abnormal_edges;
4414 gcall *call_stmt;
4415 unsigned int prop_mask, src_properties;
4416 struct function *dst_cfun;
4417 tree simduid;
4418 use_operand_p use;
4419 gimple *simtenter_stmt = NULL;
4420 vec<tree> *simtvars_save;
4421
4422 /* The gimplifier uses input_location in too many places, such as
4423 internal_get_tmp_var (). */
4424 location_t saved_location = input_location;
4425 input_location = gimple_location (stmt);
4426
4427 /* From here on, we're only interested in CALL_EXPRs. */
4428 call_stmt = dyn_cast <gcall *> (stmt);
4429 if (!call_stmt)
4430 goto egress;
4431
4432 cg_edge = id->dst_node->get_edge (stmt);
4433 gcc_checking_assert (cg_edge);
4434 /* First, see if we can figure out what function is being called.
4435 If we cannot, then there is no hope of inlining the function. */
4436 if (cg_edge->indirect_unknown_callee)
4437 goto egress;
4438 fn = cg_edge->callee->decl;
4439 gcc_checking_assert (fn);
4440
4441 /* If FN is a declaration of a function in a nested scope that was
4442 globally declared inline, we don't set its DECL_INITIAL.
4443 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4444 C++ front-end uses it for cdtors to refer to their internal
4445 declarations, which are not real functions. Fortunately those
4446 don't have trees to be saved, so we can tell by checking their
4447 gimple_body. */
4448 if (!DECL_INITIAL (fn)
4449 && DECL_ABSTRACT_ORIGIN (fn)
4450 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4451 fn = DECL_ABSTRACT_ORIGIN (fn);
4452
4453 /* Don't try to inline functions that are not well-suited to inlining. */
4454 if (cg_edge->inline_failed)
4455 {
4456 reason = cg_edge->inline_failed;
4457 /* If this call was originally indirect, we do not want to emit any
4458 inlining related warnings or sorry messages because there are no
4459 guarantees regarding those. */
4460 if (cg_edge->indirect_inlining_edge)
4461 goto egress;
4462
4463 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4464 /* For extern inline functions that get redefined we have always
4465 silently ignored the always_inline flag. Better behavior would
4466 be to be able to keep both bodies and use extern inline body
4467 for inlining, but we can't do that because frontends overwrite
4468 the body. */
4469 && !cg_edge->callee->local.redefined_extern_inline
4470 /* During early inline pass, report only when optimization is
4471 not turned on. */
4472 && (symtab->global_info_ready
4473 || !optimize
4474 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4475 /* PR 20090218-1_0.c. Body can be provided by another module. */
4476 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4477 {
4478 error ("inlining failed in call to always_inline %q+F: %s", fn,
4479 cgraph_inline_failed_string (reason));
4480 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4481 inform (gimple_location (stmt), "called from here");
4482 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4483 inform (DECL_SOURCE_LOCATION (cfun->decl),
4484 "called from this function");
4485 }
4486 else if (warn_inline
4487 && DECL_DECLARED_INLINE_P (fn)
4488 && !DECL_NO_INLINE_WARNING_P (fn)
4489 && !DECL_IN_SYSTEM_HEADER (fn)
4490 && reason != CIF_UNSPECIFIED
4491 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4492 /* Do not warn about not inlined recursive calls. */
4493 && !cg_edge->recursive_p ()
4494 /* Avoid warnings during early inline pass. */
4495 && symtab->global_info_ready)
4496 {
4497 auto_diagnostic_group d;
4498 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4499 fn, _(cgraph_inline_failed_string (reason))))
4500 {
4501 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4502 inform (gimple_location (stmt), "called from here");
4503 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4504 inform (DECL_SOURCE_LOCATION (cfun->decl),
4505 "called from this function");
4506 }
4507 }
4508 goto egress;
4509 }
4510 id->src_node = cg_edge->callee;
4511
4512 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4513 and redirect to the function being thunked. */
4514 if (id->src_node->thunk.thunk_p)
4515 {
4516 cgraph_edge *edge;
4517 tree virtual_offset = NULL;
4518 profile_count count = cg_edge->count;
4519 tree op;
4520 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4521
4522 cg_edge->remove ();
4523 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4524 gimple_uid (stmt),
4525 profile_count::one (),
4526 profile_count::one (),
4527 true);
4528 edge->count = count;
4529 if (id->src_node->thunk.virtual_offset_p)
4530 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4531 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4532 NULL);
4533 gsi_insert_before (&iter, gimple_build_assign (op,
4534 gimple_call_arg (stmt, 0)),
4535 GSI_NEW_STMT);
4536 gcc_assert (id->src_node->thunk.this_adjusting);
4537 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4538 virtual_offset, id->src_node->thunk.indirect_offset);
4539
4540 gimple_call_set_arg (stmt, 0, op);
4541 gimple_call_set_fndecl (stmt, edge->callee->decl);
4542 update_stmt (stmt);
4543 id->src_node->remove ();
4544 expand_call_inline (bb, stmt, id);
4545 maybe_remove_unused_call_args (cfun, stmt);
4546 return true;
4547 }
4548 fn = cg_edge->callee->decl;
4549 cg_edge->callee->get_untransformed_body ();
4550
4551 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4552 cg_edge->callee->verify ();
4553
4554 /* We will be inlining this callee. */
4555 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4556 id->assign_stmts.create (0);
4557
4558 /* Update the caller's EH personality. */
4559 if (DECL_FUNCTION_PERSONALITY (fn))
4560 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4561 = DECL_FUNCTION_PERSONALITY (fn);
4562
4563 /* Split the block before the GIMPLE_CALL. */
4564 stmt_gsi = gsi_for_stmt (stmt);
4565 gsi_prev (&stmt_gsi);
4566 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4567 bb = e->src;
4568 return_block = e->dest;
4569 remove_edge (e);
4570
4571 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4572 been the source of abnormal edges. In this case, schedule
4573 the removal of dead abnormal edges. */
4574 gsi = gsi_start_bb (return_block);
4575 gsi_next (&gsi);
4576 purge_dead_abnormal_edges = gsi_end_p (gsi);
4577
4578 stmt_gsi = gsi_start_bb (return_block);
4579
4580 /* Build a block containing code to initialize the arguments, the
4581 actual inline expansion of the body, and a label for the return
4582 statements within the function to jump to. The type of the
4583 statement expression is the return type of the function call.
4584 ??? If the call does not have an associated block then we will
4585 remap all callee blocks to NULL, effectively dropping most of
4586 its debug information. This should only happen for calls to
4587 artificial decls inserted by the compiler itself. We need to
4588 either link the inlined blocks into the caller block tree or
4589 not refer to them in any way to not break GC for locations. */
4590 if (gimple_block (stmt))
4591 {
4592 /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4593 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4594 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4595 if (loc == UNKNOWN_LOCATION)
4596 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4597 if (loc == UNKNOWN_LOCATION)
4598 loc = BUILTINS_LOCATION;
4599 id->block = make_node (BLOCK);
4600 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4601 BLOCK_SOURCE_LOCATION (id->block) = loc;
4602 prepend_lexical_block (gimple_block (stmt), id->block);
4603 }
4604
4605 /* Local declarations will be replaced by their equivalents in this map. */
4606 st = id->decl_map;
4607 id->decl_map = new hash_map<tree, tree>;
4608 dst = id->debug_map;
4609 id->debug_map = NULL;
4610 if (flag_stack_reuse != SR_NONE)
4611 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4612
4613 /* Record the function we are about to inline. */
4614 id->src_fn = fn;
4615 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4616 id->reset_location = DECL_IGNORED_P (fn);
4617 id->call_stmt = call_stmt;
4618
4619 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4620 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4621 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4622 simtvars_save = id->dst_simt_vars;
4623 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4624 && (simduid = bb->loop_father->simduid) != NULL_TREE
4625 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4626 && single_imm_use (simduid, &use, &simtenter_stmt)
4627 && is_gimple_call (simtenter_stmt)
4628 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4629 vec_alloc (id->dst_simt_vars, 0);
4630 else
4631 id->dst_simt_vars = NULL;
4632
4633 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4634 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4635
4636 /* If the src function contains an IFN_VA_ARG, then so will the dst
4637 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4638 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4639 src_properties = id->src_cfun->curr_properties & prop_mask;
4640 if (src_properties != prop_mask)
4641 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4642
4643 gcc_assert (!id->src_cfun->after_inlining);
4644
4645 id->entry_bb = bb;
4646 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4647 {
4648 gimple_stmt_iterator si = gsi_last_bb (bb);
4649 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4650 NOT_TAKEN),
4651 GSI_NEW_STMT);
4652 }
4653 initialize_inlined_parameters (id, stmt, fn, bb);
4654 if (debug_nonbind_markers_p && debug_inline_points && id->block
4655 && inlined_function_outer_scope_p (id->block))
4656 {
4657 gimple_stmt_iterator si = gsi_last_bb (bb);
4658 gsi_insert_after (&si, gimple_build_debug_inline_entry
4659 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4660 GSI_NEW_STMT);
4661 }
4662
4663 if (DECL_INITIAL (fn))
4664 {
4665 if (gimple_block (stmt))
4666 {
4667 tree *var;
4668
4669 prepend_lexical_block (id->block,
4670 remap_blocks (DECL_INITIAL (fn), id));
4671 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4672 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4673 == NULL_TREE));
4674 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4675 otherwise for DWARF the DW_TAG_formal_parameter entries will not be
4676 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4677 under it. The parameters can then be evaluated in the debugger,
4678 but don't show up in backtraces. */
4679 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4680 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4681 {
4682 tree v = *var;
4683 *var = TREE_CHAIN (v);
4684 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4685 BLOCK_VARS (id->block) = v;
4686 }
4687 else
4688 var = &TREE_CHAIN (*var);
4689 }
4690 else
4691 remap_blocks_to_null (DECL_INITIAL (fn), id);
4692 }
4693
4694 /* Return statements in the function body will be replaced by jumps
4695 to the RET_LABEL. */
4696 gcc_assert (DECL_INITIAL (fn));
4697 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4698
4699 /* Find the LHS to which the result of this call is assigned. */
4700 return_slot = NULL;
4701 if (gimple_call_lhs (stmt))
4702 {
4703 modify_dest = gimple_call_lhs (stmt);
4704
4705 /* The function which we are inlining might not return a value,
4706 in which case we should issue a warning that the function
4707 does not return a value. In that case the optimizers will
4708 see that the variable to which the value is assigned was not
4709 initialized. We do not want to issue a warning about that
4710 uninitialized variable. */
4711 if (DECL_P (modify_dest))
4712 TREE_NO_WARNING (modify_dest) = 1;
4713
4714 if (gimple_call_return_slot_opt_p (call_stmt))
4715 {
4716 return_slot = modify_dest;
4717 modify_dest = NULL;
4718 }
4719 }
4720 else
4721 modify_dest = NULL;
4722
4723 /* If we are inlining a call to the C++ operator new, we don't want
4724 to use type based alias analysis on the return value. Otherwise
4725 we may get confused if the compiler sees that the inlined new
4726 function returns a pointer which was just deleted. See bug
4727 33407. */
4728 if (DECL_IS_OPERATOR_NEW (fn))
4729 {
4730 return_slot = NULL;
4731 modify_dest = NULL;
4732 }
4733
4734 /* Declare the return variable for the function. */
4735 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4736
4737 /* Add local vars in this inlined callee to caller. */
4738 add_local_variables (id->src_cfun, cfun, id);
4739
4740 if (dump_enabled_p ())
4741 {
4742 char buf[128];
4743 snprintf (buf, sizeof(buf), "%4.2f",
4744 cg_edge->sreal_frequency ().to_double ());
4745 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4746 call_stmt,
4747 "Inlining %C to %C with frequency %s\n",
4748 id->src_node, id->dst_node, buf);
4749 if (dump_file && (dump_flags & TDF_DETAILS))
4750 {
4751 id->src_node->dump (dump_file);
4752 id->dst_node->dump (dump_file);
4753 }
4754 }
4755
4756 /* This is it. Duplicate the callee body. Assume callee is
4757 pre-gimplified. Note that we must not alter the caller
4758 function in any way before this point, as this CALL_EXPR may be
4759 a self-referential call; if we're calling ourselves, we need to
4760 duplicate our body before altering anything. */
4761 copy_body (id, bb, return_block, NULL);
4762
4763 reset_debug_bindings (id, stmt_gsi);
4764
4765 if (flag_stack_reuse != SR_NONE)
4766 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4767 if (!TREE_THIS_VOLATILE (p))
4768 {
4769 tree *varp = id->decl_map->get (p);
4770 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4771 {
4772 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4773 gimple *clobber_stmt;
4774 TREE_THIS_VOLATILE (clobber) = 1;
4775 clobber_stmt = gimple_build_assign (*varp, clobber);
4776 gimple_set_location (clobber_stmt, gimple_location (stmt));
4777 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4778 }
4779 }
4780
4781 /* Reset the escaped solution. */
4782 if (cfun->gimple_df)
4783 pt_solution_reset (&cfun->gimple_df->escaped);
4784
4785 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4786 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4787 {
4788 size_t nargs = gimple_call_num_args (simtenter_stmt);
4789 vec<tree> *vars = id->dst_simt_vars;
4790 auto_vec<tree> newargs (nargs + vars->length ());
4791 for (size_t i = 0; i < nargs; i++)
4792 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4793 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4794 {
4795 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4796 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4797 }
4798 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4799 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4800 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4801 gsi_replace (&gsi, g, false);
4802 }
4803 vec_free (id->dst_simt_vars);
4804 id->dst_simt_vars = simtvars_save;
4805
4806 /* Clean up. */
4807 if (id->debug_map)
4808 {
4809 delete id->debug_map;
4810 id->debug_map = dst;
4811 }
4812 delete id->decl_map;
4813 id->decl_map = st;
4814
4815 /* Unlink the call's virtual operands before replacing it. */
4816 unlink_stmt_vdef (stmt);
4817 if (gimple_vdef (stmt)
4818 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4819 release_ssa_name (gimple_vdef (stmt));
4820
4821 /* If the inlined function returns a result that we care about,
4822 substitute the GIMPLE_CALL with an assignment of the return
4823 variable to the LHS of the call. That is, if STMT was
4824 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4825 if (use_retvar && gimple_call_lhs (stmt))
4826 {
4827 gimple *old_stmt = stmt;
4828 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4829 gimple_set_location (stmt, gimple_location (old_stmt));
4830 gsi_replace (&stmt_gsi, stmt, false);
4831 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4832 /* Append a clobber for id->retvar if easily possible. */
4833 if (flag_stack_reuse != SR_NONE
4834 && id->retvar
4835 && VAR_P (id->retvar)
4836 && id->retvar != return_slot
4837 && id->retvar != modify_dest
4838 && !TREE_THIS_VOLATILE (id->retvar)
4839 && !is_gimple_reg (id->retvar)
4840 && !stmt_ends_bb_p (stmt))
4841 {
4842 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4843 gimple *clobber_stmt;
4844 TREE_THIS_VOLATILE (clobber) = 1;
4845 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4846 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4847 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4848 }
4849 }
4850 else
4851 {
4852 /* Handle the case of inlining a function with no return
4853 statement, which causes the return value to become undefined. */
4854 if (gimple_call_lhs (stmt)
4855 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4856 {
4857 tree name = gimple_call_lhs (stmt);
4858 tree var = SSA_NAME_VAR (name);
4859 tree def = var ? ssa_default_def (cfun, var) : NULL;
4860
4861 if (def)
4862 {
4863 /* If the variable is used undefined, make this name
4864 undefined via a move. */
4865 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4866 gsi_replace (&stmt_gsi, stmt, true);
4867 }
4868 else
4869 {
4870 if (!var)
4871 {
4872 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4873 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4874 }
4875 /* Otherwise make this variable undefined. */
4876 gsi_remove (&stmt_gsi, true);
4877 set_ssa_default_def (cfun, var, name);
4878 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4879 }
4880 }
4881 /* Replace with a clobber for id->retvar. */
4882 else if (flag_stack_reuse != SR_NONE
4883 && id->retvar
4884 && VAR_P (id->retvar)
4885 && id->retvar != return_slot
4886 && id->retvar != modify_dest
4887 && !TREE_THIS_VOLATILE (id->retvar)
4888 && !is_gimple_reg (id->retvar))
4889 {
4890 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4891 gimple *clobber_stmt;
4892 TREE_THIS_VOLATILE (clobber) = 1;
4893 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4894 gimple_set_location (clobber_stmt, gimple_location (stmt));
4895 gsi_replace (&stmt_gsi, clobber_stmt, false);
4896 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4897 }
4898 else
4899 gsi_remove (&stmt_gsi, true);
4900 }
4901
4902 if (purge_dead_abnormal_edges)
4903 {
4904 gimple_purge_dead_eh_edges (return_block);
4905 gimple_purge_dead_abnormal_call_edges (return_block);
4906 }
4907
4908 /* If the value of the new expression is ignored, that's OK. We
4909 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4910 the equivalent inlined version either. */
4911 if (is_gimple_assign (stmt))
4912 {
4913 gcc_assert (gimple_assign_single_p (stmt)
4914 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4915 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4916 }
4917
4918 id->assign_stmts.release ();
4919 id->add_clobbers_to_eh_landing_pads = 0;
4920
4921 /* Output the inlining info for this abstract function, since it has been
4922 inlined. If we don't do this now, we can lose the information about the
4923 variables in the function when the blocks get blown away as soon as we
4924 remove the cgraph node. */
4925 if (gimple_block (stmt))
4926 (*debug_hooks->outlining_inline_function) (fn);
4927
4928 /* Update callgraph if needed. */
4929 cg_edge->callee->remove ();
4930
4931 id->block = NULL_TREE;
4932 id->retvar = NULL_TREE;
4933 successfully_inlined = true;
4934
4935 egress:
4936 input_location = saved_location;
4937 return successfully_inlined;
4938 }
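
/* Illustrative sketch (not compiled) of the overall transformation,
   assuming this caller and callee:

       static int sq (int i) { return i * i; }
       int f (int x) { return sq (x + 1); }

   After expand_call_inline the body of f looks roughly like

       i.0 = x + 1;            <- initialize_inlined_parameters
       retval.1 = i.0 * i.0;   <- copied body; return became an assignment
       _2 = retval.1;          <- the GIMPLE_CALL was replaced
       return _2;

   The temporaries "i.0", "retval.1" and "_2" are invented names.  */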
4939
4940 /* Expand call statements reachable from STMT_P.
4941 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4942 in a MODIFY_EXPR. */
4943
4944 static bool
4945 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4946 {
4947 gimple_stmt_iterator gsi;
4948 bool inlined = false;
4949
4950 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4951 {
4952 gimple *stmt = gsi_stmt (gsi);
4953 gsi_prev (&gsi);
4954
4955 if (is_gimple_call (stmt)
4956 && !gimple_call_internal_p (stmt))
4957 inlined |= expand_call_inline (bb, stmt, id);
4958 }
4959
4960 return inlined;
4961 }
4962
4963
4964 /* Walk all basic blocks created after FIRST and try to fold every statement
4965 in the STATEMENTS pointer set. */
4966
4967 static void
4968 fold_marked_statements (int first, hash_set<gimple *> *statements)
4969 {
4970 for (; first < last_basic_block_for_fn (cfun); first++)
4971 if (BASIC_BLOCK_FOR_FN (cfun, first))
4972 {
4973 gimple_stmt_iterator gsi;
4974
4975 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4976 !gsi_end_p (gsi);
4977 gsi_next (&gsi))
4978 if (statements->contains (gsi_stmt (gsi)))
4979 {
4980 gimple *old_stmt = gsi_stmt (gsi);
4981 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4982
4983 if (old_decl && fndecl_built_in_p (old_decl))
4984 {
4985 /* Folding builtins can create multiple instructions,
4986 we need to look at all of them. */
4987 gimple_stmt_iterator i2 = gsi;
4988 gsi_prev (&i2);
4989 if (fold_stmt (&gsi))
4990 {
4991 gimple *new_stmt;
4992 /* If a builtin at the end of a bb folded into nothing,
4993 the following loop won't work. */
4994 if (gsi_end_p (gsi))
4995 {
4996 cgraph_update_edges_for_call_stmt (old_stmt,
4997 old_decl, NULL);
4998 break;
4999 }
5000 if (gsi_end_p (i2))
5001 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5002 else
5003 gsi_next (&i2);
5004 while (1)
5005 {
5006 new_stmt = gsi_stmt (i2);
5007 update_stmt (new_stmt);
5008 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5009 new_stmt);
5010
5011 if (new_stmt == gsi_stmt (gsi))
5012 {
5013 /* It is okay to check only for the very last
5014 of these statements. If it is a throwing
5015 statement nothing will change. If it isn't
5016 this can remove EH edges. This would only
5017 be wrong if some intermediate stmt could
5018 throw but the last one doesn't; that would
5019 mean we'd have to split the block, which we
5020 can't do here and would lose anyway. And as
5021 builtins probably never throw, this all
5022 is moot anyway. */
5023 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5024 new_stmt))
5025 gimple_purge_dead_eh_edges (
5026 BASIC_BLOCK_FOR_FN (cfun, first));
5027 break;
5028 }
5029 gsi_next (&i2);
5030 }
5031 }
5032 }
5033 else if (fold_stmt (&gsi))
5034 {
5035 /* Re-read the statement from GSI as fold_stmt() may
5036 have changed it. */
5037 gimple *new_stmt = gsi_stmt (gsi);
5038 update_stmt (new_stmt);
5039
5040 if (is_gimple_call (old_stmt)
5041 || is_gimple_call (new_stmt))
5042 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5043 new_stmt);
5044
5045 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5046 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5047 first));
5048 }
5049 }
5050 }
5051 }
5052
5053 /* Expand calls to inline functions in the body of FN. */
5054
5055 unsigned int
5056 optimize_inline_calls (tree fn)
5057 {
5058 copy_body_data id;
5059 basic_block bb;
5060 int last = n_basic_blocks_for_fn (cfun);
5061 bool inlined_p = false;
5062
5063 /* Clear out ID. */
5064 memset (&id, 0, sizeof (id));
5065
5066 id.src_node = id.dst_node = cgraph_node::get (fn);
5067 gcc_assert (id.dst_node->definition);
5068 id.dst_fn = fn;
5069 /* Or any functions that aren't finished yet. */
5070 if (current_function_decl)
5071 id.dst_fn = current_function_decl;
5072
5073 id.copy_decl = copy_decl_maybe_to_var;
5074 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5075 id.transform_new_cfg = false;
5076 id.transform_return_to_modify = true;
5077 id.transform_parameter = true;
5078 id.transform_lang_insert_block = NULL;
5079 id.statements_to_fold = new hash_set<gimple *>;
5080
5081 push_gimplify_context ();
5082
5083 /* We make no attempts to keep dominance info up-to-date. */
5084 free_dominance_info (CDI_DOMINATORS);
5085 free_dominance_info (CDI_POST_DOMINATORS);
5086
5087 /* Register specific gimple functions. */
5088 gimple_register_cfg_hooks ();
5089
5090 /* Reach the trees by walking over the CFG, and note the
5091 enclosing basic-blocks in the call edges. */
5092 /* We walk the blocks going forward, because inlined function bodies
5093 will split id->current_basic_block, and the new blocks will
5094 follow it; we'll trudge through them, processing their CALL_EXPRs
5095 along the way. */
5096 FOR_EACH_BB_FN (bb, cfun)
5097 inlined_p |= gimple_expand_calls_inline (bb, &id);
5098
5099 pop_gimplify_context (NULL);
5100
5101 if (flag_checking)
5102 {
5103 struct cgraph_edge *e;
5104
5105 id.dst_node->verify ();
5106
5107 /* Double check that we inlined everything we are supposed to inline. */
5108 for (e = id.dst_node->callees; e; e = e->next_callee)
5109 gcc_assert (e->inline_failed);
5110 }
5111
5112 /* Fold queued statements. */
5113 update_max_bb_count ();
5114 fold_marked_statements (last, id.statements_to_fold);
5115 delete id.statements_to_fold;
5116
5117 gcc_assert (!id.debug_stmts.exists ());
5118
5119 /* If we didn't inline into the function there is nothing to do. */
5120 if (!inlined_p)
5121 return 0;
5122
5123 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5124 number_blocks (fn);
5125
5126 delete_unreachable_blocks_update_callgraph (&id);
5127 if (flag_checking)
5128 id.dst_node->verify ();
5129
5130 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5131 not possible yet - the IPA passes might make various functions not
5132 throw, and they don't care to proactively update local EH info. This is
5133 done later in the fixup_cfg pass, which also executes the verification. */
5134 return (TODO_update_ssa
5135 | TODO_cleanup_cfg
5136 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5137 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5138 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5139 ? TODO_rebuild_frequencies : 0));
5140 }
5141
5142 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5143
5144 tree
5145 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5146 {
5147 enum tree_code code = TREE_CODE (*tp);
5148 enum tree_code_class cl = TREE_CODE_CLASS (code);
5149
5150 /* We make copies of most nodes. */
5151 if (IS_EXPR_CODE_CLASS (cl)
5152 || code == TREE_LIST
5153 || code == TREE_VEC
5154 || code == TYPE_DECL
5155 || code == OMP_CLAUSE)
5156 {
5157 /* Because the chain gets clobbered when we make a copy, we save it
5158 here. */
5159 tree chain = NULL_TREE, new_tree;
5160
5161 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5162 chain = TREE_CHAIN (*tp);
5163
5164 /* Copy the node. */
5165 new_tree = copy_node (*tp);
5166
5167 *tp = new_tree;
5168
5169 /* Now, restore the chain, if appropriate. That will cause
5170 walk_tree to walk into the chain as well. */
5171 if (code == PARM_DECL
5172 || code == TREE_LIST
5173 || code == OMP_CLAUSE)
5174 TREE_CHAIN (*tp) = chain;
5175
5176 /* For now, we don't update BLOCKs when we make copies. So, we
5177 have to nullify all BIND_EXPRs. */
5178 if (TREE_CODE (*tp) == BIND_EXPR)
5179 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5180 }
5181 else if (code == CONSTRUCTOR)
5182 {
5183 /* CONSTRUCTOR nodes need special handling because
5184 we need to duplicate the vector of elements. */
5185 tree new_tree;
5186
5187 new_tree = copy_node (*tp);
5188 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5189 *tp = new_tree;
5190 }
5191 else if (code == STATEMENT_LIST)
5192 /* We used to just abort on STATEMENT_LIST, but we can run into them
5193 with statement-expressions (c++/40975). */
5194 copy_statement_list (tp);
5195 else if (TREE_CODE_CLASS (code) == tcc_type)
5196 *walk_subtrees = 0;
5197 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5198 *walk_subtrees = 0;
5199 else if (TREE_CODE_CLASS (code) == tcc_constant)
5200 *walk_subtrees = 0;
5201 return NULL_TREE;
5202 }
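
/* Illustrative sketch, not part of the original source: copy_tree_r is a
   walk_tree callback, so a minimal way to deep-copy a GENERIC expression
   EXPR (assuming no DATA argument is needed) is

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Afterwards COPY shares no expression nodes with EXPR, while most decls,
   types and constants remain shared because copy_tree_r clears
   *WALK_SUBTREES for those classes.  */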
5203
5204 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5205 information indicating to what new SAVE_EXPR this one should be mapped,
5206 use that one. Otherwise, create a new node, enter it in ST, and map it
5207 to itself so it is not remapped again. */
5208
5209 static void
5210 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5211 {
5212 tree *n;
5213 tree t;
5214
5215 /* See if we already encountered this SAVE_EXPR. */
5216 n = st->get (*tp);
5217
5218 /* If we didn't already remap this SAVE_EXPR, do so now. */
5219 if (!n)
5220 {
5221 t = copy_node (*tp);
5222
5223 /* Remember this SAVE_EXPR. */
5224 st->put (*tp, t);
5225 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5226 st->put (t, t);
5227 }
5228 else
5229 {
5230 /* We've already walked into this SAVE_EXPR; don't do it again. */
5231 *walk_subtrees = 0;
5232 t = *n;
5233 }
5234
5235 /* Replace this SAVE_EXPR with the copy. */
5236 *tp = t;
5237 }
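
/* Illustrative walkthrough, not part of the original source: assume the
   SAVE_EXPR S = SAVE_EXPR <a + b> occurs twice in the tree being copied.
   On the first visit a copy S' is made, both S -> S' and S' -> S' are
   recorded in ST, and walk_tree goes on to copy the operand; on the second
   visit the existing mapping is found, *TP is replaced by S' and
   *WALK_SUBTREES is cleared so the operand is not copied again.  Mapping
   S' to itself guards against remapping an already-remapped node.  */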
5238
5239 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5240 label, copies the declaration and enters it in the decl map in DATA (which
5241 is really a 'copy_body_data *'). */
5242
5243 static tree
5244 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5245 bool *handled_ops_p ATTRIBUTE_UNUSED,
5246 struct walk_stmt_info *wi)
5247 {
5248 copy_body_data *id = (copy_body_data *) wi->info;
5249 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5250
5251 if (stmt)
5252 {
5253 tree decl = gimple_label_label (stmt);
5254
5255 /* Copy the decl and remember the copy. */
5256 insert_decl_map (id, decl, id->copy_decl (decl, id));
5257 }
5258
5259 return NULL_TREE;
5260 }
5261
5262 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5263 struct walk_stmt_info *wi);
5264
5265 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5266 Using the decl map of the 'copy_body_data' passed in WI->info, remaps
5267 all local declarations to appropriate replacements in gimple
5268 operands. */
5269
5270 static tree
5271 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5272 {
5273 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5274 copy_body_data *id = (copy_body_data *) wi->info;
5275 hash_map<tree, tree> *st = id->decl_map;
5276 tree *n;
5277 tree expr = *tp;
5278
5279 /* For recursive invocations this is no longer the LHS itself. */
5280 bool is_lhs = wi->is_lhs;
5281 wi->is_lhs = false;
5282
5283 if (TREE_CODE (expr) == SSA_NAME)
5284 {
5285 *tp = remap_ssa_name (*tp, id);
5286 *walk_subtrees = 0;
5287 if (is_lhs)
5288 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5289 }
5290 /* Only a local declaration (variable or label). */
5291 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5292 || TREE_CODE (expr) == LABEL_DECL)
5293 {
5294 /* Lookup the declaration. */
5295 n = st->get (expr);
5296
5297 /* If it's there, remap it. */
5298 if (n)
5299 *tp = *n;
5300 *walk_subtrees = 0;
5301 }
5302 else if (TREE_CODE (expr) == STATEMENT_LIST
5303 || TREE_CODE (expr) == BIND_EXPR
5304 || TREE_CODE (expr) == SAVE_EXPR)
5305 gcc_unreachable ();
5306 else if (TREE_CODE (expr) == TARGET_EXPR)
5307 {
5308 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5309 It's OK for this to happen if it was part of a subtree that
5310 isn't immediately expanded, such as operand 2 of another
5311 TARGET_EXPR. */
5312 if (!TREE_OPERAND (expr, 1))
5313 {
5314 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5315 TREE_OPERAND (expr, 3) = NULL_TREE;
5316 }
5317 }
5318 else if (TREE_CODE (expr) == OMP_CLAUSE)
5319 {
5320 /* Before the omplower pass completes, some OMP clauses can contain
5321 sequences that are neither copied by gimple_seq_copy nor walked by
5322 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5323 in those situations, we have to copy and process them explicitly. */
5324
5325 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5326 {
5327 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5328 seq = duplicate_remap_omp_clause_seq (seq, wi);
5329 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5330 }
5331 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5332 {
5333 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5334 seq = duplicate_remap_omp_clause_seq (seq, wi);
5335 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5336 }
5337 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5338 {
5339 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5340 seq = duplicate_remap_omp_clause_seq (seq, wi);
5341 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5342 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5343 seq = duplicate_remap_omp_clause_seq (seq, wi);
5344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5345 }
5346 }
5347
5348 /* Keep iterating. */
5349 return NULL_TREE;
5350 }
5351
5352
5353 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5354 Using the decl map of the 'copy_body_data' passed in WI->info, remaps
5355 all local declarations to appropriate replacements in gimple
5356 statements. */
5357
5358 static tree
5359 replace_locals_stmt (gimple_stmt_iterator *gsip,
5360 bool *handled_ops_p ATTRIBUTE_UNUSED,
5361 struct walk_stmt_info *wi)
5362 {
5363 copy_body_data *id = (copy_body_data *) wi->info;
5364 gimple *gs = gsi_stmt (*gsip);
5365
5366 if (gbind *stmt = dyn_cast <gbind *> (gs))
5367 {
5368 tree block = gimple_bind_block (stmt);
5369
5370 if (block)
5371 {
5372 remap_block (&block, id);
5373 gimple_bind_set_block (stmt, block);
5374 }
5375
5376 /* This will remap a lot of the same decls again, but this should be
5377 harmless. */
5378 if (gimple_bind_vars (stmt))
5379 {
5380 tree old_var, decls = gimple_bind_vars (stmt);
5381
5382 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5383 if (!can_be_nonlocal (old_var, id)
5384 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5385 remap_decl (old_var, id);
5386
5387 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5388 id->prevent_decl_creation_for_types = true;
5389 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5390 id->prevent_decl_creation_for_types = false;
5391 }
5392 }
5393
5394 /* Keep iterating. */
5395 return NULL_TREE;
5396 }
5397
5398 /* Create a copy of SEQ and remap all decls in it. */
5399
5400 static gimple_seq
5401 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5402 {
5403 if (!seq)
5404 return NULL;
5405
5406 /* If there are any labels in OMP sequences, they can only be referred to
5407 within the sequence itself, so we can do both steps here. */
5408 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5409 gimple_seq copy = gimple_seq_copy (seq);
5410 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5411 return copy;
5412 }
5413
5414 /* Copies everything in SEQ and replaces variables and labels local to
5415 current_function_decl. */
5416
5417 gimple_seq
5418 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5419 {
5420 copy_body_data id;
5421 struct walk_stmt_info wi;
5422 gimple_seq copy;
5423
5424 /* There's nothing to do for an empty sequence. */
5425 if (seq == NULL)
5426 return seq;
5427
5428 /* Set up ID. */
5429 memset (&id, 0, sizeof (id));
5430 id.src_fn = current_function_decl;
5431 id.dst_fn = current_function_decl;
5432 id.src_cfun = cfun;
5433 id.decl_map = new hash_map<tree, tree>;
5434 id.debug_map = NULL;
5435
5436 id.copy_decl = copy_decl_no_change;
5437 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5438 id.transform_new_cfg = false;
5439 id.transform_return_to_modify = false;
5440 id.transform_parameter = false;
5441 id.transform_lang_insert_block = NULL;
5442
5443 /* Walk the tree once to find local labels. */
5444 memset (&wi, 0, sizeof (wi));
5445 hash_set<tree> visited;
5446 wi.info = &id;
5447 wi.pset = &visited;
5448 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5449
5450 copy = gimple_seq_copy (seq);
5451
5452 /* Walk the copy, remapping decls. */
5453 memset (&wi, 0, sizeof (wi));
5454 wi.info = &id;
5455 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5456
5457 /* Clean up. */
5458 delete id.decl_map;
5459 if (id.debug_map)
5460 delete id.debug_map;
5461 if (id.dependence_map)
5462 {
5463 delete id.dependence_map;
5464 id.dependence_map = NULL;
5465 }
5466
5467 return copy;
5468 }
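
/* Illustrative sketch, not part of the original source (hypothetical
   caller): given a 'gbind *bind_stmt', the body of the GIMPLE_BIND can be
   duplicated without disturbing the original by writing

     gimple_seq body = gimple_bind_body (bind_stmt);
     gimple_seq body_copy = copy_gimple_seq_and_replace_locals (body);

   BODY_COPY then refers to fresh copies of every local VAR_DECL and
   LABEL_DECL of current_function_decl, while globals, statics and decls
   of other functions are left untouched.  */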
5469
5470
5471 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5472
5473 static tree
5474 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5475 {
5476 if (*tp == data)
5477 return (tree) data;
5478 else
5479 return NULL;
5480 }
5481
5482 DEBUG_FUNCTION bool
5483 debug_find_tree (tree top, tree search)
5484 {
5485 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5486 }
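
/* Usage example, not part of the original source: from the debugger one can
   check whether a node occurs somewhere below another, e.g.

     (gdb) call debug_find_tree (some_expr, suspect_tree)

   where SOME_EXPR and SUSPECT_TREE are hypothetical tree values in the
   debugging session; the call returns true iff walk_tree can reach
   SUSPECT_TREE from SOME_EXPR.  */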
5487
5488
5489 /* Declare the variables created by the inliner. Add all the variables in
5490 VARS to BLOCK. */
5491
5492 static void
5493 declare_inline_vars (tree block, tree vars)
5494 {
5495 tree t;
5496 for (t = vars; t; t = DECL_CHAIN (t))
5497 {
5498 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5499 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5500 add_local_decl (cfun, t);
5501 }
5502
5503 if (block)
5504 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5505 }
5506
5507 /* Finish up COPY, a copy of DECL. The DECL originally was in the FROM_FN
5508 (ID->src_fn), but now it will be in the TO_FN (ID->dst_fn); adjust its
5509 debug info, RTL, mode and context accordingly. */
5510
5511 tree
5512 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5513 {
5514 /* Don't generate debug information for the copy if we wouldn't have
5515 generated it for the original either. */
5516 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5517 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5518
5519 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5520 declaration inspired this copy. */
5521 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5522
5523 /* The new variable/label has no RTL, yet. */
5524 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5525 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5526 SET_DECL_RTL (copy, 0);
5527 /* For vector typed decls make sure to update DECL_MODE according
5528 to the new function context. */
5529 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5530 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5531
5532 /* These args would always appear unused, if not for this. */
5533 TREE_USED (copy) = 1;
5534
5535 /* Set the context for the new declaration. */
5536 if (!DECL_CONTEXT (decl))
5537 /* Globals stay global. */
5538 ;
5539 else if (DECL_CONTEXT (decl) != id->src_fn)
5540 /* Things that weren't in the scope of the function we're inlining
5541 from aren't in the scope we're inlining to, either. */
5542 ;
5543 else if (TREE_STATIC (decl))
5544 /* Function-scoped static variables should stay in the original
5545 function. */
5546 ;
5547 else
5548 {
5549 /* Ordinary automatic local variables are now in the scope of the
5550 new function. */
5551 DECL_CONTEXT (copy) = id->dst_fn;
5552 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5553 {
5554 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5555 DECL_ATTRIBUTES (copy)
5556 = tree_cons (get_identifier ("omp simt private"), NULL,
5557 DECL_ATTRIBUTES (copy));
5558 id->dst_simt_vars->safe_push (copy);
5559 }
5560 }
5561
5562 return copy;
5563 }
5564
5565 static tree
5566 copy_decl_to_var (tree decl, copy_body_data *id)
5567 {
5568 tree copy, type;
5569
5570 gcc_assert (TREE_CODE (decl) == PARM_DECL
5571 || TREE_CODE (decl) == RESULT_DECL);
5572
5573 type = TREE_TYPE (decl);
5574
5575 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5576 VAR_DECL, DECL_NAME (decl), type);
5577 if (DECL_PT_UID_SET_P (decl))
5578 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5579 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5580 TREE_READONLY (copy) = TREE_READONLY (decl);
5581 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5582 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5583
5584 return copy_decl_for_dup_finish (id, decl, copy);
5585 }
5586
5587 /* Like copy_decl_to_var, but create a return slot object instead of a
5588 pointer variable for return by invisible reference. */
5589
5590 static tree
5591 copy_result_decl_to_var (tree decl, copy_body_data *id)
5592 {
5593 tree copy, type;
5594
5595 gcc_assert (TREE_CODE (decl) == PARM_DECL
5596 || TREE_CODE (decl) == RESULT_DECL);
5597
5598 type = TREE_TYPE (decl);
5599 if (DECL_BY_REFERENCE (decl))
5600 type = TREE_TYPE (type);
5601
5602 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5603 VAR_DECL, DECL_NAME (decl), type);
5604 if (DECL_PT_UID_SET_P (decl))
5605 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5606 TREE_READONLY (copy) = TREE_READONLY (decl);
5607 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5608 if (!DECL_BY_REFERENCE (decl))
5609 {
5610 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5611 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5612 }
5613
5614 return copy_decl_for_dup_finish (id, decl, copy);
5615 }
5616
5617 tree
5618 copy_decl_no_change (tree decl, copy_body_data *id)
5619 {
5620 tree copy;
5621
5622 copy = copy_node (decl);
5623
5624 /* The COPY is not abstract; it will be generated in DST_FN. */
5625 DECL_ABSTRACT_P (copy) = false;
5626 lang_hooks.dup_lang_specific_decl (copy);
5627
5628 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5629 been taken; it's for internal bookkeeping in expand_goto_internal. */
5630 if (TREE_CODE (copy) == LABEL_DECL)
5631 {
5632 TREE_ADDRESSABLE (copy) = 0;
5633 LABEL_DECL_UID (copy) = -1;
5634 }
5635
5636 return copy_decl_for_dup_finish (id, decl, copy);
5637 }
5638
5639 static tree
5640 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5641 {
5642 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5643 return copy_decl_to_var (decl, id);
5644 else
5645 return copy_decl_no_change (decl, id);
5646 }
5647
5648 /* Return a copy of the function's argument tree. */
5649 static tree
5650 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5651 bitmap args_to_skip, tree *vars)
5652 {
5653 tree arg, *parg;
5654 tree new_parm = NULL;
5655 int i = 0;
5656
5657 parg = &new_parm;
5658
5659 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5660 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5661 {
5662 tree new_tree = remap_decl (arg, id);
5663 if (TREE_CODE (new_tree) != PARM_DECL)
5664 new_tree = id->copy_decl (arg, id);
5665 lang_hooks.dup_lang_specific_decl (new_tree);
5666 *parg = new_tree;
5667 parg = &DECL_CHAIN (new_tree);
5668 }
5669 else if (!id->decl_map->get (arg))
5670 {
5671 /* Make an equivalent VAR_DECL. If the argument was used
5672 as a temporary variable later in the function, its uses will be
5673 replaced by this local variable. */
5674 tree var = copy_decl_to_var (arg, id);
5675 insert_decl_map (id, arg, var);
5676 /* Declare this new variable. */
5677 DECL_CHAIN (var) = *vars;
5678 *vars = var;
5679 }
5680 return new_parm;
5681 }
5682
5683 /* Return a copy of the function's static chain. */
5684 static tree
5685 copy_static_chain (tree static_chain, copy_body_data * id)
5686 {
5687 tree *chain_copy, *pvar;
5688
5689 chain_copy = &static_chain;
5690 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5691 {
5692 tree new_tree = remap_decl (*pvar, id);
5693 lang_hooks.dup_lang_specific_decl (new_tree);
5694 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5695 *pvar = new_tree;
5696 }
5697 return static_chain;
5698 }
5699
5700 /* Return true if the function is allowed to be versioned.
5701 This is a guard for the versioning functionality. */
5702
5703 bool
5704 tree_versionable_function_p (tree fndecl)
5705 {
5706 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5707 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5708 }
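
/* Illustrative sketch, not part of the original source (hypothetical
   caller): IPA transforms use this predicate as a gate before attempting
   to create a clone of a cgraph node NODE, e.g.

     if (!tree_versionable_function_p (node->decl))
       return false;   /* "noclone" attribute or a copy_forbidden reason.  */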
5709
5710 /* Delete all unreachable basic blocks and update the callgraph.
5711 Doing so is somewhat nontrivial because we need to update all clones and
5712 remove inline functions that become unreachable. */
5713
5714 static bool
5715 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5716 {
5717 bool changed = false;
5718 basic_block b, next_bb;
5719
5720 find_unreachable_blocks ();
5721
5722 /* Delete all unreachable basic blocks. */
5723
5724 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5725 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5726 {
5727 next_bb = b->next_bb;
5728
5729 if (!(b->flags & BB_REACHABLE))
5730 {
5731 gimple_stmt_iterator bsi;
5732
5733 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5734 {
5735 struct cgraph_edge *e;
5736 struct cgraph_node *node;
5737
5738 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5739
5740 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5741 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5742 {
5743 if (!e->inline_failed)
5744 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5745 else
5746 e->remove ();
5747 }
5748 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5749 && id->dst_node->clones)
5750 for (node = id->dst_node->clones; node != id->dst_node;)
5751 {
5752 node->remove_stmt_references (gsi_stmt (bsi));
5753 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5754 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5755 {
5756 if (!e->inline_failed)
5757 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5758 else
5759 e->remove ();
5760 }
5761
5762 if (node->clones)
5763 node = node->clones;
5764 else if (node->next_sibling_clone)
5765 node = node->next_sibling_clone;
5766 else
5767 {
5768 while (node != id->dst_node && !node->next_sibling_clone)
5769 node = node->clone_of;
5770 if (node != id->dst_node)
5771 node = node->next_sibling_clone;
5772 }
5773 }
5774 }
5775 delete_basic_block (b);
5776 changed = true;
5777 }
5778 }
5779
5780 return changed;
5781 }
5782
5783 /* Update clone info after duplication. */
5784
5785 static void
5786 update_clone_info (copy_body_data * id)
5787 {
5788 struct cgraph_node *node;
5789 if (!id->dst_node->clones)
5790 return;
5791 for (node = id->dst_node->clones; node != id->dst_node;)
5792 {
5793 /* First update replace maps to match the new body. */
5794 if (node->clone.tree_map)
5795 {
5796 unsigned int i;
5797 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5798 {
5799 struct ipa_replace_map *replace_info;
5800 replace_info = (*node->clone.tree_map)[i];
5801 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5802 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5803 }
5804 }
5805 if (node->clones)
5806 node = node->clones;
5807 else if (node->next_sibling_clone)
5808 node = node->next_sibling_clone;
5809 else
5810 {
5811 while (node != id->dst_node && !node->next_sibling_clone)
5812 node = node->clone_of;
5813 if (node != id->dst_node)
5814 node = node->next_sibling_clone;
5815 }
5816 }
5817 }
5818
5819 /* Create a copy of a function's tree.
5820 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5821 of the original function and the new copied function
5822 respectively. In case we want to replace a DECL
5823 tree with another tree while duplicating the function's
5824 body, TREE_MAP represents the mapping between these
5825 trees. If UPDATE_CLONES is set, the call_stmt fields
5826 of edges of clones of the function will be updated.
5827
5828 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5829 from the new version.
5830 If SKIP_RETURN is true, the new version will return void.
5831 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5832 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5833 */
5834 void
5835 tree_function_versioning (tree old_decl, tree new_decl,
5836 vec<ipa_replace_map *, va_gc> *tree_map,
5837 bool update_clones, bitmap args_to_skip,
5838 bool skip_return, bitmap blocks_to_copy,
5839 basic_block new_entry)
5840 {
5841 struct cgraph_node *old_version_node;
5842 struct cgraph_node *new_version_node;
5843 copy_body_data id;
5844 tree p;
5845 unsigned i;
5846 struct ipa_replace_map *replace_info;
5847 basic_block old_entry_block, bb;
5848 auto_vec<gimple *, 10> init_stmts;
5849 tree vars = NULL_TREE;
5850 bitmap debug_args_to_skip = args_to_skip;
5851
5852 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5853 && TREE_CODE (new_decl) == FUNCTION_DECL);
5854 DECL_POSSIBLY_INLINED (old_decl) = 1;
5855
5856 old_version_node = cgraph_node::get (old_decl);
5857 gcc_checking_assert (old_version_node);
5858 new_version_node = cgraph_node::get (new_decl);
5859 gcc_checking_assert (new_version_node);
5860
5861 /* Copy over debug args. */
5862 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5863 {
5864 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5865 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5866 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5867 old_debug_args = decl_debug_args_lookup (old_decl);
5868 if (old_debug_args)
5869 {
5870 new_debug_args = decl_debug_args_insert (new_decl);
5871 *new_debug_args = vec_safe_copy (*old_debug_args);
5872 }
5873 }
5874
5875 /* Output the inlining info for this abstract function, since it has been
5876 inlined. If we don't do this now, we can lose the information about the
5877 variables in the function when the blocks get blown away as soon as we
5878 remove the cgraph node. */
5879 (*debug_hooks->outlining_inline_function) (old_decl);
5880
5881 DECL_ARTIFICIAL (new_decl) = 1;
5882 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5883 if (DECL_ORIGIN (old_decl) == old_decl)
5884 old_version_node->used_as_abstract_origin = true;
5885 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5886
5887 /* Prepare the data structures for the tree copy. */
5888 memset (&id, 0, sizeof (id));
5889
5890 /* Collect statements that will need folding after the body is copied. */
5891 id.statements_to_fold = new hash_set<gimple *>;
5892
5893 id.decl_map = new hash_map<tree, tree>;
5894 id.debug_map = NULL;
5895 id.src_fn = old_decl;
5896 id.dst_fn = new_decl;
5897 id.src_node = old_version_node;
5898 id.dst_node = new_version_node;
5899 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5900 id.blocks_to_copy = blocks_to_copy;
5901
5902 id.copy_decl = copy_decl_no_change;
5903 id.transform_call_graph_edges
5904 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5905 id.transform_new_cfg = true;
5906 id.transform_return_to_modify = false;
5907 id.transform_parameter = false;
5908 id.transform_lang_insert_block = NULL;
5909
5910 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5911 (DECL_STRUCT_FUNCTION (old_decl));
5912 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5913 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5914 initialize_cfun (new_decl, old_decl,
5915 new_entry ? new_entry->count : old_entry_block->count);
5916 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5917 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5918 = id.src_cfun->gimple_df->ipa_pta;
5919
5920 /* Copy the function's static chain. */
5921 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5922 if (p)
5923 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5924 = copy_static_chain (p, &id);
5925
5926 /* If there's a tree_map, prepare for substitution. */
5927 if (tree_map)
5928 for (i = 0; i < tree_map->length (); i++)
5929 {
5930 gimple *init;
5931 replace_info = (*tree_map)[i];
5932 if (replace_info->replace_p)
5933 {
5934 int parm_num = -1;
5935 if (!replace_info->old_tree)
5936 {
5937 int p = replace_info->parm_num;
5938 tree parm;
5939 tree req_type, new_type;
5940
5941 for (parm = DECL_ARGUMENTS (old_decl); p;
5942 parm = DECL_CHAIN (parm))
5943 p--;
5944 replace_info->old_tree = parm;
5945 parm_num = replace_info->parm_num;
5946 req_type = TREE_TYPE (parm);
5947 new_type = TREE_TYPE (replace_info->new_tree);
5948 if (!useless_type_conversion_p (req_type, new_type))
5949 {
5950 if (fold_convertible_p (req_type, replace_info->new_tree))
5951 replace_info->new_tree
5952 = fold_build1 (NOP_EXPR, req_type,
5953 replace_info->new_tree);
5954 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5955 replace_info->new_tree
5956 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5957 replace_info->new_tree);
5958 else
5959 {
5960 if (dump_file)
5961 {
5962 fprintf (dump_file, " const ");
5963 print_generic_expr (dump_file,
5964 replace_info->new_tree);
5965 fprintf (dump_file,
5966 " can't be converted to param ");
5967 print_generic_expr (dump_file, parm);
5968 fprintf (dump_file, "\n");
5969 }
5970 replace_info->old_tree = NULL;
5971 }
5972 }
5973 }
5974 else
5975 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5976 if (replace_info->old_tree)
5977 {
5978 init = setup_one_parameter (&id, replace_info->old_tree,
5979 replace_info->new_tree, id.src_fn,
5980 NULL,
5981 &vars);
5982 if (init)
5983 init_stmts.safe_push (init);
5984 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5985 {
5986 if (parm_num == -1)
5987 {
5988 tree parm;
5989 int p;
5990 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5991 parm = DECL_CHAIN (parm), p++)
5992 if (parm == replace_info->old_tree)
5993 {
5994 parm_num = p;
5995 break;
5996 }
5997 }
5998 if (parm_num != -1)
5999 {
6000 if (debug_args_to_skip == args_to_skip)
6001 {
6002 debug_args_to_skip = BITMAP_ALLOC (NULL);
6003 bitmap_copy (debug_args_to_skip, args_to_skip);
6004 }
6005 bitmap_clear_bit (debug_args_to_skip, parm_num);
6006 }
6007 }
6008 }
6009 }
6010 }
6011 /* Copy the function's arguments. */
6012 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6013 DECL_ARGUMENTS (new_decl)
6014 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6015 args_to_skip, &vars);
6016
6017 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6018 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6019
6020 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6021
6022 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6023 /* Add local vars. */
6024 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6025
6026 if (DECL_RESULT (old_decl) == NULL_TREE)
6027 ;
6028 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6029 {
6030 DECL_RESULT (new_decl)
6031 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6032 RESULT_DECL, NULL_TREE, void_type_node);
6033 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6034 cfun->returns_struct = 0;
6035 cfun->returns_pcc_struct = 0;
6036 }
6037 else
6038 {
6039 tree old_name;
6040 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6041 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6042 if (gimple_in_ssa_p (id.src_cfun)
6043 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6044 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6045 {
6046 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6047 insert_decl_map (&id, old_name, new_name);
6048 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6049 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6050 }
6051 }
6052
6053 /* Set up the destination function's loop tree. */
6054 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6055 {
6056 cfun->curr_properties &= ~PROP_loops;
6057 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6058 cfun->curr_properties |= PROP_loops;
6059 }
6060
6061 /* Copy the function's body. */
6062 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6063 new_entry);
6064
6065 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6066 number_blocks (new_decl);
6067
6068 /* We want to create the BB unconditionally, so that the addition of
6069 debug stmts doesn't affect BB count, which may in the end cause
6070 codegen differences. */
6071 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6072 while (init_stmts.length ())
6073 insert_init_stmt (&id, bb, init_stmts.pop ());
6074 update_clone_info (&id);
6075
6076 /* Remap the nonlocal_goto_save_area, if any. */
6077 if (cfun->nonlocal_goto_save_area)
6078 {
6079 struct walk_stmt_info wi;
6080
6081 memset (&wi, 0, sizeof (wi));
6082 wi.info = &id;
6083 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6084 }
6085
6086 /* Clean up. */
6087 delete id.decl_map;
6088 if (id.debug_map)
6089 delete id.debug_map;
6090 free_dominance_info (CDI_DOMINATORS);
6091 free_dominance_info (CDI_POST_DOMINATORS);
6092
6093 update_max_bb_count ();
6094 fold_marked_statements (0, id.statements_to_fold);
6095 delete id.statements_to_fold;
6096 delete_unreachable_blocks_update_callgraph (&id);
6097 if (id.dst_node->definition)
6098 cgraph_edge::rebuild_references ();
6099 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6100 {
6101 calculate_dominance_info (CDI_DOMINATORS);
6102 fix_loop_structure (NULL);
6103 }
6104 update_ssa (TODO_update_ssa);
6105
6106 /* After partial cloning we need to rescale frequencies, so they are
6107 within the proper range in the cloned function. */
6108 if (new_entry)
6109 {
6110 struct cgraph_edge *e;
6111 rebuild_frequencies ();
6112
6113 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6114 for (e = new_version_node->callees; e; e = e->next_callee)
6115 {
6116 basic_block bb = gimple_bb (e->call_stmt);
6117 e->count = bb->count;
6118 }
6119 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6120 {
6121 basic_block bb = gimple_bb (e->call_stmt);
6122 e->count = bb->count;
6123 }
6124 }
6125
6126 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6127 {
6128 tree parm;
6129 vec<tree, va_gc> **debug_args = NULL;
6130 unsigned int len = 0;
6131 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6132 parm; parm = DECL_CHAIN (parm), i++)
6133 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6134 {
6135 tree ddecl;
6136
6137 if (debug_args == NULL)
6138 {
6139 debug_args = decl_debug_args_insert (new_decl);
6140 len = vec_safe_length (*debug_args);
6141 }
6142 ddecl = make_node (DEBUG_EXPR_DECL);
6143 DECL_ARTIFICIAL (ddecl) = 1;
6144 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6145 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6146 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6147 vec_safe_push (*debug_args, ddecl);
6148 }
6149 if (debug_args != NULL)
6150 {
6151 /* On the callee side, add
6152 DEBUG D#Y s=> parm
6153 DEBUG var => D#Y
6154 stmts to the first bb, where var is a VAR_DECL created for the
6155 optimized-away parameter in the DECL_INITIAL block. This hints
6156 in the debug info that var (whose DECL_ORIGIN is the parm
6157 PARM_DECL) is optimized away, but could be looked up at the
6158 call site as the value of D#X there. */
6159 tree var = vars, vexpr;
6160 gimple_stmt_iterator cgsi
6161 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6162 gimple *def_temp;
6163 var = vars;
6164 i = vec_safe_length (*debug_args);
6165 do
6166 {
6167 i -= 2;
6168 while (var != NULL_TREE
6169 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6170 var = TREE_CHAIN (var);
6171 if (var == NULL_TREE)
6172 break;
6173 vexpr = make_node (DEBUG_EXPR_DECL);
6174 parm = (**debug_args)[i];
6175 DECL_ARTIFICIAL (vexpr) = 1;
6176 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6177 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6178 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6179 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6180 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6181 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6182 }
6183 while (i > len);
6184 }
6185 }
6186
6187 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6188 BITMAP_FREE (debug_args_to_skip);
6189 free_dominance_info (CDI_DOMINATORS);
6190 free_dominance_info (CDI_POST_DOMINATORS);
6191
6192 gcc_assert (!id.debug_stmts.exists ());
6193 pop_cfun ();
6194 return;
6195 }
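
/* Illustrative sketch, not part of the original source: the cgraph cloning
   machinery (e.g. cgraph_node::create_version_clone_with_body) is the
   typical caller; stripped to its essentials the call looks roughly like

     tree_function_versioning (old_decl, new_decl, tree_map,
                               /*update_clones=*/false, args_to_skip,
                               skip_return, bbs_to_copy, new_entry_block);

   with NEW_DECL already created and entered into the symbol table before
   its body is materialized here.  */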
6196
6197 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6198 the callee and return the inlined body on success. */
6199
6200 tree
6201 maybe_inline_call_in_expr (tree exp)
6202 {
6203 tree fn = get_callee_fndecl (exp);
6204
6205 /* We can only try to inline "const" functions. */
6206 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6207 {
6208 call_expr_arg_iterator iter;
6209 copy_body_data id;
6210 tree param, arg, t;
6211 hash_map<tree, tree> decl_map;
6212
6213 /* Remap the parameters. */
6214 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6215 param;
6216 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6217 decl_map.put (param, arg);
6218
6219 memset (&id, 0, sizeof (id));
6220 id.src_fn = fn;
6221 id.dst_fn = current_function_decl;
6222 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6223 id.decl_map = &decl_map;
6224
6225 id.copy_decl = copy_decl_no_change;
6226 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6227 id.transform_new_cfg = false;
6228 id.transform_return_to_modify = true;
6229 id.transform_parameter = true;
6230 id.transform_lang_insert_block = NULL;
6231
6232 /* Make sure not to unshare trees behind the front-end's back
6233 since front-end specific mechanisms may rely on sharing. */
6234 id.regimplify = false;
6235 id.do_not_unshare = true;
6236
6237 /* We're not inside any EH region. */
6238 id.eh_lp_nr = 0;
6239
6240 t = copy_tree_body (&id);
6241
6242 /* We can only return something suitable for use in a GENERIC
6243 expression tree. */
6244 if (TREE_CODE (t) == MODIFY_EXPR)
6245 return TREE_OPERAND (t, 1);
6246 }
6247
6248 return NULL_TREE;
6249 }
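
/* Illustrative example, not part of the original source: given a "const"
   function whose GENERIC body is still available, e.g.

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   passing the CALL_EXPR for 'sq (3)' to maybe_inline_call_in_expr can yield
   the remapped body expression, effectively '3 * 3', provided the copied
   body reduces to a single MODIFY_EXPR; otherwise NULL_TREE is returned
   and the call is left alone.  */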
6250
6251 /* Duplicate a type, fields and all. */
6252
6253 tree
6254 build_duplicate_type (tree type)
6255 {
6256 struct copy_body_data id;
6257
6258 memset (&id, 0, sizeof (id));
6259 id.src_fn = current_function_decl;
6260 id.dst_fn = current_function_decl;
6261 id.src_cfun = cfun;
6262 id.decl_map = new hash_map<tree, tree>;
6263 id.debug_map = NULL;
6264 id.copy_decl = copy_decl_no_change;
6265
6266 type = remap_type_1 (type, &id);
6267
6268 delete id.decl_map;
6269 if (id.debug_map)
6270 delete id.debug_map;
6271
6272 TYPE_CANONICAL (type) = type;
6273
6274 return type;
6275 }
6276
6277 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6278 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6279 evaluation. */
6280
6281 tree
6282 copy_fn (tree fn, tree& parms, tree& result)
6283 {
6284 copy_body_data id;
6285 tree param;
6286 hash_map<tree, tree> decl_map;
6287
6288 tree *p = &parms;
6289 *p = NULL_TREE;
6290
6291 memset (&id, 0, sizeof (id));
6292 id.src_fn = fn;
6293 id.dst_fn = current_function_decl;
6294 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6295 id.decl_map = &decl_map;
6296
6297 id.copy_decl = copy_decl_no_change;
6298 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6299 id.transform_new_cfg = false;
6300 id.transform_return_to_modify = false;
6301 id.transform_parameter = true;
6302 id.transform_lang_insert_block = NULL;
6303
6304 /* Make sure not to unshare trees behind the front-end's back
6305 since front-end specific mechanisms may rely on sharing. */
6306 id.regimplify = false;
6307 id.do_not_unshare = true;
6308
6309 /* We're not inside any EH region. */
6310 id.eh_lp_nr = 0;
6311
6312 /* Remap the parameters and result and return them to the caller. */
6313 for (param = DECL_ARGUMENTS (fn);
6314 param;
6315 param = DECL_CHAIN (param))
6316 {
6317 *p = remap_decl (param, &id);
6318 p = &DECL_CHAIN (*p);
6319 }
6320
6321 if (DECL_RESULT (fn))
6322 result = remap_decl (DECL_RESULT (fn), &id);
6323 else
6324 result = NULL_TREE;
6325
6326 return copy_tree_body (&id);
6327 }
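
/* Illustrative sketch, not part of the original source (modelled on the C++
   constexpr evaluator, the client named in the comment above):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   BODY is an unshared copy of DECL_SAVED_TREE (fndecl); PARMS is the chain
   of remapped PARM_DECLs and RESULT the remapped RESULT_DECL, ready to be
   bound to argument values while the body is evaluated.  */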