re PR middle-end/85956 (ICE in wide_int_to_tree_1, at tree.c:1549)
[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63
64 /* I'm not real happy about this, but we need to handle gimple and
65 non-gimple trees. */
66
67 /* Inlining, Cloning, Versioning, Parallelization
68
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements is adjusted accordingly.
75
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
80
81 Versioning: a function body is duplicated and the result is a new
82 function, rather than being inlined into blocks of an existing function as with
83 inlining. Some parameters will become constants.
84
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
88
89 All of these will simultaneously look up any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined), those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
96
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
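
/* As a purely illustrative sketch (not actual GIMPLE), inlining

     int callee (int x) { return x + 1; }

   at a call site "r = callee (a);" conceptually yields

     { int x.1 = a; retval.2 = x.1 + 1; }  r = retval.2;

   i.e. the PARM_DECL becomes a local VAR_DECL initialized from the
   argument and the RETURN_EXPR becomes an assignment to the
   returned-value variable.  The names x.1 and retval.2 are made up for
   this example; the real transformation works on GIMPLE and the CFG
   rather than on source-level statements.  */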
98
99 /* To Do:
100
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
106 are not needed.
107
108 o Provide heuristics to clamp inlining of recursive template
109 calls? */
110
111
112 /* Weights that estimate_num_insns uses to estimate the size of the
113 produced code. */
114
115 eni_weights eni_size_weights;
116
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
119
120 eni_weights eni_time_weights;
121
122 /* Prototypes. */
123
124 static tree declare_return_variable (copy_body_data *, tree, tree,
125 basic_block);
126 static void remap_block (tree *, copy_body_data *);
127 static void copy_bind_expr (tree *, int *, copy_body_data *);
128 static void declare_inline_vars (tree, tree);
129 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
130 static void prepend_lexical_block (tree current_block, tree new_block);
131 static tree copy_decl_to_var (tree, copy_body_data *);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, this map is used for more than that. */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
151
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 n = id->decl_map->get (val);
218 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 return *n;
220 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 DECL_ARTIFICIAL (vexpr) = 1;
222 TREE_TYPE (vexpr) = TREE_TYPE (name);
223 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 insert_decl_map (id, val, vexpr);
227 return vexpr;
228 }
229
230 processing_debug_stmt = -1;
231 return name;
232 }
233
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var = SSA_NAME_VAR (name);
236 if (!var
237 || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 && VAR_P (var)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 && DECL_ARTIFICIAL (var)
241 && DECL_IGNORED_P (var)
242 && !DECL_NAME (var)))
243 {
244 struct ptr_info_def *pi;
245 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246 if (!var && SSA_NAME_IDENTIFIER (name))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248 insert_decl_map (id, name, new_tree);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id->src_cfun->gimple_df
253 && id->src_cfun->gimple_df->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name))
255 && (pi = SSA_NAME_PTR_INFO (name))
256 && !pi->pt.anything)
257 {
258 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 new_pi->pt = pi->pt;
260 }
261 return new_tree;
262 }
263
264 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
265 in copy_bb. */
266 new_tree = remap_decl (var, id);
267
268 /* We might've substituted a constant or another SSA_NAME for
269 the variable.
270
271 Replace the SSA name representing RESULT_DECL by the variable during
272 inlining: this saves us from the need to introduce a PHI node in case
273 the return value is only partly initialized. */
274 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
275 && (!SSA_NAME_VAR (name)
276 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
277 || !id->transform_return_to_modify))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (new_tree);
281 insert_decl_map (id, name, new_tree);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id->src_cfun->gimple_df
286 && id->src_cfun->gimple_df->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name))
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296 /* By inlining a function having an uninitialized variable, we might
297 extend its lifetime (the variable might get reused). This causes
298 an ICE if we end up extending the lifetime of an SSA name across an
299 abnormal edge, and it also increases register pressure.
300
301 We simply initialize all uninitialized vars by 0, except when we
302 are inlining into the very first BB. We could avoid this for all
303 BBs that are not inside strongly connected regions of the CFG,
304 but that is expensive to test. */
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple *init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = id->decl_map->get (decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
355 if (!n
356 && id->prevent_decl_creation_for_types
357 && id->remapping_type_depth > 0
358 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
359 return decl;
360
361 /* If we didn't already have an equivalent for this declaration, create one
362 now. */
363 if (!n)
364 {
365 /* Make a copy of the variable or label. */
366 tree t = id->copy_decl (decl, id);
367
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id, decl, t);
372
373 if (!DECL_P (t))
374 return t;
375
376 /* Remap types, if necessary. */
377 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
378 if (TREE_CODE (t) == TYPE_DECL)
379 {
380 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
381
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
386 {
387 tree x = build_variant_type_copy (TREE_TYPE (t));
388 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
389 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
390 DECL_ORIGINAL_TYPE (t) = x;
391 }
392 }
393
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
396 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
397
398 /* If this is a FIELD_DECL, do likewise for the offset and qualifier. */
399 if (TREE_CODE (t) == FIELD_DECL)
400 {
401 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
402 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
403 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
404 }
405
406 return t;
407 }
408
409 if (id->do_not_unshare)
410 return *n;
411 else
412 return unshare_expr (*n);
413 }
414
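415 /* Helper for remap_type. TYPE is known to need remapping (it is
   variably modified), so build a fresh copy of it, remapping the
   types, fields, domain and size expressions it refers to, and
   register the copy in ID's decl map. */
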
415 static tree
416 remap_type_1 (tree type, copy_body_data *id)
417 {
418 tree new_tree, t;
419
420 /* We do need a copy. Build and register it now. If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
422 reference type. */
423 if (TREE_CODE (type) == POINTER_TYPE)
424 {
425 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
426 TYPE_MODE (type),
427 TYPE_REF_CAN_ALIAS_ALL (type));
428 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
429 new_tree = build_type_attribute_qual_variant (new_tree,
430 TYPE_ATTRIBUTES (type),
431 TYPE_QUALS (type));
432 insert_decl_map (id, type, new_tree);
433 return new_tree;
434 }
435 else if (TREE_CODE (type) == REFERENCE_TYPE)
436 {
437 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
438 TYPE_MODE (type),
439 TYPE_REF_CAN_ALIAS_ALL (type));
440 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
441 new_tree = build_type_attribute_qual_variant (new_tree,
442 TYPE_ATTRIBUTES (type),
443 TYPE_QUALS (type));
444 insert_decl_map (id, type, new_tree);
445 return new_tree;
446 }
447 else
448 new_tree = copy_node (type);
449
450 insert_decl_map (id, type, new_tree);
451
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t = TYPE_MAIN_VARIANT (type);
455 if (type != t)
456 {
457 t = remap_type (t, id);
458 TYPE_MAIN_VARIANT (new_tree) = t;
459 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
460 TYPE_NEXT_VARIANT (t) = new_tree;
461 }
462 else
463 {
464 TYPE_MAIN_VARIANT (new_tree) = new_tree;
465 TYPE_NEXT_VARIANT (new_tree) = NULL;
466 }
467
468 if (TYPE_STUB_DECL (type))
469 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
470
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree) = NULL;
473 TYPE_REFERENCE_TO (new_tree) = NULL;
474
475 /* Copy all types that may contain references to local variables; be sure to
476 preserve sharing between the type and its main variant when possible. */
477 switch (TREE_CODE (new_tree))
478 {
479 case INTEGER_TYPE:
480 case REAL_TYPE:
481 case FIXED_POINT_TYPE:
482 case ENUMERAL_TYPE:
483 case BOOLEAN_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
485 {
486 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
488
489 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
490 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 }
492 else
493 {
494 t = TYPE_MIN_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
497
498 t = TYPE_MAX_VALUE (new_tree);
499 if (t && TREE_CODE (t) != INTEGER_CST)
500 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 }
502 return new_tree;
503
504 case FUNCTION_TYPE:
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
507 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
508 else
509 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
511 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
512 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
513 else
514 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
515 return new_tree;
516
517 case ARRAY_TYPE:
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
519 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
520 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
521 else
522 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
523
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
525 {
526 gcc_checking_assert (TYPE_DOMAIN (type)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
528 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
529 }
530 else
531 {
532 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
533 /* For array bounds where we have decided not to copy over the bounds
534 variable which isn't used in the OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
537 && id->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
539 {
540 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
541 DECL_ATTRIBUTES (v)
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
543 DECL_ATTRIBUTES (v));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
545 }
546 }
547 break;
548
549 case RECORD_TYPE:
550 case UNION_TYPE:
551 case QUAL_UNION_TYPE:
552 if (TYPE_MAIN_VARIANT (type) != type
553 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
554 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
555 else
556 {
557 tree f, nf = NULL;
558
559 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
560 {
561 t = remap_decl (f, id);
562 DECL_CONTEXT (t) = new_tree;
563 DECL_CHAIN (t) = nf;
564 nf = t;
565 }
566 TYPE_FIELDS (new_tree) = nreverse (nf);
567 }
568 break;
569
570 case OFFSET_TYPE:
571 default:
572 /* Shouldn't have been thought variable sized. */
573 gcc_unreachable ();
574 }
575
576 /* All variants of the type share the same size, so use the already remapped data. */
577 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
578 {
579 tree s = TYPE_SIZE (type);
580 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
581 tree su = TYPE_SIZE_UNIT (type);
582 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
583 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
585 || s == mvs);
586 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
588 || su == mvsu);
589 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
590 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
591 }
592 else
593 {
594 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
596 }
597
598 return new_tree;
599 }
600
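601 /* Remap TYPE using the mapping in ID. A type that is not variably
   modified in ID->src_fn is returned unchanged (and cached as an
   identity mapping); a variably modified type is copied via
   remap_type_1. */
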
601 tree
602 remap_type (tree type, copy_body_data *id)
603 {
604 tree *node;
605 tree tmp;
606
607 if (type == NULL)
608 return type;
609
610 /* See if we have remapped this type. */
611 node = id->decl_map->get (type);
612 if (node)
613 return *node;
614
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type, id->src_fn))
617 {
618 insert_decl_map (id, type, type);
619 return type;
620 }
621
622 id->remapping_type_depth++;
623 tmp = remap_type_1 (type, id);
624 id->remapping_type_depth--;
625
626 return tmp;
627 }
628
629 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS. */
630
631 static bool
632 can_be_nonlocal (tree decl, copy_body_data *id)
633 {
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl) == FUNCTION_DECL)
636 return true;
637
638 /* Local static vars must be non-local or we get multiple declaration
639 problems. */
640 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
641 return true;
642
643 return false;
644 }
645
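646 /* Remap the chain of declarations DECLS for the copied body. Decls
   that can be made nonlocal (see can_be_nonlocal) are pushed onto
   *NONLOCALIZED_LIST instead of being copied; the remapped chain is
   returned in the original order. */
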
646 static tree
647 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
648 copy_body_data *id)
649 {
650 tree old_var;
651 tree new_decls = NULL_TREE;
652
653 /* Remap its variables. */
654 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
655 {
656 tree new_var;
657
658 if (can_be_nonlocal (old_var, id))
659 {
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
663 add_local_decl (cfun, old_var);
664 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
665 && !DECL_IGNORED_P (old_var)
666 && nonlocalized_list)
667 vec_safe_push (*nonlocalized_list, old_var);
668 continue;
669 }
670
671 /* Remap the variable. */
672 new_var = remap_decl (old_var, id);
673
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
677
678 if (new_var == id->retvar)
679 ;
680 else if (!new_var)
681 {
682 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
683 && !DECL_IGNORED_P (old_var)
684 && nonlocalized_list)
685 vec_safe_push (*nonlocalized_list, old_var);
686 }
687 else
688 {
689 gcc_assert (DECL_P (new_var));
690 DECL_CHAIN (new_var) = new_decls;
691 new_decls = new_var;
692
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
695 {
696 tree tem = DECL_VALUE_EXPR (new_var);
697 bool old_regimplify = id->regimplify;
698 id->remapping_type_depth++;
699 walk_tree (&tem, copy_tree_body_r, id, NULL);
700 id->remapping_type_depth--;
701 id->regimplify = old_regimplify;
702 SET_DECL_VALUE_EXPR (new_var, tem);
703 }
704 }
705 }
706
707 return nreverse (new_decls);
708 }
709
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein, and hook the new block into the block tree. */
712
713 static void
714 remap_block (tree *block, copy_body_data *id)
715 {
716 tree old_block;
717 tree new_block;
718
719 /* Make the new block. */
720 old_block = *block;
721 new_block = make_node (BLOCK);
722 TREE_USED (new_block) = TREE_USED (old_block);
723 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
724 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
725 BLOCK_NONLOCALIZED_VARS (new_block)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
727 *block = new_block;
728
729 /* Remap its variables. */
730 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
731 &BLOCK_NONLOCALIZED_VARS (new_block),
732 id);
733
734 if (id->transform_lang_insert_block)
735 id->transform_lang_insert_block (new_block);
736
737 /* Remember the remapped block. */
738 insert_decl_map (id, old_block, new_block);
739 }
740
741 /* Copy the whole block tree and root it in id->block. */
742
743 static tree
744 remap_blocks (tree block, copy_body_data *id)
745 {
746 tree t;
747 tree new_tree = block;
748
749 if (!block)
750 return NULL;
751
752 remap_block (&new_tree, id);
753 gcc_assert (new_tree != block);
754 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
755 prepend_lexical_block (new_tree, remap_blocks (t, id));
756 /* Blocks are in arbitrary order, but make things slightly prettier and do
757 not swap order when producing a copy. */
758 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
759 return new_tree;
760 }
761
762 /* Remap the block tree rooted at BLOCK to nothing. */
763
764 static void
765 remap_blocks_to_null (tree block, copy_body_data *id)
766 {
767 tree t;
768 insert_decl_map (id, block, NULL_TREE);
769 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
770 remap_blocks_to_null (t, id);
771 }
772
773 /* Remap the location info LOCUS using the mapping in ID. */
774
775 static location_t
776 remap_location (location_t locus, copy_body_data *id)
777 {
778 if (LOCATION_BLOCK (locus))
779 {
780 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
781 gcc_assert (n);
782 if (*n)
783 return set_block (locus, *n);
784 }
785
786 locus = LOCATION_LOCUS (locus);
787
788 if (locus != UNKNOWN_LOCATION && id->block)
789 return set_block (locus, id->block);
790
791 return locus;
792 }
793
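794 /* Make a deep copy of the STATEMENT_LIST pointed to by *TP, copying
   nested STATEMENT_LISTs as well, and store the new list back into
   *TP. */
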
794 static void
795 copy_statement_list (tree *tp)
796 {
797 tree_stmt_iterator oi, ni;
798 tree new_tree;
799
800 new_tree = alloc_stmt_list ();
801 ni = tsi_start (new_tree);
802 oi = tsi_start (*tp);
803 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
804 *tp = new_tree;
805
806 for (; !tsi_end_p (oi); tsi_next (&oi))
807 {
808 tree stmt = tsi_stmt (oi);
809 if (TREE_CODE (stmt) == STATEMENT_LIST)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt);
814 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
815 }
816 }
817
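818 /* Copy the BIND_EXPR pointed to by *TP, remapping its BLOCK and the
   variables it declares using the mapping in ID. */
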
818 static void
819 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
820 {
821 tree block = BIND_EXPR_BLOCK (*tp);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp, walk_subtrees, NULL);
824 if (block)
825 {
826 remap_block (&block, id);
827 BIND_EXPR_BLOCK (*tp) = block;
828 }
829
830 if (BIND_EXPR_VARS (*tp))
831 /* This will remap a lot of the same decls again, but this should be
832 harmless. */
833 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
834 }
835
836
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
839
840 static gimple_seq
841 remap_gimple_seq (gimple_seq body, copy_body_data *id)
842 {
843 gimple_stmt_iterator si;
844 gimple_seq new_body = NULL;
845
846 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
847 {
848 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
849 gimple_seq_add_seq (&new_body, new_stmts);
850 }
851
852 return new_body;
853 }
854
855
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
858
859 static gimple *
860 copy_gimple_bind (gbind *stmt, copy_body_data *id)
861 {
862 gimple *new_bind;
863 tree new_block, new_vars;
864 gimple_seq body, new_body;
865
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body = gimple_bind_body (stmt);
869 new_body = remap_gimple_seq (body, id);
870
871 new_block = gimple_bind_block (stmt);
872 if (new_block)
873 remap_block (&new_block, id);
874
875 /* This will remap a lot of the same decls again, but this should be
876 harmless. */
877 new_vars = gimple_bind_vars (stmt);
878 if (new_vars)
879 new_vars = remap_decls (new_vars, NULL, id);
880
881 new_bind = gimple_build_bind (new_vars, new_body, new_block);
882
883 return new_bind;
884 }
885
886 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
887
888 static bool
889 is_parm (tree decl)
890 {
891 if (TREE_CODE (decl) == SSA_NAME)
892 {
893 decl = SSA_NAME_VAR (decl);
894 if (!decl)
895 return false;
896 }
897
898 return (TREE_CODE (decl) == PARM_DECL);
899 }
900
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
903
904 static unsigned short
905 remap_dependence_clique (copy_body_data *id, unsigned short clique)
906 {
907 if (clique == 0 || processing_debug_stmt)
908 return 0;
909 if (!id->dependence_map)
910 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
911 bool existed;
912 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
913 if (!existed)
914 newc = ++cfun->last_clique;
915 return newc;
916 }
917
918 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
919 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
920 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
921 recursing into the child nodes of *TP. */
922
923 static tree
924 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
925 {
926 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
927 copy_body_data *id = (copy_body_data *) wi_p->info;
928 tree fn = id->src_fn;
929
930 /* For recursive invocations this is no longer the LHS itself. */
931 bool is_lhs = wi_p->is_lhs;
932 wi_p->is_lhs = false;
933
934 if (TREE_CODE (*tp) == SSA_NAME)
935 {
936 *tp = remap_ssa_name (*tp, id);
937 *walk_subtrees = 0;
938 if (is_lhs)
939 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
940 return NULL;
941 }
942 else if (auto_var_in_fn_p (*tp, fn))
943 {
944 /* Local variables and labels need to be replaced by equivalent
945 variables. We don't want to copy static variables; there's
946 only one of those, no matter how many times we inline the
947 containing function. Similarly for globals from an outer
948 function. */
949 tree new_decl;
950
951 /* Remap the declaration. */
952 new_decl = remap_decl (*tp, id);
953 gcc_assert (new_decl);
954 /* Replace this variable with the copy. */
955 STRIP_TYPE_NOPS (new_decl);
956 /* ??? The C++ frontend uses a void * null pointer constant to initialize
957 any other type. This confuses the middle-end type verification.
958 As cloned bodies do not go through gimplification again, the fixup
959 there doesn't trigger. */
960 if (TREE_CODE (new_decl) == INTEGER_CST
961 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
962 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
963 *tp = new_decl;
964 *walk_subtrees = 0;
965 }
966 else if (TREE_CODE (*tp) == STATEMENT_LIST)
967 gcc_unreachable ();
968 else if (TREE_CODE (*tp) == SAVE_EXPR)
969 gcc_unreachable ();
970 else if (TREE_CODE (*tp) == LABEL_DECL
971 && (!DECL_CONTEXT (*tp)
972 || decl_function_context (*tp) == id->src_fn))
973 /* These may need to be remapped for EH handling. */
974 *tp = remap_decl (*tp, id);
975 else if (TREE_CODE (*tp) == FIELD_DECL)
976 {
977 /* If the enclosing record type is variably_modified_type_p, the field
978 has already been remapped. Otherwise, it need not be. */
979 tree *n = id->decl_map->get (*tp);
980 if (n)
981 *tp = *n;
982 *walk_subtrees = 0;
983 }
984 else if (TYPE_P (*tp))
985 /* Types may need remapping as well. */
986 *tp = remap_type (*tp, id);
987 else if (CONSTANT_CLASS_P (*tp))
988 {
989 /* If this is a constant, we have to copy the node iff the type
990 will be remapped. copy_tree_r will not copy a constant. */
991 tree new_type = remap_type (TREE_TYPE (*tp), id);
992
993 if (new_type == TREE_TYPE (*tp))
994 *walk_subtrees = 0;
995
996 else if (TREE_CODE (*tp) == INTEGER_CST)
997 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
998 else
999 {
1000 *tp = copy_node (*tp);
1001 TREE_TYPE (*tp) = new_type;
1002 }
1003 }
1004 else
1005 {
1006 /* Otherwise, just copy the node. Note that copy_tree_r already
1007 knows not to copy VAR_DECLs, etc., so this is safe. */
1008
1009 if (TREE_CODE (*tp) == MEM_REF)
1010 {
1011 /* We need to re-canonicalize MEM_REFs from inline substitutions
1012 that can happen when a pointer argument is an ADDR_EXPR.
1013 Recurse here manually to allow that. */
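	  /* For example, if the pointer argument was bound to &a at the
	     call site, the dereference copied from the callee is rebuilt
	     below with fold_build2 so that it can be folded back into a
	     direct reference to a.  */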
1014 tree ptr = TREE_OPERAND (*tp, 0);
1015 tree type = remap_type (TREE_TYPE (*tp), id);
1016 tree old = *tp;
1017 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1018 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1019 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1020 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1021 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1022 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1023 {
1024 MR_DEPENDENCE_CLIQUE (*tp)
1025 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1026 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1027 }
1028 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1029 remapped a parameter as the property might be valid only
1030 for the parameter itself. */
1031 if (TREE_THIS_NOTRAP (old)
1032 && (!is_parm (TREE_OPERAND (old, 0))
1033 || (!id->transform_parameter && is_parm (ptr))))
1034 TREE_THIS_NOTRAP (*tp) = 1;
1035 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1036 *walk_subtrees = 0;
1037 return NULL;
1038 }
1039
1040 /* Here is the "usual case". Copy this tree node, and then
1041 tweak some special cases. */
1042 copy_tree_r (tp, walk_subtrees, NULL);
1043
1044 if (TREE_CODE (*tp) != OMP_CLAUSE)
1045 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1046
1047 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1048 {
1049 /* The copied TARGET_EXPR has never been expanded, even if the
1050 original node was expanded already. */
1051 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1052 TREE_OPERAND (*tp, 3) = NULL_TREE;
1053 }
1054 else if (TREE_CODE (*tp) == ADDR_EXPR)
1055 {
1056 /* Variable substitution need not be simple; see in particular
1057 the MEM_REF substitution above. Make sure that
1058 TREE_CONSTANT and friends are up-to-date. */
1059 int invariant = is_gimple_min_invariant (*tp);
1060 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1061 recompute_tree_invariant_for_addr_expr (*tp);
1062
1063 /* If this used to be invariant, but is not any longer,
1064 then regimplification is probably needed. */
1065 if (invariant && !is_gimple_min_invariant (*tp))
1066 id->regimplify = true;
1067
1068 *walk_subtrees = 0;
1069 }
1070 }
1071
1072 /* Update the TREE_BLOCK for the cloned expr. */
1073 if (EXPR_P (*tp))
1074 {
1075 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1076 tree old_block = TREE_BLOCK (*tp);
1077 if (old_block)
1078 {
1079 tree *n;
1080 n = id->decl_map->get (TREE_BLOCK (*tp));
1081 if (n)
1082 new_block = *n;
1083 }
1084 TREE_SET_BLOCK (*tp, new_block);
1085 }
1086
1087 /* Keep iterating. */
1088 return NULL_TREE;
1089 }
1090
1091
1092 /* Called from copy_body_id via walk_tree. DATA is really a
1093 `copy_body_data *'. */
1094
1095 tree
1096 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1097 {
1098 copy_body_data *id = (copy_body_data *) data;
1099 tree fn = id->src_fn;
1100 tree new_block;
1101
1102 /* Begin by recognizing trees that we'll completely rewrite for the
1103 inlining context. Our output for these trees is completely
1104 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1105 into an edge). Further down, we'll handle trees that get
1106 duplicated and/or tweaked. */
1107
1108 /* When requested, RETURN_EXPRs should be transformed to just the
1109 contained MODIFY_EXPR. The branch semantics of the return will
1110 be handled elsewhere by manipulating the CFG rather than a statement. */
1111 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1112 {
1113 tree assignment = TREE_OPERAND (*tp, 0);
1114
1115 /* If we're returning something, just turn that into an
1116 assignment to the equivalent of the original RESULT_DECL.
1117 If the "assignment" is just the result decl, the result
1118 decl has already been set (e.g. a recent "foo (&result_decl,
1119 ...)"); just toss the entire RETURN_EXPR. */
1120 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1121 {
1122 /* Replace the RETURN_EXPR with (a copy of) the
1123 MODIFY_EXPR hanging underneath. */
1124 *tp = copy_node (assignment);
1125 }
1126 else /* Else the RETURN_EXPR returns no value. */
1127 {
1128 *tp = NULL;
1129 return (tree) (void *)1;
1130 }
1131 }
1132 else if (TREE_CODE (*tp) == SSA_NAME)
1133 {
1134 *tp = remap_ssa_name (*tp, id);
1135 *walk_subtrees = 0;
1136 return NULL;
1137 }
1138
1139 /* Local variables and labels need to be replaced by equivalent
1140 variables. We don't want to copy static variables; there's only
1141 one of those, no matter how many times we inline the containing
1142 function. Similarly for globals from an outer function. */
1143 else if (auto_var_in_fn_p (*tp, fn))
1144 {
1145 tree new_decl;
1146
1147 /* Remap the declaration. */
1148 new_decl = remap_decl (*tp, id);
1149 gcc_assert (new_decl);
1150 /* Replace this variable with the copy. */
1151 STRIP_TYPE_NOPS (new_decl);
1152 *tp = new_decl;
1153 *walk_subtrees = 0;
1154 }
1155 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1156 copy_statement_list (tp);
1157 else if (TREE_CODE (*tp) == SAVE_EXPR
1158 || TREE_CODE (*tp) == TARGET_EXPR)
1159 remap_save_expr (tp, id->decl_map, walk_subtrees);
1160 else if (TREE_CODE (*tp) == LABEL_DECL
1161 && (! DECL_CONTEXT (*tp)
1162 || decl_function_context (*tp) == id->src_fn))
1163 /* These may need to be remapped for EH handling. */
1164 *tp = remap_decl (*tp, id);
1165 else if (TREE_CODE (*tp) == BIND_EXPR)
1166 copy_bind_expr (tp, walk_subtrees, id);
1167 /* Types may need remapping as well. */
1168 else if (TYPE_P (*tp))
1169 *tp = remap_type (*tp, id);
1170
1171 /* If this is a constant, we have to copy the node iff the type will be
1172 remapped. copy_tree_r will not copy a constant. */
1173 else if (CONSTANT_CLASS_P (*tp))
1174 {
1175 tree new_type = remap_type (TREE_TYPE (*tp), id);
1176
1177 if (new_type == TREE_TYPE (*tp))
1178 *walk_subtrees = 0;
1179
1180 else if (TREE_CODE (*tp) == INTEGER_CST)
1181 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1182 else
1183 {
1184 *tp = copy_node (*tp);
1185 TREE_TYPE (*tp) = new_type;
1186 }
1187 }
1188
1189 /* Otherwise, just copy the node. Note that copy_tree_r already
1190 knows not to copy VAR_DECLs, etc., so this is safe. */
1191 else
1192 {
1193 /* Here we handle trees that are not completely rewritten.
1194 First we detect some inlining-induced bogosities for
1195 discarding. */
1196 if (TREE_CODE (*tp) == MODIFY_EXPR
1197 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1198 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1199 {
1200 /* Some assignments VAR = VAR; don't generate any rtl code
1201 and thus don't count as variable modification. Avoid
1202 keeping bogosities like 0 = 0. */
1203 tree decl = TREE_OPERAND (*tp, 0), value;
1204 tree *n;
1205
1206 n = id->decl_map->get (decl);
1207 if (n)
1208 {
1209 value = *n;
1210 STRIP_TYPE_NOPS (value);
1211 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1212 {
1213 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1214 return copy_tree_body_r (tp, walk_subtrees, data);
1215 }
1216 }
1217 }
1218 else if (TREE_CODE (*tp) == INDIRECT_REF)
1219 {
1220 /* Get rid of *& from inline substitutions that can happen when a
1221 pointer argument is an ADDR_EXPR. */
1222 tree decl = TREE_OPERAND (*tp, 0);
1223 tree *n = id->decl_map->get (decl);
1224 if (n)
1225 {
1226 /* If we happen to get an ADDR_EXPR in n->value, strip
1227 it manually here as we'll eventually get ADDR_EXPRs
1228 that lie about the type they point to. In this case
1229 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1230 but we absolutely rely on that. As fold_indirect_ref
1231 does other useful transformations, try that first, though. */
1232 tree type = TREE_TYPE (*tp);
1233 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1234 tree old = *tp;
1235 *tp = gimple_fold_indirect_ref (ptr);
1236 if (! *tp)
1237 {
1238 type = remap_type (type, id);
1239 if (TREE_CODE (ptr) == ADDR_EXPR)
1240 {
1241 *tp
1242 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1243 /* ??? We should either assert here or build
1244 a VIEW_CONVERT_EXPR instead of blindly leaking
1245 incompatible types to our IL. */
1246 if (! *tp)
1247 *tp = TREE_OPERAND (ptr, 0);
1248 }
1249 else
1250 {
1251 *tp = build1 (INDIRECT_REF, type, ptr);
1252 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1253 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1254 TREE_READONLY (*tp) = TREE_READONLY (old);
1255 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1256 have remapped a parameter as the property might be
1257 valid only for the parameter itself. */
1258 if (TREE_THIS_NOTRAP (old)
1259 && (!is_parm (TREE_OPERAND (old, 0))
1260 || (!id->transform_parameter && is_parm (ptr))))
1261 TREE_THIS_NOTRAP (*tp) = 1;
1262 }
1263 }
1264 *walk_subtrees = 0;
1265 return NULL;
1266 }
1267 }
1268 else if (TREE_CODE (*tp) == MEM_REF)
1269 {
1270 /* We need to re-canonicalize MEM_REFs from inline substitutions
1271 that can happen when a pointer argument is an ADDR_EXPR.
1272 Recurse here manually to allow that. */
1273 tree ptr = TREE_OPERAND (*tp, 0);
1274 tree type = remap_type (TREE_TYPE (*tp), id);
1275 tree old = *tp;
1276 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1277 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1278 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1279 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1280 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1281 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1282 {
1283 MR_DEPENDENCE_CLIQUE (*tp)
1284 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1285 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1286 }
1287 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1288 remapped a parameter as the property might be valid only
1289 for the parameter itself. */
1290 if (TREE_THIS_NOTRAP (old)
1291 && (!is_parm (TREE_OPERAND (old, 0))
1292 || (!id->transform_parameter && is_parm (ptr))))
1293 TREE_THIS_NOTRAP (*tp) = 1;
1294 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1295 *walk_subtrees = 0;
1296 return NULL;
1297 }
1298
1299 /* Here is the "usual case". Copy this tree node, and then
1300 tweak some special cases. */
1301 copy_tree_r (tp, walk_subtrees, NULL);
1302
1303 /* If EXPR has a block defined, map it to the newly constructed block.
1304 When inlining, we want EXPRs without a block to appear in the block
1305 of the function call if we are not remapping a type. */
1306 if (EXPR_P (*tp))
1307 {
1308 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1309 if (TREE_BLOCK (*tp))
1310 {
1311 tree *n;
1312 n = id->decl_map->get (TREE_BLOCK (*tp));
1313 if (n)
1314 new_block = *n;
1315 }
1316 TREE_SET_BLOCK (*tp, new_block);
1317 }
1318
1319 if (TREE_CODE (*tp) != OMP_CLAUSE)
1320 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1321
1322 /* The copied TARGET_EXPR has never been expanded, even if the
1323 original node was expanded already. */
1324 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1325 {
1326 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1327 TREE_OPERAND (*tp, 3) = NULL_TREE;
1328 }
1329
1330 /* Variable substitution need not be simple; see in particular the
1331 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1332 and friends are up-to-date. */
1333 else if (TREE_CODE (*tp) == ADDR_EXPR)
1334 {
1335 int invariant = is_gimple_min_invariant (*tp);
1336 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1337
1338 /* Handle the case where we substituted an INDIRECT_REF
1339 into the operand of the ADDR_EXPR. */
1340 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1341 {
1342 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1343 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1344 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1345 *tp = t;
1346 }
1347 else
1348 recompute_tree_invariant_for_addr_expr (*tp);
1349
1350 /* If this used to be invariant, but is not any longer,
1351 then regimplification is probably needed. */
1352 if (invariant && !is_gimple_min_invariant (*tp))
1353 id->regimplify = true;
1354
1355 *walk_subtrees = 0;
1356 }
1357 }
1358
1359 /* Keep iterating. */
1360 return NULL_TREE;
1361 }
1362
1363 /* Helper for remap_gimple_stmt. Given an EH region number for the
1364 source function, map that to the duplicate EH region number in
1365 the destination function. */
1366
1367 static int
1368 remap_eh_region_nr (int old_nr, copy_body_data *id)
1369 {
1370 eh_region old_r, new_r;
1371
1372 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1373 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1374
1375 return new_r->index;
1376 }
1377
1378 /* Similar, but operate on INTEGER_CSTs. */
1379
1380 static tree
1381 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1382 {
1383 int old_nr, new_nr;
1384
1385 old_nr = tree_to_shwi (old_t_nr);
1386 new_nr = remap_eh_region_nr (old_nr, id);
1387
1388 return build_int_cst (integer_type_node, new_nr);
1389 }
1390
1391 /* Helper for copy_bb. Remap statement STMT using the inlining
1392 information in ID. Return the copy as a (possibly empty) gimple_seq. */
1393
1394 static gimple_seq
1395 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1396 {
1397 gimple *copy = NULL;
1398 struct walk_stmt_info wi;
1399 bool skip_first = false;
1400 gimple_seq stmts = NULL;
1401
1402 if (is_gimple_debug (stmt)
1403 && (gimple_debug_nonbind_marker_p (stmt)
1404 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1405 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1406 return NULL;
1407
1408 /* Begin by recognizing trees that we'll completely rewrite for the
1409 inlining context. Our output for these trees is completely
1410 different from our input (e.g. RETURN_EXPR is deleted and morphs
1411 into an edge). Further down, we'll handle trees that get
1412 duplicated and/or tweaked. */
1413
1414 /* When requested, GIMPLE_RETURN should be transformed to just the
1415 contained GIMPLE_ASSIGN. The branch semantics of the return will
1416 be handled elsewhere by manipulating the CFG rather than the
1417 statement. */
1418 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1419 {
1420 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1421
1422 /* If we're returning something, just turn that into an
1423 assignment to the equivalent of the original RESULT_DECL.
1424 If RETVAL is just the result decl, the result decl has
1425 already been set (e.g. a recent "foo (&result_decl, ...)");
1426 just toss the entire GIMPLE_RETURN. */
1427 if (retval
1428 && (TREE_CODE (retval) != RESULT_DECL
1429 && (TREE_CODE (retval) != SSA_NAME
1430 || ! SSA_NAME_VAR (retval)
1431 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1432 {
1433 copy = gimple_build_assign (id->do_not_unshare
1434 ? id->retvar : unshare_expr (id->retvar),
1435 retval);
1436 /* id->retvar is already substituted. Skip it on later remapping. */
1437 skip_first = true;
1438 }
1439 else
1440 return NULL;
1441 }
1442 else if (gimple_has_substatements (stmt))
1443 {
1444 gimple_seq s1, s2;
1445
1446 /* When cloning bodies from the C++ front end, we will be handed bodies
1447 in High GIMPLE form. Handle here all the High GIMPLE statements that
1448 have embedded statements. */
1449 switch (gimple_code (stmt))
1450 {
1451 case GIMPLE_BIND:
1452 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1453 break;
1454
1455 case GIMPLE_CATCH:
1456 {
1457 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1458 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1459 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1460 }
1461 break;
1462
1463 case GIMPLE_EH_FILTER:
1464 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1465 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1466 break;
1467
1468 case GIMPLE_TRY:
1469 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1470 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1471 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1472 break;
1473
1474 case GIMPLE_WITH_CLEANUP_EXPR:
1475 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1476 copy = gimple_build_wce (s1);
1477 break;
1478
1479 case GIMPLE_OMP_PARALLEL:
1480 {
1481 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1482 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1483 copy = gimple_build_omp_parallel
1484 (s1,
1485 gimple_omp_parallel_clauses (omp_par_stmt),
1486 gimple_omp_parallel_child_fn (omp_par_stmt),
1487 gimple_omp_parallel_data_arg (omp_par_stmt));
1488 }
1489 break;
1490
1491 case GIMPLE_OMP_TASK:
1492 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1493 copy = gimple_build_omp_task
1494 (s1,
1495 gimple_omp_task_clauses (stmt),
1496 gimple_omp_task_child_fn (stmt),
1497 gimple_omp_task_data_arg (stmt),
1498 gimple_omp_task_copy_fn (stmt),
1499 gimple_omp_task_arg_size (stmt),
1500 gimple_omp_task_arg_align (stmt));
1501 break;
1502
1503 case GIMPLE_OMP_FOR:
1504 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1505 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1506 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1507 gimple_omp_for_clauses (stmt),
1508 gimple_omp_for_collapse (stmt), s2);
1509 {
1510 size_t i;
1511 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1512 {
1513 gimple_omp_for_set_index (copy, i,
1514 gimple_omp_for_index (stmt, i));
1515 gimple_omp_for_set_initial (copy, i,
1516 gimple_omp_for_initial (stmt, i));
1517 gimple_omp_for_set_final (copy, i,
1518 gimple_omp_for_final (stmt, i));
1519 gimple_omp_for_set_incr (copy, i,
1520 gimple_omp_for_incr (stmt, i));
1521 gimple_omp_for_set_cond (copy, i,
1522 gimple_omp_for_cond (stmt, i));
1523 }
1524 }
1525 break;
1526
1527 case GIMPLE_OMP_MASTER:
1528 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1529 copy = gimple_build_omp_master (s1);
1530 break;
1531
1532 case GIMPLE_OMP_TASKGROUP:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_taskgroup
1535 (s1, gimple_omp_taskgroup_clauses (stmt));
1536 break;
1537
1538 case GIMPLE_OMP_ORDERED:
1539 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1540 copy = gimple_build_omp_ordered
1541 (s1,
1542 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1543 break;
1544
1545 case GIMPLE_OMP_SECTION:
1546 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1547 copy = gimple_build_omp_section (s1);
1548 break;
1549
1550 case GIMPLE_OMP_SECTIONS:
1551 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1552 copy = gimple_build_omp_sections
1553 (s1, gimple_omp_sections_clauses (stmt));
1554 break;
1555
1556 case GIMPLE_OMP_SINGLE:
1557 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1558 copy = gimple_build_omp_single
1559 (s1, gimple_omp_single_clauses (stmt));
1560 break;
1561
1562 case GIMPLE_OMP_TARGET:
1563 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1564 copy = gimple_build_omp_target
1565 (s1, gimple_omp_target_kind (stmt),
1566 gimple_omp_target_clauses (stmt));
1567 break;
1568
1569 case GIMPLE_OMP_TEAMS:
1570 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1571 copy = gimple_build_omp_teams
1572 (s1, gimple_omp_teams_clauses (stmt));
1573 break;
1574
1575 case GIMPLE_OMP_CRITICAL:
1576 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1577 copy = gimple_build_omp_critical (s1,
1578 gimple_omp_critical_name
1579 (as_a <gomp_critical *> (stmt)),
1580 gimple_omp_critical_clauses
1581 (as_a <gomp_critical *> (stmt)));
1582 break;
1583
1584 case GIMPLE_TRANSACTION:
1585 {
1586 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1587 gtransaction *new_trans_stmt;
1588 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1589 id);
1590 copy = new_trans_stmt = gimple_build_transaction (s1);
1591 gimple_transaction_set_subcode (new_trans_stmt,
1592 gimple_transaction_subcode (old_trans_stmt));
1593 gimple_transaction_set_label_norm (new_trans_stmt,
1594 gimple_transaction_label_norm (old_trans_stmt));
1595 gimple_transaction_set_label_uninst (new_trans_stmt,
1596 gimple_transaction_label_uninst (old_trans_stmt));
1597 gimple_transaction_set_label_over (new_trans_stmt,
1598 gimple_transaction_label_over (old_trans_stmt));
1599 }
1600 break;
1601
1602 default:
1603 gcc_unreachable ();
1604 }
1605 }
1606 else
1607 {
1608 if (gimple_assign_copy_p (stmt)
1609 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1610 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1611 {
1612 /* Here we handle statements that are not completely rewritten.
1613 First we detect some inlining-induced bogosities for
1614 discarding. */
1615
1616 /* Some assignments VAR = VAR; don't generate any rtl code
1617 and thus don't count as variable modification. Avoid
1618 keeping bogosities like 0 = 0. */
1619 tree decl = gimple_assign_lhs (stmt), value;
1620 tree *n;
1621
1622 n = id->decl_map->get (decl);
1623 if (n)
1624 {
1625 value = *n;
1626 STRIP_TYPE_NOPS (value);
1627 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1628 return NULL;
1629 }
1630 }
1631
1632 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1633 in a block that we aren't copying during tree_function_versioning,
1634 just drop the clobber stmt. */
1635 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1636 {
1637 tree lhs = gimple_assign_lhs (stmt);
1638 if (TREE_CODE (lhs) == MEM_REF
1639 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1640 {
1641 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1642 if (gimple_bb (def_stmt)
1643 && !bitmap_bit_p (id->blocks_to_copy,
1644 gimple_bb (def_stmt)->index))
1645 return NULL;
1646 }
1647 }
1648
1649 if (gimple_debug_bind_p (stmt))
1650 {
1651 gdebug *copy
1652 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1653 gimple_debug_bind_get_value (stmt),
1654 stmt);
1655 if (id->reset_location)
1656 gimple_set_location (copy, input_location);
1657 id->debug_stmts.safe_push (copy);
1658 gimple_seq_add_stmt (&stmts, copy);
1659 return stmts;
1660 }
1661 if (gimple_debug_source_bind_p (stmt))
1662 {
1663 gdebug *copy = gimple_build_debug_source_bind
1664 (gimple_debug_source_bind_get_var (stmt),
1665 gimple_debug_source_bind_get_value (stmt),
1666 stmt);
1667 if (id->reset_location)
1668 gimple_set_location (copy, input_location);
1669 id->debug_stmts.safe_push (copy);
1670 gimple_seq_add_stmt (&stmts, copy);
1671 return stmts;
1672 }
1673 if (gimple_debug_nonbind_marker_p (stmt))
1674 {
1675 /* If the inlined function has too many debug markers,
1676 don't copy them. */
1677 if (id->src_cfun->debug_marker_count
1678 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1679 return stmts;
1680
1681 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1682 if (id->reset_location)
1683 gimple_set_location (copy, input_location);
1684 id->debug_stmts.safe_push (copy);
1685 gimple_seq_add_stmt (&stmts, copy);
1686 return stmts;
1687 }
1688
1689 /* Create a new deep copy of the statement. */
1690 copy = gimple_copy (stmt);
1691
1692 /* Clear flags that need revisiting. */
1693 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1694 {
1695 if (gimple_call_tail_p (call_stmt))
1696 gimple_call_set_tail (call_stmt, false);
1697 if (gimple_call_from_thunk_p (call_stmt))
1698 gimple_call_set_from_thunk (call_stmt, false);
1699 if (gimple_call_internal_p (call_stmt))
1700 switch (gimple_call_internal_fn (call_stmt))
1701 {
1702 case IFN_GOMP_SIMD_LANE:
1703 case IFN_GOMP_SIMD_VF:
1704 case IFN_GOMP_SIMD_LAST_LANE:
1705 case IFN_GOMP_SIMD_ORDERED_START:
1706 case IFN_GOMP_SIMD_ORDERED_END:
1707 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1708 break;
1709 default:
1710 break;
1711 }
1712 }
1713
1714 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1715 RESX and EH_DISPATCH. */
1716 if (id->eh_map)
1717 switch (gimple_code (copy))
1718 {
1719 case GIMPLE_CALL:
1720 {
1721 tree r, fndecl = gimple_call_fndecl (copy);
1722 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1723 switch (DECL_FUNCTION_CODE (fndecl))
1724 {
1725 case BUILT_IN_EH_COPY_VALUES:
1726 r = gimple_call_arg (copy, 1);
1727 r = remap_eh_region_tree_nr (r, id);
1728 gimple_call_set_arg (copy, 1, r);
1729 /* FALLTHRU */
1730
1731 case BUILT_IN_EH_POINTER:
1732 case BUILT_IN_EH_FILTER:
1733 r = gimple_call_arg (copy, 0);
1734 r = remap_eh_region_tree_nr (r, id);
1735 gimple_call_set_arg (copy, 0, r);
1736 break;
1737
1738 default:
1739 break;
1740 }
1741
1742 /* Reset alias info if we didn't apply measures to
1743 keep it valid over inlining by setting DECL_PT_UID. */
1744 if (!id->src_cfun->gimple_df
1745 || !id->src_cfun->gimple_df->ipa_pta)
1746 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1747 }
1748 break;
1749
1750 case GIMPLE_RESX:
1751 {
1752 gresx *resx_stmt = as_a <gresx *> (copy);
1753 int r = gimple_resx_region (resx_stmt);
1754 r = remap_eh_region_nr (r, id);
1755 gimple_resx_set_region (resx_stmt, r);
1756 }
1757 break;
1758
1759 case GIMPLE_EH_DISPATCH:
1760 {
1761 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1762 int r = gimple_eh_dispatch_region (eh_dispatch);
1763 r = remap_eh_region_nr (r, id);
1764 gimple_eh_dispatch_set_region (eh_dispatch, r);
1765 }
1766 break;
1767
1768 default:
1769 break;
1770 }
1771 }
1772
1773 /* If STMT has a block defined, map it to the newly constructed block. */
1774 if (gimple_block (copy))
1775 {
1776 tree *n;
1777 n = id->decl_map->get (gimple_block (copy));
1778 gcc_assert (n);
1779 gimple_set_block (copy, *n);
1780 }
1781
1782 if (id->reset_location)
1783 gimple_set_location (copy, input_location);
1784
1785 /* Debug statements ought to be rebuilt and not copied. */
1786 gcc_checking_assert (!is_gimple_debug (copy));
1787
1788 /* Remap all the operands in COPY. */
1789 memset (&wi, 0, sizeof (wi));
1790 wi.info = id;
1791 if (skip_first)
1792 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1793 else
1794 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1795
1796 /* Clear the copied virtual operands. We are not remapping them here
1797 but are going to recreate them from scratch. */
1798 if (gimple_has_mem_ops (copy))
1799 {
1800 gimple_set_vdef (copy, NULL_TREE);
1801 gimple_set_vuse (copy, NULL_TREE);
1802 }
1803
1804 gimple_seq_add_stmt (&stmts, copy);
1805 return stmts;
1806 }
1807
1808
1809 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1810 later. */
1811
1812 static basic_block
1813 copy_bb (copy_body_data *id, basic_block bb,
1814 profile_count num, profile_count den)
1815 {
1816 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1817 basic_block copy_basic_block;
1818 tree decl;
1819 basic_block prev;
1820
1821 profile_count::adjust_for_ipa_scaling (&num, &den);
1822
1823 /* Search for previous copied basic block. */
1824 prev = bb->prev_bb;
1825 while (!prev->aux)
1826 prev = prev->prev_bb;
1827
1828 /* create_basic_block() will append every new block to
1829 basic_block_info automatically. */
1830 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1831 copy_basic_block->count = bb->count.apply_scale (num, den);
1832
1833 copy_gsi = gsi_start_bb (copy_basic_block);
1834
1835 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1836 {
1837 gimple_seq stmts;
1838 gimple *stmt = gsi_stmt (gsi);
1839 gimple *orig_stmt = stmt;
1840 gimple_stmt_iterator stmts_gsi;
1841 bool stmt_added = false;
1842
1843 id->regimplify = false;
1844 stmts = remap_gimple_stmt (stmt, id);
1845
1846 if (gimple_seq_empty_p (stmts))
1847 continue;
1848
1849 seq_gsi = copy_gsi;
1850
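/* Move each remapped statement into the copied block in order, dropping
   any nops the remapping may have produced.  */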
1851 for (stmts_gsi = gsi_start (stmts);
1852 !gsi_end_p (stmts_gsi); )
1853 {
1854 stmt = gsi_stmt (stmts_gsi);
1855
1856 /* Advance iterator now before stmt is moved to seq_gsi. */
1857 gsi_next (&stmts_gsi);
1858
1859 if (gimple_nop_p (stmt))
1860 continue;
1861
1862 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1863 orig_stmt);
1864
1865 /* With return slot optimization we can end up with
1866 non-gimple (foo *)&this->m, fix that here. */
1867 if (is_gimple_assign (stmt)
1868 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1869 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1870 {
1871 tree new_rhs;
1872 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1873 gimple_assign_rhs1 (stmt),
1874 true, NULL, false,
1875 GSI_CONTINUE_LINKING);
1876 gimple_assign_set_rhs1 (stmt, new_rhs);
1877 id->regimplify = false;
1878 }
1879
1880 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1881
1882 if (id->regimplify)
1883 gimple_regimplify_operands (stmt, &seq_gsi);
1884
1885 stmt_added = true;
1886 }
1887
1888 if (!stmt_added)
1889 continue;
1890
1891 /* If copy_basic_block was empty at the start of this iteration,
1892 call gsi_start_bb again to get at the newly added statements. */
1893 if (gsi_end_p (copy_gsi))
1894 copy_gsi = gsi_start_bb (copy_basic_block);
1895 else
1896 gsi_next (&copy_gsi);
1897
1898 /* Process the new statement. The call to gimple_regimplify_operands
1899 possibly turned the statement into multiple statements; we
1900 need to process all of them. */
1901 do
1902 {
1903 tree fn;
1904 gcall *call_stmt;
1905
1906 stmt = gsi_stmt (copy_gsi);
1907 call_stmt = dyn_cast <gcall *> (stmt);
1908 if (call_stmt
1909 && gimple_call_va_arg_pack_p (call_stmt)
1910 && id->call_stmt
1911 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1912 {
1913 /* __builtin_va_arg_pack () should be replaced by
1914 all arguments corresponding to ... in the caller. */
1915 tree p;
1916 gcall *new_call;
1917 vec<tree> argarray;
1918 size_t nargs = gimple_call_num_args (id->call_stmt);
1919 size_t n;
1920
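/* NARGS starts as the number of arguments at the call being inlined;
   subtracting the callee's named parameters leaves the number of
   arguments that were matched to "...".  */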
1921 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1922 nargs--;
1923
1924 /* Create the new array of arguments. */
1925 n = nargs + gimple_call_num_args (call_stmt);
1926 argarray.create (n);
1927 argarray.safe_grow_cleared (n);
1928
1929 /* Copy all the arguments before '...' */
1930 memcpy (argarray.address (),
1931 gimple_call_arg_ptr (call_stmt, 0),
1932 gimple_call_num_args (call_stmt) * sizeof (tree));
1933
1934 /* Append the arguments passed in '...' */
1935 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1936 gimple_call_arg_ptr (id->call_stmt, 0)
1937 + (gimple_call_num_args (id->call_stmt) - nargs),
1938 nargs * sizeof (tree));
1939
1940 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1941 argarray);
1942
1943 argarray.release ();
1944
1945 /* Copy all GIMPLE_CALL flags, location and block, except
1946 GF_CALL_VA_ARG_PACK. */
1947 gimple_call_copy_flags (new_call, call_stmt);
1948 gimple_call_set_va_arg_pack (new_call, false);
1949 gimple_set_location (new_call, gimple_location (stmt));
1950 gimple_set_block (new_call, gimple_block (stmt));
1951 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1952
1953 gsi_replace (&copy_gsi, new_call, false);
1954 stmt = new_call;
1955 }
1956 else if (call_stmt
1957 && id->call_stmt
1958 && (decl = gimple_call_fndecl (stmt))
1959 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
1960 {
1961 /* __builtin_va_arg_pack_len () should be replaced by
1962 the number of anonymous arguments. */
1963 size_t nargs = gimple_call_num_args (id->call_stmt);
1964 tree count, p;
1965 gimple *new_stmt;
1966
1967 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1968 nargs--;
1969
1970 if (!gimple_call_lhs (stmt))
1971 {
1972 /* Drop unused calls. */
1973 gsi_remove (&copy_gsi, false);
1974 continue;
1975 }
1976 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1977 {
1978 count = build_int_cst (integer_type_node, nargs);
1979 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1980 gsi_replace (&copy_gsi, new_stmt, false);
1981 stmt = new_stmt;
1982 }
1983 else if (nargs != 0)
1984 {
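/* The call being inlined itself forwards __builtin_va_arg_pack (), so the
   final count is not known yet: keep the __builtin_va_arg_pack_len () call
   and add the number of anonymous arguments explicitly passed at that call
   to its result.  */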
1985 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1986 count = build_int_cst (integer_type_node, nargs);
1987 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1988 PLUS_EXPR, newlhs, count);
1989 gimple_call_set_lhs (stmt, newlhs);
1990 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1991 }
1992 }
1993 else if (call_stmt
1994 && id->call_stmt
1995 && gimple_call_internal_p (stmt)
1996 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1997 {
1998 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1999 gsi_remove (&copy_gsi, false);
2000 continue;
2001 }
2002
2003 /* Statements produced by inlining can be unfolded, especially
2004 when we constant propagated some operands. We can't fold
2005 them right now for two reasons:
2006 1) folding requires SSA_NAME_DEF_STMTs to be correct
2007 2) we can't change function calls to builtins.
2008 So we just mark the statement for later folding. We mark
2009 all new statements, instead of just the statements that changed
2010 by some nontrivial substitution, so even statements made
2011 foldable indirectly are updated. If this turns out to be
2012 expensive, copy_body can be told to watch for nontrivial
2013 changes. */
2014 if (id->statements_to_fold)
2015 id->statements_to_fold->add (stmt);
2016
2017 /* We're duplicating a CALL_EXPR. Find any corresponding
2018 callgraph edges and update or duplicate them. */
2019 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2020 {
2021 struct cgraph_edge *edge;
2022
2023 switch (id->transform_call_graph_edges)
2024 {
2025 case CB_CGE_DUPLICATE:
2026 edge = id->src_node->get_edge (orig_stmt);
2027 if (edge)
2028 {
2029 struct cgraph_edge *old_edge = edge;
2030 profile_count old_cnt = edge->count;
2031 edge = edge->clone (id->dst_node, call_stmt,
2032 gimple_uid (stmt),
2033 num, den,
2034 true);
2035
2036 /* Speculative calls consist of two edges - direct and
2037 indirect. Duplicate the whole thing and distribute
2038 frequencies accordingly. */
2039 if (edge->speculative)
2040 {
2041 struct cgraph_edge *direct, *indirect;
2042 struct ipa_ref *ref;
2043
2044 gcc_assert (!edge->indirect_unknown_callee);
2045 old_edge->speculative_call_info (direct, indirect, ref);
2046
2047 profile_count indir_cnt = indirect->count;
2048 indirect = indirect->clone (id->dst_node, call_stmt,
2049 gimple_uid (stmt),
2050 num, den,
2051 true);
2052
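/* Distribute the copied block's count between the direct and indirect
   edges in the same proportion as their original counts.  */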
2053 profile_probability prob
2054 = indir_cnt.probability_in (old_cnt + indir_cnt);
2055 indirect->count
2056 = copy_basic_block->count.apply_probability (prob);
2057 edge->count = copy_basic_block->count - indirect->count;
2058 id->dst_node->clone_reference (ref, stmt);
2059 }
2060 else
2061 edge->count = copy_basic_block->count;
2062 }
2063 break;
2064
2065 case CB_CGE_MOVE_CLONES:
2066 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2067 call_stmt);
2068 edge = id->dst_node->get_edge (stmt);
2069 break;
2070
2071 case CB_CGE_MOVE:
2072 edge = id->dst_node->get_edge (orig_stmt);
2073 if (edge)
2074 edge->set_call_stmt (call_stmt);
2075 break;
2076
2077 default:
2078 gcc_unreachable ();
2079 }
2080
2081 /* Constant propagation on arguments done during inlining
2082 may create a new direct call. Produce an edge for it. */
2083 if ((!edge
2084 || (edge->indirect_inlining_edge
2085 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2086 && id->dst_node->definition
2087 && (fn = gimple_call_fndecl (stmt)) != NULL)
2088 {
2089 struct cgraph_node *dest = cgraph_node::get_create (fn);
2090
2091 /* We have a missing edge in the callgraph. This can happen
2092 when previous inlining turned an indirect call into a
2093 direct call by constant propagating arguments, or when we are
2094 producing a dead clone (for further cloning). In all
2095 other cases we hit a bug (incorrect node sharing is the
2096 most common reason for missing edges). */
2097 gcc_assert (!dest->definition
2098 || dest->address_taken
2099 || !id->src_node->definition
2100 || !id->dst_node->definition);
2101 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2102 id->dst_node->create_edge_including_clones
2103 (dest, orig_stmt, call_stmt, bb->count,
2104 CIF_ORIGINALLY_INDIRECT_CALL);
2105 else
2106 id->dst_node->create_edge (dest, call_stmt,
2107 bb->count)->inline_failed
2108 = CIF_ORIGINALLY_INDIRECT_CALL;
2109 if (dump_file)
2110 {
2111 fprintf (dump_file, "Created new direct edge to %s\n",
2112 dest->name ());
2113 }
2114 }
2115
2116 notice_special_calls (as_a <gcall *> (stmt));
2117 }
2118
2119 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2120 id->eh_map, id->eh_lp_nr);
2121
2122 gsi_next (&copy_gsi);
2123 }
2124 while (!gsi_end_p (copy_gsi));
2125
2126 copy_gsi = gsi_last_bb (copy_basic_block);
2127 }
2128
2129 return copy_basic_block;
2130 }
2131
2132 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2133 form is quite easy, since dominator relationship for old basic blocks does
2134 not change.
2135
2136 There is however an exception: inlining might change the dominator
2137 relation across EH edges from basic blocks within the inlined function
2138 to landing pads in the function we inline into.
2139 
2140 The function fills in PHI_RESULTs of such PHI nodes if they refer
2141 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2142 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2143 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2144 set, and this means that there will be no overlapping live ranges
2145 for the underlying symbol.
2146 
2147 This might change in the future if we allow redirecting of EH edges;
2148 we might then want to change the way the CFG is built pre-inlining
2149 to include all the possible edges. */
2150 static void
2151 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2152 bool can_throw, bool nonlocal_goto)
2153 {
2154 edge e;
2155 edge_iterator ei;
2156
2157 FOR_EACH_EDGE (e, ei, bb->succs)
2158 if (!e->dest->aux
2159 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2160 {
2161 gphi *phi;
2162 gphi_iterator si;
2163
2164 if (!nonlocal_goto)
2165 gcc_assert (e->flags & EDGE_EH);
2166
2167 if (!can_throw)
2168 gcc_assert (!(e->flags & EDGE_EH));
2169
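/* For every PHI node in the destination block, reuse for the new edge E
   the argument already present on the edge coming from RET_BB.  */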
2170 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2171 {
2172 edge re;
2173
2174 phi = si.phi ();
2175
2176 /* For abnormal goto/call edges the receiver can be the
2177 ENTRY_BLOCK. Do not assert this cannot happen. */
2178
2179 gcc_assert ((e->flags & EDGE_EH)
2180 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2181
2182 re = find_edge (ret_bb, e->dest);
2183 gcc_checking_assert (re);
2184 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2185 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2186
2187 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2188 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2189 }
2190 }
2191 }
2192
2193
2194 /* Copy edges from BB into its copy constructed earlier, scale profile
2195 accordingly. Edges will be taken care of later. The aux
2196 pointers are assumed to point to the copies of each BB. Return true if any
2197 debug stmts are left after a statement that must end the basic block. */
2198
2199 static bool
2200 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2201 basic_block ret_bb, basic_block abnormal_goto_dest,
2202 copy_body_data *id)
2203 {
2204 basic_block new_bb = (basic_block) bb->aux;
2205 edge_iterator ei;
2206 edge old_edge;
2207 gimple_stmt_iterator si;
2208 bool need_debug_cleanup = false;
2209
2210 /* Use the indices from the original blocks to create edges for the
2211 new ones. */
2212 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2213 if (!(old_edge->flags & EDGE_EH))
2214 {
2215 edge new_edge;
2216 int flags = old_edge->flags;
2217 location_t locus = old_edge->goto_locus;
2218
2219 /* Return edges do get a FALLTHRU flag when they get inlined. */
2220 if (old_edge->dest->index == EXIT_BLOCK
2221 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2222 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2223 flags |= EDGE_FALLTHRU;
2224
2225 new_edge
2226 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2227 new_edge->probability = old_edge->probability;
2228 if (!id->reset_location)
2229 new_edge->goto_locus = remap_location (locus, id);
2230 }
2231
2232 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2233 return false;
2234
2235 /* When doing function splitting, we must decrease the count of the return block
2236 which was previously reachable from blocks we did not copy. */
2237 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2238 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2239 if (old_edge->src->index != ENTRY_BLOCK
2240 && !old_edge->src->aux)
2241 new_bb->count -= old_edge->count ().apply_scale (num, den);
2242
2243 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2244 {
2245 gimple *copy_stmt;
2246 bool can_throw, nonlocal_goto;
2247
2248 copy_stmt = gsi_stmt (si);
2249 if (!is_gimple_debug (copy_stmt))
2250 update_stmt (copy_stmt);
2251
2252 /* Do this before the possible split_block. */
2253 gsi_next (&si);
2254
2255 /* If this tree could throw an exception, there are two
2256 cases where we need to add abnormal edge(s): the
2257 tree wasn't in a region and there is a "current
2258 region" in the caller; or the original tree had
2259 EH edges. In both cases split the block after the tree,
2260 and add abnormal edge(s) as needed; we need both
2261 those from the callee and the caller.
2262 We check whether the copy can throw, because the const
2263 propagation can change an INDIRECT_REF which throws
2264 into a COMPONENT_REF which doesn't. If the copy
2265 can throw, the original could also throw. */
2266 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2267 nonlocal_goto
2268 = (stmt_can_make_abnormal_goto (copy_stmt)
2269 && !computed_goto_p (copy_stmt));
2270
2271 if (can_throw || nonlocal_goto)
2272 {
2273 if (!gsi_end_p (si))
2274 {
2275 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2276 gsi_next (&si);
2277 if (gsi_end_p (si))
2278 need_debug_cleanup = true;
2279 }
2280 if (!gsi_end_p (si))
2281 /* Note that bb's predecessor edges aren't necessarily
2282 right at this point; split_block doesn't care. */
2283 {
2284 edge e = split_block (new_bb, copy_stmt);
2285
2286 new_bb = e->dest;
2287 new_bb->aux = e->src->aux;
2288 si = gsi_start_bb (new_bb);
2289 }
2290 }
2291
2292 bool update_probs = false;
2293
2294 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2295 {
2296 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2297 update_probs = true;
2298 }
2299 else if (can_throw)
2300 {
2301 make_eh_edges (copy_stmt);
2302 update_probs = true;
2303 }
2304
2305 /* EH edges may not match old edges. Copy as much as possible. */
2306 if (update_probs)
2307 {
2308 edge e;
2309 edge_iterator ei;
2310 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2311
2312 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2313 if ((old_edge->flags & EDGE_EH)
2314 && (e = find_edge (copy_stmt_bb,
2315 (basic_block) old_edge->dest->aux))
2316 && (e->flags & EDGE_EH))
2317 e->probability = old_edge->probability;
2318
2319 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2320 if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2321 e->probability = profile_probability::never ();
2322 }
2323
2324
2325 /* If the call we inline cannot make an abnormal goto, do not add
2326 additional abnormal edges but only retain those already present
2327 in the original function body. */
2328 if (abnormal_goto_dest == NULL)
2329 nonlocal_goto = false;
2330 if (nonlocal_goto)
2331 {
2332 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2333
2334 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2335 nonlocal_goto = false;
2336 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2337 in OpenMP regions which aren't allowed to be left abnormally.
2338 So, no need to add abnormal edge in that case. */
2339 else if (is_gimple_call (copy_stmt)
2340 && gimple_call_internal_p (copy_stmt)
2341 && (gimple_call_internal_fn (copy_stmt)
2342 == IFN_ABNORMAL_DISPATCHER)
2343 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2344 nonlocal_goto = false;
2345 else
2346 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2347 EDGE_ABNORMAL);
2348 }
2349
2350 if ((can_throw || nonlocal_goto)
2351 && gimple_in_ssa_p (cfun))
2352 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2353 can_throw, nonlocal_goto);
2354 }
2355 return need_debug_cleanup;
2356 }
2357
2358 /* Copy the PHIs. All blocks and edges have been copied, and some blocks
2359 were possibly split and new outgoing EH edges inserted.
2360 BB points to the block of the original function and AUX pointers link
2361 the original and newly copied blocks. */
2362
2363 static void
2364 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2365 {
2366 basic_block const new_bb = (basic_block) bb->aux;
2367 edge_iterator ei;
2368 gphi *phi;
2369 gphi_iterator si;
2370 edge new_edge;
2371 bool inserted = false;
2372
2373 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2374 {
2375 tree res, new_res;
2376 gphi *new_phi;
2377
2378 phi = si.phi ();
2379 res = PHI_RESULT (phi);
2380 new_res = res;
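/* PHIs for virtual operands are not copied here; the virtual operand
   web is recreated from scratch after copying.  */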
2381 if (!virtual_operand_p (res))
2382 {
2383 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2384 if (EDGE_COUNT (new_bb->preds) == 0)
2385 {
2386 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2387 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2388 }
2389 else
2390 {
2391 new_phi = create_phi_node (new_res, new_bb);
2392 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2393 {
2394 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2395 bb);
2396 tree arg;
2397 tree new_arg;
2398 edge_iterator ei2;
2399 location_t locus;
2400
2401 /* When doing partial cloning, we allow PHIs on the entry
2402 block as long as all the arguments are the same.
2403 Use any incoming edge to find the argument to copy. */
2404 if (!old_edge)
2405 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2406 if (!old_edge->src->aux)
2407 break;
2408
2409 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2410 new_arg = arg;
2411 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2412 gcc_assert (new_arg);
2413 /* With return slot optimization we can end up with
2414 non-gimple (foo *)&this->m, fix that here. */
2415 if (TREE_CODE (new_arg) != SSA_NAME
2416 && TREE_CODE (new_arg) != FUNCTION_DECL
2417 && !is_gimple_val (new_arg))
2418 {
2419 gimple_seq stmts = NULL;
2420 new_arg = force_gimple_operand (new_arg, &stmts, true,
2421 NULL);
2422 gsi_insert_seq_on_edge (new_edge, stmts);
2423 inserted = true;
2424 }
2425 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2426 if (id->reset_location)
2427 locus = input_location;
2428 else
2429 locus = remap_location (locus, id);
2430 add_phi_arg (new_phi, new_arg, new_edge, locus);
2431 }
2432 }
2433 }
2434 }
2435
2436 /* Commit the delayed edge insertions. */
2437 if (inserted)
2438 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2439 gsi_commit_one_edge_insert (new_edge, NULL);
2440 }
2441
2442
2443 /* Wrapper for remap_decl so it can be used as a callback. */
2444
2445 static tree
2446 remap_decl_1 (tree decl, void *data)
2447 {
2448 return remap_decl (decl, (copy_body_data *) data);
2449 }
2450
2451 /* Build the struct function and associated data structures for the new clone
2452 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2453 cfun to the function of new_fndecl (and current_function_decl too). */
2454
2455 static void
2456 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2457 {
2458 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2459
2460 if (!DECL_ARGUMENTS (new_fndecl))
2461 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2462 if (!DECL_RESULT (new_fndecl))
2463 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2464
2465 /* Register specific tree functions. */
2466 gimple_register_cfg_hooks ();
2467
2468 /* Get clean struct function. */
2469 push_struct_function (new_fndecl);
2470
2471 /* We will rebuild these, so just sanity check that they are empty. */
2472 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2473 gcc_assert (cfun->local_decls == NULL);
2474 gcc_assert (cfun->cfg == NULL);
2475 gcc_assert (cfun->decl == new_fndecl);
2476
2477 /* Copy items we preserve during cloning. */
2478 cfun->static_chain_decl = src_cfun->static_chain_decl;
2479 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2480 cfun->function_end_locus = src_cfun->function_end_locus;
2481 cfun->curr_properties = src_cfun->curr_properties;
2482 cfun->last_verified = src_cfun->last_verified;
2483 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2484 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2485 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2486 cfun->stdarg = src_cfun->stdarg;
2487 cfun->after_inlining = src_cfun->after_inlining;
2488 cfun->can_throw_non_call_exceptions
2489 = src_cfun->can_throw_non_call_exceptions;
2490 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2491 cfun->returns_struct = src_cfun->returns_struct;
2492 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2493
2494 init_empty_tree_cfg ();
2495
2496 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2497
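/* Scale the entry and exit block counts of the clone from the source
   function's entry count to COUNT.  */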
2498 profile_count num = count;
2499 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2500 profile_count::adjust_for_ipa_scaling (&num, &den);
2501
2502 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2503 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2504 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2505 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2506 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2507 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2508 if (src_cfun->eh)
2509 init_eh_for_function ();
2510
2511 if (src_cfun->gimple_df)
2512 {
2513 init_tree_ssa (cfun);
2514 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2515 if (cfun->gimple_df->in_ssa_p)
2516 init_ssa_operands (cfun);
2517 }
2518 }
2519
2520 /* Helper function for copy_cfg_body. Move debug stmts from the end
2521 of NEW_BB to the beginning of successor basic blocks when needed. If the
2522 successor has multiple predecessors, reset them, otherwise keep
2523 their value. */
2524
2525 static void
2526 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2527 {
2528 edge e;
2529 edge_iterator ei;
2530 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2531
2532 if (gsi_end_p (si)
2533 || gsi_one_before_end_p (si)
2534 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2535 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2536 return;
2537
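/* The trailing debug stmts cannot stay after a stmt that must end its
   block: move them to the destination of the last edge and copy them to
   the destinations of the other edges.  */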
2538 FOR_EACH_EDGE (e, ei, new_bb->succs)
2539 {
2540 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2541 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2542 while (is_gimple_debug (gsi_stmt (ssi)))
2543 {
2544 gimple *stmt = gsi_stmt (ssi);
2545 gdebug *new_stmt;
2546 tree var;
2547 tree value;
2548
2549 /* For the last edge move the debug stmts instead of copying
2550 them. */
2551 if (ei_one_before_end_p (ei))
2552 {
2553 si = ssi;
2554 gsi_prev (&ssi);
2555 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2556 {
2557 gimple_debug_bind_reset_value (stmt);
2558 gimple_set_location (stmt, UNKNOWN_LOCATION);
2559 }
2560 gsi_remove (&si, false);
2561 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2562 continue;
2563 }
2564
2565 if (gimple_debug_bind_p (stmt))
2566 {
2567 var = gimple_debug_bind_get_var (stmt);
2568 if (single_pred_p (e->dest))
2569 {
2570 value = gimple_debug_bind_get_value (stmt);
2571 value = unshare_expr (value);
2572 new_stmt = gimple_build_debug_bind (var, value, stmt);
2573 }
2574 else
2575 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2576 }
2577 else if (gimple_debug_source_bind_p (stmt))
2578 {
2579 var = gimple_debug_source_bind_get_var (stmt);
2580 value = gimple_debug_source_bind_get_value (stmt);
2581 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2582 }
2583 else if (gimple_debug_nonbind_marker_p (stmt))
2584 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2585 else
2586 gcc_unreachable ();
2587 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2588 id->debug_stmts.safe_push (new_stmt);
2589 gsi_prev (&ssi);
2590 }
2591 }
2592 }
2593
2594 /* Make a copy of the sub-loops of SRC_PARENT and place them
2595 as siblings of DEST_PARENT. */
2596
2597 static void
2598 copy_loops (copy_body_data *id,
2599 struct loop *dest_parent, struct loop *src_parent)
2600 {
2601 struct loop *src_loop = src_parent->inner;
2602 while (src_loop)
2603 {
2604 if (!id->blocks_to_copy
2605 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2606 {
2607 struct loop *dest_loop = alloc_loop ();
2608
2609 /* Assign the new loop its header and latch and associate
2610 those with the new loop. */
2611 dest_loop->header = (basic_block)src_loop->header->aux;
2612 dest_loop->header->loop_father = dest_loop;
2613 if (src_loop->latch != NULL)
2614 {
2615 dest_loop->latch = (basic_block)src_loop->latch->aux;
2616 dest_loop->latch->loop_father = dest_loop;
2617 }
2618
2619 /* Copy loop meta-data. */
2620 copy_loop_info (src_loop, dest_loop);
2621
2622 /* Finally place it into the loop array and the loop tree. */
2623 place_new_loop (cfun, dest_loop);
2624 flow_loop_tree_node_add (dest_parent, dest_loop);
2625
2626 dest_loop->safelen = src_loop->safelen;
2627 if (src_loop->unroll)
2628 {
2629 dest_loop->unroll = src_loop->unroll;
2630 cfun->has_unroll = true;
2631 }
2632 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2633 if (src_loop->force_vectorize)
2634 {
2635 dest_loop->force_vectorize = true;
2636 cfun->has_force_vectorize_loops = true;
2637 }
2638 if (src_loop->simduid)
2639 {
2640 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2641 cfun->has_simduid_loops = true;
2642 }
2643
2644 /* Recurse. */
2645 copy_loops (id, dest_loop, src_loop);
2646 }
2647 src_loop = src_loop->next;
2648 }
2649 }
2650
2651 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2652
2653 void
2654 redirect_all_calls (copy_body_data * id, basic_block bb)
2655 {
2656 gimple_stmt_iterator si;
2657 gimple *last = last_stmt (bb);
2658 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2659 {
2660 gimple *stmt = gsi_stmt (si);
2661 if (is_gimple_call (stmt))
2662 {
2663 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2664 if (edge)
2665 {
2666 edge->redirect_call_stmt_to_callee ();
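/* If the redirected statement was the last one and can no longer throw,
   drop its EH entry and purge the now-dead EH edges from BB.  */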
2667 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2668 gimple_purge_dead_eh_edges (bb);
2669 }
2670 }
2671 }
2672 }
2673
2674 /* Make a copy of the body of FN so that it can be inserted inline in
2675 another function. Walks FN via CFG, returns new fndecl. */
2676
2677 static tree
2678 copy_cfg_body (copy_body_data * id,
2679 basic_block entry_block_map, basic_block exit_block_map,
2680 basic_block new_entry)
2681 {
2682 tree callee_fndecl = id->src_fn;
2683 /* Original cfun for the callee, doesn't change. */
2684 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2685 struct function *cfun_to_copy;
2686 basic_block bb;
2687 tree new_fndecl = NULL;
2688 bool need_debug_cleanup = false;
2689 int last;
2690 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2691 profile_count num = entry_block_map->count;
2692
2693 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2694
2695 /* Register specific tree functions. */
2696 gimple_register_cfg_hooks ();
2697
2698 /* If we are inlining just a region of the function, make sure to connect
2699 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2700 part of a loop, we must compute the frequency and probability of
2701 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2702 probabilities of edges incoming from the nonduplicated region. */
2703 if (new_entry)
2704 {
2705 edge e;
2706 edge_iterator ei;
2707 den = profile_count::zero ();
2708
2709 FOR_EACH_EDGE (e, ei, new_entry->preds)
2710 if (!e->src->aux)
2711 den += e->count ();
2712 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2713 }
2714
2715 profile_count::adjust_for_ipa_scaling (&num, &den);
2716
2717 /* Must have a CFG here at this point. */
2718 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2719 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2720
2721
2722 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2723 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2724 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2725 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2726
2727 /* Duplicate any exception-handling regions. */
2728 if (cfun->eh)
2729 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2730 remap_decl_1, id);
2731
2732 /* Use aux pointers to map the original blocks to their copies. */
2733 FOR_EACH_BB_FN (bb, cfun_to_copy)
2734 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2735 {
2736 basic_block new_bb = copy_bb (id, bb, num, den);
2737 bb->aux = new_bb;
2738 new_bb->aux = bb;
2739 new_bb->loop_father = entry_block_map->loop_father;
2740 }
2741
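/* Remember how many basic blocks exist now; blocks added past this point
   (e.g. by EH edge insertion below) also need their AUX fields cleared.  */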
2742 last = last_basic_block_for_fn (cfun);
2743
2744 /* Now that we've duplicated the blocks, duplicate their edges. */
2745 basic_block abnormal_goto_dest = NULL;
2746 if (id->call_stmt
2747 && stmt_can_make_abnormal_goto (id->call_stmt))
2748 {
2749 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2750
2751 bb = gimple_bb (id->call_stmt);
2752 gsi_next (&gsi);
2753 if (gsi_end_p (gsi))
2754 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2755 }
2756 FOR_ALL_BB_FN (bb, cfun_to_copy)
2757 if (!id->blocks_to_copy
2758 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2759 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2760 abnormal_goto_dest, id);
2761
2762 if (new_entry)
2763 {
2764 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2765 EDGE_FALLTHRU);
2766 e->probability = profile_probability::always ();
2767 }
2768
2769 /* Duplicate the loop tree, if available and wanted. */
2770 if (loops_for_fn (src_cfun) != NULL
2771 && current_loops != NULL)
2772 {
2773 copy_loops (id, entry_block_map->loop_father,
2774 get_loop (src_cfun, 0));
2775 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2776 loops_state_set (LOOPS_NEED_FIXUP);
2777 }
2778
2779 /* If the loop tree in the source function needed fixup, mark the
2780 destination loop tree for fixup, too. */
2781 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2782 loops_state_set (LOOPS_NEED_FIXUP);
2783
2784 if (gimple_in_ssa_p (cfun))
2785 FOR_ALL_BB_FN (bb, cfun_to_copy)
2786 if (!id->blocks_to_copy
2787 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2788 copy_phis_for_bb (bb, id);
2789
2790 FOR_ALL_BB_FN (bb, cfun_to_copy)
2791 if (bb->aux)
2792 {
2793 if (need_debug_cleanup
2794 && bb->index != ENTRY_BLOCK
2795 && bb->index != EXIT_BLOCK)
2796 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2797 /* Update call edge destinations. This cannot be done before loop
2798 info is updated, because we may split basic blocks. */
2799 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2800 && bb->index != ENTRY_BLOCK
2801 && bb->index != EXIT_BLOCK)
2802 redirect_all_calls (id, (basic_block)bb->aux);
2803 ((basic_block)bb->aux)->aux = NULL;
2804 bb->aux = NULL;
2805 }
2806
2807 /* Zero out AUX fields of blocks newly created during EH edge
2808 insertion. */
2809 for (; last < last_basic_block_for_fn (cfun); last++)
2810 {
2811 if (need_debug_cleanup)
2812 maybe_move_debug_stmts_to_successors (id,
2813 BASIC_BLOCK_FOR_FN (cfun, last));
2814 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2815 /* Update call edge destinations. This cannot be done before loop
2816 info is updated, because we may split basic blocks. */
2817 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2818 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2819 }
2820 entry_block_map->aux = NULL;
2821 exit_block_map->aux = NULL;
2822
2823 if (id->eh_map)
2824 {
2825 delete id->eh_map;
2826 id->eh_map = NULL;
2827 }
2828 if (id->dependence_map)
2829 {
2830 delete id->dependence_map;
2831 id->dependence_map = NULL;
2832 }
2833
2834 return new_fndecl;
2835 }
2836
2837 /* Copy the debug STMT using ID. We deal with these statements in a
2838 special way: if any variable in their VALUE expression wasn't
2839 remapped yet, we won't remap it, because that would get decl uids
2840 out of sync, causing codegen differences between -g and -g0. If
2841 this arises, we drop the VALUE expression altogether. */
2842
2843 static void
2844 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2845 {
2846 tree t, *n;
2847 struct walk_stmt_info wi;
2848
2849 if (gimple_block (stmt))
2850 {
2851 n = id->decl_map->get (gimple_block (stmt));
2852 gimple_set_block (stmt, n ? *n : id->block);
2853 }
2854
2855 if (gimple_debug_nonbind_marker_p (stmt))
2856 return;
2857
2858 /* Remap all the operands in COPY. */
2859 memset (&wi, 0, sizeof (wi));
2860 wi.info = id;
2861
2862 processing_debug_stmt = 1;
2863
2864 if (gimple_debug_source_bind_p (stmt))
2865 t = gimple_debug_source_bind_get_var (stmt);
2866 else if (gimple_debug_bind_p (stmt))
2867 t = gimple_debug_bind_get_var (stmt);
2868 else
2869 gcc_unreachable ();
2870
2871 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2872 && (n = id->debug_map->get (t)))
2873 {
2874 gcc_assert (VAR_P (*n));
2875 t = *n;
2876 }
2877 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2878 /* T is a non-localized variable. */;
2879 else
2880 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2881
2882 if (gimple_debug_bind_p (stmt))
2883 {
2884 gimple_debug_bind_set_var (stmt, t);
2885
2886 if (gimple_debug_bind_has_value_p (stmt))
2887 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2888 remap_gimple_op_r, &wi, NULL);
2889
2890 /* Punt if any decl couldn't be remapped. */
2891 if (processing_debug_stmt < 0)
2892 gimple_debug_bind_reset_value (stmt);
2893 }
2894 else if (gimple_debug_source_bind_p (stmt))
2895 {
2896 gimple_debug_source_bind_set_var (stmt, t);
2897 /* When inlining, if the source bind refers to one of the optimized
2898 away parameters, change the source bind into a normal debug bind
2899 referring to the corresponding DEBUG_EXPR_DECL that should have
2900 been bound before the call stmt. */
2901 t = gimple_debug_source_bind_get_value (stmt);
2902 if (t != NULL_TREE
2903 && TREE_CODE (t) == PARM_DECL
2904 && id->call_stmt)
2905 {
2906 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2907 unsigned int i;
2908 if (debug_args != NULL)
2909 {
2910 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2911 if ((**debug_args)[i] == DECL_ORIGIN (t)
2912 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2913 {
2914 t = (**debug_args)[i + 1];
2915 stmt->subcode = GIMPLE_DEBUG_BIND;
2916 gimple_debug_bind_set_value (stmt, t);
2917 break;
2918 }
2919 }
2920 }
2921 if (gimple_debug_source_bind_p (stmt))
2922 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2923 remap_gimple_op_r, &wi, NULL);
2924 }
2925
2926 processing_debug_stmt = 0;
2927
2928 update_stmt (stmt);
2929 }
2930
2931 /* Process deferred debug stmts. In order to give values better odds
2932 of being successfully remapped, we delay the processing of debug
2933 stmts until all other stmts that might require remapping are
2934 processed. */
2935
2936 static void
2937 copy_debug_stmts (copy_body_data *id)
2938 {
2939 size_t i;
2940 gdebug *stmt;
2941
2942 if (!id->debug_stmts.exists ())
2943 return;
2944
2945 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2946 copy_debug_stmt (stmt, id);
2947
2948 id->debug_stmts.release ();
2949 }
2950
2951 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2952 another function. */
2953
2954 static tree
2955 copy_tree_body (copy_body_data *id)
2956 {
2957 tree fndecl = id->src_fn;
2958 tree body = DECL_SAVED_TREE (fndecl);
2959
2960 walk_tree (&body, copy_tree_body_r, id, NULL);
2961
2962 return body;
2963 }
2964
2965 /* Make a copy of the body of FN so that it can be inserted inline in
2966 another function. */
2967
2968 static tree
2969 copy_body (copy_body_data *id,
2970 basic_block entry_block_map, basic_block exit_block_map,
2971 basic_block new_entry)
2972 {
2973 tree fndecl = id->src_fn;
2974 tree body;
2975
2976 /* If this body has a CFG, walk CFG and copy. */
2977 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2978 body = copy_cfg_body (id, entry_block_map, exit_block_map,
2979 new_entry);
2980 copy_debug_stmts (id);
2981
2982 return body;
2983 }
2984
2985 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2986 defined in function FN, or of a data member thereof. */
2987
2988 static bool
2989 self_inlining_addr_expr (tree value, tree fn)
2990 {
2991 tree var;
2992
2993 if (TREE_CODE (value) != ADDR_EXPR)
2994 return false;
2995
2996 var = get_base_address (TREE_OPERAND (value, 0));
2997
2998 return var && auto_var_in_fn_p (var, fn);
2999 }
3000
3001 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3002 lexical block and line number information from base_stmt, if given,
3003 or from the last stmt of the block otherwise. */
3004
3005 static gimple *
3006 insert_init_debug_bind (copy_body_data *id,
3007 basic_block bb, tree var, tree value,
3008 gimple *base_stmt)
3009 {
3010 gimple *note;
3011 gimple_stmt_iterator gsi;
3012 tree tracked_var;
3013
3014 if (!gimple_in_ssa_p (id->src_cfun))
3015 return NULL;
3016
3017 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3018 return NULL;
3019
3020 tracked_var = target_for_debug_bind (var);
3021 if (!tracked_var)
3022 return NULL;
3023
3024 if (bb)
3025 {
3026 gsi = gsi_last_bb (bb);
3027 if (!base_stmt && !gsi_end_p (gsi))
3028 base_stmt = gsi_stmt (gsi);
3029 }
3030
3031 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3032
3033 if (bb)
3034 {
3035 if (!gsi_end_p (gsi))
3036 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3037 else
3038 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3039 }
3040
3041 return note;
3042 }
3043
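/* Insert INIT_STMT, if any, at the end of basic block BB, regimplifying
   its operands and, when var-tracking assignments are enabled, emitting a
   debug bind for the initialized variable.  */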
3044 static void
3045 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3046 {
3047 /* If VAR represents a zero-sized variable, it's possible that the
3048 assignment statement may result in no gimple statements. */
3049 if (init_stmt)
3050 {
3051 gimple_stmt_iterator si = gsi_last_bb (bb);
3052
3053 /* We can end up with init statements that store to a non-register
3054 from a rhs with a conversion. Handle that here by forcing the
3055 rhs into a temporary. gimple_regimplify_operands is not
3056 prepared to do this for us. */
3057 if (!is_gimple_debug (init_stmt)
3058 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3059 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3060 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3061 {
3062 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3063 gimple_expr_type (init_stmt),
3064 gimple_assign_rhs1 (init_stmt));
3065 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3066 GSI_NEW_STMT);
3067 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3068 gimple_assign_set_rhs1 (init_stmt, rhs);
3069 }
3070 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3071 gimple_regimplify_operands (init_stmt, &si);
3072
3073 if (!is_gimple_debug (init_stmt))
3074 {
3075 tree def = gimple_assign_lhs (init_stmt);
3076 insert_init_debug_bind (id, bb, def, def, init_stmt);
3077 }
3078 }
3079 }
3080
3081 /* Initialize parameter P with VALUE. If needed, produce an init statement
3082 at the end of BB. When BB is NULL, we return the init statement to be
3083 output later. */
3084 static gimple *
3085 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3086 basic_block bb, tree *vars)
3087 {
3088 gimple *init_stmt = NULL;
3089 tree var;
3090 tree rhs = value;
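/* The SSA default definition of the parameter in the source function,
   if there is one.  */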
3091 tree def = (gimple_in_ssa_p (cfun)
3092 ? ssa_default_def (id->src_cfun, p) : NULL);
3093
3094 if (value
3095 && value != error_mark_node
3096 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3097 {
3098 /* If we can match up types by promotion/demotion do so. */
3099 if (fold_convertible_p (TREE_TYPE (p), value))
3100 rhs = fold_convert (TREE_TYPE (p), value);
3101 else
3102 {
3103 /* ??? For valid programs we should not end up here.
3104 Still if we end up with truly mismatched types here, fall back
3105 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3106 GIMPLE to the following passes. */
3107 if (!is_gimple_reg_type (TREE_TYPE (value))
3108 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3109 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3110 else
3111 rhs = build_zero_cst (TREE_TYPE (p));
3112 }
3113 }
3114
3115 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3116 here since the type of this decl must be visible to the calling
3117 function. */
3118 var = copy_decl_to_var (p, id);
3119
3120 /* Declare this new variable. */
3121 DECL_CHAIN (var) = *vars;
3122 *vars = var;
3123
3124 /* Make gimplifier happy about this variable. */
3125 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3126
3127 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3128 we would not need to create a new variable here at all, if it
3129 weren't for debug info. Still, we can just use the argument
3130 value. */
3131 if (TREE_READONLY (p)
3132 && !TREE_ADDRESSABLE (p)
3133 && value && !TREE_SIDE_EFFECTS (value)
3134 && !def)
3135 {
3136 /* We may produce non-gimple trees by adding NOPs or introduce
3137 invalid sharing when the operand is not really constant.
3138 It is not a big deal to prohibit constant propagation here as
3139 we will constant propagate in the DOM1 pass anyway. */
3140 if (is_gimple_min_invariant (value)
3141 && useless_type_conversion_p (TREE_TYPE (p),
3142 TREE_TYPE (value))
3143 /* We have to be very careful about ADDR_EXPR. Make sure
3144 the base variable isn't a local variable of the inlined
3145 function, e.g., when doing recursive inlining, direct or
3146 mutually-recursive or whatever, which is why we don't
3147 just test whether fn == current_function_decl. */
3148 && ! self_inlining_addr_expr (value, fn))
3149 {
3150 insert_decl_map (id, p, value);
3151 insert_debug_decl_map (id, p, var);
3152 return insert_init_debug_bind (id, bb, var, value, NULL);
3153 }
3154 }
3155
3156 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3157 that way, when the PARM_DECL is encountered, it will be
3158 automatically replaced by the VAR_DECL. */
3159 insert_decl_map (id, p, var);
3160
3161 /* Even if P was TREE_READONLY, the new VAR should not be.
3162 In the original code, we would have constructed a
3163 temporary, and then the function body would have never
3164 changed the value of P. However, now, we will be
3165 constructing VAR directly. The constructor body may
3166 change its value multiple times as it is being
3167 constructed. Therefore, it must not be TREE_READONLY;
3168 the back-end assumes that TREE_READONLY variable is
3169 assigned to only once. */
3170 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3171 TREE_READONLY (var) = 0;
3172
3173 /* If there is no setup required and we are in SSA, take the easy route
3174 replacing all SSA names representing the function parameter by the
3175 SSA name passed to the function.
3176 
3177 We need to construct a map for the variable anyway as it might be used
3178 in different SSA names when the parameter is set in the function.
3179 
3180 Do the replacement at -O0 for const arguments replaced by a constant.
3181 This is important for builtin_constant_p and other constructs requiring
3182 a constant argument to be visible in the inlined function body. */
3183 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3184 && (optimize
3185 || (TREE_READONLY (p)
3186 && is_gimple_min_invariant (rhs)))
3187 && (TREE_CODE (rhs) == SSA_NAME
3188 || is_gimple_min_invariant (rhs))
3189 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3190 {
3191 insert_decl_map (id, def, rhs);
3192 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3193 }
3194
3195 /* If the value of the argument is never used, do not bother initializing
3196 it. */
3197 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3198 {
3199 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3200 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3201 }
3202
3203 /* Initialize this VAR_DECL from the equivalent argument. Convert
3204 the argument to the proper type in case it was promoted. */
3205 if (value)
3206 {
3207 if (rhs == error_mark_node)
3208 {
3209 insert_decl_map (id, p, var);
3210 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3211 }
3212
3213 STRIP_USELESS_TYPE_CONVERSION (rhs);
3214
3215 /* If we are in SSA form, properly remap the default definition
3216 or assign to a dummy SSA name if the parameter is unused and
3217 we are not optimizing. */
3218 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3219 {
3220 if (def)
3221 {
3222 def = remap_ssa_name (def, id);
3223 init_stmt = gimple_build_assign (def, rhs);
3224 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3225 set_ssa_default_def (cfun, var, NULL);
3226 }
3227 else if (!optimize)
3228 {
3229 def = make_ssa_name (var);
3230 init_stmt = gimple_build_assign (def, rhs);
3231 }
3232 }
3233 else
3234 init_stmt = gimple_build_assign (var, rhs);
3235
3236 if (bb && init_stmt)
3237 insert_init_stmt (id, bb, init_stmt);
3238 }
3239 return init_stmt;
3240 }
3241
3242 /* Generate code to initialize the parameters of the function at the
3243 top of the stack in ID from the GIMPLE_CALL STMT. */
3244
3245 static void
3246 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3247 tree fn, basic_block bb)
3248 {
3249 tree parms;
3250 size_t i;
3251 tree p;
3252 tree vars = NULL_TREE;
3253 tree static_chain = gimple_call_chain (stmt);
3254
3255 /* Figure out what the parameters are. */
3256 parms = DECL_ARGUMENTS (fn);
3257
3258 /* Loop through the parameter declarations, replacing each with an
3259 equivalent VAR_DECL, appropriately initialized. */
3260 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3261 {
3262 tree val;
3263 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3264 setup_one_parameter (id, p, val, fn, bb, &vars);
3265 }
3266 /* After remapping parameters remap their types. This has to be done
3267 in a second loop over all parameters to appropriately remap
3268 variable sized arrays when the size is specified in a
3269 parameter following the array. */
3270 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3271 {
3272 tree *varp = id->decl_map->get (p);
3273 if (varp && VAR_P (*varp))
3274 {
3275 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3276 ? ssa_default_def (id->src_cfun, p) : NULL);
3277 tree var = *varp;
3278 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3279 /* Also remap the default definition if it was remapped
3280 to the default definition of the parameter replacement
3281 by the parameter setup. */
3282 if (def)
3283 {
3284 tree *defp = id->decl_map->get (def);
3285 if (defp
3286 && TREE_CODE (*defp) == SSA_NAME
3287 && SSA_NAME_VAR (*defp) == var)
3288 TREE_TYPE (*defp) = TREE_TYPE (var);
3289 }
3290 }
3291 }
3292
3293 /* Initialize the static chain. */
3294 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3295 gcc_assert (fn != current_function_decl);
3296 if (p)
3297 {
3298 /* No static chain? Seems like a bug in tree-nested.c. */
3299 gcc_assert (static_chain);
3300
3301 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3302 }
3303
3304 declare_inline_vars (id->block, vars);
3305 }
3306
3307
3308 /* Declare a return variable to replace the RESULT_DECL for the
3309 function we are calling. An appropriate DECL_STMT is returned.
3310 The USE_STMT is filled to contain a use of the declaration to
3311 indicate the return value of the function.
3312
3313 RETURN_SLOT, if non-null, is the place where the result is to be stored. It
3314 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3315 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3316
3317 The return value is a (possibly null) value that holds the result
3318 as seen by the caller. */
3319
3320 static tree
3321 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3322 basic_block entry_bb)
3323 {
3324 tree callee = id->src_fn;
3325 tree result = DECL_RESULT (callee);
3326 tree callee_type = TREE_TYPE (result);
3327 tree caller_type;
3328 tree var, use;
3329
3330 /* Handle type-mismatches in the function declaration return type
3331 vs. the call expression. */
3332 if (modify_dest)
3333 caller_type = TREE_TYPE (modify_dest);
3334 else
3335 caller_type = TREE_TYPE (TREE_TYPE (callee));
3336
3337 /* We don't need to do anything for functions that don't return anything. */
3338 if (VOID_TYPE_P (callee_type))
3339 return NULL_TREE;
3340
3341 /* If there was a return slot, then the return value is the
3342 dereferenced address of that object. */
3343 if (return_slot)
3344 {
3345 /* The front end shouldn't have used both return_slot and
3346 a modify expression. */
3347 gcc_assert (!modify_dest);
3348 if (DECL_BY_REFERENCE (result))
3349 {
3350 tree return_slot_addr = build_fold_addr_expr (return_slot);
3351 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3352
3353 /* We are going to construct *&return_slot and we can't do that
3354 for variables believed to be not addressable.
3355
3356 FIXME: This check can possibly trigger, because values returned
3357 via the return slot optimization are not believed to have their address
3358 taken by alias analysis. */
3359 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3360 var = return_slot_addr;
3361 }
3362 else
3363 {
3364 var = return_slot;
3365 gcc_assert (TREE_CODE (var) != SSA_NAME);
3366 if (TREE_ADDRESSABLE (result))
3367 mark_addressable (var);
3368 }
3369 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3370 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3371 && !DECL_GIMPLE_REG_P (result)
3372 && DECL_P (var))
3373 DECL_GIMPLE_REG_P (var) = 0;
3374 use = NULL;
3375 goto done;
3376 }
3377
3378 /* All types requiring non-trivial constructors should have been handled. */
3379 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3380
3381 /* Attempt to avoid creating a new temporary variable. */
3382 if (modify_dest
3383 && TREE_CODE (modify_dest) != SSA_NAME)
3384 {
3385 bool use_it = false;
3386
3387 /* We can't use MODIFY_DEST if there's type promotion involved. */
3388 if (!useless_type_conversion_p (callee_type, caller_type))
3389 use_it = false;
3390
3391 /* ??? If we're assigning to a variable sized type, then we must
3392 reuse the destination variable, because we've no good way to
3393 create variable sized temporaries at this point. */
3394 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3395 use_it = true;
3396
3397 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3398 reuse it as the result of the call directly. Don't do this if
3399 it would promote MODIFY_DEST to addressable. */
3400 else if (TREE_ADDRESSABLE (result))
3401 use_it = false;
3402 else
3403 {
3404 tree base_m = get_base_address (modify_dest);
3405
3406 /* If the base isn't a decl, then it's a pointer, and we don't
3407 know where that's going to go. */
3408 if (!DECL_P (base_m))
3409 use_it = false;
3410 else if (is_global_var (base_m))
3411 use_it = false;
3412 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3413 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3414 && !DECL_GIMPLE_REG_P (result)
3415 && DECL_GIMPLE_REG_P (base_m))
3416 use_it = false;
3417 else if (!TREE_ADDRESSABLE (base_m))
3418 use_it = true;
3419 }
3420
3421 if (use_it)
3422 {
3423 var = modify_dest;
3424 use = NULL;
3425 goto done;
3426 }
3427 }
3428
3429 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3430
3431 var = copy_result_decl_to_var (result, id);
3432 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3433
3434 /* Do not have the rest of GCC warn about this variable as it should
3435 not be visible to the user. */
3436 TREE_NO_WARNING (var) = 1;
3437
3438 declare_inline_vars (id->block, var);
3439
3440 /* Build the use expr. If the return type of the function was
3441 promoted, convert it back to the expected type. */
3442 use = var;
3443 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3444 {
3445 /* If we can match up types by promotion/demotion do so. */
3446 if (fold_convertible_p (caller_type, var))
3447 use = fold_convert (caller_type, var);
3448 else
3449 {
3450 /* ??? For valid programs we should not end up here.
3451 Still if we end up with truly mismatched types here, fall back
3452 to using a MEM_REF to not leak invalid GIMPLE to the following
3453 passes. */
3454 /* Prevent var from being written into SSA form. */
3455 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3456 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3457 DECL_GIMPLE_REG_P (var) = false;
3458 else if (is_gimple_reg_type (TREE_TYPE (var)))
3459 TREE_ADDRESSABLE (var) = true;
3460 use = fold_build2 (MEM_REF, caller_type,
3461 build_fold_addr_expr (var),
3462 build_int_cst (ptr_type_node, 0));
3463 }
3464 }
3465
3466 STRIP_USELESS_TYPE_CONVERSION (use);
3467
3468 if (DECL_BY_REFERENCE (result))
3469 {
3470 TREE_ADDRESSABLE (var) = 1;
3471 var = build_fold_addr_expr (var);
3472 }
3473
3474 done:
3475 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3476 way, when the RESULT_DECL is encountered, it will be
3477 automatically replaced by the VAR_DECL.
3478
3479 When returning by reference, ensure that RESULT_DECL remaps to
3480 a gimple_val. */
3481 if (DECL_BY_REFERENCE (result)
3482 && !is_gimple_val (var))
3483 {
3484 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3485 insert_decl_map (id, result, temp);
3486 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3487 its default_def SSA_NAME. */
3488 if (gimple_in_ssa_p (id->src_cfun)
3489 && is_gimple_reg (result))
3490 {
3491 temp = make_ssa_name (temp);
3492 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3493 }
3494 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3495 }
3496 else
3497 insert_decl_map (id, result, var);
3498
3499 /* Remember this so we can ignore it in remap_decls. */
3500 id->retvar = var;
3501 return use;
3502 }
3503
3504 /* Determine if the function can be copied. If so return NULL. If
3505 not return a string describing the reason for failure. */
3506
3507 const char *
3508 copy_forbidden (struct function *fun)
3509 {
3510 const char *reason = fun->cannot_be_copied_reason;
3511
3512 /* Only examine the function once. */
3513 if (fun->cannot_be_copied_set)
3514 return reason;
3515
3516 /* We cannot copy a function that receives a non-local goto
3517 because we cannot remap the destination label used in the
3518 function that is performing the non-local goto. */
3519 /* ??? Actually, this should be possible, if we work at it.
3520 No doubt there's just a handful of places that simply
3521 assume it doesn't happen and don't substitute properly. */
3522 if (fun->has_nonlocal_label)
3523 {
3524 reason = G_("function %q+F can never be copied "
3525 "because it receives a non-local goto");
3526 goto fail;
3527 }
3528
3529 if (fun->has_forced_label_in_static)
3530 {
3531 reason = G_("function %q+F can never be copied because it saves "
3532 "address of local label in a static variable");
3533 goto fail;
3534 }
3535
3536 fail:
3537 fun->cannot_be_copied_reason = reason;
3538 fun->cannot_be_copied_set = true;
3539 return reason;
3540 }
3541
3542
3543 static const char *inline_forbidden_reason;
3544
3545 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3546 iff a function cannot be inlined. Also sets the reason why. */
3547
3548 static tree
3549 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3550 struct walk_stmt_info *wip)
3551 {
3552 tree fn = (tree) wip->info;
3553 tree t;
3554 gimple *stmt = gsi_stmt (*gsi);
3555
3556 switch (gimple_code (stmt))
3557 {
3558 case GIMPLE_CALL:
3559 /* Refuse to inline an alloca call unless the user explicitly forced it,
3560 as this may change the program's memory overhead drastically when the
3561 function using alloca is called in a loop. In the GCC version present in
3562 SPEC2000, inlining into schedule_block caused it to require 2GB of
3563 RAM instead of 256MB. Don't do so for alloca calls emitted for
3564 VLA objects, as those can't cause unbounded growth (they're always
3565 wrapped inside stack_save/stack_restore regions). */
3566 if (gimple_maybe_alloca_call_p (stmt)
3567 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3568 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3569 {
3570 inline_forbidden_reason
3571 = G_("function %q+F can never be inlined because it uses "
3572 "alloca (override using the always_inline attribute)");
3573 *handled_ops_p = true;
3574 return fn;
3575 }
3576
3577 t = gimple_call_fndecl (stmt);
3578 if (t == NULL_TREE)
3579 break;
3580
3581 /* We cannot inline functions that call setjmp. */
3582 if (setjmp_call_p (t))
3583 {
3584 inline_forbidden_reason
3585 = G_("function %q+F can never be inlined because it uses setjmp");
3586 *handled_ops_p = true;
3587 return t;
3588 }
3589
3590 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3591 switch (DECL_FUNCTION_CODE (t))
3592 {
3593 /* We cannot inline functions that take a variable number of
3594 arguments. */
3595 case BUILT_IN_VA_START:
3596 case BUILT_IN_NEXT_ARG:
3597 case BUILT_IN_VA_END:
3598 inline_forbidden_reason
3599 = G_("function %q+F can never be inlined because it "
3600 "uses variable argument lists");
3601 *handled_ops_p = true;
3602 return t;
3603
3604 case BUILT_IN_LONGJMP:
3605 /* We can't inline functions that call __builtin_longjmp at
3606 all. The non-local goto machinery really requires the
3607 destination be in a different function. If we allow the
3608 function calling __builtin_longjmp to be inlined into the
3609 function calling __builtin_setjmp, Things will Go Awry. */
3610 inline_forbidden_reason
3611 = G_("function %q+F can never be inlined because "
3612 "it uses setjmp-longjmp exception handling");
3613 *handled_ops_p = true;
3614 return t;
3615
3616 case BUILT_IN_NONLOCAL_GOTO:
3617 /* Similarly. */
3618 inline_forbidden_reason
3619 = G_("function %q+F can never be inlined because "
3620 "it uses non-local goto");
3621 *handled_ops_p = true;
3622 return t;
3623
3624 case BUILT_IN_RETURN:
3625 case BUILT_IN_APPLY_ARGS:
3626 /* If a __builtin_apply_args caller would be inlined,
3627 it would be saving arguments of the function it has
3628 been inlined into. Similarly, __builtin_return would
3629 return from the function the call has been inlined into. */
3630 inline_forbidden_reason
3631 = G_("function %q+F can never be inlined because "
3632 "it uses __builtin_return or __builtin_apply_args");
3633 *handled_ops_p = true;
3634 return t;
3635
3636 default:
3637 break;
3638 }
3639 break;
3640
3641 case GIMPLE_GOTO:
3642 t = gimple_goto_dest (stmt);
3643
3644 /* We will not inline a function which uses computed goto. The
3645 addresses of its local labels, which may be tucked into
3646 global storage, are of course not constant across
3647 instantiations, which causes unexpected behavior. */
3648 if (TREE_CODE (t) != LABEL_DECL)
3649 {
3650 inline_forbidden_reason
3651 = G_("function %q+F can never be inlined "
3652 "because it contains a computed goto");
3653 *handled_ops_p = true;
3654 return t;
3655 }
3656 break;
3657
3658 default:
3659 break;
3660 }
3661
3662 *handled_ops_p = false;
3663 return NULL_TREE;
3664 }
3665
3666 /* Return true if FNDECL is a function that cannot be inlined into
3667 another one. */
3668
3669 static bool
3670 inline_forbidden_p (tree fndecl)
3671 {
3672 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3673 struct walk_stmt_info wi;
3674 basic_block bb;
3675 bool forbidden_p = false;
3676
3677 /* First check for shared reasons not to copy the code. */
3678 inline_forbidden_reason = copy_forbidden (fun);
3679 if (inline_forbidden_reason != NULL)
3680 return true;
3681
3682 /* Next, walk the statements of the function looking for
3683 constructs we can't handle or that are non-optimal for inlining. */
3684 hash_set<tree> visited_nodes;
3685 memset (&wi, 0, sizeof (wi));
3686 wi.info = (void *) fndecl;
3687 wi.pset = &visited_nodes;
3688
3689 FOR_EACH_BB_FN (bb, fun)
3690 {
3691 gimple *ret;
3692 gimple_seq seq = bb_seq (bb);
3693 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3694 forbidden_p = (ret != NULL);
3695 if (forbidden_p)
3696 break;
3697 }
3698
3699 return forbidden_p;
3700 }
3701 \f
3702 /* Return false if the function FNDECL cannot be inlined on account of its
3703 attributes, true otherwise. */
3704 static bool
3705 function_attribute_inlinable_p (const_tree fndecl)
3706 {
3707 if (targetm.attribute_table)
3708 {
3709 const_tree a;
3710
3711 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3712 {
3713 const_tree name = TREE_PURPOSE (a);
3714 int i;
3715
3716 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3717 if (is_attribute_p (targetm.attribute_table[i].name, name))
3718 return targetm.function_attribute_inlinable_p (fndecl);
3719 }
3720 }
3721
3722 return true;
3723 }
3724
3725 /* Returns nonzero if FN is a function that does not have any
3726 fundamental inline blocking properties. */
3727
3728 bool
3729 tree_inlinable_function_p (tree fn)
3730 {
3731 bool inlinable = true;
3732 bool do_warning;
3733 tree always_inline;
3734
3735 /* If we've already decided this function shouldn't be inlined,
3736 there's no need to check again. */
3737 if (DECL_UNINLINABLE (fn))
3738 return false;
3739
3740 /* We only warn for functions declared `inline' by the user. */
3741 do_warning = (warn_inline
3742 && DECL_DECLARED_INLINE_P (fn)
3743 && !DECL_NO_INLINE_WARNING_P (fn)
3744 && !DECL_IN_SYSTEM_HEADER (fn));
3745
3746 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3747
3748 if (flag_no_inline
3749 && always_inline == NULL)
3750 {
3751 if (do_warning)
3752 warning (OPT_Winline, "function %q+F can never be inlined because it "
3753 "is suppressed using -fno-inline", fn);
3754 inlinable = false;
3755 }
3756
3757 else if (!function_attribute_inlinable_p (fn))
3758 {
3759 if (do_warning)
3760 warning (OPT_Winline, "function %q+F can never be inlined because it "
3761 "uses attributes conflicting with inlining", fn);
3762 inlinable = false;
3763 }
3764
3765 else if (inline_forbidden_p (fn))
3766 {
3767 /* See if we should warn about uninlinable functions. Previously,
3768 some of these warnings would be issued while trying to expand
3769 the function inline, but that would cause multiple warnings
3770 about functions that would for example call alloca. But since
3771 this is a property of the function, just one warning is enough.
3772 As a bonus we can now give more details about the reason why a
3773 function is not inlinable. */
3774 if (always_inline)
3775 error (inline_forbidden_reason, fn);
3776 else if (do_warning)
3777 warning (OPT_Winline, inline_forbidden_reason, fn);
3778
3779 inlinable = false;
3780 }
3781
3782 /* Squirrel away the result so that we don't have to check again. */
3783 DECL_UNINLINABLE (fn) = !inlinable;
3784
3785 return inlinable;
3786 }
3787
3788 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3789 word size, take a possible memcpy call into account, and return the
3790 cost based on whether we optimize for size or speed according to SPEED_P. */
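/* As a rough worked example (assuming a target where MOVE_MAX_PIECES is 8
   and MOVE_RATIO (speed_p) is 4): a 24-byte structure is charged
   (24 + 8 - 1) / 8 = 3 units, while anything larger than 8 * 4 = 32 bytes
   is charged a flat 4, the assumed cost of a memcpy call (three argument
   setups plus the call itself).  */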
3791
3792 int
3793 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3794 {
3795 HOST_WIDE_INT size;
3796
3797 gcc_assert (!VOID_TYPE_P (type));
3798
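/* Vectors are assumed to be moved in chunks of the target's preferred
   SIMD mode: e.g. (hypothetically) a vector mode of 64 bytes on a target
   whose preferred SIMD width for the element type is 16 bytes costs
   ceil (64 / 16) = 4.  */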
3799 if (TREE_CODE (type) == VECTOR_TYPE)
3800 {
3801 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3802 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3803 int orig_mode_size
3804 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3805 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3806 return ((orig_mode_size + simd_mode_size - 1)
3807 / simd_mode_size);
3808 }
3809
3810 size = int_size_in_bytes (type);
3811
3812 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3813 /* Cost of a memcpy call, 3 arguments and the call. */
3814 return 4;
3815 else
3816 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3817 }
3818
3819 /* Returns the cost of operation CODE, according to WEIGHTS. */
3820
3821 static int
3822 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3823 tree op1 ATTRIBUTE_UNUSED, tree op2)
3824 {
3825 switch (code)
3826 {
3827 /* These are "free" conversions, or their presumed cost
3828 is folded into other operations. */
3829 case RANGE_EXPR:
3830 CASE_CONVERT:
3831 case COMPLEX_EXPR:
3832 case PAREN_EXPR:
3833 case VIEW_CONVERT_EXPR:
3834 return 0;
3835
3836 /* Assign cost of 1 to usual operations.
3837 ??? We may consider mapping RTL costs to this. */
3838 case COND_EXPR:
3839 case VEC_COND_EXPR:
3840 case VEC_PERM_EXPR:
3841
3842 case PLUS_EXPR:
3843 case POINTER_PLUS_EXPR:
3844 case POINTER_DIFF_EXPR:
3845 case MINUS_EXPR:
3846 case MULT_EXPR:
3847 case MULT_HIGHPART_EXPR:
3848
3849 case ADDR_SPACE_CONVERT_EXPR:
3850 case FIXED_CONVERT_EXPR:
3851 case FIX_TRUNC_EXPR:
3852
3853 case NEGATE_EXPR:
3854 case FLOAT_EXPR:
3855 case MIN_EXPR:
3856 case MAX_EXPR:
3857 case ABS_EXPR:
3858 case ABSU_EXPR:
3859
3860 case LSHIFT_EXPR:
3861 case RSHIFT_EXPR:
3862 case LROTATE_EXPR:
3863 case RROTATE_EXPR:
3864
3865 case BIT_IOR_EXPR:
3866 case BIT_XOR_EXPR:
3867 case BIT_AND_EXPR:
3868 case BIT_NOT_EXPR:
3869
3870 case TRUTH_ANDIF_EXPR:
3871 case TRUTH_ORIF_EXPR:
3872 case TRUTH_AND_EXPR:
3873 case TRUTH_OR_EXPR:
3874 case TRUTH_XOR_EXPR:
3875 case TRUTH_NOT_EXPR:
3876
3877 case LT_EXPR:
3878 case LE_EXPR:
3879 case GT_EXPR:
3880 case GE_EXPR:
3881 case EQ_EXPR:
3882 case NE_EXPR:
3883 case ORDERED_EXPR:
3884 case UNORDERED_EXPR:
3885
3886 case UNLT_EXPR:
3887 case UNLE_EXPR:
3888 case UNGT_EXPR:
3889 case UNGE_EXPR:
3890 case UNEQ_EXPR:
3891 case LTGT_EXPR:
3892
3893 case CONJ_EXPR:
3894
3895 case PREDECREMENT_EXPR:
3896 case PREINCREMENT_EXPR:
3897 case POSTDECREMENT_EXPR:
3898 case POSTINCREMENT_EXPR:
3899
3900 case REALIGN_LOAD_EXPR:
3901
3902 case WIDEN_SUM_EXPR:
3903 case WIDEN_MULT_EXPR:
3904 case DOT_PROD_EXPR:
3905 case SAD_EXPR:
3906 case WIDEN_MULT_PLUS_EXPR:
3907 case WIDEN_MULT_MINUS_EXPR:
3908 case WIDEN_LSHIFT_EXPR:
3909
3910 case VEC_WIDEN_MULT_HI_EXPR:
3911 case VEC_WIDEN_MULT_LO_EXPR:
3912 case VEC_WIDEN_MULT_EVEN_EXPR:
3913 case VEC_WIDEN_MULT_ODD_EXPR:
3914 case VEC_UNPACK_HI_EXPR:
3915 case VEC_UNPACK_LO_EXPR:
3916 case VEC_UNPACK_FLOAT_HI_EXPR:
3917 case VEC_UNPACK_FLOAT_LO_EXPR:
3918 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3919 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3920 case VEC_PACK_TRUNC_EXPR:
3921 case VEC_PACK_SAT_EXPR:
3922 case VEC_PACK_FIX_TRUNC_EXPR:
3923 case VEC_PACK_FLOAT_EXPR:
3924 case VEC_WIDEN_LSHIFT_HI_EXPR:
3925 case VEC_WIDEN_LSHIFT_LO_EXPR:
3926 case VEC_DUPLICATE_EXPR:
3927 case VEC_SERIES_EXPR:
3928
3929 return 1;
3930
3931 /* A few special cases of expensive operations. This is useful
3932 to avoid inlining functions that have too many of these. */
3933 case TRUNC_DIV_EXPR:
3934 case CEIL_DIV_EXPR:
3935 case FLOOR_DIV_EXPR:
3936 case ROUND_DIV_EXPR:
3937 case EXACT_DIV_EXPR:
3938 case TRUNC_MOD_EXPR:
3939 case CEIL_MOD_EXPR:
3940 case FLOOR_MOD_EXPR:
3941 case ROUND_MOD_EXPR:
3942 case RDIV_EXPR:
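/* A division or modulus by a compile-time constant is charged like an
   ordinary operation, on the assumption that it will be strength-reduced
   to cheaper multiplies and shifts during expansion.  */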
3943 if (TREE_CODE (op2) != INTEGER_CST)
3944 return weights->div_mod_cost;
3945 return 1;
3946
3947 /* Bit-field insertion needs several shift and mask operations. */
3948 case BIT_INSERT_EXPR:
3949 return 3;
3950
3951 default:
3952 /* We expect a copy assignment with no operator. */
3953 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3954 return 0;
3955 }
3956 }
3957
3958
3959 /* Estimate number of instructions that will be created by expanding
3960 the statements in the statement sequence STMTS.
3961 WEIGHTS contains weights attributed to various constructs. */
3962
3963 int
3964 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3965 {
3966 int cost;
3967 gimple_stmt_iterator gsi;
3968
3969 cost = 0;
3970 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3971 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3972
3973 return cost;
3974 }
3975
3976
3977 /* Estimate number of instructions that will be created by expanding STMT.
3978 WEIGHTS contains weights attributed to various constructs. */
3979
3980 int
3981 estimate_num_insns (gimple *stmt, eni_weights *weights)
3982 {
3983 unsigned cost, i;
3984 enum gimple_code code = gimple_code (stmt);
3985 tree lhs;
3986 tree rhs;
3987
3988 switch (code)
3989 {
3990 case GIMPLE_ASSIGN:
3991 /* Try to estimate the cost of assignments. We have two cases to
3992 deal with:
3993 1) Simple assignments to registers;
3994 2) Stores to things that must live in memory. This includes
3995 "normal" stores to scalars, but also assignments of large
3996 structures, or constructors of big arrays;
3997
3998 Let us look at the first two cases, assuming we have "a = b + C":
3999 <GIMPLE_ASSIGN <var_decl "a">
4000 <plus_expr <var_decl "b"> <constant C>>
4001 If "a" is a GIMPLE register, the assignment to it is free on almost
4002 any target, because "a" usually ends up in a real register. Hence
4003 the only cost of this expression comes from the PLUS_EXPR, and we
4004 can ignore the GIMPLE_ASSIGN.
4005 If "a" is not a GIMPLE register, the assignment to "a" will most
4006 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4007 of moving something into "a", which we compute using the function
4008 estimate_move_cost. */
4009 if (gimple_clobber_p (stmt))
4010 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4011
4012 lhs = gimple_assign_lhs (stmt);
4013 rhs = gimple_assign_rhs1 (stmt);
4014
4015 cost = 0;
4016
4017 /* Account for the cost of moving to / from memory. */
4018 if (gimple_store_p (stmt))
4019 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4020 if (gimple_assign_load_p (stmt))
4021 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4022
4023 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4024 gimple_assign_rhs1 (stmt),
4025 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4026 == GIMPLE_BINARY_RHS
4027 ? gimple_assign_rhs2 (stmt) : NULL);
4028 break;
4029
4030 case GIMPLE_COND:
4031 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4032 gimple_op (stmt, 0),
4033 gimple_op (stmt, 1));
4034 break;
4035
4036 case GIMPLE_SWITCH:
4037 {
4038 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4039 /* Take into account the cost of the switch + guess 2 conditional jumps for
4040 each case label.
4041
4042 TODO: once the switch expansion logic is sufficiently separated, we can
4043 do a better job of estimating the cost of the switch. */
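/* For instance, a switch with 16 labels is estimated at
   floor_log2 (16) * 2 = 8 when optimizing for speed (roughly a balanced
   decision tree) and at 16 * 2 = 32 when optimizing for size.  */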
4044 if (weights->time_based)
4045 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4046 else
4047 cost = gimple_switch_num_labels (switch_stmt) * 2;
4048 }
4049 break;
4050
4051 case GIMPLE_CALL:
4052 {
4053 tree decl;
4054
4055 if (gimple_call_internal_p (stmt))
4056 return 0;
4057 else if ((decl = gimple_call_fndecl (stmt))
4058 && fndecl_built_in_p (decl))
4059 {
4060 /* Do not special case builtins where we see the body.
4061 This just confuses the inliner. */
4062 struct cgraph_node *node;
4063 if (!(node = cgraph_node::get (decl))
4064 || node->definition)
4065 ;
4066 /* For builtins that are likely expanded to nothing or
4067 inlined, do not account operand costs. */
4068 else if (is_simple_builtin (decl))
4069 return 0;
4070 else if (is_inexpensive_builtin (decl))
4071 return weights->target_builtin_call_cost;
4072 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4073 {
4074 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4075 specialize the cheap expansion we do here.
4076 ??? This asks for a more general solution. */
4077 switch (DECL_FUNCTION_CODE (decl))
4078 {
4079 case BUILT_IN_POW:
4080 case BUILT_IN_POWF:
4081 case BUILT_IN_POWL:
4082 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4083 && (real_equal
4084 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4085 &dconst2)))
4086 return estimate_operator_cost
4087 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4088 gimple_call_arg (stmt, 0));
4089 break;
4090
4091 default:
4092 break;
4093 }
4094 }
4095 }
4096
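/* The cost of the call is the call itself plus the cost of moving the
   return value and each argument. For example, with the size weights a
   direct call 'x = f (a, b)' on word-sized scalars comes to
   1 + 1 + 1 + 1 = 4, assuming each scalar move is charged 1.  */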
4097 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4098 if (gimple_call_lhs (stmt))
4099 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4100 weights->time_based);
4101 for (i = 0; i < gimple_call_num_args (stmt); i++)
4102 {
4103 tree arg = gimple_call_arg (stmt, i);
4104 cost += estimate_move_cost (TREE_TYPE (arg),
4105 weights->time_based);
4106 }
4107 break;
4108 }
4109
4110 case GIMPLE_RETURN:
4111 return weights->return_cost;
4112
4113 case GIMPLE_GOTO:
4114 case GIMPLE_LABEL:
4115 case GIMPLE_NOP:
4116 case GIMPLE_PHI:
4117 case GIMPLE_PREDICT:
4118 case GIMPLE_DEBUG:
4119 return 0;
4120
4121 case GIMPLE_ASM:
4122 {
4123 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4124 /* 1000 means infinity. This avoids overflows later
4125 with very long asm statements. */
4126 if (count > 1000)
4127 count = 1000;
4128 /* If this asm is asm inline, count anything as minimum size. */
4129 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4130 count = MIN (1, count);
4131 return MAX (1, count);
4132 }
4133
4134 case GIMPLE_RESX:
4135 /* This is either going to be an external function call with one
4136 argument, or two register copy statements plus a goto. */
4137 return 2;
4138
4139 case GIMPLE_EH_DISPATCH:
4140 /* ??? This is going to turn into a switch statement. Ideally
4141 we'd have a look at the eh region and estimate the number of
4142 edges involved. */
4143 return 10;
4144
4145 case GIMPLE_BIND:
4146 return estimate_num_insns_seq (
4147 gimple_bind_body (as_a <gbind *> (stmt)),
4148 weights);
4149
4150 case GIMPLE_EH_FILTER:
4151 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4152
4153 case GIMPLE_CATCH:
4154 return estimate_num_insns_seq (gimple_catch_handler (
4155 as_a <gcatch *> (stmt)),
4156 weights);
4157
4158 case GIMPLE_TRY:
4159 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4160 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4161
4162 /* OMP directives are generally very expensive. */
4163
4164 case GIMPLE_OMP_RETURN:
4165 case GIMPLE_OMP_SECTIONS_SWITCH:
4166 case GIMPLE_OMP_ATOMIC_STORE:
4167 case GIMPLE_OMP_CONTINUE:
4168 /* ...except these, which are cheap. */
4169 return 0;
4170
4171 case GIMPLE_OMP_ATOMIC_LOAD:
4172 return weights->omp_cost;
4173
4174 case GIMPLE_OMP_FOR:
4175 return (weights->omp_cost
4176 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4177 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4178
4179 case GIMPLE_OMP_PARALLEL:
4180 case GIMPLE_OMP_TASK:
4181 case GIMPLE_OMP_CRITICAL:
4182 case GIMPLE_OMP_MASTER:
4183 case GIMPLE_OMP_TASKGROUP:
4184 case GIMPLE_OMP_ORDERED:
4185 case GIMPLE_OMP_SECTION:
4186 case GIMPLE_OMP_SECTIONS:
4187 case GIMPLE_OMP_SINGLE:
4188 case GIMPLE_OMP_TARGET:
4189 case GIMPLE_OMP_TEAMS:
4190 return (weights->omp_cost
4191 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4192
4193 case GIMPLE_TRANSACTION:
4194 return (weights->tm_cost
4195 + estimate_num_insns_seq (gimple_transaction_body (
4196 as_a <gtransaction *> (stmt)),
4197 weights));
4198
4199 default:
4200 gcc_unreachable ();
4201 }
4202
4203 return cost;
4204 }
4205
4206 /* Estimate number of instructions that will be created by expanding
4207 function FNDECL. WEIGHTS contains weights attributed to various
4208 constructs. */
4209
4210 int
4211 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4212 {
4213 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4214 gimple_stmt_iterator bsi;
4215 basic_block bb;
4216 int n = 0;
4217
4218 gcc_assert (my_function && my_function->cfg);
4219 FOR_EACH_BB_FN (bb, my_function)
4220 {
4221 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4222 n += estimate_num_insns (gsi_stmt (bsi), weights);
4223 }
4224
4225 return n;
4226 }
4227
4228
4229 /* Initializes weights used by estimate_num_insns. */
4230
4231 void
4232 init_inline_once (void)
4233 {
4234 eni_size_weights.call_cost = 1;
4235 eni_size_weights.indirect_call_cost = 3;
4236 eni_size_weights.target_builtin_call_cost = 1;
4237 eni_size_weights.div_mod_cost = 1;
4238 eni_size_weights.omp_cost = 40;
4239 eni_size_weights.tm_cost = 10;
4240 eni_size_weights.time_based = false;
4241 eni_size_weights.return_cost = 1;
4242
4243 /* Estimating time for call is difficult, since we have no idea what the
4244 called function does. In the current uses of eni_time_weights,
4245 underestimating the cost does less harm than overestimating it, so
4246 we choose a rather small value here. */
4247 eni_time_weights.call_cost = 10;
4248 eni_time_weights.indirect_call_cost = 15;
4249 eni_time_weights.target_builtin_call_cost = 1;
4250 eni_time_weights.div_mod_cost = 10;
4251 eni_time_weights.omp_cost = 40;
4252 eni_time_weights.tm_cost = 40;
4253 eni_time_weights.time_based = true;
4254 eni_time_weights.return_cost = 2;
4255 }
4256
4257
4258 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4259
4260 static void
4261 prepend_lexical_block (tree current_block, tree new_block)
4262 {
4263 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4264 BLOCK_SUBBLOCKS (current_block) = new_block;
4265 BLOCK_SUPERCONTEXT (new_block) = current_block;
4266 }
4267
4268 /* Add local variables from CALLEE to CALLER. */
4269
4270 static inline void
4271 add_local_variables (struct function *callee, struct function *caller,
4272 copy_body_data *id)
4273 {
4274 tree var;
4275 unsigned ix;
4276
4277 FOR_EACH_LOCAL_DECL (callee, ix, var)
4278 if (!can_be_nonlocal (var, id))
4279 {
4280 tree new_var = remap_decl (var, id);
4281
4282 /* Remap debug-expressions. */
4283 if (VAR_P (new_var)
4284 && DECL_HAS_DEBUG_EXPR_P (var)
4285 && new_var != var)
4286 {
4287 tree tem = DECL_DEBUG_EXPR (var);
4288 bool old_regimplify = id->regimplify;
4289 id->remapping_type_depth++;
4290 walk_tree (&tem, copy_tree_body_r, id, NULL);
4291 id->remapping_type_depth--;
4292 id->regimplify = old_regimplify;
4293 SET_DECL_DEBUG_EXPR (new_var, tem);
4294 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4295 }
4296 add_local_decl (caller, new_var);
4297 }
4298 }
4299
4300 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4301 have brought in or introduced any debug stmts for SRCVAR. */
4302
4303 static inline void
4304 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4305 {
4306 tree *remappedvarp = id->decl_map->get (srcvar);
4307
4308 if (!remappedvarp)
4309 return;
4310
4311 if (!VAR_P (*remappedvarp))
4312 return;
4313
4314 if (*remappedvarp == id->retvar)
4315 return;
4316
4317 tree tvar = target_for_debug_bind (*remappedvarp);
4318 if (!tvar)
4319 return;
4320
4321 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4322 id->call_stmt);
4323 gimple_seq_add_stmt (bindings, stmt);
4324 }
4325
4326 /* For each inlined variable for which we may have debug bind stmts,
4327 add before GSI a final debug stmt resetting it, marking the end of
4328 its life, so that var-tracking knows it doesn't have to compute
4329 further locations for it. */
4330
4331 static inline void
4332 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4333 {
4334 tree var;
4335 unsigned ix;
4336 gimple_seq bindings = NULL;
4337
4338 if (!gimple_in_ssa_p (id->src_cfun))
4339 return;
4340
4341 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4342 return;
4343
4344 for (var = DECL_ARGUMENTS (id->src_fn);
4345 var; var = DECL_CHAIN (var))
4346 reset_debug_binding (id, var, &bindings);
4347
4348 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4349 reset_debug_binding (id, var, &bindings);
4350
4351 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4352 }
4353
4354 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4355
4356 static bool
4357 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4358 {
4359 tree use_retvar;
4360 tree fn;
4361 hash_map<tree, tree> *dst;
4362 hash_map<tree, tree> *st = NULL;
4363 tree return_slot;
4364 tree modify_dest;
4365 struct cgraph_edge *cg_edge;
4366 cgraph_inline_failed_t reason;
4367 basic_block return_block;
4368 edge e;
4369 gimple_stmt_iterator gsi, stmt_gsi;
4370 bool successfully_inlined = false;
4371 bool purge_dead_abnormal_edges;
4372 gcall *call_stmt;
4373 unsigned int prop_mask, src_properties;
4374 struct function *dst_cfun;
4375 tree simduid;
4376 use_operand_p use;
4377 gimple *simtenter_stmt = NULL;
4378 vec<tree> *simtvars_save;
4379
4380 /* The gimplifier uses input_location in too many places, such as
4381 internal_get_tmp_var (). */
4382 location_t saved_location = input_location;
4383 input_location = gimple_location (stmt);
4384
4385 /* From here on, we're only interested in CALL_EXPRs. */
4386 call_stmt = dyn_cast <gcall *> (stmt);
4387 if (!call_stmt)
4388 goto egress;
4389
4390 cg_edge = id->dst_node->get_edge (stmt);
4391 gcc_checking_assert (cg_edge);
4392 /* First, see if we can figure out what function is being called.
4393 If we cannot, then there is no hope of inlining the function. */
4394 if (cg_edge->indirect_unknown_callee)
4395 goto egress;
4396 fn = cg_edge->callee->decl;
4397 gcc_checking_assert (fn);
4398
4399 /* If FN is a declaration of a function in a nested scope that was
4400 globally declared inline, we don't set its DECL_INITIAL.
4401 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4402 C++ front-end uses it for cdtors to refer to their internal
4403 declarations, that are not real functions. Fortunately those
4404 don't have trees to be saved, so we can tell by checking their
4405 gimple_body. */
4406 if (!DECL_INITIAL (fn)
4407 && DECL_ABSTRACT_ORIGIN (fn)
4408 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4409 fn = DECL_ABSTRACT_ORIGIN (fn);
4410
4411 /* Don't try to inline functions that are not well-suited to inlining. */
4412 if (cg_edge->inline_failed)
4413 {
4414 reason = cg_edge->inline_failed;
4415 /* If this call was originally indirect, we do not want to emit any
4416 inlining related warnings or sorry messages because there are no
4417 guarantees regarding those. */
4418 if (cg_edge->indirect_inlining_edge)
4419 goto egress;
4420
4421 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4422 /* For extern inline functions that get redefined we have always
4423 silently ignored the always_inline flag. Better behavior would
4424 be to keep both bodies and use the extern inline body
4425 for inlining, but we can't do that because frontends overwrite
4426 the body. */
4427 && !cg_edge->callee->local.redefined_extern_inline
4428 /* During early inline pass, report only when optimization is
4429 not turned on. */
4430 && (symtab->global_info_ready
4431 || !optimize
4432 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4433 /* PR 20090218-1_0.c. Body can be provided by another module. */
4434 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4435 {
4436 error ("inlining failed in call to always_inline %q+F: %s", fn,
4437 cgraph_inline_failed_string (reason));
4438 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4439 inform (gimple_location (stmt), "called from here");
4440 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4441 inform (DECL_SOURCE_LOCATION (cfun->decl),
4442 "called from this function");
4443 }
4444 else if (warn_inline
4445 && DECL_DECLARED_INLINE_P (fn)
4446 && !DECL_NO_INLINE_WARNING_P (fn)
4447 && !DECL_IN_SYSTEM_HEADER (fn)
4448 && reason != CIF_UNSPECIFIED
4449 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4450 /* Do not warn about not inlined recursive calls. */
4451 && !cg_edge->recursive_p ()
4452 /* Avoid warnings during early inline pass. */
4453 && symtab->global_info_ready)
4454 {
4455 auto_diagnostic_group d;
4456 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4457 fn, _(cgraph_inline_failed_string (reason))))
4458 {
4459 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4460 inform (gimple_location (stmt), "called from here");
4461 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4462 inform (DECL_SOURCE_LOCATION (cfun->decl),
4463 "called from this function");
4464 }
4465 }
4466 goto egress;
4467 }
4468 id->src_node = cg_edge->callee;
4469
4470 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4471 and redirect to the function being thunked. */
4472 if (id->src_node->thunk.thunk_p)
4473 {
4474 cgraph_edge *edge;
4475 tree virtual_offset = NULL;
4476 profile_count count = cg_edge->count;
4477 tree op;
4478 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4479
4480 cg_edge->remove ();
4481 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4482 gimple_uid (stmt),
4483 profile_count::one (),
4484 profile_count::one (),
4485 true);
4486 edge->count = count;
4487 if (id->src_node->thunk.virtual_offset_p)
4488 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4489 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4490 NULL);
4491 gsi_insert_before (&iter, gimple_build_assign (op,
4492 gimple_call_arg (stmt, 0)),
4493 GSI_NEW_STMT);
4494 gcc_assert (id->src_node->thunk.this_adjusting);
4495 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4496 virtual_offset, id->src_node->thunk.indirect_offset);
4497
4498 gimple_call_set_arg (stmt, 0, op);
4499 gimple_call_set_fndecl (stmt, edge->callee->decl);
4500 update_stmt (stmt);
4501 id->src_node->remove ();
4502 expand_call_inline (bb, stmt, id);
4503 maybe_remove_unused_call_args (cfun, stmt);
4504 return true;
4505 }
4506 fn = cg_edge->callee->decl;
4507 cg_edge->callee->get_untransformed_body ();
4508
4509 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4510 cg_edge->callee->verify ();
4511
4512 /* We will be inlining this callee. */
4513 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4514 id->assign_stmts.create (0);
4515
4516 /* Update the caller's EH personality. */
4517 if (DECL_FUNCTION_PERSONALITY (fn))
4518 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4519 = DECL_FUNCTION_PERSONALITY (fn);
4520
4521 /* Split the block before the GIMPLE_CALL. */
4522 stmt_gsi = gsi_for_stmt (stmt);
4523 gsi_prev (&stmt_gsi);
4524 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4525 bb = e->src;
4526 return_block = e->dest;
4527 remove_edge (e);
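/* After the split, BB ends just before the call and RETURN_BLOCK starts
   with the call statement itself; the copied body of the callee will be
   wired in between the two, and the call statement is later replaced by
   an assignment from the return variable (or simply removed).  */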
4528
4529 /* If the GIMPLE_CALL was the last statement of BB, it may have
4530 been the source of abnormal edges. In this case, schedule
4531 the removal of dead abnormal edges. */
4532 gsi = gsi_start_bb (return_block);
4533 gsi_next (&gsi);
4534 purge_dead_abnormal_edges = gsi_end_p (gsi);
4535
4536 stmt_gsi = gsi_start_bb (return_block);
4537
4538 /* Build a block containing code to initialize the arguments, the
4539 actual inline expansion of the body, and a label for the return
4540 statements within the function to jump to. The type of the
4541 statement expression is the return type of the function call.
4542 ??? If the call does not have an associated block then we will
4543 remap all callee blocks to NULL, effectively dropping most of
4544 its debug information. This should only happen for calls to
4545 artificial decls inserted by the compiler itself. We need to
4546 either link the inlined blocks into the caller block tree or
4547 not refer to them in any way to not break GC for locations. */
4548 if (gimple_block (stmt))
4549 {
4550 /* We want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4551 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4552 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4553 if (loc == UNKNOWN_LOCATION)
4554 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4555 if (loc == UNKNOWN_LOCATION)
4556 loc = BUILTINS_LOCATION;
4557 id->block = make_node (BLOCK);
4558 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4559 BLOCK_SOURCE_LOCATION (id->block) = loc;
4560 prepend_lexical_block (gimple_block (stmt), id->block);
4561 }
4562
4563 /* Local declarations will be replaced by their equivalents in this map. */
4564 st = id->decl_map;
4565 id->decl_map = new hash_map<tree, tree>;
4566 dst = id->debug_map;
4567 id->debug_map = NULL;
4568
4569 /* Record the function we are about to inline. */
4570 id->src_fn = fn;
4571 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4572 id->reset_location = DECL_IGNORED_P (fn);
4573 id->call_stmt = call_stmt;
4574
4575 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4576 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4577 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4578 simtvars_save = id->dst_simt_vars;
4579 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4580 && (simduid = bb->loop_father->simduid) != NULL_TREE
4581 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4582 && single_imm_use (simduid, &use, &simtenter_stmt)
4583 && is_gimple_call (simtenter_stmt)
4584 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4585 vec_alloc (id->dst_simt_vars, 0);
4586 else
4587 id->dst_simt_vars = NULL;
4588
4589 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4590 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4591
4592 /* If the src function contains an IFN_VA_ARG, then so will the dst
4593 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4594 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4595 src_properties = id->src_cfun->curr_properties & prop_mask;
4596 if (src_properties != prop_mask)
4597 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4598
4599 gcc_assert (!id->src_cfun->after_inlining);
4600
4601 id->entry_bb = bb;
4602 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4603 {
4604 gimple_stmt_iterator si = gsi_last_bb (bb);
4605 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4606 NOT_TAKEN),
4607 GSI_NEW_STMT);
4608 }
4609 initialize_inlined_parameters (id, stmt, fn, bb);
4610 if (debug_nonbind_markers_p && debug_inline_points && id->block
4611 && inlined_function_outer_scope_p (id->block))
4612 {
4613 gimple_stmt_iterator si = gsi_last_bb (bb);
4614 gsi_insert_after (&si, gimple_build_debug_inline_entry
4615 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4616 GSI_NEW_STMT);
4617 }
4618
4619 if (DECL_INITIAL (fn))
4620 {
4621 if (gimple_block (stmt))
4622 {
4623 tree *var;
4624
4625 prepend_lexical_block (id->block,
4626 remap_blocks (DECL_INITIAL (fn), id));
4627 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4628 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4629 == NULL_TREE));
4630 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4631 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4632 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4633 under it. The parameters can then be evaluated in the debugger,
4634 but don't show up in backtraces. */
4635 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4636 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4637 {
4638 tree v = *var;
4639 *var = TREE_CHAIN (v);
4640 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4641 BLOCK_VARS (id->block) = v;
4642 }
4643 else
4644 var = &TREE_CHAIN (*var);
4645 }
4646 else
4647 remap_blocks_to_null (DECL_INITIAL (fn), id);
4648 }
4649
4650 /* Return statements in the function body will be replaced by jumps
4651 to the RET_LABEL. */
4652 gcc_assert (DECL_INITIAL (fn));
4653 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4654
4655 /* Find the LHS to which the result of this call is assigned. */
4656 return_slot = NULL;
4657 if (gimple_call_lhs (stmt))
4658 {
4659 modify_dest = gimple_call_lhs (stmt);
4660
4661 /* The function which we are inlining might not return a value,
4662 in which case we should issue a warning that the function
4663 does not return a value. In that case the optimizers will
4664 see that the variable to which the value is assigned was not
4665 initialized. We do not want to issue a warning about that
4666 uninitialized variable. */
4667 if (DECL_P (modify_dest))
4668 TREE_NO_WARNING (modify_dest) = 1;
4669
4670 if (gimple_call_return_slot_opt_p (call_stmt))
4671 {
4672 return_slot = modify_dest;
4673 modify_dest = NULL;
4674 }
4675 }
4676 else
4677 modify_dest = NULL;
4678
4679 /* If we are inlining a call to the C++ operator new, we don't want
4680 to use type based alias analysis on the return value. Otherwise
4681 we may get confused if the compiler sees that the inlined new
4682 function returns a pointer which was just deleted. See bug
4683 33407. */
4684 if (DECL_IS_OPERATOR_NEW (fn))
4685 {
4686 return_slot = NULL;
4687 modify_dest = NULL;
4688 }
4689
4690 /* Declare the return variable for the function. */
4691 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4692
4693 /* Add local vars in this inlined callee to caller. */
4694 add_local_variables (id->src_cfun, cfun, id);
4695
4696 if (dump_enabled_p ())
4697 {
4698 char buf[128];
4699 snprintf (buf, sizeof(buf), "%4.2f",
4700 cg_edge->sreal_frequency ().to_double ());
4701 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4702 call_stmt,
4703 "Inlining %C to %C with frequency %s\n",
4704 id->src_node, id->dst_node, buf);
4705 if (dump_file && (dump_flags & TDF_DETAILS))
4706 {
4707 id->src_node->dump (dump_file);
4708 id->dst_node->dump (dump_file);
4709 }
4710 }
4711
4712 /* This is it. Duplicate the callee body. Assume callee is
4713 pre-gimplified. Note that we must not alter the caller
4714 function in any way before this point, as this CALL_EXPR may be
4715 a self-referential call; if we're calling ourselves, we need to
4716 duplicate our body before altering anything. */
4717 copy_body (id, bb, return_block, NULL);
4718
4719 reset_debug_bindings (id, stmt_gsi);
4720
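/* Once the body has been copied, the inlined copies of the parameters are
   no longer live past this point; emit a clobber for each parameter that
   ended up in memory so its stack slot can be reused (this is skipped
   with -fstack-reuse=none).  */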
4721 if (flag_stack_reuse != SR_NONE)
4722 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4723 if (!TREE_THIS_VOLATILE (p))
4724 {
4725 tree *varp = id->decl_map->get (p);
4726 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4727 {
4728 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4729 gimple *clobber_stmt;
4730 TREE_THIS_VOLATILE (clobber) = 1;
4731 clobber_stmt = gimple_build_assign (*varp, clobber);
4732 gimple_set_location (clobber_stmt, gimple_location (stmt));
4733 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4734 }
4735 }
4736
4737 /* Reset the escaped solution. */
4738 if (cfun->gimple_df)
4739 pt_solution_reset (&cfun->gimple_df->escaped);
4740
4741 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4742 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4743 {
4744 size_t nargs = gimple_call_num_args (simtenter_stmt);
4745 vec<tree> *vars = id->dst_simt_vars;
4746 auto_vec<tree> newargs (nargs + vars->length ());
4747 for (size_t i = 0; i < nargs; i++)
4748 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4749 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4750 {
4751 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4752 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4753 }
4754 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4755 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4756 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4757 gsi_replace (&gsi, g, false);
4758 }
4759 vec_free (id->dst_simt_vars);
4760 id->dst_simt_vars = simtvars_save;
4761
4762 /* Clean up. */
4763 if (id->debug_map)
4764 {
4765 delete id->debug_map;
4766 id->debug_map = dst;
4767 }
4768 delete id->decl_map;
4769 id->decl_map = st;
4770
4771 /* Unlink the call's virtual operands before replacing it. */
4772 unlink_stmt_vdef (stmt);
4773 if (gimple_vdef (stmt)
4774 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4775 release_ssa_name (gimple_vdef (stmt));
4776
4777 /* If the inlined function returns a result that we care about,
4778 substitute the GIMPLE_CALL with an assignment of the return
4779 variable to the LHS of the call. That is, if STMT was
4780 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4781 if (use_retvar && gimple_call_lhs (stmt))
4782 {
4783 gimple *old_stmt = stmt;
4784 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4785 gimple_set_location (stmt, gimple_location (old_stmt));
4786 gsi_replace (&stmt_gsi, stmt, false);
4787 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4788 /* Append a clobber for id->retvar if easily possible. */
4789 if (flag_stack_reuse != SR_NONE
4790 && id->retvar
4791 && VAR_P (id->retvar)
4792 && id->retvar != return_slot
4793 && id->retvar != modify_dest
4794 && !TREE_THIS_VOLATILE (id->retvar)
4795 && !is_gimple_reg (id->retvar)
4796 && !stmt_ends_bb_p (stmt))
4797 {
4798 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4799 gimple *clobber_stmt;
4800 TREE_THIS_VOLATILE (clobber) = 1;
4801 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4802 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4803 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4804 }
4805 }
4806 else
4807 {
4808 /* Handle the case of inlining a function with no return
4809 statement, which causes the return value to become undefined. */
4810 if (gimple_call_lhs (stmt)
4811 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4812 {
4813 tree name = gimple_call_lhs (stmt);
4814 tree var = SSA_NAME_VAR (name);
4815 tree def = var ? ssa_default_def (cfun, var) : NULL;
4816
4817 if (def)
4818 {
4819 /* If the variable is used undefined, make this name
4820 undefined via a move. */
4821 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4822 gsi_replace (&stmt_gsi, stmt, true);
4823 }
4824 else
4825 {
4826 if (!var)
4827 {
4828 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4829 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4830 }
4831 /* Otherwise make this variable undefined. */
4832 gsi_remove (&stmt_gsi, true);
4833 set_ssa_default_def (cfun, var, name);
4834 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4835 }
4836 }
4837 /* Replace with a clobber for id->retvar. */
4838 else if (flag_stack_reuse != SR_NONE
4839 && id->retvar
4840 && VAR_P (id->retvar)
4841 && id->retvar != return_slot
4842 && id->retvar != modify_dest
4843 && !TREE_THIS_VOLATILE (id->retvar)
4844 && !is_gimple_reg (id->retvar))
4845 {
4846 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4847 gimple *clobber_stmt;
4848 TREE_THIS_VOLATILE (clobber) = 1;
4849 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4850 gimple_set_location (clobber_stmt, gimple_location (stmt));
4851 gsi_replace (&stmt_gsi, clobber_stmt, false);
4852 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4853 }
4854 else
4855 gsi_remove (&stmt_gsi, true);
4856 }
4857
4858 if (purge_dead_abnormal_edges)
4859 {
4860 gimple_purge_dead_eh_edges (return_block);
4861 gimple_purge_dead_abnormal_call_edges (return_block);
4862 }
4863
4864 /* If the value of the new expression is ignored, that's OK. We
4865 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4866 the equivalent inlined version either. */
4867 if (is_gimple_assign (stmt))
4868 {
4869 gcc_assert (gimple_assign_single_p (stmt)
4870 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4871 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4872 }
4873
4874 id->assign_stmts.release ();
4875
4876 /* Output the inlining info for this abstract function, since it has been
4877 inlined. If we don't do this now, we can lose the information about the
4878 variables in the function when the blocks get blown away as soon as we
4879 remove the cgraph node. */
4880 if (gimple_block (stmt))
4881 (*debug_hooks->outlining_inline_function) (fn);
4882
4883 /* Update callgraph if needed. */
4884 cg_edge->callee->remove ();
4885
4886 id->block = NULL_TREE;
4887 id->retvar = NULL_TREE;
4888 successfully_inlined = true;
4889
4890 egress:
4891 input_location = saved_location;
4892 return successfully_inlined;
4893 }
4894
4895 /* Expand call statements reachable from STMT_P.
4896 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4897 in a MODIFY_EXPR. */
4898
4899 static bool
4900 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4901 {
4902 gimple_stmt_iterator gsi;
4903 bool inlined = false;
4904
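/* Walk the statements backwards: expand_call_inline may split BB at a
   call, moving the call and everything after it to a new block, while
   GSI has already been advanced to the statement before the call, which
   stays in BB, so the iterator remains valid.  */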
4905 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4906 {
4907 gimple *stmt = gsi_stmt (gsi);
4908 gsi_prev (&gsi);
4909
4910 if (is_gimple_call (stmt)
4911 && !gimple_call_internal_p (stmt))
4912 inlined |= expand_call_inline (bb, stmt, id);
4913 }
4914
4915 return inlined;
4916 }
4917
4918
4919 /* Walk all basic blocks created after FIRST and try to fold every statement
4920 in the STATEMENTS pointer set. */
4921
4922 static void
4923 fold_marked_statements (int first, hash_set<gimple *> *statements)
4924 {
4925 for (; first < last_basic_block_for_fn (cfun); first++)
4926 if (BASIC_BLOCK_FOR_FN (cfun, first))
4927 {
4928 gimple_stmt_iterator gsi;
4929
4930 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4931 !gsi_end_p (gsi);
4932 gsi_next (&gsi))
4933 if (statements->contains (gsi_stmt (gsi)))
4934 {
4935 gimple *old_stmt = gsi_stmt (gsi);
4936 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4937
4938 if (old_decl && fndecl_built_in_p (old_decl))
4939 {
4940 /* Folding builtins can create multiple instructions;
4941 we need to look at all of them. */
4942 gimple_stmt_iterator i2 = gsi;
4943 gsi_prev (&i2);
4944 if (fold_stmt (&gsi))
4945 {
4946 gimple *new_stmt;
4947 /* If a builtin at the end of a bb folded into nothing,
4948 the following loop won't work. */
4949 if (gsi_end_p (gsi))
4950 {
4951 cgraph_update_edges_for_call_stmt (old_stmt,
4952 old_decl, NULL);
4953 break;
4954 }
4955 if (gsi_end_p (i2))
4956 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4957 else
4958 gsi_next (&i2);
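/* Walk all statements that replaced OLD_STMT, updating them and the
   callgraph edges, until we reach the statement GSI now points at,
   i.e. the last statement produced by the folding.  */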
4959 while (1)
4960 {
4961 new_stmt = gsi_stmt (i2);
4962 update_stmt (new_stmt);
4963 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4964 new_stmt);
4965
4966 if (new_stmt == gsi_stmt (gsi))
4967 {
4968 /* It is okay to check only for the very last
4969 of these statements. If it is a throwing
4970 statement nothing will change. If it isn't
4971 this can remove EH edges. That would only be
4972 wrong if some intermediate stmts could throw
4973 but not the last one; that would mean
4974 we'd have to split the block, which we can't
4975 do here and we'd lose anyway. And as builtins
4976 probably never throw, this is all
4977 moot anyway. */
4978 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4979 new_stmt))
4980 gimple_purge_dead_eh_edges (
4981 BASIC_BLOCK_FOR_FN (cfun, first));
4982 break;
4983 }
4984 gsi_next (&i2);
4985 }
4986 }
4987 }
4988 else if (fold_stmt (&gsi))
4989 {
4990 /* Re-read the statement from GSI as fold_stmt() may
4991 have changed it. */
4992 gimple *new_stmt = gsi_stmt (gsi);
4993 update_stmt (new_stmt);
4994
4995 if (is_gimple_call (old_stmt)
4996 || is_gimple_call (new_stmt))
4997 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4998 new_stmt);
4999
5000 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5001 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5002 first));
5003 }
5004 }
5005 }
5006 }
5007
5008 /* Expand calls to inline functions in the body of FN. */
5009
5010 unsigned int
5011 optimize_inline_calls (tree fn)
5012 {
5013 copy_body_data id;
5014 basic_block bb;
5015 int last = n_basic_blocks_for_fn (cfun);
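/* LAST records how many basic blocks the function has before inlining;
   fold_marked_statements below only needs to look at blocks created
   after that point by the inlined bodies.  */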
5016 bool inlined_p = false;
5017
5018 /* Clear out ID. */
5019 memset (&id, 0, sizeof (id));
5020
5021 id.src_node = id.dst_node = cgraph_node::get (fn);
5022 gcc_assert (id.dst_node->definition);
5023 id.dst_fn = fn;
5024 /* Or any functions that aren't finished yet. */
5025 if (current_function_decl)
5026 id.dst_fn = current_function_decl;
5027
5028 id.copy_decl = copy_decl_maybe_to_var;
5029 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5030 id.transform_new_cfg = false;
5031 id.transform_return_to_modify = true;
5032 id.transform_parameter = true;
5033 id.transform_lang_insert_block = NULL;
5034 id.statements_to_fold = new hash_set<gimple *>;
5035
5036 push_gimplify_context ();
5037
5038 /* We make no attempts to keep dominance info up-to-date. */
5039 free_dominance_info (CDI_DOMINATORS);
5040 free_dominance_info (CDI_POST_DOMINATORS);
5041
5042 /* Register specific gimple functions. */
5043 gimple_register_cfg_hooks ();
5044
5045 /* Reach the trees by walking over the CFG, and note the
5046 enclosing basic-blocks in the call edges. */
5047 /* We walk the blocks going forward, because inlined function bodies
5048 will split id->current_basic_block, and the new blocks will
5049 follow it; we'll trudge through them, processing their CALL_EXPRs
5050 along the way. */
5051 FOR_EACH_BB_FN (bb, cfun)
5052 inlined_p |= gimple_expand_calls_inline (bb, &id);
5053
5054 pop_gimplify_context (NULL);
5055
5056 if (flag_checking)
5057 {
5058 struct cgraph_edge *e;
5059
5060 id.dst_node->verify ();
5061
5062 /* Double check that we inlined everything we are supposed to inline. */
5063 for (e = id.dst_node->callees; e; e = e->next_callee)
5064 gcc_assert (e->inline_failed);
5065 }
5066
5067 /* Fold queued statements. */
5068 update_max_bb_count ();
5069 fold_marked_statements (last, id.statements_to_fold);
5070 delete id.statements_to_fold;
5071
5072 gcc_assert (!id.debug_stmts.exists ());
5073
5074 /* If we didn't inline into the function there is nothing to do. */
5075 if (!inlined_p)
5076 return 0;
5077
5078 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5079 number_blocks (fn);
5080
5081 delete_unreachable_blocks_update_callgraph (&id);
5082 if (flag_checking)
5083 id.dst_node->verify ();
5084
5085 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5086 not possible yet - the IPA passes might make various functions not
5087 throw and they don't care to proactively update local EH info. This is
5088 done later in the fixup_cfg pass, which also executes the verification. */
5089 return (TODO_update_ssa
5090 | TODO_cleanup_cfg
5091 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5092 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5093 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5094 ? TODO_rebuild_frequencies : 0));
5095 }
5096
5097 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5098
5099 tree
5100 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5101 {
5102 enum tree_code code = TREE_CODE (*tp);
5103 enum tree_code_class cl = TREE_CODE_CLASS (code);
5104
5105 /* We make copies of most nodes. */
5106 if (IS_EXPR_CODE_CLASS (cl)
5107 || code == TREE_LIST
5108 || code == TREE_VEC
5109 || code == TYPE_DECL
5110 || code == OMP_CLAUSE)
5111 {
5112 /* Because the chain gets clobbered when we make a copy, we save it
5113 here. */
5114 tree chain = NULL_TREE, new_tree;
5115
5116 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5117 chain = TREE_CHAIN (*tp);
5118
5119 /* Copy the node. */
5120 new_tree = copy_node (*tp);
5121
5122 *tp = new_tree;
5123
5124 /* Now, restore the chain, if appropriate. That will cause
5125 walk_tree to walk into the chain as well. */
5126 if (code == PARM_DECL
5127 || code == TREE_LIST
5128 || code == OMP_CLAUSE)
5129 TREE_CHAIN (*tp) = chain;
5130
5131 /* For now, we don't update BLOCKs when we make copies. So, we
5132 have to nullify all BIND_EXPRs. */
5133 if (TREE_CODE (*tp) == BIND_EXPR)
5134 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5135 }
5136 else if (code == CONSTRUCTOR)
5137 {
5138 /* CONSTRUCTOR nodes need special handling because
5139 we need to duplicate the vector of elements. */
5140 tree new_tree;
5141
5142 new_tree = copy_node (*tp);
5143 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5144 *tp = new_tree;
5145 }
5146 else if (code == STATEMENT_LIST)
5147 /* We used to just abort on STATEMENT_LIST, but we can run into them
5148 with statement-expressions (c++/40975). */
5149 copy_statement_list (tp);
5150 else if (TREE_CODE_CLASS (code) == tcc_type)
5151 *walk_subtrees = 0;
5152 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5153 *walk_subtrees = 0;
5154 else if (TREE_CODE_CLASS (code) == tcc_constant)
5155 *walk_subtrees = 0;
5156 return NULL_TREE;
5157 }
5158
5159 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5160 information indicating to what new SAVE_EXPR this one should be mapped,
5161 use that one. Otherwise, create a new node and enter it in ST. FN is
5162 the function into which the copy will be placed. */
5163
5164 static void
5165 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5166 {
5167 tree *n;
5168 tree t;
5169
5170 /* See if we already encountered this SAVE_EXPR. */
5171 n = st->get (*tp);
5172
5173 /* If we didn't already remap this SAVE_EXPR, do so now. */
5174 if (!n)
5175 {
5176 t = copy_node (*tp);
5177
5178 /* Remember this SAVE_EXPR. */
5179 st->put (*tp, t);
5180 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5181 st->put (t, t);
5182 }
5183 else
5184 {
5185 /* We've already walked into this SAVE_EXPR; don't do it again. */
5186 *walk_subtrees = 0;
5187 t = *n;
5188 }
5189
5190 /* Replace this SAVE_EXPR with the copy. */
5191 *tp = t;
5192 }
5193
5194 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5195 label, copies the declaration and enters it in the splay_tree in DATA (which
5196 is really a 'copy_body_data *'). */
5197
5198 static tree
5199 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5200 bool *handled_ops_p ATTRIBUTE_UNUSED,
5201 struct walk_stmt_info *wi)
5202 {
5203 copy_body_data *id = (copy_body_data *) wi->info;
5204 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5205
5206 if (stmt)
5207 {
5208 tree decl = gimple_label_label (stmt);
5209
5210 /* Copy the decl and remember the copy. */
5211 insert_decl_map (id, decl, id->copy_decl (decl, id));
5212 }
5213
5214 return NULL_TREE;
5215 }
5216
5217 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5218 struct walk_stmt_info *wi);
5219
5220 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5221 Using the decl map of the copy_body_data carried in DATA (a struct
5222 walk_stmt_info whose info field points to it), remaps all local
5223 declarations to appropriate replacements in gimple operands. */
5224
5225 static tree
5226 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5227 {
5228 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5229 copy_body_data *id = (copy_body_data *) wi->info;
5230 hash_map<tree, tree> *st = id->decl_map;
5231 tree *n;
5232 tree expr = *tp;
5233
5234 /* For recursive invocations this is no longer the LHS itself. */
5235 bool is_lhs = wi->is_lhs;
5236 wi->is_lhs = false;
5237
5238 if (TREE_CODE (expr) == SSA_NAME)
5239 {
5240 *tp = remap_ssa_name (*tp, id);
5241 *walk_subtrees = 0;
5242 if (is_lhs)
5243 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5244 }
5245 /* Only a local declaration (variable or label). */
5246 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5247 || TREE_CODE (expr) == LABEL_DECL)
5248 {
5249 /* Lookup the declaration. */
5250 n = st->get (expr);
5251
5252 /* If it's there, remap it. */
5253 if (n)
5254 *tp = *n;
5255 *walk_subtrees = 0;
5256 }
5257 else if (TREE_CODE (expr) == STATEMENT_LIST
5258 || TREE_CODE (expr) == BIND_EXPR
5259 || TREE_CODE (expr) == SAVE_EXPR)
5260 gcc_unreachable ();
5261 else if (TREE_CODE (expr) == TARGET_EXPR)
5262 {
5263 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5264 It's OK for this to happen if it was part of a subtree that
5265 isn't immediately expanded, such as operand 2 of another
5266 TARGET_EXPR. */
5267 if (!TREE_OPERAND (expr, 1))
5268 {
5269 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5270 TREE_OPERAND (expr, 3) = NULL_TREE;
5271 }
5272 }
5273 else if (TREE_CODE (expr) == OMP_CLAUSE)
5274 {
5275 /* Before the omplower pass completes, some OMP clauses can contain
5276 sequences that are neither copied by gimple_seq_copy nor walked by
5277 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5278 in those situations, we have to copy and process them explicitly. */
5279
5280 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5281 {
5282 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5283 seq = duplicate_remap_omp_clause_seq (seq, wi);
5284 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5285 }
5286 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5287 {
5288 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5289 seq = duplicate_remap_omp_clause_seq (seq, wi);
5290 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5291 }
5292 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5293 {
5294 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5295 seq = duplicate_remap_omp_clause_seq (seq, wi);
5296 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5297 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5298 seq = duplicate_remap_omp_clause_seq (seq, wi);
5299 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5300 }
5301 }
5302
5303 /* Keep iterating. */
5304 return NULL_TREE;
5305 }
5306
5307
5308 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5309 Using the decl map of the copy_body_data in WI->info, remaps all
5310 local declarations to appropriate replacements in gimple
5311 statements. */
5312
5313 static tree
5314 replace_locals_stmt (gimple_stmt_iterator *gsip,
5315 bool *handled_ops_p ATTRIBUTE_UNUSED,
5316 struct walk_stmt_info *wi)
5317 {
5318 copy_body_data *id = (copy_body_data *) wi->info;
5319 gimple *gs = gsi_stmt (*gsip);
5320
5321 if (gbind *stmt = dyn_cast <gbind *> (gs))
5322 {
5323 tree block = gimple_bind_block (stmt);
5324
5325 if (block)
5326 {
5327 remap_block (&block, id);
5328 gimple_bind_set_block (stmt, block);
5329 }
5330
5331 /* This will remap a lot of the same decls again, but this should be
5332 harmless. */
5333 if (gimple_bind_vars (stmt))
5334 {
5335 tree old_var, decls = gimple_bind_vars (stmt);
5336
5337 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5338 if (!can_be_nonlocal (old_var, id)
5339 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5340 remap_decl (old_var, id);
5341
5342 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5343 id->prevent_decl_creation_for_types = true;
5344 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5345 id->prevent_decl_creation_for_types = false;
5346 }
5347 }
5348
5349 /* Keep iterating. */
5350 return NULL_TREE;
5351 }
5352
5353 /* Create a copy of SEQ and remap all decls in it. */
5354
5355 static gimple_seq
5356 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5357 {
5358 if (!seq)
5359 return NULL;
5360
5361 /* If there are any labels in OMP sequences, they can only be referred to
5362 from within the sequence itself, so we can both mark and remap them here. */
5363 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5364 gimple_seq copy = gimple_seq_copy (seq);
5365 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5366 return copy;
5367 }
5368
5369 /* Copies everything in SEQ and replaces variables and labels local to
5370 current_function_decl. */
5371
5372 gimple_seq
5373 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5374 {
5375 copy_body_data id;
5376 struct walk_stmt_info wi;
5377 gimple_seq copy;
5378
5379 /* There's nothing to do for an empty sequence. */
5380 if (seq == NULL)
5381 return seq;
5382
5383 /* Set up ID. */
5384 memset (&id, 0, sizeof (id));
5385 id.src_fn = current_function_decl;
5386 id.dst_fn = current_function_decl;
5387 id.src_cfun = cfun;
5388 id.decl_map = new hash_map<tree, tree>;
5389 id.debug_map = NULL;
5390
5391 id.copy_decl = copy_decl_no_change;
5392 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5393 id.transform_new_cfg = false;
5394 id.transform_return_to_modify = false;
5395 id.transform_parameter = false;
5396 id.transform_lang_insert_block = NULL;
5397
5398 /* Walk the tree once to find local labels. */
5399 memset (&wi, 0, sizeof (wi));
5400 hash_set<tree> visited;
5401 wi.info = &id;
5402 wi.pset = &visited;
5403 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5404
5405 copy = gimple_seq_copy (seq);
5406
5407 /* Walk the copy, remapping decls. */
5408 memset (&wi, 0, sizeof (wi));
5409 wi.info = &id;
5410 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5411
5412 /* Clean up. */
5413 delete id.decl_map;
5414 if (id.debug_map)
5415 delete id.debug_map;
5416 if (id.dependence_map)
5417 {
5418 delete id.dependence_map;
5419 id.dependence_map = NULL;
5420 }
5421
5422 return copy;
5423 }
5424
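/* Illustrative usage sketch (hypothetical caller, not part of the original
   file): a pass holding a gimple_seq SEQ from the current function can get a
   private copy with fresh local variables and labels via

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   and may then modify or reinsert DUP without clobbering the locals that SEQ
   refers to.  */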
5425
5426 /* Allow someone to determine from gdb whether SEARCH occurs within TOP. */
5427
5428 static tree
5429 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5430 {
5431 if (*tp == data)
5432 return (tree) data;
5433 else
5434 return NULL;
5435 }
5436
5437 DEBUG_FUNCTION bool
5438 debug_find_tree (tree top, tree search)
5439 {
5440 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5441 }
5442
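/* Intended use is interactive, from the debugger (illustrative only):

     (gdb) call debug_find_tree (top, search)

   which evaluates to true when SEARCH occurs somewhere within TOP.  */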
5443
5444 /* Declare the variables created by the inliner. Add all the variables in
5445 VARS to BLOCK. */
5446
5447 static void
5448 declare_inline_vars (tree block, tree vars)
5449 {
5450 tree t;
5451 for (t = vars; t; t = DECL_CHAIN (t))
5452 {
5453 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5454 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5455 add_local_decl (cfun, t);
5456 }
5457
5458 if (block)
5459 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5460 }
5461
5462 /* Finish up the copy COPY of DECL. The DECL originally was in ID->src_fn,
5463 but now it will be in ID->dst_fn; fix up its debug info, RTL and
5464 DECL_CONTEXT accordingly. */
5465
5466 tree
5467 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5468 {
5469 /* Don't generate debug information for the copy if we wouldn't have
5470 generated it for the original either. */
5471 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5472 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5473
5474 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5475 declaration inspired this copy. */
5476 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5477
5478 /* The new variable/label has no RTL, yet. */
5479 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5480 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5481 SET_DECL_RTL (copy, 0);
5482
5483 /* These args would always appear unused, if not for this. */
5484 TREE_USED (copy) = 1;
5485
5486 /* Set the context for the new declaration. */
5487 if (!DECL_CONTEXT (decl))
5488 /* Globals stay global. */
5489 ;
5490 else if (DECL_CONTEXT (decl) != id->src_fn)
5491 /* Things that weren't in the scope of the function we're inlining
5492 from aren't in the scope we're inlining to, either. */
5493 ;
5494 else if (TREE_STATIC (decl))
5495 /* Function-scoped static variables should stay in the original
5496 function. */
5497 ;
5498 else
5499 {
5500 /* Ordinary automatic local variables are now in the scope of the
5501 new function. */
5502 DECL_CONTEXT (copy) = id->dst_fn;
5503 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5504 {
5505 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5506 DECL_ATTRIBUTES (copy)
5507 = tree_cons (get_identifier ("omp simt private"), NULL,
5508 DECL_ATTRIBUTES (copy));
5509 id->dst_simt_vars->safe_push (copy);
5510 }
5511 }
5512
5513 return copy;
5514 }
5515
5516 static tree
5517 copy_decl_to_var (tree decl, copy_body_data *id)
5518 {
5519 tree copy, type;
5520
5521 gcc_assert (TREE_CODE (decl) == PARM_DECL
5522 || TREE_CODE (decl) == RESULT_DECL);
5523
5524 type = TREE_TYPE (decl);
5525
5526 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5527 VAR_DECL, DECL_NAME (decl), type);
5528 if (DECL_PT_UID_SET_P (decl))
5529 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5530 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5531 TREE_READONLY (copy) = TREE_READONLY (decl);
5532 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5533 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5534
5535 return copy_decl_for_dup_finish (id, decl, copy);
5536 }
5537
5538 /* Like copy_decl_to_var, but create a return slot object instead of a
5539 pointer variable for return by invisible reference. */
5540
5541 static tree
5542 copy_result_decl_to_var (tree decl, copy_body_data *id)
5543 {
5544 tree copy, type;
5545
5546 gcc_assert (TREE_CODE (decl) == PARM_DECL
5547 || TREE_CODE (decl) == RESULT_DECL);
5548
5549 type = TREE_TYPE (decl);
5550 if (DECL_BY_REFERENCE (decl))
5551 type = TREE_TYPE (type);
5552
5553 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5554 VAR_DECL, DECL_NAME (decl), type);
5555 if (DECL_PT_UID_SET_P (decl))
5556 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5557 TREE_READONLY (copy) = TREE_READONLY (decl);
5558 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5559 if (!DECL_BY_REFERENCE (decl))
5560 {
5561 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5562 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5563 }
5564
5565 return copy_decl_for_dup_finish (id, decl, copy);
5566 }
5567
5568 tree
5569 copy_decl_no_change (tree decl, copy_body_data *id)
5570 {
5571 tree copy;
5572
5573 copy = copy_node (decl);
5574
5575 /* The COPY is not abstract; it will be generated in DST_FN. */
5576 DECL_ABSTRACT_P (copy) = false;
5577 lang_hooks.dup_lang_specific_decl (copy);
5578
5579 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5580 been taken; it's for internal bookkeeping in expand_goto_internal. */
5581 if (TREE_CODE (copy) == LABEL_DECL)
5582 {
5583 TREE_ADDRESSABLE (copy) = 0;
5584 LABEL_DECL_UID (copy) = -1;
5585 }
5586
5587 return copy_decl_for_dup_finish (id, decl, copy);
5588 }
5589
5590 static tree
5591 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5592 {
5593 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5594 return copy_decl_to_var (decl, id);
5595 else
5596 return copy_decl_no_change (decl, id);
5597 }
5598
5599 /* Return a copy of the function's parameter list, skipping parameters in ARGS_TO_SKIP. */
5600 static tree
5601 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5602 bitmap args_to_skip, tree *vars)
5603 {
5604 tree arg, *parg;
5605 tree new_parm = NULL;
5606 int i = 0;
5607
5608 parg = &new_parm;
5609
5610 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5611 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5612 {
5613 tree new_tree = remap_decl (arg, id);
5614 if (TREE_CODE (new_tree) != PARM_DECL)
5615 new_tree = id->copy_decl (arg, id);
5616 lang_hooks.dup_lang_specific_decl (new_tree);
5617 *parg = new_tree;
5618 parg = &DECL_CHAIN (new_tree);
5619 }
5620 else if (!id->decl_map->get (arg))
5621 {
5622 /* Make an equivalent VAR_DECL. If the argument was used
5623 as a temporary variable later in the function, the uses will be
5624 replaced by this local variable. */
5625 tree var = copy_decl_to_var (arg, id);
5626 insert_decl_map (id, arg, var);
5627 /* Declare this new variable. */
5628 DECL_CHAIN (var) = *vars;
5629 *vars = var;
5630 }
5631 return new_parm;
5632 }
5633
5634 /* Return a copy of the function's static chain. */
5635 static tree
5636 copy_static_chain (tree static_chain, copy_body_data * id)
5637 {
5638 tree *chain_copy, *pvar;
5639
5640 chain_copy = &static_chain;
5641 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5642 {
5643 tree new_tree = remap_decl (*pvar, id);
5644 lang_hooks.dup_lang_specific_decl (new_tree);
5645 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5646 *pvar = new_tree;
5647 }
5648 return static_chain;
5649 }
5650
5651 /* Return true if the function is allowed to be versioned.
5652 This is a guard for the versioning functionality. */
5653
5654 bool
5655 tree_versionable_function_p (tree fndecl)
5656 {
5657 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5658 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5659 }
5660
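/* Illustrative sketch (hypothetical caller): cloning and versioning code is
   expected to consult this guard before producing a new version of a cgraph
   node NODE (NODE is hypothetical here), e.g.

     if (!tree_versionable_function_p (node->decl))
       return;

   so that "noclone" functions and functions rejected by copy_forbidden are
   left untouched.  */
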
5661 /* Delete all unreachable basic blocks and update the callgraph.
5662 Doing so is somewhat nontrivial because we need to update all clones and
5663 remove inline functions that become unreachable. */
5664
5665 static bool
5666 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5667 {
5668 bool changed = false;
5669 basic_block b, next_bb;
5670
5671 find_unreachable_blocks ();
5672
5673 /* Delete all unreachable basic blocks. */
5674
5675 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5676 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5677 {
5678 next_bb = b->next_bb;
5679
5680 if (!(b->flags & BB_REACHABLE))
5681 {
5682 gimple_stmt_iterator bsi;
5683
5684 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5685 {
5686 struct cgraph_edge *e;
5687 struct cgraph_node *node;
5688
5689 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5690
5691 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5692 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5693 {
5694 if (!e->inline_failed)
5695 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5696 else
5697 e->remove ();
5698 }
5699 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5700 && id->dst_node->clones)
5701 for (node = id->dst_node->clones; node != id->dst_node;)
5702 {
5703 node->remove_stmt_references (gsi_stmt (bsi));
5704 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5705 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5706 {
5707 if (!e->inline_failed)
5708 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5709 else
5710 e->remove ();
5711 }
5712
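/* Advance to the next clone in preorder: descend into NODE's own clones
   first, then move to the next sibling clone, otherwise climb back up
   through clone_of until a sibling is found or we are back at the
   destination node.  */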
5713 if (node->clones)
5714 node = node->clones;
5715 else if (node->next_sibling_clone)
5716 node = node->next_sibling_clone;
5717 else
5718 {
5719 while (node != id->dst_node && !node->next_sibling_clone)
5720 node = node->clone_of;
5721 if (node != id->dst_node)
5722 node = node->next_sibling_clone;
5723 }
5724 }
5725 }
5726 delete_basic_block (b);
5727 changed = true;
5728 }
5729 }
5730
5731 return changed;
5732 }
5733
5734 /* Update clone info after duplication. */
5735
5736 static void
5737 update_clone_info (copy_body_data * id)
5738 {
5739 struct cgraph_node *node;
5740 if (!id->dst_node->clones)
5741 return;
5742 for (node = id->dst_node->clones; node != id->dst_node;)
5743 {
5744 /* First update replace maps to match the new body. */
5745 if (node->clone.tree_map)
5746 {
5747 unsigned int i;
5748 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5749 {
5750 struct ipa_replace_map *replace_info;
5751 replace_info = (*node->clone.tree_map)[i];
5752 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5753 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5754 }
5755 }
5756 if (node->clones)
5757 node = node->clones;
5758 else if (node->next_sibling_clone)
5759 node = node->next_sibling_clone;
5760 else
5761 {
5762 while (node != id->dst_node && !node->next_sibling_clone)
5763 node = node->clone_of;
5764 if (node != id->dst_node)
5765 node = node->next_sibling_clone;
5766 }
5767 }
5768 }
5769
5770 /* Create a copy of a function's tree.
5771 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5772 of the original function and the new copied function
5773 respectively. In case we want to replace a DECL
5774 tree with another tree while duplicating the function's
5775 body, TREE_MAP represents the mapping between these
5776 trees. If UPDATE_CLONES is set, the call_stmt fields
5777 of edges of clones of the function will be updated.
5778
5779 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5780 from the new version.
5781 If SKIP_RETURN is true, the new version will return void.
5782 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5783 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5784 */
5785 void
5786 tree_function_versioning (tree old_decl, tree new_decl,
5787 vec<ipa_replace_map *, va_gc> *tree_map,
5788 bool update_clones, bitmap args_to_skip,
5789 bool skip_return, bitmap blocks_to_copy,
5790 basic_block new_entry)
5791 {
5792 struct cgraph_node *old_version_node;
5793 struct cgraph_node *new_version_node;
5794 copy_body_data id;
5795 tree p;
5796 unsigned i;
5797 struct ipa_replace_map *replace_info;
5798 basic_block old_entry_block, bb;
5799 auto_vec<gimple *, 10> init_stmts;
5800 tree vars = NULL_TREE;
5801 bitmap debug_args_to_skip = args_to_skip;
5802
5803 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5804 && TREE_CODE (new_decl) == FUNCTION_DECL);
5805 DECL_POSSIBLY_INLINED (old_decl) = 1;
5806
5807 old_version_node = cgraph_node::get (old_decl);
5808 gcc_checking_assert (old_version_node);
5809 new_version_node = cgraph_node::get (new_decl);
5810 gcc_checking_assert (new_version_node);
5811
5812 /* Copy over debug args. */
5813 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5814 {
5815 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5816 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5817 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5818 old_debug_args = decl_debug_args_lookup (old_decl);
5819 if (old_debug_args)
5820 {
5821 new_debug_args = decl_debug_args_insert (new_decl);
5822 *new_debug_args = vec_safe_copy (*old_debug_args);
5823 }
5824 }
5825
5826 /* Output the inlining info for this abstract function, since it has been
5827 inlined. If we don't do this now, we can lose the information about the
5828 variables in the function when the blocks get blown away as soon as we
5829 remove the cgraph node. */
5830 (*debug_hooks->outlining_inline_function) (old_decl);
5831
5832 DECL_ARTIFICIAL (new_decl) = 1;
5833 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5834 if (DECL_ORIGIN (old_decl) == old_decl)
5835 old_version_node->used_as_abstract_origin = true;
5836 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5837
5838 /* Prepare the data structures for the tree copy. */
5839 memset (&id, 0, sizeof (id));
5840
5841 /* Set up the set of statements to fold after the body is copied. */
5842 id.statements_to_fold = new hash_set<gimple *>;
5843
5844 id.decl_map = new hash_map<tree, tree>;
5845 id.debug_map = NULL;
5846 id.src_fn = old_decl;
5847 id.dst_fn = new_decl;
5848 id.src_node = old_version_node;
5849 id.dst_node = new_version_node;
5850 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5851 id.blocks_to_copy = blocks_to_copy;
5852
5853 id.copy_decl = copy_decl_no_change;
5854 id.transform_call_graph_edges
5855 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5856 id.transform_new_cfg = true;
5857 id.transform_return_to_modify = false;
5858 id.transform_parameter = false;
5859 id.transform_lang_insert_block = NULL;
5860
5861 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5862 (DECL_STRUCT_FUNCTION (old_decl));
5863 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5864 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5865 initialize_cfun (new_decl, old_decl,
5866 new_entry ? new_entry->count : old_entry_block->count);
5867 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5868 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5869 = id.src_cfun->gimple_df->ipa_pta;
5870
5871 /* Copy the function's static chain. */
5872 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5873 if (p)
5874 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5875 = copy_static_chain (p, &id);
5876
5877 /* If there's a tree_map, prepare for substitution. */
5878 if (tree_map)
5879 for (i = 0; i < tree_map->length (); i++)
5880 {
5881 gimple *init;
5882 replace_info = (*tree_map)[i];
5883 if (replace_info->replace_p)
5884 {
5885 int parm_num = -1;
5886 if (!replace_info->old_tree)
5887 {
5888 int p = replace_info->parm_num;
5889 tree parm;
5890 tree req_type, new_type;
5891
5892 for (parm = DECL_ARGUMENTS (old_decl); p;
5893 parm = DECL_CHAIN (parm))
5894 p--;
5895 replace_info->old_tree = parm;
5896 parm_num = replace_info->parm_num;
5897 req_type = TREE_TYPE (parm);
5898 new_type = TREE_TYPE (replace_info->new_tree);
5899 if (!useless_type_conversion_p (req_type, new_type))
5900 {
5901 if (fold_convertible_p (req_type, replace_info->new_tree))
5902 replace_info->new_tree
5903 = fold_build1 (NOP_EXPR, req_type,
5904 replace_info->new_tree);
5905 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5906 replace_info->new_tree
5907 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5908 replace_info->new_tree);
5909 else
5910 {
5911 if (dump_file)
5912 {
5913 fprintf (dump_file, " const ");
5914 print_generic_expr (dump_file,
5915 replace_info->new_tree);
5916 fprintf (dump_file,
5917 " can't be converted to param ");
5918 print_generic_expr (dump_file, parm);
5919 fprintf (dump_file, "\n");
5920 }
5921 replace_info->old_tree = NULL;
5922 }
5923 }
5924 }
5925 else
5926 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5927 if (replace_info->old_tree)
5928 {
5929 init = setup_one_parameter (&id, replace_info->old_tree,
5930 replace_info->new_tree, id.src_fn,
5931 NULL,
5932 &vars);
5933 if (init)
5934 init_stmts.safe_push (init);
5935 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5936 {
5937 if (parm_num == -1)
5938 {
5939 tree parm;
5940 int p;
5941 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5942 parm = DECL_CHAIN (parm), p++)
5943 if (parm == replace_info->old_tree)
5944 {
5945 parm_num = p;
5946 break;
5947 }
5948 }
5949 if (parm_num != -1)
5950 {
5951 if (debug_args_to_skip == args_to_skip)
5952 {
5953 debug_args_to_skip = BITMAP_ALLOC (NULL);
5954 bitmap_copy (debug_args_to_skip, args_to_skip);
5955 }
5956 bitmap_clear_bit (debug_args_to_skip, parm_num);
5957 }
5958 }
5959 }
5960 }
5961 }
5962 /* Copy the function's arguments. */
5963 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5964 DECL_ARGUMENTS (new_decl)
5965 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5966 args_to_skip, &vars);
5967
5968 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5969 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5970
5971 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5972
5973 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5974 /* Add local vars. */
5975 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5976
5977 if (DECL_RESULT (old_decl) == NULL_TREE)
5978 ;
5979 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5980 {
5981 DECL_RESULT (new_decl)
5982 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5983 RESULT_DECL, NULL_TREE, void_type_node);
5984 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5985 cfun->returns_struct = 0;
5986 cfun->returns_pcc_struct = 0;
5987 }
5988 else
5989 {
5990 tree old_name;
5991 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5992 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5993 if (gimple_in_ssa_p (id.src_cfun)
5994 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5995 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5996 {
5997 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5998 insert_decl_map (&id, old_name, new_name);
5999 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6000 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6001 }
6002 }
6003
6004 /* Set up the destination function's loop tree. */
6005 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6006 {
6007 cfun->curr_properties &= ~PROP_loops;
6008 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6009 cfun->curr_properties |= PROP_loops;
6010 }
6011
6012 /* Copy the function's body. */
6013 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6014 new_entry);
6015
6016 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6017 number_blocks (new_decl);
6018
6019 /* We want to create the BB unconditionally, so that the addition of
6020 debug stmts doesn't affect BB count, which may in the end cause
6021 codegen differences. */
6022 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6023 while (init_stmts.length ())
6024 insert_init_stmt (&id, bb, init_stmts.pop ());
6025 update_clone_info (&id);
6026
6027 /* Remap the nonlocal_goto_save_area, if any. */
6028 if (cfun->nonlocal_goto_save_area)
6029 {
6030 struct walk_stmt_info wi;
6031
6032 memset (&wi, 0, sizeof (wi));
6033 wi.info = &id;
6034 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6035 }
6036
6037 /* Clean up. */
6038 delete id.decl_map;
6039 if (id.debug_map)
6040 delete id.debug_map;
6041 free_dominance_info (CDI_DOMINATORS);
6042 free_dominance_info (CDI_POST_DOMINATORS);
6043
6044 update_max_bb_count ();
6045 fold_marked_statements (0, id.statements_to_fold);
6046 delete id.statements_to_fold;
6047 delete_unreachable_blocks_update_callgraph (&id);
6048 if (id.dst_node->definition)
6049 cgraph_edge::rebuild_references ();
6050 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6051 {
6052 calculate_dominance_info (CDI_DOMINATORS);
6053 fix_loop_structure (NULL);
6054 }
6055 update_ssa (TODO_update_ssa);
6056
6057 /* After partial cloning we need to rescale frequencies so that they are
6058 within the proper range in the cloned function. */
6059 if (new_entry)
6060 {
6061 struct cgraph_edge *e;
6062 rebuild_frequencies ();
6063
6064 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6065 for (e = new_version_node->callees; e; e = e->next_callee)
6066 {
6067 basic_block bb = gimple_bb (e->call_stmt);
6068 e->count = bb->count;
6069 }
6070 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6071 {
6072 basic_block bb = gimple_bb (e->call_stmt);
6073 e->count = bb->count;
6074 }
6075 }
6076
6077 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6078 {
6079 tree parm;
6080 vec<tree, va_gc> **debug_args = NULL;
6081 unsigned int len = 0;
6082 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6083 parm; parm = DECL_CHAIN (parm), i++)
6084 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6085 {
6086 tree ddecl;
6087
6088 if (debug_args == NULL)
6089 {
6090 debug_args = decl_debug_args_insert (new_decl);
6091 len = vec_safe_length (*debug_args);
6092 }
6093 ddecl = make_node (DEBUG_EXPR_DECL);
6094 DECL_ARTIFICIAL (ddecl) = 1;
6095 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6096 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6097 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6098 vec_safe_push (*debug_args, ddecl);
6099 }
6100 if (debug_args != NULL)
6101 {
6102 /* On the callee side, add
6103 DEBUG D#Y s=> parm
6104 DEBUG var => D#Y
6105 stmts to the first bb where var is a VAR_DECL created for the
6106 optimized away parameter in DECL_INITIAL block. This hints
6107 in the debug info that var (whose DECL_ORIGIN is the parm
6108 PARM_DECL) is optimized away, but could be looked up at the
6109 call site as value of D#X there. */
6110 tree var = vars, vexpr;
6111 gimple_stmt_iterator cgsi
6112 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6113 gimple *def_temp;
6114 var = vars;
6115 i = vec_safe_length (*debug_args);
6116 do
6117 {
6118 i -= 2;
6119 while (var != NULL_TREE
6120 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6121 var = TREE_CHAIN (var);
6122 if (var == NULL_TREE)
6123 break;
6124 vexpr = make_node (DEBUG_EXPR_DECL);
6125 parm = (**debug_args)[i];
6126 DECL_ARTIFICIAL (vexpr) = 1;
6127 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6128 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6129 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6130 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6131 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6132 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6133 }
6134 while (i > len);
6135 }
6136 }
6137
6138 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6139 BITMAP_FREE (debug_args_to_skip);
6140 free_dominance_info (CDI_DOMINATORS);
6141 free_dominance_info (CDI_POST_DOMINATORS);
6142
6143 gcc_assert (!id.debug_stmts.exists ());
6144 pop_cfun ();
6145 return;
6146 }
6147
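/* Illustrative sketch (hypothetical caller): a plain whole-body copy of
   OLD_DECL into a caller-created FUNCTION_DECL NEW_DECL, with no parameter
   replacements, removals or CFG pruning, would be requested as

     tree_function_versioning (old_decl, new_decl, NULL, false,
                               NULL, false, NULL, NULL);

   i.e. no tree_map, no clone updating, no args_to_skip, no skip_return,
   no blocks_to_copy restriction and the original entry block.  */
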
6148 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6149 the callee and return the inlined body on success. */
6150
6151 tree
6152 maybe_inline_call_in_expr (tree exp)
6153 {
6154 tree fn = get_callee_fndecl (exp);
6155
6156 /* We can only try to inline "const" functions. */
6157 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6158 {
6159 call_expr_arg_iterator iter;
6160 copy_body_data id;
6161 tree param, arg, t;
6162 hash_map<tree, tree> decl_map;
6163
6164 /* Remap the parameters. */
6165 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6166 param;
6167 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6168 decl_map.put (param, arg);
6169
6170 memset (&id, 0, sizeof (id));
6171 id.src_fn = fn;
6172 id.dst_fn = current_function_decl;
6173 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6174 id.decl_map = &decl_map;
6175
6176 id.copy_decl = copy_decl_no_change;
6177 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6178 id.transform_new_cfg = false;
6179 id.transform_return_to_modify = true;
6180 id.transform_parameter = true;
6181 id.transform_lang_insert_block = NULL;
6182
6183 /* Make sure not to unshare trees behind the front-end's back
6184 since front-end specific mechanisms may rely on sharing. */
6185 id.regimplify = false;
6186 id.do_not_unshare = true;
6187
6188 /* We're not inside any EH region. */
6189 id.eh_lp_nr = 0;
6190
6191 t = copy_tree_body (&id);
6192
6193 /* We can only return something suitable for use in a GENERIC
6194 expression tree. */
6195 if (TREE_CODE (t) == MODIFY_EXPR)
6196 return TREE_OPERAND (t, 1);
6197 }
6198
6199 return NULL_TREE;
6200 }
6201
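/* Illustrative sketch (hypothetical caller in GENERIC folding code): the
   helper above either produces the inlined body or declines, so callers
   typically fall back to keeping the CALL_EXPR, e.g.

     tree inlined = maybe_inline_call_in_expr (call);
     return inlined ? inlined : call;

   where CALL is the CALL_EXPR being considered.  */
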
6202 /* Duplicate a type, fields and all. */
6203
6204 tree
6205 build_duplicate_type (tree type)
6206 {
6207 struct copy_body_data id;
6208
6209 memset (&id, 0, sizeof (id));
6210 id.src_fn = current_function_decl;
6211 id.dst_fn = current_function_decl;
6212 id.src_cfun = cfun;
6213 id.decl_map = new hash_map<tree, tree>;
6214 id.debug_map = NULL;
6215 id.copy_decl = copy_decl_no_change;
6216
6217 type = remap_type_1 (type, &id);
6218
6219 delete id.decl_map;
6220 if (id.debug_map)
6221 delete id.debug_map;
6222
6223 TYPE_CANONICAL (type) = type;
6224
6225 return type;
6226 }
6227
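/* Illustrative sketch (hypothetical use): a front end that needs a distinct
   variant of a type, with its own TYPE_CANONICAL and freshly copied fields,
   can request one with

     tree dup = build_duplicate_type (orig_type);

   where ORIG_TYPE is any existing type node.  */
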
6228 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6229 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6230 evaluation. */
6231
6232 tree
6233 copy_fn (tree fn, tree& parms, tree& result)
6234 {
6235 copy_body_data id;
6236 tree param;
6237 hash_map<tree, tree> decl_map;
6238
6239 tree *p = &parms;
6240 *p = NULL_TREE;
6241
6242 memset (&id, 0, sizeof (id));
6243 id.src_fn = fn;
6244 id.dst_fn = current_function_decl;
6245 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6246 id.decl_map = &decl_map;
6247
6248 id.copy_decl = copy_decl_no_change;
6249 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6250 id.transform_new_cfg = false;
6251 id.transform_return_to_modify = false;
6252 id.transform_parameter = true;
6253 id.transform_lang_insert_block = NULL;
6254
6255 /* Make sure not to unshare trees behind the front-end's back
6256 since front-end specific mechanisms may rely on sharing. */
6257 id.regimplify = false;
6258 id.do_not_unshare = true;
6259
6260 /* We're not inside any EH region. */
6261 id.eh_lp_nr = 0;
6262
6263 /* Remap the parameters and result and return them to the caller. */
6264 for (param = DECL_ARGUMENTS (fn);
6265 param;
6266 param = DECL_CHAIN (param))
6267 {
6268 *p = remap_decl (param, &id);
6269 p = &DECL_CHAIN (*p);
6270 }
6271
6272 if (DECL_RESULT (fn))
6273 result = remap_decl (DECL_RESULT (fn), &id);
6274 else
6275 result = NULL_TREE;
6276
6277 return copy_tree_body (&id);
6278 }
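
/* Illustrative sketch (hypothetical caller such as the C++ constexpr
   evaluator): the remapped parameter chain and RESULT_DECL come back through
   the reference arguments, so usage looks roughly like

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   where FNDECL is the function being evaluated; BODY is then an unshared
   copy of DECL_SAVED_TREE (fndecl) whose PARM_DECLs and RESULT_DECL have
   been replaced by PARMS and RESULT.  */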