1 /* Tree inlining.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64
65 /* I'm not real happy about this, but we need to handle gimple and
66 non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75 statements and RESX statements is adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
81
82 Versioning: a function body is duplicated and the result is a new
83 function, rather than being inserted into the blocks of an existing
84 function as with inlining. Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90 All of these will simultaneously look up any callgraph edges. If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined), those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
99
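/* As a simplified illustration of the inlining transformation described
   above (the temporary names are schematic, not the exact decls the
   inliner creates): given

     int callee (int x) { return x + 1; }

   inlining the call "y = callee (3);" duplicates the body, remaps the
   PARM_DECL "x" to a local copy initialized from the argument, and turns
   the RETURN_EXPR into an assignment to the returned-value variable:

     x.1 = 3;
     retval.2 = x.1 + 1;
     y = retval.2;

   The RETURN_EXPR itself disappears; its branch semantics are expressed
   by the CFG edges built around the copied blocks.  */
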
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136
137 /* Insert a tree->tree mapping for ID. Although the name suggests
138 that the trees should be variables, it is used for more than that. */
139
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143 id->decl_map->put (key, value);
144
145 /* Always insert an identity map as well. If we see this same new
146 node again, we won't want to duplicate it a second time. */
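/* For example, when a PARM_DECL X is remapped to a new VAR_DECL X',
   we record both X -> X' and the identity X' -> X', so that a later
   walk over the copied tree that looks up X' gets X' back instead of
   making yet another copy.  */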
147 if (key != value)
148 id->decl_map->put (value, value);
149 }
150
151 /* Insert a tree->tree mapping for ID. This is only used for
152 variables. */
153
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157 if (!gimple_in_ssa_p (id->src_cfun))
158 return;
159
160 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161 return;
162
163 if (!target_for_debug_bind (key))
164 return;
165
166 gcc_assert (TREE_CODE (key) == PARM_DECL);
167 gcc_assert (VAR_P (value));
168
169 if (!id->debug_map)
170 id->debug_map = new hash_map<tree, tree>;
171
172 id->debug_map->put (key, value);
173 }
174
175 /* If nonzero, we're remapping the contents of inlined debug
176 statements. If negative, an error has occurred, such as a
177 reference to a variable that isn't available in the inlined
178 context. */
179 static int processing_debug_stmt = 0;
180
181 /* Construct new SSA name for old NAME. ID is the inline context. */
182
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186 tree new_tree, var;
187 tree *n;
188
189 gcc_assert (TREE_CODE (name) == SSA_NAME);
190
191 n = id->decl_map->get (name);
192 if (n)
193 {
194 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
195 remove an unused LHS from a call statement. Such an LHS can however
196 still appear in debug statements, but its value is lost in this
197 function and we do not want to map it. */
198 if (id->killed_new_ssa_names
199 && id->killed_new_ssa_names->contains (*n))
200 {
201 gcc_assert (processing_debug_stmt);
202 processing_debug_stmt = -1;
203 return name;
204 }
205
206 return unshare_expr (*n);
207 }
208
209 if (processing_debug_stmt)
210 {
211 if (SSA_NAME_IS_DEFAULT_DEF (name)
212 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
213 && id->entry_bb == NULL
214 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
215 {
216 tree vexpr = make_node (DEBUG_EXPR_DECL);
217 gimple *def_temp;
218 gimple_stmt_iterator gsi;
219 tree val = SSA_NAME_VAR (name);
220
221 n = id->decl_map->get (val);
222 if (n != NULL)
223 val = *n;
224 if (TREE_CODE (val) != PARM_DECL
225 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
226 {
227 processing_debug_stmt = -1;
228 return name;
229 }
230 n = id->decl_map->get (val);
231 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
232 return *n;
233 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
234 DECL_ARTIFICIAL (vexpr) = 1;
235 TREE_TYPE (vexpr) = TREE_TYPE (name);
236 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
237 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
238 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
239 insert_decl_map (id, val, vexpr);
240 return vexpr;
241 }
242
243 processing_debug_stmt = -1;
244 return name;
245 }
246
247 /* Remap anonymous SSA names or SSA names of anonymous decls. */
248 var = SSA_NAME_VAR (name);
249 if (!var
250 || (!SSA_NAME_IS_DEFAULT_DEF (name)
251 && VAR_P (var)
252 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
253 && DECL_ARTIFICIAL (var)
254 && DECL_IGNORED_P (var)
255 && !DECL_NAME (var)))
256 {
257 struct ptr_info_def *pi;
258 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
259 if (!var && SSA_NAME_IDENTIFIER (name))
260 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
261 insert_decl_map (id, name, new_tree);
262 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
263 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
264 /* At least IPA points-to info can be directly transferred. */
265 if (id->src_cfun->gimple_df
266 && id->src_cfun->gimple_df->ipa_pta
267 && POINTER_TYPE_P (TREE_TYPE (name))
268 && (pi = SSA_NAME_PTR_INFO (name))
269 && !pi->pt.anything)
270 {
271 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
272 new_pi->pt = pi->pt;
273 }
274 /* So can range-info. */
275 if (!POINTER_TYPE_P (TREE_TYPE (name))
276 && SSA_NAME_RANGE_INFO (name))
277 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
278 SSA_NAME_RANGE_INFO (name));
279 return new_tree;
280 }
281
282 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
283 in copy_bb. */
284 new_tree = remap_decl (var, id);
285
286 /* We might've substituted a constant or another SSA_NAME for
287 the variable.
288
289 Replace the SSA name representing the RESULT_DECL by the variable during
290 inlining: this saves us from needing to introduce a PHI node in case the
291 return value is only partly initialized. */
292 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
293 && (!SSA_NAME_VAR (name)
294 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
295 || !id->transform_return_to_modify))
296 {
297 struct ptr_info_def *pi;
298 new_tree = make_ssa_name (new_tree);
299 insert_decl_map (id, name, new_tree);
300 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
301 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
302 /* At least IPA points-to info can be directly transferred. */
303 if (id->src_cfun->gimple_df
304 && id->src_cfun->gimple_df->ipa_pta
305 && POINTER_TYPE_P (TREE_TYPE (name))
306 && (pi = SSA_NAME_PTR_INFO (name))
307 && !pi->pt.anything)
308 {
309 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 new_pi->pt = pi->pt;
311 }
312 /* So can range-info. */
313 if (!POINTER_TYPE_P (TREE_TYPE (name))
314 && SSA_NAME_RANGE_INFO (name))
315 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
316 SSA_NAME_RANGE_INFO (name));
317 if (SSA_NAME_IS_DEFAULT_DEF (name))
318 {
319 /* By inlining a function having an uninitialized variable, we might
320 extend its lifetime (the variable might get reused). This causes an
321 ICE if we end up extending the lifetime of an SSA name across an
322 abnormal edge, but it also increases register pressure.
323
324 We simply initialize all uninitialized vars to 0, except
325 when we are inlining into the very first BB. We could avoid
326 this for all BBs that are not inside strongly connected
327 regions of the CFG, but this is expensive to test. */
328 if (id->entry_bb
329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
330 && (!SSA_NAME_VAR (name)
331 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
332 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
333 0)->dest
334 || EDGE_COUNT (id->entry_bb->preds) != 1))
335 {
336 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
337 gimple *init_stmt;
338 tree zero = build_zero_cst (TREE_TYPE (new_tree));
339
340 init_stmt = gimple_build_assign (new_tree, zero);
341 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
342 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
343 }
344 else
345 {
346 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
347 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
348 }
349 }
350 }
351 else
352 insert_decl_map (id, name, new_tree);
353 return new_tree;
354 }
355
356 /* Remap DECL during the copying of the BLOCK tree for the function. */
357
358 tree
359 remap_decl (tree decl, copy_body_data *id)
360 {
361 tree *n;
362
363 /* We only remap local variables in the current function. */
364
365 /* See if we have remapped this declaration. */
366
367 n = id->decl_map->get (decl);
368
369 if (!n && processing_debug_stmt)
370 {
371 processing_debug_stmt = -1;
372 return decl;
373 }
374
375 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
376 necessary DECLs have already been remapped and we do not want to duplicate
377 a decl coming from outside of the sequence we are copying. */
378 if (!n
379 && id->prevent_decl_creation_for_types
380 && id->remapping_type_depth > 0
381 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
382 return decl;
383
384 /* If we didn't already have an equivalent for this declaration, create one
385 now. */
386 if (!n)
387 {
388 /* Make a copy of the variable or label. */
389 tree t = id->copy_decl (decl, id);
390
391 /* Remember it, so that if we encounter this local entity again
392 we can reuse this copy. Do this early because remap_type may
393 need this decl for TYPE_STUB_DECL. */
394 insert_decl_map (id, decl, t);
395
396 if (!DECL_P (t))
397 return t;
398
399 /* Remap types, if necessary. */
400 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
401 if (TREE_CODE (t) == TYPE_DECL)
402 {
403 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404
405 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
406 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
407 is not set on the TYPE_DECL, for example in LTO mode. */
408 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
409 {
410 tree x = build_variant_type_copy (TREE_TYPE (t));
411 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
412 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
413 DECL_ORIGINAL_TYPE (t) = x;
414 }
415 }
416
417 /* Remap sizes as necessary. */
418 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
419 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
420
421 /* If fields, do likewise for offset and qualifier. */
422 if (TREE_CODE (t) == FIELD_DECL)
423 {
424 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
425 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
426 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
427 }
428
429 return t;
430 }
431
432 if (id->do_not_unshare)
433 return *n;
434 else
435 return unshare_expr (*n);
436 }
437
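/* Helper for remap_type.  Build a remapped copy of TYPE, register it in
   ID's decl map, and recursively remap the pieces of the type that may
   refer to local variables (the pointed-to type, the fields, the array
   domain, and the size expressions).  */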
438 static tree
439 remap_type_1 (tree type, copy_body_data *id)
440 {
441 tree new_tree, t;
442
443 /* We do need a copy. Build and register it now. If this is a pointer or
444 reference type, remap the designated type and make a new pointer or
445 reference type. */
446 if (TREE_CODE (type) == POINTER_TYPE)
447 {
448 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
457 }
458 else if (TREE_CODE (type) == REFERENCE_TYPE)
459 {
460 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
461 TYPE_MODE (type),
462 TYPE_REF_CAN_ALIAS_ALL (type));
463 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
464 new_tree = build_type_attribute_qual_variant (new_tree,
465 TYPE_ATTRIBUTES (type),
466 TYPE_QUALS (type));
467 insert_decl_map (id, type, new_tree);
468 return new_tree;
469 }
470 else
471 new_tree = copy_node (type);
472
473 insert_decl_map (id, type, new_tree);
474
475 /* This is a new type, not a copy of an old type. Need to reassociate
476 variants. We can handle everything except the main variant lazily. */
477 t = TYPE_MAIN_VARIANT (type);
478 if (type != t)
479 {
480 t = remap_type (t, id);
481 TYPE_MAIN_VARIANT (new_tree) = t;
482 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
483 TYPE_NEXT_VARIANT (t) = new_tree;
484 }
485 else
486 {
487 TYPE_MAIN_VARIANT (new_tree) = new_tree;
488 TYPE_NEXT_VARIANT (new_tree) = NULL;
489 }
490
491 if (TYPE_STUB_DECL (type))
492 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
493
494 /* Lazily create pointer and reference types. */
495 TYPE_POINTER_TO (new_tree) = NULL;
496 TYPE_REFERENCE_TO (new_tree) = NULL;
497
498 /* Copy all types that may contain references to local variables; be sure to
499 preserve sharing between a type and its main variant when possible. */
500 switch (TREE_CODE (new_tree))
501 {
502 case INTEGER_TYPE:
503 case REAL_TYPE:
504 case FIXED_POINT_TYPE:
505 case ENUMERAL_TYPE:
506 case BOOLEAN_TYPE:
507 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
508 {
509 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
510 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
511
512 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
513 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 }
515 else
516 {
517 t = TYPE_MIN_VALUE (new_tree);
518 if (t && TREE_CODE (t) != INTEGER_CST)
519 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
520
521 t = TYPE_MAX_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
524 }
525 return new_tree;
526
527 case FUNCTION_TYPE:
528 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531 else
532 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
534 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
535 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
536 else
537 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
538 return new_tree;
539
540 case ARRAY_TYPE:
541 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
542 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
543 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
544 else
545 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
546
547 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
548 {
549 gcc_checking_assert (TYPE_DOMAIN (type)
550 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
551 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
552 }
553 else
554 {
555 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
556 /* For array bounds where we have decided not to copy over the bounds
557 variable that isn't used in the OpenMP/OpenACC region, change them to
558 an uninitialized VAR_DECL temporary. */
559 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
560 && id->adjust_array_error_bounds
561 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
562 {
563 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
564 DECL_ATTRIBUTES (v)
565 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
566 DECL_ATTRIBUTES (v));
567 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
568 }
569 }
570 break;
571
572 case RECORD_TYPE:
573 case UNION_TYPE:
574 case QUAL_UNION_TYPE:
575 if (TYPE_MAIN_VARIANT (type) != type
576 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
577 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
578 else
579 {
580 tree f, nf = NULL;
581
582 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
583 {
584 t = remap_decl (f, id);
585 DECL_CONTEXT (t) = new_tree;
586 DECL_CHAIN (t) = nf;
587 nf = t;
588 }
589 TYPE_FIELDS (new_tree) = nreverse (nf);
590 }
591 break;
592
593 case OFFSET_TYPE:
594 default:
595 /* Shouldn't have been thought variable sized. */
596 gcc_unreachable ();
597 }
598
599 /* All variants of the type share the same size, so use the already remapped data. */
600 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
601 {
602 tree s = TYPE_SIZE (type);
603 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
604 tree su = TYPE_SIZE_UNIT (type);
605 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
606 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
607 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
608 || s == mvs);
609 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
610 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
611 || su == mvsu);
612 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
613 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
614 }
615 else
616 {
617 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
618 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
619 }
620
621 return new_tree;
622 }
623
624 /* Helper function for remap_type_2, called through walk_tree. */
625
626 static tree
627 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
628 {
629 copy_body_data *id = (copy_body_data *) data;
630
631 if (TYPE_P (*tp))
632 *walk_subtrees = 0;
633
634 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
635 return *tp;
636
637 return NULL_TREE;
638 }
639
640 /* Return true if TYPE needs to be remapped because remap_decl on any
641 needed embedded decl returns something other than that decl. */
642
643 static bool
644 remap_type_2 (tree type, copy_body_data *id)
645 {
646 tree t;
647
648 #define RETURN_TRUE_IF_VAR(T) \
649 do \
650 { \
651 tree _t = (T); \
652 if (_t) \
653 { \
654 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
655 return true; \
656 if (!TYPE_SIZES_GIMPLIFIED (type) \
657 && walk_tree (&_t, remap_type_3, id, NULL)) \
658 return true; \
659 } \
660 } \
661 while (0)
662
663 switch (TREE_CODE (type))
664 {
665 case POINTER_TYPE:
666 case REFERENCE_TYPE:
667 case FUNCTION_TYPE:
668 case METHOD_TYPE:
669 return remap_type_2 (TREE_TYPE (type), id);
670
671 case INTEGER_TYPE:
672 case REAL_TYPE:
673 case FIXED_POINT_TYPE:
674 case ENUMERAL_TYPE:
675 case BOOLEAN_TYPE:
676 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
677 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
678 return false;
679
680 case ARRAY_TYPE:
681 if (remap_type_2 (TREE_TYPE (type), id)
682 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
683 return true;
684 break;
685
686 case RECORD_TYPE:
687 case UNION_TYPE:
688 case QUAL_UNION_TYPE:
689 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
690 if (TREE_CODE (t) == FIELD_DECL)
691 {
692 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
693 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
694 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
695 if (TREE_CODE (type) == QUAL_UNION_TYPE)
696 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
697 }
698 break;
699
700 default:
701 return false;
702 }
703
704 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
705 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
706 return false;
707 #undef RETURN_TRUE_IF_VAR
708 }
709
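/* Remap TYPE as needed for the destination function described by ID.
   Return the cached remapping if TYPE has been seen before; map TYPE to
   itself when no change is needed (it is not variably modified, or no
   embedded decl remaps to a different decl); otherwise build a remapped
   copy via remap_type_1.  */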
710 tree
711 remap_type (tree type, copy_body_data *id)
712 {
713 tree *node;
714 tree tmp;
715
716 if (type == NULL)
717 return type;
718
719 /* See if we have remapped this type. */
720 node = id->decl_map->get (type);
721 if (node)
722 return *node;
723
724 /* The type only needs remapping if it's variably modified. */
725 if (! variably_modified_type_p (type, id->src_fn)
726 /* Don't remap if the copy_decl method doesn't always return a new
727 decl and for all embedded decls returns the passed-in decl. */
728 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
729 {
730 insert_decl_map (id, type, type);
731 return type;
732 }
733
734 id->remapping_type_depth++;
735 tmp = remap_type_1 (type, id);
736 id->remapping_type_depth--;
737
738 return tmp;
739 }
740
741 /* Decide if DECL can be put into BLOCK_NONLOCALIZED_VARS. */
742
743 static bool
744 can_be_nonlocal (tree decl, copy_body_data *id)
745 {
746 /* We cannot duplicate function decls. */
747 if (TREE_CODE (decl) == FUNCTION_DECL)
748 return true;
749
750 /* Local static vars must be non-local or we get multiple declaration
751 problems. */
752 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
753 return true;
754
755 return false;
756 }
757
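/* Remap the chain of declarations DECLS using ID.  Declarations that can
   stay non-local (function decls and variables that are not automatic in
   the source function) are not copied; where debug info wants them they
   are pushed onto *NONLOCALIZED_LIST instead.  Return the new chain of
   remapped declarations.  */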
758 static tree
759 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
760 copy_body_data *id)
761 {
762 tree old_var;
763 tree new_decls = NULL_TREE;
764
765 /* Remap its variables. */
766 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
767 {
768 tree new_var;
769
770 if (can_be_nonlocal (old_var, id))
771 {
772 /* We need to add this variable to the local decls as otherwise
773 nothing else will do so. */
774 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
775 add_local_decl (cfun, old_var);
776 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
777 && !DECL_IGNORED_P (old_var)
778 && nonlocalized_list)
779 vec_safe_push (*nonlocalized_list, old_var);
780 continue;
781 }
782
783 /* Remap the variable. */
784 new_var = remap_decl (old_var, id);
785
786 /* If we didn't remap this variable, we can't mess with its
787 TREE_CHAIN. If we remapped this variable to the return slot, it's
788 already declared somewhere else, so don't declare it here. */
789
790 if (new_var == id->retvar)
791 ;
792 else if (!new_var)
793 {
794 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
795 && !DECL_IGNORED_P (old_var)
796 && nonlocalized_list)
797 vec_safe_push (*nonlocalized_list, old_var);
798 }
799 else
800 {
801 gcc_assert (DECL_P (new_var));
802 DECL_CHAIN (new_var) = new_decls;
803 new_decls = new_var;
804
805 /* Also copy value-expressions. */
806 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
807 {
808 tree tem = DECL_VALUE_EXPR (new_var);
809 bool old_regimplify = id->regimplify;
810 id->remapping_type_depth++;
811 walk_tree (&tem, copy_tree_body_r, id, NULL);
812 id->remapping_type_depth--;
813 id->regimplify = old_regimplify;
814 SET_DECL_VALUE_EXPR (new_var, tem);
815 }
816 }
817 }
818
819 return nreverse (new_decls);
820 }
821
822 /* Copy the BLOCK to contain remapped versions of the variables
823 therein. And hook the new block into the block-tree. */
824
825 static void
826 remap_block (tree *block, copy_body_data *id)
827 {
828 tree old_block;
829 tree new_block;
830
831 /* Make the new block. */
832 old_block = *block;
833 new_block = make_node (BLOCK);
834 TREE_USED (new_block) = TREE_USED (old_block);
835 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
836 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
837 BLOCK_NONLOCALIZED_VARS (new_block)
838 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
839 *block = new_block;
840
841 /* Remap its variables. */
842 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
843 &BLOCK_NONLOCALIZED_VARS (new_block),
844 id);
845
846 if (id->transform_lang_insert_block)
847 id->transform_lang_insert_block (new_block);
848
849 /* Remember the remapped block. */
850 insert_decl_map (id, old_block, new_block);
851 }
852
853 /* Copy the whole block tree and root it in id->block. */
854
855 static tree
856 remap_blocks (tree block, copy_body_data *id)
857 {
858 tree t;
859 tree new_tree = block;
860
861 if (!block)
862 return NULL;
863
864 remap_block (&new_tree, id);
865 gcc_assert (new_tree != block);
866 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
867 prepend_lexical_block (new_tree, remap_blocks (t, id));
868 /* Blocks are in arbitrary order, but make things slightly prettier and do
869 not swap order when producing a copy. */
870 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
871 return new_tree;
872 }
873
874 /* Remap the block tree rooted at BLOCK to nothing. */
875
876 static void
877 remap_blocks_to_null (tree block, copy_body_data *id)
878 {
879 tree t;
880 insert_decl_map (id, block, NULL_TREE);
881 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
882 remap_blocks_to_null (t, id);
883 }
884
885 /* Remap the location LOCUS using the mapping information in ID. */
886
887 static location_t
888 remap_location (location_t locus, copy_body_data *id)
889 {
890 if (LOCATION_BLOCK (locus))
891 {
892 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
893 gcc_assert (n);
894 if (*n)
895 return set_block (locus, *n);
896 }
897
898 locus = LOCATION_LOCUS (locus);
899
900 if (locus != UNKNOWN_LOCATION && id->block)
901 return set_block (locus, id->block);
902
903 return locus;
904 }
905
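/* Replace the STATEMENT_LIST pointed to by *TP with a deep copy.  Nested
   STATEMENT_LISTs are copied as well, so that linking them into the new
   list does not destroy the originals.  */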
906 static void
907 copy_statement_list (tree *tp)
908 {
909 tree_stmt_iterator oi, ni;
910 tree new_tree;
911
912 new_tree = alloc_stmt_list ();
913 ni = tsi_start (new_tree);
914 oi = tsi_start (*tp);
915 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
916 *tp = new_tree;
917
918 for (; !tsi_end_p (oi); tsi_next (&oi))
919 {
920 tree stmt = tsi_stmt (oi);
921 if (TREE_CODE (stmt) == STATEMENT_LIST)
922 /* This copy is not redundant; tsi_link_after will smash this
923 STATEMENT_LIST into the end of the one we're building, and we
924 don't want to do that with the original. */
925 copy_statement_list (&stmt);
926 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
927 }
928 }
929
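/* Copy the BIND_EXPR pointed to by *TP via copy_tree_r, remapping its
   BIND_EXPR_BLOCK and BIND_EXPR_VARS using the information in ID.  */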
930 static void
931 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
932 {
933 tree block = BIND_EXPR_BLOCK (*tp);
934 /* Copy (and replace) the statement. */
935 copy_tree_r (tp, walk_subtrees, NULL);
936 if (block)
937 {
938 remap_block (&block, id);
939 BIND_EXPR_BLOCK (*tp) = block;
940 }
941
942 if (BIND_EXPR_VARS (*tp))
943 /* This will remap a lot of the same decls again, but this should be
944 harmless. */
945 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
946 }
947
948
949 /* Create a new gimple_seq by remapping all the statements in BODY
950 using the inlining information in ID. */
951
952 static gimple_seq
953 remap_gimple_seq (gimple_seq body, copy_body_data *id)
954 {
955 gimple_stmt_iterator si;
956 gimple_seq new_body = NULL;
957
958 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
959 {
960 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
961 gimple_seq_add_seq (&new_body, new_stmts);
962 }
963
964 return new_body;
965 }
966
967
968 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
969 block using the mapping information in ID. */
970
971 static gimple *
972 copy_gimple_bind (gbind *stmt, copy_body_data *id)
973 {
974 gimple *new_bind;
975 tree new_block, new_vars;
976 gimple_seq body, new_body;
977
978 /* Copy the statement. Note that we purposely don't use copy_stmt
979 here because we need to remap statements as we copy. */
980 body = gimple_bind_body (stmt);
981 new_body = remap_gimple_seq (body, id);
982
983 new_block = gimple_bind_block (stmt);
984 if (new_block)
985 remap_block (&new_block, id);
986
987 /* This will remap a lot of the same decls again, but this should be
988 harmless. */
989 new_vars = gimple_bind_vars (stmt);
990 if (new_vars)
991 new_vars = remap_decls (new_vars, NULL, id);
992
993 new_bind = gimple_build_bind (new_vars, new_body, new_block);
994
995 return new_bind;
996 }
997
998 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
999
1000 static bool
1001 is_parm (tree decl)
1002 {
1003 if (TREE_CODE (decl) == SSA_NAME)
1004 {
1005 decl = SSA_NAME_VAR (decl);
1006 if (!decl)
1007 return false;
1008 }
1009
1010 return (TREE_CODE (decl) == PARM_DECL);
1011 }
1012
1013 /* Remap the dependence CLIQUE from the source to the destination function
1014 as specified in ID. */
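/* For example, if the source body used cliques 2 and 5 on its MEM_REFs,
   each is assigned a fresh clique number in the destination function the
   first time it is seen here (clique 1 stays reserved for cliques assigned
   by local points-to analysis), and every later occurrence of the same
   source clique maps to the same new number.  Clique 0 means "no
   dependence information" and is left as 0.  */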
1015
1016 static unsigned short
1017 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1018 {
1019 if (clique == 0 || processing_debug_stmt)
1020 return 0;
1021 if (!id->dependence_map)
1022 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1023 bool existed;
1024 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1025 if (!existed)
1026 {
1027 /* Clique 1 is reserved for local ones set by PTA. */
1028 if (cfun->last_clique == 0)
1029 cfun->last_clique = 1;
1030 newc = ++cfun->last_clique;
1031 }
1032 return newc;
1033 }
1034
1035 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1036 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1037 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1038 recursing into the child nodes of *TP. */
1039
1040 static tree
1041 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1042 {
1043 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1044 copy_body_data *id = (copy_body_data *) wi_p->info;
1045 tree fn = id->src_fn;
1046
1047 /* For recursive invocations this is no longer the LHS itself. */
1048 bool is_lhs = wi_p->is_lhs;
1049 wi_p->is_lhs = false;
1050
1051 if (TREE_CODE (*tp) == SSA_NAME)
1052 {
1053 *tp = remap_ssa_name (*tp, id);
1054 *walk_subtrees = 0;
1055 if (is_lhs)
1056 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1057 return NULL;
1058 }
1059 else if (auto_var_in_fn_p (*tp, fn))
1060 {
1061 /* Local variables and labels need to be replaced by equivalent
1062 variables. We don't want to copy static variables; there's
1063 only one of those, no matter how many times we inline the
1064 containing function. Similarly for globals from an outer
1065 function. */
1066 tree new_decl;
1067
1068 /* Remap the declaration. */
1069 new_decl = remap_decl (*tp, id);
1070 gcc_assert (new_decl);
1071 /* Replace this variable with the copy. */
1072 STRIP_TYPE_NOPS (new_decl);
1073 /* ??? The C++ frontend uses void * pointer zero to initialize
1074 any other type. This confuses the middle-end type verification.
1075 As cloned bodies do not go through gimplification again the fixup
1076 there doesn't trigger. */
1077 if (TREE_CODE (new_decl) == INTEGER_CST
1078 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1079 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1080 *tp = new_decl;
1081 *walk_subtrees = 0;
1082 }
1083 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1084 gcc_unreachable ();
1085 else if (TREE_CODE (*tp) == SAVE_EXPR)
1086 gcc_unreachable ();
1087 else if (TREE_CODE (*tp) == LABEL_DECL
1088 && (!DECL_CONTEXT (*tp)
1089 || decl_function_context (*tp) == id->src_fn))
1090 /* These may need to be remapped for EH handling. */
1091 *tp = remap_decl (*tp, id);
1092 else if (TREE_CODE (*tp) == FIELD_DECL)
1093 {
1094 /* If the enclosing record type is variably_modified_type_p, the field
1095 has already been remapped. Otherwise, it need not be. */
1096 tree *n = id->decl_map->get (*tp);
1097 if (n)
1098 *tp = *n;
1099 *walk_subtrees = 0;
1100 }
1101 else if (TYPE_P (*tp))
1102 /* Types may need remapping as well. */
1103 *tp = remap_type (*tp, id);
1104 else if (CONSTANT_CLASS_P (*tp))
1105 {
1106 /* If this is a constant, we have to copy the node iff the type
1107 will be remapped. copy_tree_r will not copy a constant. */
1108 tree new_type = remap_type (TREE_TYPE (*tp), id);
1109
1110 if (new_type == TREE_TYPE (*tp))
1111 *walk_subtrees = 0;
1112
1113 else if (TREE_CODE (*tp) == INTEGER_CST)
1114 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1115 else
1116 {
1117 *tp = copy_node (*tp);
1118 TREE_TYPE (*tp) = new_type;
1119 }
1120 }
1121 else
1122 {
1123 /* Otherwise, just copy the node. Note that copy_tree_r already
1124 knows not to copy VAR_DECLs, etc., so this is safe. */
1125
1126 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1127 {
1128 /* We need to re-canonicalize MEM_REFs from inline substitutions
1129 that can happen when a pointer argument is an ADDR_EXPR.
1130 Recurse here manually to allow that. */
1131 tree ptr = TREE_OPERAND (*tp, 0);
1132 tree type = remap_type (TREE_TYPE (*tp), id);
1133 tree old = *tp;
1134 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1135 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1136 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1137 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1138 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1139 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1140 {
1141 MR_DEPENDENCE_CLIQUE (*tp)
1142 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1143 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1144 }
1145 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1146 remapped a parameter as the property might be valid only
1147 for the parameter itself. */
1148 if (TREE_THIS_NOTRAP (old)
1149 && (!is_parm (TREE_OPERAND (old, 0))
1150 || (!id->transform_parameter && is_parm (ptr))))
1151 TREE_THIS_NOTRAP (*tp) = 1;
1152 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1153 *walk_subtrees = 0;
1154 return NULL;
1155 }
1156
1157 /* Here is the "usual case". Copy this tree node, and then
1158 tweak some special cases. */
1159 copy_tree_r (tp, walk_subtrees, NULL);
1160
1161 if (TREE_CODE (*tp) != OMP_CLAUSE)
1162 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1163
1164 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1165 {
1166 /* The copied TARGET_EXPR has never been expanded, even if the
1167 original node was expanded already. */
1168 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1169 TREE_OPERAND (*tp, 3) = NULL_TREE;
1170 }
1171 else if (TREE_CODE (*tp) == ADDR_EXPR)
1172 {
1173 /* Variable substitution need not be simple. In particular,
1174 the MEM_REF substitution above. Make sure that
1175 TREE_CONSTANT and friends are up-to-date. */
1176 int invariant = is_gimple_min_invariant (*tp);
1177 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1178 recompute_tree_invariant_for_addr_expr (*tp);
1179
1180 /* If this used to be invariant, but is not any longer,
1181 then regimplification is probably needed. */
1182 if (invariant && !is_gimple_min_invariant (*tp))
1183 id->regimplify = true;
1184
1185 *walk_subtrees = 0;
1186 }
1187 }
1188
1189 /* Update the TREE_BLOCK for the cloned expr. */
1190 if (EXPR_P (*tp))
1191 {
1192 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1193 tree old_block = TREE_BLOCK (*tp);
1194 if (old_block)
1195 {
1196 tree *n;
1197 n = id->decl_map->get (TREE_BLOCK (*tp));
1198 if (n)
1199 new_block = *n;
1200 }
1201 TREE_SET_BLOCK (*tp, new_block);
1202 }
1203
1204 /* Keep iterating. */
1205 return NULL_TREE;
1206 }
1207
1208
1209 /* Called from copy_body_id via walk_tree. DATA is really a
1210 `copy_body_data *'. */
1211
1212 tree
1213 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1214 {
1215 copy_body_data *id = (copy_body_data *) data;
1216 tree fn = id->src_fn;
1217 tree new_block;
1218
1219 /* Begin by recognizing trees that we'll completely rewrite for the
1220 inlining context. Our output for these trees is completely
1221 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1222 into an edge). Further down, we'll handle trees that get
1223 duplicated and/or tweaked. */
1224
1225 /* When requested, RETURN_EXPRs should be transformed to just the
1226 contained MODIFY_EXPR. The branch semantics of the return will
1227 be handled elsewhere by manipulating the CFG rather than a statement. */
1228 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1229 {
1230 tree assignment = TREE_OPERAND (*tp, 0);
1231
1232 /* If we're returning something, just turn that into an
1233 assignment to the equivalent of the original RESULT_DECL.
1234 If the "assignment" is just the result decl, the result
1235 decl has already been set (e.g. a recent "foo (&result_decl,
1236 ...)"); just toss the entire RETURN_EXPR. */
1237 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1238 {
1239 /* Replace the RETURN_EXPR with (a copy of) the
1240 MODIFY_EXPR hanging underneath. */
1241 *tp = copy_node (assignment);
1242 }
1243 else /* Else the RETURN_EXPR returns no value. */
1244 {
1245 *tp = NULL;
1246 return (tree) (void *)1;
1247 }
1248 }
1249 else if (TREE_CODE (*tp) == SSA_NAME)
1250 {
1251 *tp = remap_ssa_name (*tp, id);
1252 *walk_subtrees = 0;
1253 return NULL;
1254 }
1255
1256 /* Local variables and labels need to be replaced by equivalent
1257 variables. We don't want to copy static variables; there's only
1258 one of those, no matter how many times we inline the containing
1259 function. Similarly for globals from an outer function. */
1260 else if (auto_var_in_fn_p (*tp, fn))
1261 {
1262 tree new_decl;
1263
1264 /* Remap the declaration. */
1265 new_decl = remap_decl (*tp, id);
1266 gcc_assert (new_decl);
1267 /* Replace this variable with the copy. */
1268 STRIP_TYPE_NOPS (new_decl);
1269 *tp = new_decl;
1270 *walk_subtrees = 0;
1271 }
1272 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1273 copy_statement_list (tp);
1274 else if (TREE_CODE (*tp) == SAVE_EXPR
1275 || TREE_CODE (*tp) == TARGET_EXPR)
1276 remap_save_expr (tp, id->decl_map, walk_subtrees);
1277 else if (TREE_CODE (*tp) == LABEL_DECL
1278 && (! DECL_CONTEXT (*tp)
1279 || decl_function_context (*tp) == id->src_fn))
1280 /* These may need to be remapped for EH handling. */
1281 *tp = remap_decl (*tp, id);
1282 else if (TREE_CODE (*tp) == BIND_EXPR)
1283 copy_bind_expr (tp, walk_subtrees, id);
1284 /* Types may need remapping as well. */
1285 else if (TYPE_P (*tp))
1286 *tp = remap_type (*tp, id);
1287
1288 /* If this is a constant, we have to copy the node iff the type will be
1289 remapped. copy_tree_r will not copy a constant. */
1290 else if (CONSTANT_CLASS_P (*tp))
1291 {
1292 tree new_type = remap_type (TREE_TYPE (*tp), id);
1293
1294 if (new_type == TREE_TYPE (*tp))
1295 *walk_subtrees = 0;
1296
1297 else if (TREE_CODE (*tp) == INTEGER_CST)
1298 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1299 else
1300 {
1301 *tp = copy_node (*tp);
1302 TREE_TYPE (*tp) = new_type;
1303 }
1304 }
1305
1306 /* Otherwise, just copy the node. Note that copy_tree_r already
1307 knows not to copy VAR_DECLs, etc., so this is safe. */
1308 else
1309 {
1310 /* Here we handle trees that are not completely rewritten.
1311 First we detect some inlining-induced bogosities for
1312 discarding. */
1313 if (TREE_CODE (*tp) == MODIFY_EXPR
1314 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1315 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1316 {
1317 /* Some assignments VAR = VAR; don't generate any rtl code
1318 and thus don't count as variable modification. Avoid
1319 keeping bogosities like 0 = 0. */
1320 tree decl = TREE_OPERAND (*tp, 0), value;
1321 tree *n;
1322
1323 n = id->decl_map->get (decl);
1324 if (n)
1325 {
1326 value = *n;
1327 STRIP_TYPE_NOPS (value);
1328 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1329 {
1330 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1331 return copy_tree_body_r (tp, walk_subtrees, data);
1332 }
1333 }
1334 }
1335 else if (TREE_CODE (*tp) == INDIRECT_REF)
1336 {
1337 /* Get rid of *& from inline substitutions that can happen when a
1338 pointer argument is an ADDR_EXPR. */
1339 tree decl = TREE_OPERAND (*tp, 0);
1340 tree *n = id->decl_map->get (decl);
1341 if (n)
1342 {
1343 /* If we happen to get an ADDR_EXPR in n->value, strip
1344 it manually here as we'll eventually get ADDR_EXPRs
1345 which lie about the types they point to. In this case
1346 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1347 but we absolutely rely on that. As fold_indirect_ref
1348 does other useful transformations, try that first, though. */
1349 tree type = TREE_TYPE (*tp);
1350 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1351 tree old = *tp;
1352 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1353 if (! *tp)
1354 {
1355 type = remap_type (type, id);
1356 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1357 {
1358 *tp
1359 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1360 /* ??? We should either assert here or build
1361 a VIEW_CONVERT_EXPR instead of blindly leaking
1362 incompatible types to our IL. */
1363 if (! *tp)
1364 *tp = TREE_OPERAND (ptr, 0);
1365 }
1366 else
1367 {
1368 *tp = build1 (INDIRECT_REF, type, ptr);
1369 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1370 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1371 TREE_READONLY (*tp) = TREE_READONLY (old);
1372 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1373 have remapped a parameter as the property might be
1374 valid only for the parameter itself. */
1375 if (TREE_THIS_NOTRAP (old)
1376 && (!is_parm (TREE_OPERAND (old, 0))
1377 || (!id->transform_parameter && is_parm (ptr))))
1378 TREE_THIS_NOTRAP (*tp) = 1;
1379 }
1380 }
1381 *walk_subtrees = 0;
1382 return NULL;
1383 }
1384 }
1385 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1386 {
1387 /* We need to re-canonicalize MEM_REFs from inline substitutions
1388 that can happen when a pointer argument is an ADDR_EXPR.
1389 Recurse here manually to allow that. */
1390 tree ptr = TREE_OPERAND (*tp, 0);
1391 tree type = remap_type (TREE_TYPE (*tp), id);
1392 tree old = *tp;
1393 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1394 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1395 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1396 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1397 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1398 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1399 {
1400 MR_DEPENDENCE_CLIQUE (*tp)
1401 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1402 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1403 }
1404 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1405 remapped a parameter as the property might be valid only
1406 for the parameter itself. */
1407 if (TREE_THIS_NOTRAP (old)
1408 && (!is_parm (TREE_OPERAND (old, 0))
1409 || (!id->transform_parameter && is_parm (ptr))))
1410 TREE_THIS_NOTRAP (*tp) = 1;
1411 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1412 *walk_subtrees = 0;
1413 return NULL;
1414 }
1415
1416 /* Here is the "usual case". Copy this tree node, and then
1417 tweak some special cases. */
1418 copy_tree_r (tp, walk_subtrees, NULL);
1419
1420 /* If EXPR has a block defined, map it to the newly constructed block.
1421 When inlining we want EXPRs without a block to appear in the block
1422 of the function call if we are not remapping a type. */
1423 if (EXPR_P (*tp))
1424 {
1425 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1426 if (TREE_BLOCK (*tp))
1427 {
1428 tree *n;
1429 n = id->decl_map->get (TREE_BLOCK (*tp));
1430 if (n)
1431 new_block = *n;
1432 }
1433 TREE_SET_BLOCK (*tp, new_block);
1434 }
1435
1436 if (TREE_CODE (*tp) != OMP_CLAUSE)
1437 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1438
1439 /* The copied TARGET_EXPR has never been expanded, even if the
1440 original node was expanded already. */
1441 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1442 {
1443 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1444 TREE_OPERAND (*tp, 3) = NULL_TREE;
1445 }
1446
1447 /* Variable substitution need not be simple. In particular, the
1448 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1449 and friends are up-to-date. */
1450 else if (TREE_CODE (*tp) == ADDR_EXPR)
1451 {
1452 int invariant = is_gimple_min_invariant (*tp);
1453 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1454
1455 /* Handle the case where we substituted an INDIRECT_REF
1456 into the operand of the ADDR_EXPR. */
1457 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1458 && !id->do_not_fold)
1459 {
1460 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1461 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1462 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1463 *tp = t;
1464 }
1465 else
1466 recompute_tree_invariant_for_addr_expr (*tp);
1467
1468 /* If this used to be invariant, but is not any longer,
1469 then regimplification is probably needed. */
1470 if (invariant && !is_gimple_min_invariant (*tp))
1471 id->regimplify = true;
1472
1473 *walk_subtrees = 0;
1474 }
1475 }
1476
1477 /* Keep iterating. */
1478 return NULL_TREE;
1479 }
1480
1481 /* Helper for remap_gimple_stmt. Given an EH region number for the
1482 source function, map that to the duplicate EH region number in
1483 the destination function. */
1484
1485 static int
1486 remap_eh_region_nr (int old_nr, copy_body_data *id)
1487 {
1488 eh_region old_r, new_r;
1489
1490 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1491 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1492
1493 return new_r->index;
1494 }
1495
1496 /* Similar, but operate on INTEGER_CSTs. */
1497
1498 static tree
1499 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1500 {
1501 int old_nr, new_nr;
1502
1503 old_nr = tree_to_shwi (old_t_nr);
1504 new_nr = remap_eh_region_nr (old_nr, id);
1505
1506 return build_int_cst (integer_type_node, new_nr);
1507 }
1508
1509 /* Helper for copy_bb. Remap statement STMT using the inlining
1510 information in ID. Return the new statement copy. */
1511
1512 static gimple_seq
1513 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1514 {
1515 gimple *copy = NULL;
1516 struct walk_stmt_info wi;
1517 bool skip_first = false;
1518 gimple_seq stmts = NULL;
1519
1520 if (is_gimple_debug (stmt)
1521 && (gimple_debug_nonbind_marker_p (stmt)
1522 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1523 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1524 return NULL;
1525
1526 /* Begin by recognizing trees that we'll completely rewrite for the
1527 inlining context. Our output for these trees is completely
1528 different from our input (e.g. RETURN_EXPR is deleted and morphs
1529 into an edge). Further down, we'll handle trees that get
1530 duplicated and/or tweaked. */
1531
1532 /* When requested, GIMPLE_RETURN should be transformed to just the
1533 contained GIMPLE_ASSIGN. The branch semantics of the return will
1534 be handled elsewhere by manipulating the CFG rather than the
1535 statement. */
1536 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1537 {
1538 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1539
1540 /* If we're returning something, just turn that into an
1541 assignment to the equivalent of the original RESULT_DECL.
1542 If RETVAL is just the result decl, the result decl has
1543 already been set (e.g. a recent "foo (&result_decl, ...)");
1544 just toss the entire GIMPLE_RETURN. Likewise for when the
1545 call doesn't want the return value. */
1546 if (retval
1547 && (TREE_CODE (retval) != RESULT_DECL
1548 && (!id->call_stmt
1549 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1550 && (TREE_CODE (retval) != SSA_NAME
1551 || ! SSA_NAME_VAR (retval)
1552 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1553 {
1554 copy = gimple_build_assign (id->do_not_unshare
1555 ? id->retvar : unshare_expr (id->retvar),
1556 retval);
1557 /* id->retvar is already substituted. Skip it on later remapping. */
1558 skip_first = true;
1559 }
1560 else
1561 return NULL;
1562 }
1563 else if (gimple_has_substatements (stmt))
1564 {
1565 gimple_seq s1, s2;
1566
1567 /* When cloning bodies from the C++ front end, we will be handed bodies
1568 in High GIMPLE form. Handle here all the High GIMPLE statements that
1569 have embedded statements. */
1570 switch (gimple_code (stmt))
1571 {
1572 case GIMPLE_BIND:
1573 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1574 break;
1575
1576 case GIMPLE_CATCH:
1577 {
1578 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1579 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1580 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1581 }
1582 break;
1583
1584 case GIMPLE_EH_FILTER:
1585 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1586 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1587 break;
1588
1589 case GIMPLE_TRY:
1590 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1591 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1592 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1593 break;
1594
1595 case GIMPLE_WITH_CLEANUP_EXPR:
1596 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1597 copy = gimple_build_wce (s1);
1598 break;
1599
1600 case GIMPLE_OMP_PARALLEL:
1601 {
1602 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1603 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1604 copy = gimple_build_omp_parallel
1605 (s1,
1606 gimple_omp_parallel_clauses (omp_par_stmt),
1607 gimple_omp_parallel_child_fn (omp_par_stmt),
1608 gimple_omp_parallel_data_arg (omp_par_stmt));
1609 }
1610 break;
1611
1612 case GIMPLE_OMP_TASK:
1613 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1614 copy = gimple_build_omp_task
1615 (s1,
1616 gimple_omp_task_clauses (stmt),
1617 gimple_omp_task_child_fn (stmt),
1618 gimple_omp_task_data_arg (stmt),
1619 gimple_omp_task_copy_fn (stmt),
1620 gimple_omp_task_arg_size (stmt),
1621 gimple_omp_task_arg_align (stmt));
1622 break;
1623
1624 case GIMPLE_OMP_FOR:
1625 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1626 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1627 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1628 gimple_omp_for_clauses (stmt),
1629 gimple_omp_for_collapse (stmt), s2);
1630 {
1631 size_t i;
1632 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1633 {
1634 gimple_omp_for_set_index (copy, i,
1635 gimple_omp_for_index (stmt, i));
1636 gimple_omp_for_set_initial (copy, i,
1637 gimple_omp_for_initial (stmt, i));
1638 gimple_omp_for_set_final (copy, i,
1639 gimple_omp_for_final (stmt, i));
1640 gimple_omp_for_set_incr (copy, i,
1641 gimple_omp_for_incr (stmt, i));
1642 gimple_omp_for_set_cond (copy, i,
1643 gimple_omp_for_cond (stmt, i));
1644 }
1645 }
1646 break;
1647
1648 case GIMPLE_OMP_MASTER:
1649 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1650 copy = gimple_build_omp_master (s1);
1651 break;
1652
1653 case GIMPLE_OMP_TASKGROUP:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_taskgroup
1656 (s1, gimple_omp_taskgroup_clauses (stmt));
1657 break;
1658
1659 case GIMPLE_OMP_ORDERED:
1660 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1661 copy = gimple_build_omp_ordered
1662 (s1,
1663 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1664 break;
1665
1666 case GIMPLE_OMP_SCAN:
1667 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1668 copy = gimple_build_omp_scan
1669 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1670 break;
1671
1672 case GIMPLE_OMP_SECTION:
1673 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1674 copy = gimple_build_omp_section (s1);
1675 break;
1676
1677 case GIMPLE_OMP_SECTIONS:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_sections
1680 (s1, gimple_omp_sections_clauses (stmt));
1681 break;
1682
1683 case GIMPLE_OMP_SINGLE:
1684 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1685 copy = gimple_build_omp_single
1686 (s1, gimple_omp_single_clauses (stmt));
1687 break;
1688
1689 case GIMPLE_OMP_TARGET:
1690 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1691 copy = gimple_build_omp_target
1692 (s1, gimple_omp_target_kind (stmt),
1693 gimple_omp_target_clauses (stmt));
1694 break;
1695
1696 case GIMPLE_OMP_TEAMS:
1697 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1698 copy = gimple_build_omp_teams
1699 (s1, gimple_omp_teams_clauses (stmt));
1700 break;
1701
1702 case GIMPLE_OMP_CRITICAL:
1703 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1704 copy = gimple_build_omp_critical (s1,
1705 gimple_omp_critical_name
1706 (as_a <gomp_critical *> (stmt)),
1707 gimple_omp_critical_clauses
1708 (as_a <gomp_critical *> (stmt)));
1709 break;
1710
1711 case GIMPLE_TRANSACTION:
1712 {
1713 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1714 gtransaction *new_trans_stmt;
1715 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1716 id);
1717 copy = new_trans_stmt = gimple_build_transaction (s1);
1718 gimple_transaction_set_subcode (new_trans_stmt,
1719 gimple_transaction_subcode (old_trans_stmt));
1720 gimple_transaction_set_label_norm (new_trans_stmt,
1721 gimple_transaction_label_norm (old_trans_stmt));
1722 gimple_transaction_set_label_uninst (new_trans_stmt,
1723 gimple_transaction_label_uninst (old_trans_stmt));
1724 gimple_transaction_set_label_over (new_trans_stmt,
1725 gimple_transaction_label_over (old_trans_stmt));
1726 }
1727 break;
1728
1729 default:
1730 gcc_unreachable ();
1731 }
1732 }
1733 else
1734 {
1735 if (gimple_assign_copy_p (stmt)
1736 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1737 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1738 {
1739 /* Here we handle statements that are not completely rewritten.
1740 First we detect some inlining-induced bogosities for
1741 discarding. */
1742
1743 /* Some assignments VAR = VAR; don't generate any rtl code
1744 and thus don't count as variable modification. Avoid
1745 keeping bogosities like 0 = 0. */
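/* An illustrative (hypothetical) case: the callee contains the
   self-assignment  i = i;  and the caller passes the constant 0 for i,
   so a verbatim copy would become the bogus  0 = 0;  which must be
   discarded rather than emitted. */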
1746 tree decl = gimple_assign_lhs (stmt), value;
1747 tree *n;
1748
1749 n = id->decl_map->get (decl);
1750 if (n)
1751 {
1752 value = *n;
1753 STRIP_TYPE_NOPS (value);
1754 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1755 return NULL;
1756 }
1757 }
1758
1759 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1760 in a block that we aren't copying during tree_function_versioning,
1761 just drop the clobber stmt. */
1762 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1763 {
1764 tree lhs = gimple_assign_lhs (stmt);
1765 if (TREE_CODE (lhs) == MEM_REF
1766 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1767 {
1768 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1769 if (gimple_bb (def_stmt)
1770 && !bitmap_bit_p (id->blocks_to_copy,
1771 gimple_bb (def_stmt)->index))
1772 return NULL;
1773 }
1774 }
1775
1776 /* We do not allow CLOBBERs of handled components. In case
1777 the returned value is stored via such a handled component, remove
1778 the clobber so the stmt verifier is happy. */
1779 if (gimple_clobber_p (stmt)
1780 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1781 {
1782 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1783 if (!DECL_P (remapped)
1784 && TREE_CODE (remapped) != MEM_REF)
1785 return NULL;
1786 }
1787
1788 if (gimple_debug_bind_p (stmt))
1789 {
1790 gdebug *copy
1791 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1792 gimple_debug_bind_get_value (stmt),
1793 stmt);
1794 if (id->reset_location)
1795 gimple_set_location (copy, input_location);
1796 id->debug_stmts.safe_push (copy);
1797 gimple_seq_add_stmt (&stmts, copy);
1798 return stmts;
1799 }
1800 if (gimple_debug_source_bind_p (stmt))
1801 {
1802 gdebug *copy = gimple_build_debug_source_bind
1803 (gimple_debug_source_bind_get_var (stmt),
1804 gimple_debug_source_bind_get_value (stmt),
1805 stmt);
1806 if (id->reset_location)
1807 gimple_set_location (copy, input_location);
1808 id->debug_stmts.safe_push (copy);
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1811 }
1812 if (gimple_debug_nonbind_marker_p (stmt))
1813 {
1814 /* If the inlined function has too many debug markers,
1815 don't copy them. */
1816 if (id->src_cfun->debug_marker_count
1817 > param_max_debug_marker_count)
1818 return stmts;
1819
1820 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1821 if (id->reset_location)
1822 gimple_set_location (copy, input_location);
1823 id->debug_stmts.safe_push (copy);
1824 gimple_seq_add_stmt (&stmts, copy);
1825 return stmts;
1826 }
1827
1828 /* Create a new deep copy of the statement. */
1829 copy = gimple_copy (stmt);
1830
1831 /* Clear flags that need revisiting. */
1832 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1833 {
1834 if (gimple_call_tail_p (call_stmt))
1835 gimple_call_set_tail (call_stmt, false);
1836 if (gimple_call_from_thunk_p (call_stmt))
1837 gimple_call_set_from_thunk (call_stmt, false);
1838 if (gimple_call_internal_p (call_stmt))
1839 switch (gimple_call_internal_fn (call_stmt))
1840 {
1841 case IFN_GOMP_SIMD_LANE:
1842 case IFN_GOMP_SIMD_VF:
1843 case IFN_GOMP_SIMD_LAST_LANE:
1844 case IFN_GOMP_SIMD_ORDERED_START:
1845 case IFN_GOMP_SIMD_ORDERED_END:
1846 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1847 break;
1848 default:
1849 break;
1850 }
1851 }
1852
1853 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1854 RESX and EH_DISPATCH. */
1855 if (id->eh_map)
1856 switch (gimple_code (copy))
1857 {
1858 case GIMPLE_CALL:
1859 {
1860 tree r, fndecl = gimple_call_fndecl (copy);
1861 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1862 switch (DECL_FUNCTION_CODE (fndecl))
1863 {
1864 case BUILT_IN_EH_COPY_VALUES:
1865 r = gimple_call_arg (copy, 1);
1866 r = remap_eh_region_tree_nr (r, id);
1867 gimple_call_set_arg (copy, 1, r);
1868 /* FALLTHRU */
1869
1870 case BUILT_IN_EH_POINTER:
1871 case BUILT_IN_EH_FILTER:
1872 r = gimple_call_arg (copy, 0);
1873 r = remap_eh_region_tree_nr (r, id);
1874 gimple_call_set_arg (copy, 0, r);
1875 break;
1876
1877 default:
1878 break;
1879 }
1880
1881 /* Reset alias info if we didn't apply measures to
1882 keep it valid over inlining by setting DECL_PT_UID. */
1883 if (!id->src_cfun->gimple_df
1884 || !id->src_cfun->gimple_df->ipa_pta)
1885 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1886 }
1887 break;
1888
1889 case GIMPLE_RESX:
1890 {
1891 gresx *resx_stmt = as_a <gresx *> (copy);
1892 int r = gimple_resx_region (resx_stmt);
1893 r = remap_eh_region_nr (r, id);
1894 gimple_resx_set_region (resx_stmt, r);
1895 }
1896 break;
1897
1898 case GIMPLE_EH_DISPATCH:
1899 {
1900 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1901 int r = gimple_eh_dispatch_region (eh_dispatch);
1902 r = remap_eh_region_nr (r, id);
1903 gimple_eh_dispatch_set_region (eh_dispatch, r);
1904 }
1905 break;
1906
1907 default:
1908 break;
1909 }
1910 }
1911
1912 /* If STMT has a block defined, map it to the newly constructed block. */
1913 if (tree block = gimple_block (copy))
1914 {
1915 tree *n;
1916 n = id->decl_map->get (block);
1917 gcc_assert (n);
1918 gimple_set_block (copy, *n);
1919 }
1920 if (id->param_body_adjs)
1921 {
1922 gimple_seq extra_stmts = NULL;
1923 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1924 if (!gimple_seq_empty_p (extra_stmts))
1925 {
1926 memset (&wi, 0, sizeof (wi));
1927 wi.info = id;
1928 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1929 !gsi_end_p (egsi);
1930 gsi_next (&egsi))
1931 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1932 gimple_seq_add_seq (&stmts, extra_stmts);
1933 }
1934 }
1935
1936 if (id->reset_location)
1937 gimple_set_location (copy, input_location);
1938
1939 /* Debug statements ought to be rebuilt and not copied. */
1940 gcc_checking_assert (!is_gimple_debug (copy));
1941
1942 /* Remap all the operands in COPY. */
1943 memset (&wi, 0, sizeof (wi));
1944 wi.info = id;
1945 if (skip_first)
1946 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1947 else
1948 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1949
1950 /* Clear the copied virtual operands. We are not remapping them here
1951 but are going to recreate them from scratch. */
1952 if (gimple_has_mem_ops (copy))
1953 {
1954 gimple_set_vdef (copy, NULL_TREE);
1955 gimple_set_vuse (copy, NULL_TREE);
1956 }
1957
1958 gimple_seq_add_stmt (&stmts, copy);
1959 return stmts;
1960 }
1961
1962
1963 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1964 later. */
1965
1966 static basic_block
1967 copy_bb (copy_body_data *id, basic_block bb,
1968 profile_count num, profile_count den)
1969 {
1970 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1971 basic_block copy_basic_block;
1972 tree decl;
1973 basic_block prev;
1974
1975 profile_count::adjust_for_ipa_scaling (&num, &den);
1976
1977 /* Search for previous copied basic block. */
1978 prev = bb->prev_bb;
1979 while (!prev->aux)
1980 prev = prev->prev_bb;
1981
1982 /* create_basic_block() will append every new block to
1983 basic_block_info automatically. */
1984 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1985 copy_basic_block->count = bb->count.apply_scale (num, den);
1986
1987 copy_gsi = gsi_start_bb (copy_basic_block);
1988
1989 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1990 {
1991 gimple_seq stmts;
1992 gimple *stmt = gsi_stmt (gsi);
1993 gimple *orig_stmt = stmt;
1994 gimple_stmt_iterator stmts_gsi;
1995 bool stmt_added = false;
1996
1997 id->regimplify = false;
1998 stmts = remap_gimple_stmt (stmt, id);
1999
2000 if (gimple_seq_empty_p (stmts))
2001 continue;
2002
2003 seq_gsi = copy_gsi;
2004
2005 for (stmts_gsi = gsi_start (stmts);
2006 !gsi_end_p (stmts_gsi); )
2007 {
2008 stmt = gsi_stmt (stmts_gsi);
2009
2010 /* Advance iterator now before stmt is moved to seq_gsi. */
2011 gsi_next (&stmts_gsi);
2012
2013 if (gimple_nop_p (stmt))
2014 continue;
2015
2016 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2017 orig_stmt);
2018
2019 /* With return slot optimization we can end up with
2020 non-gimple (foo *)&this->m; fix that here. */
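/* A sketch with hypothetical names: a copied statement such as
     D.2345 = (struct foo *) &this_4(D)->m;
   has a right-hand side that is not a gimple value, so
   force_gimple_operand_gsi emits a helper assignment like
     _7 = &this_4(D)->m;
   and the copy is rewritten to  D.2345 = (struct foo *) _7;  */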
2021 if (is_gimple_assign (stmt)
2022 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2023 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2024 {
2025 tree new_rhs;
2026 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2027 gimple_assign_rhs1 (stmt),
2028 true, NULL, false,
2029 GSI_CONTINUE_LINKING);
2030 gimple_assign_set_rhs1 (stmt, new_rhs);
2031 id->regimplify = false;
2032 }
2033
2034 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2035
2036 if (id->regimplify)
2037 gimple_regimplify_operands (stmt, &seq_gsi);
2038
2039 stmt_added = true;
2040 }
2041
2042 if (!stmt_added)
2043 continue;
2044
2045 /* If copy_basic_block was empty at the start of this iteration,
2046 call gsi_start_bb again to get at the newly added statements. */
2047 if (gsi_end_p (copy_gsi))
2048 copy_gsi = gsi_start_bb (copy_basic_block);
2049 else
2050 gsi_next (&copy_gsi);
2051
2052 /* Process the new statement. The call to gimple_regimplify_operands
2053 possibly turned the statement into multiple statements; we
2054 need to process all of them. */
2055 do
2056 {
2057 tree fn;
2058 gcall *call_stmt;
2059
2060 stmt = gsi_stmt (copy_gsi);
2061 call_stmt = dyn_cast <gcall *> (stmt);
2062 if (call_stmt
2063 && gimple_call_va_arg_pack_p (call_stmt)
2064 && id->call_stmt
2065 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2066 {
2067 /* __builtin_va_arg_pack () should be replaced by
2068 all arguments corresponding to ... in the caller. */
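/* An illustrative sketch (hypothetical wrapper): when inlining
     int warnf (const char *fmt, ...)
     { return fprintf (stderr, fmt, __builtin_va_arg_pack ()); }
   at the call  warnf ("%d %s", 1, "x"),  the copied fprintf call keeps
   its own named arguments and gains the caller's two anonymous ones,
   becoming  fprintf (stderr, fmt, 1, "x");  */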
2069 tree p;
2070 gcall *new_call;
2071 vec<tree> argarray;
2072 size_t nargs = gimple_call_num_args (id->call_stmt);
2073 size_t n;
2074
2075 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2076 nargs--;
2077
2078 /* Create the new array of arguments. */
2079 n = nargs + gimple_call_num_args (call_stmt);
2080 argarray.create (n);
2081 argarray.safe_grow_cleared (n);
2082
2083 /* Copy all the arguments before '...' */
2084 memcpy (argarray.address (),
2085 gimple_call_arg_ptr (call_stmt, 0),
2086 gimple_call_num_args (call_stmt) * sizeof (tree));
2087
2088 /* Append the arguments passed in '...' */
2089 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2090 gimple_call_arg_ptr (id->call_stmt, 0)
2091 + (gimple_call_num_args (id->call_stmt) - nargs),
2092 nargs * sizeof (tree));
2093
2094 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2095 argarray);
2096
2097 argarray.release ();
2098
2099 /* Copy all GIMPLE_CALL flags, location and block, except
2100 GF_CALL_VA_ARG_PACK. */
2101 gimple_call_copy_flags (new_call, call_stmt);
2102 gimple_call_set_va_arg_pack (new_call, false);
2103 /* location includes block. */
2104 gimple_set_location (new_call, gimple_location (stmt));
2105 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2106
2107 gsi_replace (&copy_gsi, new_call, false);
2108 stmt = new_call;
2109 }
2110 else if (call_stmt
2111 && id->call_stmt
2112 && (decl = gimple_call_fndecl (stmt))
2113 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2114 {
2115 /* __builtin_va_arg_pack_len () should be replaced by
2116 the number of anonymous arguments. */
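/* For the hypothetical wrapper sketched above, the call
   warnf ("%d %s", 1, "x") passes two anonymous arguments, so a use of
   __builtin_va_arg_pack_len () in the copied body becomes the constant 2
   (or, if the enclosing call itself uses __builtin_va_arg_pack (),
   2 plus the caller's own pack length via the PLUS_EXPR case below). */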
2117 size_t nargs = gimple_call_num_args (id->call_stmt);
2118 tree count, p;
2119 gimple *new_stmt;
2120
2121 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2122 nargs--;
2123
2124 if (!gimple_call_lhs (stmt))
2125 {
2126 /* Drop unused calls. */
2127 gsi_remove (&copy_gsi, false);
2128 continue;
2129 }
2130 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2131 {
2132 count = build_int_cst (integer_type_node, nargs);
2133 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2134 gsi_replace (&copy_gsi, new_stmt, false);
2135 stmt = new_stmt;
2136 }
2137 else if (nargs != 0)
2138 {
2139 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2140 count = build_int_cst (integer_type_node, nargs);
2141 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2142 PLUS_EXPR, newlhs, count);
2143 gimple_call_set_lhs (stmt, newlhs);
2144 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2145 }
2146 }
2147 else if (call_stmt
2148 && id->call_stmt
2149 && gimple_call_internal_p (stmt)
2150 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2151 {
2152 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2153 gsi_remove (&copy_gsi, false);
2154 continue;
2155 }
2156
2157 /* Statements produced by inlining can be unfolded, especially
2158 when we constant propagated some operands. We can't fold
2159 them right now for two reasons:
2160 1) folding requires SSA_NAME_DEF_STMTs to be correct
2161 2) we can't change function calls to builtins.
2162 So we just mark the statement for later folding. We mark
2163 all new statements, instead of just the statements that have changed
2164 by some nontrivial substitution, so even statements made
2165 foldable indirectly are updated. If this turns out to be
2166 expensive, copy_body can be told to watch for nontrivial
2167 changes. */
2168 if (id->statements_to_fold)
2169 id->statements_to_fold->add (stmt);
2170
2171 /* We're duplicating a CALL_EXPR. Find any corresponding
2172 callgraph edges and update or duplicate them. */
2173 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2174 {
2175 struct cgraph_edge *edge;
2176
2177 switch (id->transform_call_graph_edges)
2178 {
2179 case CB_CGE_DUPLICATE:
2180 edge = id->src_node->get_edge (orig_stmt);
2181 if (edge)
2182 {
2183 struct cgraph_edge *old_edge = edge;
2184
2185 /* A speculative call consists of multiple
2186 edges - an indirect edge and one or more direct edges.
2187 Duplicate the whole thing and distribute frequencies
2188 accordingly. */
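/* A worked example with hypothetical counts: if the speculative call
   has direct edges with counts 60 and 30 and an indirect edge with
   count 10, the cloned direct edges receive 60% and 30% of the copied
   block's count and the cloned indirect edge the remaining 10%. */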
2189 if (edge->speculative)
2190 {
2191 int n = 0;
2192 profile_count direct_cnt
2193 = profile_count::zero ();
2194
2195 /* First figure out the distribution of counts
2196 so we can re-scale BB profile accordingly. */
2197 for (cgraph_edge *e = old_edge; e;
2198 e = e->next_speculative_call_target ())
2199 direct_cnt = direct_cnt + e->count;
2200
2201 cgraph_edge *indirect
2202 = old_edge->speculative_call_indirect_edge ();
2203 profile_count indir_cnt = indirect->count;
2204
2205 /* Next iterate all direct edges, clone it and its
2206 corresponding reference and update profile. */
2207 for (cgraph_edge *e = old_edge;
2208 e;
2209 e = e->next_speculative_call_target ())
2210 {
2211 profile_count cnt = e->count;
2212
2213 id->dst_node->clone_reference
2214 (e->speculative_call_target_ref (), stmt);
2215 edge = e->clone (id->dst_node, call_stmt,
2216 gimple_uid (stmt), num, den,
2217 true);
2218 profile_probability prob
2219 = cnt.probability_in (direct_cnt
2220 + indir_cnt);
2221 edge->count
2222 = copy_basic_block->count.apply_probability
2223 (prob);
2224 n++;
2225 }
2226 gcc_checking_assert
2227 (indirect->num_speculative_call_targets_p ()
2228 == n);
2229
2230 /* Duplicate the indirect edge after all direct edges
2231 cloned. */
2232 indirect = indirect->clone (id->dst_node, call_stmt,
2233 gimple_uid (stmt),
2234 num, den,
2235 true);
2236
2237 profile_probability prob
2238 = indir_cnt.probability_in (direct_cnt
2239 + indir_cnt);
2240 indirect->count
2241 = copy_basic_block->count.apply_probability (prob);
2242 }
2243 else
2244 {
2245 edge = edge->clone (id->dst_node, call_stmt,
2246 gimple_uid (stmt),
2247 num, den,
2248 true);
2249 edge->count = copy_basic_block->count;
2250 }
2251 }
2252 break;
2253
2254 case CB_CGE_MOVE_CLONES:
2255 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2256 call_stmt);
2257 edge = id->dst_node->get_edge (stmt);
2258 break;
2259
2260 case CB_CGE_MOVE:
2261 edge = id->dst_node->get_edge (orig_stmt);
2262 if (edge)
2263 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2264 break;
2265
2266 default:
2267 gcc_unreachable ();
2268 }
2269
2270 /* Constant propagation on arguments done during inlining
2271 may create a new direct call. Produce an edge for it. */
2272 if ((!edge
2273 || (edge->indirect_inlining_edge
2274 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2275 && id->dst_node->definition
2276 && (fn = gimple_call_fndecl (stmt)) != NULL)
2277 {
2278 struct cgraph_node *dest = cgraph_node::get_create (fn);
2279
2280 /* We have a missing edge in the callgraph. This can happen
2281 when previous inlining turned an indirect call into a
2282 direct call by constant propagating arguments, or when we are
2283 producing a dead clone (for further cloning). In all
2284 other cases we hit a bug (incorrect node sharing is the
2285 most common reason for missing edges). */
2286 gcc_assert (!dest->definition
2287 || dest->address_taken
2288 || !id->src_node->definition
2289 || !id->dst_node->definition);
2290 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2291 id->dst_node->create_edge_including_clones
2292 (dest, orig_stmt, call_stmt, bb->count,
2293 CIF_ORIGINALLY_INDIRECT_CALL);
2294 else
2295 id->dst_node->create_edge (dest, call_stmt,
2296 bb->count)->inline_failed
2297 = CIF_ORIGINALLY_INDIRECT_CALL;
2298 if (dump_file)
2299 {
2300 fprintf (dump_file, "Created new direct edge to %s\n",
2301 dest->dump_name ());
2302 }
2303 }
2304
2305 notice_special_calls (as_a <gcall *> (stmt));
2306 }
2307
2308 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2309 id->eh_map, id->eh_lp_nr);
2310
2311 gsi_next (&copy_gsi);
2312 }
2313 while (!gsi_end_p (copy_gsi));
2314
2315 copy_gsi = gsi_last_bb (copy_basic_block);
2316 }
2317
2318 return copy_basic_block;
2319 }
2320
2321 /* Inserting a Single Entry Multiple Exit region in SSA form into code that
2322 is itself in SSA form is quite easy, since the dominator relationship for
2323 the old basic blocks does not change.
2324
2325 There is however an exception where inlining might change the dominator
2326 relation across EH edges from basic blocks within the inlined function
2327 to landing pads in the function we inline into.
2328
2329 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2330 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2331 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2332 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2333 set, and this means that there will be no overlapping live ranges
2334 for the underlying symbol.
2335
2336 This might change in the future if we allow redirecting of EH edges and
2337 we might then want to change the way the CFG is built pre-inlining to
2338 include all the possible edges. */
2339 static void
2340 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2341 bool can_throw, bool nonlocal_goto)
2342 {
2343 edge e;
2344 edge_iterator ei;
2345
2346 FOR_EACH_EDGE (e, ei, bb->succs)
2347 if (!e->dest->aux
2348 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2349 {
2350 gphi *phi;
2351 gphi_iterator si;
2352
2353 if (!nonlocal_goto)
2354 gcc_assert (e->flags & EDGE_EH);
2355
2356 if (!can_throw)
2357 gcc_assert (!(e->flags & EDGE_EH));
2358
2359 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2360 {
2361 edge re;
2362
2363 phi = si.phi ();
2364
2365 /* For abnormal goto/call edges the receiver can be the
2366 ENTRY_BLOCK. Do not assert this cannot happen. */
2367
2368 gcc_assert ((e->flags & EDGE_EH)
2369 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2370
2371 re = find_edge (ret_bb, e->dest);
2372 gcc_checking_assert (re);
2373 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2374 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2375
2376 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2377 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2378 }
2379 }
2380 }
2381
2382 /* Insert clobbers for automatic variables of inlined ID->src_fn
2383 function at the start of basic block ID->eh_landing_pad_dest. */
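/* An illustrative sketch (hypothetical variable): if the inlined function
   has a local aggregate  buf  that is still live at a statement which can
   throw, a statement of the form

     buf ={v} {CLOBBER};

   is inserted at the start of the landing pad in the caller, marking the
   variable dead on the exceptional path so its stack slot can be reused. */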
2384
2385 static void
2386 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2387 {
2388 tree var;
2389 basic_block bb = id->eh_landing_pad_dest;
2390 live_vars_map *vars = NULL;
2391 unsigned int cnt = 0;
2392 unsigned int i;
2393 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2394 if (VAR_P (var)
2395 && !DECL_HARD_REGISTER (var)
2396 && !TREE_THIS_VOLATILE (var)
2397 && !DECL_HAS_VALUE_EXPR_P (var)
2398 && !is_gimple_reg (var)
2399 && auto_var_in_fn_p (var, id->src_fn)
2400 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2401 {
2402 tree *t = id->decl_map->get (var);
2403 if (!t)
2404 continue;
2405 tree new_var = *t;
2406 if (VAR_P (new_var)
2407 && !DECL_HARD_REGISTER (new_var)
2408 && !TREE_THIS_VOLATILE (new_var)
2409 && !DECL_HAS_VALUE_EXPR_P (new_var)
2410 && !is_gimple_reg (new_var)
2411 && auto_var_in_fn_p (new_var, id->dst_fn))
2412 {
2413 if (vars == NULL)
2414 vars = new live_vars_map;
2415 vars->put (DECL_UID (var), cnt++);
2416 }
2417 }
2418 if (vars == NULL)
2419 return;
2420
2421 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2422 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2423 if (VAR_P (var))
2424 {
2425 edge e;
2426 edge_iterator ei;
2427 bool needed = false;
2428 unsigned int *v = vars->get (DECL_UID (var));
2429 if (v == NULL)
2430 continue;
2431 FOR_EACH_EDGE (e, ei, bb->preds)
2432 if ((e->flags & EDGE_EH) != 0
2433 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2434 {
2435 basic_block src_bb = (basic_block) e->src->aux;
2436
2437 if (bitmap_bit_p (&live[src_bb->index], *v))
2438 {
2439 needed = true;
2440 break;
2441 }
2442 }
2443 if (needed)
2444 {
2445 tree new_var = *id->decl_map->get (var);
2446 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2447 tree clobber = build_clobber (TREE_TYPE (new_var));
2448 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2449 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2450 }
2451 }
2452 destroy_live_vars (live);
2453 delete vars;
2454 }
2455
2456 /* Copy edges from BB into its copy constructed earlier, scale the profile
2457 accordingly. Edges will be taken care of later. Assume the aux
2458 pointers point to the copies of each BB. Return true if any
2459 debug stmts are left after a statement that must end the basic block. */
2460
2461 static bool
2462 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2463 basic_block ret_bb, basic_block abnormal_goto_dest,
2464 copy_body_data *id)
2465 {
2466 basic_block new_bb = (basic_block) bb->aux;
2467 edge_iterator ei;
2468 edge old_edge;
2469 gimple_stmt_iterator si;
2470 bool need_debug_cleanup = false;
2471
2472 /* Use the indices from the original blocks to create edges for the
2473 new ones. */
2474 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2475 if (!(old_edge->flags & EDGE_EH))
2476 {
2477 edge new_edge;
2478 int flags = old_edge->flags;
2479 location_t locus = old_edge->goto_locus;
2480
2481 /* Return edges do get a FALLTHRU flag when they get inlined. */
2482 if (old_edge->dest->index == EXIT_BLOCK
2483 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2484 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2485 flags |= EDGE_FALLTHRU;
2486
2487 new_edge
2488 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2489 new_edge->probability = old_edge->probability;
2490 if (!id->reset_location)
2491 new_edge->goto_locus = remap_location (locus, id);
2492 }
2493
2494 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2495 return false;
2496
2497 /* When doing function splitting, we must decrease the count of the return
2498 block, which was previously reachable by blocks we did not copy. */
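/* A worked example with hypothetical numbers: if the copied return block
   would get a scaled count of 1000, but edges carrying a scaled count of
   300 come from blocks outside the copied region, the loop below leaves
   the copy with a count of 700. */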
2499 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2500 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2501 if (old_edge->src->index != ENTRY_BLOCK
2502 && !old_edge->src->aux)
2503 new_bb->count -= old_edge->count ().apply_scale (num, den);
2504
2505 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2506 {
2507 gimple *copy_stmt;
2508 bool can_throw, nonlocal_goto;
2509
2510 copy_stmt = gsi_stmt (si);
2511 if (!is_gimple_debug (copy_stmt))
2512 update_stmt (copy_stmt);
2513
2514 /* Do this before the possible split_block. */
2515 gsi_next (&si);
2516
2517 /* If this tree could throw an exception, there are two
2518 cases where we need to add abnormal edge(s): the
2519 tree wasn't in a region and there is a "current
2520 region" in the caller; or the original tree had
2521 EH edges. In both cases split the block after the tree,
2522 and add abnormal edge(s) as needed; we need both
2523 those from the callee and the caller.
2524 We check whether the copy can throw, because the const
2525 propagation can change an INDIRECT_REF which throws
2526 into a COMPONENT_REF which doesn't. If the copy
2527 can throw, the original could also throw. */
2528 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2529 nonlocal_goto
2530 = (stmt_can_make_abnormal_goto (copy_stmt)
2531 && !computed_goto_p (copy_stmt));
2532
2533 if (can_throw || nonlocal_goto)
2534 {
2535 if (!gsi_end_p (si))
2536 {
2537 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2538 gsi_next (&si);
2539 if (gsi_end_p (si))
2540 need_debug_cleanup = true;
2541 }
2542 if (!gsi_end_p (si))
2543 /* Note that bb's predecessor edges aren't necessarily
2544 right at this point; split_block doesn't care. */
2545 {
2546 edge e = split_block (new_bb, copy_stmt);
2547
2548 new_bb = e->dest;
2549 new_bb->aux = e->src->aux;
2550 si = gsi_start_bb (new_bb);
2551 }
2552 }
2553
2554 bool update_probs = false;
2555
2556 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2557 {
2558 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2559 update_probs = true;
2560 }
2561 else if (can_throw)
2562 {
2563 make_eh_edges (copy_stmt);
2564 update_probs = true;
2565 }
2566
2567 /* EH edges may not match old edges. Copy as much as possible. */
2568 if (update_probs)
2569 {
2570 edge e;
2571 edge_iterator ei;
2572 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2573
2574 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2575 if ((old_edge->flags & EDGE_EH)
2576 && (e = find_edge (copy_stmt_bb,
2577 (basic_block) old_edge->dest->aux))
2578 && (e->flags & EDGE_EH))
2579 e->probability = old_edge->probability;
2580
2581 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2582 if (e->flags & EDGE_EH)
2583 {
2584 if (!e->probability.initialized_p ())
2585 e->probability = profile_probability::never ();
2586 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2587 {
2588 if (id->eh_landing_pad_dest == NULL)
2589 id->eh_landing_pad_dest = e->dest;
2590 else
2591 gcc_assert (id->eh_landing_pad_dest == e->dest);
2592 }
2593 }
2594 }
2595
2596
2597 /* If the call we inline cannot make an abnormal goto, do not add
2598 additional abnormal edges but only retain those already present
2599 in the original function body. */
2600 if (abnormal_goto_dest == NULL)
2601 nonlocal_goto = false;
2602 if (nonlocal_goto)
2603 {
2604 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2605
2606 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2607 nonlocal_goto = false;
2608 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2609 in OpenMP regions which aren't allowed to be left abnormally.
2610 So, no need to add abnormal edge in that case. */
2611 else if (is_gimple_call (copy_stmt)
2612 && gimple_call_internal_p (copy_stmt)
2613 && (gimple_call_internal_fn (copy_stmt)
2614 == IFN_ABNORMAL_DISPATCHER)
2615 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2616 nonlocal_goto = false;
2617 else
2618 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2619 EDGE_ABNORMAL);
2620 }
2621
2622 if ((can_throw || nonlocal_goto)
2623 && gimple_in_ssa_p (cfun))
2624 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2625 can_throw, nonlocal_goto);
2626 }
2627 return need_debug_cleanup;
2628 }
2629
2630 /* Copy the PHIs. All blocks and edges are copied, some blocks
2631 were possibly split and new outgoing EH edges inserted.
2632 BB points to the block of the original function and AUX pointers link
2633 the original and newly copied blocks. */
2634
2635 static void
2636 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2637 {
2638 basic_block const new_bb = (basic_block) bb->aux;
2639 edge_iterator ei;
2640 gphi *phi;
2641 gphi_iterator si;
2642 edge new_edge;
2643 bool inserted = false;
2644
2645 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2646 {
2647 tree res, new_res;
2648 gphi *new_phi;
2649
2650 phi = si.phi ();
2651 res = PHI_RESULT (phi);
2652 new_res = res;
2653 if (!virtual_operand_p (res))
2654 {
2655 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2656 if (EDGE_COUNT (new_bb->preds) == 0)
2657 {
2658 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2659 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2660 }
2661 else
2662 {
2663 new_phi = create_phi_node (new_res, new_bb);
2664 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2665 {
2666 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2667 bb);
2668 tree arg;
2669 tree new_arg;
2670 edge_iterator ei2;
2671 location_t locus;
2672
2673 /* When doing partial cloning, we allow PHIs on the entry
2674 block as long as all the arguments are the same.
2675 Find any input edge to get the argument to copy. */
2676 if (!old_edge)
2677 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2678 if (!old_edge->src->aux)
2679 break;
2680
2681 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2682 new_arg = arg;
2683 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2684 gcc_assert (new_arg);
2685 /* With return slot optimization we can end up with
2686 non-gimple (foo *)&this->m; fix that here. */
2687 if (TREE_CODE (new_arg) != SSA_NAME
2688 && TREE_CODE (new_arg) != FUNCTION_DECL
2689 && !is_gimple_val (new_arg))
2690 {
2691 gimple_seq stmts = NULL;
2692 new_arg = force_gimple_operand (new_arg, &stmts, true,
2693 NULL);
2694 gsi_insert_seq_on_edge (new_edge, stmts);
2695 inserted = true;
2696 }
2697 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2698 if (id->reset_location)
2699 locus = input_location;
2700 else
2701 locus = remap_location (locus, id);
2702 add_phi_arg (new_phi, new_arg, new_edge, locus);
2703 }
2704 }
2705 }
2706 }
2707
2708 /* Commit the delayed edge insertions. */
2709 if (inserted)
2710 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2711 gsi_commit_one_edge_insert (new_edge, NULL);
2712 }
2713
2714
2715 /* Wrapper for remap_decl so it can be used as a callback. */
2716
2717 static tree
2718 remap_decl_1 (tree decl, void *data)
2719 {
2720 return remap_decl (decl, (copy_body_data *) data);
2721 }
2722
2723 /* Build struct function and associated datastructures for the new clone
2724 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2725 cfun to the function of new_fndecl (and current_function_decl too). */
2726
2727 static void
2728 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2729 {
2730 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2731
2732 if (!DECL_ARGUMENTS (new_fndecl))
2733 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2734 if (!DECL_RESULT (new_fndecl))
2735 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2736
2737 /* Register specific tree functions. */
2738 gimple_register_cfg_hooks ();
2739
2740 /* Get clean struct function. */
2741 push_struct_function (new_fndecl);
2742
2743 /* We will rebuild these, so just sanity check that they are empty. */
2744 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2745 gcc_assert (cfun->local_decls == NULL);
2746 gcc_assert (cfun->cfg == NULL);
2747 gcc_assert (cfun->decl == new_fndecl);
2748
2749 /* Copy items we preserve during cloning. */
2750 cfun->static_chain_decl = src_cfun->static_chain_decl;
2751 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2752 cfun->function_end_locus = src_cfun->function_end_locus;
2753 cfun->curr_properties = src_cfun->curr_properties;
2754 cfun->last_verified = src_cfun->last_verified;
2755 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2756 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2757 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2758 cfun->calls_eh_return = src_cfun->calls_eh_return;
2759 cfun->stdarg = src_cfun->stdarg;
2760 cfun->after_inlining = src_cfun->after_inlining;
2761 cfun->can_throw_non_call_exceptions
2762 = src_cfun->can_throw_non_call_exceptions;
2763 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2764 cfun->returns_struct = src_cfun->returns_struct;
2765 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2766
2767 init_empty_tree_cfg ();
2768
2769 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2770
2771 profile_count num = count;
2772 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2773 profile_count::adjust_for_ipa_scaling (&num, &den);
2774
2775 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2776 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2777 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2778 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2779 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2780 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2781 if (src_cfun->eh)
2782 init_eh_for_function ();
2783
2784 if (src_cfun->gimple_df)
2785 {
2786 init_tree_ssa (cfun);
2787 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2788 if (cfun->gimple_df->in_ssa_p)
2789 init_ssa_operands (cfun);
2790 }
2791 }
2792
2793 /* Helper function for copy_cfg_body. Move debug stmts from the end
2794 of NEW_BB to the beginning of successor basic blocks when needed. If the
2795 successor has multiple predecessors, reset them; otherwise keep
2796 their value. */
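/* For instance (hypothetical GIMPLE): if the copied block was split after a
   throwing call, a trailing  # DEBUG x => y_1  would otherwise be left after
   the statement that must end the block.  It is moved into the last
   successor and copied into the remaining ones; whenever the destination
   has several predecessors, the bound value is dropped so the bind does not
   assert a value that only holds on one incoming path. */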
2797
2798 static void
2799 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2800 {
2801 edge e;
2802 edge_iterator ei;
2803 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2804
2805 if (gsi_end_p (si)
2806 || gsi_one_before_end_p (si)
2807 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2808 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2809 return;
2810
2811 FOR_EACH_EDGE (e, ei, new_bb->succs)
2812 {
2813 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2814 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2815 while (is_gimple_debug (gsi_stmt (ssi)))
2816 {
2817 gimple *stmt = gsi_stmt (ssi);
2818 gdebug *new_stmt;
2819 tree var;
2820 tree value;
2821
2822 /* For the last edge move the debug stmts instead of copying
2823 them. */
2824 if (ei_one_before_end_p (ei))
2825 {
2826 si = ssi;
2827 gsi_prev (&ssi);
2828 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2829 {
2830 gimple_debug_bind_reset_value (stmt);
2831 gimple_set_location (stmt, UNKNOWN_LOCATION);
2832 }
2833 gsi_remove (&si, false);
2834 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2835 continue;
2836 }
2837
2838 if (gimple_debug_bind_p (stmt))
2839 {
2840 var = gimple_debug_bind_get_var (stmt);
2841 if (single_pred_p (e->dest))
2842 {
2843 value = gimple_debug_bind_get_value (stmt);
2844 value = unshare_expr (value);
2845 new_stmt = gimple_build_debug_bind (var, value, stmt);
2846 }
2847 else
2848 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2849 }
2850 else if (gimple_debug_source_bind_p (stmt))
2851 {
2852 var = gimple_debug_source_bind_get_var (stmt);
2853 value = gimple_debug_source_bind_get_value (stmt);
2854 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2855 }
2856 else if (gimple_debug_nonbind_marker_p (stmt))
2857 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2858 else
2859 gcc_unreachable ();
2860 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2861 id->debug_stmts.safe_push (new_stmt);
2862 gsi_prev (&ssi);
2863 }
2864 }
2865 }
2866
2867 /* Make a copy of the sub-loops of SRC_PARENT and place them
2868 as sub-loops of DEST_PARENT. */
2869
2870 static void
2871 copy_loops (copy_body_data *id,
2872 class loop *dest_parent, class loop *src_parent)
2873 {
2874 class loop *src_loop = src_parent->inner;
2875 while (src_loop)
2876 {
2877 if (!id->blocks_to_copy
2878 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2879 {
2880 class loop *dest_loop = alloc_loop ();
2881
2882 /* Assign the new loop its header and latch and associate
2883 those with the new loop. */
2884 dest_loop->header = (basic_block)src_loop->header->aux;
2885 dest_loop->header->loop_father = dest_loop;
2886 if (src_loop->latch != NULL)
2887 {
2888 dest_loop->latch = (basic_block)src_loop->latch->aux;
2889 dest_loop->latch->loop_father = dest_loop;
2890 }
2891
2892 /* Copy loop meta-data. */
2893 copy_loop_info (src_loop, dest_loop);
2894 if (dest_loop->unroll)
2895 cfun->has_unroll = true;
2896 if (dest_loop->force_vectorize)
2897 cfun->has_force_vectorize_loops = true;
2898 if (id->src_cfun->last_clique != 0)
2899 dest_loop->owned_clique
2900 = remap_dependence_clique (id,
2901 src_loop->owned_clique
2902 ? src_loop->owned_clique : 1);
2903
2904 /* Finally place it into the loop array and the loop tree. */
2905 place_new_loop (cfun, dest_loop);
2906 flow_loop_tree_node_add (dest_parent, dest_loop);
2907
2908 if (src_loop->simduid)
2909 {
2910 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2911 cfun->has_simduid_loops = true;
2912 }
2913
2914 /* Recurse. */
2915 copy_loops (id, dest_loop, src_loop);
2916 }
2917 src_loop = src_loop->next;
2918 }
2919 }
2920
2921 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2922
2923 void
2924 redirect_all_calls (copy_body_data * id, basic_block bb)
2925 {
2926 gimple_stmt_iterator si;
2927 gimple *last = last_stmt (bb);
2928 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2929 {
2930 gimple *stmt = gsi_stmt (si);
2931 if (is_gimple_call (stmt))
2932 {
2933 tree old_lhs = gimple_call_lhs (stmt);
2934 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2935 if (edge)
2936 {
2937 gimple *new_stmt
2938 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2939 /* If the IPA-SRA transformation, run as part of edge redirection,
2940 removed the LHS because it is unused, save it to
2941 killed_new_ssa_names so that we can prune it from debug
2942 statements. */
2943 if (old_lhs
2944 && TREE_CODE (old_lhs) == SSA_NAME
2945 && !gimple_call_lhs (new_stmt))
2946 {
2947 if (!id->killed_new_ssa_names)
2948 id->killed_new_ssa_names = new hash_set<tree> (16);
2949 id->killed_new_ssa_names->add (old_lhs);
2950 }
2951
2952 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2953 gimple_purge_dead_eh_edges (bb);
2954 }
2955 }
2956 }
2957 }
2958
2959 /* Make a copy of the body of FN so that it can be inserted inline in
2960 another function. Walks FN via CFG, returns new fndecl. */
2961
2962 static tree
2963 copy_cfg_body (copy_body_data * id,
2964 basic_block entry_block_map, basic_block exit_block_map,
2965 basic_block new_entry)
2966 {
2967 tree callee_fndecl = id->src_fn;
2968 /* Original cfun for the callee, doesn't change. */
2969 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2970 struct function *cfun_to_copy;
2971 basic_block bb;
2972 tree new_fndecl = NULL;
2973 bool need_debug_cleanup = false;
2974 int last;
2975 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2976 profile_count num = entry_block_map->count;
2977
2978 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2979
2980 /* Register specific tree functions. */
2981 gimple_register_cfg_hooks ();
2982
2983 /* If we are inlining just a region of the function, make sure to connect
2984 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2985 be part of a loop, we must compute the frequency and probability of
2986 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2987 probabilities of edges incoming from the nonduplicated region. */
2988 if (new_entry)
2989 {
2990 edge e;
2991 edge_iterator ei;
2992 den = profile_count::zero ();
2993
2994 FOR_EACH_EDGE (e, ei, new_entry->preds)
2995 if (!e->src->aux)
2996 den += e->count ();
2997 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2998 }
2999
3000 profile_count::adjust_for_ipa_scaling (&num, &den);
3001
3002 /* Must have a CFG here at this point. */
3003 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3004 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3005
3006
3007 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3008 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3009 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3010 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3011
3012 /* Duplicate any exception-handling regions. */
3013 if (cfun->eh)
3014 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3015 remap_decl_1, id);
3016
3017 /* Use aux pointers to map the original blocks to their copies. */
3018 FOR_EACH_BB_FN (bb, cfun_to_copy)
3019 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3020 {
3021 basic_block new_bb = copy_bb (id, bb, num, den);
3022 bb->aux = new_bb;
3023 new_bb->aux = bb;
3024 new_bb->loop_father = entry_block_map->loop_father;
3025 }
3026
3027 last = last_basic_block_for_fn (cfun);
3028
3029 /* Now that we've duplicated the blocks, duplicate their edges. */
3030 basic_block abnormal_goto_dest = NULL;
3031 if (id->call_stmt
3032 && stmt_can_make_abnormal_goto (id->call_stmt))
3033 {
3034 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3035
3036 bb = gimple_bb (id->call_stmt);
3037 gsi_next (&gsi);
3038 if (gsi_end_p (gsi))
3039 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3040 }
3041 FOR_ALL_BB_FN (bb, cfun_to_copy)
3042 if (!id->blocks_to_copy
3043 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3044 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3045 abnormal_goto_dest, id);
3046
3047 if (id->eh_landing_pad_dest)
3048 {
3049 add_clobbers_to_eh_landing_pad (id);
3050 id->eh_landing_pad_dest = NULL;
3051 }
3052
3053 if (new_entry)
3054 {
3055 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3056 EDGE_FALLTHRU);
3057 e->probability = profile_probability::always ();
3058 }
3059
3060 /* Duplicate the loop tree, if available and wanted. */
3061 if (loops_for_fn (src_cfun) != NULL
3062 && current_loops != NULL)
3063 {
3064 copy_loops (id, entry_block_map->loop_father,
3065 get_loop (src_cfun, 0));
3066 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3067 loops_state_set (LOOPS_NEED_FIXUP);
3068 }
3069
3070 /* If the loop tree in the source function needed fixup, mark the
3071 destination loop tree for fixup, too. */
3072 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3073 loops_state_set (LOOPS_NEED_FIXUP);
3074
3075 if (gimple_in_ssa_p (cfun))
3076 FOR_ALL_BB_FN (bb, cfun_to_copy)
3077 if (!id->blocks_to_copy
3078 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3079 copy_phis_for_bb (bb, id);
3080
3081 FOR_ALL_BB_FN (bb, cfun_to_copy)
3082 if (bb->aux)
3083 {
3084 if (need_debug_cleanup
3085 && bb->index != ENTRY_BLOCK
3086 && bb->index != EXIT_BLOCK)
3087 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3088 /* Update call edge destinations. This cannot be done before loop
3089 info is updated, because we may split basic blocks. */
3090 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3091 && bb->index != ENTRY_BLOCK
3092 && bb->index != EXIT_BLOCK)
3093 redirect_all_calls (id, (basic_block)bb->aux);
3094 ((basic_block)bb->aux)->aux = NULL;
3095 bb->aux = NULL;
3096 }
3097
3098 /* Zero out AUX fields of blocks newly created during EH edge
3099 insertion. */
3100 for (; last < last_basic_block_for_fn (cfun); last++)
3101 {
3102 if (need_debug_cleanup)
3103 maybe_move_debug_stmts_to_successors (id,
3104 BASIC_BLOCK_FOR_FN (cfun, last));
3105 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3106 /* Update call edge destinations. This cannot be done before loop
3107 info is updated, because we may split basic blocks. */
3108 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3109 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3110 }
3111 entry_block_map->aux = NULL;
3112 exit_block_map->aux = NULL;
3113
3114 if (id->eh_map)
3115 {
3116 delete id->eh_map;
3117 id->eh_map = NULL;
3118 }
3119 if (id->dependence_map)
3120 {
3121 delete id->dependence_map;
3122 id->dependence_map = NULL;
3123 }
3124
3125 return new_fndecl;
3126 }
3127
3128 /* Copy the debug STMT using ID. We deal with these statements in a
3129 special way: if any variable in their VALUE expression wasn't
3130 remapped yet, we won't remap it, because that would get decl uids
3131 out of sync, causing codegen differences between -g and -g0. If
3132 this arises, we drop the VALUE expression altogether. */
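/* For example, if the VALUE of a debug bind mentions a callee variable that
   no surviving non-debug statement uses, that variable has no mapping yet;
   remapping it here would create a fresh decl (and decl uid) only when
   compiling with -g, so the value is reset instead. */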
3133
3134 static void
3135 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3136 {
3137 tree t, *n;
3138 struct walk_stmt_info wi;
3139
3140 if (tree block = gimple_block (stmt))
3141 {
3142 n = id->decl_map->get (block);
3143 gimple_set_block (stmt, n ? *n : id->block);
3144 }
3145
3146 if (gimple_debug_nonbind_marker_p (stmt))
3147 return;
3148
3149 /* Remap all the operands in COPY. */
3150 memset (&wi, 0, sizeof (wi));
3151 wi.info = id;
3152
3153 processing_debug_stmt = 1;
3154
3155 if (gimple_debug_source_bind_p (stmt))
3156 t = gimple_debug_source_bind_get_var (stmt);
3157 else if (gimple_debug_bind_p (stmt))
3158 t = gimple_debug_bind_get_var (stmt);
3159 else
3160 gcc_unreachable ();
3161
3162 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3163 && (n = id->debug_map->get (t)))
3164 {
3165 gcc_assert (VAR_P (*n));
3166 t = *n;
3167 }
3168 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3169 /* T is a non-localized variable. */;
3170 else
3171 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3172
3173 if (gimple_debug_bind_p (stmt))
3174 {
3175 gimple_debug_bind_set_var (stmt, t);
3176
3177 if (gimple_debug_bind_has_value_p (stmt))
3178 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3179 remap_gimple_op_r, &wi, NULL);
3180
3181 /* Punt if any decl couldn't be remapped. */
3182 if (processing_debug_stmt < 0)
3183 gimple_debug_bind_reset_value (stmt);
3184 }
3185 else if (gimple_debug_source_bind_p (stmt))
3186 {
3187 gimple_debug_source_bind_set_var (stmt, t);
3188 /* When inlining and the source bind refers to one of the optimized-away
3189 parameters, change the source bind into a normal debug bind
3190 referring to the corresponding DEBUG_EXPR_DECL that should have
3191 been bound before the call stmt. */
3192 t = gimple_debug_source_bind_get_value (stmt);
3193 if (t != NULL_TREE
3194 && TREE_CODE (t) == PARM_DECL
3195 && id->call_stmt)
3196 {
3197 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3198 unsigned int i;
3199 if (debug_args != NULL)
3200 {
3201 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3202 if ((**debug_args)[i] == DECL_ORIGIN (t)
3203 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3204 {
3205 t = (**debug_args)[i + 1];
3206 stmt->subcode = GIMPLE_DEBUG_BIND;
3207 gimple_debug_bind_set_value (stmt, t);
3208 break;
3209 }
3210 }
3211 }
3212 if (gimple_debug_source_bind_p (stmt))
3213 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3214 remap_gimple_op_r, &wi, NULL);
3215 }
3216
3217 processing_debug_stmt = 0;
3218
3219 update_stmt (stmt);
3220 }
3221
3222 /* Process deferred debug stmts. In order to give values better odds
3223 of being successfully remapped, we delay the processing of debug
3224 stmts until all other stmts that might require remapping are
3225 processed. */
3226
3227 static void
3228 copy_debug_stmts (copy_body_data *id)
3229 {
3230 size_t i;
3231 gdebug *stmt;
3232
3233 if (!id->debug_stmts.exists ())
3234 return;
3235
3236 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3237 copy_debug_stmt (stmt, id);
3238
3239 id->debug_stmts.release ();
3240 }
3241
3242 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3243 another function. */
3244
3245 static tree
3246 copy_tree_body (copy_body_data *id)
3247 {
3248 tree fndecl = id->src_fn;
3249 tree body = DECL_SAVED_TREE (fndecl);
3250
3251 walk_tree (&body, copy_tree_body_r, id, NULL);
3252
3253 return body;
3254 }
3255
3256 /* Make a copy of the body of FN so that it can be inserted inline in
3257 another function. */
3258
3259 static tree
3260 copy_body (copy_body_data *id,
3261 basic_block entry_block_map, basic_block exit_block_map,
3262 basic_block new_entry)
3263 {
3264 tree fndecl = id->src_fn;
3265 tree body;
3266
3267 /* If this body has a CFG, walk CFG and copy. */
3268 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3269 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3270 new_entry);
3271 copy_debug_stmts (id);
3272 delete id->killed_new_ssa_names;
3273 id->killed_new_ssa_names = NULL;
3274
3275 return body;
3276 }
3277
3278 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3279 defined in function FN, or of a data member thereof. */
3280
3281 static bool
3282 self_inlining_addr_expr (tree value, tree fn)
3283 {
3284 tree var;
3285
3286 if (TREE_CODE (value) != ADDR_EXPR)
3287 return false;
3288
3289 var = get_base_address (TREE_OPERAND (value, 0));
3290
3291 return var && auto_var_in_fn_p (var, fn);
3292 }
3293
3294 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3295 lexical block and line number information from BASE_STMT, if given,
3296 or from the last stmt of the block otherwise. */
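/* A minimal sketch (hypothetical names): for a parameter n whose
   initialization could be elided, this appends a statement of the form

     # DEBUG n => 5

   to BB, so var-tracking can still present the parameter's value. */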
3297
3298 static gimple *
3299 insert_init_debug_bind (copy_body_data *id,
3300 basic_block bb, tree var, tree value,
3301 gimple *base_stmt)
3302 {
3303 gimple *note;
3304 gimple_stmt_iterator gsi;
3305 tree tracked_var;
3306
3307 if (!gimple_in_ssa_p (id->src_cfun))
3308 return NULL;
3309
3310 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3311 return NULL;
3312
3313 tracked_var = target_for_debug_bind (var);
3314 if (!tracked_var)
3315 return NULL;
3316
3317 if (bb)
3318 {
3319 gsi = gsi_last_bb (bb);
3320 if (!base_stmt && !gsi_end_p (gsi))
3321 base_stmt = gsi_stmt (gsi);
3322 }
3323
3324 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3325
3326 if (bb)
3327 {
3328 if (!gsi_end_p (gsi))
3329 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3330 else
3331 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3332 }
3333
3334 return note;
3335 }
3336
3337 static void
3338 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3339 {
3340 /* If VAR represents a zero-sized variable, it's possible that the
3341 assignment statement may result in no gimple statements. */
3342 if (init_stmt)
3343 {
3344 gimple_stmt_iterator si = gsi_last_bb (bb);
3345
3346 /* We can end up with init statements that store to a non-register
3347 from a rhs with a conversion. Handle that here by forcing the
3348 rhs into a temporary. gimple_regimplify_operands is not
3349 prepared to do this for us. */
3350 if (!is_gimple_debug (init_stmt)
3351 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3352 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3353 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3354 {
3355 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3356 gimple_expr_type (init_stmt),
3357 gimple_assign_rhs1 (init_stmt));
3358 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3359 GSI_NEW_STMT);
3360 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3361 gimple_assign_set_rhs1 (init_stmt, rhs);
3362 }
3363 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3364 if (!is_gimple_debug (init_stmt))
3365 {
3366 gimple_regimplify_operands (init_stmt, &si);
3367
3368 tree def = gimple_assign_lhs (init_stmt);
3369 insert_init_debug_bind (id, bb, def, def, init_stmt);
3370 }
3371 }
3372 }
3373
3374 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3375 if need be (which should only be necessary for invalid programs). Attempt
3376 to convert VALUE to TYPE and return the result if it is possible; just return
3377 a zero constant of the given type if it fails. */
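/* A few illustrative cases (hypothetical types): an int actual passed for a
   long formal is simply fold_convert'ed; an aggregate actual passed where a
   scalar was expected is wrapped in a VIEW_CONVERT_EXPR; a register-type
   actual whose size does not even match yields build_zero_cst (type). */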
3378
3379 tree
3380 force_value_to_type (tree type, tree value)
3381 {
3382 /* If we can match up types by promotion/demotion do so. */
3383 if (fold_convertible_p (type, value))
3384 return fold_convert (type, value);
3385
3386 /* ??? For valid programs we should not end up here.
3387 Still if we end up with truly mismatched types here, fall back
3388 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3389 GIMPLE to the following passes. */
3390 if (!is_gimple_reg_type (TREE_TYPE (value))
3391 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3392 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3393 else
3394 return build_zero_cst (type);
3395 }
3396
3397 /* Initialize parameter P with VALUE. If needed, produce the init statement
3398 at the end of BB. When BB is NULL, we return the init statement to be
3399 output later. */
3400 static gimple *
3401 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3402 basic_block bb, tree *vars)
3403 {
3404 gimple *init_stmt = NULL;
3405 tree var;
3406 tree rhs = value;
3407 tree def = (gimple_in_ssa_p (cfun)
3408 ? ssa_default_def (id->src_cfun, p) : NULL);
3409
3410 if (value
3411 && value != error_mark_node
3412 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3413 rhs = force_value_to_type (TREE_TYPE (p), value);
3414
3415 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3416 here since the type of this decl must be visible to the calling
3417 function. */
3418 var = copy_decl_to_var (p, id);
3419
3420 /* Declare this new variable. */
3421 DECL_CHAIN (var) = *vars;
3422 *vars = var;
3423
3424 /* Make gimplifier happy about this variable. */
3425 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3426
3427 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3428 we would not need to create a new variable here at all, if it
3429 weren't for debug info. Still, we can just use the argument
3430 value. */
3431 if (TREE_READONLY (p)
3432 && !TREE_ADDRESSABLE (p)
3433 && value && !TREE_SIDE_EFFECTS (value)
3434 && !def)
3435 {
3436 /* We may produce non-gimple trees by adding NOPs or introduce
3437 invalid sharing when the operand is not really constant.
3438 It is not a big deal to prohibit constant propagation here as
3439 we will constant propagate in the DOM1 pass anyway. */
3440 if (is_gimple_min_invariant (value)
3441 && useless_type_conversion_p (TREE_TYPE (p),
3442 TREE_TYPE (value))
3443 /* We have to be very careful about ADDR_EXPR. Make sure
3444 the base variable isn't a local variable of the inlined
3445 function, e.g., when doing recursive inlining, direct or
3446 mutually-recursive or whatever, which is why we don't
3447 just test whether fn == current_function_decl. */
3448 && ! self_inlining_addr_expr (value, fn))
3449 {
3450 insert_decl_map (id, p, value);
3451 insert_debug_decl_map (id, p, var);
3452 return insert_init_debug_bind (id, bb, var, value, NULL);
3453 }
3454 }
3455
3456 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3457 that way, when the PARM_DECL is encountered, it will be
3458 automatically replaced by the VAR_DECL. */
3459 insert_decl_map (id, p, var);
3460
3461 /* Even if P was TREE_READONLY, the new VAR should not be.
3462 In the original code, we would have constructed a
3463 temporary, and then the function body would have never
3464 changed the value of P. However, now, we will be
3465 constructing VAR directly. The constructor body may
3466 change its value multiple times as it is being
3467 constructed. Therefore, it must not be TREE_READONLY;
3468 the back-end assumes that TREE_READONLY variable is
3469 assigned to only once. */
3470 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3471 TREE_READONLY (var) = 0;
3472
3473 /* If there is no setup required and we are in SSA, take the easy route
3474 replacing all SSA names representing the function parameter by the
3475 SSA name passed to the function.
3476
3477 We need to construct a map for the variable anyway as it might be used
3478 in different SSA names when the parameter is set in the function.
3479
3480 Do the replacement at -O0 for const arguments replaced by a constant.
3481 This is important for builtin_constant_p and other constructs requiring
3482 the constant argument to be visible in the inlined function body. */
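/* A sketch with hypothetical SSA names: if the caller passes a_3 for
   parameter p whose default definition in the callee is p_1(D), we simply
   record the mapping p_1(D) -> a_3; uses of p in the inlined body then
   refer to a_3 directly and no initialization statement is emitted. */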
3483 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3484 && (optimize
3485 || (TREE_READONLY (p)
3486 && is_gimple_min_invariant (rhs)))
3487 && (TREE_CODE (rhs) == SSA_NAME
3488 || is_gimple_min_invariant (rhs))
3489 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3490 {
3491 insert_decl_map (id, def, rhs);
3492 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3493 }
3494
3495 /* If the value of the argument is never used, we don't care about
3496 initializing it. */
3497 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3498 {
3499 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3500 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3501 }
3502
3503 /* Initialize this VAR_DECL from the equivalent argument. Convert
3504 the argument to the proper type in case it was promoted. */
3505 if (value)
3506 {
3507 if (rhs == error_mark_node)
3508 {
3509 insert_decl_map (id, p, var);
3510 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3511 }
3512
3513 STRIP_USELESS_TYPE_CONVERSION (rhs);
3514
3515 /* If we are in SSA form, properly remap the default definition,
3516 or assign to a dummy SSA name if the parameter is unused and
3517 we are not optimizing. */
3518 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3519 {
3520 if (def)
3521 {
3522 def = remap_ssa_name (def, id);
3523 init_stmt = gimple_build_assign (def, rhs);
3524 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3525 set_ssa_default_def (cfun, var, NULL);
3526 }
3527 else if (!optimize)
3528 {
3529 def = make_ssa_name (var);
3530 init_stmt = gimple_build_assign (def, rhs);
3531 }
3532 }
3533 else
3534 init_stmt = gimple_build_assign (var, rhs);
3535
3536 if (bb && init_stmt)
3537 insert_init_stmt (id, bb, init_stmt);
3538 }
3539 return init_stmt;
3540 }
3541
3542 /* Generate code to initialize the parameters of the function at the
3543 top of the stack in ID from the GIMPLE_CALL STMT. */
3544
3545 static void
3546 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3547 tree fn, basic_block bb)
3548 {
3549 tree parms;
3550 size_t i;
3551 tree p;
3552 tree vars = NULL_TREE;
3553 tree static_chain = gimple_call_chain (stmt);
3554
3555 /* Figure out what the parameters are. */
3556 parms = DECL_ARGUMENTS (fn);
3557
3558 /* Loop through the parameter declarations, replacing each with an
3559 equivalent VAR_DECL, appropriately initialized. */
3560 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3561 {
3562 tree val;
3563 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3564 setup_one_parameter (id, p, val, fn, bb, &vars);
3565 }
3566 /* After remapping the parameters, remap their types. This has to be done
3567 in a second loop over all parameters to appropriately remap
3568 variable sized arrays when the size is specified in a
3569 parameter following the array; see the illustration after this loop. */
3570 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3571 {
3572 tree *varp = id->decl_map->get (p);
3573 if (varp && VAR_P (*varp))
3574 {
3575 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3576 ? ssa_default_def (id->src_cfun, p) : NULL);
3577 tree var = *varp;
3578 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3579 /* Also remap the default definition if it was remapped
3580 to the default definition of the parameter replacement
3581 by the parameter setup. */
3582 if (def)
3583 {
3584 tree *defp = id->decl_map->get (def);
3585 if (defp
3586 && TREE_CODE (*defp) == SSA_NAME
3587 && SSA_NAME_VAR (*defp) == var)
3588 TREE_TYPE (*defp) = TREE_TYPE (var);
3589 }
3590 }
3591 }
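/* A minimal sketch of the situation handled above (hypothetical callee
   signature, not taken from this file): with GNU forward parameter
   declarations such as

       void copy (int len; char buf[len], int len) { ... }

   the type of BUF refers to LEN, a parameter that follows it.  The first
   loop creates the replacement VAR_DECLs; only once LEN has a mapping can
   the second loop remap BUF's variable-sized array type correctly.  */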
3592
3593 /* Initialize the static chain. */
3594 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3595 gcc_assert (fn != current_function_decl);
3596 if (p)
3597 {
3598 /* No static chain? Seems like a bug in tree-nested.c. */
3599 gcc_assert (static_chain);
3600
3601 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3602 }
3603
3604 declare_inline_vars (id->block, vars);
3605 }
3606
3607
3608 /* Declare a return variable to replace the RESULT_DECL for the
3609 function we are calling. An appropriate DECL_STMT is returned.
3610 The USE_STMT is filled to contain a use of the declaration to
3611 indicate the return value of the function.
3612
3613 RETURN_SLOT, if non-null, is the place where to store the result. It
3614 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3615 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3616
3617 The return value is a (possibly null) value that holds the result
3618 as seen by the caller. */
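/* A minimal illustration of the two cases (hypothetical caller code, not
   taken from this file).  For 'a = foo (x);' MODIFY_DEST is 'a' and
   RETURN_SLOT is NULL.  For an aggregate call where the front end applied
   CALL_EXPR_RETURN_SLOT_OPT, e.g. 's = make_s (x);', RETURN_SLOT is 's'
   and MODIFY_DEST is NULL.  For a call whose value is ignored, both are
   NULL and caller_type falls back to the callee's declared return type.  */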
3619
3620 static tree
3621 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3622 basic_block entry_bb)
3623 {
3624 tree callee = id->src_fn;
3625 tree result = DECL_RESULT (callee);
3626 tree callee_type = TREE_TYPE (result);
3627 tree caller_type;
3628 tree var, use;
3629
3630 /* Handle type-mismatches in the function declaration return type
3631 vs. the call expression. */
3632 if (modify_dest)
3633 caller_type = TREE_TYPE (modify_dest);
3634 else if (return_slot)
3635 caller_type = TREE_TYPE (return_slot);
3636 else /* No LHS on the call. */
3637 caller_type = TREE_TYPE (TREE_TYPE (callee));
3638
3639 /* We don't need to do anything for functions that don't return anything. */
3640 if (VOID_TYPE_P (callee_type))
3641 return NULL_TREE;
3642
3643 /* If there was a return slot, then the return value is the
3644 dereferenced address of that object. */
3645 if (return_slot)
3646 {
3647 /* The front end shouldn't have used both return_slot and
3648 a modify expression. */
3649 gcc_assert (!modify_dest);
3650 if (DECL_BY_REFERENCE (result))
3651 {
3652 tree return_slot_addr = build_fold_addr_expr (return_slot);
3653 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3654
3655 /* We are going to construct *&return_slot and we can't do that
3656 for variables believed to be not addressable.
3657
3658 FIXME: This check can possibly trigger, because values returned
3659 via the return slot optimization are not believed to have their
3660 address taken by alias analysis. */
3661 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3662 var = return_slot_addr;
3663 mark_addressable (return_slot);
3664 }
3665 else
3666 {
3667 var = return_slot;
3668 gcc_assert (TREE_CODE (var) != SSA_NAME);
3669 if (TREE_ADDRESSABLE (result))
3670 mark_addressable (var);
3671 }
3672 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3673 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3674 && !DECL_GIMPLE_REG_P (result)
3675 && DECL_P (var))
3676 DECL_GIMPLE_REG_P (var) = 0;
3677
3678 if (!useless_type_conversion_p (callee_type, caller_type))
3679 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3680
3681 use = NULL;
3682 goto done;
3683 }
3684
3685 /* All types requiring non-trivial constructors should have been handled. */
3686 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3687
3688 /* Attempt to avoid creating a new temporary variable. */
3689 if (modify_dest
3690 && TREE_CODE (modify_dest) != SSA_NAME)
3691 {
3692 bool use_it = false;
3693
3694 /* We can't use MODIFY_DEST if there's type promotion involved. */
3695 if (!useless_type_conversion_p (callee_type, caller_type))
3696 use_it = false;
3697
3698 /* ??? If we're assigning to a variable sized type, then we must
3699 reuse the destination variable, because we've no good way to
3700 create variable sized temporaries at this point. */
3701 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3702 use_it = true;
3703
3704 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3705 reuse it as the result of the call directly. Don't do this if
3706 it would promote MODIFY_DEST to addressable. */
3707 else if (TREE_ADDRESSABLE (result))
3708 use_it = false;
3709 else
3710 {
3711 tree base_m = get_base_address (modify_dest);
3712
3713 /* If the base isn't a decl, then it's a pointer, and we don't
3714 know where that's going to go. */
3715 if (!DECL_P (base_m))
3716 use_it = false;
3717 else if (is_global_var (base_m))
3718 use_it = false;
3719 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3720 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3721 && !DECL_GIMPLE_REG_P (result)
3722 && DECL_GIMPLE_REG_P (base_m))
3723 use_it = false;
3724 else if (!TREE_ADDRESSABLE (base_m))
3725 use_it = true;
3726 }
3727
3728 if (use_it)
3729 {
3730 var = modify_dest;
3731 use = NULL;
3732 goto done;
3733 }
3734 }
3735
3736 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3737
3738 var = copy_result_decl_to_var (result, id);
3739 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3740
3741 /* Do not have the rest of GCC warn about this variable as it should
3742 not be visible to the user. */
3743 TREE_NO_WARNING (var) = 1;
3744
3745 declare_inline_vars (id->block, var);
3746
3747 /* Build the use expr. If the return type of the function was
3748 promoted, convert it back to the expected type. */
3749 use = var;
3750 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3751 {
3752 /* If we can match up types by promotion/demotion do so. */
3753 if (fold_convertible_p (caller_type, var))
3754 use = fold_convert (caller_type, var);
3755 else
3756 {
3757 /* ??? For valid programs we should not end up here.
3758 Still, if we end up with truly mismatched types here, fall back
3759 to using a MEM_REF so as not to leak invalid GIMPLE to the following
3760 passes. */
3761 /* Prevent var from being written into SSA form. */
3762 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3763 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3764 DECL_GIMPLE_REG_P (var) = false;
3765 else if (is_gimple_reg_type (TREE_TYPE (var)))
3766 TREE_ADDRESSABLE (var) = true;
3767 use = fold_build2 (MEM_REF, caller_type,
3768 build_fold_addr_expr (var),
3769 build_int_cst (ptr_type_node, 0));
3770 }
3771 }
3772
3773 STRIP_USELESS_TYPE_CONVERSION (use);
3774
3775 if (DECL_BY_REFERENCE (result))
3776 {
3777 TREE_ADDRESSABLE (var) = 1;
3778 var = build_fold_addr_expr (var);
3779 }
3780
3781 done:
3782 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3783 way, when the RESULT_DECL is encountered, it will be
3784 automatically replaced by the VAR_DECL.
3785
3786 When returning by reference, ensure that RESULT_DECL remaps to
3787 gimple_val. */
3788 if (DECL_BY_REFERENCE (result)
3789 && !is_gimple_val (var))
3790 {
3791 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3792 insert_decl_map (id, result, temp);
3793 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3794 its default_def SSA_NAME. */
3795 if (gimple_in_ssa_p (id->src_cfun)
3796 && is_gimple_reg (result))
3797 {
3798 temp = make_ssa_name (temp);
3799 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3800 }
3801 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3802 }
3803 else
3804 insert_decl_map (id, result, var);
3805
3806 /* Remember this so we can ignore it in remap_decls. */
3807 id->retvar = var;
3808 return use;
3809 }
3810
3811 /* Determine if the function can be copied. If so, return NULL. If
3812 not, return a string describing the reason for failure. */
3813
3814 const char *
3815 copy_forbidden (struct function *fun)
3816 {
3817 const char *reason = fun->cannot_be_copied_reason;
3818
3819 /* Only examine the function once. */
3820 if (fun->cannot_be_copied_set)
3821 return reason;
3822
3823 /* We cannot copy a function that receives a non-local goto
3824 because we cannot remap the destination label used in the
3825 function that is performing the non-local goto. */
3826 /* ??? Actually, this should be possible, if we work at it.
3827 No doubt there's just a handful of places that simply
3828 assume it doesn't happen and don't substitute properly. */
3829 if (fun->has_nonlocal_label)
3830 {
3831 reason = G_("function %q+F can never be copied "
3832 "because it receives a non-local goto");
3833 goto fail;
3834 }
3835
3836 if (fun->has_forced_label_in_static)
3837 {
3838 reason = G_("function %q+F can never be copied because it saves "
3839 "address of local label in a static variable");
3840 goto fail;
3841 }
3842
3843 fail:
3844 fun->cannot_be_copied_reason = reason;
3845 fun->cannot_be_copied_set = true;
3846 return reason;
3847 }
3848
3849
3850 static const char *inline_forbidden_reason;
3851
3852 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3853 iff a function cannot be inlined. Also sets the reason why. */
3854
3855 static tree
3856 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3857 struct walk_stmt_info *wip)
3858 {
3859 tree fn = (tree) wip->info;
3860 tree t;
3861 gimple *stmt = gsi_stmt (*gsi);
3862
3863 switch (gimple_code (stmt))
3864 {
3865 case GIMPLE_CALL:
3866 /* Refuse to inline an alloca call unless the user explicitly forced it,
3867 as this may change the program's memory overhead drastically when the
3868 function using alloca is called in a loop. In the GCC present in
3869 SPEC2000, inlining into schedule_block caused it to require 2GB of
3870 RAM instead of 256MB. Don't do so for alloca calls emitted for
3871 VLA objects, as those can't cause unbounded growth (they're always
3872 wrapped inside stack_save/stack_restore regions). */
3873 if (gimple_maybe_alloca_call_p (stmt)
3874 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3875 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3876 {
3877 inline_forbidden_reason
3878 = G_("function %q+F can never be inlined because it uses "
3879 "alloca (override using the always_inline attribute)");
3880 *handled_ops_p = true;
3881 return fn;
3882 }
3883
3884 t = gimple_call_fndecl (stmt);
3885 if (t == NULL_TREE)
3886 break;
3887
3888 /* We cannot inline functions that call setjmp. */
3889 if (setjmp_call_p (t))
3890 {
3891 inline_forbidden_reason
3892 = G_("function %q+F can never be inlined because it uses setjmp");
3893 *handled_ops_p = true;
3894 return t;
3895 }
3896
3897 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3898 switch (DECL_FUNCTION_CODE (t))
3899 {
3900 /* We cannot inline functions that take a variable number of
3901 arguments. */
3902 case BUILT_IN_VA_START:
3903 case BUILT_IN_NEXT_ARG:
3904 case BUILT_IN_VA_END:
3905 inline_forbidden_reason
3906 = G_("function %q+F can never be inlined because it "
3907 "uses variable argument lists");
3908 *handled_ops_p = true;
3909 return t;
3910
3911 case BUILT_IN_LONGJMP:
3912 /* We can't inline functions that call __builtin_longjmp at
3913 all. The non-local goto machinery really requires the
3914 destination be in a different function. If we allow the
3915 function calling __builtin_longjmp to be inlined into the
3916 function calling __builtin_setjmp, Things will Go Awry. */
3917 inline_forbidden_reason
3918 = G_("function %q+F can never be inlined because "
3919 "it uses setjmp-longjmp exception handling");
3920 *handled_ops_p = true;
3921 return t;
3922
3923 case BUILT_IN_NONLOCAL_GOTO:
3924 /* Similarly. */
3925 inline_forbidden_reason
3926 = G_("function %q+F can never be inlined because "
3927 "it uses non-local goto");
3928 *handled_ops_p = true;
3929 return t;
3930
3931 case BUILT_IN_RETURN:
3932 case BUILT_IN_APPLY_ARGS:
3933 /* If a __builtin_apply_args caller would be inlined,
3934 it would be saving arguments of the function it has
3935 been inlined into. Similarly, __builtin_return would
3936 return from the function it has been inlined into. */
3937 inline_forbidden_reason
3938 = G_("function %q+F can never be inlined because "
3939 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3940 *handled_ops_p = true;
3941 return t;
3942
3943 default:
3944 break;
3945 }
3946 break;
3947
3948 case GIMPLE_GOTO:
3949 t = gimple_goto_dest (stmt);
3950
3951 /* We will not inline a function which uses computed goto. The
3952 addresses of its local labels, which may be tucked into
3953 global storage, are of course not constant across
3954 instantiations, which causes unexpected behavior. */
3955 if (TREE_CODE (t) != LABEL_DECL)
3956 {
3957 inline_forbidden_reason
3958 = G_("function %q+F can never be inlined "
3959 "because it contains a computed goto");
3960 *handled_ops_p = true;
3961 return t;
3962 }
3963 break;
3964
3965 default:
3966 break;
3967 }
3968
3969 *handled_ops_p = false;
3970 return NULL_TREE;
3971 }
3972
3973 /* Return true if FNDECL is a function that cannot be inlined into
3974 another one. */
3975
3976 static bool
3977 inline_forbidden_p (tree fndecl)
3978 {
3979 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3980 struct walk_stmt_info wi;
3981 basic_block bb;
3982 bool forbidden_p = false;
3983
3984 /* First check for shared reasons not to copy the code. */
3985 inline_forbidden_reason = copy_forbidden (fun);
3986 if (inline_forbidden_reason != NULL)
3987 return true;
3988
3989 /* Next, walk the statements of the function looking for
3990 constructs we can't handle, or that are non-optimal for inlining. */
3991 hash_set<tree> visited_nodes;
3992 memset (&wi, 0, sizeof (wi));
3993 wi.info = (void *) fndecl;
3994 wi.pset = &visited_nodes;
3995
3996 FOR_EACH_BB_FN (bb, fun)
3997 {
3998 gimple *ret;
3999 gimple_seq seq = bb_seq (bb);
4000 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4001 forbidden_p = (ret != NULL);
4002 if (forbidden_p)
4003 break;
4004 }
4005
4006 return forbidden_p;
4007 }
4008 \f
4009 /* Return false if the function FNDECL cannot be inlined on account of its
4010 attributes, true otherwise. */
4011 static bool
4012 function_attribute_inlinable_p (const_tree fndecl)
4013 {
4014 if (targetm.attribute_table)
4015 {
4016 const_tree a;
4017
4018 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4019 {
4020 const_tree name = get_attribute_name (a);
4021 int i;
4022
4023 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4024 if (is_attribute_p (targetm.attribute_table[i].name, name))
4025 return targetm.function_attribute_inlinable_p (fndecl);
4026 }
4027 }
4028
4029 return true;
4030 }
4031
4032 /* Returns nonzero if FN is a function that does not have any
4033 fundamental inline blocking properties. */
4034
4035 bool
4036 tree_inlinable_function_p (tree fn)
4037 {
4038 bool inlinable = true;
4039 bool do_warning;
4040 tree always_inline;
4041
4042 /* If we've already decided this function shouldn't be inlined,
4043 there's no need to check again. */
4044 if (DECL_UNINLINABLE (fn))
4045 return false;
4046
4047 /* We only warn for functions declared `inline' by the user. */
4048 do_warning = (opt_for_fn (fn, warn_inline)
4049 && DECL_DECLARED_INLINE_P (fn)
4050 && !DECL_NO_INLINE_WARNING_P (fn)
4051 && !DECL_IN_SYSTEM_HEADER (fn));
4052
4053 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4054
4055 if (flag_no_inline
4056 && always_inline == NULL)
4057 {
4058 if (do_warning)
4059 warning (OPT_Winline, "function %q+F can never be inlined because it "
4060 "is suppressed using %<-fno-inline%>", fn);
4061 inlinable = false;
4062 }
4063
4064 else if (!function_attribute_inlinable_p (fn))
4065 {
4066 if (do_warning)
4067 warning (OPT_Winline, "function %q+F can never be inlined because it "
4068 "uses attributes conflicting with inlining", fn);
4069 inlinable = false;
4070 }
4071
4072 else if (inline_forbidden_p (fn))
4073 {
4074 /* See if we should warn about uninlinable functions. Previously,
4075 some of these warnings would be issued while trying to expand
4076 the function inline, but that would cause multiple warnings
4077 about functions that would for example call alloca. But since
4078 this a property of the function, just one warning is enough.
4079 As a bonus we can now give more details about the reason why a
4080 function is not inlinable. */
4081 if (always_inline)
4082 error (inline_forbidden_reason, fn);
4083 else if (do_warning)
4084 warning (OPT_Winline, inline_forbidden_reason, fn);
4085
4086 inlinable = false;
4087 }
4088
4089 /* Squirrel away the result so that we don't have to check again. */
4090 DECL_UNINLINABLE (fn) = !inlinable;
4091
4092 return inlinable;
4093 }
4094
4095 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4096 word size, take a possible memcpy call into account, and return the
4097 cost based on whether we are optimizing for size or speed according to SPEED_P. */
4098
4099 int
4100 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4101 {
4102 HOST_WIDE_INT size;
4103
4104 gcc_assert (!VOID_TYPE_P (type));
4105
4106 if (TREE_CODE (type) == VECTOR_TYPE)
4107 {
4108 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4109 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4110 int orig_mode_size
4111 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4112 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4113 return ((orig_mode_size + simd_mode_size - 1)
4114 / simd_mode_size);
4115 }
4116
4117 size = int_size_in_bytes (type);
4118
4119 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4120 /* Cost of a memcpy call, 3 arguments and the call. */
4121 return 4;
4122 else
4123 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4124 }
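/* A worked example of the size computation above, under the assumption
   (target dependent, chosen only for illustration) that MOVE_MAX_PIECES is 8
   and MOVE_RATIO (speed_p) is 6: a 20-byte struct costs (20 + 8 - 1) / 8 = 3
   units, while a 100-byte struct exceeds 8 * 6 = 48 bytes and is therefore
   charged the flat memcpy cost of 4, as is any type with variable size
   (int_size_in_bytes returning -1).  */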
4125
4126 /* Returns cost of operation CODE, according to WEIGHTS */
4127
4128 static int
4129 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4130 tree op1 ATTRIBUTE_UNUSED, tree op2)
4131 {
4132 switch (code)
4133 {
4134 /* These are "free" conversions, or their presumed cost
4135 is folded into other operations. */
4136 case RANGE_EXPR:
4137 CASE_CONVERT:
4138 case COMPLEX_EXPR:
4139 case PAREN_EXPR:
4140 case VIEW_CONVERT_EXPR:
4141 return 0;
4142
4143 /* Assign cost of 1 to usual operations.
4144 ??? We may consider mapping RTL costs to this. */
4145 case COND_EXPR:
4146 case VEC_COND_EXPR:
4147 case VEC_PERM_EXPR:
4148
4149 case PLUS_EXPR:
4150 case POINTER_PLUS_EXPR:
4151 case POINTER_DIFF_EXPR:
4152 case MINUS_EXPR:
4153 case MULT_EXPR:
4154 case MULT_HIGHPART_EXPR:
4155
4156 case ADDR_SPACE_CONVERT_EXPR:
4157 case FIXED_CONVERT_EXPR:
4158 case FIX_TRUNC_EXPR:
4159
4160 case NEGATE_EXPR:
4161 case FLOAT_EXPR:
4162 case MIN_EXPR:
4163 case MAX_EXPR:
4164 case ABS_EXPR:
4165 case ABSU_EXPR:
4166
4167 case LSHIFT_EXPR:
4168 case RSHIFT_EXPR:
4169 case LROTATE_EXPR:
4170 case RROTATE_EXPR:
4171
4172 case BIT_IOR_EXPR:
4173 case BIT_XOR_EXPR:
4174 case BIT_AND_EXPR:
4175 case BIT_NOT_EXPR:
4176
4177 case TRUTH_ANDIF_EXPR:
4178 case TRUTH_ORIF_EXPR:
4179 case TRUTH_AND_EXPR:
4180 case TRUTH_OR_EXPR:
4181 case TRUTH_XOR_EXPR:
4182 case TRUTH_NOT_EXPR:
4183
4184 case LT_EXPR:
4185 case LE_EXPR:
4186 case GT_EXPR:
4187 case GE_EXPR:
4188 case EQ_EXPR:
4189 case NE_EXPR:
4190 case ORDERED_EXPR:
4191 case UNORDERED_EXPR:
4192
4193 case UNLT_EXPR:
4194 case UNLE_EXPR:
4195 case UNGT_EXPR:
4196 case UNGE_EXPR:
4197 case UNEQ_EXPR:
4198 case LTGT_EXPR:
4199
4200 case CONJ_EXPR:
4201
4202 case PREDECREMENT_EXPR:
4203 case PREINCREMENT_EXPR:
4204 case POSTDECREMENT_EXPR:
4205 case POSTINCREMENT_EXPR:
4206
4207 case REALIGN_LOAD_EXPR:
4208
4209 case WIDEN_SUM_EXPR:
4210 case WIDEN_MULT_EXPR:
4211 case DOT_PROD_EXPR:
4212 case SAD_EXPR:
4213 case WIDEN_MULT_PLUS_EXPR:
4214 case WIDEN_MULT_MINUS_EXPR:
4215 case WIDEN_LSHIFT_EXPR:
4216
4217 case VEC_WIDEN_MULT_HI_EXPR:
4218 case VEC_WIDEN_MULT_LO_EXPR:
4219 case VEC_WIDEN_MULT_EVEN_EXPR:
4220 case VEC_WIDEN_MULT_ODD_EXPR:
4221 case VEC_UNPACK_HI_EXPR:
4222 case VEC_UNPACK_LO_EXPR:
4223 case VEC_UNPACK_FLOAT_HI_EXPR:
4224 case VEC_UNPACK_FLOAT_LO_EXPR:
4225 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4226 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4227 case VEC_PACK_TRUNC_EXPR:
4228 case VEC_PACK_SAT_EXPR:
4229 case VEC_PACK_FIX_TRUNC_EXPR:
4230 case VEC_PACK_FLOAT_EXPR:
4231 case VEC_WIDEN_LSHIFT_HI_EXPR:
4232 case VEC_WIDEN_LSHIFT_LO_EXPR:
4233 case VEC_DUPLICATE_EXPR:
4234 case VEC_SERIES_EXPR:
4235
4236 return 1;
4237
4238 /* A few special cases of expensive operations. This is useful
4239 to avoid inlining functions having too many of these. */
4240 case TRUNC_DIV_EXPR:
4241 case CEIL_DIV_EXPR:
4242 case FLOOR_DIV_EXPR:
4243 case ROUND_DIV_EXPR:
4244 case EXACT_DIV_EXPR:
4245 case TRUNC_MOD_EXPR:
4246 case CEIL_MOD_EXPR:
4247 case FLOOR_MOD_EXPR:
4248 case ROUND_MOD_EXPR:
4249 case RDIV_EXPR:
4250 if (TREE_CODE (op2) != INTEGER_CST)
4251 return weights->div_mod_cost;
4252 return 1;
4253
4254 /* Bit-field insertion needs several shift and mask operations. */
4255 case BIT_INSERT_EXPR:
4256 return 3;
4257
4258 default:
4259 /* We expect a copy assignment with no operator. */
4260 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4261 return 0;
4262 }
4263 }
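/* An illustration of the division/modulo special case above: for a statement
   like 'x / y' where the divisor is not an INTEGER_CST, the cost is
   weights->div_mod_cost (10 under eni_time_weights, see init_inline_once
   below), whereas 'x / 4' is charged the usual cost of 1 because division by
   a constant is typically strength-reduced to cheaper operations.  */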
4264
4265
4266 /* Estimate number of instructions that will be created by expanding
4267 the statements in the statement sequence STMTS.
4268 WEIGHTS contains weights attributed to various constructs. */
4269
4270 int
4271 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4272 {
4273 int cost;
4274 gimple_stmt_iterator gsi;
4275
4276 cost = 0;
4277 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4278 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4279
4280 return cost;
4281 }
4282
4283
4284 /* Estimate number of instructions that will be created by expanding STMT.
4285 WEIGHTS contains weights attributed to various constructs. */
4286
4287 int
4288 estimate_num_insns (gimple *stmt, eni_weights *weights)
4289 {
4290 unsigned cost, i;
4291 enum gimple_code code = gimple_code (stmt);
4292 tree lhs;
4293 tree rhs;
4294
4295 switch (code)
4296 {
4297 case GIMPLE_ASSIGN:
4298 /* Try to estimate the cost of assignments. We have three cases to
4299 deal with:
4300 1) Simple assignments to registers;
4301 2) Stores to things that must live in memory. This includes
4302 "normal" stores to scalars, but also assignments of large
4303 structures, or constructors of big arrays;
4304
4305 Let us look at the first two cases, assuming we have "a = b + C":
4306 <GIMPLE_ASSIGN <var_decl "a">
4307 <plus_expr <var_decl "b"> <constant C>>
4308 If "a" is a GIMPLE register, the assignment to it is free on almost
4309 any target, because "a" usually ends up in a real register. Hence
4310 the only cost of this expression comes from the PLUS_EXPR, and we
4311 can ignore the GIMPLE_ASSIGN.
4312 If "a" is not a GIMPLE register, the assignment to "a" will most
4313 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4314 of moving something into "a", which we compute using the function
4315 estimate_move_cost. */
4316 if (gimple_clobber_p (stmt))
4317 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4318
4319 lhs = gimple_assign_lhs (stmt);
4320 rhs = gimple_assign_rhs1 (stmt);
4321
4322 cost = 0;
4323
4324 /* Account for the cost of moving to / from memory. */
4325 if (gimple_store_p (stmt))
4326 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4327 if (gimple_assign_load_p (stmt))
4328 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4329
4330 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4331 gimple_assign_rhs1 (stmt),
4332 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4333 == GIMPLE_BINARY_RHS
4334 ? gimple_assign_rhs2 (stmt) : NULL);
4335 break;
4336
4337 case GIMPLE_COND:
4338 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4339 gimple_op (stmt, 0),
4340 gimple_op (stmt, 1));
4341 break;
4342
4343 case GIMPLE_SWITCH:
4344 {
4345 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4346 /* Take into account the cost of the switch + guess 2 conditional jumps for
4347 each case label.
4348
4349 TODO: once the switch expansion logic is sufficiently separated, we can
4350 do a better job of estimating the cost of the switch. */
4351 if (weights->time_based)
4352 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4353 else
4354 cost = gimple_switch_num_labels (switch_stmt) * 2;
4355 }
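/* For example, a switch with 16 labels is estimated at
   floor_log2 (16) * 2 = 8 when computing time-based costs (roughly a
   balanced decision tree), but at 16 * 2 = 32 when computing size-based
   costs.  This only illustrates the heuristic above, not how the switch
   is actually expanded.  */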
4356 break;
4357
4358 case GIMPLE_CALL:
4359 {
4360 tree decl;
4361
4362 if (gimple_call_internal_p (stmt))
4363 return 0;
4364 else if ((decl = gimple_call_fndecl (stmt))
4365 && fndecl_built_in_p (decl))
4366 {
4367 /* Do not special case builtins where we see the body.
4368 This just confuses the inliner. */
4369 struct cgraph_node *node;
4370 if (!(node = cgraph_node::get (decl))
4371 || node->definition)
4372 ;
4373 /* For builtins that are likely expanded to nothing or
4374 inlined, do not account operand costs. */
4375 else if (is_simple_builtin (decl))
4376 return 0;
4377 else if (is_inexpensive_builtin (decl))
4378 return weights->target_builtin_call_cost;
4379 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4380 {
4381 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4382 specialize the cheap expansion we do here.
4383 ??? This asks for a more general solution. */
4384 switch (DECL_FUNCTION_CODE (decl))
4385 {
4386 case BUILT_IN_POW:
4387 case BUILT_IN_POWF:
4388 case BUILT_IN_POWL:
4389 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4390 && (real_equal
4391 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4392 &dconst2)))
4393 return estimate_operator_cost
4394 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4395 gimple_call_arg (stmt, 0));
4396 break;
4397
4398 default:
4399 break;
4400 }
4401 }
4402 }
4403
4404 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4405 if (gimple_call_lhs (stmt))
4406 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4407 weights->time_based);
4408 for (i = 0; i < gimple_call_num_args (stmt); i++)
4409 {
4410 tree arg = gimple_call_arg (stmt, i);
4411 cost += estimate_move_cost (TREE_TYPE (arg),
4412 weights->time_based);
4413 }
4414 break;
4415 }
4416
4417 case GIMPLE_RETURN:
4418 return weights->return_cost;
4419
4420 case GIMPLE_GOTO:
4421 case GIMPLE_LABEL:
4422 case GIMPLE_NOP:
4423 case GIMPLE_PHI:
4424 case GIMPLE_PREDICT:
4425 case GIMPLE_DEBUG:
4426 return 0;
4427
4428 case GIMPLE_ASM:
4429 {
4430 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4431 /* 1000 means infinity. This avoids overflows later
4432 with very long asm statements. */
4433 if (count > 1000)
4434 count = 1000;
4435 /* If this asm is asm inline, count anything as minimum size. */
4436 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4437 count = MIN (1, count);
4438 return MAX (1, count);
4439 }
4440
4441 case GIMPLE_RESX:
4442 /* This is either going to be an external function call with one
4443 argument, or two register copy statements plus a goto. */
4444 return 2;
4445
4446 case GIMPLE_EH_DISPATCH:
4447 /* ??? This is going to turn into a switch statement. Ideally
4448 we'd have a look at the eh region and estimate the number of
4449 edges involved. */
4450 return 10;
4451
4452 case GIMPLE_BIND:
4453 return estimate_num_insns_seq (
4454 gimple_bind_body (as_a <gbind *> (stmt)),
4455 weights);
4456
4457 case GIMPLE_EH_FILTER:
4458 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4459
4460 case GIMPLE_CATCH:
4461 return estimate_num_insns_seq (gimple_catch_handler (
4462 as_a <gcatch *> (stmt)),
4463 weights);
4464
4465 case GIMPLE_TRY:
4466 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4467 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4468
4469 /* OMP directives are generally very expensive. */
4470
4471 case GIMPLE_OMP_RETURN:
4472 case GIMPLE_OMP_SECTIONS_SWITCH:
4473 case GIMPLE_OMP_ATOMIC_STORE:
4474 case GIMPLE_OMP_CONTINUE:
4475 /* ...except these, which are cheap. */
4476 return 0;
4477
4478 case GIMPLE_OMP_ATOMIC_LOAD:
4479 return weights->omp_cost;
4480
4481 case GIMPLE_OMP_FOR:
4482 return (weights->omp_cost
4483 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4484 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4485
4486 case GIMPLE_OMP_PARALLEL:
4487 case GIMPLE_OMP_TASK:
4488 case GIMPLE_OMP_CRITICAL:
4489 case GIMPLE_OMP_MASTER:
4490 case GIMPLE_OMP_TASKGROUP:
4491 case GIMPLE_OMP_ORDERED:
4492 case GIMPLE_OMP_SCAN:
4493 case GIMPLE_OMP_SECTION:
4494 case GIMPLE_OMP_SECTIONS:
4495 case GIMPLE_OMP_SINGLE:
4496 case GIMPLE_OMP_TARGET:
4497 case GIMPLE_OMP_TEAMS:
4498 return (weights->omp_cost
4499 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4500
4501 case GIMPLE_TRANSACTION:
4502 return (weights->tm_cost
4503 + estimate_num_insns_seq (gimple_transaction_body (
4504 as_a <gtransaction *> (stmt)),
4505 weights));
4506
4507 default:
4508 gcc_unreachable ();
4509 }
4510
4511 return cost;
4512 }
4513
4514 /* Estimate number of instructions that will be created by expanding
4515 function FNDECL. WEIGHTS contains weights attributed to various
4516 constructs. */
4517
4518 int
4519 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4520 {
4521 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4522 gimple_stmt_iterator bsi;
4523 basic_block bb;
4524 int n = 0;
4525
4526 gcc_assert (my_function && my_function->cfg);
4527 FOR_EACH_BB_FN (bb, my_function)
4528 {
4529 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4530 n += estimate_num_insns (gsi_stmt (bsi), weights);
4531 }
4532
4533 return n;
4534 }
4535
4536
4537 /* Initializes weights used by estimate_num_insns. */
4538
4539 void
4540 init_inline_once (void)
4541 {
4542 eni_size_weights.call_cost = 1;
4543 eni_size_weights.indirect_call_cost = 3;
4544 eni_size_weights.target_builtin_call_cost = 1;
4545 eni_size_weights.div_mod_cost = 1;
4546 eni_size_weights.omp_cost = 40;
4547 eni_size_weights.tm_cost = 10;
4548 eni_size_weights.time_based = false;
4549 eni_size_weights.return_cost = 1;
4550
4551 /* Estimating the time for a call is difficult, since we have no idea what the
4552 called function does. In the current uses of eni_time_weights,
4553 underestimating the cost does less harm than overestimating it, so
4554 we choose a rather small value here. */
4555 eni_time_weights.call_cost = 10;
4556 eni_time_weights.indirect_call_cost = 15;
4557 eni_time_weights.target_builtin_call_cost = 1;
4558 eni_time_weights.div_mod_cost = 10;
4559 eni_time_weights.omp_cost = 40;
4560 eni_time_weights.tm_cost = 40;
4561 eni_time_weights.time_based = true;
4562 eni_time_weights.return_cost = 2;
4563 }
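/* A small worked example combining these weights with estimate_num_insns
   (the move costs of 1 assume small scalar types and are purely
   illustrative): under eni_time_weights, 'r = f (a, b)' with scalar R, A
   and B costs call_cost (10) plus 1 for moving the returned value and 1
   per argument, i.e. 13; under eni_size_weights the same call costs
   1 + 1 + 1 + 1 = 4.  A division by a non-constant in a separate statement
   would additionally be charged div_mod_cost, as described in
   estimate_operator_cost above.  */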
4564
4565
4566 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4567
4568 static void
4569 prepend_lexical_block (tree current_block, tree new_block)
4570 {
4571 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4572 BLOCK_SUBBLOCKS (current_block) = new_block;
4573 BLOCK_SUPERCONTEXT (new_block) = current_block;
4574 }
4575
4576 /* Add local variables from CALLEE to CALLER. */
4577
4578 static inline void
4579 add_local_variables (struct function *callee, struct function *caller,
4580 copy_body_data *id)
4581 {
4582 tree var;
4583 unsigned ix;
4584
4585 FOR_EACH_LOCAL_DECL (callee, ix, var)
4586 if (!can_be_nonlocal (var, id))
4587 {
4588 tree new_var = remap_decl (var, id);
4589
4590 /* Remap debug-expressions. */
4591 if (VAR_P (new_var)
4592 && DECL_HAS_DEBUG_EXPR_P (var)
4593 && new_var != var)
4594 {
4595 tree tem = DECL_DEBUG_EXPR (var);
4596 bool old_regimplify = id->regimplify;
4597 id->remapping_type_depth++;
4598 walk_tree (&tem, copy_tree_body_r, id, NULL);
4599 id->remapping_type_depth--;
4600 id->regimplify = old_regimplify;
4601 SET_DECL_DEBUG_EXPR (new_var, tem);
4602 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4603 }
4604 add_local_decl (caller, new_var);
4605 }
4606 }
4607
4608 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4609 have brought in or introduced any debug stmts for SRCVAR. */
4610
4611 static inline void
4612 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4613 {
4614 tree *remappedvarp = id->decl_map->get (srcvar);
4615
4616 if (!remappedvarp)
4617 return;
4618
4619 if (!VAR_P (*remappedvarp))
4620 return;
4621
4622 if (*remappedvarp == id->retvar)
4623 return;
4624
4625 tree tvar = target_for_debug_bind (*remappedvarp);
4626 if (!tvar)
4627 return;
4628
4629 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4630 id->call_stmt);
4631 gimple_seq_add_stmt (bindings, stmt);
4632 }
4633
4634 /* For each inlined variable for which we may have debug bind stmts,
4635 add before GSI a final debug stmt resetting it, marking the end of
4636 its life, so that var-tracking knows it doesn't have to compute
4637 further locations for it. */
4638
4639 static inline void
4640 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4641 {
4642 tree var;
4643 unsigned ix;
4644 gimple_seq bindings = NULL;
4645
4646 if (!gimple_in_ssa_p (id->src_cfun))
4647 return;
4648
4649 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4650 return;
4651
4652 for (var = DECL_ARGUMENTS (id->src_fn);
4653 var; var = DECL_CHAIN (var))
4654 reset_debug_binding (id, var, &bindings);
4655
4656 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4657 reset_debug_binding (id, var, &bindings);
4658
4659 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4660 }
4661
4662 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4663
4664 static bool
4665 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4666 bitmap to_purge)
4667 {
4668 tree use_retvar;
4669 tree fn;
4670 hash_map<tree, tree> *dst;
4671 hash_map<tree, tree> *st = NULL;
4672 tree return_slot;
4673 tree modify_dest;
4674 struct cgraph_edge *cg_edge;
4675 cgraph_inline_failed_t reason;
4676 basic_block return_block;
4677 edge e;
4678 gimple_stmt_iterator gsi, stmt_gsi;
4679 bool successfully_inlined = false;
4680 bool purge_dead_abnormal_edges;
4681 gcall *call_stmt;
4682 unsigned int prop_mask, src_properties;
4683 struct function *dst_cfun;
4684 tree simduid;
4685 use_operand_p use;
4686 gimple *simtenter_stmt = NULL;
4687 vec<tree> *simtvars_save;
4688
4689 /* The gimplifier uses input_location in too many places, such as
4690 internal_get_tmp_var (). */
4691 location_t saved_location = input_location;
4692 input_location = gimple_location (stmt);
4693
4694 /* From here on, we're only interested in CALL_EXPRs. */
4695 call_stmt = dyn_cast <gcall *> (stmt);
4696 if (!call_stmt)
4697 goto egress;
4698
4699 cg_edge = id->dst_node->get_edge (stmt);
4700 gcc_checking_assert (cg_edge);
4701 /* First, see if we can figure out what function is being called.
4702 If we cannot, then there is no hope of inlining the function. */
4703 if (cg_edge->indirect_unknown_callee)
4704 goto egress;
4705 fn = cg_edge->callee->decl;
4706 gcc_checking_assert (fn);
4707
4708 /* If FN is a declaration of a function in a nested scope that was
4709 globally declared inline, we don't set its DECL_INITIAL.
4710 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4711 C++ front-end uses it for cdtors to refer to their internal
4712 declarations, that are not real functions. Fortunately those
4713 don't have trees to be saved, so we can tell by checking their
4714 gimple_body. */
4715 if (!DECL_INITIAL (fn)
4716 && DECL_ABSTRACT_ORIGIN (fn)
4717 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4718 fn = DECL_ABSTRACT_ORIGIN (fn);
4719
4720 /* Don't try to inline functions that are not well-suited to inlining. */
4721 if (cg_edge->inline_failed)
4722 {
4723 reason = cg_edge->inline_failed;
4724 /* If this call was originally indirect, we do not want to emit any
4725 inlining related warnings or sorry messages because there are no
4726 guarantees regarding those. */
4727 if (cg_edge->indirect_inlining_edge)
4728 goto egress;
4729
4730 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4731 /* For extern inline functions that get redefined we have always
4732 silently ignored the always_inline flag. Better behavior would
4733 be to keep both bodies and use the extern inline body
4734 for inlining, but we can't do that because frontends overwrite
4735 the body. */
4736 && !cg_edge->callee->redefined_extern_inline
4737 /* During early inline pass, report only when optimization is
4738 not turned on. */
4739 && (symtab->global_info_ready
4740 || !optimize
4741 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4742 /* PR 20090218-1_0.c. Body can be provided by another module. */
4743 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4744 {
4745 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4746 cgraph_inline_failed_string (reason));
4747 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4748 inform (gimple_location (stmt), "called from here");
4749 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4750 inform (DECL_SOURCE_LOCATION (cfun->decl),
4751 "called from this function");
4752 }
4753 else if (opt_for_fn (fn, warn_inline)
4754 && DECL_DECLARED_INLINE_P (fn)
4755 && !DECL_NO_INLINE_WARNING_P (fn)
4756 && !DECL_IN_SYSTEM_HEADER (fn)
4757 && reason != CIF_UNSPECIFIED
4758 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4759 /* Do not warn about not inlined recursive calls. */
4760 && !cg_edge->recursive_p ()
4761 /* Avoid warnings during early inline pass. */
4762 && symtab->global_info_ready)
4763 {
4764 auto_diagnostic_group d;
4765 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4766 fn, _(cgraph_inline_failed_string (reason))))
4767 {
4768 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4769 inform (gimple_location (stmt), "called from here");
4770 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4771 inform (DECL_SOURCE_LOCATION (cfun->decl),
4772 "called from this function");
4773 }
4774 }
4775 goto egress;
4776 }
4777 id->src_node = cg_edge->callee;
4778
4779 /* If the callee is a thunk, all we need is to adjust the THIS pointer
4780 and redirect to the function being thunked. */
4781 if (id->src_node->thunk.thunk_p)
4782 {
4783 cgraph_edge *edge;
4784 tree virtual_offset = NULL;
4785 profile_count count = cg_edge->count;
4786 tree op;
4787 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4788
4789 cgraph_edge::remove (cg_edge);
4790 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4791 gimple_uid (stmt),
4792 profile_count::one (),
4793 profile_count::one (),
4794 true);
4795 edge->count = count;
4796 if (id->src_node->thunk.virtual_offset_p)
4797 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4798 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4799 NULL);
4800 gsi_insert_before (&iter, gimple_build_assign (op,
4801 gimple_call_arg (stmt, 0)),
4802 GSI_NEW_STMT);
4803 gcc_assert (id->src_node->thunk.this_adjusting);
4804 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4805 virtual_offset, id->src_node->thunk.indirect_offset);
4806
4807 gimple_call_set_arg (stmt, 0, op);
4808 gimple_call_set_fndecl (stmt, edge->callee->decl);
4809 update_stmt (stmt);
4810 id->src_node->remove ();
4811 expand_call_inline (bb, stmt, id, to_purge);
4812 maybe_remove_unused_call_args (cfun, stmt);
4813 return true;
4814 }
4815 fn = cg_edge->callee->decl;
4816 cg_edge->callee->get_untransformed_body ();
4817
4818 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4819 cg_edge->callee->verify ();
4820
4821 /* We will be inlining this callee. */
4822 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4823
4824 /* Update the caller's EH personality. */
4825 if (DECL_FUNCTION_PERSONALITY (fn))
4826 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4827 = DECL_FUNCTION_PERSONALITY (fn);
4828
4829 /* Split the block before the GIMPLE_CALL. */
4830 stmt_gsi = gsi_for_stmt (stmt);
4831 gsi_prev (&stmt_gsi);
4832 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4833 bb = e->src;
4834 return_block = e->dest;
4835 remove_edge (e);
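/* In pictures (illustrative only): before the split, BB ends with
   '...; call foo (); ...'.  After split_block and remove_edge above, BB
   contains everything up to (but not including) the call, RETURN_BLOCK
   starts with the call statement, and there is no edge between them yet;
   copy_body later wires the inlined body in between and redirects the
   callee's returns to RETURN_BLOCK.  */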
4836
4837 /* If the GIMPLE_CALL was the last statement of BB, it may have
4838 been the source of abnormal edges. In this case, schedule
4839 the removal of dead abnormal edges. */
4840 gsi = gsi_start_bb (return_block);
4841 gsi_next (&gsi);
4842 purge_dead_abnormal_edges = gsi_end_p (gsi);
4843
4844 stmt_gsi = gsi_start_bb (return_block);
4845
4846 /* Build a block containing code to initialize the arguments, the
4847 actual inline expansion of the body, and a label for the return
4848 statements within the function to jump to. The type of the
4849 statement expression is the return type of the function call.
4850 ??? If the call does not have an associated block then we will
4851 remap all callee blocks to NULL, effectively dropping most of
4852 its debug information. This should only happen for calls to
4853 artificial decls inserted by the compiler itself. We need to
4854 either link the inlined blocks into the caller block tree or
4855 not refer to them in any way to not break GC for locations. */
4856 if (tree block = gimple_block (stmt))
4857 {
4858 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4859 to make inlined_function_outer_scope_p return true on this BLOCK. */
4860 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4861 if (loc == UNKNOWN_LOCATION)
4862 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4863 if (loc == UNKNOWN_LOCATION)
4864 loc = BUILTINS_LOCATION;
4865 id->block = make_node (BLOCK);
4866 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4867 BLOCK_SOURCE_LOCATION (id->block) = loc;
4868 prepend_lexical_block (block, id->block);
4869 }
4870
4871 /* Local declarations will be replaced by their equivalents in this map. */
4872 st = id->decl_map;
4873 id->decl_map = new hash_map<tree, tree>;
4874 dst = id->debug_map;
4875 id->debug_map = NULL;
4876 if (flag_stack_reuse != SR_NONE)
4877 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4878
4879 /* Record the function we are about to inline. */
4880 id->src_fn = fn;
4881 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4882 id->reset_location = DECL_IGNORED_P (fn);
4883 id->call_stmt = call_stmt;
4884
4885 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4886 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4887 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4888 simtvars_save = id->dst_simt_vars;
4889 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4890 && (simduid = bb->loop_father->simduid) != NULL_TREE
4891 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4892 && single_imm_use (simduid, &use, &simtenter_stmt)
4893 && is_gimple_call (simtenter_stmt)
4894 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4895 vec_alloc (id->dst_simt_vars, 0);
4896 else
4897 id->dst_simt_vars = NULL;
4898
4899 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4900 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4901
4902 /* If the src function contains an IFN_VA_ARG, then so will the dst
4903 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4904 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4905 src_properties = id->src_cfun->curr_properties & prop_mask;
4906 if (src_properties != prop_mask)
4907 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4908 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4909
4910 gcc_assert (!id->src_cfun->after_inlining);
4911
4912 id->entry_bb = bb;
4913 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4914 {
4915 gimple_stmt_iterator si = gsi_last_bb (bb);
4916 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4917 NOT_TAKEN),
4918 GSI_NEW_STMT);
4919 }
4920 initialize_inlined_parameters (id, stmt, fn, bb);
4921 if (debug_nonbind_markers_p && debug_inline_points && id->block
4922 && inlined_function_outer_scope_p (id->block))
4923 {
4924 gimple_stmt_iterator si = gsi_last_bb (bb);
4925 gsi_insert_after (&si, gimple_build_debug_inline_entry
4926 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4927 GSI_NEW_STMT);
4928 }
4929
4930 if (DECL_INITIAL (fn))
4931 {
4932 if (gimple_block (stmt))
4933 {
4934 tree *var;
4935
4936 prepend_lexical_block (id->block,
4937 remap_blocks (DECL_INITIAL (fn), id));
4938 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4939 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4940 == NULL_TREE));
4941 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4942 otherwise for DWARF the DW_TAG_formal_parameter entries will not be
4943 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4944 under it. The parameters can then be evaluated in the debugger,
4945 but don't show up in backtraces. */
4946 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4947 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4948 {
4949 tree v = *var;
4950 *var = TREE_CHAIN (v);
4951 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4952 BLOCK_VARS (id->block) = v;
4953 }
4954 else
4955 var = &TREE_CHAIN (*var);
4956 }
4957 else
4958 remap_blocks_to_null (DECL_INITIAL (fn), id);
4959 }
4960
4961 /* Return statements in the function body will be replaced by jumps
4962 to the RET_LABEL. */
4963 gcc_assert (DECL_INITIAL (fn));
4964 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4965
4966 /* Find the LHS to which the result of this call is assigned. */
4967 return_slot = NULL;
4968 if (gimple_call_lhs (stmt))
4969 {
4970 modify_dest = gimple_call_lhs (stmt);
4971
4972 /* The function which we are inlining might not return a value,
4973 in which case we should issue a warning that the function
4974 does not return a value. In that case the optimizers will
4975 see that the variable to which the value is assigned was not
4976 initialized. We do not want to issue a warning about that
4977 uninitialized variable. */
4978 if (DECL_P (modify_dest))
4979 TREE_NO_WARNING (modify_dest) = 1;
4980
4981 if (gimple_call_return_slot_opt_p (call_stmt))
4982 {
4983 return_slot = modify_dest;
4984 modify_dest = NULL;
4985 }
4986 }
4987 else
4988 modify_dest = NULL;
4989
4990 /* If we are inlining a call to the C++ operator new, we don't want
4991 to use type based alias analysis on the return value. Otherwise
4992 we may get confused if the compiler sees that the inlined new
4993 function returns a pointer which was just deleted. See bug
4994 33407. */
4995 if (DECL_IS_OPERATOR_NEW_P (fn))
4996 {
4997 return_slot = NULL;
4998 modify_dest = NULL;
4999 }
5000
5001 /* Declare the return variable for the function. */
5002 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5003
5004 /* Add local vars in this inlined callee to caller. */
5005 add_local_variables (id->src_cfun, cfun, id);
5006
5007 if (id->src_node->clone.performed_splits)
5008 {
5009 /* Any calls from the inlined function will be turned into calls from the
5010 function we inline into. We must preserve the notes about performed
5011 parameter splits so that such calls can be redirected/updated. */
5012 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5013 for (unsigned i = 0; i < len; i++)
5014 {
5015 ipa_param_performed_split ps
5016 = (*id->src_node->clone.performed_splits)[i];
5017 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5018 vec_safe_push (id->dst_node->clone.performed_splits, ps);
5019 }
5020
5021 if (flag_checking)
5022 {
5023 len = vec_safe_length (id->dst_node->clone.performed_splits);
5024 for (unsigned i = 0; i < len; i++)
5025 {
5026 ipa_param_performed_split *ps1
5027 = &(*id->dst_node->clone.performed_splits)[i];
5028 for (unsigned j = i + 1; j < len; j++)
5029 {
5030 ipa_param_performed_split *ps2
5031 = &(*id->dst_node->clone.performed_splits)[j];
5032 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5033 || ps1->unit_offset != ps2->unit_offset);
5034 }
5035 }
5036 }
5037 }
5038
5039 if (dump_enabled_p ())
5040 {
5041 char buf[128];
5042 snprintf (buf, sizeof(buf), "%4.2f",
5043 cg_edge->sreal_frequency ().to_double ());
5044 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5045 call_stmt,
5046 "Inlining %C to %C with frequency %s\n",
5047 id->src_node, id->dst_node, buf);
5048 if (dump_file && (dump_flags & TDF_DETAILS))
5049 {
5050 id->src_node->dump (dump_file);
5051 id->dst_node->dump (dump_file);
5052 }
5053 }
5054
5055 /* This is it. Duplicate the callee body. Assume callee is
5056 pre-gimplified. Note that we must not alter the caller
5057 function in any way before this point, as this CALL_EXPR may be
5058 a self-referential call; if we're calling ourselves, we need to
5059 duplicate our body before altering anything. */
5060 copy_body (id, bb, return_block, NULL);
5061
5062 reset_debug_bindings (id, stmt_gsi);
5063
5064 if (flag_stack_reuse != SR_NONE)
5065 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5066 if (!TREE_THIS_VOLATILE (p))
5067 {
5068 tree *varp = id->decl_map->get (p);
5069 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5070 {
5071 tree clobber = build_clobber (TREE_TYPE (*varp));
5072 gimple *clobber_stmt;
5073 clobber_stmt = gimple_build_assign (*varp, clobber);
5074 gimple_set_location (clobber_stmt, gimple_location (stmt));
5075 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5076 }
5077 }
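/* The loop above emits, for each memory-resident parameter copy, a
   statement that appears in GIMPLE dumps as 'parm ={v} {CLOBBER};',
   telling later passes that the variable's stack slot is dead after the
   inlined body and may be reused.  */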
5078
5079 /* Reset the escaped solution. */
5080 if (cfun->gimple_df)
5081 pt_solution_reset (&cfun->gimple_df->escaped);
5082
5083 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5084 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5085 {
5086 size_t nargs = gimple_call_num_args (simtenter_stmt);
5087 vec<tree> *vars = id->dst_simt_vars;
5088 auto_vec<tree> newargs (nargs + vars->length ());
5089 for (size_t i = 0; i < nargs; i++)
5090 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5091 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5092 {
5093 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5094 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5095 }
5096 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5097 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5098 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5099 gsi_replace (&gsi, g, false);
5100 }
5101 vec_free (id->dst_simt_vars);
5102 id->dst_simt_vars = simtvars_save;
5103
5104 /* Clean up. */
5105 if (id->debug_map)
5106 {
5107 delete id->debug_map;
5108 id->debug_map = dst;
5109 }
5110 delete id->decl_map;
5111 id->decl_map = st;
5112
5113 /* Unlink the call's virtual operands before replacing it. */
5114 unlink_stmt_vdef (stmt);
5115 if (gimple_vdef (stmt)
5116 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5117 release_ssa_name (gimple_vdef (stmt));
5118
5119 /* If the inlined function returns a result that we care about,
5120 substitute the GIMPLE_CALL with an assignment of the return
5121 variable to the LHS of the call. That is, if STMT was
5122 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5123 if (use_retvar && gimple_call_lhs (stmt))
5124 {
5125 gimple *old_stmt = stmt;
5126 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5127 gimple_set_location (stmt, gimple_location (old_stmt));
5128 gsi_replace (&stmt_gsi, stmt, false);
5129 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5130 /* Append a clobber for id->retvar if easily possible. */
5131 if (flag_stack_reuse != SR_NONE
5132 && id->retvar
5133 && VAR_P (id->retvar)
5134 && id->retvar != return_slot
5135 && id->retvar != modify_dest
5136 && !TREE_THIS_VOLATILE (id->retvar)
5137 && !is_gimple_reg (id->retvar)
5138 && !stmt_ends_bb_p (stmt))
5139 {
5140 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5141 gimple *clobber_stmt;
5142 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5143 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5144 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5145 }
5146 }
5147 else
5148 {
5149 /* Handle the case of inlining a function with no return
5150 statement, which causes the return value to become undefined. */
5151 if (gimple_call_lhs (stmt)
5152 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5153 {
5154 tree name = gimple_call_lhs (stmt);
5155 tree var = SSA_NAME_VAR (name);
5156 tree def = var ? ssa_default_def (cfun, var) : NULL;
5157
5158 if (def)
5159 {
5160 /* If the variable is used undefined, make this name
5161 undefined via a move. */
5162 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5163 gsi_replace (&stmt_gsi, stmt, true);
5164 }
5165 else
5166 {
5167 if (!var)
5168 {
5169 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5170 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5171 }
5172 /* Otherwise make this variable undefined. */
5173 gsi_remove (&stmt_gsi, true);
5174 set_ssa_default_def (cfun, var, name);
5175 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5176 }
5177 }
5178 /* Replace with a clobber for id->retvar. */
5179 else if (flag_stack_reuse != SR_NONE
5180 && id->retvar
5181 && VAR_P (id->retvar)
5182 && id->retvar != return_slot
5183 && id->retvar != modify_dest
5184 && !TREE_THIS_VOLATILE (id->retvar)
5185 && !is_gimple_reg (id->retvar))
5186 {
5187 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5188 gimple *clobber_stmt;
5189 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5190 gimple_set_location (clobber_stmt, gimple_location (stmt));
5191 gsi_replace (&stmt_gsi, clobber_stmt, false);
5192 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5193 }
5194 else
5195 gsi_remove (&stmt_gsi, true);
5196 }
5197
5198 if (purge_dead_abnormal_edges)
5199 bitmap_set_bit (to_purge, return_block->index);
5200
5201 /* If the value of the new expression is ignored, that's OK. We
5202 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5203 the equivalent inlined version either. */
5204 if (is_gimple_assign (stmt))
5205 {
5206 gcc_assert (gimple_assign_single_p (stmt)
5207 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5208 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5209 }
5210
5211 id->add_clobbers_to_eh_landing_pads = 0;
5212
5213 /* Output the inlining info for this abstract function, since it has been
5214 inlined. If we don't do this now, we can lose the information about the
5215 variables in the function when the blocks get blown away as soon as we
5216 remove the cgraph node. */
5217 if (gimple_block (stmt))
5218 (*debug_hooks->outlining_inline_function) (fn);
5219
5220 /* Update callgraph if needed. */
5221 cg_edge->callee->remove ();
5222
5223 id->block = NULL_TREE;
5224 id->retvar = NULL_TREE;
5225 successfully_inlined = true;
5226
5227 egress:
5228 input_location = saved_location;
5229 return successfully_inlined;
5230 }
5231
5232 /* Expand call statements found in basic block BB. In GIMPLE a call can
5233 only appear as a toplevel GIMPLE_CALL statement (possibly with an LHS),
5234 so walking the statements of BB is sufficient. */
5235
5236 static bool
5237 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5238 bitmap to_purge)
5239 {
5240 gimple_stmt_iterator gsi;
5241 bool inlined = false;
5242
5243 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5244 {
5245 gimple *stmt = gsi_stmt (gsi);
5246 gsi_prev (&gsi);
5247
5248 if (is_gimple_call (stmt)
5249 && !gimple_call_internal_p (stmt))
5250 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5251 }
5252
5253 return inlined;
5254 }
5255
5256
5257 /* Walk all basic blocks created after FIRST and try to fold every statement
5258 in the STATEMENTS pointer set. */
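/* A sketch of why the walk below follows only reachable edges: if inlining
   substituted a constant for a parameter, a guard such as

     if (n_2 == 0)
       ...
     else
       ...   <- now statically unreachable

   becomes decidable, and find_taken_edge lets us skip the dead arm so its
   statements are neither folded nor warned about.  */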
5259
5260 static void
5261 fold_marked_statements (int first, hash_set<gimple *> *statements)
5262 {
5263 auto_bitmap to_purge;
5264
5265 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5266 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5267 bitmap_clear (visited);
5268
5269 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5270 while (!stack.is_empty ())
5271 {
5272 /* Look at the edge on the top of the stack. */
5273 edge e = stack.pop ();
5274 basic_block dest = e->dest;
5275
5276 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5277 || bitmap_bit_p (visited, dest->index))
5278 continue;
5279
5280 bitmap_set_bit (visited, dest->index);
5281
5282 if (dest->index >= first)
5283 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5284 !gsi_end_p (gsi); gsi_next (&gsi))
5285 {
5286 if (!statements->contains (gsi_stmt (gsi)))
5287 continue;
5288
5289 gimple *old_stmt = gsi_stmt (gsi);
5290 tree old_decl = (is_gimple_call (old_stmt)
5291 ? gimple_call_fndecl (old_stmt) : 0);
5292 if (old_decl && fndecl_built_in_p (old_decl))
5293 {
5294 /* Folding builtins can create multiple statements;
5295 we need to look at all of them. */
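/* For instance (an illustrative sketch, not a guaranteed folding): a call
   such as

     p_3 = __builtin_memcpy (dst_1, src_2, 4);

   may be folded into the copy itself plus an assignment of dst_1 to p_3.
   I2 therefore remembers the statement just before the call so that we can
   walk every statement the folding produced, updating cgraph edges as we
   go.  */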
5296 gimple_stmt_iterator i2 = gsi;
5297 gsi_prev (&i2);
5298 if (fold_stmt (&gsi))
5299 {
5300 gimple *new_stmt;
5301 /* If a builtin at the end of a bb folded into nothing,
5302 the following loop won't work. */
5303 if (gsi_end_p (gsi))
5304 {
5305 cgraph_update_edges_for_call_stmt (old_stmt,
5306 old_decl, NULL);
5307 break;
5308 }
5309 if (gsi_end_p (i2))
5310 i2 = gsi_start_bb (dest);
5311 else
5312 gsi_next (&i2);
5313 while (1)
5314 {
5315 new_stmt = gsi_stmt (i2);
5316 update_stmt (new_stmt);
5317 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5318 new_stmt);
5319
5320 if (new_stmt == gsi_stmt (gsi))
5321 {
5322 /* It is okay to check for the EH state only at the
5323 very last of these statements. If the last one
5324 throws, nothing changes; if it does not, any EH
5325 edges of the original call can be removed. This
5326 would only be wrong if some intermediate statement
5327 could throw while the last one does not; then we
5328 would have to split the block, which we cannot do
5329 here and would lose the EH info anyway. And as
5330 builtins probably never throw, this all
5331 is moot anyway. */
5332 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5333 new_stmt))
5334 bitmap_set_bit (to_purge, dest->index);
5335 break;
5336 }
5337 gsi_next (&i2);
5338 }
5339 }
5340 }
5341 else if (fold_stmt (&gsi))
5342 {
5343 /* Re-read the statement from GSI as fold_stmt() may
5344 have changed it. */
5345 gimple *new_stmt = gsi_stmt (gsi);
5346 update_stmt (new_stmt);
5347
5348 if (is_gimple_call (old_stmt)
5349 || is_gimple_call (new_stmt))
5350 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5351 new_stmt);
5352
5353 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5354 bitmap_set_bit (to_purge, dest->index);
5355 }
5356 }
5357
5358 if (EDGE_COUNT (dest->succs) > 0)
5359 {
5360 /* Avoid warnings emitted from folding statements that
5361 became unreachable because of inlined function parameter
5362 propagation. */
5363 e = find_taken_edge (dest, NULL_TREE);
5364 if (e)
5365 stack.quick_push (e);
5366 else
5367 {
5368 edge_iterator ei;
5369 FOR_EACH_EDGE (e, ei, dest->succs)
5370 stack.safe_push (e);
5371 }
5372 }
5373 }
5374
5375 gimple_purge_all_dead_eh_edges (to_purge);
5376 }
5377
5378 /* Expand calls to inline functions in the body of FN. */
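/* A sketch of the expected use (illustrative; callers normally already
   have CFUN set to FN's struct function):

     push_cfun (DECL_STRUCT_FUNCTION (fn));
     unsigned todo = optimize_inline_calls (fn);
     pop_cfun ();

   The returned TODO flags (TODO_update_ssa, TODO_cleanup_cfg, ...) are
   meant to be handed back to the pass manager.  */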
5379
5380 unsigned int
5381 optimize_inline_calls (tree fn)
5382 {
5383 copy_body_data id;
5384 basic_block bb;
5385 int last = n_basic_blocks_for_fn (cfun);
5386 bool inlined_p = false;
5387
5388 /* Clear out ID. */
5389 memset (&id, 0, sizeof (id));
5390
5391 id.src_node = id.dst_node = cgraph_node::get (fn);
5392 gcc_assert (id.dst_node->definition);
5393 id.dst_fn = fn;
5394 /* Or any functions that aren't finished yet. */
5395 if (current_function_decl)
5396 id.dst_fn = current_function_decl;
5397
5398 id.copy_decl = copy_decl_maybe_to_var;
5399 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5400 id.transform_new_cfg = false;
5401 id.transform_return_to_modify = true;
5402 id.transform_parameter = true;
5403 id.transform_lang_insert_block = NULL;
5404 id.statements_to_fold = new hash_set<gimple *>;
5405
5406 push_gimplify_context ();
5407
5408 /* We make no attempts to keep dominance info up-to-date. */
5409 free_dominance_info (CDI_DOMINATORS);
5410 free_dominance_info (CDI_POST_DOMINATORS);
5411
5412 /* Register specific gimple functions. */
5413 gimple_register_cfg_hooks ();
5414
5415 /* Reach the trees by walking over the CFG, and note the
5416 enclosing basic-blocks in the call edges. */
5417 /* We walk the blocks going forward, because inlined function bodies
5418 will split id->current_basic_block, and the new blocks will
5419 follow it; we'll trudge through them, processing their CALL_EXPRs
5420 along the way. */
5421 auto_bitmap to_purge;
5422 FOR_EACH_BB_FN (bb, cfun)
5423 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5424
5425 pop_gimplify_context (NULL);
5426
5427 if (flag_checking)
5428 {
5429 struct cgraph_edge *e;
5430
5431 id.dst_node->verify ();
5432
5433 /* Double check that we inlined everything we are supposed to inline. */
5434 for (e = id.dst_node->callees; e; e = e->next_callee)
5435 gcc_assert (e->inline_failed);
5436 }
5437
5438 /* If we didn't inline into the function there is nothing to do. */
5439 if (!inlined_p)
5440 {
5441 delete id.statements_to_fold;
5442 return 0;
5443 }
5444
5445 /* Fold queued statements. */
5446 update_max_bb_count ();
5447 fold_marked_statements (last, id.statements_to_fold);
5448 delete id.statements_to_fold;
5449
5450 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5451 We need to do this after fold_marked_statements since that may walk
5452 the SSA use-def chain. */
5453 unsigned i;
5454 bitmap_iterator bi;
5455 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5456 {
5457 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5458 if (bb)
5459 {
5460 gimple_purge_dead_eh_edges (bb);
5461 gimple_purge_dead_abnormal_call_edges (bb);
5462 }
5463 }
5464
5465 gcc_assert (!id.debug_stmts.exists ());
5466
5467 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5468 number_blocks (fn);
5469
5470 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5471
5472 if (flag_checking)
5473 id.dst_node->verify ();
5474
5475 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5476 not possible yet - the IPA passes might mark various functions as
5477 non-throwing without proactively updating the local EH info. This is
5478 done later in the fixup_cfg pass, which also runs the verification. */
5479 return (TODO_update_ssa
5480 | TODO_cleanup_cfg
5481 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5482 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5483 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5484 ? TODO_rebuild_frequencies : 0));
5485 }
5486
5487 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
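/* For example (an illustrative sketch), a GENERIC expression can be
   deep-copied with

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Types, constants and most declarations are shared rather than copied,
   as the checks at the end of the function show.  */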
5488
5489 tree
5490 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5491 {
5492 enum tree_code code = TREE_CODE (*tp);
5493 enum tree_code_class cl = TREE_CODE_CLASS (code);
5494
5495 /* We make copies of most nodes. */
5496 if (IS_EXPR_CODE_CLASS (cl)
5497 || code == TREE_LIST
5498 || code == TREE_VEC
5499 || code == TYPE_DECL
5500 || code == OMP_CLAUSE)
5501 {
5502 /* Because the chain gets clobbered when we make a copy, we save it
5503 here. */
5504 tree chain = NULL_TREE, new_tree;
5505
5506 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5507 chain = TREE_CHAIN (*tp);
5508
5509 /* Copy the node. */
5510 new_tree = copy_node (*tp);
5511
5512 *tp = new_tree;
5513
5514 /* Now, restore the chain, if appropriate. That will cause
5515 walk_tree to walk into the chain as well. */
5516 if (code == PARM_DECL
5517 || code == TREE_LIST
5518 || code == OMP_CLAUSE)
5519 TREE_CHAIN (*tp) = chain;
5520
5521 /* For now, we don't update BLOCKs when we make copies. So, we
5522 have to nullify all BIND_EXPRs. */
5523 if (TREE_CODE (*tp) == BIND_EXPR)
5524 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5525 }
5526 else if (code == CONSTRUCTOR)
5527 {
5528 /* CONSTRUCTOR nodes need special handling because
5529 we need to duplicate the vector of elements. */
5530 tree new_tree;
5531
5532 new_tree = copy_node (*tp);
5533 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5534 *tp = new_tree;
5535 }
5536 else if (code == STATEMENT_LIST)
5537 /* We used to just abort on STATEMENT_LIST, but we can run into them
5538 with statement-expressions (c++/40975). */
5539 copy_statement_list (tp);
5540 else if (TREE_CODE_CLASS (code) == tcc_type)
5541 *walk_subtrees = 0;
5542 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5543 *walk_subtrees = 0;
5544 else if (TREE_CODE_CLASS (code) == tcc_constant)
5545 *walk_subtrees = 0;
5546 return NULL_TREE;
5547 }
5548
5549 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5550 information indicating to what new SAVE_EXPR this one should be mapped,
5551 use that one. Otherwise, create a new node and enter it in ST. When an
5552 already-remapped SAVE_EXPR is seen again, *WALK_SUBTREES is cleared. */
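/* For example (an illustrative sketch): in GENERIC the very same SAVE_EXPR
   node may be referenced several times, as in

     SAVE_EXPR <i + 1> * SAVE_EXPR <i + 1>

   where both operands are one node, guaranteeing a single evaluation.
   When the expression is copied, both references must be redirected to the
   same new node, which is exactly what the ST map ensures.  */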
5553
5554 static void
5555 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5556 {
5557 tree *n;
5558 tree t;
5559
5560 /* See if we already encountered this SAVE_EXPR. */
5561 n = st->get (*tp);
5562
5563 /* If we didn't already remap this SAVE_EXPR, do so now. */
5564 if (!n)
5565 {
5566 t = copy_node (*tp);
5567
5568 /* Remember this SAVE_EXPR. */
5569 st->put (*tp, t);
5570 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5571 st->put (t, t);
5572 }
5573 else
5574 {
5575 /* We've already walked into this SAVE_EXPR; don't do it again. */
5576 *walk_subtrees = 0;
5577 t = *n;
5578 }
5579
5580 /* Replace this SAVE_EXPR with the copy. */
5581 *tp = t;
5582 }
5583
5584 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5585 label, copies the declaration and enters it in the decl map of WI->info
5586 (which is really a 'copy_body_data *'). */
5587
5588 static tree
5589 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5590 bool *handled_ops_p ATTRIBUTE_UNUSED,
5591 struct walk_stmt_info *wi)
5592 {
5593 copy_body_data *id = (copy_body_data *) wi->info;
5594 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5595
5596 if (stmt)
5597 {
5598 tree decl = gimple_label_label (stmt);
5599
5600 /* Copy the decl and remember the copy. */
5601 insert_decl_map (id, decl, id->copy_decl (decl, id));
5602 }
5603
5604 return NULL_TREE;
5605 }
5606
5607 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5608 struct walk_stmt_info *wi);
5609
5610 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5611 Using the decl map of the copy_body_data pointed to by DATA's info field,
5612 remaps all local declarations to appropriate replacements in gimple
5613 operands. */
5614
5615 static tree
5616 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5617 {
5618 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5619 copy_body_data *id = (copy_body_data *) wi->info;
5620 hash_map<tree, tree> *st = id->decl_map;
5621 tree *n;
5622 tree expr = *tp;
5623
5624 /* For recursive invocations this is no longer the LHS itself. */
5625 bool is_lhs = wi->is_lhs;
5626 wi->is_lhs = false;
5627
5628 if (TREE_CODE (expr) == SSA_NAME)
5629 {
5630 *tp = remap_ssa_name (*tp, id);
5631 *walk_subtrees = 0;
5632 if (is_lhs)
5633 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5634 }
5635 /* Only a local declaration (variable or label). */
5636 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5637 || TREE_CODE (expr) == LABEL_DECL)
5638 {
5639 /* Lookup the declaration. */
5640 n = st->get (expr);
5641
5642 /* If it's there, remap it. */
5643 if (n)
5644 *tp = *n;
5645 *walk_subtrees = 0;
5646 }
5647 else if (TREE_CODE (expr) == STATEMENT_LIST
5648 || TREE_CODE (expr) == BIND_EXPR
5649 || TREE_CODE (expr) == SAVE_EXPR)
5650 gcc_unreachable ();
5651 else if (TREE_CODE (expr) == TARGET_EXPR)
5652 {
5653 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5654 It's OK for this to happen if it was part of a subtree that
5655 isn't immediately expanded, such as operand 2 of another
5656 TARGET_EXPR. */
5657 if (!TREE_OPERAND (expr, 1))
5658 {
5659 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5660 TREE_OPERAND (expr, 3) = NULL_TREE;
5661 }
5662 }
5663 else if (TREE_CODE (expr) == OMP_CLAUSE)
5664 {
5665 /* Before the omplower pass completes, some OMP clauses can contain
5666 sequences that are neither copied by gimple_seq_copy nor walked by
5667 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5668 in those situations, we have to copy and process them explicitly. */
5669
5670 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5671 {
5672 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5673 seq = duplicate_remap_omp_clause_seq (seq, wi);
5674 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5675 }
5676 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5677 {
5678 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5679 seq = duplicate_remap_omp_clause_seq (seq, wi);
5680 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5681 }
5682 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5683 {
5684 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5685 seq = duplicate_remap_omp_clause_seq (seq, wi);
5686 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5687 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5688 seq = duplicate_remap_omp_clause_seq (seq, wi);
5689 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5690 }
5691 }
5692
5693 /* Keep iterating. */
5694 return NULL_TREE;
5695 }
5696
5697
5698 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5699 Using the decl map of the copy_body_data pointed to by WI->info,
5700 remaps all local declarations to appropriate replacements in gimple
5701 statements. */
5702
5703 static tree
5704 replace_locals_stmt (gimple_stmt_iterator *gsip,
5705 bool *handled_ops_p ATTRIBUTE_UNUSED,
5706 struct walk_stmt_info *wi)
5707 {
5708 copy_body_data *id = (copy_body_data *) wi->info;
5709 gimple *gs = gsi_stmt (*gsip);
5710
5711 if (gbind *stmt = dyn_cast <gbind *> (gs))
5712 {
5713 tree block = gimple_bind_block (stmt);
5714
5715 if (block)
5716 {
5717 remap_block (&block, id);
5718 gimple_bind_set_block (stmt, block);
5719 }
5720
5721 /* This will remap a lot of the same decls again, but this should be
5722 harmless. */
5723 if (gimple_bind_vars (stmt))
5724 {
5725 tree old_var, decls = gimple_bind_vars (stmt);
5726
5727 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5728 if (!can_be_nonlocal (old_var, id)
5729 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5730 remap_decl (old_var, id);
5731
5732 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5733 id->prevent_decl_creation_for_types = true;
5734 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5735 id->prevent_decl_creation_for_types = false;
5736 }
5737 }
5738
5739 /* Keep iterating. */
5740 return NULL_TREE;
5741 }
5742
5743 /* Create a copy of SEQ and remap all decls in it. */
5744
5745 static gimple_seq
5746 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5747 {
5748 if (!seq)
5749 return NULL;
5750
5751 /* If there are any labels in OMP sequences, they can only be referred to
5752 from within the sequence itself, so we can do both walks here. */
5753 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5754 gimple_seq copy = gimple_seq_copy (seq);
5755 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5756 return copy;
5757 }
5758
5759 /* Copies everything in SEQ and replaces variables and labels local to
5760 current_function_decl. */
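/* A sketch of typical use (illustrative; exact callers vary): given a
   gimple_seq SEQ belonging to current_function_decl, for instance a
   sequence recorded in an OMP clause, a private copy whose local decls,
   labels and SSA names no longer alias the originals is obtained with

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);  */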
5761
5762 gimple_seq
5763 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5764 {
5765 copy_body_data id;
5766 struct walk_stmt_info wi;
5767 gimple_seq copy;
5768
5769 /* There's nothing to do for NULL_TREE. */
5770 if (seq == NULL)
5771 return seq;
5772
5773 /* Set up ID. */
5774 memset (&id, 0, sizeof (id));
5775 id.src_fn = current_function_decl;
5776 id.dst_fn = current_function_decl;
5777 id.src_cfun = cfun;
5778 id.decl_map = new hash_map<tree, tree>;
5779 id.debug_map = NULL;
5780
5781 id.copy_decl = copy_decl_no_change;
5782 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5783 id.transform_new_cfg = false;
5784 id.transform_return_to_modify = false;
5785 id.transform_parameter = false;
5786 id.transform_lang_insert_block = NULL;
5787
5788 /* Walk the tree once to find local labels. */
5789 memset (&wi, 0, sizeof (wi));
5790 hash_set<tree> visited;
5791 wi.info = &id;
5792 wi.pset = &visited;
5793 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5794
5795 copy = gimple_seq_copy (seq);
5796
5797 /* Walk the copy, remapping decls. */
5798 memset (&wi, 0, sizeof (wi));
5799 wi.info = &id;
5800 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5801
5802 /* Clean up. */
5803 delete id.decl_map;
5804 if (id.debug_map)
5805 delete id.debug_map;
5806 if (id.dependence_map)
5807 {
5808 delete id.dependence_map;
5809 id.dependence_map = NULL;
5810 }
5811
5812 return copy;
5813 }
5814
5815
5816 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5817
5818 static tree
5819 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5820 {
5821 if (*tp == data)
5822 return (tree) data;
5823 else
5824 return NULL;
5825 }
5826
5827 DEBUG_FUNCTION bool
5828 debug_find_tree (tree top, tree search)
5829 {
5830 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5831 }
5832
5833
5834 /* Declare the variables created by the inliner. Add all the variables in
5835 VARS to BIND_EXPR. */
5836
5837 static void
5838 declare_inline_vars (tree block, tree vars)
5839 {
5840 tree t;
5841 for (t = vars; t; t = DECL_CHAIN (t))
5842 {
5843 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5844 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5845 add_local_decl (cfun, t);
5846 }
5847
5848 if (block)
5849 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5850 }
5851
5852 /* Finish the copy COPY of declaration DECL: copy the debug-related flags,
5853 record DECL as the abstract origin, and move COPY into the right context.
5854 DECL comes from ID->src_fn; COPY will live in ID->dst_fn. */
5855
5856 tree
5857 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5858 {
5859 /* Don't generate debug information for the copy if we wouldn't have
5860 generated it for the original either. */
5861 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5862 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5863
5864 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5865 declaration inspired this copy. */
5866 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5867
5868 /* The new variable/label has no RTL, yet. */
5869 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5870 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5871 SET_DECL_RTL (copy, 0);
5872 /* For vector typed decls make sure to update DECL_MODE according
5873 to the new function context. */
5874 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5875 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5876
5877 /* These args would always appear unused, if not for this. */
5878 TREE_USED (copy) = 1;
5879
5880 /* Set the context for the new declaration. */
5881 if (!DECL_CONTEXT (decl))
5882 /* Globals stay global. */
5883 ;
5884 else if (DECL_CONTEXT (decl) != id->src_fn)
5885 /* Things that weren't in the scope of the function we're inlining
5886 from aren't in the scope we're inlining to, either. */
5887 ;
5888 else if (TREE_STATIC (decl))
5889 /* Function-scoped static variables should stay in the original
5890 function. */
5891 ;
5892 else
5893 {
5894 /* Ordinary automatic local variables are now in the scope of the
5895 new function. */
5896 DECL_CONTEXT (copy) = id->dst_fn;
5897 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5898 {
5899 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5900 DECL_ATTRIBUTES (copy)
5901 = tree_cons (get_identifier ("omp simt private"), NULL,
5902 DECL_ATTRIBUTES (copy));
5903 id->dst_simt_vars->safe_push (copy);
5904 }
5905 }
5906
5907 return copy;
5908 }
5909
5910 /* Create a new VAR_DECL that is identical in all respects to DECL, except
5911 that the copy is always a VAR_DECL while DECL is a PARM_DECL or RESULT_DECL.
5912 The original DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5913
5914 tree
5915 copy_decl_to_var (tree decl, copy_body_data *id)
5916 {
5917 tree copy, type;
5918
5919 gcc_assert (TREE_CODE (decl) == PARM_DECL
5920 || TREE_CODE (decl) == RESULT_DECL);
5921
5922 type = TREE_TYPE (decl);
5923
5924 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5925 VAR_DECL, DECL_NAME (decl), type);
5926 if (DECL_PT_UID_SET_P (decl))
5927 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5928 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5929 TREE_READONLY (copy) = TREE_READONLY (decl);
5930 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5931 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5932 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5933
5934 return copy_decl_for_dup_finish (id, decl, copy);
5935 }
5936
5937 /* Like copy_decl_to_var, but create a return slot object instead of a
5938 pointer variable for return by invisible reference. */
5939
5940 static tree
5941 copy_result_decl_to_var (tree decl, copy_body_data *id)
5942 {
5943 tree copy, type;
5944
5945 gcc_assert (TREE_CODE (decl) == PARM_DECL
5946 || TREE_CODE (decl) == RESULT_DECL);
5947
5948 type = TREE_TYPE (decl);
5949 if (DECL_BY_REFERENCE (decl))
5950 type = TREE_TYPE (type);
5951
5952 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5953 VAR_DECL, DECL_NAME (decl), type);
5954 if (DECL_PT_UID_SET_P (decl))
5955 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5956 TREE_READONLY (copy) = TREE_READONLY (decl);
5957 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5958 if (!DECL_BY_REFERENCE (decl))
5959 {
5960 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5961 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5962 }
5963
5964 return copy_decl_for_dup_finish (id, decl, copy);
5965 }
5966
5967 tree
5968 copy_decl_no_change (tree decl, copy_body_data *id)
5969 {
5970 tree copy;
5971
5972 copy = copy_node (decl);
5973
5974 /* The COPY is not abstract; it will be generated in DST_FN. */
5975 DECL_ABSTRACT_P (copy) = false;
5976 lang_hooks.dup_lang_specific_decl (copy);
5977
5978 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5979 been taken; it's for internal bookkeeping in expand_goto_internal. */
5980 if (TREE_CODE (copy) == LABEL_DECL)
5981 {
5982 TREE_ADDRESSABLE (copy) = 0;
5983 LABEL_DECL_UID (copy) = -1;
5984 }
5985
5986 return copy_decl_for_dup_finish (id, decl, copy);
5987 }
5988
5989 static tree
5990 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5991 {
5992 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5993 return copy_decl_to_var (decl, id);
5994 else
5995 return copy_decl_no_change (decl, id);
5996 }
5997
5998 /* Return a copy of the function's argument tree without any modifications. */
5999
6000 static tree
6001 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6002 {
6003 tree arg, *parg;
6004 tree new_parm = NULL;
6005
6006 parg = &new_parm;
6007 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6008 {
6009 tree new_tree = remap_decl (arg, id);
6010 if (TREE_CODE (new_tree) != PARM_DECL)
6011 new_tree = id->copy_decl (arg, id);
6012 lang_hooks.dup_lang_specific_decl (new_tree);
6013 *parg = new_tree;
6014 parg = &DECL_CHAIN (new_tree);
6015 }
6016 return new_parm;
6017 }
6018
6019 /* Return a copy of the function's static chain. */
6020 static tree
6021 copy_static_chain (tree static_chain, copy_body_data * id)
6022 {
6023 tree *chain_copy, *pvar;
6024
6025 chain_copy = &static_chain;
6026 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6027 {
6028 tree new_tree = remap_decl (*pvar, id);
6029 lang_hooks.dup_lang_specific_decl (new_tree);
6030 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6031 *pvar = new_tree;
6032 }
6033 return static_chain;
6034 }
6035
6036 /* Return true if the function is allowed to be versioned.
6037 This is a guard for the versioning functionality. */
6038
6039 bool
6040 tree_versionable_function_p (tree fndecl)
6041 {
6042 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6043 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6044 }
6045
6046 /* Update clone info after duplication. */
6047
6048 static void
6049 update_clone_info (copy_body_data * id)
6050 {
6051 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6052 = id->dst_node->clone.performed_splits;
6053 if (cur_performed_splits)
6054 {
6055 unsigned len = cur_performed_splits->length ();
6056 for (unsigned i = 0; i < len; i++)
6057 {
6058 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6059 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6060 }
6061 }
6062
6063 struct cgraph_node *node;
6064 if (!id->dst_node->clones)
6065 return;
6066 for (node = id->dst_node->clones; node != id->dst_node;)
6067 {
6068 /* First update replace maps to match the new body. */
6069 if (node->clone.tree_map)
6070 {
6071 unsigned int i;
6072 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6073 {
6074 struct ipa_replace_map *replace_info;
6075 replace_info = (*node->clone.tree_map)[i];
6076 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6077 }
6078 }
6079 if (node->clone.performed_splits)
6080 {
6081 unsigned len = vec_safe_length (node->clone.performed_splits);
6082 for (unsigned i = 0; i < len; i++)
6083 {
6084 ipa_param_performed_split *ps
6085 = &(*node->clone.performed_splits)[i];
6086 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6087 }
6088 }
6089 if (unsigned len = vec_safe_length (cur_performed_splits))
6090 {
6091 /* We do not want to add the current performed splits when we are saving
6092 a copy of the function body for later use during inlining; that would
6093 just duplicate all entries. So check whether anything referring to the
6094 first dummy_decl is already present. */
6095 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6096 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6097 for (unsigned i = 0; i < dst_len; i++)
6098 if ((*node->clone.performed_splits)[i].dummy_decl
6099 == first->dummy_decl)
6100 {
6101 len = 0;
6102 break;
6103 }
6104
6105 for (unsigned i = 0; i < len; i++)
6106 vec_safe_push (node->clone.performed_splits,
6107 (*cur_performed_splits)[i]);
6108 if (flag_checking)
6109 {
6110 for (unsigned i = 0; i < dst_len; i++)
6111 {
6112 ipa_param_performed_split *ps1
6113 = &(*node->clone.performed_splits)[i];
6114 for (unsigned j = i + 1; j < dst_len; j++)
6115 {
6116 ipa_param_performed_split *ps2
6117 = &(*node->clone.performed_splits)[j];
6118 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6119 || ps1->unit_offset != ps2->unit_offset);
6120 }
6121 }
6122 }
6123 }
6124
6125 if (node->clones)
6126 node = node->clones;
6127 else if (node->next_sibling_clone)
6128 node = node->next_sibling_clone;
6129 else
6130 {
6131 while (node != id->dst_node && !node->next_sibling_clone)
6132 node = node->clone_of;
6133 if (node != id->dst_node)
6134 node = node->next_sibling_clone;
6135 }
6136 }
6137 }
6138
6139 /* Create a copy of a function's tree.
6140 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6141 of the original function and the new copied function
6142 respectively. In case we want to replace a DECL
6143 tree with another tree while duplicating the function's
6144 body, TREE_MAP represents the mapping between these
6145 trees. If UPDATE_CLONES is set, the call_stmt fields
6146 of edges of clones of the function will be updated.
6147
6148 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6149 the function parameters and return value) should be modified.
6150 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6151 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6152 */
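/* An illustrative sketch (not taken from a particular caller): to create a
   specialized clone of

     int foo (int a, int b) { ... }

   in which B is known to be zero, the IPA code builds an ipa_replace_map
   with parm_num == 1 and new_tree == integer_zero_node, pushes it into
   TREE_MAP, and calls

     tree_function_versioning (old_decl, new_decl, tree_map,
                               NULL, false, NULL, NULL);

   The copied body then initializes the remapped parameter from the
   replacement value.  */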
6153 void
6154 tree_function_versioning (tree old_decl, tree new_decl,
6155 vec<ipa_replace_map *, va_gc> *tree_map,
6156 ipa_param_adjustments *param_adjustments,
6157 bool update_clones, bitmap blocks_to_copy,
6158 basic_block new_entry)
6159 {
6160 struct cgraph_node *old_version_node;
6161 struct cgraph_node *new_version_node;
6162 copy_body_data id;
6163 tree p;
6164 unsigned i;
6165 struct ipa_replace_map *replace_info;
6166 basic_block old_entry_block, bb;
6167 auto_vec<gimple *, 10> init_stmts;
6168 tree vars = NULL_TREE;
6169
6170 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6171 && TREE_CODE (new_decl) == FUNCTION_DECL);
6172 DECL_POSSIBLY_INLINED (old_decl) = 1;
6173
6174 old_version_node = cgraph_node::get (old_decl);
6175 gcc_checking_assert (old_version_node);
6176 new_version_node = cgraph_node::get (new_decl);
6177 gcc_checking_assert (new_version_node);
6178
6179 /* Copy over debug args. */
6180 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6181 {
6182 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6183 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6184 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6185 old_debug_args = decl_debug_args_lookup (old_decl);
6186 if (old_debug_args)
6187 {
6188 new_debug_args = decl_debug_args_insert (new_decl);
6189 *new_debug_args = vec_safe_copy (*old_debug_args);
6190 }
6191 }
6192
6193 /* Output the inlining info for this abstract function, since it has been
6194 inlined. If we don't do this now, we can lose the information about the
6195 variables in the function when the blocks get blown away as soon as we
6196 remove the cgraph node. */
6197 (*debug_hooks->outlining_inline_function) (old_decl);
6198
6199 DECL_ARTIFICIAL (new_decl) = 1;
6200 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6201 if (DECL_ORIGIN (old_decl) == old_decl)
6202 old_version_node->used_as_abstract_origin = true;
6203 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6204
6205 /* Prepare the data structures for the tree copy. */
6206 memset (&id, 0, sizeof (id));
6207
6208 /* Generate a new name for the new version. */
6209 id.statements_to_fold = new hash_set<gimple *>;
6210
6211 id.decl_map = new hash_map<tree, tree>;
6212 id.debug_map = NULL;
6213 id.src_fn = old_decl;
6214 id.dst_fn = new_decl;
6215 id.src_node = old_version_node;
6216 id.dst_node = new_version_node;
6217 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6218 id.blocks_to_copy = blocks_to_copy;
6219
6220 id.copy_decl = copy_decl_no_change;
6221 id.transform_call_graph_edges
6222 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6223 id.transform_new_cfg = true;
6224 id.transform_return_to_modify = false;
6225 id.transform_parameter = false;
6226 id.transform_lang_insert_block = NULL;
6227
6228 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6229 (DECL_STRUCT_FUNCTION (old_decl));
6230 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6231 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6232 initialize_cfun (new_decl, old_decl,
6233 new_entry ? new_entry->count : old_entry_block->count);
6234 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6235 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6236 = id.src_cfun->gimple_df->ipa_pta;
6237
6238 /* Copy the function's static chain. */
6239 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6240 if (p)
6241 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6242 = copy_static_chain (p, &id);
6243
6244 auto_vec<int, 16> new_param_indices;
6245 ipa_param_adjustments *old_param_adjustments
6246 = old_version_node->clone.param_adjustments;
6247 if (old_param_adjustments)
6248 old_param_adjustments->get_updated_indices (&new_param_indices);
6249
6250 /* If there's a tree_map, prepare for substitution. */
6251 if (tree_map)
6252 for (i = 0; i < tree_map->length (); i++)
6253 {
6254 gimple *init;
6255 replace_info = (*tree_map)[i];
6256
6257 int p = replace_info->parm_num;
6258 if (old_param_adjustments)
6259 p = new_param_indices[p];
6260
6261 tree parm;
6262 tree req_type, new_type;
6263
6264 for (parm = DECL_ARGUMENTS (old_decl); p;
6265 parm = DECL_CHAIN (parm))
6266 p--;
6267 tree old_tree = parm;
6268 req_type = TREE_TYPE (parm);
6269 new_type = TREE_TYPE (replace_info->new_tree);
6270 if (!useless_type_conversion_p (req_type, new_type))
6271 {
6272 if (fold_convertible_p (req_type, replace_info->new_tree))
6273 replace_info->new_tree
6274 = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6275 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6276 replace_info->new_tree
6277 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6278 replace_info->new_tree);
6279 else
6280 {
6281 if (dump_file)
6282 {
6283 fprintf (dump_file, " const ");
6284 print_generic_expr (dump_file,
6285 replace_info->new_tree);
6286 fprintf (dump_file,
6287 " can't be converted to param ");
6288 print_generic_expr (dump_file, parm);
6289 fprintf (dump_file, "\n");
6290 }
6291 old_tree = NULL;
6292 }
6293 }
6294
6295 if (old_tree)
6296 {
6297 init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6298 id.src_fn, NULL, &vars);
6299 if (init)
6300 init_stmts.safe_push (init);
6301 }
6302 }
6303
6304 ipa_param_body_adjustments *param_body_adjs = NULL;
6305 if (param_adjustments)
6306 {
6307 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6308 new_decl, old_decl,
6309 &id, &vars, tree_map);
6310 id.param_body_adjs = param_body_adjs;
6311 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6312 }
6313 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6314 DECL_ARGUMENTS (new_decl)
6315 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6316
6317 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6318 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6319
6320 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6321
6322 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6323 /* Add local vars. */
6324 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6325
6326 if (DECL_RESULT (old_decl) == NULL_TREE)
6327 ;
6328 else if (param_adjustments && param_adjustments->m_skip_return
6329 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6330 {
6331 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6332 &id);
6333 declare_inline_vars (NULL, resdecl_repl);
6334 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6335
6336 DECL_RESULT (new_decl)
6337 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6338 RESULT_DECL, NULL_TREE, void_type_node);
6339 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6340 DECL_IS_MALLOC (new_decl) = false;
6341 cfun->returns_struct = 0;
6342 cfun->returns_pcc_struct = 0;
6343 }
6344 else
6345 {
6346 tree old_name;
6347 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6348 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6349 if (gimple_in_ssa_p (id.src_cfun)
6350 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6351 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6352 {
6353 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6354 insert_decl_map (&id, old_name, new_name);
6355 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6356 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6357 }
6358 }
6359
6360 /* Set up the destination function's loop tree. */
6361 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6362 {
6363 cfun->curr_properties &= ~PROP_loops;
6364 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6365 cfun->curr_properties |= PROP_loops;
6366 }
6367
6368 /* Copy the Function's body. */
6369 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6370 new_entry);
6371
6372 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6373 number_blocks (new_decl);
6374
6375 /* We want to create the BB unconditionally, so that the addition of
6376 debug stmts doesn't affect BB count, which may in the end cause
6377 codegen differences. */
6378 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6379 while (init_stmts.length ())
6380 insert_init_stmt (&id, bb, init_stmts.pop ());
6381 update_clone_info (&id);
6382
6383 /* Remap the nonlocal_goto_save_area, if any. */
6384 if (cfun->nonlocal_goto_save_area)
6385 {
6386 struct walk_stmt_info wi;
6387
6388 memset (&wi, 0, sizeof (wi));
6389 wi.info = &id;
6390 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6391 }
6392
6393 /* Clean up. */
6394 delete id.decl_map;
6395 if (id.debug_map)
6396 delete id.debug_map;
6397 free_dominance_info (CDI_DOMINATORS);
6398 free_dominance_info (CDI_POST_DOMINATORS);
6399
6400 update_max_bb_count ();
6401 fold_marked_statements (0, id.statements_to_fold);
6402 delete id.statements_to_fold;
6403 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6404 if (id.dst_node->definition)
6405 cgraph_edge::rebuild_references ();
6406 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6407 {
6408 calculate_dominance_info (CDI_DOMINATORS);
6409 fix_loop_structure (NULL);
6410 }
6411 update_ssa (TODO_update_ssa);
6412
6413 /* After partial cloning we need to rescale frequencies, so they are
6414 within proper range in the cloned function. */
6415 if (new_entry)
6416 {
6417 struct cgraph_edge *e;
6418 rebuild_frequencies ();
6419
6420 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6421 for (e = new_version_node->callees; e; e = e->next_callee)
6422 {
6423 basic_block bb = gimple_bb (e->call_stmt);
6424 e->count = bb->count;
6425 }
6426 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6427 {
6428 basic_block bb = gimple_bb (e->call_stmt);
6429 e->count = bb->count;
6430 }
6431 }
6432
6433 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6434 {
6435 vec<tree, va_gc> **debug_args = NULL;
6436 unsigned int len = 0;
6437 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6438
6439 for (i = 0; i < reset_len; i++)
6440 {
6441 tree parm = param_body_adjs->m_reset_debug_decls[i];
6442 gcc_assert (is_gimple_reg (parm));
6443 tree ddecl;
6444
6445 if (debug_args == NULL)
6446 {
6447 debug_args = decl_debug_args_insert (new_decl);
6448 len = vec_safe_length (*debug_args);
6449 }
6450 ddecl = make_node (DEBUG_EXPR_DECL);
6451 DECL_ARTIFICIAL (ddecl) = 1;
6452 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6453 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6454 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6455 vec_safe_push (*debug_args, ddecl);
6456 }
6457 if (debug_args != NULL)
6458 {
6459 /* On the callee side, add
6460 DEBUG D#Y s=> parm
6461 DEBUG var => D#Y
6462 stmts to the first bb where var is a VAR_DECL created for the
6463 optimized away parameter in DECL_INITIAL block. This hints
6464 in the debug info that var (whose DECL_ORIGIN is the parm
6465 PARM_DECL) is optimized away, but could be looked up at the
6466 call site as value of D#X there. */
6467 tree vexpr;
6468 gimple_stmt_iterator cgsi
6469 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6470 gimple *def_temp;
6471 tree var = vars;
6472 i = vec_safe_length (*debug_args);
6473 do
6474 {
6475 i -= 2;
6476 while (var != NULL_TREE
6477 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6478 var = TREE_CHAIN (var);
6479 if (var == NULL_TREE)
6480 break;
6481 vexpr = make_node (DEBUG_EXPR_DECL);
6482 tree parm = (**debug_args)[i];
6483 DECL_ARTIFICIAL (vexpr) = 1;
6484 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6485 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6486 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6487 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6488 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6489 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6490 }
6491 while (i > len);
6492 }
6493 }
6494 delete param_body_adjs;
6495 free_dominance_info (CDI_DOMINATORS);
6496 free_dominance_info (CDI_POST_DOMINATORS);
6497
6498 gcc_assert (!id.debug_stmts.exists ());
6499 pop_cfun ();
6500 return;
6501 }
6502
6503 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6504 the callee and return the inlined body on success. */
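/* For example (an illustrative sketch): for a call to a "const" function
   whose GENERIC body is still available,

     static int twice (int i) __attribute__ ((const));
     static int twice (int i) { return i * 2; }

   the body is copied with the parameter mapped to the actual argument and
   the RHS of the resulting "retval = i * 2" MODIFY_EXPR is returned;
   otherwise NULL_TREE is returned and the call is left alone.  */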
6505
6506 tree
6507 maybe_inline_call_in_expr (tree exp)
6508 {
6509 tree fn = get_callee_fndecl (exp);
6510
6511 /* We can only try to inline "const" functions. */
6512 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6513 {
6514 call_expr_arg_iterator iter;
6515 copy_body_data id;
6516 tree param, arg, t;
6517 hash_map<tree, tree> decl_map;
6518
6519 /* Remap the parameters. */
6520 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6521 param;
6522 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6523 decl_map.put (param, arg);
6524
6525 memset (&id, 0, sizeof (id));
6526 id.src_fn = fn;
6527 id.dst_fn = current_function_decl;
6528 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6529 id.decl_map = &decl_map;
6530
6531 id.copy_decl = copy_decl_no_change;
6532 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6533 id.transform_new_cfg = false;
6534 id.transform_return_to_modify = true;
6535 id.transform_parameter = true;
6536 id.transform_lang_insert_block = NULL;
6537
6538 /* Make sure not to unshare trees behind the front-end's back
6539 since front-end specific mechanisms may rely on sharing. */
6540 id.regimplify = false;
6541 id.do_not_unshare = true;
6542
6543 /* We're not inside any EH region. */
6544 id.eh_lp_nr = 0;
6545
6546 t = copy_tree_body (&id);
6547
6548 /* We can only return something suitable for use in a GENERIC
6549 expression tree. */
6550 if (TREE_CODE (t) == MODIFY_EXPR)
6551 return TREE_OPERAND (t, 1);
6552 }
6553
6554 return NULL_TREE;
6555 }
6556
6557 /* Duplicate a type, fields and all. */
6558
6559 tree
6560 build_duplicate_type (tree type)
6561 {
6562 struct copy_body_data id;
6563
6564 memset (&id, 0, sizeof (id));
6565 id.src_fn = current_function_decl;
6566 id.dst_fn = current_function_decl;
6567 id.src_cfun = cfun;
6568 id.decl_map = new hash_map<tree, tree>;
6569 id.debug_map = NULL;
6570 id.copy_decl = copy_decl_no_change;
6571
6572 type = remap_type_1 (type, &id);
6573
6574 delete id.decl_map;
6575 if (id.debug_map)
6576 delete id.debug_map;
6577
6578 TYPE_CANONICAL (type) = type;
6579
6580 return type;
6581 }
6582
6583 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6584 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6585 evaluation. */
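/* A sketch of the intended use by the constexpr machinery (illustrative,
   not a verbatim excerpt):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   The evaluator then binds the remapped PARMS to the argument values,
   evaluates BODY, and reads the computed value back through RESULT.  */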
6586
6587 tree
6588 copy_fn (tree fn, tree& parms, tree& result)
6589 {
6590 copy_body_data id;
6591 tree param;
6592 hash_map<tree, tree> decl_map;
6593
6594 tree *p = &parms;
6595 *p = NULL_TREE;
6596
6597 memset (&id, 0, sizeof (id));
6598 id.src_fn = fn;
6599 id.dst_fn = current_function_decl;
6600 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6601 id.decl_map = &decl_map;
6602
6603 id.copy_decl = copy_decl_no_change;
6604 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6605 id.transform_new_cfg = false;
6606 id.transform_return_to_modify = false;
6607 id.transform_parameter = true;
6608 id.transform_lang_insert_block = NULL;
6609
6610 /* Make sure not to unshare trees behind the front-end's back
6611 since front-end specific mechanisms may rely on sharing. */
6612 id.regimplify = false;
6613 id.do_not_unshare = true;
6614 id.do_not_fold = true;
6615
6616 /* We're not inside any EH region. */
6617 id.eh_lp_nr = 0;
6618
6619 /* Remap the parameters and result and return them to the caller. */
6620 for (param = DECL_ARGUMENTS (fn);
6621 param;
6622 param = DECL_CHAIN (param))
6623 {
6624 *p = remap_decl (param, &id);
6625 p = &DECL_CHAIN (*p);
6626 }
6627
6628 if (DECL_RESULT (fn))
6629 result = remap_decl (DECL_RESULT (fn), &id);
6630 else
6631 result = NULL_TREE;
6632
6633 return copy_tree_body (&id);
6634 }