[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64
65 /* I'm not really happy about this, but we need to handle gimple and
66 non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75 statements and RESX statements are adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
81
82 Versioning: a function body is duplicated and the result is a new
83 function, rather than being inserted into blocks of an existing
84 function as with inlining. Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90 All of these will simultaneously look up any callgraph edges. If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined), those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
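
/* As a rough illustration of the inlining transform described above (a
   hand-written sketch, not compiler output; the names callee, p.2 and
   retval.1 are made up), a call

       int callee (int p) { return p + 1; }
       ...
       r = callee (x);

   is conceptually rewritten in the caller as

       p.2 = x;              <- PARM_DECL remapped to a VAR_DECL
       retval.1 = p.2 + 1;   <- RETURN_EXPR became a MODIFY_EXPR
       r = retval.1;

   with the callee's duplicated eh_region info appended to the caller's.  */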
99
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136
137 /* Insert a tree->tree mapping for ID. Although the name suggests
138 that the trees should be variables, this is used for more than that. */
139
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143 id->decl_map->put (key, value);
144
145 /* Always insert an identity map as well. If we see this same new
146 node again, we won't want to duplicate it a second time. */
147 if (key != value)
148 id->decl_map->put (value, value);
149 }
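
/* Usage sketch (illustrative only; old_parm and new_var are made-up
   names): after

       insert_decl_map (id, old_parm, new_var);

   ID->decl_map contains both old_parm -> new_var and new_var -> new_var,
   so if the copied body is walked again and new_var itself is looked up,
   it maps to itself instead of being duplicated a second time.  */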
150
151 /* Insert a tree->tree mapping for ID. This is only used for
152 variables. */
153
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157 if (!gimple_in_ssa_p (id->src_cfun))
158 return;
159
160 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161 return;
162
163 if (!target_for_debug_bind (key))
164 return;
165
166 gcc_assert (TREE_CODE (key) == PARM_DECL);
167 gcc_assert (VAR_P (value));
168
169 if (!id->debug_map)
170 id->debug_map = new hash_map<tree, tree>;
171
172 id->debug_map->put (key, value);
173 }
174
175 /* If nonzero, we're remapping the contents of inlined debug
176 statements. If negative, an error has occurred, such as a
177 reference to a variable that isn't available in the inlined
178 context. */
179 static int processing_debug_stmt = 0;
180
181 /* Construct new SSA name for old NAME. ID is the inline context. */
182
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186 tree new_tree, var;
187 tree *n;
188
189 gcc_assert (TREE_CODE (name) == SSA_NAME);
190
191 n = id->decl_map->get (name);
192 if (n)
193 {
194 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
195 remove an unused LHS from a call statement. Such an LHS can however
196 still appear in debug statements, but its value is lost in this
197 function and we do not want to map it. */
198 if (id->killed_new_ssa_names
199 && id->killed_new_ssa_names->contains (*n))
200 {
201 gcc_assert (processing_debug_stmt);
202 processing_debug_stmt = -1;
203 return name;
204 }
205
206 return unshare_expr (*n);
207 }
208
209 if (processing_debug_stmt)
210 {
211 if (SSA_NAME_IS_DEFAULT_DEF (name)
212 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
213 && id->entry_bb == NULL
214 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
215 {
216 tree vexpr = make_node (DEBUG_EXPR_DECL);
217 gimple *def_temp;
218 gimple_stmt_iterator gsi;
219 tree val = SSA_NAME_VAR (name);
220
221 n = id->decl_map->get (val);
222 if (n != NULL)
223 val = *n;
224 if (TREE_CODE (val) != PARM_DECL
225 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
226 {
227 processing_debug_stmt = -1;
228 return name;
229 }
230 n = id->decl_map->get (val);
231 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
232 return *n;
233 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
234 DECL_ARTIFICIAL (vexpr) = 1;
235 TREE_TYPE (vexpr) = TREE_TYPE (name);
236 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
237 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
238 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
239 insert_decl_map (id, val, vexpr);
240 return vexpr;
241 }
242
243 processing_debug_stmt = -1;
244 return name;
245 }
246
247 /* Remap anonymous SSA names or SSA names of anonymous decls. */
248 var = SSA_NAME_VAR (name);
249 if (!var
250 || (!SSA_NAME_IS_DEFAULT_DEF (name)
251 && VAR_P (var)
252 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
253 && DECL_ARTIFICIAL (var)
254 && DECL_IGNORED_P (var)
255 && !DECL_NAME (var)))
256 {
257 struct ptr_info_def *pi;
258 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
259 if (!var && SSA_NAME_IDENTIFIER (name))
260 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
261 insert_decl_map (id, name, new_tree);
262 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
263 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
264 /* At least IPA points-to info can be directly transferred. */
265 if (id->src_cfun->gimple_df
266 && id->src_cfun->gimple_df->ipa_pta
267 && POINTER_TYPE_P (TREE_TYPE (name))
268 && (pi = SSA_NAME_PTR_INFO (name))
269 && !pi->pt.anything)
270 {
271 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
272 new_pi->pt = pi->pt;
273 }
274 /* So can range-info. */
275 if (!POINTER_TYPE_P (TREE_TYPE (name))
276 && SSA_NAME_RANGE_INFO (name))
277 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
278 SSA_NAME_RANGE_INFO (name));
279 return new_tree;
280 }
281
282 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do that
283 in copy_bb. */
284 new_tree = remap_decl (var, id);
285
286 /* We might've substituted a constant or another SSA_NAME for
287 the variable.
288
289 Replace the SSA name representing RESULT_DECL by the variable during
290 inlining: this saves us from the need to introduce a PHI node in case
291 the return value is only partly initialized. */
292 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
293 && (!SSA_NAME_VAR (name)
294 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
295 || !id->transform_return_to_modify))
296 {
297 struct ptr_info_def *pi;
298 new_tree = make_ssa_name (new_tree);
299 insert_decl_map (id, name, new_tree);
300 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
301 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
302 /* At least IPA points-to info can be directly transferred. */
303 if (id->src_cfun->gimple_df
304 && id->src_cfun->gimple_df->ipa_pta
305 && POINTER_TYPE_P (TREE_TYPE (name))
306 && (pi = SSA_NAME_PTR_INFO (name))
307 && !pi->pt.anything)
308 {
309 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 new_pi->pt = pi->pt;
311 }
312 /* So can range-info. */
313 if (!POINTER_TYPE_P (TREE_TYPE (name))
314 && SSA_NAME_RANGE_INFO (name))
315 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
316 SSA_NAME_RANGE_INFO (name));
317 if (SSA_NAME_IS_DEFAULT_DEF (name))
318 {
319 /* By inlining a function having an uninitialized variable, we might
320 extend its lifetime (the variable might get reused). This causes
321 an ICE if we end up extending the lifetime of an SSA name across
322 an abnormal edge, but it also increases register pressure.
323
324 We simply initialize all uninitialized vars by 0, except for the
325 case where we are inlining to the very first BB. We can avoid
326 this for all BBs that are not inside strongly connected
327 regions of the CFG, but this is expensive to test. */
328 if (id->entry_bb
329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
330 && (!SSA_NAME_VAR (name)
331 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
332 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
333 0)->dest
334 || EDGE_COUNT (id->entry_bb->preds) != 1))
335 {
336 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
337 gimple *init_stmt;
338 tree zero = build_zero_cst (TREE_TYPE (new_tree));
339
340 init_stmt = gimple_build_assign (new_tree, zero);
341 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
342 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
343 }
344 else
345 {
346 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
347 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
348 }
349 }
350 }
351 else
352 insert_decl_map (id, name, new_tree);
353 return new_tree;
354 }
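
/* A sketch of the default-def handling above (illustrative, not an
   actual dump): if the callee reads an uninitialized local u whose
   default definition u_1(D) occurs in an abnormal PHI, then when
   inlining somewhere other than the very first BB the copied name,
   say u_7, is given an explicit definition

       u_7 = 0;

   appended to ID->entry_bb, rather than being kept as a default
   definition whose lifetime could be extended across an abnormal edge.  */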
355
356 /* Remap DECL during the copying of the BLOCK tree for the function. */
357
358 tree
359 remap_decl (tree decl, copy_body_data *id)
360 {
361 tree *n;
362
363 /* We only remap local variables in the current function. */
364
365 /* See if we have remapped this declaration. */
366
367 n = id->decl_map->get (decl);
368
369 if (!n && processing_debug_stmt)
370 {
371 processing_debug_stmt = -1;
372 return decl;
373 }
374
375 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
376 necessary DECLs have already been remapped and we do not want to duplicate
377 a decl coming from outside of the sequence we are copying. */
378 if (!n
379 && id->prevent_decl_creation_for_types
380 && id->remapping_type_depth > 0
381 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
382 return decl;
383
384 /* If we didn't already have an equivalent for this declaration, create one
385 now. */
386 if (!n)
387 {
388 /* Make a copy of the variable or label. */
389 tree t = id->copy_decl (decl, id);
390
391 /* Remember it, so that if we encounter this local entity again
392 we can reuse this copy. Do this early because remap_type may
393 need this decl for TYPE_STUB_DECL. */
394 insert_decl_map (id, decl, t);
395
396 if (!DECL_P (t))
397 return t;
398
399 /* Remap types, if necessary. */
400 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
401 if (TREE_CODE (t) == TYPE_DECL)
402 {
403 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404
405 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
406 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
407 is not set on the TYPE_DECL, for example in LTO mode. */
408 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
409 {
410 tree x = build_variant_type_copy (TREE_TYPE (t));
411 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
412 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
413 DECL_ORIGINAL_TYPE (t) = x;
414 }
415 }
416
417 /* Remap sizes as necessary. */
418 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
419 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
420
421 /* If fields, do likewise for offset and qualifier. */
422 if (TREE_CODE (t) == FIELD_DECL)
423 {
424 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
425 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
426 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
427 }
428
429 return t;
430 }
431
432 if (id->do_not_unshare)
433 return *n;
434 else
435 return unshare_expr (*n);
436 }
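
/* Usage sketch (illustrative only): the first remap_decl (d, id) for a
   callee-local declaration d calls id->copy_decl to build the copy d',
   records d -> d' via insert_decl_map, and remaps the copy's type and
   size trees; any later remap_decl (d, id) simply finds d' in
   ID->decl_map and returns it (unshared unless id->do_not_unshare).  */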
437
438 static tree
439 remap_type_1 (tree type, copy_body_data *id)
440 {
441 tree new_tree, t;
442
443 /* We do need a copy. Build and register it now. If this is a pointer or
444 reference type, remap the designated type and make a new pointer or
445 reference type. */
446 if (TREE_CODE (type) == POINTER_TYPE)
447 {
448 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
457 }
458 else if (TREE_CODE (type) == REFERENCE_TYPE)
459 {
460 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
461 TYPE_MODE (type),
462 TYPE_REF_CAN_ALIAS_ALL (type));
463 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
464 new_tree = build_type_attribute_qual_variant (new_tree,
465 TYPE_ATTRIBUTES (type),
466 TYPE_QUALS (type));
467 insert_decl_map (id, type, new_tree);
468 return new_tree;
469 }
470 else
471 new_tree = copy_node (type);
472
473 insert_decl_map (id, type, new_tree);
474
475 /* This is a new type, not a copy of an old type. Need to reassociate
476 variants. We can handle everything except the main variant lazily. */
477 t = TYPE_MAIN_VARIANT (type);
478 if (type != t)
479 {
480 t = remap_type (t, id);
481 TYPE_MAIN_VARIANT (new_tree) = t;
482 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
483 TYPE_NEXT_VARIANT (t) = new_tree;
484 }
485 else
486 {
487 TYPE_MAIN_VARIANT (new_tree) = new_tree;
488 TYPE_NEXT_VARIANT (new_tree) = NULL;
489 }
490
491 if (TYPE_STUB_DECL (type))
492 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
493
494 /* Lazily create pointer and reference types. */
495 TYPE_POINTER_TO (new_tree) = NULL;
496 TYPE_REFERENCE_TO (new_tree) = NULL;
497
498 /* Copy all types that may contain references to local variables; be sure to
499 preserve sharing between the type and its main variant when possible. */
500 switch (TREE_CODE (new_tree))
501 {
502 case INTEGER_TYPE:
503 case REAL_TYPE:
504 case FIXED_POINT_TYPE:
505 case ENUMERAL_TYPE:
506 case BOOLEAN_TYPE:
507 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
508 {
509 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
510 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
511
512 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
513 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 }
515 else
516 {
517 t = TYPE_MIN_VALUE (new_tree);
518 if (t && TREE_CODE (t) != INTEGER_CST)
519 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
520
521 t = TYPE_MAX_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
524 }
525 return new_tree;
526
527 case FUNCTION_TYPE:
528 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531 else
532 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
534 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
535 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
536 else
537 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
538 return new_tree;
539
540 case ARRAY_TYPE:
541 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
542 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
543 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
544 else
545 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
546
547 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
548 {
549 gcc_checking_assert (TYPE_DOMAIN (type)
550 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
551 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
552 }
553 else
554 {
555 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
556 /* For array bounds where we have decided not to copy over the bounds
557 variable which isn't used in the OpenMP/OpenACC region, change them to
558 an uninitialized VAR_DECL temporary. */
559 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
560 && id->adjust_array_error_bounds
561 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
562 {
563 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
564 DECL_ATTRIBUTES (v)
565 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
566 DECL_ATTRIBUTES (v));
567 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
568 }
569 }
570 break;
571
572 case RECORD_TYPE:
573 case UNION_TYPE:
574 case QUAL_UNION_TYPE:
575 if (TYPE_MAIN_VARIANT (type) != type
576 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
577 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
578 else
579 {
580 tree f, nf = NULL;
581
582 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
583 {
584 t = remap_decl (f, id);
585 DECL_CONTEXT (t) = new_tree;
586 DECL_CHAIN (t) = nf;
587 nf = t;
588 }
589 TYPE_FIELDS (new_tree) = nreverse (nf);
590 }
591 break;
592
593 case OFFSET_TYPE:
594 default:
595 /* Shouldn't have been thought variable sized. */
596 gcc_unreachable ();
597 }
598
599 /* All variants of the type share the same size, so use the already remapped data. */
600 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
601 {
602 tree s = TYPE_SIZE (type);
603 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
604 tree su = TYPE_SIZE_UNIT (type);
605 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
606 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
607 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
608 || s == mvs);
609 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
610 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
611 || su == mvsu);
612 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
613 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
614 }
615 else
616 {
617 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
618 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
619 }
620
621 return new_tree;
622 }
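
/* Variant handling sketch (illustrative): when the remapped type is a
   qualified variant, say a "const" variant of a variably sized array
   type, remap_type_1 first remaps the main variant and then chains the
   new copy onto it through TYPE_MAIN_VARIANT / TYPE_NEXT_VARIANT, so
   the bounds and TYPE_SIZE trees are shared with the already remapped
   main variant instead of being walked a second time.  */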
623
624 /* Helper function for remap_type_2, called through walk_tree. */
625
626 static tree
627 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
628 {
629 copy_body_data *id = (copy_body_data *) data;
630
631 if (TYPE_P (*tp))
632 *walk_subtrees = 0;
633
634 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
635 return *tp;
636
637 return NULL_TREE;
638 }
639
640 /* Return true if TYPE needs to be remapped because remap_decl on any
641 needed embedded decl returns something other than that decl. */
642
643 static bool
644 remap_type_2 (tree type, copy_body_data *id)
645 {
646 tree t;
647
648 #define RETURN_TRUE_IF_VAR(T) \
649 do \
650 { \
651 tree _t = (T); \
652 if (_t) \
653 { \
654 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
655 return true; \
656 if (!TYPE_SIZES_GIMPLIFIED (type) \
657 && walk_tree (&_t, remap_type_3, id, NULL)) \
658 return true; \
659 } \
660 } \
661 while (0)
662
663 switch (TREE_CODE (type))
664 {
665 case POINTER_TYPE:
666 case REFERENCE_TYPE:
667 case FUNCTION_TYPE:
668 case METHOD_TYPE:
669 return remap_type_2 (TREE_TYPE (type), id);
670
671 case INTEGER_TYPE:
672 case REAL_TYPE:
673 case FIXED_POINT_TYPE:
674 case ENUMERAL_TYPE:
675 case BOOLEAN_TYPE:
676 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
677 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
678 return false;
679
680 case ARRAY_TYPE:
681 if (remap_type_2 (TREE_TYPE (type), id)
682 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
683 return true;
684 break;
685
686 case RECORD_TYPE:
687 case UNION_TYPE:
688 case QUAL_UNION_TYPE:
689 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
690 if (TREE_CODE (t) == FIELD_DECL)
691 {
692 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
693 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
694 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
695 if (TREE_CODE (type) == QUAL_UNION_TYPE)
696 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
697 }
698 break;
699
700 default:
701 return false;
702 }
703
704 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
705 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
706 return false;
707 #undef RETURN_TRUE_IF_VAR
708 }
709
710 tree
711 remap_type (tree type, copy_body_data *id)
712 {
713 tree *node;
714 tree tmp;
715
716 if (type == NULL)
717 return type;
718
719 /* See if we have remapped this type. */
720 node = id->decl_map->get (type);
721 if (node)
722 return *node;
723
724 /* The type only needs remapping if it's variably modified. */
725 if (! variably_modified_type_p (type, id->src_fn)
726 /* Don't remap if the copy_decl method doesn't always return a new
727 decl and, for all embedded decls, returns the passed-in decl. */
728 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
729 {
730 insert_decl_map (id, type, type);
731 return type;
732 }
733
734 id->remapping_type_depth++;
735 tmp = remap_type_1 (type, id);
736 id->remapping_type_depth--;
737
738 return tmp;
739 }
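
/* For example (a hedged sketch): when inlining

       void f (int n) { char buf[n]; ... }

   the type of buf is variably modified because its bound depends on n,
   so remap_type builds a fresh array type whose domain refers to the
   remapped copies of the bound expressions; a fixed-size type such as
   char[16] is not variably modified and is simply identity-mapped.  */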
740
741 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
742
743 static bool
744 can_be_nonlocal (tree decl, copy_body_data *id)
745 {
746 /* We cannot duplicate function decls. */
747 if (TREE_CODE (decl) == FUNCTION_DECL)
748 return true;
749
750 /* Local static vars must be non-local or we get multiple declaration
751 problems. */
752 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
753 return true;
754
755 return false;
756 }
757
758 static tree
759 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
760 copy_body_data *id)
761 {
762 tree old_var;
763 tree new_decls = NULL_TREE;
764
765 /* Remap its variables. */
766 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
767 {
768 tree new_var;
769
770 if (can_be_nonlocal (old_var, id))
771 {
772 /* We need to add this variable to the local decls as otherwise
773 nothing else will do so. */
774 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
775 add_local_decl (cfun, old_var);
776 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
777 && !DECL_IGNORED_P (old_var)
778 && nonlocalized_list)
779 vec_safe_push (*nonlocalized_list, old_var);
780 continue;
781 }
782
783 /* Remap the variable. */
784 new_var = remap_decl (old_var, id);
785
786 /* If we didn't remap this variable, we can't mess with its
787 TREE_CHAIN. If we remapped this variable to the return slot, it's
788 already declared somewhere else, so don't declare it here. */
789
790 if (new_var == id->retvar)
791 ;
792 else if (!new_var)
793 {
794 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
795 && !DECL_IGNORED_P (old_var)
796 && nonlocalized_list)
797 vec_safe_push (*nonlocalized_list, old_var);
798 }
799 else
800 {
801 gcc_assert (DECL_P (new_var));
802 DECL_CHAIN (new_var) = new_decls;
803 new_decls = new_var;
804
805 /* Also copy value-expressions. */
806 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
807 {
808 tree tem = DECL_VALUE_EXPR (new_var);
809 bool old_regimplify = id->regimplify;
810 id->remapping_type_depth++;
811 walk_tree (&tem, copy_tree_body_r, id, NULL);
812 id->remapping_type_depth--;
813 id->regimplify = old_regimplify;
814 SET_DECL_VALUE_EXPR (new_var, tem);
815 }
816 }
817 }
818
819 return nreverse (new_decls);
820 }
821
822 /* Copy the BLOCK to contain remapped versions of the variables
823 therein, and hook the new block into the block tree. */
824
825 static void
826 remap_block (tree *block, copy_body_data *id)
827 {
828 tree old_block;
829 tree new_block;
830
831 /* Make the new block. */
832 old_block = *block;
833 new_block = make_node (BLOCK);
834 TREE_USED (new_block) = TREE_USED (old_block);
835 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
836 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
837 BLOCK_NONLOCALIZED_VARS (new_block)
838 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
839 *block = new_block;
840
841 /* Remap its variables. */
842 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
843 &BLOCK_NONLOCALIZED_VARS (new_block),
844 id);
845
846 if (id->transform_lang_insert_block)
847 id->transform_lang_insert_block (new_block);
848
849 /* Remember the remapped block. */
850 insert_decl_map (id, old_block, new_block);
851 }
852
853 /* Copy the whole block tree and root it in id->block. */
854
855 static tree
856 remap_blocks (tree block, copy_body_data *id)
857 {
858 tree t;
859 tree new_tree = block;
860
861 if (!block)
862 return NULL;
863
864 remap_block (&new_tree, id);
865 gcc_assert (new_tree != block);
866 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
867 prepend_lexical_block (new_tree, remap_blocks (t, id));
868 /* Blocks are in arbitrary order, but make things slightly prettier and do
869 not swap their order when producing a copy. */
870 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
871 return new_tree;
872 }
873
874 /* Remap the block tree rooted at BLOCK to nothing. */
875
876 static void
877 remap_blocks_to_null (tree block, copy_body_data *id)
878 {
879 tree t;
880 insert_decl_map (id, block, NULL_TREE);
881 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
882 remap_blocks_to_null (t, id);
883 }
884
885 /* Remap the location info LOCUS. */
886
887 static location_t
888 remap_location (location_t locus, copy_body_data *id)
889 {
890 if (LOCATION_BLOCK (locus))
891 {
892 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
893 gcc_assert (n);
894 if (*n)
895 return set_block (locus, *n);
896 }
897
898 locus = LOCATION_LOCUS (locus);
899
900 if (locus != UNKNOWN_LOCATION && id->block)
901 return set_block (locus, id->block);
902
903 return locus;
904 }
905
906 static void
907 copy_statement_list (tree *tp)
908 {
909 tree_stmt_iterator oi, ni;
910 tree new_tree;
911
912 new_tree = alloc_stmt_list ();
913 ni = tsi_start (new_tree);
914 oi = tsi_start (*tp);
915 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
916 *tp = new_tree;
917
918 for (; !tsi_end_p (oi); tsi_next (&oi))
919 {
920 tree stmt = tsi_stmt (oi);
921 if (TREE_CODE (stmt) == STATEMENT_LIST)
922 /* This copy is not redundant; tsi_link_after will smash this
923 STATEMENT_LIST into the end of the one we're building, and we
924 don't want to do that with the original. */
925 copy_statement_list (&stmt);
926 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
927 }
928 }
929
930 static void
931 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
932 {
933 tree block = BIND_EXPR_BLOCK (*tp);
934 /* Copy (and replace) the statement. */
935 copy_tree_r (tp, walk_subtrees, NULL);
936 if (block)
937 {
938 remap_block (&block, id);
939 BIND_EXPR_BLOCK (*tp) = block;
940 }
941
942 if (BIND_EXPR_VARS (*tp))
943 /* This will remap a lot of the same decls again, but this should be
944 harmless. */
945 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
946 }
947
948
949 /* Create a new gimple_seq by remapping all the statements in BODY
950 using the inlining information in ID. */
951
952 static gimple_seq
953 remap_gimple_seq (gimple_seq body, copy_body_data *id)
954 {
955 gimple_stmt_iterator si;
956 gimple_seq new_body = NULL;
957
958 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
959 {
960 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
961 gimple_seq_add_seq (&new_body, new_stmts);
962 }
963
964 return new_body;
965 }
966
967
968 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
969 block using the mapping information in ID. */
970
971 static gimple *
972 copy_gimple_bind (gbind *stmt, copy_body_data *id)
973 {
974 gimple *new_bind;
975 tree new_block, new_vars;
976 gimple_seq body, new_body;
977
978 /* Copy the statement. Note that we purposely don't use copy_stmt
979 here because we need to remap statements as we copy. */
980 body = gimple_bind_body (stmt);
981 new_body = remap_gimple_seq (body, id);
982
983 new_block = gimple_bind_block (stmt);
984 if (new_block)
985 remap_block (&new_block, id);
986
987 /* This will remap a lot of the same decls again, but this should be
988 harmless. */
989 new_vars = gimple_bind_vars (stmt);
990 if (new_vars)
991 new_vars = remap_decls (new_vars, NULL, id);
992
993 new_bind = gimple_build_bind (new_vars, new_body, new_block);
994
995 return new_bind;
996 }
997
998 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
999
1000 static bool
1001 is_parm (tree decl)
1002 {
1003 if (TREE_CODE (decl) == SSA_NAME)
1004 {
1005 decl = SSA_NAME_VAR (decl);
1006 if (!decl)
1007 return false;
1008 }
1009
1010 return (TREE_CODE (decl) == PARM_DECL);
1011 }
1012
1013 /* Remap the dependence CLIQUE from the source to the destination function
1014 as specified in ID. */
1015
1016 static unsigned short
1017 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1018 {
1019 if (clique == 0 || processing_debug_stmt)
1020 return 0;
1021 if (!id->dependence_map)
1022 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1023 bool existed;
1024 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1025 if (!existed)
1026 {
1027 /* Clique 1 is reserved for local ones set by PTA. */
1028 if (cfun->last_clique == 0)
1029 cfun->last_clique = 1;
1030 newc = ++cfun->last_clique;
1031 }
1032 return newc;
1033 }
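
/* Illustrative sketch: if two copied MEM_REFs both carried
   MR_DEPENDENCE_CLIQUE == 3 in the source function, the first call
   here allocates a fresh clique number N in the destination function
   and caches 3 -> N in ID->dependence_map, and the second call reuses
   the same N, so the non-aliasing relationship among the copied
   references is preserved without clashing with cliques the caller
   already uses.  */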
1034
1035 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1036 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1037 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1038 recursing into the child nodes of *TP. */
1039
1040 static tree
1041 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1042 {
1043 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1044 copy_body_data *id = (copy_body_data *) wi_p->info;
1045 tree fn = id->src_fn;
1046
1047 /* For recursive invocations this is no longer the LHS itself. */
1048 bool is_lhs = wi_p->is_lhs;
1049 wi_p->is_lhs = false;
1050
1051 if (TREE_CODE (*tp) == SSA_NAME)
1052 {
1053 *tp = remap_ssa_name (*tp, id);
1054 *walk_subtrees = 0;
1055 if (is_lhs)
1056 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1057 return NULL;
1058 }
1059 else if (auto_var_in_fn_p (*tp, fn))
1060 {
1061 /* Local variables and labels need to be replaced by equivalent
1062 variables. We don't want to copy static variables; there's
1063 only one of those, no matter how many times we inline the
1064 containing function. Similarly for globals from an outer
1065 function. */
1066 tree new_decl;
1067
1068 /* Remap the declaration. */
1069 new_decl = remap_decl (*tp, id);
1070 gcc_assert (new_decl);
1071 /* Replace this variable with the copy. */
1072 STRIP_TYPE_NOPS (new_decl);
1073 /* ??? The C++ frontend uses void * pointer zero to initialize
1074 any other type. This confuses the middle-end type verification.
1075 As cloned bodies do not go through gimplification again, the fixup
1076 there doesn't trigger. */
1077 if (TREE_CODE (new_decl) == INTEGER_CST
1078 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1079 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1080 *tp = new_decl;
1081 *walk_subtrees = 0;
1082 }
1083 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1084 gcc_unreachable ();
1085 else if (TREE_CODE (*tp) == SAVE_EXPR)
1086 gcc_unreachable ();
1087 else if (TREE_CODE (*tp) == LABEL_DECL
1088 && (!DECL_CONTEXT (*tp)
1089 || decl_function_context (*tp) == id->src_fn))
1090 /* These may need to be remapped for EH handling. */
1091 *tp = remap_decl (*tp, id);
1092 else if (TREE_CODE (*tp) == FIELD_DECL)
1093 {
1094 /* If the enclosing record type is variably_modified_type_p, the field
1095 has already been remapped. Otherwise, it need not be. */
1096 tree *n = id->decl_map->get (*tp);
1097 if (n)
1098 *tp = *n;
1099 *walk_subtrees = 0;
1100 }
1101 else if (TYPE_P (*tp))
1102 /* Types may need remapping as well. */
1103 *tp = remap_type (*tp, id);
1104 else if (CONSTANT_CLASS_P (*tp))
1105 {
1106 /* If this is a constant, we have to copy the node iff the type
1107 will be remapped. copy_tree_r will not copy a constant. */
1108 tree new_type = remap_type (TREE_TYPE (*tp), id);
1109
1110 if (new_type == TREE_TYPE (*tp))
1111 *walk_subtrees = 0;
1112
1113 else if (TREE_CODE (*tp) == INTEGER_CST)
1114 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1115 else
1116 {
1117 *tp = copy_node (*tp);
1118 TREE_TYPE (*tp) = new_type;
1119 }
1120 }
1121 else
1122 {
1123 /* Otherwise, just copy the node. Note that copy_tree_r already
1124 knows not to copy VAR_DECLs, etc., so this is safe. */
1125
1126 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1127 {
1128 /* We need to re-canonicalize MEM_REFs from inline substitutions
1129 that can happen when a pointer argument is an ADDR_EXPR.
1130 Recurse here manually to allow that. */
1131 tree ptr = TREE_OPERAND (*tp, 0);
1132 tree type = remap_type (TREE_TYPE (*tp), id);
1133 tree old = *tp;
1134 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1135 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1136 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1137 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1138 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1139 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1140 {
1141 MR_DEPENDENCE_CLIQUE (*tp)
1142 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1143 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1144 }
1145 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1146 remapped a parameter as the property might be valid only
1147 for the parameter itself. */
1148 if (TREE_THIS_NOTRAP (old)
1149 && (!is_parm (TREE_OPERAND (old, 0))
1150 || (!id->transform_parameter && is_parm (ptr))))
1151 TREE_THIS_NOTRAP (*tp) = 1;
1152 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1153 *walk_subtrees = 0;
1154 return NULL;
1155 }
1156
1157 /* Here is the "usual case". Copy this tree node, and then
1158 tweak some special cases. */
1159 copy_tree_r (tp, walk_subtrees, NULL);
1160
1161 if (TREE_CODE (*tp) != OMP_CLAUSE)
1162 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1163
1164 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1165 {
1166 /* The copied TARGET_EXPR has never been expanded, even if the
1167 original node was expanded already. */
1168 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1169 TREE_OPERAND (*tp, 3) = NULL_TREE;
1170 }
1171 else if (TREE_CODE (*tp) == ADDR_EXPR)
1172 {
1173 /* Variable substitution need not be simple. In particular,
1174 the MEM_REF substitution above. Make sure that
1175 TREE_CONSTANT and friends are up-to-date. */
1176 int invariant = is_gimple_min_invariant (*tp);
1177 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1178 recompute_tree_invariant_for_addr_expr (*tp);
1179
1180 /* If this used to be invariant, but is not any longer,
1181 then regimplification is probably needed. */
1182 if (invariant && !is_gimple_min_invariant (*tp))
1183 id->regimplify = true;
1184
1185 *walk_subtrees = 0;
1186 }
1187 }
1188
1189 /* Update the TREE_BLOCK for the cloned expr. */
1190 if (EXPR_P (*tp))
1191 {
1192 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1193 tree old_block = TREE_BLOCK (*tp);
1194 if (old_block)
1195 {
1196 tree *n;
1197 n = id->decl_map->get (TREE_BLOCK (*tp));
1198 if (n)
1199 new_block = *n;
1200 }
1201 TREE_SET_BLOCK (*tp, new_block);
1202 }
1203
1204 /* Keep iterating. */
1205 return NULL_TREE;
1206 }
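
/* MEM_REF re-canonicalization sketch (illustrative): if the caller
   passes &a for a pointer parameter p, then a copied reference such as

       ... = MEM[(int *)p_3 + 8];

   has p_3 remapped to &a, and the fold_build2 call above re-canonicalizes
   the result (e.g. folding the constant offset into a MEM_REF based
   directly on &a) rather than leaving the raw substitution in the IL.  */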
1207
1208
1209 /* Called from copy_tree_body via walk_tree. DATA is really a
1210 `copy_body_data *'. */
1211
1212 tree
1213 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1214 {
1215 copy_body_data *id = (copy_body_data *) data;
1216 tree fn = id->src_fn;
1217 tree new_block;
1218
1219 /* Begin by recognizing trees that we'll completely rewrite for the
1220 inlining context. Our output for these trees is completely
1221 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1222 into an edge). Further down, we'll handle trees that get
1223 duplicated and/or tweaked. */
1224
1225 /* When requested, RETURN_EXPRs should be transformed to just the
1226 contained MODIFY_EXPR. The branch semantics of the return will
1227 be handled elsewhere by manipulating the CFG rather than a statement. */
1228 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1229 {
1230 tree assignment = TREE_OPERAND (*tp, 0);
1231
1232 /* If we're returning something, just turn that into an
1233 assignment into the equivalent of the original RESULT_DECL.
1234 If the "assignment" is just the result decl, the result
1235 decl has already been set (e.g. a recent "foo (&result_decl,
1236 ...)"); just toss the entire RETURN_EXPR. */
1237 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1238 {
1239 /* Replace the RETURN_EXPR with (a copy of) the
1240 MODIFY_EXPR hanging underneath. */
1241 *tp = copy_node (assignment);
1242 }
1243 else /* Else the RETURN_EXPR returns no value. */
1244 {
1245 *tp = NULL;
1246 return (tree) (void *)1;
1247 }
1248 }
1249 else if (TREE_CODE (*tp) == SSA_NAME)
1250 {
1251 *tp = remap_ssa_name (*tp, id);
1252 *walk_subtrees = 0;
1253 return NULL;
1254 }
1255
1256 /* Local variables and labels need to be replaced by equivalent
1257 variables. We don't want to copy static variables; there's only
1258 one of those, no matter how many times we inline the containing
1259 function. Similarly for globals from an outer function. */
1260 else if (auto_var_in_fn_p (*tp, fn))
1261 {
1262 tree new_decl;
1263
1264 /* Remap the declaration. */
1265 new_decl = remap_decl (*tp, id);
1266 gcc_assert (new_decl);
1267 /* Replace this variable with the copy. */
1268 STRIP_TYPE_NOPS (new_decl);
1269 *tp = new_decl;
1270 *walk_subtrees = 0;
1271 }
1272 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1273 copy_statement_list (tp);
1274 else if (TREE_CODE (*tp) == SAVE_EXPR
1275 || TREE_CODE (*tp) == TARGET_EXPR)
1276 remap_save_expr (tp, id->decl_map, walk_subtrees);
1277 else if (TREE_CODE (*tp) == LABEL_DECL
1278 && (! DECL_CONTEXT (*tp)
1279 || decl_function_context (*tp) == id->src_fn))
1280 /* These may need to be remapped for EH handling. */
1281 *tp = remap_decl (*tp, id);
1282 else if (TREE_CODE (*tp) == BIND_EXPR)
1283 copy_bind_expr (tp, walk_subtrees, id);
1284 /* Types may need remapping as well. */
1285 else if (TYPE_P (*tp))
1286 *tp = remap_type (*tp, id);
1287
1288 /* If this is a constant, we have to copy the node iff the type will be
1289 remapped. copy_tree_r will not copy a constant. */
1290 else if (CONSTANT_CLASS_P (*tp))
1291 {
1292 tree new_type = remap_type (TREE_TYPE (*tp), id);
1293
1294 if (new_type == TREE_TYPE (*tp))
1295 *walk_subtrees = 0;
1296
1297 else if (TREE_CODE (*tp) == INTEGER_CST)
1298 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1299 else
1300 {
1301 *tp = copy_node (*tp);
1302 TREE_TYPE (*tp) = new_type;
1303 }
1304 }
1305
1306 /* Otherwise, just copy the node. Note that copy_tree_r already
1307 knows not to copy VAR_DECLs, etc., so this is safe. */
1308 else
1309 {
1310 /* Here we handle trees that are not completely rewritten.
1311 First we detect some inlining-induced bogosities for
1312 discarding. */
1313 if (TREE_CODE (*tp) == MODIFY_EXPR
1314 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1315 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1316 {
1317 /* Some assignments VAR = VAR; don't generate any rtl code
1318 and thus don't count as variable modification. Avoid
1319 keeping bogosities like 0 = 0. */
1320 tree decl = TREE_OPERAND (*tp, 0), value;
1321 tree *n;
1322
1323 n = id->decl_map->get (decl);
1324 if (n)
1325 {
1326 value = *n;
1327 STRIP_TYPE_NOPS (value);
1328 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1329 {
1330 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1331 return copy_tree_body_r (tp, walk_subtrees, data);
1332 }
1333 }
1334 }
1335 else if (TREE_CODE (*tp) == INDIRECT_REF)
1336 {
1337 /* Get rid of *& from inline substitutions that can happen when a
1338 pointer argument is an ADDR_EXPR. */
1339 tree decl = TREE_OPERAND (*tp, 0);
1340 tree *n = id->decl_map->get (decl);
1341 if (n)
1342 {
1343 /* If we happen to get an ADDR_EXPR in n->value, strip
1344 it manually here as we'll eventually get ADDR_EXPRs
1345 which lie about the types they point to. In this case
1346 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1347 but we absolutely rely on that. As fold_indirect_ref
1348 does other useful transformations, try that first, though. */
1349 tree type = TREE_TYPE (*tp);
1350 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1351 tree old = *tp;
1352 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1353 if (! *tp)
1354 {
1355 type = remap_type (type, id);
1356 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1357 {
1358 *tp
1359 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1360 /* ??? We should either assert here or build
1361 a VIEW_CONVERT_EXPR instead of blindly leaking
1362 incompatible types to our IL. */
1363 if (! *tp)
1364 *tp = TREE_OPERAND (ptr, 0);
1365 }
1366 else
1367 {
1368 *tp = build1 (INDIRECT_REF, type, ptr);
1369 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1370 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1371 TREE_READONLY (*tp) = TREE_READONLY (old);
1372 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1373 have remapped a parameter as the property might be
1374 valid only for the parameter itself. */
1375 if (TREE_THIS_NOTRAP (old)
1376 && (!is_parm (TREE_OPERAND (old, 0))
1377 || (!id->transform_parameter && is_parm (ptr))))
1378 TREE_THIS_NOTRAP (*tp) = 1;
1379 }
1380 }
1381 *walk_subtrees = 0;
1382 return NULL;
1383 }
1384 }
1385 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1386 {
1387 /* We need to re-canonicalize MEM_REFs from inline substitutions
1388 that can happen when a pointer argument is an ADDR_EXPR.
1389 Recurse here manually to allow that. */
1390 tree ptr = TREE_OPERAND (*tp, 0);
1391 tree type = remap_type (TREE_TYPE (*tp), id);
1392 tree old = *tp;
1393 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1394 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1395 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1396 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1397 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1398 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1399 {
1400 MR_DEPENDENCE_CLIQUE (*tp)
1401 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1402 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1403 }
1404 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1405 remapped a parameter as the property might be valid only
1406 for the parameter itself. */
1407 if (TREE_THIS_NOTRAP (old)
1408 && (!is_parm (TREE_OPERAND (old, 0))
1409 || (!id->transform_parameter && is_parm (ptr))))
1410 TREE_THIS_NOTRAP (*tp) = 1;
1411 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1412 *walk_subtrees = 0;
1413 return NULL;
1414 }
1415
1416 /* Here is the "usual case". Copy this tree node, and then
1417 tweak some special cases. */
1418 copy_tree_r (tp, walk_subtrees, NULL);
1419
1420 /* If EXPR has a block defined, map it to the newly constructed block.
1421 When inlining we want EXPRs without a block to appear in the block
1422 of the function call if we are not remapping a type. */
1423 if (EXPR_P (*tp))
1424 {
1425 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1426 if (TREE_BLOCK (*tp))
1427 {
1428 tree *n;
1429 n = id->decl_map->get (TREE_BLOCK (*tp));
1430 if (n)
1431 new_block = *n;
1432 }
1433 TREE_SET_BLOCK (*tp, new_block);
1434 }
1435
1436 if (TREE_CODE (*tp) != OMP_CLAUSE)
1437 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1438
1439 /* The copied TARGET_EXPR has never been expanded, even if the
1440 original node was expanded already. */
1441 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1442 {
1443 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1444 TREE_OPERAND (*tp, 3) = NULL_TREE;
1445 }
1446
1447 /* Variable substitution need not be simple. In particular, the
1448 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1449 and friends are up-to-date. */
1450 else if (TREE_CODE (*tp) == ADDR_EXPR)
1451 {
1452 int invariant = is_gimple_min_invariant (*tp);
1453 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1454
1455 /* Handle the case where we substituted an INDIRECT_REF
1456 into the operand of the ADDR_EXPR. */
1457 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1458 && !id->do_not_fold)
1459 {
1460 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1461 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1462 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1463 *tp = t;
1464 }
1465 else
1466 recompute_tree_invariant_for_addr_expr (*tp);
1467
1468 /* If this used to be invariant, but is not any longer,
1469 then regimplification is probably needed. */
1470 if (invariant && !is_gimple_min_invariant (*tp))
1471 id->regimplify = true;
1472
1473 *walk_subtrees = 0;
1474 }
1475 }
1476
1477 /* Keep iterating. */
1478 return NULL_TREE;
1479 }
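
/* A sketch of the *& elimination in the INDIRECT_REF case above
   (illustrative only):

       int callee (int *p) { return *p; }
       ...
       r = callee (&v);

   After the parameter substitution p -> &v, the copied *p would read
   *(&v); gimple_fold_indirect_ref / fold_indirect_ref_1 turn it back
   into a plain use of v.  */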
1480
1481 /* Helper for remap_gimple_stmt. Given an EH region number for the
1482 source function, map that to the duplicate EH region number in
1483 the destination function. */
1484
1485 static int
1486 remap_eh_region_nr (int old_nr, copy_body_data *id)
1487 {
1488 eh_region old_r, new_r;
1489
1490 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1491 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1492
1493 return new_r->index;
1494 }
1495
1496 /* Similar, but operate on INTEGER_CSTs. */
1497
1498 static tree
1499 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1500 {
1501 int old_nr, new_nr;
1502
1503 old_nr = tree_to_shwi (old_t_nr);
1504 new_nr = remap_eh_region_nr (old_nr, id);
1505
1506 return build_int_cst (integer_type_node, new_nr);
1507 }
1508
1509 /* Helper for copy_bb. Remap statement STMT using the inlining
1510 information in ID. Return the new statement copy. */
1511
1512 static gimple_seq
1513 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1514 {
1515 gimple *copy = NULL;
1516 struct walk_stmt_info wi;
1517 bool skip_first = false;
1518 gimple_seq stmts = NULL;
1519
1520 if (is_gimple_debug (stmt)
1521 && (gimple_debug_nonbind_marker_p (stmt)
1522 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1523 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1524 return NULL;
1525
1526 /* Begin by recognizing trees that we'll completely rewrite for the
1527 inlining context. Our output for these trees is completely
1528 different from our input (e.g. RETURN_EXPR is deleted and morphs
1529 into an edge). Further down, we'll handle trees that get
1530 duplicated and/or tweaked. */
1531
1532 /* When requested, GIMPLE_RETURN should be transformed to just the
1533 contained GIMPLE_ASSIGN. The branch semantics of the return will
1534 be handled elsewhere by manipulating the CFG rather than the
1535 statement. */
1536 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1537 {
1538 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1539
1540 /* If we're returning something, just turn that into an
1541 assignment to the equivalent of the original RESULT_DECL.
1542 If RETVAL is just the result decl, the result decl has
1543 already been set (e.g. a recent "foo (&result_decl, ...)");
1544 just toss the entire GIMPLE_RETURN. Likewise for when the
1545 call doesn't want the return value. */
1546 if (retval
1547 && (TREE_CODE (retval) != RESULT_DECL
1548 && (!id->call_stmt
1549 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1550 && (TREE_CODE (retval) != SSA_NAME
1551 || ! SSA_NAME_VAR (retval)
1552 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1553 {
1554 copy = gimple_build_assign (id->do_not_unshare
1555 ? id->retvar : unshare_expr (id->retvar),
1556 retval);
1557 /* id->retvar is already substituted. Skip it on later remapping. */
1558 skip_first = true;
1559 }
1560 else
1561 return NULL;
1562 }
1563 else if (gimple_has_substatements (stmt))
1564 {
1565 gimple_seq s1, s2;
1566
1567 /* When cloning bodies from the C++ front end, we will be handed bodies
1568 in High GIMPLE form. Handle here all the High GIMPLE statements that
1569 have embedded statements. */
1570 switch (gimple_code (stmt))
1571 {
1572 case GIMPLE_BIND:
1573 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1574 break;
1575
1576 case GIMPLE_CATCH:
1577 {
1578 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1579 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1580 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1581 }
1582 break;
1583
1584 case GIMPLE_EH_FILTER:
1585 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1586 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1587 break;
1588
1589 case GIMPLE_TRY:
1590 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1591 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1592 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1593 break;
1594
1595 case GIMPLE_WITH_CLEANUP_EXPR:
1596 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1597 copy = gimple_build_wce (s1);
1598 break;
1599
1600 case GIMPLE_OMP_PARALLEL:
1601 {
1602 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1603 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1604 copy = gimple_build_omp_parallel
1605 (s1,
1606 gimple_omp_parallel_clauses (omp_par_stmt),
1607 gimple_omp_parallel_child_fn (omp_par_stmt),
1608 gimple_omp_parallel_data_arg (omp_par_stmt));
1609 }
1610 break;
1611
1612 case GIMPLE_OMP_TASK:
1613 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1614 copy = gimple_build_omp_task
1615 (s1,
1616 gimple_omp_task_clauses (stmt),
1617 gimple_omp_task_child_fn (stmt),
1618 gimple_omp_task_data_arg (stmt),
1619 gimple_omp_task_copy_fn (stmt),
1620 gimple_omp_task_arg_size (stmt),
1621 gimple_omp_task_arg_align (stmt));
1622 break;
1623
1624 case GIMPLE_OMP_FOR:
1625 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1626 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1627 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1628 gimple_omp_for_clauses (stmt),
1629 gimple_omp_for_collapse (stmt), s2);
1630 {
1631 size_t i;
1632 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1633 {
1634 gimple_omp_for_set_index (copy, i,
1635 gimple_omp_for_index (stmt, i));
1636 gimple_omp_for_set_initial (copy, i,
1637 gimple_omp_for_initial (stmt, i));
1638 gimple_omp_for_set_final (copy, i,
1639 gimple_omp_for_final (stmt, i));
1640 gimple_omp_for_set_incr (copy, i,
1641 gimple_omp_for_incr (stmt, i));
1642 gimple_omp_for_set_cond (copy, i,
1643 gimple_omp_for_cond (stmt, i));
1644 }
1645 }
1646 break;
1647
1648 case GIMPLE_OMP_MASTER:
1649 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1650 copy = gimple_build_omp_master (s1);
1651 break;
1652
1653 case GIMPLE_OMP_TASKGROUP:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_taskgroup
1656 (s1, gimple_omp_taskgroup_clauses (stmt));
1657 break;
1658
1659 case GIMPLE_OMP_ORDERED:
1660 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1661 copy = gimple_build_omp_ordered
1662 (s1,
1663 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1664 break;
1665
1666 case GIMPLE_OMP_SCAN:
1667 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1668 copy = gimple_build_omp_scan
1669 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1670 break;
1671
1672 case GIMPLE_OMP_SECTION:
1673 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1674 copy = gimple_build_omp_section (s1);
1675 break;
1676
1677 case GIMPLE_OMP_SECTIONS:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_sections
1680 (s1, gimple_omp_sections_clauses (stmt));
1681 break;
1682
1683 case GIMPLE_OMP_SINGLE:
1684 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1685 copy = gimple_build_omp_single
1686 (s1, gimple_omp_single_clauses (stmt));
1687 break;
1688
1689 case GIMPLE_OMP_TARGET:
1690 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1691 copy = gimple_build_omp_target
1692 (s1, gimple_omp_target_kind (stmt),
1693 gimple_omp_target_clauses (stmt));
1694 break;
1695
1696 case GIMPLE_OMP_TEAMS:
1697 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1698 copy = gimple_build_omp_teams
1699 (s1, gimple_omp_teams_clauses (stmt));
1700 break;
1701
1702 case GIMPLE_OMP_CRITICAL:
1703 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1704 copy = gimple_build_omp_critical (s1,
1705 gimple_omp_critical_name
1706 (as_a <gomp_critical *> (stmt)),
1707 gimple_omp_critical_clauses
1708 (as_a <gomp_critical *> (stmt)));
1709 break;
1710
1711 case GIMPLE_TRANSACTION:
1712 {
1713 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1714 gtransaction *new_trans_stmt;
1715 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1716 id);
1717 copy = new_trans_stmt = gimple_build_transaction (s1);
1718 gimple_transaction_set_subcode (new_trans_stmt,
1719 gimple_transaction_subcode (old_trans_stmt));
1720 gimple_transaction_set_label_norm (new_trans_stmt,
1721 gimple_transaction_label_norm (old_trans_stmt));
1722 gimple_transaction_set_label_uninst (new_trans_stmt,
1723 gimple_transaction_label_uninst (old_trans_stmt));
1724 gimple_transaction_set_label_over (new_trans_stmt,
1725 gimple_transaction_label_over (old_trans_stmt));
1726 }
1727 break;
1728
1729 default:
1730 gcc_unreachable ();
1731 }
1732 }
1733 else
1734 {
1735 if (gimple_assign_copy_p (stmt)
1736 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1737 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1738 {
1739 /* Here we handle statements that are not completely rewritten.
1740 First we detect some inlining-induced bogosities for
1741 discarding. */
1742
1743 /* Some assignments VAR = VAR; don't generate any rtl code
1744 and thus don't count as variable modification. Avoid
1745 keeping bogosities like 0 = 0. */
1746 tree decl = gimple_assign_lhs (stmt), value;
1747 tree *n;
1748
1749 n = id->decl_map->get (decl);
1750 if (n)
1751 {
1752 value = *n;
1753 STRIP_TYPE_NOPS (value);
1754 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1755 return NULL;
1756 }
1757 }
1758
1759 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1760 in a block that we aren't copying during tree_function_versioning,
1761 just drop the clobber stmt. */
1762 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1763 {
1764 tree lhs = gimple_assign_lhs (stmt);
1765 if (TREE_CODE (lhs) == MEM_REF
1766 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1767 {
1768 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1769 if (gimple_bb (def_stmt)
1770 && !bitmap_bit_p (id->blocks_to_copy,
1771 gimple_bb (def_stmt)->index))
1772 return NULL;
1773 }
1774 }
1775
1776 /* We do not allow CLOBBERs of handled components. In case
1777 returned value is stored via such handled component, remove
1778 the clobber so stmt verifier is happy. */
1779 if (gimple_clobber_p (stmt)
1780 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1781 {
1782 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1783 if (!DECL_P (remapped)
1784 && TREE_CODE (remapped) != MEM_REF)
1785 return NULL;
1786 }
1787
1788 if (gimple_debug_bind_p (stmt))
1789 {
1790 gdebug *copy
1791 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1792 gimple_debug_bind_get_value (stmt),
1793 stmt);
1794 if (id->reset_location)
1795 gimple_set_location (copy, input_location);
1796 id->debug_stmts.safe_push (copy);
1797 gimple_seq_add_stmt (&stmts, copy);
1798 return stmts;
1799 }
1800 if (gimple_debug_source_bind_p (stmt))
1801 {
1802 gdebug *copy = gimple_build_debug_source_bind
1803 (gimple_debug_source_bind_get_var (stmt),
1804 gimple_debug_source_bind_get_value (stmt),
1805 stmt);
1806 if (id->reset_location)
1807 gimple_set_location (copy, input_location);
1808 id->debug_stmts.safe_push (copy);
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1811 }
1812 if (gimple_debug_nonbind_marker_p (stmt))
1813 {
1814 /* If the inlined function has too many debug markers,
1815 don't copy them. */
1816 if (id->src_cfun->debug_marker_count
1817 > param_max_debug_marker_count)
1818 return stmts;
1819
1820 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1821 if (id->reset_location)
1822 gimple_set_location (copy, input_location);
1823 id->debug_stmts.safe_push (copy);
1824 gimple_seq_add_stmt (&stmts, copy);
1825 return stmts;
1826 }
1827
1828 /* Create a new deep copy of the statement. */
1829 copy = gimple_copy (stmt);
1830
1831 /* Clear flags that need revisiting. */
1832 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1833 {
1834 if (gimple_call_tail_p (call_stmt))
1835 gimple_call_set_tail (call_stmt, false);
1836 if (gimple_call_from_thunk_p (call_stmt))
1837 gimple_call_set_from_thunk (call_stmt, false);
1838 if (gimple_call_internal_p (call_stmt))
1839 switch (gimple_call_internal_fn (call_stmt))
1840 {
1841 case IFN_GOMP_SIMD_LANE:
1842 case IFN_GOMP_SIMD_VF:
1843 case IFN_GOMP_SIMD_LAST_LANE:
1844 case IFN_GOMP_SIMD_ORDERED_START:
1845 case IFN_GOMP_SIMD_ORDERED_END:
1846 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1847 break;
1848 default:
1849 break;
1850 }
1851 }
1852
1853 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1854 RESX and EH_DISPATCH. */
1855 if (id->eh_map)
1856 switch (gimple_code (copy))
1857 {
1858 case GIMPLE_CALL:
1859 {
1860 tree r, fndecl = gimple_call_fndecl (copy);
1861 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1862 switch (DECL_FUNCTION_CODE (fndecl))
1863 {
1864 case BUILT_IN_EH_COPY_VALUES:
1865 r = gimple_call_arg (copy, 1);
1866 r = remap_eh_region_tree_nr (r, id);
1867 gimple_call_set_arg (copy, 1, r);
1868 /* FALLTHRU */
1869
1870 case BUILT_IN_EH_POINTER:
1871 case BUILT_IN_EH_FILTER:
1872 r = gimple_call_arg (copy, 0);
1873 r = remap_eh_region_tree_nr (r, id);
1874 gimple_call_set_arg (copy, 0, r);
1875 break;
1876
1877 default:
1878 break;
1879 }
1880
1881 /* Reset alias info if we didn't apply measures to
1882 keep it valid over inlining by setting DECL_PT_UID. */
1883 if (!id->src_cfun->gimple_df
1884 || !id->src_cfun->gimple_df->ipa_pta)
1885 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1886 }
1887 break;
1888
1889 case GIMPLE_RESX:
1890 {
1891 gresx *resx_stmt = as_a <gresx *> (copy);
1892 int r = gimple_resx_region (resx_stmt);
1893 r = remap_eh_region_nr (r, id);
1894 gimple_resx_set_region (resx_stmt, r);
1895 }
1896 break;
1897
1898 case GIMPLE_EH_DISPATCH:
1899 {
1900 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1901 int r = gimple_eh_dispatch_region (eh_dispatch);
1902 r = remap_eh_region_nr (r, id);
1903 gimple_eh_dispatch_set_region (eh_dispatch, r);
1904 }
1905 break;
1906
1907 default:
1908 break;
1909 }
1910 }
1911
1912 /* If STMT has a block defined, map it to the newly constructed block. */
1913 if (tree block = gimple_block (copy))
1914 {
1915 tree *n;
1916 n = id->decl_map->get (block);
1917 gcc_assert (n);
1918 gimple_set_block (copy, *n);
1919 }
1920 if (id->param_body_adjs)
1921 {
1922 gimple_seq extra_stmts = NULL;
1923 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1924 if (!gimple_seq_empty_p (extra_stmts))
1925 {
1926 memset (&wi, 0, sizeof (wi));
1927 wi.info = id;
1928 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1929 !gsi_end_p (egsi);
1930 gsi_next (&egsi))
1931 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1932 gimple_seq_add_seq (&stmts, extra_stmts);
1933 }
1934 }
1935
1936 if (id->reset_location)
1937 gimple_set_location (copy, input_location);
1938
1939 /* Debug statements ought to be rebuilt and not copied. */
1940 gcc_checking_assert (!is_gimple_debug (copy));
1941
1942 /* Remap all the operands in COPY. */
1943 memset (&wi, 0, sizeof (wi));
1944 wi.info = id;
1945 if (skip_first)
1946 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1947 else
1948 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1949
1950 /* Clear the copied virtual operands. We are not remapping them here
1951 but are going to recreate them from scratch. */
1952 if (gimple_has_mem_ops (copy))
1953 {
1954 gimple_set_vdef (copy, NULL_TREE);
1955 gimple_set_vuse (copy, NULL_TREE);
1956 }
1957
1958 gimple_seq_add_stmt (&stmts, copy);
1959 return stmts;
1960 }
1961
1962
1963 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1964 of later. */
1965
1966 static basic_block
1967 copy_bb (copy_body_data *id, basic_block bb,
1968 profile_count num, profile_count den)
1969 {
1970 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1971 basic_block copy_basic_block;
1972 tree decl;
1973 basic_block prev;
1974
1975 profile_count::adjust_for_ipa_scaling (&num, &den);
1976
1977 /* Search for previous copied basic block. */
1978 prev = bb->prev_bb;
1979 while (!prev->aux)
1980 prev = prev->prev_bb;
1981
1982 /* create_basic_block() will append every new block to
1983 basic_block_info automatically. */
1984 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1985 copy_basic_block->count = bb->count.apply_scale (num, den);
1986
1987 copy_gsi = gsi_start_bb (copy_basic_block);
1988
1989 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1990 {
1991 gimple_seq stmts;
1992 gimple *stmt = gsi_stmt (gsi);
1993 gimple *orig_stmt = stmt;
1994 gimple_stmt_iterator stmts_gsi;
1995 bool stmt_added = false;
1996
1997 id->regimplify = false;
1998 stmts = remap_gimple_stmt (stmt, id);
1999
2000 if (gimple_seq_empty_p (stmts))
2001 continue;
2002
2003 seq_gsi = copy_gsi;
2004
2005 for (stmts_gsi = gsi_start (stmts);
2006 !gsi_end_p (stmts_gsi); )
2007 {
2008 stmt = gsi_stmt (stmts_gsi);
2009
2010 /* Advance iterator now before stmt is moved to seq_gsi. */
2011 gsi_next (&stmts_gsi);
2012
2013 if (gimple_nop_p (stmt))
2014 continue;
2015
2016 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2017 orig_stmt);
2018
2019 /* With return slot optimization we can end up with
2020 non-gimple (foo *)&this->m; fix that here. */
2021 if (is_gimple_assign (stmt)
2022 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2023 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2024 {
2025 tree new_rhs;
2026 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2027 gimple_assign_rhs1 (stmt),
2028 true, NULL, false,
2029 GSI_CONTINUE_LINKING);
2030 gimple_assign_set_rhs1 (stmt, new_rhs);
2031 id->regimplify = false;
2032 }
2033
2034 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2035
2036 if (id->regimplify)
2037 gimple_regimplify_operands (stmt, &seq_gsi);
2038
2039 stmt_added = true;
2040 }
2041
2042 if (!stmt_added)
2043 continue;
2044
2045 /* If copy_basic_block was empty at the start of this iteration,
2046 call gsi_start_bb again to get at the newly added statements. */
2047 if (gsi_end_p (copy_gsi))
2048 copy_gsi = gsi_start_bb (copy_basic_block);
2049 else
2050 gsi_next (&copy_gsi);
2051
2052 /* Process the new statement. The call to gimple_regimplify_operands
2053 possibly turned the statement into multiple statements, so we
2054 need to process all of them. */
2055 do
2056 {
2057 tree fn;
2058 gcall *call_stmt;
2059
2060 stmt = gsi_stmt (copy_gsi);
2061 call_stmt = dyn_cast <gcall *> (stmt);
2062 if (call_stmt
2063 && gimple_call_va_arg_pack_p (call_stmt)
2064 && id->call_stmt
2065 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2066 {
2067 /* __builtin_va_arg_pack () should be replaced by
2068 all arguments corresponding to ... in the caller. */
2069 tree p;
2070 gcall *new_call;
2071 vec<tree> argarray;
2072 size_t nargs = gimple_call_num_args (id->call_stmt);
2073 size_t n;
2074
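/* Count the arguments the caller passed through the '...' of the inlined function: start from all arguments of the call and subtract the named parameters. */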
2075 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2076 nargs--;
2077
2078 /* Create the new array of arguments. */
2079 n = nargs + gimple_call_num_args (call_stmt);
2080 argarray.create (n);
2081 argarray.safe_grow_cleared (n);
2082
2083 /* Copy all the arguments before '...' */
2084 memcpy (argarray.address (),
2085 gimple_call_arg_ptr (call_stmt, 0),
2086 gimple_call_num_args (call_stmt) * sizeof (tree));
2087
2088 /* Append the arguments passed in '...' */
2089 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2090 gimple_call_arg_ptr (id->call_stmt, 0)
2091 + (gimple_call_num_args (id->call_stmt) - nargs),
2092 nargs * sizeof (tree));
2093
2094 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2095 argarray);
2096
2097 argarray.release ();
2098
2099 /* Copy all GIMPLE_CALL flags, location and block, except
2100 GF_CALL_VA_ARG_PACK. */
2101 gimple_call_copy_flags (new_call, call_stmt);
2102 gimple_call_set_va_arg_pack (new_call, false);
2103 /* location includes block. */
2104 gimple_set_location (new_call, gimple_location (stmt));
2105 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2106
2107 gsi_replace (&copy_gsi, new_call, false);
2108 stmt = new_call;
2109 }
2110 else if (call_stmt
2111 && id->call_stmt
2112 && (decl = gimple_call_fndecl (stmt))
2113 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2114 {
2115 /* __builtin_va_arg_pack_len () should be replaced by
2116 the number of anonymous arguments. */
2117 size_t nargs = gimple_call_num_args (id->call_stmt);
2118 tree count, p;
2119 gimple *new_stmt;
2120
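/* Compute the number of anonymous arguments: all arguments of the call minus the named parameters of the inlined function. */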
2121 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2122 nargs--;
2123
2124 if (!gimple_call_lhs (stmt))
2125 {
2126 /* Drop unused calls. */
2127 gsi_remove (&copy_gsi, false);
2128 continue;
2129 }
2130 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2131 {
2132 count = build_int_cst (integer_type_node, nargs);
2133 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2134 gsi_replace (&copy_gsi, new_stmt, false);
2135 stmt = new_stmt;
2136 }
2137 else if (nargs != 0)
2138 {
2139 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2140 count = build_int_cst (integer_type_node, nargs);
2141 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2142 PLUS_EXPR, newlhs, count);
2143 gimple_call_set_lhs (stmt, newlhs);
2144 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2145 }
2146 }
2147 else if (call_stmt
2148 && id->call_stmt
2149 && gimple_call_internal_p (stmt)
2150 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2151 {
2152 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2153 gsi_remove (&copy_gsi, false);
2154 continue;
2155 }
2156
2157 /* Statements produced by inlining can be unfolded, especially
2158 when we have constant propagated some operands. We can't fold
2159 them right now for two reasons:
2160 1) folding requires SSA_NAME_DEF_STMTs to be correct
2161 2) we can't change function calls to builtins.
2162 So we just mark the statement for later folding. We mark
2163 all new statements, instead of just the statements that have changed
2164 by some nontrivial substitution, so that even statements made
2165 foldable indirectly are updated. If this turns out to be
2166 expensive, copy_body can be told to watch for nontrivial
2167 changes. */
2168 if (id->statements_to_fold)
2169 id->statements_to_fold->add (stmt);
2170
2171 /* We're duplicating a CALL_EXPR. Find any corresponding
2172 callgraph edges and update or duplicate them. */
2173 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2174 {
2175 struct cgraph_edge *edge;
2176
2177 switch (id->transform_call_graph_edges)
2178 {
2179 case CB_CGE_DUPLICATE:
2180 edge = id->src_node->get_edge (orig_stmt);
2181 if (edge)
2182 {
2183 struct cgraph_edge *old_edge = edge;
2184 profile_count old_cnt = edge->count;
2185 edge = edge->clone (id->dst_node, call_stmt,
2186 gimple_uid (stmt),
2187 num, den,
2188 true);
2189
2190 /* Speculative calls consist of two edges - direct and
2191 indirect. Duplicate the whole thing and distribute
2192 frequencies accordingly. */
2193 if (edge->speculative)
2194 {
2195 struct cgraph_edge *direct, *indirect;
2196 struct ipa_ref *ref;
2197
2198 gcc_assert (!edge->indirect_unknown_callee);
2199 old_edge->speculative_call_info (direct, indirect, ref);
2200
2201 profile_count indir_cnt = indirect->count;
2202 indirect = indirect->clone (id->dst_node, call_stmt,
2203 gimple_uid (stmt),
2204 num, den,
2205 true);
2206
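/* Split the copied block's count between the direct and the indirect edge in the same proportion as the original edges' counts. */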
2207 profile_probability prob
2208 = indir_cnt.probability_in (old_cnt + indir_cnt);
2209 indirect->count
2210 = copy_basic_block->count.apply_probability (prob);
2211 edge->count = copy_basic_block->count - indirect->count;
2212 id->dst_node->clone_reference (ref, stmt);
2213 }
2214 else
2215 edge->count = copy_basic_block->count;
2216 }
2217 break;
2218
2219 case CB_CGE_MOVE_CLONES:
2220 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2221 call_stmt);
2222 edge = id->dst_node->get_edge (stmt);
2223 break;
2224
2225 case CB_CGE_MOVE:
2226 edge = id->dst_node->get_edge (orig_stmt);
2227 if (edge)
2228 edge->set_call_stmt (call_stmt);
2229 break;
2230
2231 default:
2232 gcc_unreachable ();
2233 }
2234
2235 /* Constant propagation on an argument done during inlining
2236 may create a new direct call. Produce an edge for it. */
2237 if ((!edge
2238 || (edge->indirect_inlining_edge
2239 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2240 && id->dst_node->definition
2241 && (fn = gimple_call_fndecl (stmt)) != NULL)
2242 {
2243 struct cgraph_node *dest = cgraph_node::get_create (fn);
2244
2245 /* We have a missing edge in the callgraph. This can happen
2246 when previous inlining turned an indirect call into a
2247 direct call by constant propagating arguments, or when we
2248 are producing a dead clone (for further cloning). In all
2249 other cases we hit a bug (incorrect node sharing is the
2250 most common reason for missing edges). */
2251 gcc_assert (!dest->definition
2252 || dest->address_taken
2253 || !id->src_node->definition
2254 || !id->dst_node->definition);
2255 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2256 id->dst_node->create_edge_including_clones
2257 (dest, orig_stmt, call_stmt, bb->count,
2258 CIF_ORIGINALLY_INDIRECT_CALL);
2259 else
2260 id->dst_node->create_edge (dest, call_stmt,
2261 bb->count)->inline_failed
2262 = CIF_ORIGINALLY_INDIRECT_CALL;
2263 if (dump_file)
2264 {
2265 fprintf (dump_file, "Created new direct edge to %s\n",
2266 dest->name ());
2267 }
2268 }
2269
2270 notice_special_calls (as_a <gcall *> (stmt));
2271 }
2272
2273 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2274 id->eh_map, id->eh_lp_nr);
2275
2276 gsi_next (&copy_gsi);
2277 }
2278 while (!gsi_end_p (copy_gsi));
2279
2280 copy_gsi = gsi_last_bb (copy_basic_block);
2281 }
2282
2283 return copy_basic_block;
2284 }
2285
2286 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2287 form is quite easy, since the dominator relationship for the old basic blocks
2288 does not change.
2289
2290 There is, however, an exception: inlining might change the dominator relation
2291 across EH edges from basic blocks within the inlined function to landing
2292 pads in the function we inline into.
2293
2294 The function fills in PHI_RESULTs of such PHI nodes if they refer
2295 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2296 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2297 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2298 set, and this means that there will be no overlapping live ranges
2299 for the underlying symbol.
2300
2301 This might change in the future if we allow redirecting of EH edges and
2302 we might then want to change the way the CFG is built pre-inlining to
2303 include all the possible edges. */
2304 static void
2305 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2306 bool can_throw, bool nonlocal_goto)
2307 {
2308 edge e;
2309 edge_iterator ei;
2310
2311 FOR_EACH_EDGE (e, ei, bb->succs)
2312 if (!e->dest->aux
2313 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2314 {
2315 gphi *phi;
2316 gphi_iterator si;
2317
2318 if (!nonlocal_goto)
2319 gcc_assert (e->flags & EDGE_EH);
2320
2321 if (!can_throw)
2322 gcc_assert (!(e->flags & EDGE_EH));
2323
2324 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2325 {
2326 edge re;
2327
2328 phi = si.phi ();
2329
2330 /* For abnormal goto/call edges the receiver can be the
2331 ENTRY_BLOCK. Do not assert this cannot happen. */
2332
2333 gcc_assert ((e->flags & EDGE_EH)
2334 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2335
2336 re = find_edge (ret_bb, e->dest);
2337 gcc_checking_assert (re);
2338 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2339 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2340
2341 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2342 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2343 }
2344 }
2345 }
2346
2347 /* Insert clobbers for automatic variables of inlined ID->src_fn
2348 function at the start of basic block ID->eh_landing_pad_dest. */
2349
2350 static void
2351 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2352 {
2353 tree var;
2354 basic_block bb = id->eh_landing_pad_dest;
2355 live_vars_map *vars = NULL;
2356 unsigned int cnt = 0;
2357 unsigned int i;
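/* Collect the automatic variables of the source function that were remapped to automatic variables of the destination function and number them for the liveness computation below. */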
2358 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2359 if (VAR_P (var)
2360 && !DECL_HARD_REGISTER (var)
2361 && !TREE_THIS_VOLATILE (var)
2362 && !DECL_HAS_VALUE_EXPR_P (var)
2363 && !is_gimple_reg (var)
2364 && auto_var_in_fn_p (var, id->src_fn)
2365 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2366 {
2367 tree *t = id->decl_map->get (var);
2368 if (!t)
2369 continue;
2370 tree new_var = *t;
2371 if (VAR_P (new_var)
2372 && !DECL_HARD_REGISTER (new_var)
2373 && !TREE_THIS_VOLATILE (new_var)
2374 && !DECL_HAS_VALUE_EXPR_P (new_var)
2375 && !is_gimple_reg (new_var)
2376 && auto_var_in_fn_p (new_var, id->dst_fn))
2377 {
2378 if (vars == NULL)
2379 vars = new live_vars_map;
2380 vars->put (DECL_UID (var), cnt++);
2381 }
2382 }
2383 if (vars == NULL)
2384 return;
2385
2386 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
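/* Emit a clobber at the start of the landing pad for each candidate variable that is live in some block from which an EH edge enters the pad. */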
2387 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2388 if (VAR_P (var))
2389 {
2390 edge e;
2391 edge_iterator ei;
2392 bool needed = false;
2393 unsigned int *v = vars->get (DECL_UID (var));
2394 if (v == NULL)
2395 continue;
2396 FOR_EACH_EDGE (e, ei, bb->preds)
2397 if ((e->flags & EDGE_EH) != 0
2398 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2399 {
2400 basic_block src_bb = (basic_block) e->src->aux;
2401
2402 if (bitmap_bit_p (&live[src_bb->index], *v))
2403 {
2404 needed = true;
2405 break;
2406 }
2407 }
2408 if (needed)
2409 {
2410 tree new_var = *id->decl_map->get (var);
2411 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2412 tree clobber = build_clobber (TREE_TYPE (new_var));
2413 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2414 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2415 }
2416 }
2417 destroy_live_vars (live);
2418 delete vars;
2419 }
2420
2421 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2422 accordingly. Assume aux pointers point to the copies of each BB.
2423 Return true if any debug stmts are left after a statement that must
2424 end the basic block. */
2425
2426 static bool
2427 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2428 basic_block ret_bb, basic_block abnormal_goto_dest,
2429 copy_body_data *id)
2430 {
2431 basic_block new_bb = (basic_block) bb->aux;
2432 edge_iterator ei;
2433 edge old_edge;
2434 gimple_stmt_iterator si;
2435 bool need_debug_cleanup = false;
2436
2437 /* Use the indices from the original blocks to create edges for the
2438 new ones. */
2439 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2440 if (!(old_edge->flags & EDGE_EH))
2441 {
2442 edge new_edge;
2443 int flags = old_edge->flags;
2444 location_t locus = old_edge->goto_locus;
2445
2446 /* Return edges do get a FALLTHRU flag when they get inlined. */
2447 if (old_edge->dest->index == EXIT_BLOCK
2448 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2449 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2450 flags |= EDGE_FALLTHRU;
2451
2452 new_edge
2453 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2454 new_edge->probability = old_edge->probability;
2455 if (!id->reset_location)
2456 new_edge->goto_locus = remap_location (locus, id);
2457 }
2458
2459 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2460 return false;
2461
2462 /* When doing function splitting, we must decrease the count of the return block
2463 which was previously reachable from a block we did not copy. */
2464 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2465 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2466 if (old_edge->src->index != ENTRY_BLOCK
2467 && !old_edge->src->aux)
2468 new_bb->count -= old_edge->count ().apply_scale (num, den);
2469
2470 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2471 {
2472 gimple *copy_stmt;
2473 bool can_throw, nonlocal_goto;
2474
2475 copy_stmt = gsi_stmt (si);
2476 if (!is_gimple_debug (copy_stmt))
2477 update_stmt (copy_stmt);
2478
2479 /* Do this before the possible split_block. */
2480 gsi_next (&si);
2481
2482 /* If this tree could throw an exception, there are two
2483 cases where we need to add abnormal edge(s): the
2484 tree wasn't in a region and there is a "current
2485 region" in the caller; or the original tree had
2486 EH edges. In both cases split the block after the tree,
2487 and add abnormal edge(s) as needed; we need both
2488 those from the callee and the caller.
2489 We check whether the copy can throw, because the const
2490 propagation can change an INDIRECT_REF which throws
2491 into a COMPONENT_REF which doesn't. If the copy
2492 can throw, the original could also throw. */
2493 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2494 nonlocal_goto
2495 = (stmt_can_make_abnormal_goto (copy_stmt)
2496 && !computed_goto_p (copy_stmt));
2497
2498 if (can_throw || nonlocal_goto)
2499 {
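/* If only debug stmts follow the copied statement, note that they will have to be moved to a successor block later. */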
2500 if (!gsi_end_p (si))
2501 {
2502 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2503 gsi_next (&si);
2504 if (gsi_end_p (si))
2505 need_debug_cleanup = true;
2506 }
2507 if (!gsi_end_p (si))
2508 /* Note that bb's predecessor edges aren't necessarily
2509 right at this point; split_block doesn't care. */
2510 {
2511 edge e = split_block (new_bb, copy_stmt);
2512
2513 new_bb = e->dest;
2514 new_bb->aux = e->src->aux;
2515 si = gsi_start_bb (new_bb);
2516 }
2517 }
2518
2519 bool update_probs = false;
2520
2521 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2522 {
2523 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2524 update_probs = true;
2525 }
2526 else if (can_throw)
2527 {
2528 make_eh_edges (copy_stmt);
2529 update_probs = true;
2530 }
2531
2532 /* EH edges may not match old edges. Copy as much as possible. */
2533 if (update_probs)
2534 {
2535 edge e;
2536 edge_iterator ei;
2537 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2538
2539 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2540 if ((old_edge->flags & EDGE_EH)
2541 && (e = find_edge (copy_stmt_bb,
2542 (basic_block) old_edge->dest->aux))
2543 && (e->flags & EDGE_EH))
2544 e->probability = old_edge->probability;
2545
2546 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2547 if (e->flags & EDGE_EH)
2548 {
2549 if (!e->probability.initialized_p ())
2550 e->probability = profile_probability::never ();
2551 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2552 {
2553 if (id->eh_landing_pad_dest == NULL)
2554 id->eh_landing_pad_dest = e->dest;
2555 else
2556 gcc_assert (id->eh_landing_pad_dest == e->dest);
2557 }
2558 }
2559 }
2560
2561
2562 /* If the call we inline cannot make an abnormal goto, do not add
2563 additional abnormal edges but only retain those already present
2564 in the original function body. */
2565 if (abnormal_goto_dest == NULL)
2566 nonlocal_goto = false;
2567 if (nonlocal_goto)
2568 {
2569 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2570
2571 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2572 nonlocal_goto = false;
2573 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2574 in OpenMP regions which aren't allowed to be left abnormally.
2575 So, no need to add abnormal edge in that case. */
2576 else if (is_gimple_call (copy_stmt)
2577 && gimple_call_internal_p (copy_stmt)
2578 && (gimple_call_internal_fn (copy_stmt)
2579 == IFN_ABNORMAL_DISPATCHER)
2580 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2581 nonlocal_goto = false;
2582 else
2583 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2584 EDGE_ABNORMAL);
2585 }
2586
2587 if ((can_throw || nonlocal_goto)
2588 && gimple_in_ssa_p (cfun))
2589 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2590 can_throw, nonlocal_goto);
2591 }
2592 return need_debug_cleanup;
2593 }
2594
2595 /* Copy the PHIs. All blocks and edges have been copied; some blocks
2596 were possibly split and new outgoing EH edges inserted.
2597 BB points to the block of the original function and AUX pointers link
2598 the original and newly copied blocks. */
2599
2600 static void
2601 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2602 {
2603 basic_block const new_bb = (basic_block) bb->aux;
2604 edge_iterator ei;
2605 gphi *phi;
2606 gphi_iterator si;
2607 edge new_edge;
2608 bool inserted = false;
2609
2610 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2611 {
2612 tree res, new_res;
2613 gphi *new_phi;
2614
2615 phi = si.phi ();
2616 res = PHI_RESULT (phi);
2617 new_res = res;
2618 if (!virtual_operand_p (res))
2619 {
2620 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2621 if (EDGE_COUNT (new_bb->preds) == 0)
2622 {
2623 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2624 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2625 }
2626 else
2627 {
2628 new_phi = create_phi_node (new_res, new_bb);
2629 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2630 {
2631 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2632 bb);
2633 tree arg;
2634 tree new_arg;
2635 edge_iterator ei2;
2636 location_t locus;
2637
2638 /* When doing partial cloning, we allow PHIs on the entry
2639 block as long as all the arguments are the same.
2640 Find any input edge to see which argument to copy. */
2641 if (!old_edge)
2642 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2643 if (!old_edge->src->aux)
2644 break;
2645
2646 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2647 new_arg = arg;
2648 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2649 gcc_assert (new_arg);
2650 /* With return slot optimization we can end up with
2651 non-gimple (foo *)&this->m; fix that here. */
2652 if (TREE_CODE (new_arg) != SSA_NAME
2653 && TREE_CODE (new_arg) != FUNCTION_DECL
2654 && !is_gimple_val (new_arg))
2655 {
2656 gimple_seq stmts = NULL;
2657 new_arg = force_gimple_operand (new_arg, &stmts, true,
2658 NULL);
2659 gsi_insert_seq_on_edge (new_edge, stmts);
2660 inserted = true;
2661 }
2662 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2663 if (id->reset_location)
2664 locus = input_location;
2665 else
2666 locus = remap_location (locus, id);
2667 add_phi_arg (new_phi, new_arg, new_edge, locus);
2668 }
2669 }
2670 }
2671 }
2672
2673 /* Commit the delayed edge insertions. */
2674 if (inserted)
2675 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2676 gsi_commit_one_edge_insert (new_edge, NULL);
2677 }
2678
2679
2680 /* Wrapper for remap_decl so it can be used as a callback. */
2681
2682 static tree
2683 remap_decl_1 (tree decl, void *data)
2684 {
2685 return remap_decl (decl, (copy_body_data *) data);
2686 }
2687
2688 /* Build the struct function and associated data structures for the new clone
2689 NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function changes
2690 cfun to the function of NEW_FNDECL (and current_function_decl too). */
2691
2692 static void
2693 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2694 {
2695 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2696
2697 if (!DECL_ARGUMENTS (new_fndecl))
2698 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2699 if (!DECL_RESULT (new_fndecl))
2700 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2701
2702 /* Register specific tree functions. */
2703 gimple_register_cfg_hooks ();
2704
2705 /* Get clean struct function. */
2706 push_struct_function (new_fndecl);
2707
2708 /* We will rebuild these, so just sanity check that they are empty. */
2709 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2710 gcc_assert (cfun->local_decls == NULL);
2711 gcc_assert (cfun->cfg == NULL);
2712 gcc_assert (cfun->decl == new_fndecl);
2713
2714 /* Copy items we preserve during cloning. */
2715 cfun->static_chain_decl = src_cfun->static_chain_decl;
2716 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2717 cfun->function_end_locus = src_cfun->function_end_locus;
2718 cfun->curr_properties = src_cfun->curr_properties;
2719 cfun->last_verified = src_cfun->last_verified;
2720 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2721 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2722 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2723 cfun->calls_eh_return = src_cfun->calls_eh_return;
2724 cfun->stdarg = src_cfun->stdarg;
2725 cfun->after_inlining = src_cfun->after_inlining;
2726 cfun->can_throw_non_call_exceptions
2727 = src_cfun->can_throw_non_call_exceptions;
2728 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2729 cfun->returns_struct = src_cfun->returns_struct;
2730 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2731
2732 init_empty_tree_cfg ();
2733
2734 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2735
2736 profile_count num = count;
2737 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2738 profile_count::adjust_for_ipa_scaling (&num, &den);
2739
2740 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2741 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2742 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2743 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2744 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2745 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2746 if (src_cfun->eh)
2747 init_eh_for_function ();
2748
2749 if (src_cfun->gimple_df)
2750 {
2751 init_tree_ssa (cfun);
2752 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2753 if (cfun->gimple_df->in_ssa_p)
2754 init_ssa_operands (cfun);
2755 }
2756 }
2757
2758 /* Helper function for copy_cfg_body. Move debug stmts from the end
2759 of NEW_BB to the beginning of successor basic blocks when needed. If the
2760 successor has multiple predecessors, reset the debug stmts' values;
2761 otherwise keep them. */
2762
2763 static void
2764 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2765 {
2766 edge e;
2767 edge_iterator ei;
2768 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2769
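/* Nothing to do unless the last real statement must end the block (it can throw or make an abnormal goto) and debug stmts follow it. */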
2770 if (gsi_end_p (si)
2771 || gsi_one_before_end_p (si)
2772 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2773 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2774 return;
2775
2776 FOR_EACH_EDGE (e, ei, new_bb->succs)
2777 {
2778 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2779 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2780 while (is_gimple_debug (gsi_stmt (ssi)))
2781 {
2782 gimple *stmt = gsi_stmt (ssi);
2783 gdebug *new_stmt;
2784 tree var;
2785 tree value;
2786
2787 /* For the last edge move the debug stmts instead of copying
2788 them. */
2789 if (ei_one_before_end_p (ei))
2790 {
2791 si = ssi;
2792 gsi_prev (&ssi);
2793 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2794 {
2795 gimple_debug_bind_reset_value (stmt);
2796 gimple_set_location (stmt, UNKNOWN_LOCATION);
2797 }
2798 gsi_remove (&si, false);
2799 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2800 continue;
2801 }
2802
2803 if (gimple_debug_bind_p (stmt))
2804 {
2805 var = gimple_debug_bind_get_var (stmt);
2806 if (single_pred_p (e->dest))
2807 {
2808 value = gimple_debug_bind_get_value (stmt);
2809 value = unshare_expr (value);
2810 new_stmt = gimple_build_debug_bind (var, value, stmt);
2811 }
2812 else
2813 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2814 }
2815 else if (gimple_debug_source_bind_p (stmt))
2816 {
2817 var = gimple_debug_source_bind_get_var (stmt);
2818 value = gimple_debug_source_bind_get_value (stmt);
2819 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2820 }
2821 else if (gimple_debug_nonbind_marker_p (stmt))
2822 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2823 else
2824 gcc_unreachable ();
2825 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2826 id->debug_stmts.safe_push (new_stmt);
2827 gsi_prev (&ssi);
2828 }
2829 }
2830 }
2831
2832 /* Make a copy of the sub-loops of SRC_PARENT and place them
2833 as sub-loops of DEST_PARENT. */
2834
2835 static void
2836 copy_loops (copy_body_data *id,
2837 class loop *dest_parent, class loop *src_parent)
2838 {
2839 class loop *src_loop = src_parent->inner;
2840 while (src_loop)
2841 {
2842 if (!id->blocks_to_copy
2843 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2844 {
2845 class loop *dest_loop = alloc_loop ();
2846
2847 /* Assign the new loop its header and latch and associate
2848 those with the new loop. */
2849 dest_loop->header = (basic_block)src_loop->header->aux;
2850 dest_loop->header->loop_father = dest_loop;
2851 if (src_loop->latch != NULL)
2852 {
2853 dest_loop->latch = (basic_block)src_loop->latch->aux;
2854 dest_loop->latch->loop_father = dest_loop;
2855 }
2856
2857 /* Copy loop meta-data. */
2858 copy_loop_info (src_loop, dest_loop);
2859 if (dest_loop->unroll)
2860 cfun->has_unroll = true;
2861 if (dest_loop->force_vectorize)
2862 cfun->has_force_vectorize_loops = true;
2863 if (id->src_cfun->last_clique != 0)
2864 dest_loop->owned_clique
2865 = remap_dependence_clique (id,
2866 src_loop->owned_clique
2867 ? src_loop->owned_clique : 1);
2868
2869 /* Finally place it into the loop array and the loop tree. */
2870 place_new_loop (cfun, dest_loop);
2871 flow_loop_tree_node_add (dest_parent, dest_loop);
2872
2873 if (src_loop->simduid)
2874 {
2875 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2876 cfun->has_simduid_loops = true;
2877 }
2878
2879 /* Recurse. */
2880 copy_loops (id, dest_loop, src_loop);
2881 }
2882 src_loop = src_loop->next;
2883 }
2884 }
2885
2886 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2887
2888 void
2889 redirect_all_calls (copy_body_data * id, basic_block bb)
2890 {
2891 gimple_stmt_iterator si;
2892 gimple *last = last_stmt (bb);
2893 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2894 {
2895 gimple *stmt = gsi_stmt (si);
2896 if (is_gimple_call (stmt))
2897 {
2898 tree old_lhs = gimple_call_lhs (stmt);
2899 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2900 if (edge)
2901 {
2902 gimple *new_stmt = edge->redirect_call_stmt_to_callee ();
2903 /* If the IPA-SRA transformation, run as part of edge redirection,
2904 removed the LHS because it is unused, save it to
2905 killed_new_ssa_names so that we can prune it from debug
2906 statements. */
2907 if (old_lhs
2908 && TREE_CODE (old_lhs) == SSA_NAME
2909 && !gimple_call_lhs (new_stmt))
2910 {
2911 if (!id->killed_new_ssa_names)
2912 id->killed_new_ssa_names = new hash_set<tree> (16);
2913 id->killed_new_ssa_names->add (old_lhs);
2914 }
2915
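/* If the redirected call can no longer throw, remove the EH edges that have become dead. */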
2916 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2917 gimple_purge_dead_eh_edges (bb);
2918 }
2919 }
2920 }
2921 }
2922
2923 /* Make a copy of the body of FN so that it can be inserted inline in
2924 another function. Walks FN via CFG, returns new fndecl. */
2925
2926 static tree
2927 copy_cfg_body (copy_body_data * id,
2928 basic_block entry_block_map, basic_block exit_block_map,
2929 basic_block new_entry)
2930 {
2931 tree callee_fndecl = id->src_fn;
2932 /* Original cfun for the callee, doesn't change. */
2933 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2934 struct function *cfun_to_copy;
2935 basic_block bb;
2936 tree new_fndecl = NULL;
2937 bool need_debug_cleanup = false;
2938 int last;
2939 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2940 profile_count num = entry_block_map->count;
2941
2942 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2943
2944 /* Register specific tree functions. */
2945 gimple_register_cfg_hooks ();
2946
2947 /* If we are inlining just a region of the function, make sure to connect
2948 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2949 part of a loop, we must compute the frequency and probability of
2950 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2951 probabilities of the edges incoming from the nonduplicated region. */
2952 if (new_entry)
2953 {
2954 edge e;
2955 edge_iterator ei;
2956 den = profile_count::zero ();
2957
2958 FOR_EACH_EDGE (e, ei, new_entry->preds)
2959 if (!e->src->aux)
2960 den += e->count ();
2961 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2962 }
2963
2964 profile_count::adjust_for_ipa_scaling (&num, &den);
2965
2966 /* Must have a CFG here at this point. */
2967 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2968 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2969
2970
2971 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2972 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2973 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2974 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2975
2976 /* Duplicate any exception-handling regions. */
2977 if (cfun->eh)
2978 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2979 remap_decl_1, id);
2980
2981 /* Use aux pointers to map the original blocks to their copies. */
2982 FOR_EACH_BB_FN (bb, cfun_to_copy)
2983 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2984 {
2985 basic_block new_bb = copy_bb (id, bb, num, den);
2986 bb->aux = new_bb;
2987 new_bb->aux = bb;
2988 new_bb->loop_father = entry_block_map->loop_father;
2989 }
2990
2991 last = last_basic_block_for_fn (cfun);
2992
2993 /* Now that we've duplicated the blocks, duplicate their edges. */
2994 basic_block abnormal_goto_dest = NULL;
2995 if (id->call_stmt
2996 && stmt_can_make_abnormal_goto (id->call_stmt))
2997 {
2998 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2999
3000 bb = gimple_bb (id->call_stmt);
3001 gsi_next (&gsi);
3002 if (gsi_end_p (gsi))
3003 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3004 }
3005 FOR_ALL_BB_FN (bb, cfun_to_copy)
3006 if (!id->blocks_to_copy
3007 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3008 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3009 abnormal_goto_dest, id);
3010
3011 if (id->eh_landing_pad_dest)
3012 {
3013 add_clobbers_to_eh_landing_pad (id);
3014 id->eh_landing_pad_dest = NULL;
3015 }
3016
3017 if (new_entry)
3018 {
3019 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3020 EDGE_FALLTHRU);
3021 e->probability = profile_probability::always ();
3022 }
3023
3024 /* Duplicate the loop tree, if available and wanted. */
3025 if (loops_for_fn (src_cfun) != NULL
3026 && current_loops != NULL)
3027 {
3028 copy_loops (id, entry_block_map->loop_father,
3029 get_loop (src_cfun, 0));
3030 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3031 loops_state_set (LOOPS_NEED_FIXUP);
3032 }
3033
3034 /* If the loop tree in the source function needed fixup, mark the
3035 destination loop tree for fixup, too. */
3036 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3037 loops_state_set (LOOPS_NEED_FIXUP);
3038
3039 if (gimple_in_ssa_p (cfun))
3040 FOR_ALL_BB_FN (bb, cfun_to_copy)
3041 if (!id->blocks_to_copy
3042 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3043 copy_phis_for_bb (bb, id);
3044
3045 FOR_ALL_BB_FN (bb, cfun_to_copy)
3046 if (bb->aux)
3047 {
3048 if (need_debug_cleanup
3049 && bb->index != ENTRY_BLOCK
3050 && bb->index != EXIT_BLOCK)
3051 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3052 /* Update call edge destinations. This cannot be done before loop
3053 info is updated, because we may split basic blocks. */
3054 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3055 && bb->index != ENTRY_BLOCK
3056 && bb->index != EXIT_BLOCK)
3057 redirect_all_calls (id, (basic_block)bb->aux);
3058 ((basic_block)bb->aux)->aux = NULL;
3059 bb->aux = NULL;
3060 }
3061
3062 /* Zero out the AUX fields of blocks newly created during EH edge
3063 insertion. */
3064 for (; last < last_basic_block_for_fn (cfun); last++)
3065 {
3066 if (need_debug_cleanup)
3067 maybe_move_debug_stmts_to_successors (id,
3068 BASIC_BLOCK_FOR_FN (cfun, last));
3069 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3070 /* Update call edge destinations. This cannot be done before loop
3071 info is updated, because we may split basic blocks. */
3072 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3073 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3074 }
3075 entry_block_map->aux = NULL;
3076 exit_block_map->aux = NULL;
3077
3078 if (id->eh_map)
3079 {
3080 delete id->eh_map;
3081 id->eh_map = NULL;
3082 }
3083 if (id->dependence_map)
3084 {
3085 delete id->dependence_map;
3086 id->dependence_map = NULL;
3087 }
3088
3089 return new_fndecl;
3090 }
3091
3092 /* Copy the debug STMT using ID. We deal with these statements in a
3093 special way: if any variable in their VALUE expression wasn't
3094 remapped yet, we won't remap it, because that would get decl uids
3095 out of sync, causing codegen differences between -g and -g0. If
3096 this arises, we drop the VALUE expression altogether. */
3097
3098 static void
3099 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3100 {
3101 tree t, *n;
3102 struct walk_stmt_info wi;
3103
3104 if (tree block = gimple_block (stmt))
3105 {
3106 n = id->decl_map->get (block);
3107 gimple_set_block (stmt, n ? *n : id->block);
3108 }
3109
3110 if (gimple_debug_nonbind_marker_p (stmt))
3111 return;
3112
3113 /* Remap all the operands in COPY. */
3114 memset (&wi, 0, sizeof (wi));
3115 wi.info = id;
3116
3117 processing_debug_stmt = 1;
3118
3119 if (gimple_debug_source_bind_p (stmt))
3120 t = gimple_debug_source_bind_get_var (stmt);
3121 else if (gimple_debug_bind_p (stmt))
3122 t = gimple_debug_bind_get_var (stmt);
3123 else
3124 gcc_unreachable ();
3125
3126 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3127 && (n = id->debug_map->get (t)))
3128 {
3129 gcc_assert (VAR_P (*n));
3130 t = *n;
3131 }
3132 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3133 /* T is a non-localized variable. */;
3134 else
3135 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3136
3137 if (gimple_debug_bind_p (stmt))
3138 {
3139 gimple_debug_bind_set_var (stmt, t);
3140
3141 if (gimple_debug_bind_has_value_p (stmt))
3142 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3143 remap_gimple_op_r, &wi, NULL);
3144
3145 /* Punt if any decl couldn't be remapped. */
3146 if (processing_debug_stmt < 0)
3147 gimple_debug_bind_reset_value (stmt);
3148 }
3149 else if (gimple_debug_source_bind_p (stmt))
3150 {
3151 gimple_debug_source_bind_set_var (stmt, t);
3152 /* When inlining and the source bind refers to one of the optimized
3153 away parameters, change the source bind into a normal debug bind
3154 referring to the corresponding DEBUG_EXPR_DECL that should have
3155 been bound before the call stmt. */
3156 t = gimple_debug_source_bind_get_value (stmt);
3157 if (t != NULL_TREE
3158 && TREE_CODE (t) == PARM_DECL
3159 && id->call_stmt)
3160 {
3161 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3162 unsigned int i;
3163 if (debug_args != NULL)
3164 {
3165 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3166 if ((**debug_args)[i] == DECL_ORIGIN (t)
3167 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3168 {
3169 t = (**debug_args)[i + 1];
3170 stmt->subcode = GIMPLE_DEBUG_BIND;
3171 gimple_debug_bind_set_value (stmt, t);
3172 break;
3173 }
3174 }
3175 }
3176 if (gimple_debug_source_bind_p (stmt))
3177 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3178 remap_gimple_op_r, &wi, NULL);
3179 }
3180
3181 processing_debug_stmt = 0;
3182
3183 update_stmt (stmt);
3184 }
3185
3186 /* Process deferred debug stmts. In order to give values better odds
3187 of being successfully remapped, we delay the processing of debug
3188 stmts until all other stmts that might require remapping are
3189 processed. */
3190
3191 static void
3192 copy_debug_stmts (copy_body_data *id)
3193 {
3194 size_t i;
3195 gdebug *stmt;
3196
3197 if (!id->debug_stmts.exists ())
3198 return;
3199
3200 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3201 copy_debug_stmt (stmt, id);
3202
3203 id->debug_stmts.release ();
3204 }
3205
3206 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3207 another function. */
3208
3209 static tree
3210 copy_tree_body (copy_body_data *id)
3211 {
3212 tree fndecl = id->src_fn;
3213 tree body = DECL_SAVED_TREE (fndecl);
3214
3215 walk_tree (&body, copy_tree_body_r, id, NULL);
3216
3217 return body;
3218 }
3219
3220 /* Make a copy of the body of FN so that it can be inserted inline in
3221 another function. */
3222
3223 static tree
3224 copy_body (copy_body_data *id,
3225 basic_block entry_block_map, basic_block exit_block_map,
3226 basic_block new_entry)
3227 {
3228 tree fndecl = id->src_fn;
3229 tree body;
3230
3231 /* If this body has a CFG, walk CFG and copy. */
3232 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3233 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3234 new_entry);
3235 copy_debug_stmts (id);
3236 delete id->killed_new_ssa_names;
3237 id->killed_new_ssa_names = NULL;
3238
3239 return body;
3240 }
3241
3242 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3243 defined in function FN, or of a data member thereof. */
3244
3245 static bool
3246 self_inlining_addr_expr (tree value, tree fn)
3247 {
3248 tree var;
3249
3250 if (TREE_CODE (value) != ADDR_EXPR)
3251 return false;
3252
3253 var = get_base_address (TREE_OPERAND (value, 0));
3254
3255 return var && auto_var_in_fn_p (var, fn);
3256 }
3257
3258 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3259 lexical block and line number information from base_stmt, if given,
3260 or from the last stmt of the block otherwise. */
3261
3262 static gimple *
3263 insert_init_debug_bind (copy_body_data *id,
3264 basic_block bb, tree var, tree value,
3265 gimple *base_stmt)
3266 {
3267 gimple *note;
3268 gimple_stmt_iterator gsi;
3269 tree tracked_var;
3270
3271 if (!gimple_in_ssa_p (id->src_cfun))
3272 return NULL;
3273
3274 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3275 return NULL;
3276
3277 tracked_var = target_for_debug_bind (var);
3278 if (!tracked_var)
3279 return NULL;
3280
3281 if (bb)
3282 {
3283 gsi = gsi_last_bb (bb);
3284 if (!base_stmt && !gsi_end_p (gsi))
3285 base_stmt = gsi_stmt (gsi);
3286 }
3287
3288 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3289
3290 if (bb)
3291 {
3292 if (!gsi_end_p (gsi))
3293 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3294 else
3295 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3296 }
3297
3298 return note;
3299 }
3300
3301 static void
3302 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3303 {
3304 /* If VAR represents a zero-sized variable, the assignment statement
3305 may result in no gimple statements. */
3306 if (init_stmt)
3307 {
3308 gimple_stmt_iterator si = gsi_last_bb (bb);
3309
3310 /* We can end up with init statements that store to a non-register
3311 from a rhs with a conversion. Handle that here by forcing the
3312 rhs into a temporary. gimple_regimplify_operands is not
3313 prepared to do this for us. */
3314 if (!is_gimple_debug (init_stmt)
3315 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3316 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3317 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3318 {
3319 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3320 gimple_expr_type (init_stmt),
3321 gimple_assign_rhs1 (init_stmt));
3322 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3323 GSI_NEW_STMT);
3324 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3325 gimple_assign_set_rhs1 (init_stmt, rhs);
3326 }
3327 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3328 gimple_regimplify_operands (init_stmt, &si);
3329
3330 if (!is_gimple_debug (init_stmt))
3331 {
3332 tree def = gimple_assign_lhs (init_stmt);
3333 insert_init_debug_bind (id, bb, def, def, init_stmt);
3334 }
3335 }
3336 }
3337
3338 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3339 if need be (which should only be necessary for invalid programs). Attempt
3340 to convert VALUE to TYPE and return the result if it is possible; just return
3341 a zero constant of the given type if it fails. */
3342
3343 tree
3344 force_value_to_type (tree type, tree value)
3345 {
3346 /* If we can match up types by promotion/demotion do so. */
3347 if (fold_convertible_p (type, value))
3348 return fold_convert (type, value);
3349
3350 /* ??? For valid programs we should not end up here.
3351 Still if we end up with truly mismatched types here, fall back
3352 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3353 GIMPLE to the following passes. */
3354 if (!is_gimple_reg_type (TREE_TYPE (value))
3355 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3356 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3357 else
3358 return build_zero_cst (type);
3359 }
3360
3361 /* Initialize parameter P with VALUE. If needed, produce an init statement
3362 at the end of BB. When BB is NULL, we return the init statement to be
3363 output later. */
3364 static gimple *
3365 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3366 basic_block bb, tree *vars)
3367 {
3368 gimple *init_stmt = NULL;
3369 tree var;
3370 tree rhs = value;
3371 tree def = (gimple_in_ssa_p (cfun)
3372 ? ssa_default_def (id->src_cfun, p) : NULL);
3373
3374 if (value
3375 && value != error_mark_node
3376 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3377 rhs = force_value_to_type (TREE_TYPE (p), value);
3378
3379 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3380 here since the type of this decl must be visible to the calling
3381 function. */
3382 var = copy_decl_to_var (p, id);
3383
3384 /* Declare this new variable. */
3385 DECL_CHAIN (var) = *vars;
3386 *vars = var;
3387
3388 /* Make gimplifier happy about this variable. */
3389 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3390
3391 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3392 we would not need to create a new variable here at all, if it
3393 weren't for debug info. Still, we can just use the argument
3394 value. */
3395 if (TREE_READONLY (p)
3396 && !TREE_ADDRESSABLE (p)
3397 && value && !TREE_SIDE_EFFECTS (value)
3398 && !def)
3399 {
3400 /* We may produce non-gimple trees by adding NOPs or introduce
3401 invalid sharing when the operand is not really constant.
3402 It is not a big deal to prohibit constant propagation here, as
3403 we will constant propagate in the DOM1 pass anyway. */
3404 if (is_gimple_min_invariant (value)
3405 && useless_type_conversion_p (TREE_TYPE (p),
3406 TREE_TYPE (value))
3407 /* We have to be very careful about ADDR_EXPR. Make sure
3408 the base variable isn't a local variable of the inlined
3409 function, e.g., when doing recursive inlining, direct or
3410 mutually-recursive or whatever, which is why we don't
3411 just test whether fn == current_function_decl. */
3412 && ! self_inlining_addr_expr (value, fn))
3413 {
3414 insert_decl_map (id, p, value);
3415 insert_debug_decl_map (id, p, var);
3416 return insert_init_debug_bind (id, bb, var, value, NULL);
3417 }
3418 }
3419
3420 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3421 that way, when the PARM_DECL is encountered, it will be
3422 automatically replaced by the VAR_DECL. */
3423 insert_decl_map (id, p, var);
3424
3425 /* Even if P was TREE_READONLY, the new VAR should not be.
3426 In the original code, we would have constructed a
3427 temporary, and then the function body would never have
3428 changed the value of P. However, now we will be
3429 constructing VAR directly. The constructor body may
3430 change its value multiple times as it is being
3431 constructed. Therefore, it must not be TREE_READONLY;
3432 the back-end assumes that a TREE_READONLY variable is
3433 assigned to only once. */
3434 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3435 TREE_READONLY (var) = 0;
3436
3437 /* If there is no setup required and we are in SSA, take the easy route
3438 and replace all SSA names representing the function parameter by the
3439 SSA name passed to the function.
3440
3441 We need to construct a map for the variable anyway, as it might be used
3442 in different SSA names when the parameter is set in the function.
3443
3444 Do the replacement at -O0 for const arguments replaced by a constant.
3445 This is important for builtin_constant_p and other constructs requiring
3446 a constant argument to be visible in the inlined function body. */
3447 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3448 && (optimize
3449 || (TREE_READONLY (p)
3450 && is_gimple_min_invariant (rhs)))
3451 && (TREE_CODE (rhs) == SSA_NAME
3452 || is_gimple_min_invariant (rhs))
3453 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3454 {
3455 insert_decl_map (id, def, rhs);
3456 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3457 }
3458
3459 /* If the value of the argument is never used, don't bother initializing
3460 it. */
3461 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3462 {
3463 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3464 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3465 }
3466
3467 /* Initialize this VAR_DECL from the equivalent argument. Convert
3468 the argument to the proper type in case it was promoted. */
3469 if (value)
3470 {
3471 if (rhs == error_mark_node)
3472 {
3473 insert_decl_map (id, p, var);
3474 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3475 }
3476
3477 STRIP_USELESS_TYPE_CONVERSION (rhs);
3478
3479 /* If we are in SSA form, properly remap the default definition,
3480 or assign to a dummy SSA name if the parameter is unused and
3481 we are not optimizing. */
3482 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3483 {
3484 if (def)
3485 {
3486 def = remap_ssa_name (def, id);
3487 init_stmt = gimple_build_assign (def, rhs);
3488 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3489 set_ssa_default_def (cfun, var, NULL);
3490 }
3491 else if (!optimize)
3492 {
3493 def = make_ssa_name (var);
3494 init_stmt = gimple_build_assign (def, rhs);
3495 }
3496 }
3497 else
3498 init_stmt = gimple_build_assign (var, rhs);
3499
3500 if (bb && init_stmt)
3501 insert_init_stmt (id, bb, init_stmt);
3502 }
3503 return init_stmt;
3504 }
3505
3506 /* Generate code to initialize the parameters of the function at the
3507 top of the stack in ID from the GIMPLE_CALL STMT. */
3508
3509 static void
3510 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3511 tree fn, basic_block bb)
3512 {
3513 tree parms;
3514 size_t i;
3515 tree p;
3516 tree vars = NULL_TREE;
3517 tree static_chain = gimple_call_chain (stmt);
3518
3519 /* Figure out what the parameters are. */
3520 parms = DECL_ARGUMENTS (fn);
3521
3522 /* Loop through the parameter declarations, replacing each with an
3523 equivalent VAR_DECL, appropriately initialized. */
3524 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3525 {
3526 tree val;
3527 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3528 setup_one_parameter (id, p, val, fn, bb, &vars);
3529 }
3530 /* After remapping the parameters, remap their types. This has to be done
3531 in a second loop over all parameters to appropriately remap
3532 variable-sized arrays when the size is specified in a
3533 parameter following the array. */
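/* A hedged illustration (hypothetical GNU C, using a parameter forward
   declaration): for 'void f (int len; int a[len], int len)' the type of
   the replacement for 'a' refers to 'len', so it can only be remapped
   once the replacement for 'len' exists, hence this separate second
   pass. */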
3534 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3535 {
3536 tree *varp = id->decl_map->get (p);
3537 if (varp && VAR_P (*varp))
3538 {
3539 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3540 ? ssa_default_def (id->src_cfun, p) : NULL);
3541 tree var = *varp;
3542 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3543 /* Also remap the default definition if it was remapped
3544 to the default definition of the parameter replacement
3545 by the parameter setup. */
3546 if (def)
3547 {
3548 tree *defp = id->decl_map->get (def);
3549 if (defp
3550 && TREE_CODE (*defp) == SSA_NAME
3551 && SSA_NAME_VAR (*defp) == var)
3552 TREE_TYPE (*defp) = TREE_TYPE (var);
3553 }
3554 }
3555 }
3556
3557 /* Initialize the static chain. */
3558 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3559 gcc_assert (fn != current_function_decl);
3560 if (p)
3561 {
3562 /* No static chain? Seems like a bug in tree-nested.c. */
3563 gcc_assert (static_chain);
3564
3565 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3566 }
3567
3568 declare_inline_vars (id->block, vars);
3569 }
3570
3571
3572 /* Declare a return variable to replace the RESULT_DECL for the
3573 function we are calling. An appropriate DECL_STMT is returned.
3574 The USE_STMT is filled to contain a use of the declaration to
3575 indicate the return value of the function.
3576
3577 RETURN_SLOT, if non-null, is the place where to store the result. It
3578 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3579 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3580
3581 The return value is a (possibly null) value that holds the result
3582 as seen by the caller. */
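/* A rough sketch of the common case (illustrative only): for a call
   'a = foo ();' MODIFY_DEST is 'a'. When it is safe, 'a' itself is
   reused as the substitute for the callee's RESULT_DECL; otherwise a
   temporary variable is created here and the caller later copies it
   into 'a' after the inlined body. */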
3583
3584 static tree
3585 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3586 basic_block entry_bb)
3587 {
3588 tree callee = id->src_fn;
3589 tree result = DECL_RESULT (callee);
3590 tree callee_type = TREE_TYPE (result);
3591 tree caller_type;
3592 tree var, use;
3593
3594 /* Handle type-mismatches in the function declaration return type
3595 vs. the call expression. */
3596 if (modify_dest)
3597 caller_type = TREE_TYPE (modify_dest);
3598 else if (return_slot)
3599 caller_type = TREE_TYPE (return_slot);
3600 else /* No LHS on the call. */
3601 caller_type = TREE_TYPE (TREE_TYPE (callee));
3602
3603 /* We don't need to do anything for functions that don't return anything. */
3604 if (VOID_TYPE_P (callee_type))
3605 return NULL_TREE;
3606
3607 /* If there was a return slot, then the return value is the
3608 dereferenced address of that object. */
3609 if (return_slot)
3610 {
3611 /* The front end shouldn't have used both return_slot and
3612 a modify expression. */
3613 gcc_assert (!modify_dest);
3614 if (DECL_BY_REFERENCE (result))
3615 {
3616 tree return_slot_addr = build_fold_addr_expr (return_slot);
3617 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3618
3619 /* We are going to construct *&return_slot and we can't do that
3620 for variables believed not to be addressable.
3621
3622 FIXME: This check can possibly match, because values returned
3623 via the return slot optimization are not believed to have their
3624 address taken by alias analysis. */
3625 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3626 var = return_slot_addr;
3627 mark_addressable (return_slot);
3628 }
3629 else
3630 {
3631 var = return_slot;
3632 gcc_assert (TREE_CODE (var) != SSA_NAME);
3633 if (TREE_ADDRESSABLE (result))
3634 mark_addressable (var);
3635 }
3636 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3637 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3638 && !DECL_GIMPLE_REG_P (result)
3639 && DECL_P (var))
3640 DECL_GIMPLE_REG_P (var) = 0;
3641
3642 if (!useless_type_conversion_p (callee_type, caller_type))
3643 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3644
3645 use = NULL;
3646 goto done;
3647 }
3648
3649 /* All types requiring non-trivial constructors should have been handled. */
3650 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3651
3652 /* Attempt to avoid creating a new temporary variable. */
3653 if (modify_dest
3654 && TREE_CODE (modify_dest) != SSA_NAME)
3655 {
3656 bool use_it = false;
3657
3658 /* We can't use MODIFY_DEST if there's type promotion involved. */
3659 if (!useless_type_conversion_p (callee_type, caller_type))
3660 use_it = false;
3661
3662 /* ??? If we're assigning to a variable sized type, then we must
3663 reuse the destination variable, because we've no good way to
3664 create variable sized temporaries at this point. */
3665 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3666 use_it = true;
3667
3668 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3669 reuse it as the result of the call directly. Don't do this if
3670 it would promote MODIFY_DEST to addressable. */
3671 else if (TREE_ADDRESSABLE (result))
3672 use_it = false;
3673 else
3674 {
3675 tree base_m = get_base_address (modify_dest);
3676
3677 /* If the base isn't a decl, then it's a pointer, and we don't
3678 know where that's going to go. */
3679 if (!DECL_P (base_m))
3680 use_it = false;
3681 else if (is_global_var (base_m))
3682 use_it = false;
3683 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3684 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3685 && !DECL_GIMPLE_REG_P (result)
3686 && DECL_GIMPLE_REG_P (base_m))
3687 use_it = false;
3688 else if (!TREE_ADDRESSABLE (base_m))
3689 use_it = true;
3690 }
3691
3692 if (use_it)
3693 {
3694 var = modify_dest;
3695 use = NULL;
3696 goto done;
3697 }
3698 }
3699
3700 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3701
3702 var = copy_result_decl_to_var (result, id);
3703 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3704
3705 /* Do not have the rest of GCC warn about this variable as it should
3706 not be visible to the user. */
3707 TREE_NO_WARNING (var) = 1;
3708
3709 declare_inline_vars (id->block, var);
3710
3711 /* Build the use expr. If the return type of the function was
3712 promoted, convert it back to the expected type. */
3713 use = var;
3714 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3715 {
3716 /* If we can match up types by promotion/demotion do so. */
3717 if (fold_convertible_p (caller_type, var))
3718 use = fold_convert (caller_type, var);
3719 else
3720 {
3721 /* ??? For valid programs we should not end up here.
3722 Still if we end up with truly mismatched types here, fall back
3723 to using a MEM_REF to not leak invalid GIMPLE to the following
3724 passes. */
3725 /* Prevent var from being written into SSA form. */
3726 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3727 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3728 DECL_GIMPLE_REG_P (var) = false;
3729 else if (is_gimple_reg_type (TREE_TYPE (var)))
3730 TREE_ADDRESSABLE (var) = true;
3731 use = fold_build2 (MEM_REF, caller_type,
3732 build_fold_addr_expr (var),
3733 build_int_cst (ptr_type_node, 0));
3734 }
3735 }
3736
3737 STRIP_USELESS_TYPE_CONVERSION (use);
3738
3739 if (DECL_BY_REFERENCE (result))
3740 {
3741 TREE_ADDRESSABLE (var) = 1;
3742 var = build_fold_addr_expr (var);
3743 }
3744
3745 done:
3746 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3747 way, when the RESULT_DECL is encountered, it will be
3748 automatically replaced by the VAR_DECL.
3749
3750 When returning by reference, ensure that RESULT_DECL remaps to
3751 gimple_val. */
3752 if (DECL_BY_REFERENCE (result)
3753 && !is_gimple_val (var))
3754 {
3755 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3756 insert_decl_map (id, result, temp);
3757 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3758 its default_def SSA_NAME. */
3759 if (gimple_in_ssa_p (id->src_cfun)
3760 && is_gimple_reg (result))
3761 {
3762 temp = make_ssa_name (temp);
3763 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3764 }
3765 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3766 }
3767 else
3768 insert_decl_map (id, result, var);
3769
3770 /* Remember this so we can ignore it in remap_decls. */
3771 id->retvar = var;
3772 return use;
3773 }
3774
3775 /* Determine if the function can be copied. If so, return NULL. If
3776 not, return a string describing the reason for failure. */
3777
3778 const char *
3779 copy_forbidden (struct function *fun)
3780 {
3781 const char *reason = fun->cannot_be_copied_reason;
3782
3783 /* Only examine the function once. */
3784 if (fun->cannot_be_copied_set)
3785 return reason;
3786
3787 /* We cannot copy a function that receives a non-local goto
3788 because we cannot remap the destination label used in the
3789 function that is performing the non-local goto. */
3790 /* ??? Actually, this should be possible, if we work at it.
3791 No doubt there's just a handful of places that simply
3792 assume it doesn't happen and don't substitute properly. */
3793 if (fun->has_nonlocal_label)
3794 {
3795 reason = G_("function %q+F can never be copied "
3796 "because it receives a non-local goto");
3797 goto fail;
3798 }
3799
3800 if (fun->has_forced_label_in_static)
3801 {
3802 reason = G_("function %q+F can never be copied because it saves "
3803 "address of local label in a static variable");
3804 goto fail;
3805 }
3806
3807 fail:
3808 fun->cannot_be_copied_reason = reason;
3809 fun->cannot_be_copied_set = true;
3810 return reason;
3811 }
3812
3813
3814 static const char *inline_forbidden_reason;
3815
3816 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3817 iff a function cannot be inlined. Also sets the reason why. */
3818
3819 static tree
3820 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3821 struct walk_stmt_info *wip)
3822 {
3823 tree fn = (tree) wip->info;
3824 tree t;
3825 gimple *stmt = gsi_stmt (*gsi);
3826
3827 switch (gimple_code (stmt))
3828 {
3829 case GIMPLE_CALL:
3830 /* Refuse to inline an alloca call unless the user explicitly forced it,
3831 as this may change the program's memory overhead drastically when the
3832 function using alloca is called in a loop. In the GCC present in
3833 SPEC2000, inlining into schedule_block caused it to require 2GB of
3834 RAM instead of 256MB. Don't do so for alloca calls emitted for
3835 VLA objects, as those can't cause unbounded growth (they're always
3836 wrapped inside stack_save/stack_restore regions). */
3837 if (gimple_maybe_alloca_call_p (stmt)
3838 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3839 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3840 {
3841 inline_forbidden_reason
3842 = G_("function %q+F can never be inlined because it uses "
3843 "alloca (override using the always_inline attribute)");
3844 *handled_ops_p = true;
3845 return fn;
3846 }
3847
3848 t = gimple_call_fndecl (stmt);
3849 if (t == NULL_TREE)
3850 break;
3851
3852 /* We cannot inline functions that call setjmp. */
3853 if (setjmp_call_p (t))
3854 {
3855 inline_forbidden_reason
3856 = G_("function %q+F can never be inlined because it uses setjmp");
3857 *handled_ops_p = true;
3858 return t;
3859 }
3860
3861 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3862 switch (DECL_FUNCTION_CODE (t))
3863 {
3864 /* We cannot inline functions that take a variable number of
3865 arguments. */
3866 case BUILT_IN_VA_START:
3867 case BUILT_IN_NEXT_ARG:
3868 case BUILT_IN_VA_END:
3869 inline_forbidden_reason
3870 = G_("function %q+F can never be inlined because it "
3871 "uses variable argument lists");
3872 *handled_ops_p = true;
3873 return t;
3874
3875 case BUILT_IN_LONGJMP:
3876 /* We can't inline functions that call __builtin_longjmp at
3877 all. The non-local goto machinery really requires the
3878 destination be in a different function. If we allow the
3879 function calling __builtin_longjmp to be inlined into the
3880 function calling __builtin_setjmp, Things will Go Awry. */
3881 inline_forbidden_reason
3882 = G_("function %q+F can never be inlined because "
3883 "it uses setjmp-longjmp exception handling");
3884 *handled_ops_p = true;
3885 return t;
3886
3887 case BUILT_IN_NONLOCAL_GOTO:
3888 /* Similarly. */
3889 inline_forbidden_reason
3890 = G_("function %q+F can never be inlined because "
3891 "it uses non-local goto");
3892 *handled_ops_p = true;
3893 return t;
3894
3895 case BUILT_IN_RETURN:
3896 case BUILT_IN_APPLY_ARGS:
3897 /* If a __builtin_apply_args caller would be inlined,
3898 it would be saving the arguments of the function it has
3899 been inlined into. Similarly, __builtin_return would
3900 return from the function the call has been inlined into. */
3901 inline_forbidden_reason
3902 = G_("function %q+F can never be inlined because "
3903 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3904 *handled_ops_p = true;
3905 return t;
3906
3907 default:
3908 break;
3909 }
3910 break;
3911
3912 case GIMPLE_GOTO:
3913 t = gimple_goto_dest (stmt);
3914
3915 /* We will not inline a function which uses computed goto. The
3916 addresses of its local labels, which may be tucked into
3917 global storage, are of course not constant across
3918 instantiations, which causes unexpected behavior. */
3919 if (TREE_CODE (t) != LABEL_DECL)
3920 {
3921 inline_forbidden_reason
3922 = G_("function %q+F can never be inlined "
3923 "because it contains a computed goto");
3924 *handled_ops_p = true;
3925 return t;
3926 }
3927 break;
3928
3929 default:
3930 break;
3931 }
3932
3933 *handled_ops_p = false;
3934 return NULL_TREE;
3935 }
3936
3937 /* Return true if FNDECL is a function that cannot be inlined into
3938 another one. */
3939
3940 static bool
3941 inline_forbidden_p (tree fndecl)
3942 {
3943 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3944 struct walk_stmt_info wi;
3945 basic_block bb;
3946 bool forbidden_p = false;
3947
3948 /* First check for shared reasons not to copy the code. */
3949 inline_forbidden_reason = copy_forbidden (fun);
3950 if (inline_forbidden_reason != NULL)
3951 return true;
3952
3953 /* Next, walk the statements of the function looking for
3954 constructs we can't handle, or that are non-optimal for inlining. */
3955 hash_set<tree> visited_nodes;
3956 memset (&wi, 0, sizeof (wi));
3957 wi.info = (void *) fndecl;
3958 wi.pset = &visited_nodes;
3959
3960 FOR_EACH_BB_FN (bb, fun)
3961 {
3962 gimple *ret;
3963 gimple_seq seq = bb_seq (bb);
3964 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3965 forbidden_p = (ret != NULL);
3966 if (forbidden_p)
3967 break;
3968 }
3969
3970 return forbidden_p;
3971 }
3972 \f
3973 /* Return false if the function FNDECL cannot be inlined on account of its
3974 attributes, true otherwise. */
3975 static bool
3976 function_attribute_inlinable_p (const_tree fndecl)
3977 {
3978 if (targetm.attribute_table)
3979 {
3980 const_tree a;
3981
3982 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3983 {
3984 const_tree name = get_attribute_name (a);
3985 int i;
3986
3987 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3988 if (is_attribute_p (targetm.attribute_table[i].name, name))
3989 return targetm.function_attribute_inlinable_p (fndecl);
3990 }
3991 }
3992
3993 return true;
3994 }
3995
3996 /* Returns nonzero if FN is a function that does not have any
3997 fundamental inline blocking properties. */
3998
3999 bool
4000 tree_inlinable_function_p (tree fn)
4001 {
4002 bool inlinable = true;
4003 bool do_warning;
4004 tree always_inline;
4005
4006 /* If we've already decided this function shouldn't be inlined,
4007 there's no need to check again. */
4008 if (DECL_UNINLINABLE (fn))
4009 return false;
4010
4011 /* We only warn for functions declared `inline' by the user. */
4012 do_warning = (warn_inline
4013 && DECL_DECLARED_INLINE_P (fn)
4014 && !DECL_NO_INLINE_WARNING_P (fn)
4015 && !DECL_IN_SYSTEM_HEADER (fn));
4016
4017 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4018
4019 if (flag_no_inline
4020 && always_inline == NULL)
4021 {
4022 if (do_warning)
4023 warning (OPT_Winline, "function %q+F can never be inlined because it "
4024 "is suppressed using %<-fno-inline%>", fn);
4025 inlinable = false;
4026 }
4027
4028 else if (!function_attribute_inlinable_p (fn))
4029 {
4030 if (do_warning)
4031 warning (OPT_Winline, "function %q+F can never be inlined because it "
4032 "uses attributes conflicting with inlining", fn);
4033 inlinable = false;
4034 }
4035
4036 else if (inline_forbidden_p (fn))
4037 {
4038 /* See if we should warn about uninlinable functions. Previously,
4039 some of these warnings would be issued while trying to expand
4040 the function inline, but that would cause multiple warnings
4041 about functions that would for example call alloca. But since
4042 this is a property of the function, just one warning is enough.
4043 As a bonus we can now give more details about the reason why a
4044 function is not inlinable. */
4045 if (always_inline)
4046 error (inline_forbidden_reason, fn);
4047 else if (do_warning)
4048 warning (OPT_Winline, inline_forbidden_reason, fn);
4049
4050 inlinable = false;
4051 }
4052
4053 /* Squirrel away the result so that we don't have to check again. */
4054 DECL_UNINLINABLE (fn) = !inlinable;
4055
4056 return inlinable;
4057 }
4058
4059 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4060 word size, take a possible memcpy call into account, and return the
4061 cost based on whether we are optimizing for size or speed, according to SPEED_P. */
4062
4063 int
4064 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4065 {
4066 HOST_WIDE_INT size;
4067
4068 gcc_assert (!VOID_TYPE_P (type));
4069
4070 if (TREE_CODE (type) == VECTOR_TYPE)
4071 {
4072 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4073 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4074 int orig_mode_size
4075 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4076 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4077 return ((orig_mode_size + simd_mode_size - 1)
4078 / simd_mode_size);
4079 }
4080
4081 size = int_size_in_bytes (type);
4082
4083 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4084 /* Cost of a memcpy call, 3 arguments and the call. */
4085 return 4;
4086 else
4087 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4088 }
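/* A worked example with illustrative numbers only: assuming
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 24-byte aggregate costs
   (24 + 8 - 1) / 8 == 3, while a 1024-byte aggregate exceeds the
   8 * 4 == 32 byte threshold and is charged the flat memcpy cost of 4. */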
4089
4090 /* Returns the cost of operation CODE, according to WEIGHTS. */
4091
4092 static int
4093 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4094 tree op1 ATTRIBUTE_UNUSED, tree op2)
4095 {
4096 switch (code)
4097 {
4098 /* These are "free" conversions, or their presumed cost
4099 is folded into other operations. */
4100 case RANGE_EXPR:
4101 CASE_CONVERT:
4102 case COMPLEX_EXPR:
4103 case PAREN_EXPR:
4104 case VIEW_CONVERT_EXPR:
4105 return 0;
4106
4107 /* Assign cost of 1 to usual operations.
4108 ??? We may consider mapping RTL costs to this. */
4109 case COND_EXPR:
4110 case VEC_COND_EXPR:
4111 case VEC_PERM_EXPR:
4112
4113 case PLUS_EXPR:
4114 case POINTER_PLUS_EXPR:
4115 case POINTER_DIFF_EXPR:
4116 case MINUS_EXPR:
4117 case MULT_EXPR:
4118 case MULT_HIGHPART_EXPR:
4119
4120 case ADDR_SPACE_CONVERT_EXPR:
4121 case FIXED_CONVERT_EXPR:
4122 case FIX_TRUNC_EXPR:
4123
4124 case NEGATE_EXPR:
4125 case FLOAT_EXPR:
4126 case MIN_EXPR:
4127 case MAX_EXPR:
4128 case ABS_EXPR:
4129 case ABSU_EXPR:
4130
4131 case LSHIFT_EXPR:
4132 case RSHIFT_EXPR:
4133 case LROTATE_EXPR:
4134 case RROTATE_EXPR:
4135
4136 case BIT_IOR_EXPR:
4137 case BIT_XOR_EXPR:
4138 case BIT_AND_EXPR:
4139 case BIT_NOT_EXPR:
4140
4141 case TRUTH_ANDIF_EXPR:
4142 case TRUTH_ORIF_EXPR:
4143 case TRUTH_AND_EXPR:
4144 case TRUTH_OR_EXPR:
4145 case TRUTH_XOR_EXPR:
4146 case TRUTH_NOT_EXPR:
4147
4148 case LT_EXPR:
4149 case LE_EXPR:
4150 case GT_EXPR:
4151 case GE_EXPR:
4152 case EQ_EXPR:
4153 case NE_EXPR:
4154 case ORDERED_EXPR:
4155 case UNORDERED_EXPR:
4156
4157 case UNLT_EXPR:
4158 case UNLE_EXPR:
4159 case UNGT_EXPR:
4160 case UNGE_EXPR:
4161 case UNEQ_EXPR:
4162 case LTGT_EXPR:
4163
4164 case CONJ_EXPR:
4165
4166 case PREDECREMENT_EXPR:
4167 case PREINCREMENT_EXPR:
4168 case POSTDECREMENT_EXPR:
4169 case POSTINCREMENT_EXPR:
4170
4171 case REALIGN_LOAD_EXPR:
4172
4173 case WIDEN_SUM_EXPR:
4174 case WIDEN_MULT_EXPR:
4175 case DOT_PROD_EXPR:
4176 case SAD_EXPR:
4177 case WIDEN_MULT_PLUS_EXPR:
4178 case WIDEN_MULT_MINUS_EXPR:
4179 case WIDEN_LSHIFT_EXPR:
4180
4181 case VEC_WIDEN_MULT_HI_EXPR:
4182 case VEC_WIDEN_MULT_LO_EXPR:
4183 case VEC_WIDEN_MULT_EVEN_EXPR:
4184 case VEC_WIDEN_MULT_ODD_EXPR:
4185 case VEC_UNPACK_HI_EXPR:
4186 case VEC_UNPACK_LO_EXPR:
4187 case VEC_UNPACK_FLOAT_HI_EXPR:
4188 case VEC_UNPACK_FLOAT_LO_EXPR:
4189 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4190 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4191 case VEC_PACK_TRUNC_EXPR:
4192 case VEC_PACK_SAT_EXPR:
4193 case VEC_PACK_FIX_TRUNC_EXPR:
4194 case VEC_PACK_FLOAT_EXPR:
4195 case VEC_WIDEN_LSHIFT_HI_EXPR:
4196 case VEC_WIDEN_LSHIFT_LO_EXPR:
4197 case VEC_DUPLICATE_EXPR:
4198 case VEC_SERIES_EXPR:
4199
4200 return 1;
4201
4202 /* A few special cases of expensive operations. This is useful
4203 to avoid inlining functions having too many of these. */
4204 case TRUNC_DIV_EXPR:
4205 case CEIL_DIV_EXPR:
4206 case FLOOR_DIV_EXPR:
4207 case ROUND_DIV_EXPR:
4208 case EXACT_DIV_EXPR:
4209 case TRUNC_MOD_EXPR:
4210 case CEIL_MOD_EXPR:
4211 case FLOOR_MOD_EXPR:
4212 case ROUND_MOD_EXPR:
4213 case RDIV_EXPR:
4214 if (TREE_CODE (op2) != INTEGER_CST)
4215 return weights->div_mod_cost;
4216 return 1;
4217
4218 /* Bit-field insertion needs several shift and mask operations. */
4219 case BIT_INSERT_EXPR:
4220 return 3;
4221
4222 default:
4223 /* We expect a copy assignment with no operator. */
4224 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4225 return 0;
4226 }
4227 }
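/* For example (under eni_time_weights as initialized below, where
   div_mod_cost is 10): 'a / b_2' is charged div_mod_cost, while
   'a / 16' is charged 1, since division by a constant can usually be
   strength-reduced. */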
4228
4229
4230 /* Estimate number of instructions that will be created by expanding
4231 the statements in the statement sequence STMTS.
4232 WEIGHTS contains weights attributed to various constructs. */
4233
4234 int
4235 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4236 {
4237 int cost;
4238 gimple_stmt_iterator gsi;
4239
4240 cost = 0;
4241 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4242 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4243
4244 return cost;
4245 }
4246
4247
4248 /* Estimate number of instructions that will be created by expanding STMT.
4249 WEIGHTS contains weights attributed to various constructs. */
4250
4251 int
4252 estimate_num_insns (gimple *stmt, eni_weights *weights)
4253 {
4254 unsigned cost, i;
4255 enum gimple_code code = gimple_code (stmt);
4256 tree lhs;
4257 tree rhs;
4258
4259 switch (code)
4260 {
4261 case GIMPLE_ASSIGN:
4262 /* Try to estimate the cost of assignments. We have two cases to
4263 deal with:
4264 1) Simple assignments to registers;
4265 2) Stores to things that must live in memory. This includes
4266 "normal" stores to scalars, but also assignments of large
4267 structures, or constructors of big arrays;
4268
4269 Let us look at these two cases, assuming we have "a = b + C":
4270 <GIMPLE_ASSIGN <var_decl "a">
4271 <plus_expr <var_decl "b"> <constant C>>
4272 If "a" is a GIMPLE register, the assignment to it is free on almost
4273 any target, because "a" usually ends up in a real register. Hence
4274 the only cost of this expression comes from the PLUS_EXPR, and we
4275 can ignore the GIMPLE_ASSIGN.
4276 If "a" is not a GIMPLE register, the assignment to "a" will most
4277 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4278 of moving something into "a", which we compute using the function
4279 estimate_move_cost. */
4280 if (gimple_clobber_p (stmt))
4281 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4282
4283 lhs = gimple_assign_lhs (stmt);
4284 rhs = gimple_assign_rhs1 (stmt);
4285
4286 cost = 0;
4287
4288 /* Account for the cost of moving to / from memory. */
4289 if (gimple_store_p (stmt))
4290 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4291 if (gimple_assign_load_p (stmt))
4292 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4293
4294 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4295 gimple_assign_rhs1 (stmt),
4296 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4297 == GIMPLE_BINARY_RHS
4298 ? gimple_assign_rhs2 (stmt) : NULL);
4299 break;
4300
4301 case GIMPLE_COND:
4302 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4303 gimple_op (stmt, 0),
4304 gimple_op (stmt, 1));
4305 break;
4306
4307 case GIMPLE_SWITCH:
4308 {
4309 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4310 /* Take into account the cost of the switch + guess 2 conditional jumps for
4311 each case label.
4312
4313 TODO: once the switch expansion logic is sufficiently separated, we can
4314 do a better job of estimating the cost of the switch. */
4315 if (weights->time_based)
4316 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4317 else
4318 cost = gimple_switch_num_labels (switch_stmt) * 2;
4319 }
4320 break;
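/* For example (illustrative): with the estimate above, a switch with
   16 labels (including the default) is charged floor_log2 (16) * 2 == 8
   when estimating time, but 16 * 2 == 32 when estimating size. */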
4321
4322 case GIMPLE_CALL:
4323 {
4324 tree decl;
4325
4326 if (gimple_call_internal_p (stmt))
4327 return 0;
4328 else if ((decl = gimple_call_fndecl (stmt))
4329 && fndecl_built_in_p (decl))
4330 {
4331 /* Do not special-case builtins where we see the body.
4332 This just confuses the inliner. */
4333 struct cgraph_node *node;
4334 if (!(node = cgraph_node::get (decl))
4335 || node->definition)
4336 ;
4337 /* For builtins that are likely expanded to nothing or
4338 inlined, do not account operand costs. */
4339 else if (is_simple_builtin (decl))
4340 return 0;
4341 else if (is_inexpensive_builtin (decl))
4342 return weights->target_builtin_call_cost;
4343 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4344 {
4345 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4346 special-case the cheap expansion we do here.
4347 ??? This asks for a more general solution. */
4348 switch (DECL_FUNCTION_CODE (decl))
4349 {
4350 case BUILT_IN_POW:
4351 case BUILT_IN_POWF:
4352 case BUILT_IN_POWL:
4353 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4354 && (real_equal
4355 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4356 &dconst2)))
4357 return estimate_operator_cost
4358 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4359 gimple_call_arg (stmt, 0));
4360 break;
4361
4362 default:
4363 break;
4364 }
4365 }
4366 }
4367
4368 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4369 if (gimple_call_lhs (stmt))
4370 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4371 weights->time_based);
4372 for (i = 0; i < gimple_call_num_args (stmt); i++)
4373 {
4374 tree arg = gimple_call_arg (stmt, i);
4375 cost += estimate_move_cost (TREE_TYPE (arg),
4376 weights->time_based);
4377 }
4378 break;
4379 }
4380
4381 case GIMPLE_RETURN:
4382 return weights->return_cost;
4383
4384 case GIMPLE_GOTO:
4385 case GIMPLE_LABEL:
4386 case GIMPLE_NOP:
4387 case GIMPLE_PHI:
4388 case GIMPLE_PREDICT:
4389 case GIMPLE_DEBUG:
4390 return 0;
4391
4392 case GIMPLE_ASM:
4393 {
4394 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4395 /* 1000 means infinity. This avoids overflows later
4396 with very long asm statements. */
4397 if (count > 1000)
4398 count = 1000;
4399 /* If this asm is asm inline, count anything as minimum size. */
4400 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4401 count = MIN (1, count);
4402 return MAX (1, count);
4403 }
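/* For example (illustrative): an asm whose template contains roughly
   three ';'-separated instructions counts as 3, but the same asm marked
   'asm inline' counts as 1. */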
4404
4405 case GIMPLE_RESX:
4406 /* This is either going to be an external function call with one
4407 argument, or two register copy statements plus a goto. */
4408 return 2;
4409
4410 case GIMPLE_EH_DISPATCH:
4411 /* ??? This is going to turn into a switch statement. Ideally
4412 we'd have a look at the eh region and estimate the number of
4413 edges involved. */
4414 return 10;
4415
4416 case GIMPLE_BIND:
4417 return estimate_num_insns_seq (
4418 gimple_bind_body (as_a <gbind *> (stmt)),
4419 weights);
4420
4421 case GIMPLE_EH_FILTER:
4422 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4423
4424 case GIMPLE_CATCH:
4425 return estimate_num_insns_seq (gimple_catch_handler (
4426 as_a <gcatch *> (stmt)),
4427 weights);
4428
4429 case GIMPLE_TRY:
4430 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4431 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4432
4433 /* OMP directives are generally very expensive. */
4434
4435 case GIMPLE_OMP_RETURN:
4436 case GIMPLE_OMP_SECTIONS_SWITCH:
4437 case GIMPLE_OMP_ATOMIC_STORE:
4438 case GIMPLE_OMP_CONTINUE:
4439 /* ...except these, which are cheap. */
4440 return 0;
4441
4442 case GIMPLE_OMP_ATOMIC_LOAD:
4443 return weights->omp_cost;
4444
4445 case GIMPLE_OMP_FOR:
4446 return (weights->omp_cost
4447 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4448 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4449
4450 case GIMPLE_OMP_PARALLEL:
4451 case GIMPLE_OMP_TASK:
4452 case GIMPLE_OMP_CRITICAL:
4453 case GIMPLE_OMP_MASTER:
4454 case GIMPLE_OMP_TASKGROUP:
4455 case GIMPLE_OMP_ORDERED:
4456 case GIMPLE_OMP_SCAN:
4457 case GIMPLE_OMP_SECTION:
4458 case GIMPLE_OMP_SECTIONS:
4459 case GIMPLE_OMP_SINGLE:
4460 case GIMPLE_OMP_TARGET:
4461 case GIMPLE_OMP_TEAMS:
4462 return (weights->omp_cost
4463 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4464
4465 case GIMPLE_TRANSACTION:
4466 return (weights->tm_cost
4467 + estimate_num_insns_seq (gimple_transaction_body (
4468 as_a <gtransaction *> (stmt)),
4469 weights));
4470
4471 default:
4472 gcc_unreachable ();
4473 }
4474
4475 return cost;
4476 }
4477
4478 /* Estimate number of instructions that will be created by expanding
4479 function FNDECL. WEIGHTS contains weights attributed to various
4480 constructs. */
4481
4482 int
4483 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4484 {
4485 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4486 gimple_stmt_iterator bsi;
4487 basic_block bb;
4488 int n = 0;
4489
4490 gcc_assert (my_function && my_function->cfg);
4491 FOR_EACH_BB_FN (bb, my_function)
4492 {
4493 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4494 n += estimate_num_insns (gsi_stmt (bsi), weights);
4495 }
4496
4497 return n;
4498 }
4499
4500
4501 /* Initializes weights used by estimate_num_insns. */
4502
4503 void
4504 init_inline_once (void)
4505 {
4506 eni_size_weights.call_cost = 1;
4507 eni_size_weights.indirect_call_cost = 3;
4508 eni_size_weights.target_builtin_call_cost = 1;
4509 eni_size_weights.div_mod_cost = 1;
4510 eni_size_weights.omp_cost = 40;
4511 eni_size_weights.tm_cost = 10;
4512 eni_size_weights.time_based = false;
4513 eni_size_weights.return_cost = 1;
4514
4515 /* Estimating the time for a call is difficult, since we have no idea what the
4516 called function does. In the current uses of eni_time_weights,
4517 underestimating the cost does less harm than overestimating it, so
4518 we choose a rather small value here. */
4519 eni_time_weights.call_cost = 10;
4520 eni_time_weights.indirect_call_cost = 15;
4521 eni_time_weights.target_builtin_call_cost = 1;
4522 eni_time_weights.div_mod_cost = 10;
4523 eni_time_weights.omp_cost = 40;
4524 eni_time_weights.tm_cost = 40;
4525 eni_time_weights.time_based = true;
4526 eni_time_weights.return_cost = 2;
4527 }
4528
4529
4530 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4531
4532 static void
4533 prepend_lexical_block (tree current_block, tree new_block)
4534 {
4535 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4536 BLOCK_SUBBLOCKS (current_block) = new_block;
4537 BLOCK_SUPERCONTEXT (new_block) = current_block;
4538 }
4539
4540 /* Add local variables from CALLEE to CALLER. */
4541
4542 static inline void
4543 add_local_variables (struct function *callee, struct function *caller,
4544 copy_body_data *id)
4545 {
4546 tree var;
4547 unsigned ix;
4548
4549 FOR_EACH_LOCAL_DECL (callee, ix, var)
4550 if (!can_be_nonlocal (var, id))
4551 {
4552 tree new_var = remap_decl (var, id);
4553
4554 /* Remap debug-expressions. */
4555 if (VAR_P (new_var)
4556 && DECL_HAS_DEBUG_EXPR_P (var)
4557 && new_var != var)
4558 {
4559 tree tem = DECL_DEBUG_EXPR (var);
4560 bool old_regimplify = id->regimplify;
4561 id->remapping_type_depth++;
4562 walk_tree (&tem, copy_tree_body_r, id, NULL);
4563 id->remapping_type_depth--;
4564 id->regimplify = old_regimplify;
4565 SET_DECL_DEBUG_EXPR (new_var, tem);
4566 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4567 }
4568 add_local_decl (caller, new_var);
4569 }
4570 }
4571
4572 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4573 have brought in or introduced any debug stmts for SRCVAR. */
4574
4575 static inline void
4576 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4577 {
4578 tree *remappedvarp = id->decl_map->get (srcvar);
4579
4580 if (!remappedvarp)
4581 return;
4582
4583 if (!VAR_P (*remappedvarp))
4584 return;
4585
4586 if (*remappedvarp == id->retvar)
4587 return;
4588
4589 tree tvar = target_for_debug_bind (*remappedvarp);
4590 if (!tvar)
4591 return;
4592
4593 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4594 id->call_stmt);
4595 gimple_seq_add_stmt (bindings, stmt);
4596 }
4597
4598 /* For each inlined variable for which we may have debug bind stmts,
4599 add before GSI a final debug stmt resetting it, marking the end of
4600 its life, so that var-tracking knows it doesn't have to compute
4601 further locations for it. */
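/* In a GIMPLE dump the reset typically shows up as a trailing
     # DEBUG var => NULL
   bind for each such variable (illustrative). */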
4602
4603 static inline void
4604 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4605 {
4606 tree var;
4607 unsigned ix;
4608 gimple_seq bindings = NULL;
4609
4610 if (!gimple_in_ssa_p (id->src_cfun))
4611 return;
4612
4613 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4614 return;
4615
4616 for (var = DECL_ARGUMENTS (id->src_fn);
4617 var; var = DECL_CHAIN (var))
4618 reset_debug_binding (id, var, &bindings);
4619
4620 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4621 reset_debug_binding (id, var, &bindings);
4622
4623 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4624 }
4625
4626 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4627
4628 static bool
4629 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4630 bitmap to_purge)
4631 {
4632 tree use_retvar;
4633 tree fn;
4634 hash_map<tree, tree> *dst;
4635 hash_map<tree, tree> *st = NULL;
4636 tree return_slot;
4637 tree modify_dest;
4638 struct cgraph_edge *cg_edge;
4639 cgraph_inline_failed_t reason;
4640 basic_block return_block;
4641 edge e;
4642 gimple_stmt_iterator gsi, stmt_gsi;
4643 bool successfully_inlined = false;
4644 bool purge_dead_abnormal_edges;
4645 gcall *call_stmt;
4646 unsigned int prop_mask, src_properties;
4647 struct function *dst_cfun;
4648 tree simduid;
4649 use_operand_p use;
4650 gimple *simtenter_stmt = NULL;
4651 vec<tree> *simtvars_save;
4652
4653 /* The gimplifier uses input_location in too many places, such as
4654 internal_get_tmp_var (). */
4655 location_t saved_location = input_location;
4656 input_location = gimple_location (stmt);
4657
4658 /* From here on, we're only interested in CALL_EXPRs. */
4659 call_stmt = dyn_cast <gcall *> (stmt);
4660 if (!call_stmt)
4661 goto egress;
4662
4663 cg_edge = id->dst_node->get_edge (stmt);
4664 gcc_checking_assert (cg_edge);
4665 /* First, see if we can figure out what function is being called.
4666 If we cannot, then there is no hope of inlining the function. */
4667 if (cg_edge->indirect_unknown_callee)
4668 goto egress;
4669 fn = cg_edge->callee->decl;
4670 gcc_checking_assert (fn);
4671
4672 /* If FN is a declaration of a function in a nested scope that was
4673 globally declared inline, we don't set its DECL_INITIAL.
4674 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4675 C++ front-end uses it for cdtors to refer to their internal
4676 declarations, that are not real functions. Fortunately those
4677 don't have trees to be saved, so we can tell by checking their
4678 gimple_body. */
4679 if (!DECL_INITIAL (fn)
4680 && DECL_ABSTRACT_ORIGIN (fn)
4681 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4682 fn = DECL_ABSTRACT_ORIGIN (fn);
4683
4684 /* Don't try to inline functions that are not well-suited to inlining. */
4685 if (cg_edge->inline_failed)
4686 {
4687 reason = cg_edge->inline_failed;
4688 /* If this call was originally indirect, we do not want to emit any
4689 inlining related warnings or sorry messages because there are no
4690 guarantees regarding those. */
4691 if (cg_edge->indirect_inlining_edge)
4692 goto egress;
4693
4694 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4695 /* For extern inline functions that get redefined we have always
4696 silently ignored the always_inline flag. Better behavior would
4697 be to be able to keep both bodies and use the extern inline body
4698 for inlining, but we can't do that because frontends overwrite
4699 the body. */
4700 && !cg_edge->callee->redefined_extern_inline
4701 /* During early inline pass, report only when optimization is
4702 not turned on. */
4703 && (symtab->global_info_ready
4704 || !optimize
4705 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4706 /* PR 20090218-1_0.c. Body can be provided by another module. */
4707 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4708 {
4709 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4710 cgraph_inline_failed_string (reason));
4711 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4712 inform (gimple_location (stmt), "called from here");
4713 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4714 inform (DECL_SOURCE_LOCATION (cfun->decl),
4715 "called from this function");
4716 }
4717 else if (warn_inline
4718 && DECL_DECLARED_INLINE_P (fn)
4719 && !DECL_NO_INLINE_WARNING_P (fn)
4720 && !DECL_IN_SYSTEM_HEADER (fn)
4721 && reason != CIF_UNSPECIFIED
4722 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4723 /* Do not warn about not inlined recursive calls. */
4724 && !cg_edge->recursive_p ()
4725 /* Avoid warnings during early inline pass. */
4726 && symtab->global_info_ready)
4727 {
4728 auto_diagnostic_group d;
4729 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4730 fn, _(cgraph_inline_failed_string (reason))))
4731 {
4732 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4733 inform (gimple_location (stmt), "called from here");
4734 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4735 inform (DECL_SOURCE_LOCATION (cfun->decl),
4736 "called from this function");
4737 }
4738 }
4739 goto egress;
4740 }
4741 id->src_node = cg_edge->callee;
4742
4743 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4744 and redirect to the function being thunked. */
4745 if (id->src_node->thunk.thunk_p)
4746 {
4747 cgraph_edge *edge;
4748 tree virtual_offset = NULL;
4749 profile_count count = cg_edge->count;
4750 tree op;
4751 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4752
4753 cg_edge->remove ();
4754 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4755 gimple_uid (stmt),
4756 profile_count::one (),
4757 profile_count::one (),
4758 true);
4759 edge->count = count;
4760 if (id->src_node->thunk.virtual_offset_p)
4761 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4762 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4763 NULL);
4764 gsi_insert_before (&iter, gimple_build_assign (op,
4765 gimple_call_arg (stmt, 0)),
4766 GSI_NEW_STMT);
4767 gcc_assert (id->src_node->thunk.this_adjusting);
4768 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4769 virtual_offset, id->src_node->thunk.indirect_offset);
4770
4771 gimple_call_set_arg (stmt, 0, op);
4772 gimple_call_set_fndecl (stmt, edge->callee->decl);
4773 update_stmt (stmt);
4774 id->src_node->remove ();
4775 expand_call_inline (bb, stmt, id, to_purge);
4776 maybe_remove_unused_call_args (cfun, stmt);
4777 return true;
4778 }
4779 fn = cg_edge->callee->decl;
4780 cg_edge->callee->get_untransformed_body ();
4781
4782 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4783 cg_edge->callee->verify ();
4784
4785 /* We will be inlining this callee. */
4786 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4787
4788 /* Update the callers EH personality. */
4789 if (DECL_FUNCTION_PERSONALITY (fn))
4790 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4791 = DECL_FUNCTION_PERSONALITY (fn);
4792
4793 /* Split the block before the GIMPLE_CALL. */
4794 stmt_gsi = gsi_for_stmt (stmt);
4795 gsi_prev (&stmt_gsi);
4796 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4797 bb = e->src;
4798 return_block = e->dest;
4799 remove_edge (e);
4800
4801 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4802 been the source of abnormal edges. In this case, schedule
4803 the removal of dead abnormal edges. */
4804 gsi = gsi_start_bb (return_block);
4805 gsi_next (&gsi);
4806 purge_dead_abnormal_edges = gsi_end_p (gsi);
4807
4808 stmt_gsi = gsi_start_bb (return_block);
4809
4810 /* Build a block containing code to initialize the arguments, the
4811 actual inline expansion of the body, and a label for the return
4812 statements within the function to jump to. The type of the
4813 statement expression is the return type of the function call.
4814 ??? If the call does not have an associated block then we will
4815 remap all callee blocks to NULL, effectively dropping most of
4816 its debug information. This should only happen for calls to
4817 artificial decls inserted by the compiler itself. We need to
4818 either link the inlined blocks into the caller block tree or
4819 not refer to them in any way to not break GC for locations. */
4820 if (tree block = gimple_block (stmt))
4821 {
4822 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4823 to make inlined_function_outer_scope_p return true on this BLOCK. */
4824 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4825 if (loc == UNKNOWN_LOCATION)
4826 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4827 if (loc == UNKNOWN_LOCATION)
4828 loc = BUILTINS_LOCATION;
4829 id->block = make_node (BLOCK);
4830 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4831 BLOCK_SOURCE_LOCATION (id->block) = loc;
4832 prepend_lexical_block (block, id->block);
4833 }
4834
4835 /* Local declarations will be replaced by their equivalents in this map. */
4836 st = id->decl_map;
4837 id->decl_map = new hash_map<tree, tree>;
4838 dst = id->debug_map;
4839 id->debug_map = NULL;
4840 if (flag_stack_reuse != SR_NONE)
4841 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4842
4843 /* Record the function we are about to inline. */
4844 id->src_fn = fn;
4845 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4846 id->reset_location = DECL_IGNORED_P (fn);
4847 id->call_stmt = call_stmt;
4848
4849 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4850 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4851 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4852 simtvars_save = id->dst_simt_vars;
4853 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4854 && (simduid = bb->loop_father->simduid) != NULL_TREE
4855 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4856 && single_imm_use (simduid, &use, &simtenter_stmt)
4857 && is_gimple_call (simtenter_stmt)
4858 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4859 vec_alloc (id->dst_simt_vars, 0);
4860 else
4861 id->dst_simt_vars = NULL;
4862
4863 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4864 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4865
4866 /* If the src function contains an IFN_VA_ARG, then so will the dst
4867 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4868 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4869 src_properties = id->src_cfun->curr_properties & prop_mask;
4870 if (src_properties != prop_mask)
4871 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4872 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4873
4874 gcc_assert (!id->src_cfun->after_inlining);
4875
4876 id->entry_bb = bb;
4877 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4878 {
4879 gimple_stmt_iterator si = gsi_last_bb (bb);
4880 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4881 NOT_TAKEN),
4882 GSI_NEW_STMT);
4883 }
4884 initialize_inlined_parameters (id, stmt, fn, bb);
4885 if (debug_nonbind_markers_p && debug_inline_points && id->block
4886 && inlined_function_outer_scope_p (id->block))
4887 {
4888 gimple_stmt_iterator si = gsi_last_bb (bb);
4889 gsi_insert_after (&si, gimple_build_debug_inline_entry
4890 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4891 GSI_NEW_STMT);
4892 }
4893
4894 if (DECL_INITIAL (fn))
4895 {
4896 if (gimple_block (stmt))
4897 {
4898 tree *var;
4899
4900 prepend_lexical_block (id->block,
4901 remap_blocks (DECL_INITIAL (fn), id));
4902 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4903 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4904 == NULL_TREE));
4905 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4906 otherwise for DWARF the DW_TAG_formal_parameter entries will not be
4907 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4908 under it. The parameters can then be evaluated in the debugger,
4909 but don't show up in backtraces. */
4910 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4911 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4912 {
4913 tree v = *var;
4914 *var = TREE_CHAIN (v);
4915 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4916 BLOCK_VARS (id->block) = v;
4917 }
4918 else
4919 var = &TREE_CHAIN (*var);
4920 }
4921 else
4922 remap_blocks_to_null (DECL_INITIAL (fn), id);
4923 }
4924
4925 /* Return statements in the function body will be replaced by jumps
4926 to the RET_LABEL. */
4927 gcc_assert (DECL_INITIAL (fn));
4928 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4929
4930 /* Find the LHS to which the result of this call is assigned. */
4931 return_slot = NULL;
4932 if (gimple_call_lhs (stmt))
4933 {
4934 modify_dest = gimple_call_lhs (stmt);
4935
4936 /* The function which we are inlining might not return a value,
4937 in which case we should issue a warning that the function
4938 does not return a value. In that case the optimizers will
4939 see that the variable to which the value is assigned was not
4940 initialized. We do not want to issue a warning about that
4941 uninitialized variable. */
4942 if (DECL_P (modify_dest))
4943 TREE_NO_WARNING (modify_dest) = 1;
4944
4945 if (gimple_call_return_slot_opt_p (call_stmt))
4946 {
4947 return_slot = modify_dest;
4948 modify_dest = NULL;
4949 }
4950 }
4951 else
4952 modify_dest = NULL;
4953
4954 /* If we are inlining a call to the C++ operator new, we don't want
4955 to use type based alias analysis on the return value. Otherwise
4956 we may get confused if the compiler sees that the inlined new
4957 function returns a pointer which was just deleted. See bug
4958 33407. */
4959 if (DECL_IS_OPERATOR_NEW_P (fn))
4960 {
4961 return_slot = NULL;
4962 modify_dest = NULL;
4963 }
4964
4965 /* Declare the return variable for the function. */
4966 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4967
4968 /* Add local vars in this inlined callee to caller. */
4969 add_local_variables (id->src_cfun, cfun, id);
4970
4971 if (id->src_node->clone.performed_splits)
4972 {
4973 /* Any calls from the inlined function will be turned into calls from the
4974 function we inline into. We must preserve the notes about how to split
4975 parameters so that such calls can be redirected/updated. */
4976 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
4977 for (unsigned i = 0; i < len; i++)
4978 {
4979 ipa_param_performed_split ps
4980 = (*id->src_node->clone.performed_splits)[i];
4981 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
4982 vec_safe_push (id->dst_node->clone.performed_splits, ps);
4983 }
4984
4985 if (flag_checking)
4986 {
4987 len = vec_safe_length (id->dst_node->clone.performed_splits);
4988 for (unsigned i = 0; i < len; i++)
4989 {
4990 ipa_param_performed_split *ps1
4991 = &(*id->dst_node->clone.performed_splits)[i];
4992 for (unsigned j = i + 1; j < len; j++)
4993 {
4994 ipa_param_performed_split *ps2
4995 = &(*id->dst_node->clone.performed_splits)[j];
4996 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
4997 || ps1->unit_offset != ps2->unit_offset);
4998 }
4999 }
5000 }
5001 }
5002
5003 if (dump_enabled_p ())
5004 {
5005 char buf[128];
5006 snprintf (buf, sizeof(buf), "%4.2f",
5007 cg_edge->sreal_frequency ().to_double ());
5008 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5009 call_stmt,
5010 "Inlining %C to %C with frequency %s\n",
5011 id->src_node, id->dst_node, buf);
5012 if (dump_file && (dump_flags & TDF_DETAILS))
5013 {
5014 id->src_node->dump (dump_file);
5015 id->dst_node->dump (dump_file);
5016 }
5017 }
5018
5019 /* This is it. Duplicate the callee body. Assume callee is
5020 pre-gimplified. Note that we must not alter the caller
5021 function in any way before this point, as this CALL_EXPR may be
5022 a self-referential call; if we're calling ourselves, we need to
5023 duplicate our body before altering anything. */
5024 copy_body (id, bb, return_block, NULL);
5025
5026 reset_debug_bindings (id, stmt_gsi);
5027
5028 if (flag_stack_reuse != SR_NONE)
5029 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5030 if (!TREE_THIS_VOLATILE (p))
5031 {
5032 tree *varp = id->decl_map->get (p);
5033 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5034 {
5035 tree clobber = build_clobber (TREE_TYPE (*varp));
5036 gimple *clobber_stmt;
5037 clobber_stmt = gimple_build_assign (*varp, clobber);
5038 gimple_set_location (clobber_stmt, gimple_location (stmt));
5039 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5040 }
5041 }
5042
5043 /* Reset the escaped solution. */
5044 if (cfun->gimple_df)
5045 pt_solution_reset (&cfun->gimple_df->escaped);
5046
5047 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5048 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5049 {
5050 size_t nargs = gimple_call_num_args (simtenter_stmt);
5051 vec<tree> *vars = id->dst_simt_vars;
5052 auto_vec<tree> newargs (nargs + vars->length ());
5053 for (size_t i = 0; i < nargs; i++)
5054 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5055 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5056 {
5057 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5058 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5059 }
5060 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5061 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5062 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5063 gsi_replace (&gsi, g, false);
5064 }
5065 vec_free (id->dst_simt_vars);
5066 id->dst_simt_vars = simtvars_save;
5067
5068 /* Clean up. */
5069 if (id->debug_map)
5070 {
5071 delete id->debug_map;
5072 id->debug_map = dst;
5073 }
5074 delete id->decl_map;
5075 id->decl_map = st;
5076
5077 /* Unlink the call's virtual operands before replacing it. */
5078 unlink_stmt_vdef (stmt);
5079 if (gimple_vdef (stmt)
5080 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5081 release_ssa_name (gimple_vdef (stmt));
5082
5083 /* If the inlined function returns a result that we care about,
5084 substitute the GIMPLE_CALL with an assignment of the return
5085 variable to the LHS of the call. That is, if STMT was
5086 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5087 if (use_retvar && gimple_call_lhs (stmt))
5088 {
5089 gimple *old_stmt = stmt;
5090 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5091 gimple_set_location (stmt, gimple_location (old_stmt));
5092 gsi_replace (&stmt_gsi, stmt, false);
5093 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5094 /* Append a clobber for id->retvar if easily possible. */
5095 if (flag_stack_reuse != SR_NONE
5096 && id->retvar
5097 && VAR_P (id->retvar)
5098 && id->retvar != return_slot
5099 && id->retvar != modify_dest
5100 && !TREE_THIS_VOLATILE (id->retvar)
5101 && !is_gimple_reg (id->retvar)
5102 && !stmt_ends_bb_p (stmt))
5103 {
5104 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5105 gimple *clobber_stmt;
5106 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5107 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5108 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5109 }
5110 }
5111 else
5112 {
5113 /* Handle the case of inlining a function with no return
5114 statement, which causes the return value to become undefined. */
5115 if (gimple_call_lhs (stmt)
5116 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5117 {
5118 tree name = gimple_call_lhs (stmt);
5119 tree var = SSA_NAME_VAR (name);
5120 tree def = var ? ssa_default_def (cfun, var) : NULL;
5121
5122 if (def)
5123 {
5124 /* If the variable is used undefined, make this name
5125 undefined via a move. */
5126 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5127 gsi_replace (&stmt_gsi, stmt, true);
5128 }
5129 else
5130 {
5131 if (!var)
5132 {
5133 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5134 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5135 }
5136 /* Otherwise make this variable undefined. */
5137 gsi_remove (&stmt_gsi, true);
5138 set_ssa_default_def (cfun, var, name);
5139 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5140 }
5141 }
5142 /* Replace with a clobber for id->retvar. */
5143 else if (flag_stack_reuse != SR_NONE
5144 && id->retvar
5145 && VAR_P (id->retvar)
5146 && id->retvar != return_slot
5147 && id->retvar != modify_dest
5148 && !TREE_THIS_VOLATILE (id->retvar)
5149 && !is_gimple_reg (id->retvar))
5150 {
5151 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5152 gimple *clobber_stmt;
5153 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5154 gimple_set_location (clobber_stmt, gimple_location (stmt));
5155 gsi_replace (&stmt_gsi, clobber_stmt, false);
5156 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5157 }
5158 else
5159 gsi_remove (&stmt_gsi, true);
5160 }
5161
5162 if (purge_dead_abnormal_edges)
5163 bitmap_set_bit (to_purge, return_block->index);
5164
5165 /* If the value of the new expression is ignored, that's OK. We
5166 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5167 the equivalent inlined version either. */
5168 if (is_gimple_assign (stmt))
5169 {
5170 gcc_assert (gimple_assign_single_p (stmt)
5171 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5172 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5173 }
5174
5175 id->add_clobbers_to_eh_landing_pads = 0;
5176
5177 /* Output the inlining info for this abstract function, since it has been
5178 inlined. If we don't do this now, we can lose the information about the
5179 variables in the function when the blocks get blown away as soon as we
5180 remove the cgraph node. */
5181 if (gimple_block (stmt))
5182 (*debug_hooks->outlining_inline_function) (fn);
5183
5184 /* Update callgraph if needed. */
5185 cg_edge->callee->remove ();
5186
5187 id->block = NULL_TREE;
5188 id->retvar = NULL_TREE;
5189 successfully_inlined = true;
5190
5191 egress:
5192 input_location = saved_location;
5193 return successfully_inlined;
5194 }
5195
5196 /* Expand call statements reachable from STMT_P.
5197 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5198 in a MODIFY_EXPR. */
5199
5200 static bool
5201 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5202 bitmap to_purge)
5203 {
5204 gimple_stmt_iterator gsi;
5205 bool inlined = false;
5206
5207 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5208 {
5209 gimple *stmt = gsi_stmt (gsi);
5210 gsi_prev (&gsi);
5211
5212 if (is_gimple_call (stmt)
5213 && !gimple_call_internal_p (stmt))
5214 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5215 }
5216
5217 return inlined;
5218 }
5219
5220
5221 /* Walk all basic blocks created after FIRST and try to fold every statement
5222 in the STATEMENTS pointer set. */
5223
5224 static void
5225 fold_marked_statements (int first, hash_set<gimple *> *statements)
5226 {
5227 auto_bitmap to_purge;
5228 for (; first < last_basic_block_for_fn (cfun); first++)
5229 if (BASIC_BLOCK_FOR_FN (cfun, first))
5230 {
5231 gimple_stmt_iterator gsi;
5232
5233 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5234 !gsi_end_p (gsi);
5235 gsi_next (&gsi))
5236 if (statements->contains (gsi_stmt (gsi)))
5237 {
5238 gimple *old_stmt = gsi_stmt (gsi);
5239 tree old_decl
5240 = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5241
5242 if (old_decl && fndecl_built_in_p (old_decl))
5243 {
5244 /* Folding builtins can create multiple statements,
5245 so we need to look at all of them. */
5246 gimple_stmt_iterator i2 = gsi;
5247 gsi_prev (&i2);
5248 if (fold_stmt (&gsi))
5249 {
5250 gimple *new_stmt;
5251 /* If a builtin at the end of a bb folded into nothing,
5252 the following loop won't work. */
5253 if (gsi_end_p (gsi))
5254 {
5255 cgraph_update_edges_for_call_stmt (old_stmt,
5256 old_decl, NULL);
5257 break;
5258 }
5259 if (gsi_end_p (i2))
5260 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5261 else
5262 gsi_next (&i2);
5263 while (1)
5264 {
5265 new_stmt = gsi_stmt (i2);
5266 update_stmt (new_stmt);
5267 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5268 new_stmt);
5269
5270 if (new_stmt == gsi_stmt (gsi))
5271 {
5272 /* It is okay to check only for the very last
5273 of these statements.  If it is a throwing
5274 statement, nothing will change.  If it isn't,
5275 this can remove EH edges.  The only problematic
5276 case would be some intermediate statement
5277 throwing while the last one does not; that would
5278 mean we'd have to split the block, which we can't
5279 do here and we'd lose anyway.  And as builtins
5280 probably never throw, this all
5281 is moot anyway. */
5282 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5283 new_stmt))
5284 bitmap_set_bit (to_purge, first);
5285 break;
5286 }
5287 gsi_next (&i2);
5288 }
5289 }
5290 }
5291 else if (fold_stmt (&gsi))
5292 {
5293 /* Re-read the statement from GSI as fold_stmt() may
5294 have changed it. */
5295 gimple *new_stmt = gsi_stmt (gsi);
5296 update_stmt (new_stmt);
5297
5298 if (is_gimple_call (old_stmt)
5299 || is_gimple_call (new_stmt))
5300 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5301 new_stmt);
5302
5303 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5304 bitmap_set_bit (to_purge, first);
5305 }
5306 }
5307 }
5308 gimple_purge_all_dead_eh_edges (to_purge);
5309 }
5310
5311 /* Expand calls to inline functions in the body of FN. */
5312
5313 unsigned int
5314 optimize_inline_calls (tree fn)
5315 {
5316 copy_body_data id;
5317 basic_block bb;
5318 int last = n_basic_blocks_for_fn (cfun);
5319 bool inlined_p = false;
5320
5321 /* Clear out ID. */
5322 memset (&id, 0, sizeof (id));
5323
5324 id.src_node = id.dst_node = cgraph_node::get (fn);
5325 gcc_assert (id.dst_node->definition);
5326 id.dst_fn = fn;
5327 /* Or any functions that aren't finished yet. */
5328 if (current_function_decl)
5329 id.dst_fn = current_function_decl;
5330
5331 id.copy_decl = copy_decl_maybe_to_var;
5332 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5333 id.transform_new_cfg = false;
5334 id.transform_return_to_modify = true;
5335 id.transform_parameter = true;
5336 id.transform_lang_insert_block = NULL;
5337 id.statements_to_fold = new hash_set<gimple *>;
5338
5339 push_gimplify_context ();
5340
5341 /* We make no attempts to keep dominance info up-to-date. */
5342 free_dominance_info (CDI_DOMINATORS);
5343 free_dominance_info (CDI_POST_DOMINATORS);
5344
5345 /* Register specific gimple functions. */
5346 gimple_register_cfg_hooks ();
5347
5348 /* Reach the trees by walking over the CFG, and note the
5349 enclosing basic-blocks in the call edges. */
5350 /* We walk the blocks going forward, because inlined function bodies
5351 will split id->current_basic_block, and the new blocks will
5352 follow it; we'll trudge through them, processing their CALL_EXPRs
5353 along the way. */
5354 auto_bitmap to_purge;
5355 FOR_EACH_BB_FN (bb, cfun)
5356 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5357
5358 pop_gimplify_context (NULL);
5359
5360 if (flag_checking)
5361 {
5362 struct cgraph_edge *e;
5363
5364 id.dst_node->verify ();
5365
5366 /* Double check that we inlined everything we are supposed to inline. */
5367 for (e = id.dst_node->callees; e; e = e->next_callee)
5368 gcc_assert (e->inline_failed);
5369 }
5370
5371 /* Fold queued statements. */
5372 update_max_bb_count ();
5373 fold_marked_statements (last, id.statements_to_fold);
5374 delete id.statements_to_fold;
5375
5376 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5377 We need to do this after fold_marked_statements since that may walk
5378 the SSA use-def chain. */
5379 unsigned i;
5380 bitmap_iterator bi;
5381 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5382 {
5383 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5384 if (bb)
5385 {
5386 gimple_purge_dead_eh_edges (bb);
5387 gimple_purge_dead_abnormal_call_edges (bb);
5388 }
5389 }
5390
5391 gcc_assert (!id.debug_stmts.exists ());
5392
5393 /* If we didn't inline into the function there is nothing to do. */
5394 if (!inlined_p)
5395 return 0;
5396
5397 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5398 number_blocks (fn);
5399
5400 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5401
5402 if (flag_checking)
5403 id.dst_node->verify ();
5404
5405 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5406 not possible yet - the IPA passes might mark various functions as not
5407 throwing and they don't care to proactively update local EH info. This is
5408 done later in the fixup_cfg pass, which also executes the verification. */
5409 return (TODO_update_ssa
5410 | TODO_cleanup_cfg
5411 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5412 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5413 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5414 ? TODO_rebuild_frequencies : 0));
5415 }
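
/* A minimal usage sketch (illustrative only; NODE is a hypothetical
   cgraph_node whose body still contains calls to be inlined).  The inline
   transform stage is expected to drive the routine above roughly as

     push_cfun (DECL_STRUCT_FUNCTION (node->decl));
     unsigned int todo = optimize_inline_calls (node->decl);
     pop_cfun ();

   and hand the returned TODO flags back to the pass manager; the real
   driver with all of its bookkeeping lives in ipa-inline-transform.c.  */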
5416
5417 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5418
5419 tree
5420 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5421 {
5422 enum tree_code code = TREE_CODE (*tp);
5423 enum tree_code_class cl = TREE_CODE_CLASS (code);
5424
5425 /* We make copies of most nodes. */
5426 if (IS_EXPR_CODE_CLASS (cl)
5427 || code == TREE_LIST
5428 || code == TREE_VEC
5429 || code == TYPE_DECL
5430 || code == OMP_CLAUSE)
5431 {
5432 /* Because the chain gets clobbered when we make a copy, we save it
5433 here. */
5434 tree chain = NULL_TREE, new_tree;
5435
5436 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5437 chain = TREE_CHAIN (*tp);
5438
5439 /* Copy the node. */
5440 new_tree = copy_node (*tp);
5441
5442 *tp = new_tree;
5443
5444 /* Now, restore the chain, if appropriate. That will cause
5445 walk_tree to walk into the chain as well. */
5446 if (code == PARM_DECL
5447 || code == TREE_LIST
5448 || code == OMP_CLAUSE)
5449 TREE_CHAIN (*tp) = chain;
5450
5451 /* For now, we don't update BLOCKs when we make copies. So, we
5452 have to nullify all BIND_EXPRs. */
5453 if (TREE_CODE (*tp) == BIND_EXPR)
5454 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5455 }
5456 else if (code == CONSTRUCTOR)
5457 {
5458 /* CONSTRUCTOR nodes need special handling because
5459 we need to duplicate the vector of elements. */
5460 tree new_tree;
5461
5462 new_tree = copy_node (*tp);
5463 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5464 *tp = new_tree;
5465 }
5466 else if (code == STATEMENT_LIST)
5467 /* We used to just abort on STATEMENT_LIST, but we can run into them
5468 with statement-expressions (c++/40975). */
5469 copy_statement_list (tp);
5470 else if (TREE_CODE_CLASS (code) == tcc_type)
5471 *walk_subtrees = 0;
5472 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5473 *walk_subtrees = 0;
5474 else if (TREE_CODE_CLASS (code) == tcc_constant)
5475 *walk_subtrees = 0;
5476 return NULL_TREE;
5477 }
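
/* A minimal usage sketch (EXPR being a hypothetical GENERIC expression):
   deep-copying is done by handing copy_tree_r to walk_tree, which rewrites
   the tree in place through TP:

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   Afterwards EXPR consists of freshly copied expression nodes, while
   types, declarations and constants are still shared with the original,
   matching the *walk_subtrees = 0 cases above.  */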
5478
5479 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5480 information indicating to what new SAVE_EXPR this one should be mapped,
5481 use that one. Otherwise, create a new node, enter it in ST, and use
5482 the new node as the replacement. */
5483
5484 static void
5485 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5486 {
5487 tree *n;
5488 tree t;
5489
5490 /* See if we already encountered this SAVE_EXPR. */
5491 n = st->get (*tp);
5492
5493 /* If we didn't already remap this SAVE_EXPR, do so now. */
5494 if (!n)
5495 {
5496 t = copy_node (*tp);
5497
5498 /* Remember this SAVE_EXPR. */
5499 st->put (*tp, t);
5500 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5501 st->put (t, t);
5502 }
5503 else
5504 {
5505 /* We've already walked into this SAVE_EXPR; don't do it again. */
5506 *walk_subtrees = 0;
5507 t = *n;
5508 }
5509
5510 /* Replace this SAVE_EXPR with the copy. */
5511 *tp = t;
5512 }
5513
5514 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5515 label, copies the declaration and enters it in the decl map of WI->info
5516 (which is really a 'copy_body_data *'). */
5517
5518 static tree
5519 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5520 bool *handled_ops_p ATTRIBUTE_UNUSED,
5521 struct walk_stmt_info *wi)
5522 {
5523 copy_body_data *id = (copy_body_data *) wi->info;
5524 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5525
5526 if (stmt)
5527 {
5528 tree decl = gimple_label_label (stmt);
5529
5530 /* Copy the decl and remember the copy. */
5531 insert_decl_map (id, decl, id->copy_decl (decl, id));
5532 }
5533
5534 return NULL_TREE;
5535 }
5536
5537 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5538 struct walk_stmt_info *wi);
5539
5540 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5541 Using the decl map of the copy_body_data carried in the walk_stmt_info,
5542 remaps all local declarations to appropriate replacements in gimple
5543 operands. */
5544
5545 static tree
5546 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5547 {
5548 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5549 copy_body_data *id = (copy_body_data *) wi->info;
5550 hash_map<tree, tree> *st = id->decl_map;
5551 tree *n;
5552 tree expr = *tp;
5553
5554 /* For recursive invocations this is no longer the LHS itself. */
5555 bool is_lhs = wi->is_lhs;
5556 wi->is_lhs = false;
5557
5558 if (TREE_CODE (expr) == SSA_NAME)
5559 {
5560 *tp = remap_ssa_name (*tp, id);
5561 *walk_subtrees = 0;
5562 if (is_lhs)
5563 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5564 }
5565 /* Only a local declaration (variable or label). */
5566 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5567 || TREE_CODE (expr) == LABEL_DECL)
5568 {
5569 /* Lookup the declaration. */
5570 n = st->get (expr);
5571
5572 /* If it's there, remap it. */
5573 if (n)
5574 *tp = *n;
5575 *walk_subtrees = 0;
5576 }
5577 else if (TREE_CODE (expr) == STATEMENT_LIST
5578 || TREE_CODE (expr) == BIND_EXPR
5579 || TREE_CODE (expr) == SAVE_EXPR)
5580 gcc_unreachable ();
5581 else if (TREE_CODE (expr) == TARGET_EXPR)
5582 {
5583 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5584 It's OK for this to happen if it was part of a subtree that
5585 isn't immediately expanded, such as operand 2 of another
5586 TARGET_EXPR. */
5587 if (!TREE_OPERAND (expr, 1))
5588 {
5589 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5590 TREE_OPERAND (expr, 3) = NULL_TREE;
5591 }
5592 }
5593 else if (TREE_CODE (expr) == OMP_CLAUSE)
5594 {
5595 /* Before the omplower pass completes, some OMP clauses can contain
5596 sequences that are neither copied by gimple_seq_copy nor walked by
5597 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5598 in those situations, we have to copy and process them explicitly. */
5599
5600 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5601 {
5602 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5603 seq = duplicate_remap_omp_clause_seq (seq, wi);
5604 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5605 }
5606 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5607 {
5608 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5609 seq = duplicate_remap_omp_clause_seq (seq, wi);
5610 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5611 }
5612 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5613 {
5614 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5615 seq = duplicate_remap_omp_clause_seq (seq, wi);
5616 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5617 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5618 seq = duplicate_remap_omp_clause_seq (seq, wi);
5619 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5620 }
5621 }
5622
5623 /* Keep iterating. */
5624 return NULL_TREE;
5625 }
5626
5627
5628 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5629 Using the decl map of the copy_body_data carried in the walk_stmt_info,
5630 remaps all local declarations to appropriate replacements in gimple
5631 statements. */
5632
5633 static tree
5634 replace_locals_stmt (gimple_stmt_iterator *gsip,
5635 bool *handled_ops_p ATTRIBUTE_UNUSED,
5636 struct walk_stmt_info *wi)
5637 {
5638 copy_body_data *id = (copy_body_data *) wi->info;
5639 gimple *gs = gsi_stmt (*gsip);
5640
5641 if (gbind *stmt = dyn_cast <gbind *> (gs))
5642 {
5643 tree block = gimple_bind_block (stmt);
5644
5645 if (block)
5646 {
5647 remap_block (&block, id);
5648 gimple_bind_set_block (stmt, block);
5649 }
5650
5651 /* This will remap a lot of the same decls again, but this should be
5652 harmless. */
5653 if (gimple_bind_vars (stmt))
5654 {
5655 tree old_var, decls = gimple_bind_vars (stmt);
5656
5657 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5658 if (!can_be_nonlocal (old_var, id)
5659 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5660 remap_decl (old_var, id);
5661
5662 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5663 id->prevent_decl_creation_for_types = true;
5664 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5665 id->prevent_decl_creation_for_types = false;
5666 }
5667 }
5668
5669 /* Keep iterating. */
5670 return NULL_TREE;
5671 }
5672
5673 /* Create a copy of SEQ and remap all decls in it. */
5674
5675 static gimple_seq
5676 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5677 {
5678 if (!seq)
5679 return NULL;
5680
5681 /* Any labels in OMP sequences can only be referred to from within the
5682 sequence itself, so we can do both the marking and the remapping here. */
5683 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5684 gimple_seq copy = gimple_seq_copy (seq);
5685 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5686 return copy;
5687 }
5688
5689 /* Copies everything in SEQ and replaces variables and labels local to
5690 current_function_decl. */
5691
5692 gimple_seq
5693 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5694 {
5695 copy_body_data id;
5696 struct walk_stmt_info wi;
5697 gimple_seq copy;
5698
5699 /* There's nothing to do for an empty sequence. */
5700 if (seq == NULL)
5701 return seq;
5702
5703 /* Set up ID. */
5704 memset (&id, 0, sizeof (id));
5705 id.src_fn = current_function_decl;
5706 id.dst_fn = current_function_decl;
5707 id.src_cfun = cfun;
5708 id.decl_map = new hash_map<tree, tree>;
5709 id.debug_map = NULL;
5710
5711 id.copy_decl = copy_decl_no_change;
5712 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5713 id.transform_new_cfg = false;
5714 id.transform_return_to_modify = false;
5715 id.transform_parameter = false;
5716 id.transform_lang_insert_block = NULL;
5717
5718 /* Walk the tree once to find local labels. */
5719 memset (&wi, 0, sizeof (wi));
5720 hash_set<tree> visited;
5721 wi.info = &id;
5722 wi.pset = &visited;
5723 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5724
5725 copy = gimple_seq_copy (seq);
5726
5727 /* Walk the copy, remapping decls. */
5728 memset (&wi, 0, sizeof (wi));
5729 wi.info = &id;
5730 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5731
5732 /* Clean up. */
5733 delete id.decl_map;
5734 if (id.debug_map)
5735 delete id.debug_map;
5736 if (id.dependence_map)
5737 {
5738 delete id.dependence_map;
5739 id.dependence_map = NULL;
5740 }
5741
5742 return copy;
5743 }
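
/* A minimal usage sketch (SEQ being a hypothetical gimple_seq taken from
   the current function, e.g. a clause body that must be emitted twice):

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   COPY can then be inserted elsewhere in current_function_decl; local
   variables, labels and SSA names in it have been replaced by fresh
   copies, so the two sequences do not share any locals.  */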
5744
5745
5746 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5747
5748 static tree
5749 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5750 {
5751 if (*tp == data)
5752 return (tree) data;
5753 else
5754 return NULL;
5755 }
5756
5757 DEBUG_FUNCTION bool
5758 debug_find_tree (tree top, tree search)
5759 {
5760 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5761 }
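
/* A minimal usage sketch: from within gdb, with TOP and SEARCH being
   trees of interest in the inferior,

     (gdb) call debug_find_tree (top, search)

   returns true iff SEARCH occurs somewhere inside TOP.  */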
5762
5763
5764 /* Declare the variables created by the inliner. Add all the variables in
5765 VARS to BLOCK. */
5766
5767 static void
5768 declare_inline_vars (tree block, tree vars)
5769 {
5770 tree t;
5771 for (t = vars; t; t = DECL_CHAIN (t))
5772 {
5773 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5774 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5775 add_local_decl (cfun, t);
5776 }
5777
5778 if (block)
5779 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5780 }
5781
5782 /* Finish setting up COPY, the copy of DECL. DECL originally belonged to
5783 ID->src_fn, but the copy will be part of ID->dst_fn. Returns COPY with
5784 its debug info, context and mode adjusted. */
5785
5786 tree
5787 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5788 {
5789 /* Don't generate debug information for the copy if we wouldn't have
5790 generated it for the original either. */
5791 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5792 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5793
5794 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5795 declaration inspired this copy. */
5796 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5797
5798 /* The new variable/label has no RTL, yet. */
5799 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5800 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5801 SET_DECL_RTL (copy, 0);
5802 /* For vector typed decls make sure to update DECL_MODE according
5803 to the new function context. */
5804 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5805 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5806
5807 /* These args would always appear unused, if not for this. */
5808 TREE_USED (copy) = 1;
5809
5810 /* Set the context for the new declaration. */
5811 if (!DECL_CONTEXT (decl))
5812 /* Globals stay global. */
5813 ;
5814 else if (DECL_CONTEXT (decl) != id->src_fn)
5815 /* Things that weren't in the scope of the function we're inlining
5816 from aren't in the scope we're inlining to, either. */
5817 ;
5818 else if (TREE_STATIC (decl))
5819 /* Function-scoped static variables should stay in the original
5820 function. */
5821 ;
5822 else
5823 {
5824 /* Ordinary automatic local variables are now in the scope of the
5825 new function. */
5826 DECL_CONTEXT (copy) = id->dst_fn;
5827 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5828 {
5829 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5830 DECL_ATTRIBUTES (copy)
5831 = tree_cons (get_identifier ("omp simt private"), NULL,
5832 DECL_ATTRIBUTES (copy));
5833 id->dst_simt_vars->safe_push (copy);
5834 }
5835 }
5836
5837 return copy;
5838 }
5839
5840 /* Create a new VAR_DECL that is identical in all respects to DECL except
5841 that DECL can be either a PARM_DECL or a RESULT_DECL. The original
5842 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5843
5844 tree
5845 copy_decl_to_var (tree decl, copy_body_data *id)
5846 {
5847 tree copy, type;
5848
5849 gcc_assert (TREE_CODE (decl) == PARM_DECL
5850 || TREE_CODE (decl) == RESULT_DECL);
5851
5852 type = TREE_TYPE (decl);
5853
5854 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5855 VAR_DECL, DECL_NAME (decl), type);
5856 if (DECL_PT_UID_SET_P (decl))
5857 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5858 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5859 TREE_READONLY (copy) = TREE_READONLY (decl);
5860 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5861 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5862
5863 return copy_decl_for_dup_finish (id, decl, copy);
5864 }
5865
5866 /* Like copy_decl_to_var, but create a return slot object instead of a
5867 pointer variable for return by invisible reference. */
5868
5869 static tree
5870 copy_result_decl_to_var (tree decl, copy_body_data *id)
5871 {
5872 tree copy, type;
5873
5874 gcc_assert (TREE_CODE (decl) == PARM_DECL
5875 || TREE_CODE (decl) == RESULT_DECL);
5876
5877 type = TREE_TYPE (decl);
5878 if (DECL_BY_REFERENCE (decl))
5879 type = TREE_TYPE (type);
5880
5881 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5882 VAR_DECL, DECL_NAME (decl), type);
5883 if (DECL_PT_UID_SET_P (decl))
5884 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5885 TREE_READONLY (copy) = TREE_READONLY (decl);
5886 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5887 if (!DECL_BY_REFERENCE (decl))
5888 {
5889 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5890 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5891 }
5892
5893 return copy_decl_for_dup_finish (id, decl, copy);
5894 }
5895
5896 tree
5897 copy_decl_no_change (tree decl, copy_body_data *id)
5898 {
5899 tree copy;
5900
5901 copy = copy_node (decl);
5902
5903 /* The COPY is not abstract; it will be generated in DST_FN. */
5904 DECL_ABSTRACT_P (copy) = false;
5905 lang_hooks.dup_lang_specific_decl (copy);
5906
5907 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5908 been taken; it's for internal bookkeeping in expand_goto_internal. */
5909 if (TREE_CODE (copy) == LABEL_DECL)
5910 {
5911 TREE_ADDRESSABLE (copy) = 0;
5912 LABEL_DECL_UID (copy) = -1;
5913 }
5914
5915 return copy_decl_for_dup_finish (id, decl, copy);
5916 }
5917
5918 static tree
5919 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5920 {
5921 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5922 return copy_decl_to_var (decl, id);
5923 else
5924 return copy_decl_no_change (decl, id);
5925 }
5926
5927 /* Return a copy of the function's argument tree without any modifications. */
5928
5929 static tree
5930 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
5931 {
5932 tree arg, *parg;
5933 tree new_parm = NULL;
5934
5935 parg = &new_parm;
5936 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
5937 {
5938 tree new_tree = remap_decl (arg, id);
5939 if (TREE_CODE (new_tree) != PARM_DECL)
5940 new_tree = id->copy_decl (arg, id);
5941 lang_hooks.dup_lang_specific_decl (new_tree);
5942 *parg = new_tree;
5943 parg = &DECL_CHAIN (new_tree);
5944 }
5945 return new_parm;
5946 }
5947
5948 /* Return a copy of the function's static chain. */
5949 static tree
5950 copy_static_chain (tree static_chain, copy_body_data * id)
5951 {
5952 tree *chain_copy, *pvar;
5953
5954 chain_copy = &static_chain;
5955 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5956 {
5957 tree new_tree = remap_decl (*pvar, id);
5958 lang_hooks.dup_lang_specific_decl (new_tree);
5959 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5960 *pvar = new_tree;
5961 }
5962 return static_chain;
5963 }
5964
5965 /* Return true if the function is allowed to be versioned.
5966 This is a guard for the versioning functionality. */
5967
5968 bool
5969 tree_versionable_function_p (tree fndecl)
5970 {
5971 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5972 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5973 }
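
/* A minimal usage sketch (NODE being a hypothetical cgraph_node that the
   IPA code would like to specialize):

     if (!tree_versionable_function_p (node->decl))
       return;

   i.e. the predicate is meant as a guard in front of the cloning and
   versioning machinery below, bailing out e.g. for "noclone" functions.  */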
5974
5975 /* Update clone info after duplication. */
5976
5977 static void
5978 update_clone_info (copy_body_data * id)
5979 {
5980 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
5981 = id->dst_node->clone.performed_splits;
5982 if (cur_performed_splits)
5983 {
5984 unsigned len = cur_performed_splits->length ();
5985 for (unsigned i = 0; i < len; i++)
5986 {
5987 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
5988 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
5989 }
5990 }
5991
5992 struct cgraph_node *node;
5993 if (!id->dst_node->clones)
5994 return;
5995 for (node = id->dst_node->clones; node != id->dst_node;)
5996 {
5997 /* First update replace maps to match the new body. */
5998 if (node->clone.tree_map)
5999 {
6000 unsigned int i;
6001 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6002 {
6003 struct ipa_replace_map *replace_info;
6004 replace_info = (*node->clone.tree_map)[i];
6005 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6006 }
6007 }
6008 if (node->clone.performed_splits)
6009 {
6010 unsigned len = vec_safe_length (node->clone.performed_splits);
6011 for (unsigned i = 0; i < len; i++)
6012 {
6013 ipa_param_performed_split *ps
6014 = &(*node->clone.performed_splits)[i];
6015 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6016 }
6017 }
6018 if (unsigned len = vec_safe_length (cur_performed_splits))
6019 {
6020 /* We do not want to add the current performed splits when we are saving
6021 a copy of the function body for later use during inlining, since that
6022 would just duplicate all entries. So check whether anything referring
6023 to the first dummy_decl is already present. */
6024 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6025 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6026 for (unsigned i = 0; i < dst_len; i++)
6027 if ((*node->clone.performed_splits)[i].dummy_decl
6028 == first->dummy_decl)
6029 {
6030 len = 0;
6031 break;
6032 }
6033
6034 for (unsigned i = 0; i < len; i++)
6035 vec_safe_push (node->clone.performed_splits,
6036 (*cur_performed_splits)[i]);
6037 if (flag_checking)
6038 {
6039 for (unsigned i = 0; i < dst_len; i++)
6040 {
6041 ipa_param_performed_split *ps1
6042 = &(*node->clone.performed_splits)[i];
6043 for (unsigned j = i + 1; j < dst_len; j++)
6044 {
6045 ipa_param_performed_split *ps2
6046 = &(*node->clone.performed_splits)[j];
6047 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6048 || ps1->unit_offset != ps2->unit_offset);
6049 }
6050 }
6051 }
6052 }
6053
6054 if (node->clones)
6055 node = node->clones;
6056 else if (node->next_sibling_clone)
6057 node = node->next_sibling_clone;
6058 else
6059 {
6060 while (node != id->dst_node && !node->next_sibling_clone)
6061 node = node->clone_of;
6062 if (node != id->dst_node)
6063 node = node->next_sibling_clone;
6064 }
6065 }
6066 }
6067
6068 /* Create a copy of a function's tree.
6069 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6070 of the original function and the new copied function
6071 respectively. In case we want to replace a DECL
6072 tree with another tree while duplicating the function's
6073 body, TREE_MAP represents the mapping between these
6074 trees. If UPDATE_CLONES is set, the call_stmt fields
6075 of edges of clones of the function will be updated.
6076
6077 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6078 the function parameters and return value) should be modified.
6079 If non-NULL, BLOCKS_TO_COPY determines what basic blocks to copy.
6080 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6081 */
6082 void
6083 tree_function_versioning (tree old_decl, tree new_decl,
6084 vec<ipa_replace_map *, va_gc> *tree_map,
6085 ipa_param_adjustments *param_adjustments,
6086 bool update_clones, bitmap blocks_to_copy,
6087 basic_block new_entry)
6088 {
6089 struct cgraph_node *old_version_node;
6090 struct cgraph_node *new_version_node;
6091 copy_body_data id;
6092 tree p;
6093 unsigned i;
6094 struct ipa_replace_map *replace_info;
6095 basic_block old_entry_block, bb;
6096 auto_vec<gimple *, 10> init_stmts;
6097 tree vars = NULL_TREE;
6098
6099 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6100 && TREE_CODE (new_decl) == FUNCTION_DECL);
6101 DECL_POSSIBLY_INLINED (old_decl) = 1;
6102
6103 old_version_node = cgraph_node::get (old_decl);
6104 gcc_checking_assert (old_version_node);
6105 new_version_node = cgraph_node::get (new_decl);
6106 gcc_checking_assert (new_version_node);
6107
6108 /* Copy over debug args. */
6109 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6110 {
6111 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6112 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6113 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6114 old_debug_args = decl_debug_args_lookup (old_decl);
6115 if (old_debug_args)
6116 {
6117 new_debug_args = decl_debug_args_insert (new_decl);
6118 *new_debug_args = vec_safe_copy (*old_debug_args);
6119 }
6120 }
6121
6122 /* Output the inlining info for this abstract function, since it has been
6123 inlined. If we don't do this now, we can lose the information about the
6124 variables in the function when the blocks get blown away as soon as we
6125 remove the cgraph node. */
6126 (*debug_hooks->outlining_inline_function) (old_decl);
6127
6128 DECL_ARTIFICIAL (new_decl) = 1;
6129 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6130 if (DECL_ORIGIN (old_decl) == old_decl)
6131 old_version_node->used_as_abstract_origin = true;
6132 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6133
6134 /* Prepare the data structures for the tree copy. */
6135 memset (&id, 0, sizeof (id));
6136
6137 /* Collect statements that will need folding after the body is copied. */
6138 id.statements_to_fold = new hash_set<gimple *>;
6139
6140 id.decl_map = new hash_map<tree, tree>;
6141 id.debug_map = NULL;
6142 id.src_fn = old_decl;
6143 id.dst_fn = new_decl;
6144 id.src_node = old_version_node;
6145 id.dst_node = new_version_node;
6146 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6147 id.blocks_to_copy = blocks_to_copy;
6148
6149 id.copy_decl = copy_decl_no_change;
6150 id.transform_call_graph_edges
6151 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6152 id.transform_new_cfg = true;
6153 id.transform_return_to_modify = false;
6154 id.transform_parameter = false;
6155 id.transform_lang_insert_block = NULL;
6156
6157 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6158 (DECL_STRUCT_FUNCTION (old_decl));
6159 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6160 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6161 initialize_cfun (new_decl, old_decl,
6162 new_entry ? new_entry->count : old_entry_block->count);
6163 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6164 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6165 = id.src_cfun->gimple_df->ipa_pta;
6166
6167 /* Copy the function's static chain. */
6168 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6169 if (p)
6170 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6171 = copy_static_chain (p, &id);
6172
6173 auto_vec<int, 16> new_param_indices;
6174 ipa_param_adjustments *old_param_adjustments
6175 = old_version_node->clone.param_adjustments;
6176 if (old_param_adjustments)
6177 old_param_adjustments->get_updated_indices (&new_param_indices);
6178
6179 /* If there's a tree_map, prepare for substitution. */
6180 if (tree_map)
6181 for (i = 0; i < tree_map->length (); i++)
6182 {
6183 gimple *init;
6184 replace_info = (*tree_map)[i];
6185
6186 int p = replace_info->parm_num;
6187 if (old_param_adjustments)
6188 p = new_param_indices[p];
6189
6190 tree parm;
6191 tree req_type, new_type;
6192
6193 for (parm = DECL_ARGUMENTS (old_decl); p;
6194 parm = DECL_CHAIN (parm))
6195 p--;
6196 tree old_tree = parm;
6197 req_type = TREE_TYPE (parm);
6198 new_type = TREE_TYPE (replace_info->new_tree);
6199 if (!useless_type_conversion_p (req_type, new_type))
6200 {
6201 if (fold_convertible_p (req_type, replace_info->new_tree))
6202 replace_info->new_tree
6203 = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
6204 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6205 replace_info->new_tree
6206 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6207 replace_info->new_tree);
6208 else
6209 {
6210 if (dump_file)
6211 {
6212 fprintf (dump_file, " const ");
6213 print_generic_expr (dump_file,
6214 replace_info->new_tree);
6215 fprintf (dump_file,
6216 " can't be converted to param ");
6217 print_generic_expr (dump_file, parm);
6218 fprintf (dump_file, "\n");
6219 }
6220 old_tree = NULL;
6221 }
6222 }
6223
6224 if (old_tree)
6225 {
6226 init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
6227 id.src_fn, NULL, &vars);
6228 if (init)
6229 init_stmts.safe_push (init);
6230 }
6231 }
6232
6233 ipa_param_body_adjustments *param_body_adjs = NULL;
6234 if (param_adjustments)
6235 {
6236 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6237 new_decl, old_decl,
6238 &id, &vars, tree_map);
6239 id.param_body_adjs = param_body_adjs;
6240 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6241 }
6242 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6243 DECL_ARGUMENTS (new_decl)
6244 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6245
6246 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6247 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6248
6249 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6250
6251 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6252 /* Add local vars. */
6253 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6254
6255 if (DECL_RESULT (old_decl) == NULL_TREE)
6256 ;
6257 else if (param_adjustments && param_adjustments->m_skip_return
6258 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6259 {
6260 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6261 &id);
6262 declare_inline_vars (NULL, resdecl_repl);
6263 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6264
6265 DECL_RESULT (new_decl)
6266 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6267 RESULT_DECL, NULL_TREE, void_type_node);
6268 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6269 DECL_IS_MALLOC (new_decl) = false;
6270 cfun->returns_struct = 0;
6271 cfun->returns_pcc_struct = 0;
6272 }
6273 else
6274 {
6275 tree old_name;
6276 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6277 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6278 if (gimple_in_ssa_p (id.src_cfun)
6279 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6280 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6281 {
6282 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6283 insert_decl_map (&id, old_name, new_name);
6284 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6285 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6286 }
6287 }
6288
6289 /* Set up the destination function's loop tree. */
6290 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6291 {
6292 cfun->curr_properties &= ~PROP_loops;
6293 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6294 cfun->curr_properties |= PROP_loops;
6295 }
6296
6297 /* Copy the Function's body. */
6298 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6299 new_entry);
6300
6301 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6302 number_blocks (new_decl);
6303
6304 /* We want to create the BB unconditionally, so that the addition of
6305 debug stmts doesn't affect BB count, which may in the end cause
6306 codegen differences. */
6307 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6308 while (init_stmts.length ())
6309 insert_init_stmt (&id, bb, init_stmts.pop ());
6310 update_clone_info (&id);
6311
6312 /* Remap the nonlocal_goto_save_area, if any. */
6313 if (cfun->nonlocal_goto_save_area)
6314 {
6315 struct walk_stmt_info wi;
6316
6317 memset (&wi, 0, sizeof (wi));
6318 wi.info = &id;
6319 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6320 }
6321
6322 /* Clean up. */
6323 delete id.decl_map;
6324 if (id.debug_map)
6325 delete id.debug_map;
6326 free_dominance_info (CDI_DOMINATORS);
6327 free_dominance_info (CDI_POST_DOMINATORS);
6328
6329 update_max_bb_count ();
6330 fold_marked_statements (0, id.statements_to_fold);
6331 delete id.statements_to_fold;
6332 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6333 if (id.dst_node->definition)
6334 cgraph_edge::rebuild_references ();
6335 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6336 {
6337 calculate_dominance_info (CDI_DOMINATORS);
6338 fix_loop_structure (NULL);
6339 }
6340 update_ssa (TODO_update_ssa);
6341
6342 /* After partial cloning we need to rescale frequencies, so they are
6343 within proper range in the cloned function. */
6344 if (new_entry)
6345 {
6346 struct cgraph_edge *e;
6347 rebuild_frequencies ();
6348
6349 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6350 for (e = new_version_node->callees; e; e = e->next_callee)
6351 {
6352 basic_block bb = gimple_bb (e->call_stmt);
6353 e->count = bb->count;
6354 }
6355 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6356 {
6357 basic_block bb = gimple_bb (e->call_stmt);
6358 e->count = bb->count;
6359 }
6360 }
6361
6362 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6363 {
6364 vec<tree, va_gc> **debug_args = NULL;
6365 unsigned int len = 0;
6366 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6367
6368 for (i = 0; i < reset_len; i++)
6369 {
6370 tree parm = param_body_adjs->m_reset_debug_decls[i];
6371 gcc_assert (is_gimple_reg (parm));
6372 tree ddecl;
6373
6374 if (debug_args == NULL)
6375 {
6376 debug_args = decl_debug_args_insert (new_decl);
6377 len = vec_safe_length (*debug_args);
6378 }
6379 ddecl = make_node (DEBUG_EXPR_DECL);
6380 DECL_ARTIFICIAL (ddecl) = 1;
6381 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6382 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6383 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6384 vec_safe_push (*debug_args, ddecl);
6385 }
6386 if (debug_args != NULL)
6387 {
6388 /* On the callee side, add
6389 DEBUG D#Y s=> parm
6390 DEBUG var => D#Y
6391 stmts to the first bb where var is a VAR_DECL created for the
6392 optimized away parameter in DECL_INITIAL block. This hints
6393 in the debug info that var (whose DECL_ORIGIN is the parm
6394 PARM_DECL) is optimized away, but could be looked up at the
6395 call site as value of D#X there. */
6396 tree vexpr;
6397 gimple_stmt_iterator cgsi
6398 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6399 gimple *def_temp;
6400 tree var = vars;
6401 i = vec_safe_length (*debug_args);
6402 do
6403 {
6404 i -= 2;
6405 while (var != NULL_TREE
6406 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6407 var = TREE_CHAIN (var);
6408 if (var == NULL_TREE)
6409 break;
6410 vexpr = make_node (DEBUG_EXPR_DECL);
6411 tree parm = (**debug_args)[i];
6412 DECL_ARTIFICIAL (vexpr) = 1;
6413 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6414 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6415 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6416 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6417 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6418 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6419 }
6420 while (i > len);
6421 }
6422 }
6423 delete param_body_adjs;
6424 free_dominance_info (CDI_DOMINATORS);
6425 free_dominance_info (CDI_POST_DOMINATORS);
6426
6427 gcc_assert (!id.debug_stmts.exists ());
6428 pop_cfun ();
6429 return;
6430 }
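
/* A rough sketch of the typical invocation (illustrative only; the real
   caller is the clone materialization code in cgraphclones.c, and NODE
   stands for a hypothetical clone node):

     tree_function_versioning (node->clone_of->decl, node->decl,
                               node->clone.tree_map,
                               node->clone.param_adjustments,
                               true, NULL, NULL);

   i.e. a whole-body copy (no BLOCKS_TO_COPY and no NEW_ENTRY) with
   UPDATE_CLONES set and the replacements recorded in the clone info.  */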
6431
6432 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6433 the callee and return the inlined body on success. */
6434
6435 tree
6436 maybe_inline_call_in_expr (tree exp)
6437 {
6438 tree fn = get_callee_fndecl (exp);
6439
6440 /* We can only try to inline "const" functions. */
6441 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6442 {
6443 call_expr_arg_iterator iter;
6444 copy_body_data id;
6445 tree param, arg, t;
6446 hash_map<tree, tree> decl_map;
6447
6448 /* Remap the parameters. */
6449 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6450 param;
6451 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6452 decl_map.put (param, arg);
6453
6454 memset (&id, 0, sizeof (id));
6455 id.src_fn = fn;
6456 id.dst_fn = current_function_decl;
6457 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6458 id.decl_map = &decl_map;
6459
6460 id.copy_decl = copy_decl_no_change;
6461 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6462 id.transform_new_cfg = false;
6463 id.transform_return_to_modify = true;
6464 id.transform_parameter = true;
6465 id.transform_lang_insert_block = NULL;
6466
6467 /* Make sure not to unshare trees behind the front-end's back
6468 since front-end specific mechanisms may rely on sharing. */
6469 id.regimplify = false;
6470 id.do_not_unshare = true;
6471
6472 /* We're not inside any EH region. */
6473 id.eh_lp_nr = 0;
6474
6475 t = copy_tree_body (&id);
6476
6477 /* We can only return something suitable for use in a GENERIC
6478 expression tree. */
6479 if (TREE_CODE (t) == MODIFY_EXPR)
6480 return TREE_OPERAND (t, 1);
6481 }
6482
6483 return NULL_TREE;
6484 }
6485
6486 /* Duplicate a type, fields and all. */
6487
6488 tree
6489 build_duplicate_type (tree type)
6490 {
6491 struct copy_body_data id;
6492
6493 memset (&id, 0, sizeof (id));
6494 id.src_fn = current_function_decl;
6495 id.dst_fn = current_function_decl;
6496 id.src_cfun = cfun;
6497 id.decl_map = new hash_map<tree, tree>;
6498 id.debug_map = NULL;
6499 id.copy_decl = copy_decl_no_change;
6500
6501 type = remap_type_1 (type, &id);
6502
6503 delete id.decl_map;
6504 if (id.debug_map)
6505 delete id.debug_map;
6506
6507 TYPE_CANONICAL (type) = type;
6508
6509 return type;
6510 }
6511
6512 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6513 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6514 evaluation. */
6515
6516 tree
6517 copy_fn (tree fn, tree& parms, tree& result)
6518 {
6519 copy_body_data id;
6520 tree param;
6521 hash_map<tree, tree> decl_map;
6522
6523 tree *p = &parms;
6524 *p = NULL_TREE;
6525
6526 memset (&id, 0, sizeof (id));
6527 id.src_fn = fn;
6528 id.dst_fn = current_function_decl;
6529 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6530 id.decl_map = &decl_map;
6531
6532 id.copy_decl = copy_decl_no_change;
6533 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6534 id.transform_new_cfg = false;
6535 id.transform_return_to_modify = false;
6536 id.transform_parameter = true;
6537 id.transform_lang_insert_block = NULL;
6538
6539 /* Make sure not to unshare trees behind the front-end's back
6540 since front-end specific mechanisms may rely on sharing. */
6541 id.regimplify = false;
6542 id.do_not_unshare = true;
6543 id.do_not_fold = true;
6544
6545 /* We're not inside any EH region. */
6546 id.eh_lp_nr = 0;
6547
6548 /* Remap the parameters and result and return them to the caller. */
6549 for (param = DECL_ARGUMENTS (fn);
6550 param;
6551 param = DECL_CHAIN (param))
6552 {
6553 *p = remap_decl (param, &id);
6554 p = &DECL_CHAIN (*p);
6555 }
6556
6557 if (DECL_RESULT (fn))
6558 result = remap_decl (DECL_RESULT (fn), &id);
6559 else
6560 result = NULL_TREE;
6561
6562 return copy_tree_body (&id);
6563 }
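
/* A minimal usage sketch (FUN being a hypothetical constexpr
   FUNCTION_DECL that the C++ front end wants to evaluate):

     tree parms, result;
     tree body = copy_fn (fun, parms, result);

   BODY is an unshared copy of DECL_SAVED_TREE (fun) whose PARM_DECLs and
   RESULT_DECL have been remapped to the chains returned in PARMS and
   RESULT, so the evaluator can bind argument values to the copies without
   clobbering the original function.  */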