1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
65
66 /* I'm not really happy about this, but we need to handle gimple and
67 non-gimple trees. */
68
69 /* Inlining, Cloning, Versioning, Parallelization
70
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements is adjusted accordingly.
77
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
82
83 Versioning: a function body is duplicated, and the copy becomes a new
84 function, rather than being inserted into the blocks of an existing
85 function as with inlining. Some parameters will become constants.
86
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
90
91 All of these will simultaneously look up any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
98
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
100
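/* Illustrative sketch (added for exposition, not part of the original
   sources): the Inlining transform described above turns, roughly,

       int callee (int p) { return p + 1; }
       ...
       x = callee (y);

   into something along the lines of

       {
         int p.copy = y;          <- the PARM_DECL remapped to a VAR_DECL
         retval.1 = p.copy + 1;   <- the RETURN_EXPR became a MODIFY_EXPR
       }
       x = retval.1;

   where "p.copy" and "retval.1" are hypothetical names for the remapped
   parameter and for the dedicated returned-value variable; the actual
   names are chosen by the remapping code below.  */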
101 /* To Do:
102
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
109
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
112
113
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
116
117 eni_weights eni_size_weights;
118
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
121
122 eni_weights eni_time_weights;
123
124 /* Prototypes. */
125
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_decl_to_var (tree, copy_body_data *);
134 static tree copy_result_decl_to_var (tree, copy_body_data *);
135 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
136 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138
139 /* Insert a tree->tree mapping for ID. Although the name suggests
140 that the trees should be variables, this is used for more than that. */
141
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145 id->decl_map->put (key, value);
146
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 id->decl_map->put (value, value);
151 }
152
153 /* Insert a tree->tree mapping for ID. This is only used for
154 variables. */
155
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159 if (!gimple_in_ssa_p (id->src_cfun))
160 return;
161
162 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163 return;
164
165 if (!target_for_debug_bind (key))
166 return;
167
168 gcc_assert (TREE_CODE (key) == PARM_DECL);
169 gcc_assert (VAR_P (value));
170
171 if (!id->debug_map)
172 id->debug_map = new hash_map<tree, tree>;
173
174 id->debug_map->put (key, value);
175 }
176
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
180 context. */
181 static int processing_debug_stmt = 0;
182
183 /* Construct new SSA name for old NAME. ID is the inline context. */
184
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188 tree new_tree, var;
189 tree *n;
190
191 gcc_assert (TREE_CODE (name) == SSA_NAME);
192
193 n = id->decl_map->get (name);
194 if (n)
195 return unshare_expr (*n);
196
197 if (processing_debug_stmt)
198 {
199 if (SSA_NAME_IS_DEFAULT_DEF (name)
200 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 && id->entry_bb == NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 {
204 tree vexpr = make_node (DEBUG_EXPR_DECL);
205 gimple *def_temp;
206 gimple_stmt_iterator gsi;
207 tree val = SSA_NAME_VAR (name);
208
209 n = id->decl_map->get (val);
210 if (n != NULL)
211 val = *n;
212 if (TREE_CODE (val) != PARM_DECL
213 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
214 {
215 processing_debug_stmt = -1;
216 return name;
217 }
218 n = id->decl_map->get (val);
219 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
220 return *n;
221 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
222 DECL_ARTIFICIAL (vexpr) = 1;
223 TREE_TYPE (vexpr) = TREE_TYPE (name);
224 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
225 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
226 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
227 insert_decl_map (id, val, vexpr);
228 return vexpr;
229 }
230
231 processing_debug_stmt = -1;
232 return name;
233 }
234
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var = SSA_NAME_VAR (name);
237 if (!var
238 || (!SSA_NAME_IS_DEFAULT_DEF (name)
239 && VAR_P (var)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
241 && DECL_ARTIFICIAL (var)
242 && DECL_IGNORED_P (var)
243 && !DECL_NAME (var)))
244 {
245 struct ptr_info_def *pi;
246 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
247 if (!var && SSA_NAME_IDENTIFIER (name))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
249 insert_decl_map (id, name, new_tree);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id->src_cfun->gimple_df
254 && id->src_cfun->gimple_df->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name))
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
258 {
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
261 }
262 /* So can range-info. */
263 if (!POINTER_TYPE_P (TREE_TYPE (name))
264 && SSA_NAME_RANGE_INFO (name))
265 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
266 SSA_NAME_RANGE_INFO (name));
267 return new_tree;
268 }
269
270 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do that
271 in copy_bb. */
272 new_tree = remap_decl (var, id);
273
274 /* We might've substituted a constant or another SSA_NAME for
275 the variable.
276
277 Replace the SSA name representing the RESULT_DECL by the variable during
278 inlining: this saves us from the need to introduce a PHI node when the
279 return value is only partly initialized. */
280 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
281 && (!SSA_NAME_VAR (name)
282 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
283 || !id->transform_return_to_modify))
284 {
285 struct ptr_info_def *pi;
286 new_tree = make_ssa_name (new_tree);
287 insert_decl_map (id, name, new_tree);
288 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
289 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
290 /* At least IPA points-to info can be directly transferred. */
291 if (id->src_cfun->gimple_df
292 && id->src_cfun->gimple_df->ipa_pta
293 && POINTER_TYPE_P (TREE_TYPE (name))
294 && (pi = SSA_NAME_PTR_INFO (name))
295 && !pi->pt.anything)
296 {
297 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
298 new_pi->pt = pi->pt;
299 }
300 /* So can range-info. */
301 if (!POINTER_TYPE_P (TREE_TYPE (name))
302 && SSA_NAME_RANGE_INFO (name))
303 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
304 SSA_NAME_RANGE_INFO (name));
305 if (SSA_NAME_IS_DEFAULT_DEF (name))
306 {
307 /* By inlining a function having an uninitialized variable, we might
308 extend its lifetime (the variable might get reused). This causes an
309 ICE when we end up extending the lifetime of an SSA name across an
310 abnormal edge, and also increases register pressure.
311
312 We simply initialize all uninitialized vars to 0, except when
313 we are inlining into the very first BB. We can avoid
314 this for all BBs that are not inside strongly connected
315 regions of the CFG, but this is expensive to test. */
316 if (id->entry_bb
317 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
318 && (!SSA_NAME_VAR (name)
319 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
320 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
321 0)->dest
322 || EDGE_COUNT (id->entry_bb->preds) != 1))
323 {
324 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
325 gimple *init_stmt;
326 tree zero = build_zero_cst (TREE_TYPE (new_tree));
327
328 init_stmt = gimple_build_assign (new_tree, zero);
329 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
330 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
331 }
332 else
333 {
334 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
335 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
336 }
337 }
338 }
339 else
340 insert_decl_map (id, name, new_tree);
341 return new_tree;
342 }
343
344 /* Remap DECL during the copying of the BLOCK tree for the function. */
345
346 tree
347 remap_decl (tree decl, copy_body_data *id)
348 {
349 tree *n;
350
351 /* We only remap local variables in the current function. */
352
353 /* See if we have remapped this declaration. */
354
355 n = id->decl_map->get (decl);
356
357 if (!n && processing_debug_stmt)
358 {
359 processing_debug_stmt = -1;
360 return decl;
361 }
362
363 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
364 necessary DECLs have already been remapped and we do not want to duplicate
365 a decl coming from outside of the sequence we are copying. */
366 if (!n
367 && id->prevent_decl_creation_for_types
368 && id->remapping_type_depth > 0
369 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
370 return decl;
371
372 /* If we didn't already have an equivalent for this declaration, create one
373 now. */
374 if (!n)
375 {
376 /* Make a copy of the variable or label. */
377 tree t = id->copy_decl (decl, id);
378
379 /* Remember it, so that if we encounter this local entity again
380 we can reuse this copy. Do this early because remap_type may
381 need this decl for TYPE_STUB_DECL. */
382 insert_decl_map (id, decl, t);
383
384 if (!DECL_P (t))
385 return t;
386
387 /* Remap types, if necessary. */
388 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
389 if (TREE_CODE (t) == TYPE_DECL)
390 {
391 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
392
393 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
394 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
395 is not set on the TYPE_DECL, for example in LTO mode. */
396 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
397 {
398 tree x = build_variant_type_copy (TREE_TYPE (t));
399 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
400 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
401 DECL_ORIGINAL_TYPE (t) = x;
402 }
403 }
404
405 /* Remap sizes as necessary. */
406 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
407 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
408
409 /* If fields, do likewise for offset and qualifier. */
410 if (TREE_CODE (t) == FIELD_DECL)
411 {
412 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
413 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
414 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
415 }
416
417 return t;
418 }
419
420 if (id->do_not_unshare)
421 return *n;
422 else
423 return unshare_expr (*n);
424 }
425
426 static tree
427 remap_type_1 (tree type, copy_body_data *id)
428 {
429 tree new_tree, t;
430
431 /* We do need a copy. Build and register it now. If this is a pointer or
432 reference type, remap the designated type and make a new pointer or
433 reference type. */
434 if (TREE_CODE (type) == POINTER_TYPE)
435 {
436 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
437 TYPE_MODE (type),
438 TYPE_REF_CAN_ALIAS_ALL (type));
439 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
440 new_tree = build_type_attribute_qual_variant (new_tree,
441 TYPE_ATTRIBUTES (type),
442 TYPE_QUALS (type));
443 insert_decl_map (id, type, new_tree);
444 return new_tree;
445 }
446 else if (TREE_CODE (type) == REFERENCE_TYPE)
447 {
448 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
457 }
458 else
459 new_tree = copy_node (type);
460
461 insert_decl_map (id, type, new_tree);
462
463 /* This is a new type, not a copy of an old type. Need to reassociate
464 variants. We can handle everything except the main variant lazily. */
465 t = TYPE_MAIN_VARIANT (type);
466 if (type != t)
467 {
468 t = remap_type (t, id);
469 TYPE_MAIN_VARIANT (new_tree) = t;
470 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
471 TYPE_NEXT_VARIANT (t) = new_tree;
472 }
473 else
474 {
475 TYPE_MAIN_VARIANT (new_tree) = new_tree;
476 TYPE_NEXT_VARIANT (new_tree) = NULL;
477 }
478
479 if (TYPE_STUB_DECL (type))
480 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
481
482 /* Lazily create pointer and reference types. */
483 TYPE_POINTER_TO (new_tree) = NULL;
484 TYPE_REFERENCE_TO (new_tree) = NULL;
485
486 /* Copy all types that may contain references to local variables; be sure to
487 preserve sharing between a type and its main variant when possible. */
488 switch (TREE_CODE (new_tree))
489 {
490 case INTEGER_TYPE:
491 case REAL_TYPE:
492 case FIXED_POINT_TYPE:
493 case ENUMERAL_TYPE:
494 case BOOLEAN_TYPE:
495 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
496 {
497 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
498 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
499
500 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
502 }
503 else
504 {
505 t = TYPE_MIN_VALUE (new_tree);
506 if (t && TREE_CODE (t) != INTEGER_CST)
507 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
508
509 t = TYPE_MAX_VALUE (new_tree);
510 if (t && TREE_CODE (t) != INTEGER_CST)
511 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
512 }
513 return new_tree;
514
515 case FUNCTION_TYPE:
516 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
518 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
519 else
520 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
521 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
523 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
524 else
525 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
526 return new_tree;
527
528 case ARRAY_TYPE:
529 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532 else
533 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534
535 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
536 {
537 gcc_checking_assert (TYPE_DOMAIN (type)
538 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
539 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
540 }
541 else
542 {
543 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
544 /* For array bounds where we have decided not to copy over the bounds
545 variable which isn't used in the OpenMP/OpenACC region, change them to
546 an uninitialized VAR_DECL temporary. */
547 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
548 && id->adjust_array_error_bounds
549 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
550 {
551 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
552 DECL_ATTRIBUTES (v)
553 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
554 DECL_ATTRIBUTES (v));
555 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
556 }
557 }
558 break;
559
560 case RECORD_TYPE:
561 case UNION_TYPE:
562 case QUAL_UNION_TYPE:
563 if (TYPE_MAIN_VARIANT (type) != type
564 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
565 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
566 else
567 {
568 tree f, nf = NULL;
569
570 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
571 {
572 t = remap_decl (f, id);
573 DECL_CONTEXT (t) = new_tree;
574 DECL_CHAIN (t) = nf;
575 nf = t;
576 }
577 TYPE_FIELDS (new_tree) = nreverse (nf);
578 }
579 break;
580
581 case OFFSET_TYPE:
582 default:
583 /* Shouldn't have been thought variable sized. */
584 gcc_unreachable ();
585 }
586
587 /* All variants of the type share the same size, so use the already remapped data. */
588 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
589 {
590 tree s = TYPE_SIZE (type);
591 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
592 tree su = TYPE_SIZE_UNIT (type);
593 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
594 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
595 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
596 || s == mvs);
597 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
598 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
599 || su == mvsu);
600 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
601 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
602 }
603 else
604 {
605 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
606 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
607 }
608
609 return new_tree;
610 }
611
612 /* Helper function for remap_type_2, called through walk_tree. */
613
614 static tree
615 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
616 {
617 copy_body_data *id = (copy_body_data *) data;
618
619 if (TYPE_P (*tp))
620 *walk_subtrees = 0;
621
622 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
623 return *tp;
624
625 return NULL_TREE;
626 }
627
628 /* Return true if TYPE needs to be remapped because remap_decl on any
629 needed embedded decl returns something other than that decl. */
630
631 static bool
632 remap_type_2 (tree type, copy_body_data *id)
633 {
634 tree t;
635
636 #define RETURN_TRUE_IF_VAR(T) \
637 do \
638 { \
639 tree _t = (T); \
640 if (_t) \
641 { \
642 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
643 return true; \
644 if (!TYPE_SIZES_GIMPLIFIED (type) \
645 && walk_tree (&_t, remap_type_3, id, NULL)) \
646 return true; \
647 } \
648 } \
649 while (0)
650
651 switch (TREE_CODE (type))
652 {
653 case POINTER_TYPE:
654 case REFERENCE_TYPE:
655 case FUNCTION_TYPE:
656 case METHOD_TYPE:
657 return remap_type_2 (TREE_TYPE (type), id);
658
659 case INTEGER_TYPE:
660 case REAL_TYPE:
661 case FIXED_POINT_TYPE:
662 case ENUMERAL_TYPE:
663 case BOOLEAN_TYPE:
664 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
665 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
666 return false;
667
668 case ARRAY_TYPE:
669 if (remap_type_2 (TREE_TYPE (type), id)
670 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
671 return true;
672 break;
673
674 case RECORD_TYPE:
675 case UNION_TYPE:
676 case QUAL_UNION_TYPE:
677 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
678 if (TREE_CODE (t) == FIELD_DECL)
679 {
680 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
681 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
682 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
683 if (TREE_CODE (type) == QUAL_UNION_TYPE)
684 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
685 }
686 break;
687
688 default:
689 return false;
690 }
691
692 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
693 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
694 return false;
695 #undef RETURN_TRUE_IF_VAR
696 }
697
698 tree
699 remap_type (tree type, copy_body_data *id)
700 {
701 tree *node;
702 tree tmp;
703
704 if (type == NULL)
705 return type;
706
707 /* See if we have remapped this type. */
708 node = id->decl_map->get (type);
709 if (node)
710 return *node;
711
712 /* The type only needs remapping if it's variably modified. */
713 if (! variably_modified_type_p (type, id->src_fn)
714 /* Don't remap if the copy_decl method doesn't always return a new
715 decl and, for all embedded decls, returns the passed-in decl. */
716 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
717 {
718 insert_decl_map (id, type, type);
719 return type;
720 }
721
722 id->remapping_type_depth++;
723 tmp = remap_type_1 (type, id);
724 id->remapping_type_depth--;
725
726 return tmp;
727 }
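/* Example (added for exposition, not from the original sources): a type is
   variably modified in the sense checked above for cases like

       void f (int n) { int a[n]; ... }

   where the domain of a's array type refers to the local N (or to a
   temporary derived from it); such a type must be rebuilt so that the copy
   refers to the remapped bound.  Ordinary types such as plain "int" take
   the early path above and are simply entered into the map as an identity.  */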
728
729 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
730
731 static bool
732 can_be_nonlocal (tree decl, copy_body_data *id)
733 {
734 /* We cannot duplicate function decls. */
735 if (TREE_CODE (decl) == FUNCTION_DECL)
736 return true;
737
738 /* Local static vars must be non-local or we get multiple declaration
739 problems. */
740 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
741 return true;
742
743 return false;
744 }
745
746 static tree
747 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
748 copy_body_data *id)
749 {
750 tree old_var;
751 tree new_decls = NULL_TREE;
752
753 /* Remap its variables. */
754 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
755 {
756 tree new_var;
757
758 if (can_be_nonlocal (old_var, id))
759 {
760 /* We need to add this variable to the local decls as otherwise
761 nothing else will do so. */
762 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
763 add_local_decl (cfun, old_var);
764 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
765 && !DECL_IGNORED_P (old_var)
766 && nonlocalized_list)
767 vec_safe_push (*nonlocalized_list, old_var);
768 continue;
769 }
770
771 /* Remap the variable. */
772 new_var = remap_decl (old_var, id);
773
774 /* If we didn't remap this variable, we can't mess with its
775 TREE_CHAIN. If we remapped this variable to the return slot, it's
776 already declared somewhere else, so don't declare it here. */
777
778 if (new_var == id->retvar)
779 ;
780 else if (!new_var)
781 {
782 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
783 && !DECL_IGNORED_P (old_var)
784 && nonlocalized_list)
785 vec_safe_push (*nonlocalized_list, old_var);
786 }
787 else
788 {
789 gcc_assert (DECL_P (new_var));
790 DECL_CHAIN (new_var) = new_decls;
791 new_decls = new_var;
792
793 /* Also copy value-expressions. */
794 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
795 {
796 tree tem = DECL_VALUE_EXPR (new_var);
797 bool old_regimplify = id->regimplify;
798 id->remapping_type_depth++;
799 walk_tree (&tem, copy_tree_body_r, id, NULL);
800 id->remapping_type_depth--;
801 id->regimplify = old_regimplify;
802 SET_DECL_VALUE_EXPR (new_var, tem);
803 }
804 }
805 }
806
807 return nreverse (new_decls);
808 }
809
810 /* Copy the BLOCK to contain remapped versions of the variables
811 therein. And hook the new block into the block-tree. */
812
813 static void
814 remap_block (tree *block, copy_body_data *id)
815 {
816 tree old_block;
817 tree new_block;
818
819 /* Make the new block. */
820 old_block = *block;
821 new_block = make_node (BLOCK);
822 TREE_USED (new_block) = TREE_USED (old_block);
823 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
824 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
825 BLOCK_NONLOCALIZED_VARS (new_block)
826 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
827 *block = new_block;
828
829 /* Remap its variables. */
830 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
831 &BLOCK_NONLOCALIZED_VARS (new_block),
832 id);
833
834 if (id->transform_lang_insert_block)
835 id->transform_lang_insert_block (new_block);
836
837 /* Remember the remapped block. */
838 insert_decl_map (id, old_block, new_block);
839 }
840
841 /* Copy the whole block tree and root it in id->block. */
842
843 static tree
844 remap_blocks (tree block, copy_body_data *id)
845 {
846 tree t;
847 tree new_tree = block;
848
849 if (!block)
850 return NULL;
851
852 remap_block (&new_tree, id);
853 gcc_assert (new_tree != block);
854 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
855 prepend_lexical_block (new_tree, remap_blocks (t, id));
856 /* Blocks are in arbitrary order, but make things slightly prettier and do
857 not swap order when producing a copy. */
858 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
859 return new_tree;
860 }
861
862 /* Remap the block tree rooted at BLOCK to nothing. */
863
864 static void
865 remap_blocks_to_null (tree block, copy_body_data *id)
866 {
867 tree t;
868 insert_decl_map (id, block, NULL_TREE);
869 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
870 remap_blocks_to_null (t, id);
871 }
872
873 /* Remap the block info in the location LOCUS. */
874
875 static location_t
876 remap_location (location_t locus, copy_body_data *id)
877 {
878 if (LOCATION_BLOCK (locus))
879 {
880 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
881 gcc_assert (n);
882 if (*n)
883 return set_block (locus, *n);
884 }
885
886 locus = LOCATION_LOCUS (locus);
887
888 if (locus != UNKNOWN_LOCATION && id->block)
889 return set_block (locus, id->block);
890
891 return locus;
892 }
893
894 static void
895 copy_statement_list (tree *tp)
896 {
897 tree_stmt_iterator oi, ni;
898 tree new_tree;
899
900 new_tree = alloc_stmt_list ();
901 ni = tsi_start (new_tree);
902 oi = tsi_start (*tp);
903 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
904 *tp = new_tree;
905
906 for (; !tsi_end_p (oi); tsi_next (&oi))
907 {
908 tree stmt = tsi_stmt (oi);
909 if (TREE_CODE (stmt) == STATEMENT_LIST)
910 /* This copy is not redundant; tsi_link_after will smash this
911 STATEMENT_LIST into the end of the one we're building, and we
912 don't want to do that with the original. */
913 copy_statement_list (&stmt);
914 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
915 }
916 }
917
918 static void
919 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
920 {
921 tree block = BIND_EXPR_BLOCK (*tp);
922 /* Copy (and replace) the statement. */
923 copy_tree_r (tp, walk_subtrees, NULL);
924 if (block)
925 {
926 remap_block (&block, id);
927 BIND_EXPR_BLOCK (*tp) = block;
928 }
929
930 if (BIND_EXPR_VARS (*tp))
931 /* This will remap a lot of the same decls again, but this should be
932 harmless. */
933 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
934 }
935
936
937 /* Create a new gimple_seq by remapping all the statements in BODY
938 using the inlining information in ID. */
939
940 static gimple_seq
941 remap_gimple_seq (gimple_seq body, copy_body_data *id)
942 {
943 gimple_stmt_iterator si;
944 gimple_seq new_body = NULL;
945
946 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
947 {
948 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
949 gimple_seq_add_seq (&new_body, new_stmts);
950 }
951
952 return new_body;
953 }
954
955
956 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
957 block using the mapping information in ID. */
958
959 static gimple *
960 copy_gimple_bind (gbind *stmt, copy_body_data *id)
961 {
962 gimple *new_bind;
963 tree new_block, new_vars;
964 gimple_seq body, new_body;
965
966 /* Copy the statement. Note that we purposely don't use copy_stmt
967 here because we need to remap statements as we copy. */
968 body = gimple_bind_body (stmt);
969 new_body = remap_gimple_seq (body, id);
970
971 new_block = gimple_bind_block (stmt);
972 if (new_block)
973 remap_block (&new_block, id);
974
975 /* This will remap a lot of the same decls again, but this should be
976 harmless. */
977 new_vars = gimple_bind_vars (stmt);
978 if (new_vars)
979 new_vars = remap_decls (new_vars, NULL, id);
980
981 new_bind = gimple_build_bind (new_vars, new_body, new_block);
982
983 return new_bind;
984 }
985
986 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
987
988 static bool
989 is_parm (tree decl)
990 {
991 if (TREE_CODE (decl) == SSA_NAME)
992 {
993 decl = SSA_NAME_VAR (decl);
994 if (!decl)
995 return false;
996 }
997
998 return (TREE_CODE (decl) == PARM_DECL);
999 }
1000
1001 /* Remap the dependence CLIQUE from the source to the destination function
1002 as specified in ID. */
1003
1004 static unsigned short
1005 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1006 {
1007 if (clique == 0 || processing_debug_stmt)
1008 return 0;
1009 if (!id->dependence_map)
1010 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1011 bool existed;
1012 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1013 if (!existed)
1014 {
1015 /* Clique 1 is reserved for local ones set by PTA. */
1016 if (cfun->last_clique == 0)
1017 cfun->last_clique = 1;
1018 newc = ++cfun->last_clique;
1019 }
1020 return newc;
1021 }
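/* For illustration (an assumption about typical use, not from the original
   sources): if the source function used dependence cliques 2 and 3, the
   first time each is seen here it is assigned a fresh clique in the
   destination function, e.g. 2 -> last_clique + 1 and 3 -> last_clique + 2.
   Clique 0 is passed through unchanged, and destination clique 1 is never
   handed out by this function because it is reserved for local cliques
   assigned by PTA.  */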
1022
1023 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1024 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1025 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
1026 recursing into the child nodes of *TP. */
1027
1028 static tree
1029 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1030 {
1031 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1032 copy_body_data *id = (copy_body_data *) wi_p->info;
1033 tree fn = id->src_fn;
1034
1035 /* For recursive invocations this is no longer the LHS itself. */
1036 bool is_lhs = wi_p->is_lhs;
1037 wi_p->is_lhs = false;
1038
1039 if (TREE_CODE (*tp) == SSA_NAME)
1040 {
1041 *tp = remap_ssa_name (*tp, id);
1042 *walk_subtrees = 0;
1043 if (is_lhs)
1044 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1045 return NULL;
1046 }
1047 else if (auto_var_in_fn_p (*tp, fn))
1048 {
1049 /* Local variables and labels need to be replaced by equivalent
1050 variables. We don't want to copy static variables; there's
1051 only one of those, no matter how many times we inline the
1052 containing function. Similarly for globals from an outer
1053 function. */
1054 tree new_decl;
1055
1056 /* Remap the declaration. */
1057 new_decl = remap_decl (*tp, id);
1058 gcc_assert (new_decl);
1059 /* Replace this variable with the copy. */
1060 STRIP_TYPE_NOPS (new_decl);
1061 /* ??? The C++ frontend uses void * pointer zero to initialize
1062 any other type. This confuses the middle-end type verification.
1063 As cloned bodies do not go through gimplification again the fixup
1064 there doesn't trigger. */
1065 if (TREE_CODE (new_decl) == INTEGER_CST
1066 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1067 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1068 *tp = new_decl;
1069 *walk_subtrees = 0;
1070 }
1071 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1072 gcc_unreachable ();
1073 else if (TREE_CODE (*tp) == SAVE_EXPR)
1074 gcc_unreachable ();
1075 else if (TREE_CODE (*tp) == LABEL_DECL
1076 && (!DECL_CONTEXT (*tp)
1077 || decl_function_context (*tp) == id->src_fn))
1078 /* These may need to be remapped for EH handling. */
1079 *tp = remap_decl (*tp, id);
1080 else if (TREE_CODE (*tp) == FIELD_DECL)
1081 {
1082 /* If the enclosing record type is variably_modified_type_p, the field
1083 has already been remapped. Otherwise, it need not be. */
1084 tree *n = id->decl_map->get (*tp);
1085 if (n)
1086 *tp = *n;
1087 *walk_subtrees = 0;
1088 }
1089 else if (TYPE_P (*tp))
1090 /* Types may need remapping as well. */
1091 *tp = remap_type (*tp, id);
1092 else if (CONSTANT_CLASS_P (*tp))
1093 {
1094 /* If this is a constant, we have to copy the node iff the type
1095 will be remapped. copy_tree_r will not copy a constant. */
1096 tree new_type = remap_type (TREE_TYPE (*tp), id);
1097
1098 if (new_type == TREE_TYPE (*tp))
1099 *walk_subtrees = 0;
1100
1101 else if (TREE_CODE (*tp) == INTEGER_CST)
1102 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1103 else
1104 {
1105 *tp = copy_node (*tp);
1106 TREE_TYPE (*tp) = new_type;
1107 }
1108 }
1109 else
1110 {
1111 /* Otherwise, just copy the node. Note that copy_tree_r already
1112 knows not to copy VAR_DECLs, etc., so this is safe. */
1113
1114 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1115 {
1116 /* We need to re-canonicalize MEM_REFs from inline substitutions
1117 that can happen when a pointer argument is an ADDR_EXPR.
1118 Recurse here manually to allow that. */
1119 tree ptr = TREE_OPERAND (*tp, 0);
1120 tree type = remap_type (TREE_TYPE (*tp), id);
1121 tree old = *tp;
1122 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1123 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1124 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1125 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1126 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1127 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1128 {
1129 MR_DEPENDENCE_CLIQUE (*tp)
1130 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1131 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1132 }
1133 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1134 remapped a parameter as the property might be valid only
1135 for the parameter itself. */
1136 if (TREE_THIS_NOTRAP (old)
1137 && (!is_parm (TREE_OPERAND (old, 0))
1138 || (!id->transform_parameter && is_parm (ptr))))
1139 TREE_THIS_NOTRAP (*tp) = 1;
1140 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1141 *walk_subtrees = 0;
1142 return NULL;
1143 }
1144
1145 /* Here is the "usual case". Copy this tree node, and then
1146 tweak some special cases. */
1147 copy_tree_r (tp, walk_subtrees, NULL);
1148
1149 if (TREE_CODE (*tp) != OMP_CLAUSE)
1150 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1151
1152 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1153 {
1154 /* The copied TARGET_EXPR has never been expanded, even if the
1155 original node was expanded already. */
1156 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1157 TREE_OPERAND (*tp, 3) = NULL_TREE;
1158 }
1159 else if (TREE_CODE (*tp) == ADDR_EXPR)
1160 {
1161 /* Variable substitution need not be simple. In particular,
1162 the MEM_REF substitution above. Make sure that
1163 TREE_CONSTANT and friends are up-to-date. */
1164 int invariant = is_gimple_min_invariant (*tp);
1165 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1166 recompute_tree_invariant_for_addr_expr (*tp);
1167
1168 /* If this used to be invariant, but is not any longer,
1169 then regimplification is probably needed. */
1170 if (invariant && !is_gimple_min_invariant (*tp))
1171 id->regimplify = true;
1172
1173 *walk_subtrees = 0;
1174 }
1175 }
1176
1177 /* Update the TREE_BLOCK for the cloned expr. */
1178 if (EXPR_P (*tp))
1179 {
1180 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1181 tree old_block = TREE_BLOCK (*tp);
1182 if (old_block)
1183 {
1184 tree *n;
1185 n = id->decl_map->get (TREE_BLOCK (*tp));
1186 if (n)
1187 new_block = *n;
1188 }
1189 TREE_SET_BLOCK (*tp, new_block);
1190 }
1191
1192 /* Keep iterating. */
1193 return NULL_TREE;
1194 }
1195
1196
1197 /* Called from copy_body_id via walk_tree. DATA is really a
1198 `copy_body_data *'. */
1199
1200 tree
1201 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1202 {
1203 copy_body_data *id = (copy_body_data *) data;
1204 tree fn = id->src_fn;
1205 tree new_block;
1206
1207 /* Begin by recognizing trees that we'll completely rewrite for the
1208 inlining context. Our output for these trees is completely
1209 different from our input (e.g. RETURN_EXPR is deleted and morphs
1210 into an edge). Further down, we'll handle trees that get
1211 duplicated and/or tweaked. */
1212
1213 /* When requested, RETURN_EXPRs should be transformed to just the
1214 contained MODIFY_EXPR. The branch semantics of the return will
1215 be handled elsewhere by manipulating the CFG rather than a statement. */
1216 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1217 {
1218 tree assignment = TREE_OPERAND (*tp, 0);
1219
1220 /* If we're returning something, just turn that into an
1221 assignment into the equivalent of the original RESULT_DECL.
1222 If the "assignment" is just the result decl, the result
1223 decl has already been set (e.g. a recent "foo (&result_decl,
1224 ...)"); just toss the entire RETURN_EXPR. */
1225 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1226 {
1227 /* Replace the RETURN_EXPR with (a copy of) the
1228 MODIFY_EXPR hanging underneath. */
1229 *tp = copy_node (assignment);
1230 }
1231 else /* Else the RETURN_EXPR returns no value. */
1232 {
1233 *tp = NULL;
1234 return (tree) (void *)1;
1235 }
1236 }
1237 else if (TREE_CODE (*tp) == SSA_NAME)
1238 {
1239 *tp = remap_ssa_name (*tp, id);
1240 *walk_subtrees = 0;
1241 return NULL;
1242 }
1243
1244 /* Local variables and labels need to be replaced by equivalent
1245 variables. We don't want to copy static variables; there's only
1246 one of those, no matter how many times we inline the containing
1247 function. Similarly for globals from an outer function. */
1248 else if (auto_var_in_fn_p (*tp, fn))
1249 {
1250 tree new_decl;
1251
1252 /* Remap the declaration. */
1253 new_decl = remap_decl (*tp, id);
1254 gcc_assert (new_decl);
1255 /* Replace this variable with the copy. */
1256 STRIP_TYPE_NOPS (new_decl);
1257 *tp = new_decl;
1258 *walk_subtrees = 0;
1259 }
1260 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1261 copy_statement_list (tp);
1262 else if (TREE_CODE (*tp) == SAVE_EXPR
1263 || TREE_CODE (*tp) == TARGET_EXPR)
1264 remap_save_expr (tp, id->decl_map, walk_subtrees);
1265 else if (TREE_CODE (*tp) == LABEL_DECL
1266 && (! DECL_CONTEXT (*tp)
1267 || decl_function_context (*tp) == id->src_fn))
1268 /* These may need to be remapped for EH handling. */
1269 *tp = remap_decl (*tp, id);
1270 else if (TREE_CODE (*tp) == BIND_EXPR)
1271 copy_bind_expr (tp, walk_subtrees, id);
1272 /* Types may need remapping as well. */
1273 else if (TYPE_P (*tp))
1274 *tp = remap_type (*tp, id);
1275
1276 /* If this is a constant, we have to copy the node iff the type will be
1277 remapped. copy_tree_r will not copy a constant. */
1278 else if (CONSTANT_CLASS_P (*tp))
1279 {
1280 tree new_type = remap_type (TREE_TYPE (*tp), id);
1281
1282 if (new_type == TREE_TYPE (*tp))
1283 *walk_subtrees = 0;
1284
1285 else if (TREE_CODE (*tp) == INTEGER_CST)
1286 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1287 else
1288 {
1289 *tp = copy_node (*tp);
1290 TREE_TYPE (*tp) = new_type;
1291 }
1292 }
1293
1294 /* Otherwise, just copy the node. Note that copy_tree_r already
1295 knows not to copy VAR_DECLs, etc., so this is safe. */
1296 else
1297 {
1298 /* Here we handle trees that are not completely rewritten.
1299 First we detect some inlining-induced bogosities for
1300 discarding. */
1301 if (TREE_CODE (*tp) == MODIFY_EXPR
1302 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1303 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1304 {
1305 /* Some assignments VAR = VAR; don't generate any rtl code
1306 and thus don't count as variable modification. Avoid
1307 keeping bogosities like 0 = 0. */
1308 tree decl = TREE_OPERAND (*tp, 0), value;
1309 tree *n;
1310
1311 n = id->decl_map->get (decl);
1312 if (n)
1313 {
1314 value = *n;
1315 STRIP_TYPE_NOPS (value);
1316 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1317 {
1318 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1319 return copy_tree_body_r (tp, walk_subtrees, data);
1320 }
1321 }
1322 }
1323 else if (TREE_CODE (*tp) == INDIRECT_REF)
1324 {
1325 /* Get rid of *& from inline substitutions that can happen when a
1326 pointer argument is an ADDR_EXPR. */
1327 tree decl = TREE_OPERAND (*tp, 0);
1328 tree *n = id->decl_map->get (decl);
1329 if (n)
1330 {
1331 /* If we happen to get an ADDR_EXPR in n->value, strip
1332 it manually here as we'll eventually get ADDR_EXPRs
1333 which lie about their types pointed to. In this case
1334 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1335 but we absolutely rely on that. As fold_indirect_ref
1336 does other useful transformations, try that first, though. */
1337 tree type = TREE_TYPE (*tp);
1338 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1339 tree old = *tp;
1340 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1341 if (! *tp)
1342 {
1343 type = remap_type (type, id);
1344 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1345 {
1346 *tp
1347 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1348 /* ??? We should either assert here or build
1349 a VIEW_CONVERT_EXPR instead of blindly leaking
1350 incompatible types to our IL. */
1351 if (! *tp)
1352 *tp = TREE_OPERAND (ptr, 0);
1353 }
1354 else
1355 {
1356 *tp = build1 (INDIRECT_REF, type, ptr);
1357 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1358 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1359 TREE_READONLY (*tp) = TREE_READONLY (old);
1360 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1361 have remapped a parameter as the property might be
1362 valid only for the parameter itself. */
1363 if (TREE_THIS_NOTRAP (old)
1364 && (!is_parm (TREE_OPERAND (old, 0))
1365 || (!id->transform_parameter && is_parm (ptr))))
1366 TREE_THIS_NOTRAP (*tp) = 1;
1367 }
1368 }
1369 *walk_subtrees = 0;
1370 return NULL;
1371 }
1372 }
1373 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1374 {
1375 /* We need to re-canonicalize MEM_REFs from inline substitutions
1376 that can happen when a pointer argument is an ADDR_EXPR.
1377 Recurse here manually to allow that. */
1378 tree ptr = TREE_OPERAND (*tp, 0);
1379 tree type = remap_type (TREE_TYPE (*tp), id);
1380 tree old = *tp;
1381 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1382 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1383 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1384 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1385 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1386 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1387 {
1388 MR_DEPENDENCE_CLIQUE (*tp)
1389 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1390 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1391 }
1392 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1393 remapped a parameter as the property might be valid only
1394 for the parameter itself. */
1395 if (TREE_THIS_NOTRAP (old)
1396 && (!is_parm (TREE_OPERAND (old, 0))
1397 || (!id->transform_parameter && is_parm (ptr))))
1398 TREE_THIS_NOTRAP (*tp) = 1;
1399 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1400 *walk_subtrees = 0;
1401 return NULL;
1402 }
1403
1404 /* Here is the "usual case". Copy this tree node, and then
1405 tweak some special cases. */
1406 copy_tree_r (tp, walk_subtrees, NULL);
1407
1408 /* If EXPR has a block defined, map it to the newly constructed block.
1409 When inlining we want EXPRs without a block to appear in the block
1410 of the function call if we are not remapping a type. */
1411 if (EXPR_P (*tp))
1412 {
1413 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1414 if (TREE_BLOCK (*tp))
1415 {
1416 tree *n;
1417 n = id->decl_map->get (TREE_BLOCK (*tp));
1418 if (n)
1419 new_block = *n;
1420 }
1421 TREE_SET_BLOCK (*tp, new_block);
1422 }
1423
1424 if (TREE_CODE (*tp) != OMP_CLAUSE)
1425 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1426
1427 /* The copied TARGET_EXPR has never been expanded, even if the
1428 original node was expanded already. */
1429 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1430 {
1431 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1432 TREE_OPERAND (*tp, 3) = NULL_TREE;
1433 }
1434
1435 /* Variable substitution need not be simple. In particular, the
1436 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1437 and friends are up-to-date. */
1438 else if (TREE_CODE (*tp) == ADDR_EXPR)
1439 {
1440 int invariant = is_gimple_min_invariant (*tp);
1441 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1442
1443 /* Handle the case where we substituted an INDIRECT_REF
1444 into the operand of the ADDR_EXPR. */
1445 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1446 && !id->do_not_fold)
1447 {
1448 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1449 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1450 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1451 *tp = t;
1452 }
1453 else
1454 recompute_tree_invariant_for_addr_expr (*tp);
1455
1456 /* If this used to be invariant, but is not any longer,
1457 then regimplification is probably needed. */
1458 if (invariant && !is_gimple_min_invariant (*tp))
1459 id->regimplify = true;
1460
1461 *walk_subtrees = 0;
1462 }
1463 }
1464
1465 /* Keep iterating. */
1466 return NULL_TREE;
1467 }
1468
1469 /* Helper for remap_gimple_stmt. Given an EH region number for the
1470 source function, map that to the duplicate EH region number in
1471 the destination function. */
1472
1473 static int
1474 remap_eh_region_nr (int old_nr, copy_body_data *id)
1475 {
1476 eh_region old_r, new_r;
1477
1478 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1479 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1480
1481 return new_r->index;
1482 }
1483
1484 /* Similar, but operate on INTEGER_CSTs. */
1485
1486 static tree
1487 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1488 {
1489 int old_nr, new_nr;
1490
1491 old_nr = tree_to_shwi (old_t_nr);
1492 new_nr = remap_eh_region_nr (old_nr, id);
1493
1494 return build_int_cst (integer_type_node, new_nr);
1495 }
1496
1497 /* Helper for copy_bb. Remap statement STMT using the inlining
1498 information in ID. Return the new statement copy. */
1499
1500 static gimple_seq
1501 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1502 {
1503 gimple *copy = NULL;
1504 struct walk_stmt_info wi;
1505 bool skip_first = false;
1506 gimple_seq stmts = NULL;
1507
1508 if (is_gimple_debug (stmt)
1509 && (gimple_debug_nonbind_marker_p (stmt)
1510 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1511 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1512 return NULL;
1513
1514 /* Begin by recognizing trees that we'll completely rewrite for the
1515 inlining context. Our output for these trees is completely
1516 different from our input (e.g. RETURN_EXPR is deleted and morphs
1517 into an edge). Further down, we'll handle trees that get
1518 duplicated and/or tweaked. */
1519
1520 /* When requested, GIMPLE_RETURN should be transformed to just the
1521 contained GIMPLE_ASSIGN. The branch semantics of the return will
1522 be handled elsewhere by manipulating the CFG rather than the
1523 statement. */
1524 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1525 {
1526 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1527
1528 /* If we're returning something, just turn that into an
1529 assignment to the equivalent of the original RESULT_DECL.
1530 If RETVAL is just the result decl, the result decl has
1531 already been set (e.g. a recent "foo (&result_decl, ...)");
1532 just toss the entire GIMPLE_RETURN. */
1533 if (retval
1534 && (TREE_CODE (retval) != RESULT_DECL
1535 && (TREE_CODE (retval) != SSA_NAME
1536 || ! SSA_NAME_VAR (retval)
1537 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1538 {
1539 copy = gimple_build_assign (id->do_not_unshare
1540 ? id->retvar : unshare_expr (id->retvar),
1541 retval);
1542 /* id->retvar is already substituted. Skip it on later remapping. */
1543 skip_first = true;
1544 }
1545 else
1546 return NULL;
1547 }
1548 else if (gimple_has_substatements (stmt))
1549 {
1550 gimple_seq s1, s2;
1551
1552 /* When cloning bodies from the C++ front end, we will be handed bodies
1553 in High GIMPLE form. Handle here all the High GIMPLE statements that
1554 have embedded statements. */
1555 switch (gimple_code (stmt))
1556 {
1557 case GIMPLE_BIND:
1558 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1559 break;
1560
1561 case GIMPLE_CATCH:
1562 {
1563 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1564 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1565 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1566 }
1567 break;
1568
1569 case GIMPLE_EH_FILTER:
1570 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1571 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1572 break;
1573
1574 case GIMPLE_TRY:
1575 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1576 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1577 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1578 break;
1579
1580 case GIMPLE_WITH_CLEANUP_EXPR:
1581 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1582 copy = gimple_build_wce (s1);
1583 break;
1584
1585 case GIMPLE_OMP_PARALLEL:
1586 {
1587 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1588 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1589 copy = gimple_build_omp_parallel
1590 (s1,
1591 gimple_omp_parallel_clauses (omp_par_stmt),
1592 gimple_omp_parallel_child_fn (omp_par_stmt),
1593 gimple_omp_parallel_data_arg (omp_par_stmt));
1594 }
1595 break;
1596
1597 case GIMPLE_OMP_TASK:
1598 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1599 copy = gimple_build_omp_task
1600 (s1,
1601 gimple_omp_task_clauses (stmt),
1602 gimple_omp_task_child_fn (stmt),
1603 gimple_omp_task_data_arg (stmt),
1604 gimple_omp_task_copy_fn (stmt),
1605 gimple_omp_task_arg_size (stmt),
1606 gimple_omp_task_arg_align (stmt));
1607 break;
1608
1609 case GIMPLE_OMP_FOR:
1610 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1611 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1612 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1613 gimple_omp_for_clauses (stmt),
1614 gimple_omp_for_collapse (stmt), s2);
1615 {
1616 size_t i;
1617 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1618 {
1619 gimple_omp_for_set_index (copy, i,
1620 gimple_omp_for_index (stmt, i));
1621 gimple_omp_for_set_initial (copy, i,
1622 gimple_omp_for_initial (stmt, i));
1623 gimple_omp_for_set_final (copy, i,
1624 gimple_omp_for_final (stmt, i));
1625 gimple_omp_for_set_incr (copy, i,
1626 gimple_omp_for_incr (stmt, i));
1627 gimple_omp_for_set_cond (copy, i,
1628 gimple_omp_for_cond (stmt, i));
1629 }
1630 }
1631 break;
1632
1633 case GIMPLE_OMP_MASTER:
1634 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1635 copy = gimple_build_omp_master (s1);
1636 break;
1637
1638 case GIMPLE_OMP_TASKGROUP:
1639 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1640 copy = gimple_build_omp_taskgroup
1641 (s1, gimple_omp_taskgroup_clauses (stmt));
1642 break;
1643
1644 case GIMPLE_OMP_ORDERED:
1645 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1646 copy = gimple_build_omp_ordered
1647 (s1,
1648 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1649 break;
1650
1651 case GIMPLE_OMP_SCAN:
1652 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 copy = gimple_build_omp_scan
1654 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1655 break;
1656
1657 case GIMPLE_OMP_SECTION:
1658 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1659 copy = gimple_build_omp_section (s1);
1660 break;
1661
1662 case GIMPLE_OMP_SECTIONS:
1663 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1664 copy = gimple_build_omp_sections
1665 (s1, gimple_omp_sections_clauses (stmt));
1666 break;
1667
1668 case GIMPLE_OMP_SINGLE:
1669 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1670 copy = gimple_build_omp_single
1671 (s1, gimple_omp_single_clauses (stmt));
1672 break;
1673
1674 case GIMPLE_OMP_TARGET:
1675 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1676 copy = gimple_build_omp_target
1677 (s1, gimple_omp_target_kind (stmt),
1678 gimple_omp_target_clauses (stmt));
1679 break;
1680
1681 case GIMPLE_OMP_TEAMS:
1682 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1683 copy = gimple_build_omp_teams
1684 (s1, gimple_omp_teams_clauses (stmt));
1685 break;
1686
1687 case GIMPLE_OMP_CRITICAL:
1688 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1689 copy = gimple_build_omp_critical (s1,
1690 gimple_omp_critical_name
1691 (as_a <gomp_critical *> (stmt)),
1692 gimple_omp_critical_clauses
1693 (as_a <gomp_critical *> (stmt)));
1694 break;
1695
1696 case GIMPLE_TRANSACTION:
1697 {
1698 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1699 gtransaction *new_trans_stmt;
1700 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1701 id);
1702 copy = new_trans_stmt = gimple_build_transaction (s1);
1703 gimple_transaction_set_subcode (new_trans_stmt,
1704 gimple_transaction_subcode (old_trans_stmt));
1705 gimple_transaction_set_label_norm (new_trans_stmt,
1706 gimple_transaction_label_norm (old_trans_stmt));
1707 gimple_transaction_set_label_uninst (new_trans_stmt,
1708 gimple_transaction_label_uninst (old_trans_stmt));
1709 gimple_transaction_set_label_over (new_trans_stmt,
1710 gimple_transaction_label_over (old_trans_stmt));
1711 }
1712 break;
1713
1714 default:
1715 gcc_unreachable ();
1716 }
1717 }
1718 else
1719 {
1720 if (gimple_assign_copy_p (stmt)
1721 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1722 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1723 {
1724 /* Here we handle statements that are not completely rewritten.
1725 First we detect some inlining-induced bogosities for
1726 discarding. */
1727
1728 /* Some assignments VAR = VAR; don't generate any rtl code
1729 and thus don't count as variable modification. Avoid
1730 keeping bogosities like 0 = 0. */
1731 tree decl = gimple_assign_lhs (stmt), value;
1732 tree *n;
1733
1734 n = id->decl_map->get (decl);
1735 if (n)
1736 {
1737 value = *n;
1738 STRIP_TYPE_NOPS (value);
1739 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1740 return NULL;
1741 }
1742 }
1743
1744 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1745 in a block that we aren't copying during tree_function_versioning,
1746 just drop the clobber stmt. */
1747 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1748 {
1749 tree lhs = gimple_assign_lhs (stmt);
1750 if (TREE_CODE (lhs) == MEM_REF
1751 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1752 {
1753 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1754 if (gimple_bb (def_stmt)
1755 && !bitmap_bit_p (id->blocks_to_copy,
1756 gimple_bb (def_stmt)->index))
1757 return NULL;
1758 }
1759 }
1760
1761 if (gimple_debug_bind_p (stmt))
1762 {
1763 gdebug *copy
1764 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1765 gimple_debug_bind_get_value (stmt),
1766 stmt);
1767 if (id->reset_location)
1768 gimple_set_location (copy, input_location);
1769 id->debug_stmts.safe_push (copy);
1770 gimple_seq_add_stmt (&stmts, copy);
1771 return stmts;
1772 }
1773 if (gimple_debug_source_bind_p (stmt))
1774 {
1775 gdebug *copy = gimple_build_debug_source_bind
1776 (gimple_debug_source_bind_get_var (stmt),
1777 gimple_debug_source_bind_get_value (stmt),
1778 stmt);
1779 if (id->reset_location)
1780 gimple_set_location (copy, input_location);
1781 id->debug_stmts.safe_push (copy);
1782 gimple_seq_add_stmt (&stmts, copy);
1783 return stmts;
1784 }
1785 if (gimple_debug_nonbind_marker_p (stmt))
1786 {
1787 /* If the inlined function has too many debug markers,
1788 don't copy them. */
1789 if (id->src_cfun->debug_marker_count
1790 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1791 return stmts;
1792
1793 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1794 if (id->reset_location)
1795 gimple_set_location (copy, input_location);
1796 id->debug_stmts.safe_push (copy);
1797 gimple_seq_add_stmt (&stmts, copy);
1798 return stmts;
1799 }
1800
1801 /* Create a new deep copy of the statement. */
1802 copy = gimple_copy (stmt);
1803
1804 /* Clear flags that need revisiting. */
1805 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1806 {
1807 if (gimple_call_tail_p (call_stmt))
1808 gimple_call_set_tail (call_stmt, false);
1809 if (gimple_call_from_thunk_p (call_stmt))
1810 gimple_call_set_from_thunk (call_stmt, false);
1811 if (gimple_call_internal_p (call_stmt))
1812 switch (gimple_call_internal_fn (call_stmt))
1813 {
1814 case IFN_GOMP_SIMD_LANE:
1815 case IFN_GOMP_SIMD_VF:
1816 case IFN_GOMP_SIMD_LAST_LANE:
1817 case IFN_GOMP_SIMD_ORDERED_START:
1818 case IFN_GOMP_SIMD_ORDERED_END:
1819 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1820 break;
1821 default:
1822 break;
1823 }
1824 }
1825
1826 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1827 RESX and EH_DISPATCH. */
1828 if (id->eh_map)
1829 switch (gimple_code (copy))
1830 {
1831 case GIMPLE_CALL:
1832 {
1833 tree r, fndecl = gimple_call_fndecl (copy);
1834 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1835 switch (DECL_FUNCTION_CODE (fndecl))
1836 {
1837 case BUILT_IN_EH_COPY_VALUES:
1838 r = gimple_call_arg (copy, 1);
1839 r = remap_eh_region_tree_nr (r, id);
1840 gimple_call_set_arg (copy, 1, r);
1841 /* FALLTHRU */
1842
1843 case BUILT_IN_EH_POINTER:
1844 case BUILT_IN_EH_FILTER:
1845 r = gimple_call_arg (copy, 0);
1846 r = remap_eh_region_tree_nr (r, id);
1847 gimple_call_set_arg (copy, 0, r);
1848 break;
1849
1850 default:
1851 break;
1852 }
1853
1854 /* Reset alias info if we didn't apply measures to
1855 keep it valid over inlining by setting DECL_PT_UID. */
1856 if (!id->src_cfun->gimple_df
1857 || !id->src_cfun->gimple_df->ipa_pta)
1858 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1859 }
1860 break;
1861
1862 case GIMPLE_RESX:
1863 {
1864 gresx *resx_stmt = as_a <gresx *> (copy);
1865 int r = gimple_resx_region (resx_stmt);
1866 r = remap_eh_region_nr (r, id);
1867 gimple_resx_set_region (resx_stmt, r);
1868 }
1869 break;
1870
1871 case GIMPLE_EH_DISPATCH:
1872 {
1873 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1874 int r = gimple_eh_dispatch_region (eh_dispatch);
1875 r = remap_eh_region_nr (r, id);
1876 gimple_eh_dispatch_set_region (eh_dispatch, r);
1877 }
1878 break;
1879
1880 default:
1881 break;
1882 }
1883 }
1884
1885 /* If STMT has a block defined, map it to the newly constructed block. */
1886 if (tree block = gimple_block (copy))
1887 {
1888 tree *n;
1889 n = id->decl_map->get (block);
1890 gcc_assert (n);
1891 gimple_set_block (copy, *n);
1892 }
1893
1894 if (id->reset_location)
1895 gimple_set_location (copy, input_location);
1896
1897 /* Debug statements ought to be rebuilt and not copied. */
1898 gcc_checking_assert (!is_gimple_debug (copy));
1899
1900 /* Remap all the operands in COPY. */
1901 memset (&wi, 0, sizeof (wi));
1902 wi.info = id;
1903 if (skip_first)
1904 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1905 else
1906 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1907
1908 /* Clear the copied virtual operands. We are not remapping them here
1909 but are going to recreate them from scratch. */
1910 if (gimple_has_mem_ops (copy))
1911 {
1912 gimple_set_vdef (copy, NULL_TREE);
1913 gimple_set_vuse (copy, NULL_TREE);
1914 }
1915
1916 gimple_seq_add_stmt (&stmts, copy);
1917 return stmts;
1918 }
1919
1920
1921 /* Copy a basic block and scale its profile accordingly. Edges will be
1922 taken care of later. */
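/* NUM and DEN give the IPA profile scale: NUM is the entry count the copy
   receives in its new context and DEN the entry count of the original
   callee; after adjust_for_ipa_scaling normalizes them, each copied
   block's count is bb->count scaled by NUM/DEN.  */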
1923
1924 static basic_block
1925 copy_bb (copy_body_data *id, basic_block bb,
1926 profile_count num, profile_count den)
1927 {
1928 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1929 basic_block copy_basic_block;
1930 tree decl;
1931 basic_block prev;
1932
1933 profile_count::adjust_for_ipa_scaling (&num, &den);
1934
1935 /* Search for previous copied basic block. */
1936 prev = bb->prev_bb;
1937 while (!prev->aux)
1938 prev = prev->prev_bb;
1939
1940 /* create_basic_block() will append every new block to
1941 basic_block_info automatically. */
1942 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1943 copy_basic_block->count = bb->count.apply_scale (num, den);
1944
1945 copy_gsi = gsi_start_bb (copy_basic_block);
1946
1947 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1948 {
1949 gimple_seq stmts;
1950 gimple *stmt = gsi_stmt (gsi);
1951 gimple *orig_stmt = stmt;
1952 gimple_stmt_iterator stmts_gsi;
1953 bool stmt_added = false;
1954
1955 id->regimplify = false;
1956 stmts = remap_gimple_stmt (stmt, id);
1957
1958 if (gimple_seq_empty_p (stmts))
1959 continue;
1960
1961 seq_gsi = copy_gsi;
1962
1963 for (stmts_gsi = gsi_start (stmts);
1964 !gsi_end_p (stmts_gsi); )
1965 {
1966 stmt = gsi_stmt (stmts_gsi);
1967
1968 /* Advance iterator now before stmt is moved to seq_gsi. */
1969 gsi_next (&stmts_gsi);
1970
1971 if (gimple_nop_p (stmt))
1972 continue;
1973
1974 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1975 orig_stmt);
1976
1977 /* With return slot optimization we can end up with
1978 non-gimple (foo *)&this->m, fix that here. */
1979 if (is_gimple_assign (stmt)
1980 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1981 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1982 {
1983 tree new_rhs;
1984 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1985 gimple_assign_rhs1 (stmt),
1986 true, NULL, false,
1987 GSI_CONTINUE_LINKING);
1988 gimple_assign_set_rhs1 (stmt, new_rhs);
1989 id->regimplify = false;
1990 }
1991
1992 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1993
1994 if (id->regimplify)
1995 gimple_regimplify_operands (stmt, &seq_gsi);
1996
1997 stmt_added = true;
1998 }
1999
2000 if (!stmt_added)
2001 continue;
2002
2003 /* If copy_basic_block has been empty at the start of this iteration,
2004 call gsi_start_bb again to get at the newly added statements. */
2005 if (gsi_end_p (copy_gsi))
2006 copy_gsi = gsi_start_bb (copy_basic_block);
2007 else
2008 gsi_next (&copy_gsi);
2009
2010 /* Process the new statement. The call to gimple_regimplify_operands
2011 possibly turned the statement into multiple statements; we
2012 need to process all of them. */
2013 do
2014 {
2015 tree fn;
2016 gcall *call_stmt;
2017
2018 stmt = gsi_stmt (copy_gsi);
2019 call_stmt = dyn_cast <gcall *> (stmt);
2020 if (call_stmt
2021 && gimple_call_va_arg_pack_p (call_stmt)
2022 && id->call_stmt
2023 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2024 {
2025 /* __builtin_va_arg_pack () should be replaced by
2026 all arguments corresponding to ... in the caller. */
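/* For example (an illustrative sketch only; the names are made up):
   if a stdarg function foo with one named parameter is inlined for the
   call foo (1, 2, 3) and its body contains bar (x, __builtin_va_arg_pack ()),
   the copied call becomes bar (x, 2, 3): the explicit arguments are kept
   and the caller's anonymous arguments are appended in place of the pack.  */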
2027 tree p;
2028 gcall *new_call;
2029 vec<tree> argarray;
2030 size_t nargs = gimple_call_num_args (id->call_stmt);
2031 size_t n;
2032
2033 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2034 nargs--;
2035
2036 /* Create the new array of arguments. */
2037 n = nargs + gimple_call_num_args (call_stmt);
2038 argarray.create (n);
2039 argarray.safe_grow_cleared (n);
2040
2041 /* Copy all the arguments before '...' */
2042 memcpy (argarray.address (),
2043 gimple_call_arg_ptr (call_stmt, 0),
2044 gimple_call_num_args (call_stmt) * sizeof (tree));
2045
2046 /* Append the arguments passed in '...' */
2047 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2048 gimple_call_arg_ptr (id->call_stmt, 0)
2049 + (gimple_call_num_args (id->call_stmt) - nargs),
2050 nargs * sizeof (tree));
2051
2052 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2053 argarray);
2054
2055 argarray.release ();
2056
2057 /* Copy all GIMPLE_CALL flags, location and block, except
2058 GF_CALL_VA_ARG_PACK. */
2059 gimple_call_copy_flags (new_call, call_stmt);
2060 gimple_call_set_va_arg_pack (new_call, false);
2061 /* location includes block. */
2062 gimple_set_location (new_call, gimple_location (stmt));
2063 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2064
2065 gsi_replace (&copy_gsi, new_call, false);
2066 stmt = new_call;
2067 }
2068 else if (call_stmt
2069 && id->call_stmt
2070 && (decl = gimple_call_fndecl (stmt))
2071 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2072 {
2073 /* __builtin_va_arg_pack_len () should be replaced by
2074 the number of anonymous arguments. */
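/* Continuing the illustrative example above: for the call foo (1, 2, 3)
   to a function with one named parameter, __builtin_va_arg_pack_len ()
   is replaced by the constant 2; if the caller itself uses
   __builtin_va_arg_pack, the 2 is instead added to the caller's own
   pack length via the PLUS_EXPR built below.  */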
2075 size_t nargs = gimple_call_num_args (id->call_stmt);
2076 tree count, p;
2077 gimple *new_stmt;
2078
2079 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2080 nargs--;
2081
2082 if (!gimple_call_lhs (stmt))
2083 {
2084 /* Drop unused calls. */
2085 gsi_remove (&copy_gsi, false);
2086 continue;
2087 }
2088 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2089 {
2090 count = build_int_cst (integer_type_node, nargs);
2091 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2092 gsi_replace (&copy_gsi, new_stmt, false);
2093 stmt = new_stmt;
2094 }
2095 else if (nargs != 0)
2096 {
2097 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2098 count = build_int_cst (integer_type_node, nargs);
2099 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2100 PLUS_EXPR, newlhs, count);
2101 gimple_call_set_lhs (stmt, newlhs);
2102 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2103 }
2104 }
2105 else if (call_stmt
2106 && id->call_stmt
2107 && gimple_call_internal_p (stmt)
2108 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2109 {
2110 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2111 gsi_remove (&copy_gsi, false);
2112 continue;
2113 }
2114
2115 /* Statements produced by inlining can be unfolded, especially
2116 when we constant propagated some operands. We can't fold
2117 them right now for two reasons:
2118 1) folding requires SSA_NAME_DEF_STMTs to be correct
2119 2) we can't change function calls to builtins.
2120 So we just mark the statement for later folding. We mark
2121 all new statements, instead of just the statements that have changed
2122 by some nontrivial substitution, so even statements made
2123 foldable indirectly are updated. If this turns out to be
2124 expensive, copy_body can be told to watch for nontrivial
2125 changes. */
2126 if (id->statements_to_fold)
2127 id->statements_to_fold->add (stmt);
2128
2129 /* We're duplicating a CALL_EXPR. Find any corresponding
2130 callgraph edges and update or duplicate them. */
2131 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2132 {
2133 struct cgraph_edge *edge;
2134
2135 switch (id->transform_call_graph_edges)
2136 {
2137 case CB_CGE_DUPLICATE:
2138 edge = id->src_node->get_edge (orig_stmt);
2139 if (edge)
2140 {
2141 struct cgraph_edge *old_edge = edge;
2142 profile_count old_cnt = edge->count;
2143 edge = edge->clone (id->dst_node, call_stmt,
2144 gimple_uid (stmt),
2145 num, den,
2146 true);
2147
2148 /* Speculative calls consist of two edges - direct and
2149 indirect. Duplicate the whole thing and distribute
2150 frequencies accordingly. */
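/* The original ratio between direct and indirect counts is preserved:
   the indirect clone gets copy_basic_block->count scaled by
   indir_cnt / (old_cnt + indir_cnt) and the direct clone receives
   the remainder.  */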
2151 if (edge->speculative)
2152 {
2153 struct cgraph_edge *direct, *indirect;
2154 struct ipa_ref *ref;
2155
2156 gcc_assert (!edge->indirect_unknown_callee);
2157 old_edge->speculative_call_info (direct, indirect, ref);
2158
2159 profile_count indir_cnt = indirect->count;
2160 indirect = indirect->clone (id->dst_node, call_stmt,
2161 gimple_uid (stmt),
2162 num, den,
2163 true);
2164
2165 profile_probability prob
2166 = indir_cnt.probability_in (old_cnt + indir_cnt);
2167 indirect->count
2168 = copy_basic_block->count.apply_probability (prob);
2169 edge->count = copy_basic_block->count - indirect->count;
2170 id->dst_node->clone_reference (ref, stmt);
2171 }
2172 else
2173 edge->count = copy_basic_block->count;
2174 }
2175 break;
2176
2177 case CB_CGE_MOVE_CLONES:
2178 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2179 call_stmt);
2180 edge = id->dst_node->get_edge (stmt);
2181 break;
2182
2183 case CB_CGE_MOVE:
2184 edge = id->dst_node->get_edge (orig_stmt);
2185 if (edge)
2186 edge->set_call_stmt (call_stmt);
2187 break;
2188
2189 default:
2190 gcc_unreachable ();
2191 }
2192
2193 /* Constant propagation on arguments done during inlining
2194 may create a new direct call. Produce an edge for it. */
2195 if ((!edge
2196 || (edge->indirect_inlining_edge
2197 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2198 && id->dst_node->definition
2199 && (fn = gimple_call_fndecl (stmt)) != NULL)
2200 {
2201 struct cgraph_node *dest = cgraph_node::get_create (fn);
2202
2203 /* We have a missing edge in the callgraph. This can happen
2204 when previous inlining turned an indirect call into a
2205 direct call by constant propagating arguments, or when we are
2206 producing a dead clone (for further cloning). In all
2207 other cases we hit a bug (incorrect node sharing is the
2208 most common reason for missing edges). */
2209 gcc_assert (!dest->definition
2210 || dest->address_taken
2211 || !id->src_node->definition
2212 || !id->dst_node->definition);
2213 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2214 id->dst_node->create_edge_including_clones
2215 (dest, orig_stmt, call_stmt, bb->count,
2216 CIF_ORIGINALLY_INDIRECT_CALL);
2217 else
2218 id->dst_node->create_edge (dest, call_stmt,
2219 bb->count)->inline_failed
2220 = CIF_ORIGINALLY_INDIRECT_CALL;
2221 if (dump_file)
2222 {
2223 fprintf (dump_file, "Created new direct edge to %s\n",
2224 dest->name ());
2225 }
2226 }
2227
2228 notice_special_calls (as_a <gcall *> (stmt));
2229 }
2230
2231 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2232 id->eh_map, id->eh_lp_nr);
2233
2234 gsi_next (&copy_gsi);
2235 }
2236 while (!gsi_end_p (copy_gsi));
2237
2238 copy_gsi = gsi_last_bb (copy_basic_block);
2239 }
2240
2241 return copy_basic_block;
2242 }
2243
2244 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2245 SSA form is quite easy, since the dominator relationship for the old
2246 basic blocks does not change.
2247
2248 There is, however, an exception: inlining might change the dominator
2249 relation across EH edges going from basic blocks within the inlined
2250 function to landing pads in the function we inline into.
2251
2252 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2253 to gimple regs. Otherwise, it marks the PHI_RESULT of such
2254 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2255 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2256 set, which means that there will be no overlapping live ranges
2257 for the underlying symbol.
2258
2259 This might change in the future if we allow redirecting of EH edges;
2260 we might then want to change the way the CFG is built pre-inlining
2261 to include all the possible edges. */
2262 static void
2263 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2264 bool can_throw, bool nonlocal_goto)
2265 {
2266 edge e;
2267 edge_iterator ei;
2268
2269 FOR_EACH_EDGE (e, ei, bb->succs)
2270 if (!e->dest->aux
2271 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2272 {
2273 gphi *phi;
2274 gphi_iterator si;
2275
2276 if (!nonlocal_goto)
2277 gcc_assert (e->flags & EDGE_EH);
2278
2279 if (!can_throw)
2280 gcc_assert (!(e->flags & EDGE_EH));
2281
2282 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2283 {
2284 edge re;
2285
2286 phi = si.phi ();
2287
2288 /* For abnormal goto/call edges the receiver can be the
2289 ENTRY_BLOCK. Do not assert this cannot happen. */
2290
2291 gcc_assert ((e->flags & EDGE_EH)
2292 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2293
2294 re = find_edge (ret_bb, e->dest);
2295 gcc_checking_assert (re);
2296 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2297 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2298
2299 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2300 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2301 }
2302 }
2303 }
2304
2305 /* Insert clobbers for automatic variables of inlined ID->src_fn
2306 function at the start of basic block ID->eh_landing_pad_dest. */
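/* Only variables that are live across some EH edge into the landing pad
   receive a clobber; the clobbers mark the inlined function's locals as
   dead on the exception path, which is presumably what allows later
   passes to reuse their stack slots.  */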
2307
2308 static void
2309 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2310 {
2311 tree var;
2312 basic_block bb = id->eh_landing_pad_dest;
2313 live_vars_map *vars = NULL;
2314 unsigned int cnt = 0;
2315 unsigned int i;
2316 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2317 if (VAR_P (var)
2318 && !DECL_HARD_REGISTER (var)
2319 && !TREE_THIS_VOLATILE (var)
2320 && !DECL_HAS_VALUE_EXPR_P (var)
2321 && !is_gimple_reg (var)
2322 && auto_var_in_fn_p (var, id->src_fn)
2323 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2324 {
2325 tree *t = id->decl_map->get (var);
2326 if (!t)
2327 continue;
2328 tree new_var = *t;
2329 if (VAR_P (new_var)
2330 && !DECL_HARD_REGISTER (new_var)
2331 && !TREE_THIS_VOLATILE (new_var)
2332 && !DECL_HAS_VALUE_EXPR_P (new_var)
2333 && !is_gimple_reg (new_var)
2334 && auto_var_in_fn_p (new_var, id->dst_fn))
2335 {
2336 if (vars == NULL)
2337 vars = new live_vars_map;
2338 vars->put (DECL_UID (var), cnt++);
2339 }
2340 }
2341 if (vars == NULL)
2342 return;
2343
2344 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2345 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2346 if (VAR_P (var))
2347 {
2348 edge e;
2349 edge_iterator ei;
2350 bool needed = false;
2351 unsigned int *v = vars->get (DECL_UID (var));
2352 if (v == NULL)
2353 continue;
2354 FOR_EACH_EDGE (e, ei, bb->preds)
2355 if ((e->flags & EDGE_EH) != 0
2356 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2357 {
2358 basic_block src_bb = (basic_block) e->src->aux;
2359
2360 if (bitmap_bit_p (&live[src_bb->index], *v))
2361 {
2362 needed = true;
2363 break;
2364 }
2365 }
2366 if (needed)
2367 {
2368 tree new_var = *id->decl_map->get (var);
2369 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2370 tree clobber = build_clobber (TREE_TYPE (new_var));
2371 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2372 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2373 }
2374 }
2375 destroy_live_vars (live);
2376 delete vars;
2377 }
2378
2379 /* Copy edges from BB into its copy constructed earlier, scaling the
2380 profile accordingly. Assume the aux pointers point to the copies of
2381 each BB. Return true if any debug stmts are left after a statement
2382 that must end the basic block. */
2383
2384 static bool
2385 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2386 basic_block ret_bb, basic_block abnormal_goto_dest,
2387 copy_body_data *id)
2388 {
2389 basic_block new_bb = (basic_block) bb->aux;
2390 edge_iterator ei;
2391 edge old_edge;
2392 gimple_stmt_iterator si;
2393 bool need_debug_cleanup = false;
2394
2395 /* Use the indices from the original blocks to create edges for the
2396 new ones. */
2397 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2398 if (!(old_edge->flags & EDGE_EH))
2399 {
2400 edge new_edge;
2401 int flags = old_edge->flags;
2402 location_t locus = old_edge->goto_locus;
2403
2404 /* Return edges do get a FALLTHRU flag when they get inlined. */
2405 if (old_edge->dest->index == EXIT_BLOCK
2406 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2407 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2408 flags |= EDGE_FALLTHRU;
2409
2410 new_edge
2411 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2412 new_edge->probability = old_edge->probability;
2413 if (!id->reset_location)
2414 new_edge->goto_locus = remap_location (locus, id);
2415 }
2416
2417 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2418 return false;
2419
2420 /* When doing function splitting, we must decrease the count of the return
2421 block, which was previously reachable from blocks we did not copy. */
2422 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2423 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2424 if (old_edge->src->index != ENTRY_BLOCK
2425 && !old_edge->src->aux)
2426 new_bb->count -= old_edge->count ().apply_scale (num, den);
2427
2428 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2429 {
2430 gimple *copy_stmt;
2431 bool can_throw, nonlocal_goto;
2432
2433 copy_stmt = gsi_stmt (si);
2434 if (!is_gimple_debug (copy_stmt))
2435 update_stmt (copy_stmt);
2436
2437 /* Do this before the possible split_block. */
2438 gsi_next (&si);
2439
2440 /* If this tree could throw an exception, there are two
2441 cases where we need to add abnormal edge(s): the
2442 tree wasn't in a region and there is a "current
2443 region" in the caller; or the original tree had
2444 EH edges. In both cases split the block after the tree,
2445 and add abnormal edge(s) as needed; we need both
2446 those from the callee and the caller.
2447 We check whether the copy can throw, because the const
2448 propagation can change an INDIRECT_REF which throws
2449 into a COMPONENT_REF which doesn't. If the copy
2450 can throw, the original could also throw. */
2451 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2452 nonlocal_goto
2453 = (stmt_can_make_abnormal_goto (copy_stmt)
2454 && !computed_goto_p (copy_stmt));
2455
2456 if (can_throw || nonlocal_goto)
2457 {
2458 if (!gsi_end_p (si))
2459 {
2460 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2461 gsi_next (&si);
2462 if (gsi_end_p (si))
2463 need_debug_cleanup = true;
2464 }
2465 if (!gsi_end_p (si))
2466 /* Note that bb's predecessor edges aren't necessarily
2467 right at this point; split_block doesn't care. */
2468 {
2469 edge e = split_block (new_bb, copy_stmt);
2470
2471 new_bb = e->dest;
2472 new_bb->aux = e->src->aux;
2473 si = gsi_start_bb (new_bb);
2474 }
2475 }
2476
2477 bool update_probs = false;
2478
2479 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2480 {
2481 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2482 update_probs = true;
2483 }
2484 else if (can_throw)
2485 {
2486 make_eh_edges (copy_stmt);
2487 update_probs = true;
2488 }
2489
2490 /* EH edges may not match old edges. Copy as much as possible. */
2491 if (update_probs)
2492 {
2493 edge e;
2494 edge_iterator ei;
2495 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2496
2497 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2498 if ((old_edge->flags & EDGE_EH)
2499 && (e = find_edge (copy_stmt_bb,
2500 (basic_block) old_edge->dest->aux))
2501 && (e->flags & EDGE_EH))
2502 e->probability = old_edge->probability;
2503
2504 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2505 if (e->flags & EDGE_EH)
2506 {
2507 if (!e->probability.initialized_p ())
2508 e->probability = profile_probability::never ();
2509 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2510 {
2511 if (id->eh_landing_pad_dest == NULL)
2512 id->eh_landing_pad_dest = e->dest;
2513 else
2514 gcc_assert (id->eh_landing_pad_dest == e->dest);
2515 }
2516 }
2517 }
2518
2519
2520 /* If the call we inline cannot make an abnormal goto, do not add
2521 additional abnormal edges; only retain those already present
2522 in the original function body. */
2523 if (abnormal_goto_dest == NULL)
2524 nonlocal_goto = false;
2525 if (nonlocal_goto)
2526 {
2527 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2528
2529 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2530 nonlocal_goto = false;
2531 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2532 in OpenMP regions which aren't allowed to be left abnormally.
2533 So, no need to add abnormal edge in that case. */
2534 else if (is_gimple_call (copy_stmt)
2535 && gimple_call_internal_p (copy_stmt)
2536 && (gimple_call_internal_fn (copy_stmt)
2537 == IFN_ABNORMAL_DISPATCHER)
2538 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2539 nonlocal_goto = false;
2540 else
2541 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2542 EDGE_ABNORMAL);
2543 }
2544
2545 if ((can_throw || nonlocal_goto)
2546 && gimple_in_ssa_p (cfun))
2547 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2548 can_throw, nonlocal_goto);
2549 }
2550 return need_debug_cleanup;
2551 }
2552
2553 /* Copy the PHIs. All blocks and edges have been copied; some blocks
2554 were possibly split and new outgoing EH edges inserted.
2555 BB points to the block of the original function and AUX pointers link
2556 the original and newly copied blocks. */
2557
2558 static void
2559 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2560 {
2561 basic_block const new_bb = (basic_block) bb->aux;
2562 edge_iterator ei;
2563 gphi *phi;
2564 gphi_iterator si;
2565 edge new_edge;
2566 bool inserted = false;
2567
2568 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2569 {
2570 tree res, new_res;
2571 gphi *new_phi;
2572
2573 phi = si.phi ();
2574 res = PHI_RESULT (phi);
2575 new_res = res;
2576 if (!virtual_operand_p (res))
2577 {
2578 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2579 if (EDGE_COUNT (new_bb->preds) == 0)
2580 {
2581 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2582 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2583 }
2584 else
2585 {
2586 new_phi = create_phi_node (new_res, new_bb);
2587 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2588 {
2589 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2590 bb);
2591 tree arg;
2592 tree new_arg;
2593 edge_iterator ei2;
2594 location_t locus;
2595
2596 /* When doing partial cloning, we allow PHIs on the entry
2597 block as long as all the arguments are the same.
2598 Find any input edge to see the argument to copy. */
2599 if (!old_edge)
2600 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2601 if (!old_edge->src->aux)
2602 break;
2603
2604 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2605 new_arg = arg;
2606 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2607 gcc_assert (new_arg);
2608 /* With return slot optimization we can end up with
2609 non-gimple (foo *)&this->m, fix that here. */
2610 if (TREE_CODE (new_arg) != SSA_NAME
2611 && TREE_CODE (new_arg) != FUNCTION_DECL
2612 && !is_gimple_val (new_arg))
2613 {
2614 gimple_seq stmts = NULL;
2615 new_arg = force_gimple_operand (new_arg, &stmts, true,
2616 NULL);
2617 gsi_insert_seq_on_edge (new_edge, stmts);
2618 inserted = true;
2619 }
2620 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2621 if (id->reset_location)
2622 locus = input_location;
2623 else
2624 locus = remap_location (locus, id);
2625 add_phi_arg (new_phi, new_arg, new_edge, locus);
2626 }
2627 }
2628 }
2629 }
2630
2631 /* Commit the delayed edge insertions. */
2632 if (inserted)
2633 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2634 gsi_commit_one_edge_insert (new_edge, NULL);
2635 }
2636
2637
2638 /* Wrapper for remap_decl so it can be used as a callback. */
2639
2640 static tree
2641 remap_decl_1 (tree decl, void *data)
2642 {
2643 return remap_decl (decl, (copy_body_data *) data);
2644 }
2645
2646 /* Build the struct function and associated data structures for the new
2647 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2648 changes cfun to the function of new_fndecl (and current_function_decl too). */
2649
2650 static void
2651 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2652 {
2653 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2654
2655 if (!DECL_ARGUMENTS (new_fndecl))
2656 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2657 if (!DECL_RESULT (new_fndecl))
2658 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2659
2660 /* Register specific tree functions. */
2661 gimple_register_cfg_hooks ();
2662
2663 /* Get clean struct function. */
2664 push_struct_function (new_fndecl);
2665
2666 /* We will rebuild these, so just sanity check that they are empty. */
2667 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2668 gcc_assert (cfun->local_decls == NULL);
2669 gcc_assert (cfun->cfg == NULL);
2670 gcc_assert (cfun->decl == new_fndecl);
2671
2672 /* Copy items we preserve during cloning. */
2673 cfun->static_chain_decl = src_cfun->static_chain_decl;
2674 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2675 cfun->function_end_locus = src_cfun->function_end_locus;
2676 cfun->curr_properties = src_cfun->curr_properties;
2677 cfun->last_verified = src_cfun->last_verified;
2678 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2679 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2680 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2681 cfun->calls_eh_return = src_cfun->calls_eh_return;
2682 cfun->stdarg = src_cfun->stdarg;
2683 cfun->after_inlining = src_cfun->after_inlining;
2684 cfun->can_throw_non_call_exceptions
2685 = src_cfun->can_throw_non_call_exceptions;
2686 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2687 cfun->returns_struct = src_cfun->returns_struct;
2688 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2689
2690 init_empty_tree_cfg ();
2691
2692 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2693
2694 profile_count num = count;
2695 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2696 profile_count::adjust_for_ipa_scaling (&num, &den);
2697
2698 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2699 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2700 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2701 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2702 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2703 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2704 if (src_cfun->eh)
2705 init_eh_for_function ();
2706
2707 if (src_cfun->gimple_df)
2708 {
2709 init_tree_ssa (cfun);
2710 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2711 if (cfun->gimple_df->in_ssa_p)
2712 init_ssa_operands (cfun);
2713 }
2714 }
2715
2716 /* Helper function for copy_cfg_body. Move debug stmts from the end
2717 of NEW_BB to the beginning of successor basic blocks when needed. If the
2718 successor has multiple predecessors, reset the bound values; otherwise
2719 keep them. */
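/* Resetting the moved binds when the successor has several predecessors
   is the conservative choice: the bound value need not be the same on
   every incoming path, so keeping it could describe a wrong value on
   some of them.  */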
2720
2721 static void
2722 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2723 {
2724 edge e;
2725 edge_iterator ei;
2726 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2727
2728 if (gsi_end_p (si)
2729 || gsi_one_before_end_p (si)
2730 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2731 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2732 return;
2733
2734 FOR_EACH_EDGE (e, ei, new_bb->succs)
2735 {
2736 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2737 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2738 while (is_gimple_debug (gsi_stmt (ssi)))
2739 {
2740 gimple *stmt = gsi_stmt (ssi);
2741 gdebug *new_stmt;
2742 tree var;
2743 tree value;
2744
2745 /* For the last edge move the debug stmts instead of copying
2746 them. */
2747 if (ei_one_before_end_p (ei))
2748 {
2749 si = ssi;
2750 gsi_prev (&ssi);
2751 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2752 {
2753 gimple_debug_bind_reset_value (stmt);
2754 gimple_set_location (stmt, UNKNOWN_LOCATION);
2755 }
2756 gsi_remove (&si, false);
2757 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2758 continue;
2759 }
2760
2761 if (gimple_debug_bind_p (stmt))
2762 {
2763 var = gimple_debug_bind_get_var (stmt);
2764 if (single_pred_p (e->dest))
2765 {
2766 value = gimple_debug_bind_get_value (stmt);
2767 value = unshare_expr (value);
2768 new_stmt = gimple_build_debug_bind (var, value, stmt);
2769 }
2770 else
2771 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2772 }
2773 else if (gimple_debug_source_bind_p (stmt))
2774 {
2775 var = gimple_debug_source_bind_get_var (stmt);
2776 value = gimple_debug_source_bind_get_value (stmt);
2777 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2778 }
2779 else if (gimple_debug_nonbind_marker_p (stmt))
2780 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2781 else
2782 gcc_unreachable ();
2783 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2784 id->debug_stmts.safe_push (new_stmt);
2785 gsi_prev (&ssi);
2786 }
2787 }
2788 }
2789
2790 /* Make a copy of the sub-loops of SRC_PARENT and place them
2791 as children of DEST_PARENT. */
2792
2793 static void
2794 copy_loops (copy_body_data *id,
2795 struct loop *dest_parent, struct loop *src_parent)
2796 {
2797 struct loop *src_loop = src_parent->inner;
2798 while (src_loop)
2799 {
2800 if (!id->blocks_to_copy
2801 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2802 {
2803 struct loop *dest_loop = alloc_loop ();
2804
2805 /* Assign the new loop its header and latch and associate
2806 those with the new loop. */
2807 dest_loop->header = (basic_block)src_loop->header->aux;
2808 dest_loop->header->loop_father = dest_loop;
2809 if (src_loop->latch != NULL)
2810 {
2811 dest_loop->latch = (basic_block)src_loop->latch->aux;
2812 dest_loop->latch->loop_father = dest_loop;
2813 }
2814
2815 /* Copy loop meta-data. */
2816 copy_loop_info (src_loop, dest_loop);
2817 if (dest_loop->unroll)
2818 cfun->has_unroll = true;
2819 if (dest_loop->force_vectorize)
2820 cfun->has_force_vectorize_loops = true;
2821 if (id->src_cfun->last_clique != 0)
2822 dest_loop->owned_clique
2823 = remap_dependence_clique (id,
2824 src_loop->owned_clique
2825 ? src_loop->owned_clique : 1);
2826
2827 /* Finally place it into the loop array and the loop tree. */
2828 place_new_loop (cfun, dest_loop);
2829 flow_loop_tree_node_add (dest_parent, dest_loop);
2830
2831 if (src_loop->simduid)
2832 {
2833 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2834 cfun->has_simduid_loops = true;
2835 }
2836
2837 /* Recurse. */
2838 copy_loops (id, dest_loop, src_loop);
2839 }
2840 src_loop = src_loop->next;
2841 }
2842 }
2843
2844 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2845
2846 void
2847 redirect_all_calls (copy_body_data * id, basic_block bb)
2848 {
2849 gimple_stmt_iterator si;
2850 gimple *last = last_stmt (bb);
2851 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2852 {
2853 gimple *stmt = gsi_stmt (si);
2854 if (is_gimple_call (stmt))
2855 {
2856 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2857 if (edge)
2858 {
2859 edge->redirect_call_stmt_to_callee ();
2860 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2861 gimple_purge_dead_eh_edges (bb);
2862 }
2863 }
2864 }
2865 }
2866
2867 /* Make a copy of the body of FN so that it can be inserted inline in
2868 another function. Walks FN via CFG, returns new fndecl. */
2869
2870 static tree
2871 copy_cfg_body (copy_body_data * id,
2872 basic_block entry_block_map, basic_block exit_block_map,
2873 basic_block new_entry)
2874 {
2875 tree callee_fndecl = id->src_fn;
2876 /* Original cfun for the callee, doesn't change. */
2877 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2878 struct function *cfun_to_copy;
2879 basic_block bb;
2880 tree new_fndecl = NULL;
2881 bool need_debug_cleanup = false;
2882 int last;
2883 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2884 profile_count num = entry_block_map->count;
2885
2886 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2887
2888 /* Register specific tree functions. */
2889 gimple_register_cfg_hooks ();
2890
2891 /* If we are inlining just a region of the function, make sure to connect
2892 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2893 be part of a loop, we must compute the frequency and probability of
2894 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2895 probabilities of edges incoming from the nonduplicated region. */
2896 if (new_entry)
2897 {
2898 edge e;
2899 edge_iterator ei;
2900 den = profile_count::zero ();
2901
2902 FOR_EACH_EDGE (e, ei, new_entry->preds)
2903 if (!e->src->aux)
2904 den += e->count ();
2905 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2906 }
2907
2908 profile_count::adjust_for_ipa_scaling (&num, &den);
2909
2910 /* Must have a CFG here at this point. */
2911 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2912 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2913
2914
2915 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2916 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2917 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2918 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2919
2920 /* Duplicate any exception-handling regions. */
2921 if (cfun->eh)
2922 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2923 remap_decl_1, id);
2924
2925 /* Use aux pointers to map the original blocks to their copies. */
2926 FOR_EACH_BB_FN (bb, cfun_to_copy)
2927 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2928 {
2929 basic_block new_bb = copy_bb (id, bb, num, den);
2930 bb->aux = new_bb;
2931 new_bb->aux = bb;
2932 new_bb->loop_father = entry_block_map->loop_father;
2933 }
2934
2935 last = last_basic_block_for_fn (cfun);
2936
2937 /* Now that we've duplicated the blocks, duplicate their edges. */
2938 basic_block abnormal_goto_dest = NULL;
2939 if (id->call_stmt
2940 && stmt_can_make_abnormal_goto (id->call_stmt))
2941 {
2942 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2943
2944 bb = gimple_bb (id->call_stmt);
2945 gsi_next (&gsi);
2946 if (gsi_end_p (gsi))
2947 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2948 }
2949 FOR_ALL_BB_FN (bb, cfun_to_copy)
2950 if (!id->blocks_to_copy
2951 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2952 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2953 abnormal_goto_dest, id);
2954
2955 if (id->eh_landing_pad_dest)
2956 {
2957 add_clobbers_to_eh_landing_pad (id);
2958 id->eh_landing_pad_dest = NULL;
2959 }
2960
2961 if (new_entry)
2962 {
2963 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2964 EDGE_FALLTHRU);
2965 e->probability = profile_probability::always ();
2966 }
2967
2968 /* Duplicate the loop tree, if available and wanted. */
2969 if (loops_for_fn (src_cfun) != NULL
2970 && current_loops != NULL)
2971 {
2972 copy_loops (id, entry_block_map->loop_father,
2973 get_loop (src_cfun, 0));
2974 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2975 loops_state_set (LOOPS_NEED_FIXUP);
2976 }
2977
2978 /* If the loop tree in the source function needed fixup, mark the
2979 destination loop tree for fixup, too. */
2980 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2981 loops_state_set (LOOPS_NEED_FIXUP);
2982
2983 if (gimple_in_ssa_p (cfun))
2984 FOR_ALL_BB_FN (bb, cfun_to_copy)
2985 if (!id->blocks_to_copy
2986 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2987 copy_phis_for_bb (bb, id);
2988
2989 FOR_ALL_BB_FN (bb, cfun_to_copy)
2990 if (bb->aux)
2991 {
2992 if (need_debug_cleanup
2993 && bb->index != ENTRY_BLOCK
2994 && bb->index != EXIT_BLOCK)
2995 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2996 /* Update call edge destinations. This cannot be done before loop
2997 info is updated, because we may split basic blocks. */
2998 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2999 && bb->index != ENTRY_BLOCK
3000 && bb->index != EXIT_BLOCK)
3001 redirect_all_calls (id, (basic_block)bb->aux);
3002 ((basic_block)bb->aux)->aux = NULL;
3003 bb->aux = NULL;
3004 }
3005
3006 /* Zero out the AUX fields of blocks newly created during EH edge
3007 insertion. */
3008 for (; last < last_basic_block_for_fn (cfun); last++)
3009 {
3010 if (need_debug_cleanup)
3011 maybe_move_debug_stmts_to_successors (id,
3012 BASIC_BLOCK_FOR_FN (cfun, last));
3013 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3014 /* Update call edge destinations. This cannot be done before loop
3015 info is updated, because we may split basic blocks. */
3016 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3017 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3018 }
3019 entry_block_map->aux = NULL;
3020 exit_block_map->aux = NULL;
3021
3022 if (id->eh_map)
3023 {
3024 delete id->eh_map;
3025 id->eh_map = NULL;
3026 }
3027 if (id->dependence_map)
3028 {
3029 delete id->dependence_map;
3030 id->dependence_map = NULL;
3031 }
3032
3033 return new_fndecl;
3034 }
3035
3036 /* Copy the debug STMT using ID. We deal with these statements in a
3037 special way: if any variable in their VALUE expression wasn't
3038 remapped yet, we won't remap it, because that would get decl uids
3039 out of sync, causing codegen differences between -g and -g0. If
3040 this arises, we drop the VALUE expression altogether. */
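/* The uid concern is that remapping a variable only because a debug bind
   refers to it would create a fresh decl (and DECL_UID) that does not
   exist in a -g0 compilation, so later uid-ordered decisions could
   diverge; dropping the value keeps -g and -g0 code identical.  */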
3041
3042 static void
3043 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3044 {
3045 tree t, *n;
3046 struct walk_stmt_info wi;
3047
3048 if (tree block = gimple_block (stmt))
3049 {
3050 n = id->decl_map->get (block);
3051 gimple_set_block (stmt, n ? *n : id->block);
3052 }
3053
3054 if (gimple_debug_nonbind_marker_p (stmt))
3055 return;
3056
3057 /* Remap all the operands in COPY. */
3058 memset (&wi, 0, sizeof (wi));
3059 wi.info = id;
3060
3061 processing_debug_stmt = 1;
3062
3063 if (gimple_debug_source_bind_p (stmt))
3064 t = gimple_debug_source_bind_get_var (stmt);
3065 else if (gimple_debug_bind_p (stmt))
3066 t = gimple_debug_bind_get_var (stmt);
3067 else
3068 gcc_unreachable ();
3069
3070 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3071 && (n = id->debug_map->get (t)))
3072 {
3073 gcc_assert (VAR_P (*n));
3074 t = *n;
3075 }
3076 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3077 /* T is a non-localized variable. */;
3078 else
3079 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3080
3081 if (gimple_debug_bind_p (stmt))
3082 {
3083 gimple_debug_bind_set_var (stmt, t);
3084
3085 if (gimple_debug_bind_has_value_p (stmt))
3086 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3087 remap_gimple_op_r, &wi, NULL);
3088
3089 /* Punt if any decl couldn't be remapped. */
3090 if (processing_debug_stmt < 0)
3091 gimple_debug_bind_reset_value (stmt);
3092 }
3093 else if (gimple_debug_source_bind_p (stmt))
3094 {
3095 gimple_debug_source_bind_set_var (stmt, t);
3096 /* When inlining and source bind refers to one of the optimized
3097 away parameters, change the source bind into normal debug bind
3098 referring to the corresponding DEBUG_EXPR_DECL that should have
3099 been bound before the call stmt. */
3100 t = gimple_debug_source_bind_get_value (stmt);
3101 if (t != NULL_TREE
3102 && TREE_CODE (t) == PARM_DECL
3103 && id->call_stmt)
3104 {
3105 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3106 unsigned int i;
3107 if (debug_args != NULL)
3108 {
3109 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3110 if ((**debug_args)[i] == DECL_ORIGIN (t)
3111 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3112 {
3113 t = (**debug_args)[i + 1];
3114 stmt->subcode = GIMPLE_DEBUG_BIND;
3115 gimple_debug_bind_set_value (stmt, t);
3116 break;
3117 }
3118 }
3119 }
3120 if (gimple_debug_source_bind_p (stmt))
3121 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3122 remap_gimple_op_r, &wi, NULL);
3123 }
3124
3125 processing_debug_stmt = 0;
3126
3127 update_stmt (stmt);
3128 }
3129
3130 /* Process deferred debug stmts. In order to give values better odds
3131 of being successfully remapped, we delay the processing of debug
3132 stmts until all other stmts that might require remapping are
3133 processed. */
3134
3135 static void
3136 copy_debug_stmts (copy_body_data *id)
3137 {
3138 size_t i;
3139 gdebug *stmt;
3140
3141 if (!id->debug_stmts.exists ())
3142 return;
3143
3144 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3145 copy_debug_stmt (stmt, id);
3146
3147 id->debug_stmts.release ();
3148 }
3149
3150 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3151 another function. */
3152
3153 static tree
3154 copy_tree_body (copy_body_data *id)
3155 {
3156 tree fndecl = id->src_fn;
3157 tree body = DECL_SAVED_TREE (fndecl);
3158
3159 walk_tree (&body, copy_tree_body_r, id, NULL);
3160
3161 return body;
3162 }
3163
3164 /* Make a copy of the body of FN so that it can be inserted inline in
3165 another function. */
3166
3167 static tree
3168 copy_body (copy_body_data *id,
3169 basic_block entry_block_map, basic_block exit_block_map,
3170 basic_block new_entry)
3171 {
3172 tree fndecl = id->src_fn;
3173 tree body;
3174
3175 /* If this body has a CFG, walk CFG and copy. */
3176 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3177 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3178 new_entry);
3179 copy_debug_stmts (id);
3180
3181 return body;
3182 }
3183
3184 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3185 defined in function FN, or of a data member thereof. */
3186
3187 static bool
3188 self_inlining_addr_expr (tree value, tree fn)
3189 {
3190 tree var;
3191
3192 if (TREE_CODE (value) != ADDR_EXPR)
3193 return false;
3194
3195 var = get_base_address (TREE_OPERAND (value, 0));
3196
3197 return var && auto_var_in_fn_p (var, fn);
3198 }
3199
3200 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3201 lexical block and line number information from base_stmt, if given,
3202 or from the last stmt of the block otherwise. */
3203
3204 static gimple *
3205 insert_init_debug_bind (copy_body_data *id,
3206 basic_block bb, tree var, tree value,
3207 gimple *base_stmt)
3208 {
3209 gimple *note;
3210 gimple_stmt_iterator gsi;
3211 tree tracked_var;
3212
3213 if (!gimple_in_ssa_p (id->src_cfun))
3214 return NULL;
3215
3216 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3217 return NULL;
3218
3219 tracked_var = target_for_debug_bind (var);
3220 if (!tracked_var)
3221 return NULL;
3222
3223 if (bb)
3224 {
3225 gsi = gsi_last_bb (bb);
3226 if (!base_stmt && !gsi_end_p (gsi))
3227 base_stmt = gsi_stmt (gsi);
3228 }
3229
3230 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3231
3232 if (bb)
3233 {
3234 if (!gsi_end_p (gsi))
3235 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3236 else
3237 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3238 }
3239
3240 return note;
3241 }
3242
3243 static void
3244 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3245 {
3246 /* If VAR represents a zero-sized variable, it's possible that the
3247 assignment statement may result in no gimple statements. */
3248 if (init_stmt)
3249 {
3250 gimple_stmt_iterator si = gsi_last_bb (bb);
3251
3252 /* We can end up with init statements that store to a non-register
3253 from a rhs with a conversion. Handle that here by forcing the
3254 rhs into a temporary. gimple_regimplify_operands is not
3255 prepared to do this for us. */
3256 if (!is_gimple_debug (init_stmt)
3257 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3258 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3259 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3260 {
3261 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3262 gimple_expr_type (init_stmt),
3263 gimple_assign_rhs1 (init_stmt));
3264 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3265 GSI_NEW_STMT);
3266 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3267 gimple_assign_set_rhs1 (init_stmt, rhs);
3268 }
3269 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3270 gimple_regimplify_operands (init_stmt, &si);
3271
3272 if (!is_gimple_debug (init_stmt))
3273 {
3274 tree def = gimple_assign_lhs (init_stmt);
3275 insert_init_debug_bind (id, bb, def, def, init_stmt);
3276 }
3277 }
3278 }
3279
3280 /* Initialize parameter P with VALUE. If needed, produce an init statement
3281 at the end of BB. When BB is NULL, we return the init statement to be
3282 output later. */
3283 static gimple *
3284 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3285 basic_block bb, tree *vars)
3286 {
3287 gimple *init_stmt = NULL;
3288 tree var;
3289 tree rhs = value;
3290 tree def = (gimple_in_ssa_p (cfun)
3291 ? ssa_default_def (id->src_cfun, p) : NULL);
3292
3293 if (value
3294 && value != error_mark_node
3295 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3296 {
3297 /* If we can match up types by promotion/demotion do so. */
3298 if (fold_convertible_p (TREE_TYPE (p), value))
3299 rhs = fold_convert (TREE_TYPE (p), value);
3300 else
3301 {
3302 /* ??? For valid programs we should not end up here.
3303 Still if we end up with truly mismatched types here, fall back
3304 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3305 GIMPLE to the following passes. */
3306 if (!is_gimple_reg_type (TREE_TYPE (value))
3307 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3308 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3309 else
3310 rhs = build_zero_cst (TREE_TYPE (p));
3311 }
3312 }
3313
3314 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3315 here since the type of this decl must be visible to the calling
3316 function. */
3317 var = copy_decl_to_var (p, id);
3318
3319 /* Declare this new variable. */
3320 DECL_CHAIN (var) = *vars;
3321 *vars = var;
3322
3323 /* Make gimplifier happy about this variable. */
3324 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3325
3326 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3327 we would not need to create a new variable here at all, if it
3328 weren't for debug info. Still, we can just use the argument
3329 value. */
3330 if (TREE_READONLY (p)
3331 && !TREE_ADDRESSABLE (p)
3332 && value && !TREE_SIDE_EFFECTS (value)
3333 && !def)
3334 {
3335 /* We may produce non-gimple trees by adding NOPs or introduce
3336 invalid sharing when the operand is not really constant.
3337 It is not a big deal to prohibit constant propagation here, as
3338 we will constant propagate in the DOM1 pass anyway. */
3339 if (is_gimple_min_invariant (value)
3340 && useless_type_conversion_p (TREE_TYPE (p),
3341 TREE_TYPE (value))
3342 /* We have to be very careful about ADDR_EXPR. Make sure
3343 the base variable isn't a local variable of the inlined
3344 function, e.g., when doing recursive inlining, direct or
3345 mutually-recursive or whatever, which is why we don't
3346 just test whether fn == current_function_decl. */
3347 && ! self_inlining_addr_expr (value, fn))
3348 {
3349 insert_decl_map (id, p, value);
3350 insert_debug_decl_map (id, p, var);
3351 return insert_init_debug_bind (id, bb, var, value, NULL);
3352 }
3353 }
3354
3355 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3356 that way, when the PARM_DECL is encountered, it will be
3357 automatically replaced by the VAR_DECL. */
3358 insert_decl_map (id, p, var);
3359
3360 /* Even if P was TREE_READONLY, the new VAR should not be.
3361 In the original code, we would have constructed a
3362 temporary, and then the function body would have never
3363 changed the value of P. However, now, we will be
3364 constructing VAR directly. The constructor body may
3365 change its value multiple times as it is being
3366 constructed. Therefore, it must not be TREE_READONLY;
3367 the back-end assumes that TREE_READONLY variable is
3368 assigned to only once. */
3369 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3370 TREE_READONLY (var) = 0;
3371
3372 /* If there is no setup required and we are in SSA, take the easy route,
3373 replacing all SSA names representing the function parameter by the
3374 SSA name passed to the function.
3375
3376 We need to construct the map for the variable anyway, as it might be
3377 used in different SSA names when the parameter is set in the function.
3378
3379 Do the replacement at -O0 for const arguments replaced by a constant.
3380 This is important for builtin_constant_p and other constructs requiring
3381 the constant argument to be visible in the inlined function body. */
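/* Illustrative example (not from this file): for
   static inline int f (const int n) { return __builtin_constant_p (n); }
   called as f (3), mapping the parameter's default definition directly to
   the constant 3 even at -O0 lets the builtin fold to 1 in the inlined
   body.  */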
3382 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3383 && (optimize
3384 || (TREE_READONLY (p)
3385 && is_gimple_min_invariant (rhs)))
3386 && (TREE_CODE (rhs) == SSA_NAME
3387 || is_gimple_min_invariant (rhs))
3388 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3389 {
3390 insert_decl_map (id, def, rhs);
3391 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3392 }
3393
3394 /* If the value of the argument is never used, don't bother initializing
3395 it. */
3396 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3397 {
3398 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3399 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3400 }
3401
3402 /* Initialize this VAR_DECL from the equivalent argument. Convert
3403 the argument to the proper type in case it was promoted. */
3404 if (value)
3405 {
3406 if (rhs == error_mark_node)
3407 {
3408 insert_decl_map (id, p, var);
3409 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3410 }
3411
3412 STRIP_USELESS_TYPE_CONVERSION (rhs);
3413
3414 /* If we are in SSA form properly remap the default definition
3415 or assign to a dummy SSA name if the parameter is unused and
3416 we are not optimizing. */
3417 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3418 {
3419 if (def)
3420 {
3421 def = remap_ssa_name (def, id);
3422 init_stmt = gimple_build_assign (def, rhs);
3423 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3424 set_ssa_default_def (cfun, var, NULL);
3425 }
3426 else if (!optimize)
3427 {
3428 def = make_ssa_name (var);
3429 init_stmt = gimple_build_assign (def, rhs);
3430 }
3431 }
3432 else
3433 init_stmt = gimple_build_assign (var, rhs);
3434
3435 if (bb && init_stmt)
3436 insert_init_stmt (id, bb, init_stmt);
3437 }
3438 return init_stmt;
3439 }
3440
3441 /* Generate code to initialize the parameters of the function at the
3442 top of the stack in ID from the GIMPLE_CALL STMT. */
3443
3444 static void
3445 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3446 tree fn, basic_block bb)
3447 {
3448 tree parms;
3449 size_t i;
3450 tree p;
3451 tree vars = NULL_TREE;
3452 tree static_chain = gimple_call_chain (stmt);
3453
3454 /* Figure out what the parameters are. */
3455 parms = DECL_ARGUMENTS (fn);
3456
3457 /* Loop through the parameter declarations, replacing each with an
3458 equivalent VAR_DECL, appropriately initialized. */
3459 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3460 {
3461 tree val;
3462 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3463 setup_one_parameter (id, p, val, fn, bb, &vars);
3464 }
3465 /* After remapping parameters remap their types. This has to be done
3466 in a second loop over all parameters to appropriately remap
3467 variable sized arrays when the size is specified in a
3468 parameter following the array. */
3469 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3470 {
3471 tree *varp = id->decl_map->get (p);
3472 if (varp && VAR_P (*varp))
3473 {
3474 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3475 ? ssa_default_def (id->src_cfun, p) : NULL);
3476 tree var = *varp;
3477 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3478 /* Also remap the default definition if it was remapped
3479 to the default definition of the parameter replacement
3480 by the parameter setup. */
3481 if (def)
3482 {
3483 tree *defp = id->decl_map->get (def);
3484 if (defp
3485 && TREE_CODE (*defp) == SSA_NAME
3486 && SSA_NAME_VAR (*defp) == var)
3487 TREE_TYPE (*defp) = TREE_TYPE (var);
3488 }
3489 }
3490 }
3491
3492 /* Initialize the static chain. */
3493 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3494 gcc_assert (fn != current_function_decl);
3495 if (p)
3496 {
3497 /* No static chain? Seems like a bug in tree-nested.c. */
3498 gcc_assert (static_chain);
3499
3500 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3501 }
3502
3503 declare_inline_vars (id->block, vars);
3504 }
3505
3506
3507 /* Declare a return variable to replace the RESULT_DECL for the
3508 function we are calling. An appropriate DECL_STMT is returned.
3509 The USE_STMT is filled to contain a use of the declaration to
3510 indicate the return value of the function.
3511
3512 RETURN_SLOT, if non-null, is the place where to store the result. It
3513 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3514 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3515
3516 The return value is a (possibly null) value that holds the result
3517 as seen by the caller. */
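/* For illustration only (hypothetical GIMPLE): for "a = foo ()" where foo
   returns a small struct, the callee's RESULT_DECL is remapped to a new
   local, say retval.N, the inlined return statements become assignments
   "retval.N = ...", and the returned use is "retval.N", so the caller's
   statement can be rewritten as "a = retval.N". With the return slot
   optimization the inlined body instead stores directly into "a" and no
   use is returned. */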
3518
3519 static tree
3520 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3521 basic_block entry_bb)
3522 {
3523 tree callee = id->src_fn;
3524 tree result = DECL_RESULT (callee);
3525 tree callee_type = TREE_TYPE (result);
3526 tree caller_type;
3527 tree var, use;
3528
3529 /* Handle type-mismatches in the function declaration return type
3530 vs. the call expression. */
3531 if (modify_dest)
3532 caller_type = TREE_TYPE (modify_dest);
3533 else
3534 caller_type = TREE_TYPE (TREE_TYPE (callee));
3535
3536 /* We don't need to do anything for functions that don't return anything. */
3537 if (VOID_TYPE_P (callee_type))
3538 return NULL_TREE;
3539
3540 /* If there was a return slot, then the return value is the
3541 dereferenced address of that object. */
3542 if (return_slot)
3543 {
3544 /* The front end shouldn't have used both return_slot and
3545 a modify expression. */
3546 gcc_assert (!modify_dest);
3547 if (DECL_BY_REFERENCE (result))
3548 {
3549 tree return_slot_addr = build_fold_addr_expr (return_slot);
3550 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3551
3552 /* We are going to construct *&return_slot and we can't do that
3553 for variables not believed to be addressable.
3554
3555 FIXME: This check can possibly trigger, because values returned
3556 via the return slot optimization are not believed to have their
3557 address taken by alias analysis. */
3558 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3559 var = return_slot_addr;
3560 }
3561 else
3562 {
3563 var = return_slot;
3564 gcc_assert (TREE_CODE (var) != SSA_NAME);
3565 if (TREE_ADDRESSABLE (result))
3566 mark_addressable (var);
3567 }
3568 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3569 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3570 && !DECL_GIMPLE_REG_P (result)
3571 && DECL_P (var))
3572 DECL_GIMPLE_REG_P (var) = 0;
3573 use = NULL;
3574 goto done;
3575 }
3576
3577 /* All types requiring non-trivial constructors should have been handled. */
3578 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3579
3580 /* Attempt to avoid creating a new temporary variable. */
3581 if (modify_dest
3582 && TREE_CODE (modify_dest) != SSA_NAME)
3583 {
3584 bool use_it = false;
3585
3586 /* We can't use MODIFY_DEST if there's type promotion involved. */
3587 if (!useless_type_conversion_p (callee_type, caller_type))
3588 use_it = false;
3589
3590 /* ??? If we're assigning to a variable sized type, then we must
3591 reuse the destination variable, because we've no good way to
3592 create variable sized temporaries at this point. */
3593 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3594 use_it = true;
3595
3596 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3597 reuse it as the result of the call directly. Don't do this if
3598 it would promote MODIFY_DEST to addressable. */
3599 else if (TREE_ADDRESSABLE (result))
3600 use_it = false;
3601 else
3602 {
3603 tree base_m = get_base_address (modify_dest);
3604
3605 /* If the base isn't a decl, then it's a pointer, and we don't
3606 know where that's going to go. */
3607 if (!DECL_P (base_m))
3608 use_it = false;
3609 else if (is_global_var (base_m))
3610 use_it = false;
3611 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3612 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3613 && !DECL_GIMPLE_REG_P (result)
3614 && DECL_GIMPLE_REG_P (base_m))
3615 use_it = false;
3616 else if (!TREE_ADDRESSABLE (base_m))
3617 use_it = true;
3618 }
3619
3620 if (use_it)
3621 {
3622 var = modify_dest;
3623 use = NULL;
3624 goto done;
3625 }
3626 }
3627
3628 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3629
3630 var = copy_result_decl_to_var (result, id);
3631 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3632
3633 /* Do not have the rest of GCC warn about this variable as it should
3634 not be visible to the user. */
3635 TREE_NO_WARNING (var) = 1;
3636
3637 declare_inline_vars (id->block, var);
3638
3639 /* Build the use expr. If the return type of the function was
3640 promoted, convert it back to the expected type. */
3641 use = var;
3642 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3643 {
3644 /* If we can match up types by promotion/demotion do so. */
3645 if (fold_convertible_p (caller_type, var))
3646 use = fold_convert (caller_type, var);
3647 else
3648 {
3649 /* ??? For valid programs we should not end up here.
3650 Still, if we end up with truly mismatched types here, fall back
3651 to using a MEM_REF so as not to leak invalid GIMPLE to the
3652 following passes. */
3653 /* Prevent var from being written into SSA form. */
3654 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3655 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3656 DECL_GIMPLE_REG_P (var) = false;
3657 else if (is_gimple_reg_type (TREE_TYPE (var)))
3658 TREE_ADDRESSABLE (var) = true;
3659 use = fold_build2 (MEM_REF, caller_type,
3660 build_fold_addr_expr (var),
3661 build_int_cst (ptr_type_node, 0));
3662 }
3663 }
3664
3665 STRIP_USELESS_TYPE_CONVERSION (use);
3666
3667 if (DECL_BY_REFERENCE (result))
3668 {
3669 TREE_ADDRESSABLE (var) = 1;
3670 var = build_fold_addr_expr (var);
3671 }
3672
3673 done:
3674 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3675 way, when the RESULT_DECL is encountered, it will be
3676 automatically replaced by the VAR_DECL.
3677
3678 When returning by reference, ensure that RESULT_DECL remaps to
3679 gimple_val. */
3680 if (DECL_BY_REFERENCE (result)
3681 && !is_gimple_val (var))
3682 {
3683 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3684 insert_decl_map (id, result, temp);
3685 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3686 its default_def SSA_NAME. */
3687 if (gimple_in_ssa_p (id->src_cfun)
3688 && is_gimple_reg (result))
3689 {
3690 temp = make_ssa_name (temp);
3691 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3692 }
3693 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3694 }
3695 else
3696 insert_decl_map (id, result, var);
3697
3698 /* Remember this so we can ignore it in remap_decls. */
3699 id->retvar = var;
3700 return use;
3701 }
3702
3703 /* Determine if the function can be copied. If so return NULL. If
3704 not return a string describing the reason for failure. */
3705
3706 const char *
3707 copy_forbidden (struct function *fun)
3708 {
3709 const char *reason = fun->cannot_be_copied_reason;
3710
3711 /* Only examine the function once. */
3712 if (fun->cannot_be_copied_set)
3713 return reason;
3714
3715 /* We cannot copy a function that receives a non-local goto
3716 because we cannot remap the destination label used in the
3717 function that is performing the non-local goto. */
3718 /* ??? Actually, this should be possible, if we work at it.
3719 No doubt there's just a handful of places that simply
3720 assume it doesn't happen and don't substitute properly. */
3721 if (fun->has_nonlocal_label)
3722 {
3723 reason = G_("function %q+F can never be copied "
3724 "because it receives a non-local goto");
3725 goto fail;
3726 }
3727
3728 if (fun->has_forced_label_in_static)
3729 {
3730 reason = G_("function %q+F can never be copied because it saves "
3731 "address of local label in a static variable");
3732 goto fail;
3733 }
3734
3735 fail:
3736 fun->cannot_be_copied_reason = reason;
3737 fun->cannot_be_copied_set = true;
3738 return reason;
3739 }
3740
3741
3742 static const char *inline_forbidden_reason;
3743
3744 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3745 iff a function cannot be inlined. Also sets the reason why. */
3746
3747 static tree
3748 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3749 struct walk_stmt_info *wip)
3750 {
3751 tree fn = (tree) wip->info;
3752 tree t;
3753 gimple *stmt = gsi_stmt (*gsi);
3754
3755 switch (gimple_code (stmt))
3756 {
3757 case GIMPLE_CALL:
3758 /* Refuse to inline an alloca call unless the user explicitly forced it, as
3759 this may change the program's memory overhead drastically when the
3760 function using alloca is called in a loop. In the GCC present in
3761 SPEC2000, inlining into schedule_block caused it to require 2GB of
3762 RAM instead of 256MB. Don't do so for alloca calls emitted for
3763 VLA objects, as those can't cause unbounded growth (they're always
3764 wrapped inside stack_save/stack_restore regions). */
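/* A made-up example of a call rejected here:

     void f (unsigned n) { char *buf = alloca (n); consume (buf); }

   f will not be inlined unless it is marked always_inline, or unless
   the alloca was generated internally for a VLA. */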
3765 if (gimple_maybe_alloca_call_p (stmt)
3766 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3767 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3768 {
3769 inline_forbidden_reason
3770 = G_("function %q+F can never be inlined because it uses "
3771 "alloca (override using the always_inline attribute)");
3772 *handled_ops_p = true;
3773 return fn;
3774 }
3775
3776 t = gimple_call_fndecl (stmt);
3777 if (t == NULL_TREE)
3778 break;
3779
3780 /* We cannot inline functions that call setjmp. */
3781 if (setjmp_call_p (t))
3782 {
3783 inline_forbidden_reason
3784 = G_("function %q+F can never be inlined because it uses setjmp");
3785 *handled_ops_p = true;
3786 return t;
3787 }
3788
3789 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3790 switch (DECL_FUNCTION_CODE (t))
3791 {
3792 /* We cannot inline functions that take a variable number of
3793 arguments. */
3794 case BUILT_IN_VA_START:
3795 case BUILT_IN_NEXT_ARG:
3796 case BUILT_IN_VA_END:
3797 inline_forbidden_reason
3798 = G_("function %q+F can never be inlined because it "
3799 "uses variable argument lists");
3800 *handled_ops_p = true;
3801 return t;
3802
3803 case BUILT_IN_LONGJMP:
3804 /* We can't inline functions that call __builtin_longjmp at
3805 all. The non-local goto machinery really requires the
3806 destination be in a different function. If we allow the
3807 function calling __builtin_longjmp to be inlined into the
3808 function calling __builtin_setjmp, Things will Go Awry. */
3809 inline_forbidden_reason
3810 = G_("function %q+F can never be inlined because "
3811 "it uses setjmp-longjmp exception handling");
3812 *handled_ops_p = true;
3813 return t;
3814
3815 case BUILT_IN_NONLOCAL_GOTO:
3816 /* Similarly. */
3817 inline_forbidden_reason
3818 = G_("function %q+F can never be inlined because "
3819 "it uses non-local goto");
3820 *handled_ops_p = true;
3821 return t;
3822
3823 case BUILT_IN_RETURN:
3824 case BUILT_IN_APPLY_ARGS:
3825 /* If a __builtin_apply_args caller would be inlined,
3826 it would be saving arguments of the function it has
3827 been inlined into. Similarly __builtin_return would
3828 return from the function the inline has been inlined into. */
3829 inline_forbidden_reason
3830 = G_("function %q+F can never be inlined because "
3831 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3832 *handled_ops_p = true;
3833 return t;
3834
3835 default:
3836 break;
3837 }
3838 break;
3839
3840 case GIMPLE_GOTO:
3841 t = gimple_goto_dest (stmt);
3842
3843 /* We will not inline a function which uses computed goto. The
3844 addresses of its local labels, which may be tucked into
3845 global storage, are of course not constant across
3846 instantiations, which causes unexpected behavior. */
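/* A hypothetical example of what is rejected here:

     static void *targets[] = { &&lab1, &&lab2 };
     goto *targets[i];

   the label addresses stored in TARGETS would not refer to the labels
   of the inlined copy. */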
3847 if (TREE_CODE (t) != LABEL_DECL)
3848 {
3849 inline_forbidden_reason
3850 = G_("function %q+F can never be inlined "
3851 "because it contains a computed goto");
3852 *handled_ops_p = true;
3853 return t;
3854 }
3855 break;
3856
3857 default:
3858 break;
3859 }
3860
3861 *handled_ops_p = false;
3862 return NULL_TREE;
3863 }
3864
3865 /* Return true if FNDECL is a function that cannot be inlined into
3866 another one. */
3867
3868 static bool
3869 inline_forbidden_p (tree fndecl)
3870 {
3871 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3872 struct walk_stmt_info wi;
3873 basic_block bb;
3874 bool forbidden_p = false;
3875
3876 /* First check for shared reasons not to copy the code. */
3877 inline_forbidden_reason = copy_forbidden (fun);
3878 if (inline_forbidden_reason != NULL)
3879 return true;
3880
3881 /* Next, walk the statements of the function looking for
3882 constructs we can't handle, or that are non-optimal for inlining. */
3883 hash_set<tree> visited_nodes;
3884 memset (&wi, 0, sizeof (wi));
3885 wi.info = (void *) fndecl;
3886 wi.pset = &visited_nodes;
3887
3888 FOR_EACH_BB_FN (bb, fun)
3889 {
3890 gimple *ret;
3891 gimple_seq seq = bb_seq (bb);
3892 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3893 forbidden_p = (ret != NULL);
3894 if (forbidden_p)
3895 break;
3896 }
3897
3898 return forbidden_p;
3899 }
3900 \f
3901 /* Return false if the function FNDECL cannot be inlined on account of its
3902 attributes, true otherwise. */
3903 static bool
3904 function_attribute_inlinable_p (const_tree fndecl)
3905 {
3906 if (targetm.attribute_table)
3907 {
3908 const_tree a;
3909
3910 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3911 {
3912 const_tree name = get_attribute_name (a);
3913 int i;
3914
3915 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3916 if (is_attribute_p (targetm.attribute_table[i].name, name))
3917 return targetm.function_attribute_inlinable_p (fndecl);
3918 }
3919 }
3920
3921 return true;
3922 }
3923
3924 /* Returns nonzero if FN is a function that does not have any
3925 fundamental inline blocking properties. */
3926
3927 bool
3928 tree_inlinable_function_p (tree fn)
3929 {
3930 bool inlinable = true;
3931 bool do_warning;
3932 tree always_inline;
3933
3934 /* If we've already decided this function shouldn't be inlined,
3935 there's no need to check again. */
3936 if (DECL_UNINLINABLE (fn))
3937 return false;
3938
3939 /* We only warn for functions declared `inline' by the user. */
3940 do_warning = (warn_inline
3941 && DECL_DECLARED_INLINE_P (fn)
3942 && !DECL_NO_INLINE_WARNING_P (fn)
3943 && !DECL_IN_SYSTEM_HEADER (fn));
3944
3945 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3946
3947 if (flag_no_inline
3948 && always_inline == NULL)
3949 {
3950 if (do_warning)
3951 warning (OPT_Winline, "function %q+F can never be inlined because it "
3952 "is suppressed using %<-fno-inline%>", fn);
3953 inlinable = false;
3954 }
3955
3956 else if (!function_attribute_inlinable_p (fn))
3957 {
3958 if (do_warning)
3959 warning (OPT_Winline, "function %q+F can never be inlined because it "
3960 "uses attributes conflicting with inlining", fn);
3961 inlinable = false;
3962 }
3963
3964 else if (inline_forbidden_p (fn))
3965 {
3966 /* See if we should warn about uninlinable functions. Previously,
3967 some of these warnings would be issued while trying to expand
3968 the function inline, but that would cause multiple warnings
3969 about functions that would for example call alloca. But since
3970 this is a property of the function, just one warning is enough.
3971 As a bonus we can now give more details about the reason why a
3972 function is not inlinable. */
3973 if (always_inline)
3974 error (inline_forbidden_reason, fn);
3975 else if (do_warning)
3976 warning (OPT_Winline, inline_forbidden_reason, fn);
3977
3978 inlinable = false;
3979 }
3980
3981 /* Squirrel away the result so that we don't have to check again. */
3982 DECL_UNINLINABLE (fn) = !inlinable;
3983
3984 return inlinable;
3985 }
3986
3987 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3988 word size, take a possible memcpy call into account, and return a
3989 cost based on whether we are optimizing for size or speed, according to SPEED_P. */
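/* Rough illustration (the exact numbers depend on the target's
   MOVE_MAX_PIECES and MOVE_RATIO): a 32-byte struct moved in 8-byte pieces
   is costed as 4 piecewise moves, while a type too large to be moved in
   pieces is costed as a memcpy call, i.e. 4 (three arguments plus the
   call). */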
3990
3991 int
3992 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3993 {
3994 HOST_WIDE_INT size;
3995
3996 gcc_assert (!VOID_TYPE_P (type));
3997
3998 if (TREE_CODE (type) == VECTOR_TYPE)
3999 {
4000 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4001 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4002 int orig_mode_size
4003 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4004 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4005 return ((orig_mode_size + simd_mode_size - 1)
4006 / simd_mode_size);
4007 }
4008
4009 size = int_size_in_bytes (type);
4010
4011 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4012 /* Cost of a memcpy call, 3 arguments and the call. */
4013 return 4;
4014 else
4015 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4016 }
4017
4018 /* Returns the cost of operation CODE, according to WEIGHTS. */
4019
4020 static int
4021 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4022 tree op1 ATTRIBUTE_UNUSED, tree op2)
4023 {
4024 switch (code)
4025 {
4026 /* These are "free" conversions, or their presumed cost
4027 is folded into other operations. */
4028 case RANGE_EXPR:
4029 CASE_CONVERT:
4030 case COMPLEX_EXPR:
4031 case PAREN_EXPR:
4032 case VIEW_CONVERT_EXPR:
4033 return 0;
4034
4035 /* Assign cost of 1 to usual operations.
4036 ??? We may consider mapping RTL costs to this. */
4037 case COND_EXPR:
4038 case VEC_COND_EXPR:
4039 case VEC_PERM_EXPR:
4040
4041 case PLUS_EXPR:
4042 case POINTER_PLUS_EXPR:
4043 case POINTER_DIFF_EXPR:
4044 case MINUS_EXPR:
4045 case MULT_EXPR:
4046 case MULT_HIGHPART_EXPR:
4047
4048 case ADDR_SPACE_CONVERT_EXPR:
4049 case FIXED_CONVERT_EXPR:
4050 case FIX_TRUNC_EXPR:
4051
4052 case NEGATE_EXPR:
4053 case FLOAT_EXPR:
4054 case MIN_EXPR:
4055 case MAX_EXPR:
4056 case ABS_EXPR:
4057 case ABSU_EXPR:
4058
4059 case LSHIFT_EXPR:
4060 case RSHIFT_EXPR:
4061 case LROTATE_EXPR:
4062 case RROTATE_EXPR:
4063
4064 case BIT_IOR_EXPR:
4065 case BIT_XOR_EXPR:
4066 case BIT_AND_EXPR:
4067 case BIT_NOT_EXPR:
4068
4069 case TRUTH_ANDIF_EXPR:
4070 case TRUTH_ORIF_EXPR:
4071 case TRUTH_AND_EXPR:
4072 case TRUTH_OR_EXPR:
4073 case TRUTH_XOR_EXPR:
4074 case TRUTH_NOT_EXPR:
4075
4076 case LT_EXPR:
4077 case LE_EXPR:
4078 case GT_EXPR:
4079 case GE_EXPR:
4080 case EQ_EXPR:
4081 case NE_EXPR:
4082 case ORDERED_EXPR:
4083 case UNORDERED_EXPR:
4084
4085 case UNLT_EXPR:
4086 case UNLE_EXPR:
4087 case UNGT_EXPR:
4088 case UNGE_EXPR:
4089 case UNEQ_EXPR:
4090 case LTGT_EXPR:
4091
4092 case CONJ_EXPR:
4093
4094 case PREDECREMENT_EXPR:
4095 case PREINCREMENT_EXPR:
4096 case POSTDECREMENT_EXPR:
4097 case POSTINCREMENT_EXPR:
4098
4099 case REALIGN_LOAD_EXPR:
4100
4101 case WIDEN_SUM_EXPR:
4102 case WIDEN_MULT_EXPR:
4103 case DOT_PROD_EXPR:
4104 case SAD_EXPR:
4105 case WIDEN_MULT_PLUS_EXPR:
4106 case WIDEN_MULT_MINUS_EXPR:
4107 case WIDEN_LSHIFT_EXPR:
4108
4109 case VEC_WIDEN_MULT_HI_EXPR:
4110 case VEC_WIDEN_MULT_LO_EXPR:
4111 case VEC_WIDEN_MULT_EVEN_EXPR:
4112 case VEC_WIDEN_MULT_ODD_EXPR:
4113 case VEC_UNPACK_HI_EXPR:
4114 case VEC_UNPACK_LO_EXPR:
4115 case VEC_UNPACK_FLOAT_HI_EXPR:
4116 case VEC_UNPACK_FLOAT_LO_EXPR:
4117 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4118 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4119 case VEC_PACK_TRUNC_EXPR:
4120 case VEC_PACK_SAT_EXPR:
4121 case VEC_PACK_FIX_TRUNC_EXPR:
4122 case VEC_PACK_FLOAT_EXPR:
4123 case VEC_WIDEN_LSHIFT_HI_EXPR:
4124 case VEC_WIDEN_LSHIFT_LO_EXPR:
4125 case VEC_DUPLICATE_EXPR:
4126 case VEC_SERIES_EXPR:
4127
4128 return 1;
4129
4130 /* A few special cases of expensive operations. This is useful
4131 to avoid inlining functions that have too many of these. */
4132 case TRUNC_DIV_EXPR:
4133 case CEIL_DIV_EXPR:
4134 case FLOOR_DIV_EXPR:
4135 case ROUND_DIV_EXPR:
4136 case EXACT_DIV_EXPR:
4137 case TRUNC_MOD_EXPR:
4138 case CEIL_MOD_EXPR:
4139 case FLOOR_MOD_EXPR:
4140 case ROUND_MOD_EXPR:
4141 case RDIV_EXPR:
4142 if (TREE_CODE (op2) != INTEGER_CST)
4143 return weights->div_mod_cost;
4144 return 1;
4145
4146 /* Bit-field insertion needs several shift and mask operations. */
4147 case BIT_INSERT_EXPR:
4148 return 3;
4149
4150 default:
4151 /* We expect a copy assignment with no operator. */
4152 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4153 return 0;
4154 }
4155 }
4156
4157
4158 /* Estimate number of instructions that will be created by expanding
4159 the statements in the statement sequence STMTS.
4160 WEIGHTS contains weights attributed to various constructs. */
4161
4162 int
4163 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4164 {
4165 int cost;
4166 gimple_stmt_iterator gsi;
4167
4168 cost = 0;
4169 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4170 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4171
4172 return cost;
4173 }
4174
4175
4176 /* Estimate number of instructions that will be created by expanding STMT.
4177 WEIGHTS contains weights attributed to various constructs. */
4178
4179 int
4180 estimate_num_insns (gimple *stmt, eni_weights *weights)
4181 {
4182 unsigned cost, i;
4183 enum gimple_code code = gimple_code (stmt);
4184 tree lhs;
4185 tree rhs;
4186
4187 switch (code)
4188 {
4189 case GIMPLE_ASSIGN:
4190 /* Try to estimate the cost of assignments. We have two cases to
4191 deal with:
4192 1) Simple assignments to registers;
4193 2) Stores to things that must live in memory. This includes
4194 "normal" stores to scalars, but also assignments of large
4195 structures, or constructors of big arrays;
4196
4197 Let us look at these two cases, assuming we have "a = b + C":
4198 <GIMPLE_ASSIGN <var_decl "a">
4199 <plus_expr <var_decl "b"> <constant C>>
4200 If "a" is a GIMPLE register, the assignment to it is free on almost
4201 any target, because "a" usually ends up in a real register. Hence
4202 the only cost of this expression comes from the PLUS_EXPR, and we
4203 can ignore the GIMPLE_ASSIGN.
4204 If "a" is not a GIMPLE register, the assignment to "a" will most
4205 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4206 of moving something into "a", which we compute using the function
4207 estimate_move_cost. */
4208 if (gimple_clobber_p (stmt))
4209 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4210
4211 lhs = gimple_assign_lhs (stmt);
4212 rhs = gimple_assign_rhs1 (stmt);
4213
4214 cost = 0;
4215
4216 /* Account for the cost of moving to / from memory. */
4217 if (gimple_store_p (stmt))
4218 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4219 if (gimple_assign_load_p (stmt))
4220 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4221
4222 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4223 gimple_assign_rhs1 (stmt),
4224 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4225 == GIMPLE_BINARY_RHS
4226 ? gimple_assign_rhs2 (stmt) : NULL);
4227 break;
4228
4229 case GIMPLE_COND:
4230 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4231 gimple_op (stmt, 0),
4232 gimple_op (stmt, 1));
4233 break;
4234
4235 case GIMPLE_SWITCH:
4236 {
4237 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4238 /* Take into account cost of the switch + guess 2 conditional jumps for
4239 each case label.
4240
4241 TODO: once the switch expansion logic is sufficiently separated, we can
4242 do a better job of estimating the cost of the switch. */
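/* E.g. (illustrative numbers only): a switch with 16 labels is costed as
   floor_log2 (16) * 2 == 8 when estimating time, and as 16 * 2 == 32 when
   estimating size. */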
4243 if (weights->time_based)
4244 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4245 else
4246 cost = gimple_switch_num_labels (switch_stmt) * 2;
4247 }
4248 break;
4249
4250 case GIMPLE_CALL:
4251 {
4252 tree decl;
4253
4254 if (gimple_call_internal_p (stmt))
4255 return 0;
4256 else if ((decl = gimple_call_fndecl (stmt))
4257 && fndecl_built_in_p (decl))
4258 {
4259 /* Do not special-case builtins where we see the body.
4260 This just confuses the inliner. */
4261 struct cgraph_node *node;
4262 if (!(node = cgraph_node::get (decl))
4263 || node->definition)
4264 ;
4265 /* For builtins that are likely expanded to nothing or
4266 inlined, do not account for operand costs. */
4267 else if (is_simple_builtin (decl))
4268 return 0;
4269 else if (is_inexpensive_builtin (decl))
4270 return weights->target_builtin_call_cost;
4271 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4272 {
4273 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4274 specialize the cheap expansion we do here.
4275 ??? This asks for a more general solution. */
4276 switch (DECL_FUNCTION_CODE (decl))
4277 {
4278 case BUILT_IN_POW:
4279 case BUILT_IN_POWF:
4280 case BUILT_IN_POWL:
4281 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4282 && (real_equal
4283 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4284 &dconst2)))
4285 return estimate_operator_cost
4286 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4287 gimple_call_arg (stmt, 0));
4288 break;
4289
4290 default:
4291 break;
4292 }
4293 }
4294 }
4295
4296 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4297 if (gimple_call_lhs (stmt))
4298 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4299 weights->time_based);
4300 for (i = 0; i < gimple_call_num_args (stmt); i++)
4301 {
4302 tree arg = gimple_call_arg (stmt, i);
4303 cost += estimate_move_cost (TREE_TYPE (arg),
4304 weights->time_based);
4305 }
4306 break;
4307 }
4308
4309 case GIMPLE_RETURN:
4310 return weights->return_cost;
4311
4312 case GIMPLE_GOTO:
4313 case GIMPLE_LABEL:
4314 case GIMPLE_NOP:
4315 case GIMPLE_PHI:
4316 case GIMPLE_PREDICT:
4317 case GIMPLE_DEBUG:
4318 return 0;
4319
4320 case GIMPLE_ASM:
4321 {
4322 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4323 /* 1000 means infinity. This avoids overflows later
4324 with very long asm statements. */
4325 if (count > 1000)
4326 count = 1000;
4327 /* If this asm is asm inline, count anything as minimum size. */
4328 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4329 count = MIN (1, count);
4330 return MAX (1, count);
4331 }
4332
4333 case GIMPLE_RESX:
4334 /* This is either going to be an external function call with one
4335 argument, or two register copy statements plus a goto. */
4336 return 2;
4337
4338 case GIMPLE_EH_DISPATCH:
4339 /* ??? This is going to turn into a switch statement. Ideally
4340 we'd have a look at the eh region and estimate the number of
4341 edges involved. */
4342 return 10;
4343
4344 case GIMPLE_BIND:
4345 return estimate_num_insns_seq (
4346 gimple_bind_body (as_a <gbind *> (stmt)),
4347 weights);
4348
4349 case GIMPLE_EH_FILTER:
4350 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4351
4352 case GIMPLE_CATCH:
4353 return estimate_num_insns_seq (gimple_catch_handler (
4354 as_a <gcatch *> (stmt)),
4355 weights);
4356
4357 case GIMPLE_TRY:
4358 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4359 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4360
4361 /* OMP directives are generally very expensive. */
4362
4363 case GIMPLE_OMP_RETURN:
4364 case GIMPLE_OMP_SECTIONS_SWITCH:
4365 case GIMPLE_OMP_ATOMIC_STORE:
4366 case GIMPLE_OMP_CONTINUE:
4367 /* ...except these, which are cheap. */
4368 return 0;
4369
4370 case GIMPLE_OMP_ATOMIC_LOAD:
4371 return weights->omp_cost;
4372
4373 case GIMPLE_OMP_FOR:
4374 return (weights->omp_cost
4375 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4376 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4377
4378 case GIMPLE_OMP_PARALLEL:
4379 case GIMPLE_OMP_TASK:
4380 case GIMPLE_OMP_CRITICAL:
4381 case GIMPLE_OMP_MASTER:
4382 case GIMPLE_OMP_TASKGROUP:
4383 case GIMPLE_OMP_ORDERED:
4384 case GIMPLE_OMP_SCAN:
4385 case GIMPLE_OMP_SECTION:
4386 case GIMPLE_OMP_SECTIONS:
4387 case GIMPLE_OMP_SINGLE:
4388 case GIMPLE_OMP_TARGET:
4389 case GIMPLE_OMP_TEAMS:
4390 return (weights->omp_cost
4391 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4392
4393 case GIMPLE_TRANSACTION:
4394 return (weights->tm_cost
4395 + estimate_num_insns_seq (gimple_transaction_body (
4396 as_a <gtransaction *> (stmt)),
4397 weights));
4398
4399 default:
4400 gcc_unreachable ();
4401 }
4402
4403 return cost;
4404 }
4405
4406 /* Estimate number of instructions that will be created by expanding
4407 function FNDECL. WEIGHTS contains weights attributed to various
4408 constructs. */
4409
4410 int
4411 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4412 {
4413 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4414 gimple_stmt_iterator bsi;
4415 basic_block bb;
4416 int n = 0;
4417
4418 gcc_assert (my_function && my_function->cfg);
4419 FOR_EACH_BB_FN (bb, my_function)
4420 {
4421 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4422 n += estimate_num_insns (gsi_stmt (bsi), weights);
4423 }
4424
4425 return n;
4426 }
4427
4428
4429 /* Initializes weights used by estimate_num_insns. */
4430
4431 void
4432 init_inline_once (void)
4433 {
4434 eni_size_weights.call_cost = 1;
4435 eni_size_weights.indirect_call_cost = 3;
4436 eni_size_weights.target_builtin_call_cost = 1;
4437 eni_size_weights.div_mod_cost = 1;
4438 eni_size_weights.omp_cost = 40;
4439 eni_size_weights.tm_cost = 10;
4440 eni_size_weights.time_based = false;
4441 eni_size_weights.return_cost = 1;
4442
4443 /* Estimating the time for a call is difficult, since we have no idea what the
4444 called function does. In the current uses of eni_time_weights,
4445 underestimating the cost does less harm than overestimating it, so
4446 we choose a rather small value here. */
4447 eni_time_weights.call_cost = 10;
4448 eni_time_weights.indirect_call_cost = 15;
4449 eni_time_weights.target_builtin_call_cost = 1;
4450 eni_time_weights.div_mod_cost = 10;
4451 eni_time_weights.omp_cost = 40;
4452 eni_time_weights.tm_cost = 40;
4453 eni_time_weights.time_based = true;
4454 eni_time_weights.return_cost = 2;
4455 }
4456
4457
4458 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4459
4460 static void
4461 prepend_lexical_block (tree current_block, tree new_block)
4462 {
4463 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4464 BLOCK_SUBBLOCKS (current_block) = new_block;
4465 BLOCK_SUPERCONTEXT (new_block) = current_block;
4466 }
4467
4468 /* Add local variables from CALLEE to CALLER. */
4469
4470 static inline void
4471 add_local_variables (struct function *callee, struct function *caller,
4472 copy_body_data *id)
4473 {
4474 tree var;
4475 unsigned ix;
4476
4477 FOR_EACH_LOCAL_DECL (callee, ix, var)
4478 if (!can_be_nonlocal (var, id))
4479 {
4480 tree new_var = remap_decl (var, id);
4481
4482 /* Remap debug-expressions. */
4483 if (VAR_P (new_var)
4484 && DECL_HAS_DEBUG_EXPR_P (var)
4485 && new_var != var)
4486 {
4487 tree tem = DECL_DEBUG_EXPR (var);
4488 bool old_regimplify = id->regimplify;
4489 id->remapping_type_depth++;
4490 walk_tree (&tem, copy_tree_body_r, id, NULL);
4491 id->remapping_type_depth--;
4492 id->regimplify = old_regimplify;
4493 SET_DECL_DEBUG_EXPR (new_var, tem);
4494 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4495 }
4496 add_local_decl (caller, new_var);
4497 }
4498 }
4499
4500 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4501 have brought in or introduced any debug stmts for SRCVAR. */
4502
4503 static inline void
4504 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4505 {
4506 tree *remappedvarp = id->decl_map->get (srcvar);
4507
4508 if (!remappedvarp)
4509 return;
4510
4511 if (!VAR_P (*remappedvarp))
4512 return;
4513
4514 if (*remappedvarp == id->retvar)
4515 return;
4516
4517 tree tvar = target_for_debug_bind (*remappedvarp);
4518 if (!tvar)
4519 return;
4520
4521 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4522 id->call_stmt);
4523 gimple_seq_add_stmt (bindings, stmt);
4524 }
4525
4526 /* For each inlined variable for which we may have debug bind stmts,
4527 add before GSI a final debug stmt resetting it, marking the end of
4528 its life, so that var-tracking knows it doesn't have to compute
4529 further locations for it. */
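/* Illustrative dump form (hypothetical variable name): for an inlined
   local "i" this emits a final

     # DEBUG i => NULL

   bind at the point where the inlined body ends. */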
4530
4531 static inline void
4532 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4533 {
4534 tree var;
4535 unsigned ix;
4536 gimple_seq bindings = NULL;
4537
4538 if (!gimple_in_ssa_p (id->src_cfun))
4539 return;
4540
4541 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4542 return;
4543
4544 for (var = DECL_ARGUMENTS (id->src_fn);
4545 var; var = DECL_CHAIN (var))
4546 reset_debug_binding (id, var, &bindings);
4547
4548 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4549 reset_debug_binding (id, var, &bindings);
4550
4551 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4552 }
4553
4554 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4555
4556 static bool
4557 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4558 {
4559 tree use_retvar;
4560 tree fn;
4561 hash_map<tree, tree> *dst;
4562 hash_map<tree, tree> *st = NULL;
4563 tree return_slot;
4564 tree modify_dest;
4565 struct cgraph_edge *cg_edge;
4566 cgraph_inline_failed_t reason;
4567 basic_block return_block;
4568 edge e;
4569 gimple_stmt_iterator gsi, stmt_gsi;
4570 bool successfully_inlined = false;
4571 bool purge_dead_abnormal_edges;
4572 gcall *call_stmt;
4573 unsigned int prop_mask, src_properties;
4574 struct function *dst_cfun;
4575 tree simduid;
4576 use_operand_p use;
4577 gimple *simtenter_stmt = NULL;
4578 vec<tree> *simtvars_save;
4579
4580 /* The gimplifier uses input_location in too many places, such as
4581 internal_get_tmp_var (). */
4582 location_t saved_location = input_location;
4583 input_location = gimple_location (stmt);
4584
4585 /* From here on, we're only interested in CALL_EXPRs. */
4586 call_stmt = dyn_cast <gcall *> (stmt);
4587 if (!call_stmt)
4588 goto egress;
4589
4590 cg_edge = id->dst_node->get_edge (stmt);
4591 gcc_checking_assert (cg_edge);
4592 /* First, see if we can figure out what function is being called.
4593 If we cannot, then there is no hope of inlining the function. */
4594 if (cg_edge->indirect_unknown_callee)
4595 goto egress;
4596 fn = cg_edge->callee->decl;
4597 gcc_checking_assert (fn);
4598
4599 /* If FN is a declaration of a function in a nested scope that was
4600 globally declared inline, we don't set its DECL_INITIAL.
4601 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4602 C++ front-end uses it for cdtors to refer to their internal
4603 declarations, that are not real functions. Fortunately those
4604 don't have trees to be saved, so we can tell by checking their
4605 gimple_body. */
4606 if (!DECL_INITIAL (fn)
4607 && DECL_ABSTRACT_ORIGIN (fn)
4608 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4609 fn = DECL_ABSTRACT_ORIGIN (fn);
4610
4611 /* Don't try to inline functions that are not well-suited to inlining. */
4612 if (cg_edge->inline_failed)
4613 {
4614 reason = cg_edge->inline_failed;
4615 /* If this call was originally indirect, we do not want to emit any
4616 inlining related warnings or sorry messages because there are no
4617 guarantees regarding those. */
4618 if (cg_edge->indirect_inlining_edge)
4619 goto egress;
4620
4621 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4622 /* For extern inline functions that get redefined we have always
4623 silently ignored the always_inline flag. Better behavior would
4624 be to be able to keep both bodies and use the extern inline body
4625 for inlining, but we can't do that because frontends overwrite
4626 the body. */
4627 && !cg_edge->callee->local.redefined_extern_inline
4628 /* During early inline pass, report only when optimization is
4629 not turned on. */
4630 && (symtab->global_info_ready
4631 || !optimize
4632 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4633 /* PR 20090218-1_0.c. Body can be provided by another module. */
4634 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4635 {
4636 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4637 cgraph_inline_failed_string (reason));
4638 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4639 inform (gimple_location (stmt), "called from here");
4640 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4641 inform (DECL_SOURCE_LOCATION (cfun->decl),
4642 "called from this function");
4643 }
4644 else if (warn_inline
4645 && DECL_DECLARED_INLINE_P (fn)
4646 && !DECL_NO_INLINE_WARNING_P (fn)
4647 && !DECL_IN_SYSTEM_HEADER (fn)
4648 && reason != CIF_UNSPECIFIED
4649 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4650 /* Do not warn about not inlined recursive calls. */
4651 && !cg_edge->recursive_p ()
4652 /* Avoid warnings during early inline pass. */
4653 && symtab->global_info_ready)
4654 {
4655 auto_diagnostic_group d;
4656 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4657 fn, _(cgraph_inline_failed_string (reason))))
4658 {
4659 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4660 inform (gimple_location (stmt), "called from here");
4661 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4662 inform (DECL_SOURCE_LOCATION (cfun->decl),
4663 "called from this function");
4664 }
4665 }
4666 goto egress;
4667 }
4668 id->src_node = cg_edge->callee;
4669
4670 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4671 and redirect to the function being thunked. */
4672 if (id->src_node->thunk.thunk_p)
4673 {
4674 cgraph_edge *edge;
4675 tree virtual_offset = NULL;
4676 profile_count count = cg_edge->count;
4677 tree op;
4678 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4679
4680 cg_edge->remove ();
4681 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4682 gimple_uid (stmt),
4683 profile_count::one (),
4684 profile_count::one (),
4685 true);
4686 edge->count = count;
4687 if (id->src_node->thunk.virtual_offset_p)
4688 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4689 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4690 NULL);
4691 gsi_insert_before (&iter, gimple_build_assign (op,
4692 gimple_call_arg (stmt, 0)),
4693 GSI_NEW_STMT);
4694 gcc_assert (id->src_node->thunk.this_adjusting);
4695 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4696 virtual_offset, id->src_node->thunk.indirect_offset);
4697
4698 gimple_call_set_arg (stmt, 0, op);
4699 gimple_call_set_fndecl (stmt, edge->callee->decl);
4700 update_stmt (stmt);
4701 id->src_node->remove ();
4702 expand_call_inline (bb, stmt, id);
4703 maybe_remove_unused_call_args (cfun, stmt);
4704 return true;
4705 }
4706 fn = cg_edge->callee->decl;
4707 cg_edge->callee->get_untransformed_body ();
4708
4709 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4710 cg_edge->callee->verify ();
4711
4712 /* We will be inlining this callee. */
4713 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4714
4715 /* Update the caller's EH personality. */
4716 if (DECL_FUNCTION_PERSONALITY (fn))
4717 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4718 = DECL_FUNCTION_PERSONALITY (fn);
4719
4720 /* Split the block before the GIMPLE_CALL. */
4721 stmt_gsi = gsi_for_stmt (stmt);
4722 gsi_prev (&stmt_gsi);
4723 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4724 bb = e->src;
4725 return_block = e->dest;
4726 remove_edge (e);
4727
4728 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4729 been the source of abnormal edges. In this case, schedule
4730 the removal of dead abnormal edges. */
4731 gsi = gsi_start_bb (return_block);
4732 gsi_next (&gsi);
4733 purge_dead_abnormal_edges = gsi_end_p (gsi);
4734
4735 stmt_gsi = gsi_start_bb (return_block);
4736
4737 /* Build a block containing code to initialize the arguments, the
4738 actual inline expansion of the body, and a label for the return
4739 statements within the function to jump to. The type of the
4740 statement expression is the return type of the function call.
4741 ??? If the call does not have an associated block then we will
4742 remap all callee blocks to NULL, effectively dropping most of
4743 its debug information. This should only happen for calls to
4744 artificial decls inserted by the compiler itself. We need to
4745 either link the inlined blocks into the caller block tree or
4746 not refer to them in any way to not break GC for locations. */
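/* Sketch of the resulting block tree (illustrative): the new id->block is
   prepended under the caller's BLOCK for the call site, and the remapped
   copy of the callee's DECL_INITIAL scope tree is later prepended under
   id->block, so debug info sees the inlined body as a nested scope of the
   caller. */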
4747 if (tree block = gimple_block (stmt))
4748 {
4749 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4750 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4751 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4752 if (loc == UNKNOWN_LOCATION)
4753 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4754 if (loc == UNKNOWN_LOCATION)
4755 loc = BUILTINS_LOCATION;
4756 id->block = make_node (BLOCK);
4757 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4758 BLOCK_SOURCE_LOCATION (id->block) = loc;
4759 prepend_lexical_block (block, id->block);
4760 }
4761
4762 /* Local declarations will be replaced by their equivalents in this map. */
4763 st = id->decl_map;
4764 id->decl_map = new hash_map<tree, tree>;
4765 dst = id->debug_map;
4766 id->debug_map = NULL;
4767 if (flag_stack_reuse != SR_NONE)
4768 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4769
4770 /* Record the function we are about to inline. */
4771 id->src_fn = fn;
4772 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4773 id->reset_location = DECL_IGNORED_P (fn);
4774 id->call_stmt = call_stmt;
4775
4776 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4777 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4778 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4779 simtvars_save = id->dst_simt_vars;
4780 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4781 && (simduid = bb->loop_father->simduid) != NULL_TREE
4782 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4783 && single_imm_use (simduid, &use, &simtenter_stmt)
4784 && is_gimple_call (simtenter_stmt)
4785 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4786 vec_alloc (id->dst_simt_vars, 0);
4787 else
4788 id->dst_simt_vars = NULL;
4789
4790 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4791 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4792
4793 /* If the src function contains an IFN_VA_ARG, then so will the dst
4794 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4795 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4796 src_properties = id->src_cfun->curr_properties & prop_mask;
4797 if (src_properties != prop_mask)
4798 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4799 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4800
4801 gcc_assert (!id->src_cfun->after_inlining);
4802
4803 id->entry_bb = bb;
4804 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4805 {
4806 gimple_stmt_iterator si = gsi_last_bb (bb);
4807 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4808 NOT_TAKEN),
4809 GSI_NEW_STMT);
4810 }
4811 initialize_inlined_parameters (id, stmt, fn, bb);
4812 if (debug_nonbind_markers_p && debug_inline_points && id->block
4813 && inlined_function_outer_scope_p (id->block))
4814 {
4815 gimple_stmt_iterator si = gsi_last_bb (bb);
4816 gsi_insert_after (&si, gimple_build_debug_inline_entry
4817 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4818 GSI_NEW_STMT);
4819 }
4820
4821 if (DECL_INITIAL (fn))
4822 {
4823 if (gimple_block (stmt))
4824 {
4825 tree *var;
4826
4827 prepend_lexical_block (id->block,
4828 remap_blocks (DECL_INITIAL (fn), id));
4829 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4830 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4831 == NULL_TREE));
4832 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4833 otherwise, for DWARF, the DW_TAG_formal_parameter entries will not be
4834 children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block
4835 under it. The parameters can then be evaluated in the debugger,
4836 but don't show up in backtraces. */
4837 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4838 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4839 {
4840 tree v = *var;
4841 *var = TREE_CHAIN (v);
4842 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4843 BLOCK_VARS (id->block) = v;
4844 }
4845 else
4846 var = &TREE_CHAIN (*var);
4847 }
4848 else
4849 remap_blocks_to_null (DECL_INITIAL (fn), id);
4850 }
4851
4852 /* Return statements in the function body will be replaced by jumps
4853 to the RET_LABEL. */
4854 gcc_assert (DECL_INITIAL (fn));
4855 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4856
4857 /* Find the LHS to which the result of this call is assigned. */
4858 return_slot = NULL;
4859 if (gimple_call_lhs (stmt))
4860 {
4861 modify_dest = gimple_call_lhs (stmt);
4862
4863 /* The function which we are inlining might not return a value,
4864 in which case we should issue a warning that the function
4865 does not return a value. In that case the optimizers will
4866 see that the variable to which the value is assigned was not
4867 initialized. We do not want to issue a warning about that
4868 uninitialized variable. */
4869 if (DECL_P (modify_dest))
4870 TREE_NO_WARNING (modify_dest) = 1;
4871
4872 if (gimple_call_return_slot_opt_p (call_stmt))
4873 {
4874 return_slot = modify_dest;
4875 modify_dest = NULL;
4876 }
4877 }
4878 else
4879 modify_dest = NULL;
4880
4881 /* If we are inlining a call to the C++ operator new, we don't want
4882 to use type based alias analysis on the return value. Otherwise
4883 we may get confused if the compiler sees that the inlined new
4884 function returns a pointer which was just deleted. See bug
4885 33407. */
4886 if (DECL_IS_OPERATOR_NEW (fn))
4887 {
4888 return_slot = NULL;
4889 modify_dest = NULL;
4890 }
4891
4892 /* Declare the return variable for the function. */
4893 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4894
4895 /* Add local vars in this inlined callee to caller. */
4896 add_local_variables (id->src_cfun, cfun, id);
4897
4898 if (dump_enabled_p ())
4899 {
4900 char buf[128];
4901 snprintf (buf, sizeof(buf), "%4.2f",
4902 cg_edge->sreal_frequency ().to_double ());
4903 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4904 call_stmt,
4905 "Inlining %C to %C with frequency %s\n",
4906 id->src_node, id->dst_node, buf);
4907 if (dump_file && (dump_flags & TDF_DETAILS))
4908 {
4909 id->src_node->dump (dump_file);
4910 id->dst_node->dump (dump_file);
4911 }
4912 }
4913
4914 /* This is it. Duplicate the callee body. Assume callee is
4915 pre-gimplified. Note that we must not alter the caller
4916 function in any way before this point, as this CALL_EXPR may be
4917 a self-referential call; if we're calling ourselves, we need to
4918 duplicate our body before altering anything. */
4919 copy_body (id, bb, return_block, NULL);
4920
4921 reset_debug_bindings (id, stmt_gsi);
4922
4923 if (flag_stack_reuse != SR_NONE)
4924 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4925 if (!TREE_THIS_VOLATILE (p))
4926 {
4927 tree *varp = id->decl_map->get (p);
4928 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4929 {
4930 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4931 gimple *clobber_stmt;
4932 TREE_THIS_VOLATILE (clobber) = 1;
4933 clobber_stmt = gimple_build_assign (*varp, clobber);
4934 gimple_set_location (clobber_stmt, gimple_location (stmt));
4935 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4936 }
4937 }
4938
4939 /* Reset the escaped solution. */
4940 if (cfun->gimple_df)
4941 pt_solution_reset (&cfun->gimple_df->escaped);
4942
4943 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4944 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4945 {
4946 size_t nargs = gimple_call_num_args (simtenter_stmt);
4947 vec<tree> *vars = id->dst_simt_vars;
4948 auto_vec<tree> newargs (nargs + vars->length ());
4949 for (size_t i = 0; i < nargs; i++)
4950 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4951 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4952 {
4953 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4954 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4955 }
4956 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4957 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4958 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4959 gsi_replace (&gsi, g, false);
4960 }
4961 vec_free (id->dst_simt_vars);
4962 id->dst_simt_vars = simtvars_save;
4963
4964 /* Clean up. */
4965 if (id->debug_map)
4966 {
4967 delete id->debug_map;
4968 id->debug_map = dst;
4969 }
4970 delete id->decl_map;
4971 id->decl_map = st;
4972
4973 /* Unlink the call's virtual operands before replacing it. */
4974 unlink_stmt_vdef (stmt);
4975 if (gimple_vdef (stmt)
4976 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4977 release_ssa_name (gimple_vdef (stmt));
4978
4979 /* If the inlined function returns a result that we care about,
4980 substitute the GIMPLE_CALL with an assignment of the return
4981 variable to the LHS of the call. That is, if STMT was
4982 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4983 if (use_retvar && gimple_call_lhs (stmt))
4984 {
4985 gimple *old_stmt = stmt;
4986 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4987 gimple_set_location (stmt, gimple_location (old_stmt));
4988 gsi_replace (&stmt_gsi, stmt, false);
4989 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4990 /* Append a clobber for id->retvar if easily possible. */
4991 if (flag_stack_reuse != SR_NONE
4992 && id->retvar
4993 && VAR_P (id->retvar)
4994 && id->retvar != return_slot
4995 && id->retvar != modify_dest
4996 && !TREE_THIS_VOLATILE (id->retvar)
4997 && !is_gimple_reg (id->retvar)
4998 && !stmt_ends_bb_p (stmt))
4999 {
5000 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5001 gimple *clobber_stmt;
5002 TREE_THIS_VOLATILE (clobber) = 1;
5003 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5004 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5005 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5006 }
5007 }
5008 else
5009 {
5010 /* Handle the case of inlining a function with no return
5011 statement, which causes the return value to become undefined. */
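/* E.g. (hypothetical): for "x_1 = foo ();" where the inlined foo falls
   off the end, x_1 is turned into an undefined value, either by copying
   VAR's existing default definition into it or by making x_1 itself the
   default definition of VAR. */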
5012 if (gimple_call_lhs (stmt)
5013 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5014 {
5015 tree name = gimple_call_lhs (stmt);
5016 tree var = SSA_NAME_VAR (name);
5017 tree def = var ? ssa_default_def (cfun, var) : NULL;
5018
5019 if (def)
5020 {
5021 /* If the variable is used undefined, make this name
5022 undefined via a move. */
5023 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5024 gsi_replace (&stmt_gsi, stmt, true);
5025 }
5026 else
5027 {
5028 if (!var)
5029 {
5030 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5031 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5032 }
5033 /* Otherwise make this variable undefined. */
5034 gsi_remove (&stmt_gsi, true);
5035 set_ssa_default_def (cfun, var, name);
5036 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5037 }
5038 }
5039 /* Replace with a clobber for id->retvar. */
5040 else if (flag_stack_reuse != SR_NONE
5041 && id->retvar
5042 && VAR_P (id->retvar)
5043 && id->retvar != return_slot
5044 && id->retvar != modify_dest
5045 && !TREE_THIS_VOLATILE (id->retvar)
5046 && !is_gimple_reg (id->retvar))
5047 {
5048 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5049 gimple *clobber_stmt;
5050 TREE_THIS_VOLATILE (clobber) = 1;
5051 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5052 gimple_set_location (clobber_stmt, gimple_location (stmt));
5053 gsi_replace (&stmt_gsi, clobber_stmt, false);
5054 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5055 }
5056 else
5057 gsi_remove (&stmt_gsi, true);
5058 }
5059
5060 if (purge_dead_abnormal_edges)
5061 {
5062 gimple_purge_dead_eh_edges (return_block);
5063 gimple_purge_dead_abnormal_call_edges (return_block);
5064 }
5065
5066 /* If the value of the new expression is ignored, that's OK. We
5067 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5068 the equivalent inlined version either. */
5069 if (is_gimple_assign (stmt))
5070 {
5071 gcc_assert (gimple_assign_single_p (stmt)
5072 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5073 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5074 }
5075
5076 id->add_clobbers_to_eh_landing_pads = 0;
5077
5078 /* Output the inlining info for this abstract function, since it has been
5079 inlined. If we don't do this now, we can lose the information about the
5080 variables in the function when the blocks get blown away as soon as we
5081 remove the cgraph node. */
5082 if (gimple_block (stmt))
5083 (*debug_hooks->outlining_inline_function) (fn);
5084
5085 /* Update callgraph if needed. */
5086 cg_edge->callee->remove ();
5087
5088 id->block = NULL_TREE;
5089 id->retvar = NULL_TREE;
5090 successfully_inlined = true;
5091
5092 egress:
5093 input_location = saved_location;
5094 return successfully_inlined;
5095 }
5096
5097 /* Expand call statements reachable from STMT_P.
5098 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5099 in a MODIFY_EXPR. */
5100
5101 static bool
5102 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5103 {
5104 gimple_stmt_iterator gsi;
5105 bool inlined = false;
5106
5107 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5108 {
5109 gimple *stmt = gsi_stmt (gsi);
5110 gsi_prev (&gsi);
5111
5112 if (is_gimple_call (stmt)
5113 && !gimple_call_internal_p (stmt))
5114 inlined |= expand_call_inline (bb, stmt, id);
5115 }
5116
5117 return inlined;
5118 }
5119
5120
5121 /* Walk all basic blocks created after FIRST and try to fold every statement
5122 in the STATEMENTS pointer set. */
5123
5124 static void
5125 fold_marked_statements (int first, hash_set<gimple *> *statements)
5126 {
5127 for (; first < last_basic_block_for_fn (cfun); first++)
5128 if (BASIC_BLOCK_FOR_FN (cfun, first))
5129 {
5130 gimple_stmt_iterator gsi;
5131
5132 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5133 !gsi_end_p (gsi);
5134 gsi_next (&gsi))
5135 if (statements->contains (gsi_stmt (gsi)))
5136 {
5137 gimple *old_stmt = gsi_stmt (gsi);
5138 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5139
5140 if (old_decl && fndecl_built_in_p (old_decl))
5141 {
5142 /* Folding builtins can create multiple instructions;
5143 we need to look at all of them. */
5144 gimple_stmt_iterator i2 = gsi;
5145 gsi_prev (&i2);
5146 if (fold_stmt (&gsi))
5147 {
5148 gimple *new_stmt;
5149 /* If a builtin at the end of a bb folded into nothing,
5150 the following loop won't work. */
5151 if (gsi_end_p (gsi))
5152 {
5153 cgraph_update_edges_for_call_stmt (old_stmt,
5154 old_decl, NULL);
5155 break;
5156 }
5157 if (gsi_end_p (i2))
5158 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5159 else
5160 gsi_next (&i2);
5161 while (1)
5162 {
5163 new_stmt = gsi_stmt (i2);
5164 update_stmt (new_stmt);
5165 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5166 new_stmt);
5167
5168 if (new_stmt == gsi_stmt (gsi))
5169 {
5170 /* It is okay to check only for the very last
5171 of these statements. If it is a throwing
5172 statement nothing will change. If it isn't,
5173 this can remove EH edges. The only case where
5174 that would not be correct is when some intermediate
5175 stmts throw but the last one does not. That would
5176 mean we'd have to split the block, which we can't
5177 do here and we'd lose anyway. And as builtins
5178 probably never throw, this all
5179 is moot anyway. */
5180 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5181 new_stmt))
5182 gimple_purge_dead_eh_edges (
5183 BASIC_BLOCK_FOR_FN (cfun, first));
5184 break;
5185 }
5186 gsi_next (&i2);
5187 }
5188 }
5189 }
5190 else if (fold_stmt (&gsi))
5191 {
5192 /* Re-read the statement from GSI as fold_stmt() may
5193 have changed it. */
5194 gimple *new_stmt = gsi_stmt (gsi);
5195 update_stmt (new_stmt);
5196
5197 if (is_gimple_call (old_stmt)
5198 || is_gimple_call (new_stmt))
5199 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5200 new_stmt);
5201
5202 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5203 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5204 first));
5205 }
5206 }
5207 }
5208 }
5209
5210 /* Expand calls to inline functions in the body of FN. */
5211
5212 unsigned int
5213 optimize_inline_calls (tree fn)
5214 {
5215 copy_body_data id;
5216 basic_block bb;
5217 int last = n_basic_blocks_for_fn (cfun);
5218 bool inlined_p = false;
5219
5220 /* Clear out ID. */
5221 memset (&id, 0, sizeof (id));
5222
5223 id.src_node = id.dst_node = cgraph_node::get (fn);
5224 gcc_assert (id.dst_node->definition);
5225 id.dst_fn = fn;
5226 /* Or any functions that aren't finished yet. */
5227 if (current_function_decl)
5228 id.dst_fn = current_function_decl;
5229
5230 id.copy_decl = copy_decl_maybe_to_var;
5231 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5232 id.transform_new_cfg = false;
5233 id.transform_return_to_modify = true;
5234 id.transform_parameter = true;
5235 id.transform_lang_insert_block = NULL;
5236 id.statements_to_fold = new hash_set<gimple *>;
5237
5238 push_gimplify_context ();
5239
5240 /* We make no attempts to keep dominance info up-to-date. */
5241 free_dominance_info (CDI_DOMINATORS);
5242 free_dominance_info (CDI_POST_DOMINATORS);
5243
5244 /* Register specific gimple functions. */
5245 gimple_register_cfg_hooks ();
5246
5247 /* Reach the trees by walking over the CFG, and note the
5248 enclosing basic-blocks in the call edges. */
5249 /* We walk the blocks going forward, because inlined function bodies
5250 will split id->current_basic_block, and the new blocks will
5251 follow it; we'll trudge through them, processing their CALL_EXPRs
5252 along the way. */
5253 FOR_EACH_BB_FN (bb, cfun)
5254 inlined_p |= gimple_expand_calls_inline (bb, &id);
5255
5256 pop_gimplify_context (NULL);
5257
5258 if (flag_checking)
5259 {
5260 struct cgraph_edge *e;
5261
5262 id.dst_node->verify ();
5263
5264 /* Double check that we inlined everything we are supposed to inline. */
5265 for (e = id.dst_node->callees; e; e = e->next_callee)
5266 gcc_assert (e->inline_failed);
5267 }
5268
5269 /* Fold queued statements. */
5270 update_max_bb_count ();
5271 fold_marked_statements (last, id.statements_to_fold);
5272 delete id.statements_to_fold;
5273
5274 gcc_assert (!id.debug_stmts.exists ());
5275
5276 /* If we didn't inline into the function there is nothing to do. */
5277 if (!inlined_p)
5278 return 0;
5279
5280 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5281 number_blocks (fn);
5282
5283 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5284
5285 if (flag_checking)
5286 id.dst_node->verify ();
5287
5288 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5289 not possible yet - the IPA passes might make various functions not
5290 throw and they don't care to proactively update local EH info.  This is
5291 done later in the fixup_cfg pass that also executes the verification. */
5292 return (TODO_update_ssa
5293 | TODO_cleanup_cfg
5294 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5295 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5296 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5297 ? TODO_rebuild_frequencies : 0));
5298 }
5299
5300 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5301
5302 tree
5303 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5304 {
5305 enum tree_code code = TREE_CODE (*tp);
5306 enum tree_code_class cl = TREE_CODE_CLASS (code);
5307
5308 /* We make copies of most nodes. */
5309 if (IS_EXPR_CODE_CLASS (cl)
5310 || code == TREE_LIST
5311 || code == TREE_VEC
5312 || code == TYPE_DECL
5313 || code == OMP_CLAUSE)
5314 {
5315 /* Because the chain gets clobbered when we make a copy, we save it
5316 here. */
5317 tree chain = NULL_TREE, new_tree;
5318
5319 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5320 chain = TREE_CHAIN (*tp);
5321
5322 /* Copy the node. */
5323 new_tree = copy_node (*tp);
5324
5325 *tp = new_tree;
5326
5327 /* Now, restore the chain, if appropriate. That will cause
5328 walk_tree to walk into the chain as well. */
5329 if (code == PARM_DECL
5330 || code == TREE_LIST
5331 || code == OMP_CLAUSE)
5332 TREE_CHAIN (*tp) = chain;
5333
5334 /* For now, we don't update BLOCKs when we make copies. So, we
5335 have to nullify all BIND_EXPRs. */
5336 if (TREE_CODE (*tp) == BIND_EXPR)
5337 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5338 }
5339 else if (code == CONSTRUCTOR)
5340 {
5341 /* CONSTRUCTOR nodes need special handling because
5342 we need to duplicate the vector of elements. */
5343 tree new_tree;
5344
5345 new_tree = copy_node (*tp);
5346 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5347 *tp = new_tree;
5348 }
5349 else if (code == STATEMENT_LIST)
5350 /* We used to just abort on STATEMENT_LIST, but we can run into them
5351 with statement-expressions (c++/40975). */
5352 copy_statement_list (tp);
5353 else if (TREE_CODE_CLASS (code) == tcc_type)
5354 *walk_subtrees = 0;
5355 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5356 *walk_subtrees = 0;
5357 else if (TREE_CODE_CLASS (code) == tcc_constant)
5358 *walk_subtrees = 0;
5359 return NULL_TREE;
5360 }
5361
5362 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5363 information indicating to what new SAVE_EXPR this one should be mapped,
5364 use that one. Otherwise, create a new node and enter it in ST. FN is
5365 the function into which the copy will be placed. */
5366
5367 static void
5368 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5369 {
5370 tree *n;
5371 tree t;
5372
5373 /* See if we already encountered this SAVE_EXPR. */
5374 n = st->get (*tp);
5375
5376 /* If we didn't already remap this SAVE_EXPR, do so now. */
5377 if (!n)
5378 {
5379 t = copy_node (*tp);
5380
5381 /* Remember this SAVE_EXPR. */
5382 st->put (*tp, t);
5383 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5384 st->put (t, t);
5385 }
5386 else
5387 {
5388 /* We've already walked into this SAVE_EXPR; don't do it again. */
5389 *walk_subtrees = 0;
5390 t = *n;
5391 }
5392
5393 /* Replace this SAVE_EXPR with the copy. */
5394 *tp = t;
5395 }
5396
5397 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5398 label, copies the declaration and enters it in the decl map of the
5399 'copy_body_data' found in WI->INFO.  */
5400
5401 static tree
5402 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5403 bool *handled_ops_p ATTRIBUTE_UNUSED,
5404 struct walk_stmt_info *wi)
5405 {
5406 copy_body_data *id = (copy_body_data *) wi->info;
5407 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5408
5409 if (stmt)
5410 {
5411 tree decl = gimple_label_label (stmt);
5412
5413 /* Copy the decl and remember the copy. */
5414 insert_decl_map (id, decl, id->copy_decl (decl, id));
5415 }
5416
5417 return NULL_TREE;
5418 }
5419
5420 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5421 struct walk_stmt_info *wi);
5422
5423 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5424 Using the hash_map stored in ID->DECL_MAP, remaps all local
5425 declarations to appropriate replacements in gimple
5426 operands. */
5427
5428 static tree
5429 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5430 {
5431 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5432 copy_body_data *id = (copy_body_data *) wi->info;
5433 hash_map<tree, tree> *st = id->decl_map;
5434 tree *n;
5435 tree expr = *tp;
5436
5437 /* For recursive invocations this is no longer the LHS itself. */
5438 bool is_lhs = wi->is_lhs;
5439 wi->is_lhs = false;
5440
5441 if (TREE_CODE (expr) == SSA_NAME)
5442 {
5443 *tp = remap_ssa_name (*tp, id);
5444 *walk_subtrees = 0;
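/* If this SSA name was the LHS of the statement being walked, make the
   remapped name's defining statement point at that (copied) statement.  */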
5445 if (is_lhs)
5446 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5447 }
5448 /* Only a local declaration (variable or label). */
5449 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5450 || TREE_CODE (expr) == LABEL_DECL)
5451 {
5452 /* Lookup the declaration. */
5453 n = st->get (expr);
5454
5455 /* If it's there, remap it. */
5456 if (n)
5457 *tp = *n;
5458 *walk_subtrees = 0;
5459 }
5460 else if (TREE_CODE (expr) == STATEMENT_LIST
5461 || TREE_CODE (expr) == BIND_EXPR
5462 || TREE_CODE (expr) == SAVE_EXPR)
5463 gcc_unreachable ();
5464 else if (TREE_CODE (expr) == TARGET_EXPR)
5465 {
5466 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5467 It's OK for this to happen if it was part of a subtree that
5468 isn't immediately expanded, such as operand 2 of another
5469 TARGET_EXPR. */
5470 if (!TREE_OPERAND (expr, 1))
5471 {
5472 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5473 TREE_OPERAND (expr, 3) = NULL_TREE;
5474 }
5475 }
5476 else if (TREE_CODE (expr) == OMP_CLAUSE)
5477 {
5478 /* Before the omplower pass completes, some OMP clauses can contain
5479 sequences that are neither copied by gimple_seq_copy nor walked by
5480 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5481 in those situations, we have to copy and process them explicitly.  */
5482
5483 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5484 {
5485 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5486 seq = duplicate_remap_omp_clause_seq (seq, wi);
5487 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5488 }
5489 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5490 {
5491 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5492 seq = duplicate_remap_omp_clause_seq (seq, wi);
5493 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5494 }
5495 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5496 {
5497 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5498 seq = duplicate_remap_omp_clause_seq (seq, wi);
5499 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5500 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5501 seq = duplicate_remap_omp_clause_seq (seq, wi);
5502 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5503 }
5504 }
5505
5506 /* Keep iterating. */
5507 return NULL_TREE;
5508 }
5509
5510
5511 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5512 Using the decl map of the 'copy_body_data' in WI->INFO, remaps all
5513 local declarations to appropriate replacements in gimple
5514 statements. */
5515
5516 static tree
5517 replace_locals_stmt (gimple_stmt_iterator *gsip,
5518 bool *handled_ops_p ATTRIBUTE_UNUSED,
5519 struct walk_stmt_info *wi)
5520 {
5521 copy_body_data *id = (copy_body_data *) wi->info;
5522 gimple *gs = gsi_stmt (*gsip);
5523
5524 if (gbind *stmt = dyn_cast <gbind *> (gs))
5525 {
5526 tree block = gimple_bind_block (stmt);
5527
5528 if (block)
5529 {
5530 remap_block (&block, id);
5531 gimple_bind_set_block (stmt, block);
5532 }
5533
5534 /* This will remap a lot of the same decls again, but this should be
5535 harmless. */
5536 if (gimple_bind_vars (stmt))
5537 {
5538 tree old_var, decls = gimple_bind_vars (stmt);
5539
5540 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5541 if (!can_be_nonlocal (old_var, id)
5542 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5543 remap_decl (old_var, id);
5544
5545 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5546 id->prevent_decl_creation_for_types = true;
5547 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5548 id->prevent_decl_creation_for_types = false;
5549 }
5550 }
5551
5552 /* Keep iterating. */
5553 return NULL_TREE;
5554 }
5555
5556 /* Create a copy of SEQ and remap all decls in it. */
5557
5558 static gimple_seq
5559 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5560 {
5561 if (!seq)
5562 return NULL;
5563
5564 /* Any labels in OMP sequences can only be referred to from within the
5565 sequence itself, so we can both mark them local and remap them here. */
5566 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5567 gimple_seq copy = gimple_seq_copy (seq);
5568 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5569 return copy;
5570 }
5571
5572 /* Copies everything in SEQ and replaces variables and labels local to
5573 current_function_decl. */
5574
5575 gimple_seq
5576 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5577 {
5578 copy_body_data id;
5579 struct walk_stmt_info wi;
5580 gimple_seq copy;
5581
5582 /* There's nothing to do for NULL_TREE. */
5583 if (seq == NULL)
5584 return seq;
5585
5586 /* Set up ID. */
5587 memset (&id, 0, sizeof (id));
5588 id.src_fn = current_function_decl;
5589 id.dst_fn = current_function_decl;
5590 id.src_cfun = cfun;
5591 id.decl_map = new hash_map<tree, tree>;
5592 id.debug_map = NULL;
5593
5594 id.copy_decl = copy_decl_no_change;
5595 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5596 id.transform_new_cfg = false;
5597 id.transform_return_to_modify = false;
5598 id.transform_parameter = false;
5599 id.transform_lang_insert_block = NULL;
5600
5601 /* Walk the tree once to find local labels. */
5602 memset (&wi, 0, sizeof (wi));
5603 hash_set<tree> visited;
5604 wi.info = &id;
5605 wi.pset = &visited;
5606 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5607
5608 copy = gimple_seq_copy (seq);
5609
5610 /* Walk the copy, remapping decls. */
5611 memset (&wi, 0, sizeof (wi));
5612 wi.info = &id;
5613 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5614
5615 /* Clean up. */
5616 delete id.decl_map;
5617 if (id.debug_map)
5618 delete id.debug_map;
5619 if (id.dependence_map)
5620 {
5621 delete id.dependence_map;
5622 id.dependence_map = NULL;
5623 }
5624
5625 return copy;
5626 }
5627
5628
5629 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5630
5631 static tree
5632 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5633 {
5634 if (*tp == data)
5635 return (tree) data;
5636 else
5637 return NULL;
5638 }
5639
5640 DEBUG_FUNCTION bool
5641 debug_find_tree (tree top, tree search)
5642 {
5643 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5644 }
5645
5646
5647 /* Declare the variables created by the inliner.  Add all the variables
5648 in VARS to the function's local decls and onto BLOCK's BLOCK_VARS. */
5649
5650 static void
5651 declare_inline_vars (tree block, tree vars)
5652 {
5653 tree t;
5654 for (t = vars; t; t = DECL_CHAIN (t))
5655 {
5656 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5657 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5658 add_local_decl (cfun, t);
5659 }
5660
5661 if (block)
5662 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5663 }
5664
5665 /* Finish up the bookkeeping for COPY, a copy of DECL.  The DECL
5666 originally lived in the function ID->src_fn, but the copy will be
5667 used in ID->dst_fn. */
5668
5669 tree
5670 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5671 {
5672 /* Don't generate debug information for the copy if we wouldn't have
5673 generated it for the original either. */
5674 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5675 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5676
5677 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5678 declaration inspired this copy. */
5679 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5680
5681 /* The new variable/label has no RTL, yet. */
5682 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5683 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5684 SET_DECL_RTL (copy, 0);
5685 /* For vector typed decls make sure to update DECL_MODE according
5686 to the new function context. */
5687 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5688 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5689
5690 /* These args would always appear unused, if not for this. */
5691 TREE_USED (copy) = 1;
5692
5693 /* Set the context for the new declaration. */
5694 if (!DECL_CONTEXT (decl))
5695 /* Globals stay global. */
5696 ;
5697 else if (DECL_CONTEXT (decl) != id->src_fn)
5698 /* Things that weren't in the scope of the function we're inlining
5699 from aren't in the scope we're inlining to, either. */
5700 ;
5701 else if (TREE_STATIC (decl))
5702 /* Function-scoped static variables should stay in the original
5703 function. */
5704 ;
5705 else
5706 {
5707 /* Ordinary automatic local variables are now in the scope of the
5708 new function. */
5709 DECL_CONTEXT (copy) = id->dst_fn;
5710 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5711 {
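/* We are copying into an SIMT region (DST_SIMT_VARS is set): mark the
   copy with the "omp simt private" attribute if it is not already
   marked, and record it for the later SIMT lowering.  */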
5712 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5713 DECL_ATTRIBUTES (copy)
5714 = tree_cons (get_identifier ("omp simt private"), NULL,
5715 DECL_ATTRIBUTES (copy));
5716 id->dst_simt_vars->safe_push (copy);
5717 }
5718 }
5719
5720 return copy;
5721 }
5722
5723 static tree
5724 copy_decl_to_var (tree decl, copy_body_data *id)
5725 {
5726 tree copy, type;
5727
5728 gcc_assert (TREE_CODE (decl) == PARM_DECL
5729 || TREE_CODE (decl) == RESULT_DECL);
5730
5731 type = TREE_TYPE (decl);
5732
5733 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5734 VAR_DECL, DECL_NAME (decl), type);
5735 if (DECL_PT_UID_SET_P (decl))
5736 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5737 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5738 TREE_READONLY (copy) = TREE_READONLY (decl);
5739 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5740 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5741
5742 return copy_decl_for_dup_finish (id, decl, copy);
5743 }
5744
5745 /* Like copy_decl_to_var, but create a return slot object instead of a
5746 pointer variable for return by invisible reference. */
5747
5748 static tree
5749 copy_result_decl_to_var (tree decl, copy_body_data *id)
5750 {
5751 tree copy, type;
5752
5753 gcc_assert (TREE_CODE (decl) == PARM_DECL
5754 || TREE_CODE (decl) == RESULT_DECL);
5755
5756 type = TREE_TYPE (decl);
5757 if (DECL_BY_REFERENCE (decl))
5758 type = TREE_TYPE (type);
5759
5760 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5761 VAR_DECL, DECL_NAME (decl), type);
5762 if (DECL_PT_UID_SET_P (decl))
5763 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5764 TREE_READONLY (copy) = TREE_READONLY (decl);
5765 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5766 if (!DECL_BY_REFERENCE (decl))
5767 {
5768 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5769 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5770 }
5771
5772 return copy_decl_for_dup_finish (id, decl, copy);
5773 }
5774
5775 tree
5776 copy_decl_no_change (tree decl, copy_body_data *id)
5777 {
5778 tree copy;
5779
5780 copy = copy_node (decl);
5781
5782 /* The COPY is not abstract; it will be generated in DST_FN. */
5783 DECL_ABSTRACT_P (copy) = false;
5784 lang_hooks.dup_lang_specific_decl (copy);
5785
5786 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5787 been taken; it's for internal bookkeeping in expand_goto_internal. */
5788 if (TREE_CODE (copy) == LABEL_DECL)
5789 {
5790 TREE_ADDRESSABLE (copy) = 0;
5791 LABEL_DECL_UID (copy) = -1;
5792 }
5793
5794 return copy_decl_for_dup_finish (id, decl, copy);
5795 }
5796
5797 static tree
5798 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5799 {
5800 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5801 return copy_decl_to_var (decl, id);
5802 else
5803 return copy_decl_no_change (decl, id);
5804 }
5805
5806 /* Return a copy of the function's argument tree. */
5807 static tree
5808 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5809 bitmap args_to_skip, tree *vars)
5810 {
5811 tree arg, *parg;
5812 tree new_parm = NULL;
5813 int i = 0;
5814
5815 parg = &new_parm;
5816
5817 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5818 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5819 {
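/* remap_decl may have mapped the argument to something that is not a
   PARM_DECL; a kept argument must stay a PARM_DECL, so make a fresh
   copy in that case.  */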
5820 tree new_tree = remap_decl (arg, id);
5821 if (TREE_CODE (new_tree) != PARM_DECL)
5822 new_tree = id->copy_decl (arg, id);
5823 lang_hooks.dup_lang_specific_decl (new_tree);
5824 *parg = new_tree;
5825 parg = &DECL_CHAIN (new_tree);
5826 }
5827 else if (!id->decl_map->get (arg))
5828 {
5829 /* Make an equivalent VAR_DECL.  If the argument was used
5830 as a temporary variable later in the function, its uses will be
5831 replaced by this local variable. */
5832 tree var = copy_decl_to_var (arg, id);
5833 insert_decl_map (id, arg, var);
5834 /* Declare this new variable. */
5835 DECL_CHAIN (var) = *vars;
5836 *vars = var;
5837 }
5838 return new_parm;
5839 }
5840
5841 /* Return a copy of the function's static chain. */
5842 static tree
5843 copy_static_chain (tree static_chain, copy_body_data * id)
5844 {
5845 tree *chain_copy, *pvar;
5846
5847 chain_copy = &static_chain;
5848 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5849 {
5850 tree new_tree = remap_decl (*pvar, id);
5851 lang_hooks.dup_lang_specific_decl (new_tree);
5852 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5853 *pvar = new_tree;
5854 }
5855 return static_chain;
5856 }
5857
5858 /* Return true if the function is allowed to be versioned.
5859 This is a guard for the versioning functionality. */
5860
5861 bool
5862 tree_versionable_function_p (tree fndecl)
5863 {
5864 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5865 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5866 }
5867
5868 /* Update clone info after duplication. */
5869
5870 static void
5871 update_clone_info (copy_body_data * id)
5872 {
5873 struct cgraph_node *node;
5874 if (!id->dst_node->clones)
5875 return;
5876 for (node = id->dst_node->clones; node != id->dst_node;)
5877 {
5878 /* First update replace maps to match the new body. */
5879 if (node->clone.tree_map)
5880 {
5881 unsigned int i;
5882 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5883 {
5884 struct ipa_replace_map *replace_info;
5885 replace_info = (*node->clone.tree_map)[i];
5886 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5887 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5888 }
5889 }
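/* Walk the clone tree in preorder: descend into a node's own clones
   first, then visit siblings, and finally climb back up until we find
   an ancestor with an unvisited sibling (or reach DST_NODE again).  */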
5890 if (node->clones)
5891 node = node->clones;
5892 else if (node->next_sibling_clone)
5893 node = node->next_sibling_clone;
5894 else
5895 {
5896 while (node != id->dst_node && !node->next_sibling_clone)
5897 node = node->clone_of;
5898 if (node != id->dst_node)
5899 node = node->next_sibling_clone;
5900 }
5901 }
5902 }
5903
5904 /* Create a copy of a function's tree.
5905 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5906 of the original function and the new copied function
5907 respectively. In case we want to replace a DECL
5908 tree with another tree while duplicating the function's
5909 body, TREE_MAP represents the mapping between these
5910 trees. If UPDATE_CLONES is set, the call_stmt fields
5911 of edges of clones of the function will be updated.
5912
5913 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5914 from the new version.
5915 If SKIP_RETURN is true, the new version will return void.
5916 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5917 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5918 */
5919 void
5920 tree_function_versioning (tree old_decl, tree new_decl,
5921 vec<ipa_replace_map *, va_gc> *tree_map,
5922 bool update_clones, bitmap args_to_skip,
5923 bool skip_return, bitmap blocks_to_copy,
5924 basic_block new_entry)
5925 {
5926 struct cgraph_node *old_version_node;
5927 struct cgraph_node *new_version_node;
5928 copy_body_data id;
5929 tree p;
5930 unsigned i;
5931 struct ipa_replace_map *replace_info;
5932 basic_block old_entry_block, bb;
5933 auto_vec<gimple *, 10> init_stmts;
5934 tree vars = NULL_TREE;
5935 bitmap debug_args_to_skip = args_to_skip;
5936
5937 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5938 && TREE_CODE (new_decl) == FUNCTION_DECL);
5939 DECL_POSSIBLY_INLINED (old_decl) = 1;
5940
5941 old_version_node = cgraph_node::get (old_decl);
5942 gcc_checking_assert (old_version_node);
5943 new_version_node = cgraph_node::get (new_decl);
5944 gcc_checking_assert (new_version_node);
5945
5946 /* Copy over debug args. */
5947 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5948 {
5949 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5950 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5951 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5952 old_debug_args = decl_debug_args_lookup (old_decl);
5953 if (old_debug_args)
5954 {
5955 new_debug_args = decl_debug_args_insert (new_decl);
5956 *new_debug_args = vec_safe_copy (*old_debug_args);
5957 }
5958 }
5959
5960 /* Output the inlining info for this abstract function, since it has been
5961 inlined. If we don't do this now, we can lose the information about the
5962 variables in the function when the blocks get blown away as soon as we
5963 remove the cgraph node. */
5964 (*debug_hooks->outlining_inline_function) (old_decl);
5965
5966 DECL_ARTIFICIAL (new_decl) = 1;
5967 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5968 if (DECL_ORIGIN (old_decl) == old_decl)
5969 old_version_node->used_as_abstract_origin = true;
5970 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5971
5972 /* Prepare the data structures for the tree copy. */
5973 memset (&id, 0, sizeof (id));
5974
5975 /* Generate a new name for the new version. */
5976 id.statements_to_fold = new hash_set<gimple *>;
5977
5978 id.decl_map = new hash_map<tree, tree>;
5979 id.debug_map = NULL;
5980 id.src_fn = old_decl;
5981 id.dst_fn = new_decl;
5982 id.src_node = old_version_node;
5983 id.dst_node = new_version_node;
5984 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5985 id.blocks_to_copy = blocks_to_copy;
5986
5987 id.copy_decl = copy_decl_no_change;
5988 id.transform_call_graph_edges
5989 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5990 id.transform_new_cfg = true;
5991 id.transform_return_to_modify = false;
5992 id.transform_parameter = false;
5993 id.transform_lang_insert_block = NULL;
5994
5995 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5996 (DECL_STRUCT_FUNCTION (old_decl));
5997 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5998 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5999 initialize_cfun (new_decl, old_decl,
6000 new_entry ? new_entry->count : old_entry_block->count);
6001 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6002 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6003 = id.src_cfun->gimple_df->ipa_pta;
6004
6005 /* Copy the function's static chain. */
6006 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6007 if (p)
6008 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6009 = copy_static_chain (p, &id);
6010
6011 /* If there's a tree_map, prepare for substitution. */
6012 if (tree_map)
6013 for (i = 0; i < tree_map->length (); i++)
6014 {
6015 gimple *init;
6016 replace_info = (*tree_map)[i];
6017 if (replace_info->replace_p)
6018 {
6019 int parm_num = -1;
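/* The replacement may name the parameter directly in OLD_TREE or only
   by its index in PARM_NUM; in the latter case look the PARM_DECL up in
   OLD_DECL's arguments and fold NEW_TREE to the parameter's type.  */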
6020 if (!replace_info->old_tree)
6021 {
6022 int p = replace_info->parm_num;
6023 tree parm;
6024 tree req_type, new_type;
6025
6026 for (parm = DECL_ARGUMENTS (old_decl); p;
6027 parm = DECL_CHAIN (parm))
6028 p--;
6029 replace_info->old_tree = parm;
6030 parm_num = replace_info->parm_num;
6031 req_type = TREE_TYPE (parm);
6032 new_type = TREE_TYPE (replace_info->new_tree);
6033 if (!useless_type_conversion_p (req_type, new_type))
6034 {
6035 if (fold_convertible_p (req_type, replace_info->new_tree))
6036 replace_info->new_tree
6037 = fold_build1 (NOP_EXPR, req_type,
6038 replace_info->new_tree);
6039 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6040 replace_info->new_tree
6041 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6042 replace_info->new_tree);
6043 else
6044 {
6045 if (dump_file)
6046 {
6047 fprintf (dump_file, " const ");
6048 print_generic_expr (dump_file,
6049 replace_info->new_tree);
6050 fprintf (dump_file,
6051 " can't be converted to param ");
6052 print_generic_expr (dump_file, parm);
6053 fprintf (dump_file, "\n");
6054 }
6055 replace_info->old_tree = NULL;
6056 }
6057 }
6058 }
6059 else
6060 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6061 if (replace_info->old_tree)
6062 {
6063 init = setup_one_parameter (&id, replace_info->old_tree,
6064 replace_info->new_tree, id.src_fn,
6065 NULL,
6066 &vars);
6067 if (init)
6068 init_stmts.safe_push (init);
6069 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6070 {
6071 if (parm_num == -1)
6072 {
6073 tree parm;
6074 int p;
6075 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6076 parm = DECL_CHAIN (parm), p++)
6077 if (parm == replace_info->old_tree)
6078 {
6079 parm_num = p;
6080 break;
6081 }
6082 }
6083 if (parm_num != -1)
6084 {
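/* A replacement value has been substituted for this parameter, so its
   value is materialized in the clone and no debug-args entry is needed;
   lazily copy the bitmap and clear the parameter's bit.  */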
6085 if (debug_args_to_skip == args_to_skip)
6086 {
6087 debug_args_to_skip = BITMAP_ALLOC (NULL);
6088 bitmap_copy (debug_args_to_skip, args_to_skip);
6089 }
6090 bitmap_clear_bit (debug_args_to_skip, parm_num);
6091 }
6092 }
6093 }
6094 }
6095 }
6096 /* Copy the function's arguments. */
6097 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6098 DECL_ARGUMENTS (new_decl)
6099 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6100 args_to_skip, &vars);
6101
6102 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6103 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6104
6105 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6106
6107 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6108 /* Add local vars. */
6109 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6110
6111 if (DECL_RESULT (old_decl) == NULL_TREE)
6112 ;
6113 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6114 {
6115 DECL_RESULT (new_decl)
6116 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6117 RESULT_DECL, NULL_TREE, void_type_node);
6118 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6119 cfun->returns_struct = 0;
6120 cfun->returns_pcc_struct = 0;
6121 }
6122 else
6123 {
6124 tree old_name;
6125 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6126 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6127 if (gimple_in_ssa_p (id.src_cfun)
6128 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6129 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6130 {
6131 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6132 insert_decl_map (&id, old_name, new_name);
6133 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6134 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6135 }
6136 }
6137
6138 /* Set up the destination function's loop tree. */
6139 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6140 {
6141 cfun->curr_properties &= ~PROP_loops;
6142 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6143 cfun->curr_properties |= PROP_loops;
6144 }
6145
6146 /* Copy the Function's body. */
6147 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6148 new_entry);
6149
6150 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6151 number_blocks (new_decl);
6152
6153 /* We want to create the BB unconditionally, so that the addition of
6154 debug stmts doesn't affect BB count, which may in the end cause
6155 codegen differences. */
6156 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6157 while (init_stmts.length ())
6158 insert_init_stmt (&id, bb, init_stmts.pop ());
6159 update_clone_info (&id);
6160
6161 /* Remap the nonlocal_goto_save_area, if any. */
6162 if (cfun->nonlocal_goto_save_area)
6163 {
6164 struct walk_stmt_info wi;
6165
6166 memset (&wi, 0, sizeof (wi));
6167 wi.info = &id;
6168 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6169 }
6170
6171 /* Clean up. */
6172 delete id.decl_map;
6173 if (id.debug_map)
6174 delete id.debug_map;
6175 free_dominance_info (CDI_DOMINATORS);
6176 free_dominance_info (CDI_POST_DOMINATORS);
6177
6178 update_max_bb_count ();
6179 fold_marked_statements (0, id.statements_to_fold);
6180 delete id.statements_to_fold;
6181 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6182 if (id.dst_node->definition)
6183 cgraph_edge::rebuild_references ();
6184 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6185 {
6186 calculate_dominance_info (CDI_DOMINATORS);
6187 fix_loop_structure (NULL);
6188 }
6189 update_ssa (TODO_update_ssa);
6190
6191 /* After partial cloning we need to rescale frequencies, so they are
6192 within proper range in the cloned function. */
6193 if (new_entry)
6194 {
6195 struct cgraph_edge *e;
6196 rebuild_frequencies ();
6197
6198 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6199 for (e = new_version_node->callees; e; e = e->next_callee)
6200 {
6201 basic_block bb = gimple_bb (e->call_stmt);
6202 e->count = bb->count;
6203 }
6204 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6205 {
6206 basic_block bb = gimple_bb (e->call_stmt);
6207 e->count = bb->count;
6208 }
6209 }
6210
6211 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6212 {
6213 tree parm;
6214 vec<tree, va_gc> **debug_args = NULL;
6215 unsigned int len = 0;
6216 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6217 parm; parm = DECL_CHAIN (parm), i++)
6218 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6219 {
6220 tree ddecl;
6221
6222 if (debug_args == NULL)
6223 {
6224 debug_args = decl_debug_args_insert (new_decl);
6225 len = vec_safe_length (*debug_args);
6226 }
6227 ddecl = make_node (DEBUG_EXPR_DECL);
6228 DECL_ARTIFICIAL (ddecl) = 1;
6229 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6230 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6231 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6232 vec_safe_push (*debug_args, ddecl);
6233 }
6234 if (debug_args != NULL)
6235 {
6236 /* On the callee side, add
6237 DEBUG D#Y s=> parm
6238 DEBUG var => D#Y
6239 stmts to the first bb where var is a VAR_DECL created for the
6240 optimized away parameter in DECL_INITIAL block. This hints
6241 in the debug info that var (whose DECL_ORIGIN is the parm
6242 PARM_DECL) is optimized away, but could be looked up at the
6243 call site as value of D#X there. */
6244 tree var = vars, vexpr;
6245 gimple_stmt_iterator cgsi
6246 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6247 gimple *def_temp;
6248 var = vars;
6249 i = vec_safe_length (*debug_args);
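/* *DEBUG_ARGS holds (DECL_ORIGIN of parm, DEBUG_EXPR_DECL) pairs; walk
   the entries added above (indices LEN .. length - 1) backwards and
   match each pair with the VAR_DECL in VARS that has that origin.  */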
6250 do
6251 {
6252 i -= 2;
6253 while (var != NULL_TREE
6254 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6255 var = TREE_CHAIN (var);
6256 if (var == NULL_TREE)
6257 break;
6258 vexpr = make_node (DEBUG_EXPR_DECL);
6259 parm = (**debug_args)[i];
6260 DECL_ARTIFICIAL (vexpr) = 1;
6261 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6262 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6263 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6264 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6265 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6266 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6267 }
6268 while (i > len);
6269 }
6270 }
6271
6272 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6273 BITMAP_FREE (debug_args_to_skip);
6274 free_dominance_info (CDI_DOMINATORS);
6275 free_dominance_info (CDI_POST_DOMINATORS);
6276
6277 gcc_assert (!id.debug_stmts.exists ());
6278 pop_cfun ();
6279 return;
6280 }
6281
6282 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6283 the callee and return the inlined body on success. */
6284
6285 tree
6286 maybe_inline_call_in_expr (tree exp)
6287 {
6288 tree fn = get_callee_fndecl (exp);
6289
6290 /* We can only try to inline "const" functions. */
6291 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6292 {
6293 call_expr_arg_iterator iter;
6294 copy_body_data id;
6295 tree param, arg, t;
6296 hash_map<tree, tree> decl_map;
6297
6298 /* Remap the parameters. */
6299 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6300 param;
6301 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6302 decl_map.put (param, arg);
6303
6304 memset (&id, 0, sizeof (id));
6305 id.src_fn = fn;
6306 id.dst_fn = current_function_decl;
6307 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6308 id.decl_map = &decl_map;
6309
6310 id.copy_decl = copy_decl_no_change;
6311 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6312 id.transform_new_cfg = false;
6313 id.transform_return_to_modify = true;
6314 id.transform_parameter = true;
6315 id.transform_lang_insert_block = NULL;
6316
6317 /* Make sure not to unshare trees behind the front-end's back
6318 since front-end specific mechanisms may rely on sharing. */
6319 id.regimplify = false;
6320 id.do_not_unshare = true;
6321
6322 /* We're not inside any EH region. */
6323 id.eh_lp_nr = 0;
6324
6325 t = copy_tree_body (&id);
6326
6327 /* We can only return something suitable for use in a GENERIC
6328 expression tree. */
6329 if (TREE_CODE (t) == MODIFY_EXPR)
6330 return TREE_OPERAND (t, 1);
6331 }
6332
6333 return NULL_TREE;
6334 }
6335
6336 /* Duplicate a type, fields and all. */
6337
6338 tree
6339 build_duplicate_type (tree type)
6340 {
6341 struct copy_body_data id;
6342
6343 memset (&id, 0, sizeof (id));
6344 id.src_fn = current_function_decl;
6345 id.dst_fn = current_function_decl;
6346 id.src_cfun = cfun;
6347 id.decl_map = new hash_map<tree, tree>;
6348 id.debug_map = NULL;
6349 id.copy_decl = copy_decl_no_change;
6350
6351 type = remap_type_1 (type, &id);
6352
6353 delete id.decl_map;
6354 if (id.debug_map)
6355 delete id.debug_map;
6356
6357 TYPE_CANONICAL (type) = type;
6358
6359 return type;
6360 }
6361
6362 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6363 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6364 evaluation. */
6365
6366 tree
6367 copy_fn (tree fn, tree& parms, tree& result)
6368 {
6369 copy_body_data id;
6370 tree param;
6371 hash_map<tree, tree> decl_map;
6372
6373 tree *p = &parms;
6374 *p = NULL_TREE;
6375
6376 memset (&id, 0, sizeof (id));
6377 id.src_fn = fn;
6378 id.dst_fn = current_function_decl;
6379 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6380 id.decl_map = &decl_map;
6381
6382 id.copy_decl = copy_decl_no_change;
6383 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6384 id.transform_new_cfg = false;
6385 id.transform_return_to_modify = false;
6386 id.transform_parameter = true;
6387 id.transform_lang_insert_block = NULL;
6388
6389 /* Make sure not to unshare trees behind the front-end's back
6390 since front-end specific mechanisms may rely on sharing. */
6391 id.regimplify = false;
6392 id.do_not_unshare = true;
6393 id.do_not_fold = true;
6394
6395 /* We're not inside any EH region. */
6396 id.eh_lp_nr = 0;
6397
6398 /* Remap the parameters and result and return them to the caller. */
6399 for (param = DECL_ARGUMENTS (fn);
6400 param;
6401 param = DECL_CHAIN (param))
6402 {
6403 *p = remap_decl (param, &id);
6404 p = &DECL_CHAIN (*p);
6405 }
6406
6407 if (DECL_RESULT (fn))
6408 result = remap_decl (DECL_RESULT (fn), &id);
6409 else
6410 result = NULL_TREE;
6411
6412 return copy_tree_body (&id);
6413 }