1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64
65 /* I'm not really happy about this, but we need to handle gimple and
66 non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75    statements and RESX statements is adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
81
82    Versioning: a function body is duplicated and the result is a new
83    function, rather than being inserted into the blocks of an existing
84    function as with inlining.  Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90    All of these will simultaneously look up any callgraph edges.  If
91    we're going to inline the duplicated function body, and the given
92    function has some cloned callgraph nodes (one for each place this
93    function will be inlined), those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
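
/* As a rough illustration of the inlining transform described above
   (all names in this sketch are invented), consider

       int callee (int x) { return x + 1; }
       int caller (void)  { return callee (2); }

   When callee is inlined, its PARM_DECL 'x' is remapped to a new local,
   the RETURN_EXPR becomes an assignment to a dedicated return variable,
   and the call site ends up roughly as

       x.1 = 2;
       retval.2 = x.1 + 1;
       <caller continues, using retval.2>

   The exact temporaries and SSA names are produced by the remapping
   machinery below.  */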
99
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID.  Although the name suggests
139    that the trees should be variables, it is used for more than that.  */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
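
/* For instance (an illustrative sketch only): after
   insert_decl_map (id, old_parm, new_var), the map holds both
   old_parm -> new_var and the identity entry new_var -> new_var, so
   when the copied body is walked again new_var maps to itself rather
   than being duplicated a second time.  */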
151
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 n = id->decl_map->get (val);
218 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 return *n;
220 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 DECL_ARTIFICIAL (vexpr) = 1;
222 TREE_TYPE (vexpr) = TREE_TYPE (name);
223 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 insert_decl_map (id, val, vexpr);
227 return vexpr;
228 }
229
230 processing_debug_stmt = -1;
231 return name;
232 }
233
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var = SSA_NAME_VAR (name);
236 if (!var
237 || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 && VAR_P (var)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 && DECL_ARTIFICIAL (var)
241 && DECL_IGNORED_P (var)
242 && !DECL_NAME (var)))
243 {
244 struct ptr_info_def *pi;
245 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246 if (!var && SSA_NAME_IDENTIFIER (name))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248 insert_decl_map (id, name, new_tree);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id->src_cfun->gimple_df
253 && id->src_cfun->gimple_df->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name))
255 && (pi = SSA_NAME_PTR_INFO (name))
256 && !pi->pt.anything)
257 {
258 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 new_pi->pt = pi->pt;
260 }
261 return new_tree;
262 }
263
264   /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that
265 in copy_bb. */
266 new_tree = remap_decl (var, id);
267
268   /* We might have substituted a constant or another SSA_NAME for
269      the variable.
270 
271      Replace the SSA name representing the RESULT_DECL with the variable
272      during inlining: this saves us from the need to introduce a PHI node
273      in case the return value is only partly initialized.  */
274 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
275 && (!SSA_NAME_VAR (name)
276 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
277 || !id->transform_return_to_modify))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (new_tree);
281 insert_decl_map (id, name, new_tree);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id->src_cfun->gimple_df
286 && id->src_cfun->gimple_df->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name))
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296 	  /* By inlining a function having an uninitialized variable, we might
297 	     extend its lifetime (the variable might get reused).  This causes an
298 	     ICE in the case we end up extending the lifetime of an SSA name across
299 	     an abnormal edge, and it also increases register pressure.
300 
301 	     We simply initialize all uninitialized vars by 0, except
302 	     for the case we are inlining to the very first BB.  We could avoid
303 	     this for all BBs that are not inside strongly connected
304 	     regions of the CFG, but this is expensive to test.  */
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple *init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = id->decl_map->get (decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
355 if (!n
356 && id->prevent_decl_creation_for_types
357 && id->remapping_type_depth > 0
358 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
359 return decl;
360
361 /* If we didn't already have an equivalent for this declaration, create one
362 now. */
363 if (!n)
364 {
365 /* Make a copy of the variable or label. */
366 tree t = id->copy_decl (decl, id);
367
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id, decl, t);
372
373 if (!DECL_P (t))
374 return t;
375
376 /* Remap types, if necessary. */
377 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
378 if (TREE_CODE (t) == TYPE_DECL)
379 {
380 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
381
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
386 {
387 tree x = build_variant_type_copy (TREE_TYPE (t));
388 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
389 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
390 DECL_ORIGINAL_TYPE (t) = x;
391 }
392 }
393
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
396 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
397
398 /* If fields, do likewise for offset and qualifier. */
399 if (TREE_CODE (t) == FIELD_DECL)
400 {
401 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
402 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
403 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
404 }
405
406 return t;
407 }
408
409 if (id->do_not_unshare)
410 return *n;
411 else
412 return unshare_expr (*n);
413 }
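
/* A small usage sketch for remap_decl (the names are invented for
   illustration): the first time it is called for a callee local 'tmp',
   id->copy_decl creates a fresh declaration, the pair is recorded with
   insert_decl_map, and the copy's type and DECL_SIZE trees are remapped
   in turn; any later call for the same 'tmp' just returns the recorded
   copy, unshared unless id->do_not_unshare is set.  */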
414
415 static tree
416 remap_type_1 (tree type, copy_body_data *id)
417 {
418 tree new_tree, t;
419
420   /* We do need a copy.  Build and register it now.  If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
422 reference type. */
423 if (TREE_CODE (type) == POINTER_TYPE)
424 {
425 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
426 TYPE_MODE (type),
427 TYPE_REF_CAN_ALIAS_ALL (type));
428 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
429 new_tree = build_type_attribute_qual_variant (new_tree,
430 TYPE_ATTRIBUTES (type),
431 TYPE_QUALS (type));
432 insert_decl_map (id, type, new_tree);
433 return new_tree;
434 }
435 else if (TREE_CODE (type) == REFERENCE_TYPE)
436 {
437 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
438 TYPE_MODE (type),
439 TYPE_REF_CAN_ALIAS_ALL (type));
440 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
441 new_tree = build_type_attribute_qual_variant (new_tree,
442 TYPE_ATTRIBUTES (type),
443 TYPE_QUALS (type));
444 insert_decl_map (id, type, new_tree);
445 return new_tree;
446 }
447 else
448 new_tree = copy_node (type);
449
450 insert_decl_map (id, type, new_tree);
451
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t = TYPE_MAIN_VARIANT (type);
455 if (type != t)
456 {
457 t = remap_type (t, id);
458 TYPE_MAIN_VARIANT (new_tree) = t;
459 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
460 TYPE_NEXT_VARIANT (t) = new_tree;
461 }
462 else
463 {
464 TYPE_MAIN_VARIANT (new_tree) = new_tree;
465 TYPE_NEXT_VARIANT (new_tree) = NULL;
466 }
467
468 if (TYPE_STUB_DECL (type))
469 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
470
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree) = NULL;
473 TYPE_REFERENCE_TO (new_tree) = NULL;
474
475 /* Copy all types that may contain references to local variables; be sure to
476    preserve sharing between the type and its main variant when possible.  */
477 switch (TREE_CODE (new_tree))
478 {
479 case INTEGER_TYPE:
480 case REAL_TYPE:
481 case FIXED_POINT_TYPE:
482 case ENUMERAL_TYPE:
483 case BOOLEAN_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
485 {
486 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
488
489 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
490 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 }
492 else
493 {
494 t = TYPE_MIN_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
497
498 t = TYPE_MAX_VALUE (new_tree);
499 if (t && TREE_CODE (t) != INTEGER_CST)
500 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 }
502 return new_tree;
503
504 case FUNCTION_TYPE:
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
507 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
508 else
509 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
511 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
512 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
513 else
514 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
515 return new_tree;
516
517 case ARRAY_TYPE:
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
519 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
520 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
521 else
522 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
523
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
525 {
526 gcc_checking_assert (TYPE_DOMAIN (type)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
528 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
529 }
530 else
531 {
532 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
533 /* For array bounds where we have decided not to copy over the bounds
534 	     variable that isn't used in the OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
537 && id->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
539 {
540 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
541 DECL_ATTRIBUTES (v)
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
543 DECL_ATTRIBUTES (v));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
545 }
546 }
547 break;
548
549 case RECORD_TYPE:
550 case UNION_TYPE:
551 case QUAL_UNION_TYPE:
552 if (TYPE_MAIN_VARIANT (type) != type
553 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
554 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
555 else
556 {
557 tree f, nf = NULL;
558
559 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
560 {
561 t = remap_decl (f, id);
562 DECL_CONTEXT (t) = new_tree;
563 DECL_CHAIN (t) = nf;
564 nf = t;
565 }
566 TYPE_FIELDS (new_tree) = nreverse (nf);
567 }
568 break;
569
570 case OFFSET_TYPE:
571 default:
572 /* Shouldn't have been thought variable sized. */
573 gcc_unreachable ();
574 }
575
576   /* All variants of the type share the same size, so use the already remapped data.  */
577 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
578 {
579 tree s = TYPE_SIZE (type);
580 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
581 tree su = TYPE_SIZE_UNIT (type);
582 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
583 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
585 || s == mvs);
586 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
588 || su == mvsu);
589 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
590 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
591 }
592 else
593 {
594 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
596 }
597
598 return new_tree;
599 }
600
601 tree
602 remap_type (tree type, copy_body_data *id)
603 {
604 tree *node;
605 tree tmp;
606
607 if (type == NULL)
608 return type;
609
610 /* See if we have remapped this type. */
611 node = id->decl_map->get (type);
612 if (node)
613 return *node;
614
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type, id->src_fn))
617 {
618 insert_decl_map (id, type, type);
619 return type;
620 }
621
622 id->remapping_type_depth++;
623 tmp = remap_type_1 (type, id);
624 id->remapping_type_depth--;
625
626 return tmp;
627 }
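
/* Example (illustrative only): in

       void f (int n) { int a[n]; ... }

   the type of 'a' is variably modified because its bound refers to the
   local 'n', so remap_type must build a copy of the array type whose
   bound refers to the remapped 'n' when f is inlined or cloned.  A
   plain type such as 'int' is not variably modified and is simply
   mapped to itself.  */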
628
629 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
630
631 static bool
632 can_be_nonlocal (tree decl, copy_body_data *id)
633 {
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl) == FUNCTION_DECL)
636 return true;
637
638 /* Local static vars must be non-local or we get multiple declaration
639 problems. */
640 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
641 return true;
642
643 return false;
644 }
645
646 static tree
647 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
648 copy_body_data *id)
649 {
650 tree old_var;
651 tree new_decls = NULL_TREE;
652
653 /* Remap its variables. */
654 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
655 {
656 tree new_var;
657
658 if (can_be_nonlocal (old_var, id))
659 {
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
663 add_local_decl (cfun, old_var);
664 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
665 && !DECL_IGNORED_P (old_var)
666 && nonlocalized_list)
667 vec_safe_push (*nonlocalized_list, old_var);
668 continue;
669 }
670
671 /* Remap the variable. */
672 new_var = remap_decl (old_var, id);
673
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
677
678 if (new_var == id->retvar)
679 ;
680 else if (!new_var)
681 {
682 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
683 && !DECL_IGNORED_P (old_var)
684 && nonlocalized_list)
685 vec_safe_push (*nonlocalized_list, old_var);
686 }
687 else
688 {
689 gcc_assert (DECL_P (new_var));
690 DECL_CHAIN (new_var) = new_decls;
691 new_decls = new_var;
692
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
695 {
696 tree tem = DECL_VALUE_EXPR (new_var);
697 bool old_regimplify = id->regimplify;
698 id->remapping_type_depth++;
699 walk_tree (&tem, copy_tree_body_r, id, NULL);
700 id->remapping_type_depth--;
701 id->regimplify = old_regimplify;
702 SET_DECL_VALUE_EXPR (new_var, tem);
703 }
704 }
705 }
706
707 return nreverse (new_decls);
708 }
709
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein. And hook the new block into the block-tree. */
712
713 static void
714 remap_block (tree *block, copy_body_data *id)
715 {
716 tree old_block;
717 tree new_block;
718
719 /* Make the new block. */
720 old_block = *block;
721 new_block = make_node (BLOCK);
722 TREE_USED (new_block) = TREE_USED (old_block);
723 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
724 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
725 BLOCK_NONLOCALIZED_VARS (new_block)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
727 *block = new_block;
728
729 /* Remap its variables. */
730 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
731 &BLOCK_NONLOCALIZED_VARS (new_block),
732 id);
733
734 if (id->transform_lang_insert_block)
735 id->transform_lang_insert_block (new_block);
736
737 /* Remember the remapped block. */
738 insert_decl_map (id, old_block, new_block);
739 }
740
741 /* Copy the whole block tree and root it in id->block. */
742
743 static tree
744 remap_blocks (tree block, copy_body_data *id)
745 {
746 tree t;
747 tree new_tree = block;
748
749 if (!block)
750 return NULL;
751
752 remap_block (&new_tree, id);
753 gcc_assert (new_tree != block);
754 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
755 prepend_lexical_block (new_tree, remap_blocks (t, id));
756   /* Blocks are in arbitrary order, but to make things slightly prettier, do
757      not swap their order when producing a copy.  */
758 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
759 return new_tree;
760 }
761
762 /* Remap the block tree rooted at BLOCK to nothing. */
763
764 static void
765 remap_blocks_to_null (tree block, copy_body_data *id)
766 {
767 tree t;
768 insert_decl_map (id, block, NULL_TREE);
769 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
770 remap_blocks_to_null (t, id);
771 }
772
773 /* Remap the location info pointed to by LOCUS. */
774
775 static location_t
776 remap_location (location_t locus, copy_body_data *id)
777 {
778 if (LOCATION_BLOCK (locus))
779 {
780 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
781 gcc_assert (n);
782 if (*n)
783 return set_block (locus, *n);
784 }
785
786 locus = LOCATION_LOCUS (locus);
787
788 if (locus != UNKNOWN_LOCATION && id->block)
789 return set_block (locus, id->block);
790
791 return locus;
792 }
793
794 static void
795 copy_statement_list (tree *tp)
796 {
797 tree_stmt_iterator oi, ni;
798 tree new_tree;
799
800 new_tree = alloc_stmt_list ();
801 ni = tsi_start (new_tree);
802 oi = tsi_start (*tp);
803 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
804 *tp = new_tree;
805
806 for (; !tsi_end_p (oi); tsi_next (&oi))
807 {
808 tree stmt = tsi_stmt (oi);
809 if (TREE_CODE (stmt) == STATEMENT_LIST)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt);
814 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
815 }
816 }
817
818 static void
819 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
820 {
821 tree block = BIND_EXPR_BLOCK (*tp);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp, walk_subtrees, NULL);
824 if (block)
825 {
826 remap_block (&block, id);
827 BIND_EXPR_BLOCK (*tp) = block;
828 }
829
830 if (BIND_EXPR_VARS (*tp))
831 /* This will remap a lot of the same decls again, but this should be
832 harmless. */
833 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
834 }
835
836
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
839
840 static gimple_seq
841 remap_gimple_seq (gimple_seq body, copy_body_data *id)
842 {
843 gimple_stmt_iterator si;
844 gimple_seq new_body = NULL;
845
846 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
847 {
848 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
849 gimple_seq_add_seq (&new_body, new_stmts);
850 }
851
852 return new_body;
853 }
854
855
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
858
859 static gimple *
860 copy_gimple_bind (gbind *stmt, copy_body_data *id)
861 {
862 gimple *new_bind;
863 tree new_block, new_vars;
864 gimple_seq body, new_body;
865
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body = gimple_bind_body (stmt);
869 new_body = remap_gimple_seq (body, id);
870
871 new_block = gimple_bind_block (stmt);
872 if (new_block)
873 remap_block (&new_block, id);
874
875 /* This will remap a lot of the same decls again, but this should be
876 harmless. */
877 new_vars = gimple_bind_vars (stmt);
878 if (new_vars)
879 new_vars = remap_decls (new_vars, NULL, id);
880
881 new_bind = gimple_build_bind (new_vars, new_body, new_block);
882
883 return new_bind;
884 }
885
886 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
887
888 static bool
889 is_parm (tree decl)
890 {
891 if (TREE_CODE (decl) == SSA_NAME)
892 {
893 decl = SSA_NAME_VAR (decl);
894 if (!decl)
895 return false;
896 }
897
898 return (TREE_CODE (decl) == PARM_DECL);
899 }
900
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
903
904 static unsigned short
905 remap_dependence_clique (copy_body_data *id, unsigned short clique)
906 {
907 if (clique == 0 || processing_debug_stmt)
908 return 0;
909 if (!id->dependence_map)
910 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
911 bool existed;
912 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
913 if (!existed)
914 {
915 /* Clique 1 is reserved for local ones set by PTA. */
916 if (cfun->last_clique == 0)
917 cfun->last_clique = 1;
918 newc = ++cfun->last_clique;
919 }
920 return newc;
921 }
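
/* Sketch (with invented numbers): if the source function used clique 2
   on a group of MEM_REFs, the first copied reference allocates a fresh
   clique in the destination function via ++cfun->last_clique, and every
   later occurrence of clique 2 from the same source body reuses that
   number, so clique-based disambiguation remains consistent after the
   copy.  */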
922
923 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
924 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
925    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
926    recursing into the child nodes of *TP.  */
927
928 static tree
929 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
930 {
931 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
932 copy_body_data *id = (copy_body_data *) wi_p->info;
933 tree fn = id->src_fn;
934
935 /* For recursive invocations this is no longer the LHS itself. */
936 bool is_lhs = wi_p->is_lhs;
937 wi_p->is_lhs = false;
938
939 if (TREE_CODE (*tp) == SSA_NAME)
940 {
941 *tp = remap_ssa_name (*tp, id);
942 *walk_subtrees = 0;
943 if (is_lhs)
944 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
945 return NULL;
946 }
947 else if (auto_var_in_fn_p (*tp, fn))
948 {
949 /* Local variables and labels need to be replaced by equivalent
950 variables. We don't want to copy static variables; there's
951 only one of those, no matter how many times we inline the
952 containing function. Similarly for globals from an outer
953 function. */
954 tree new_decl;
955
956 /* Remap the declaration. */
957 new_decl = remap_decl (*tp, id);
958 gcc_assert (new_decl);
959 /* Replace this variable with the copy. */
960 STRIP_TYPE_NOPS (new_decl);
961 /* ??? The C++ frontend uses void * pointer zero to initialize
962 any other type. This confuses the middle-end type verification.
963 	 As cloned bodies do not go through gimplification again, the fixup
964 there doesn't trigger. */
965 if (TREE_CODE (new_decl) == INTEGER_CST
966 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
967 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
968 *tp = new_decl;
969 *walk_subtrees = 0;
970 }
971 else if (TREE_CODE (*tp) == STATEMENT_LIST)
972 gcc_unreachable ();
973 else if (TREE_CODE (*tp) == SAVE_EXPR)
974 gcc_unreachable ();
975 else if (TREE_CODE (*tp) == LABEL_DECL
976 && (!DECL_CONTEXT (*tp)
977 || decl_function_context (*tp) == id->src_fn))
978 /* These may need to be remapped for EH handling. */
979 *tp = remap_decl (*tp, id);
980 else if (TREE_CODE (*tp) == FIELD_DECL)
981 {
982 /* If the enclosing record type is variably_modified_type_p, the field
983 has already been remapped. Otherwise, it need not be. */
984 tree *n = id->decl_map->get (*tp);
985 if (n)
986 *tp = *n;
987 *walk_subtrees = 0;
988 }
989 else if (TYPE_P (*tp))
990 /* Types may need remapping as well. */
991 *tp = remap_type (*tp, id);
992 else if (CONSTANT_CLASS_P (*tp))
993 {
994 /* If this is a constant, we have to copy the node iff the type
995 will be remapped. copy_tree_r will not copy a constant. */
996 tree new_type = remap_type (TREE_TYPE (*tp), id);
997
998 if (new_type == TREE_TYPE (*tp))
999 *walk_subtrees = 0;
1000
1001 else if (TREE_CODE (*tp) == INTEGER_CST)
1002 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1003 else
1004 {
1005 *tp = copy_node (*tp);
1006 TREE_TYPE (*tp) = new_type;
1007 }
1008 }
1009 else
1010 {
1011 /* Otherwise, just copy the node. Note that copy_tree_r already
1012 knows not to copy VAR_DECLs, etc., so this is safe. */
1013
1014 if (TREE_CODE (*tp) == MEM_REF)
1015 {
1016 /* We need to re-canonicalize MEM_REFs from inline substitutions
1017 that can happen when a pointer argument is an ADDR_EXPR.
1018 Recurse here manually to allow that. */
1019 tree ptr = TREE_OPERAND (*tp, 0);
1020 tree type = remap_type (TREE_TYPE (*tp), id);
1021 tree old = *tp;
1022 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1023 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1024 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1025 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1026 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1027 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1028 {
1029 MR_DEPENDENCE_CLIQUE (*tp)
1030 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1031 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1032 }
1033 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1034 remapped a parameter as the property might be valid only
1035 for the parameter itself. */
1036 if (TREE_THIS_NOTRAP (old)
1037 && (!is_parm (TREE_OPERAND (old, 0))
1038 || (!id->transform_parameter && is_parm (ptr))))
1039 TREE_THIS_NOTRAP (*tp) = 1;
1040 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1041 *walk_subtrees = 0;
1042 return NULL;
1043 }
1044
1045 /* Here is the "usual case". Copy this tree node, and then
1046 tweak some special cases. */
1047 copy_tree_r (tp, walk_subtrees, NULL);
1048
1049 if (TREE_CODE (*tp) != OMP_CLAUSE)
1050 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1051
1052 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1053 {
1054 /* The copied TARGET_EXPR has never been expanded, even if the
1055 original node was expanded already. */
1056 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1057 TREE_OPERAND (*tp, 3) = NULL_TREE;
1058 }
1059 else if (TREE_CODE (*tp) == ADDR_EXPR)
1060 {
1061 /* Variable substitution need not be simple. In particular,
1062 the MEM_REF substitution above. Make sure that
1063 TREE_CONSTANT and friends are up-to-date. */
1064 int invariant = is_gimple_min_invariant (*tp);
1065 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1066 recompute_tree_invariant_for_addr_expr (*tp);
1067
1068 /* If this used to be invariant, but is not any longer,
1069 then regimplification is probably needed. */
1070 if (invariant && !is_gimple_min_invariant (*tp))
1071 id->regimplify = true;
1072
1073 *walk_subtrees = 0;
1074 }
1075 }
1076
1077 /* Update the TREE_BLOCK for the cloned expr. */
1078 if (EXPR_P (*tp))
1079 {
1080 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1081 tree old_block = TREE_BLOCK (*tp);
1082 if (old_block)
1083 {
1084 tree *n;
1085 n = id->decl_map->get (TREE_BLOCK (*tp));
1086 if (n)
1087 new_block = *n;
1088 }
1089 TREE_SET_BLOCK (*tp, new_block);
1090 }
1091
1092 /* Keep iterating. */
1093 return NULL_TREE;
1094 }
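
/* Example for the MEM_REF case above (names invented for illustration):
   if the caller passes '&a' for a pointer parameter 'p', the callee
   reference MEM[p] becomes MEM[&a] after remapping, and the fold_build2
   call re-canonicalizes it to a direct reference to 'a'.  This is why
   that case recurses manually instead of letting the generic copy keep
   the node unchanged.  */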
1095
1096
1097 /* Called from copy_body_id via walk_tree. DATA is really a
1098 `copy_body_data *'. */
1099
1100 tree
1101 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1102 {
1103 copy_body_data *id = (copy_body_data *) data;
1104 tree fn = id->src_fn;
1105 tree new_block;
1106
1107 /* Begin by recognizing trees that we'll completely rewrite for the
1108 inlining context. Our output for these trees is completely
1109      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1110 into an edge). Further down, we'll handle trees that get
1111 duplicated and/or tweaked. */
1112
1113 /* When requested, RETURN_EXPRs should be transformed to just the
1114 contained MODIFY_EXPR. The branch semantics of the return will
1115 be handled elsewhere by manipulating the CFG rather than a statement. */
1116 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1117 {
1118 tree assignment = TREE_OPERAND (*tp, 0);
1119
1120 /* If we're returning something, just turn that into an
1121 assignment into the equivalent of the original RESULT_DECL.
1122 If the "assignment" is just the result decl, the result
1123 decl has already been set (e.g. a recent "foo (&result_decl,
1124 ...)"); just toss the entire RETURN_EXPR. */
1125 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1126 {
1127 /* Replace the RETURN_EXPR with (a copy of) the
1128 MODIFY_EXPR hanging underneath. */
1129 *tp = copy_node (assignment);
1130 }
1131 else /* Else the RETURN_EXPR returns no value. */
1132 {
1133 *tp = NULL;
1134 return (tree) (void *)1;
1135 }
1136 }
1137 else if (TREE_CODE (*tp) == SSA_NAME)
1138 {
1139 *tp = remap_ssa_name (*tp, id);
1140 *walk_subtrees = 0;
1141 return NULL;
1142 }
1143
1144 /* Local variables and labels need to be replaced by equivalent
1145 variables. We don't want to copy static variables; there's only
1146 one of those, no matter how many times we inline the containing
1147 function. Similarly for globals from an outer function. */
1148 else if (auto_var_in_fn_p (*tp, fn))
1149 {
1150 tree new_decl;
1151
1152 /* Remap the declaration. */
1153 new_decl = remap_decl (*tp, id);
1154 gcc_assert (new_decl);
1155 /* Replace this variable with the copy. */
1156 STRIP_TYPE_NOPS (new_decl);
1157 *tp = new_decl;
1158 *walk_subtrees = 0;
1159 }
1160 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1161 copy_statement_list (tp);
1162 else if (TREE_CODE (*tp) == SAVE_EXPR
1163 || TREE_CODE (*tp) == TARGET_EXPR)
1164 remap_save_expr (tp, id->decl_map, walk_subtrees);
1165 else if (TREE_CODE (*tp) == LABEL_DECL
1166 && (! DECL_CONTEXT (*tp)
1167 || decl_function_context (*tp) == id->src_fn))
1168 /* These may need to be remapped for EH handling. */
1169 *tp = remap_decl (*tp, id);
1170 else if (TREE_CODE (*tp) == BIND_EXPR)
1171 copy_bind_expr (tp, walk_subtrees, id);
1172 /* Types may need remapping as well. */
1173 else if (TYPE_P (*tp))
1174 *tp = remap_type (*tp, id);
1175
1176 /* If this is a constant, we have to copy the node iff the type will be
1177 remapped. copy_tree_r will not copy a constant. */
1178 else if (CONSTANT_CLASS_P (*tp))
1179 {
1180 tree new_type = remap_type (TREE_TYPE (*tp), id);
1181
1182 if (new_type == TREE_TYPE (*tp))
1183 *walk_subtrees = 0;
1184
1185 else if (TREE_CODE (*tp) == INTEGER_CST)
1186 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1187 else
1188 {
1189 *tp = copy_node (*tp);
1190 TREE_TYPE (*tp) = new_type;
1191 }
1192 }
1193
1194 /* Otherwise, just copy the node. Note that copy_tree_r already
1195 knows not to copy VAR_DECLs, etc., so this is safe. */
1196 else
1197 {
1198 /* Here we handle trees that are not completely rewritten.
1199 First we detect some inlining-induced bogosities for
1200 discarding. */
1201 if (TREE_CODE (*tp) == MODIFY_EXPR
1202 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1203 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1204 {
1205 /* Some assignments VAR = VAR; don't generate any rtl code
1206 and thus don't count as variable modification. Avoid
1207 keeping bogosities like 0 = 0. */
1208 tree decl = TREE_OPERAND (*tp, 0), value;
1209 tree *n;
1210
1211 n = id->decl_map->get (decl);
1212 if (n)
1213 {
1214 value = *n;
1215 STRIP_TYPE_NOPS (value);
1216 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1217 {
1218 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1219 return copy_tree_body_r (tp, walk_subtrees, data);
1220 }
1221 }
1222 }
1223 else if (TREE_CODE (*tp) == INDIRECT_REF)
1224 {
1225 /* Get rid of *& from inline substitutions that can happen when a
1226 pointer argument is an ADDR_EXPR. */
1227 tree decl = TREE_OPERAND (*tp, 0);
1228 tree *n = id->decl_map->get (decl);
1229 if (n)
1230 {
1231 /* If we happen to get an ADDR_EXPR in n->value, strip
1232 it manually here as we'll eventually get ADDR_EXPRs
1233 which lie about their types pointed to. In this case
1234 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1235 but we absolutely rely on that. As fold_indirect_ref
1236 does other useful transformations, try that first, though. */
1237 tree type = TREE_TYPE (*tp);
1238 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1239 tree old = *tp;
1240 *tp = gimple_fold_indirect_ref (ptr);
1241 if (! *tp)
1242 {
1243 type = remap_type (type, id);
1244 if (TREE_CODE (ptr) == ADDR_EXPR)
1245 {
1246 *tp
1247 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1248 /* ??? We should either assert here or build
1249 a VIEW_CONVERT_EXPR instead of blindly leaking
1250 incompatible types to our IL. */
1251 if (! *tp)
1252 *tp = TREE_OPERAND (ptr, 0);
1253 }
1254 else
1255 {
1256 *tp = build1 (INDIRECT_REF, type, ptr);
1257 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1258 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1259 TREE_READONLY (*tp) = TREE_READONLY (old);
1260 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1261 have remapped a parameter as the property might be
1262 valid only for the parameter itself. */
1263 if (TREE_THIS_NOTRAP (old)
1264 && (!is_parm (TREE_OPERAND (old, 0))
1265 || (!id->transform_parameter && is_parm (ptr))))
1266 TREE_THIS_NOTRAP (*tp) = 1;
1267 }
1268 }
1269 *walk_subtrees = 0;
1270 return NULL;
1271 }
1272 }
1273 else if (TREE_CODE (*tp) == MEM_REF)
1274 {
1275 /* We need to re-canonicalize MEM_REFs from inline substitutions
1276 that can happen when a pointer argument is an ADDR_EXPR.
1277 Recurse here manually to allow that. */
1278 tree ptr = TREE_OPERAND (*tp, 0);
1279 tree type = remap_type (TREE_TYPE (*tp), id);
1280 tree old = *tp;
1281 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1282 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1283 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1284 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1285 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1286 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1287 {
1288 MR_DEPENDENCE_CLIQUE (*tp)
1289 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1290 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1291 }
1292 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1293 remapped a parameter as the property might be valid only
1294 for the parameter itself. */
1295 if (TREE_THIS_NOTRAP (old)
1296 && (!is_parm (TREE_OPERAND (old, 0))
1297 || (!id->transform_parameter && is_parm (ptr))))
1298 TREE_THIS_NOTRAP (*tp) = 1;
1299 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1300 *walk_subtrees = 0;
1301 return NULL;
1302 }
1303
1304 /* Here is the "usual case". Copy this tree node, and then
1305 tweak some special cases. */
1306 copy_tree_r (tp, walk_subtrees, NULL);
1307
1308       /* If EXPR has a block defined, map it to the newly constructed block.
1309          When inlining we want EXPRs without a block to appear in the block
1310          of the function call if we are not remapping a type.  */
1311 if (EXPR_P (*tp))
1312 {
1313 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1314 if (TREE_BLOCK (*tp))
1315 {
1316 tree *n;
1317 n = id->decl_map->get (TREE_BLOCK (*tp));
1318 if (n)
1319 new_block = *n;
1320 }
1321 TREE_SET_BLOCK (*tp, new_block);
1322 }
1323
1324 if (TREE_CODE (*tp) != OMP_CLAUSE)
1325 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1326
1327 /* The copied TARGET_EXPR has never been expanded, even if the
1328 original node was expanded already. */
1329 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1330 {
1331 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1332 TREE_OPERAND (*tp, 3) = NULL_TREE;
1333 }
1334
1335 /* Variable substitution need not be simple. In particular, the
1336 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1337 and friends are up-to-date. */
1338 else if (TREE_CODE (*tp) == ADDR_EXPR)
1339 {
1340 int invariant = is_gimple_min_invariant (*tp);
1341 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1342
1343 /* Handle the case where we substituted an INDIRECT_REF
1344 into the operand of the ADDR_EXPR. */
1345 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1346 {
1347 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1348 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1349 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1350 *tp = t;
1351 }
1352 else
1353 recompute_tree_invariant_for_addr_expr (*tp);
1354
1355 /* If this used to be invariant, but is not any longer,
1356 then regimplification is probably needed. */
1357 if (invariant && !is_gimple_min_invariant (*tp))
1358 id->regimplify = true;
1359
1360 *walk_subtrees = 0;
1361 }
1362 }
1363
1364 /* Keep iterating. */
1365 return NULL_TREE;
1366 }
1367
1368 /* Helper for remap_gimple_stmt. Given an EH region number for the
1369 source function, map that to the duplicate EH region number in
1370 the destination function. */
1371
1372 static int
1373 remap_eh_region_nr (int old_nr, copy_body_data *id)
1374 {
1375 eh_region old_r, new_r;
1376
1377 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1378 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1379
1380 return new_r->index;
1381 }
1382
1383 /* Similar, but operate on INTEGER_CSTs. */
1384
1385 static tree
1386 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1387 {
1388 int old_nr, new_nr;
1389
1390 old_nr = tree_to_shwi (old_t_nr);
1391 new_nr = remap_eh_region_nr (old_nr, id);
1392
1393 return build_int_cst (integer_type_node, new_nr);
1394 }
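
/* Sketch (the region number is invented for illustration): a callee
   statement such as __builtin_eh_pointer (3) refers to EH region 3 of
   the callee; once the callee's regions have been duplicated into the
   caller, id->eh_map yields the duplicate region and the argument is
   rewritten to that region's index via the helpers above.  */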
1395
1396 /* Helper for copy_bb. Remap statement STMT using the inlining
1397 information in ID. Return the new statement copy. */
1398
1399 static gimple_seq
1400 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1401 {
1402 gimple *copy = NULL;
1403 struct walk_stmt_info wi;
1404 bool skip_first = false;
1405 gimple_seq stmts = NULL;
1406
1407 if (is_gimple_debug (stmt)
1408 && (gimple_debug_nonbind_marker_p (stmt)
1409 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1410 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1411 return NULL;
1412
1413 /* Begin by recognizing trees that we'll completely rewrite for the
1414 inlining context. Our output for these trees is completely
1415 different from our input (e.g. RETURN_EXPR is deleted and morphs
1416 into an edge). Further down, we'll handle trees that get
1417 duplicated and/or tweaked. */
1418
1419 /* When requested, GIMPLE_RETURN should be transformed to just the
1420 contained GIMPLE_ASSIGN. The branch semantics of the return will
1421 be handled elsewhere by manipulating the CFG rather than the
1422 statement. */
1423 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1424 {
1425 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1426
1427 /* If we're returning something, just turn that into an
1428 assignment to the equivalent of the original RESULT_DECL.
1429 If RETVAL is just the result decl, the result decl has
1430 already been set (e.g. a recent "foo (&result_decl, ...)");
1431 just toss the entire GIMPLE_RETURN. */
1432 if (retval
1433 && (TREE_CODE (retval) != RESULT_DECL
1434 && (TREE_CODE (retval) != SSA_NAME
1435 || ! SSA_NAME_VAR (retval)
1436 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1437 {
1438 copy = gimple_build_assign (id->do_not_unshare
1439 ? id->retvar : unshare_expr (id->retvar),
1440 retval);
1441 /* id->retvar is already substituted. Skip it on later remapping. */
1442 skip_first = true;
1443 }
1444 else
1445 return NULL;
1446 }
1447 else if (gimple_has_substatements (stmt))
1448 {
1449 gimple_seq s1, s2;
1450
1451 /* When cloning bodies from the C++ front end, we will be handed bodies
1452 in High GIMPLE form. Handle here all the High GIMPLE statements that
1453 have embedded statements. */
1454 switch (gimple_code (stmt))
1455 {
1456 case GIMPLE_BIND:
1457 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1458 break;
1459
1460 case GIMPLE_CATCH:
1461 {
1462 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1463 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1464 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1465 }
1466 break;
1467
1468 case GIMPLE_EH_FILTER:
1469 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1470 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1471 break;
1472
1473 case GIMPLE_TRY:
1474 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1475 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1476 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1477 break;
1478
1479 case GIMPLE_WITH_CLEANUP_EXPR:
1480 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1481 copy = gimple_build_wce (s1);
1482 break;
1483
1484 case GIMPLE_OMP_PARALLEL:
1485 {
1486 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1487 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1488 copy = gimple_build_omp_parallel
1489 (s1,
1490 gimple_omp_parallel_clauses (omp_par_stmt),
1491 gimple_omp_parallel_child_fn (omp_par_stmt),
1492 gimple_omp_parallel_data_arg (omp_par_stmt));
1493 }
1494 break;
1495
1496 case GIMPLE_OMP_TASK:
1497 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1498 copy = gimple_build_omp_task
1499 (s1,
1500 gimple_omp_task_clauses (stmt),
1501 gimple_omp_task_child_fn (stmt),
1502 gimple_omp_task_data_arg (stmt),
1503 gimple_omp_task_copy_fn (stmt),
1504 gimple_omp_task_arg_size (stmt),
1505 gimple_omp_task_arg_align (stmt));
1506 break;
1507
1508 case GIMPLE_OMP_FOR:
1509 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1510 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1511 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1512 gimple_omp_for_clauses (stmt),
1513 gimple_omp_for_collapse (stmt), s2);
1514 {
1515 size_t i;
1516 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1517 {
1518 gimple_omp_for_set_index (copy, i,
1519 gimple_omp_for_index (stmt, i));
1520 gimple_omp_for_set_initial (copy, i,
1521 gimple_omp_for_initial (stmt, i));
1522 gimple_omp_for_set_final (copy, i,
1523 gimple_omp_for_final (stmt, i));
1524 gimple_omp_for_set_incr (copy, i,
1525 gimple_omp_for_incr (stmt, i));
1526 gimple_omp_for_set_cond (copy, i,
1527 gimple_omp_for_cond (stmt, i));
1528 }
1529 }
1530 break;
1531
1532 case GIMPLE_OMP_MASTER:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_master (s1);
1535 break;
1536
1537 case GIMPLE_OMP_TASKGROUP:
1538 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1539 copy = gimple_build_omp_taskgroup
1540 (s1, gimple_omp_taskgroup_clauses (stmt));
1541 break;
1542
1543 case GIMPLE_OMP_ORDERED:
1544 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1545 copy = gimple_build_omp_ordered
1546 (s1,
1547 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1548 break;
1549
1550 case GIMPLE_OMP_SECTION:
1551 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1552 copy = gimple_build_omp_section (s1);
1553 break;
1554
1555 case GIMPLE_OMP_SECTIONS:
1556 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1557 copy = gimple_build_omp_sections
1558 (s1, gimple_omp_sections_clauses (stmt));
1559 break;
1560
1561 case GIMPLE_OMP_SINGLE:
1562 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1563 copy = gimple_build_omp_single
1564 (s1, gimple_omp_single_clauses (stmt));
1565 break;
1566
1567 case GIMPLE_OMP_TARGET:
1568 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1569 copy = gimple_build_omp_target
1570 (s1, gimple_omp_target_kind (stmt),
1571 gimple_omp_target_clauses (stmt));
1572 break;
1573
1574 case GIMPLE_OMP_TEAMS:
1575 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1576 copy = gimple_build_omp_teams
1577 (s1, gimple_omp_teams_clauses (stmt));
1578 break;
1579
1580 case GIMPLE_OMP_CRITICAL:
1581 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1582 copy = gimple_build_omp_critical (s1,
1583 gimple_omp_critical_name
1584 (as_a <gomp_critical *> (stmt)),
1585 gimple_omp_critical_clauses
1586 (as_a <gomp_critical *> (stmt)));
1587 break;
1588
1589 case GIMPLE_TRANSACTION:
1590 {
1591 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1592 gtransaction *new_trans_stmt;
1593 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1594 id);
1595 copy = new_trans_stmt = gimple_build_transaction (s1);
1596 gimple_transaction_set_subcode (new_trans_stmt,
1597 gimple_transaction_subcode (old_trans_stmt));
1598 gimple_transaction_set_label_norm (new_trans_stmt,
1599 gimple_transaction_label_norm (old_trans_stmt));
1600 gimple_transaction_set_label_uninst (new_trans_stmt,
1601 gimple_transaction_label_uninst (old_trans_stmt));
1602 gimple_transaction_set_label_over (new_trans_stmt,
1603 gimple_transaction_label_over (old_trans_stmt));
1604 }
1605 break;
1606
1607 default:
1608 gcc_unreachable ();
1609 }
1610 }
1611 else
1612 {
1613 if (gimple_assign_copy_p (stmt)
1614 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1615 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1616 {
1617 /* Here we handle statements that are not completely rewritten.
1618 First we detect some inlining-induced bogosities for
1619 discarding. */
1620
1621 /* Some assignments VAR = VAR; don't generate any rtl code
1622 and thus don't count as variable modification. Avoid
1623 keeping bogosities like 0 = 0. */
1624 tree decl = gimple_assign_lhs (stmt), value;
1625 tree *n;
1626
1627 n = id->decl_map->get (decl);
1628 if (n)
1629 {
1630 value = *n;
1631 STRIP_TYPE_NOPS (value);
1632 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1633 return NULL;
1634 }
1635 }
1636
1637       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1638 in a block that we aren't copying during tree_function_versioning,
1639 just drop the clobber stmt. */
1640 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1641 {
1642 tree lhs = gimple_assign_lhs (stmt);
1643 if (TREE_CODE (lhs) == MEM_REF
1644 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1645 {
1646 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1647 if (gimple_bb (def_stmt)
1648 && !bitmap_bit_p (id->blocks_to_copy,
1649 gimple_bb (def_stmt)->index))
1650 return NULL;
1651 }
1652 }
1653
1654 if (gimple_debug_bind_p (stmt))
1655 {
1656 gdebug *copy
1657 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1658 gimple_debug_bind_get_value (stmt),
1659 stmt);
1660 if (id->reset_location)
1661 gimple_set_location (copy, input_location);
1662 id->debug_stmts.safe_push (copy);
1663 gimple_seq_add_stmt (&stmts, copy);
1664 return stmts;
1665 }
1666 if (gimple_debug_source_bind_p (stmt))
1667 {
1668 gdebug *copy = gimple_build_debug_source_bind
1669 (gimple_debug_source_bind_get_var (stmt),
1670 gimple_debug_source_bind_get_value (stmt),
1671 stmt);
1672 if (id->reset_location)
1673 gimple_set_location (copy, input_location);
1674 id->debug_stmts.safe_push (copy);
1675 gimple_seq_add_stmt (&stmts, copy);
1676 return stmts;
1677 }
1678 if (gimple_debug_nonbind_marker_p (stmt))
1679 {
1680 /* If the inlined function has too many debug markers,
1681 don't copy them. */
1682 if (id->src_cfun->debug_marker_count
1683 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1684 return stmts;
1685
1686 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1687 if (id->reset_location)
1688 gimple_set_location (copy, input_location);
1689 id->debug_stmts.safe_push (copy);
1690 gimple_seq_add_stmt (&stmts, copy);
1691 return stmts;
1692 }
1693
1694 /* Create a new deep copy of the statement. */
1695 copy = gimple_copy (stmt);
1696
1697 /* Clear flags that need revisiting. */
1698 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1699 {
1700 if (gimple_call_tail_p (call_stmt))
1701 gimple_call_set_tail (call_stmt, false);
1702 if (gimple_call_from_thunk_p (call_stmt))
1703 gimple_call_set_from_thunk (call_stmt, false);
1704 if (gimple_call_internal_p (call_stmt))
1705 switch (gimple_call_internal_fn (call_stmt))
1706 {
1707 case IFN_GOMP_SIMD_LANE:
1708 case IFN_GOMP_SIMD_VF:
1709 case IFN_GOMP_SIMD_LAST_LANE:
1710 case IFN_GOMP_SIMD_ORDERED_START:
1711 case IFN_GOMP_SIMD_ORDERED_END:
1712 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1713 break;
1714 default:
1715 break;
1716 }
1717 }
1718
1719 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1720 RESX and EH_DISPATCH. */
1721 if (id->eh_map)
1722 switch (gimple_code (copy))
1723 {
1724 case GIMPLE_CALL:
1725 {
1726 tree r, fndecl = gimple_call_fndecl (copy);
1727 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1728 switch (DECL_FUNCTION_CODE (fndecl))
1729 {
1730 case BUILT_IN_EH_COPY_VALUES:
1731 r = gimple_call_arg (copy, 1);
1732 r = remap_eh_region_tree_nr (r, id);
1733 gimple_call_set_arg (copy, 1, r);
1734 /* FALLTHRU */
1735
1736 case BUILT_IN_EH_POINTER:
1737 case BUILT_IN_EH_FILTER:
1738 r = gimple_call_arg (copy, 0);
1739 r = remap_eh_region_tree_nr (r, id);
1740 gimple_call_set_arg (copy, 0, r);
1741 break;
1742
1743 default:
1744 break;
1745 }
1746
1747 /* Reset alias info if we didn't take measures to
1748 keep it valid across inlining by setting DECL_PT_UID. */
1749 if (!id->src_cfun->gimple_df
1750 || !id->src_cfun->gimple_df->ipa_pta)
1751 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1752 }
1753 break;
1754
1755 case GIMPLE_RESX:
1756 {
1757 gresx *resx_stmt = as_a <gresx *> (copy);
1758 int r = gimple_resx_region (resx_stmt);
1759 r = remap_eh_region_nr (r, id);
1760 gimple_resx_set_region (resx_stmt, r);
1761 }
1762 break;
1763
1764 case GIMPLE_EH_DISPATCH:
1765 {
1766 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1767 int r = gimple_eh_dispatch_region (eh_dispatch);
1768 r = remap_eh_region_nr (r, id);
1769 gimple_eh_dispatch_set_region (eh_dispatch, r);
1770 }
1771 break;
1772
1773 default:
1774 break;
1775 }
1776 }
1777
1778 /* If STMT has a block defined, map it to the newly constructed block. */
1779 if (gimple_block (copy))
1780 {
1781 tree *n;
1782 n = id->decl_map->get (gimple_block (copy));
1783 gcc_assert (n);
1784 gimple_set_block (copy, *n);
1785 }
1786
1787 if (id->reset_location)
1788 gimple_set_location (copy, input_location);
1789
1790 /* Debug statements ought to be rebuilt and not copied. */
1791 gcc_checking_assert (!is_gimple_debug (copy));
1792
1793 /* Remap all the operands in COPY. */
1794 memset (&wi, 0, sizeof (wi));
1795 wi.info = id;
1796 if (skip_first)
1797 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1798 else
1799 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1800
1801 /* Clear the copied virtual operands. We are not remapping them here
1802 but are going to recreate them from scratch. */
1803 if (gimple_has_mem_ops (copy))
1804 {
1805 gimple_set_vdef (copy, NULL_TREE);
1806 gimple_set_vuse (copy, NULL_TREE);
1807 }
1808
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1811 }
1812
1813
1814 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1815 later. */
1816
1817 static basic_block
1818 copy_bb (copy_body_data *id, basic_block bb,
1819 profile_count num, profile_count den)
1820 {
1821 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1822 basic_block copy_basic_block;
1823 tree decl;
1824 basic_block prev;
1825
1826 profile_count::adjust_for_ipa_scaling (&num, &den);
1827
1828 /* Search for previous copied basic block. */
1829 prev = bb->prev_bb;
1830 while (!prev->aux)
1831 prev = prev->prev_bb;
1832
1833 /* create_basic_block() will append every new block to
1834 basic_block_info automatically. */
1835 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1836 copy_basic_block->count = bb->count.apply_scale (num, den);
1837
1838 copy_gsi = gsi_start_bb (copy_basic_block);
1839
1840 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1841 {
1842 gimple_seq stmts;
1843 gimple *stmt = gsi_stmt (gsi);
1844 gimple *orig_stmt = stmt;
1845 gimple_stmt_iterator stmts_gsi;
1846 bool stmt_added = false;
1847
1848 id->regimplify = false;
1849 stmts = remap_gimple_stmt (stmt, id);
1850
1851 if (gimple_seq_empty_p (stmts))
1852 continue;
1853
1854 seq_gsi = copy_gsi;
1855
1856 for (stmts_gsi = gsi_start (stmts);
1857 !gsi_end_p (stmts_gsi); )
1858 {
1859 stmt = gsi_stmt (stmts_gsi);
1860
1861 /* Advance iterator now before stmt is moved to seq_gsi. */
1862 gsi_next (&stmts_gsi);
1863
1864 if (gimple_nop_p (stmt))
1865 continue;
1866
1867 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1868 orig_stmt);
1869
1870 /* With return slot optimization we can end up with
1871 non-gimple (foo *)&this->m, fix that here. */
1872 if (is_gimple_assign (stmt)
1873 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1874 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1875 {
1876 tree new_rhs;
1877 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1878 gimple_assign_rhs1 (stmt),
1879 true, NULL, false,
1880 GSI_CONTINUE_LINKING);
1881 gimple_assign_set_rhs1 (stmt, new_rhs);
1882 id->regimplify = false;
1883 }
1884
1885 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1886
1887 if (id->regimplify)
1888 gimple_regimplify_operands (stmt, &seq_gsi);
1889
1890 stmt_added = true;
1891 }
1892
1893 if (!stmt_added)
1894 continue;
1895
1896 /* If copy_basic_block has been empty at the start of this iteration,
1897 call gsi_start_bb again to get at the newly added statements. */
1898 if (gsi_end_p (copy_gsi))
1899 copy_gsi = gsi_start_bb (copy_basic_block);
1900 else
1901 gsi_next (&copy_gsi);
1902
1903 /* Process the new statement. The call to gimple_regimplify_operands
1904 may have turned the statement into multiple statements; we
1905 need to process all of them. */
1906 do
1907 {
1908 tree fn;
1909 gcall *call_stmt;
1910
1911 stmt = gsi_stmt (copy_gsi);
1912 call_stmt = dyn_cast <gcall *> (stmt);
1913 if (call_stmt
1914 && gimple_call_va_arg_pack_p (call_stmt)
1915 && id->call_stmt
1916 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1917 {
1918 /* __builtin_va_arg_pack () should be replaced by
1919 all arguments corresponding to ... in the caller. */
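/* A rough illustration with hypothetical functions: if the inlined body
   contains "t = bar (1, __builtin_va_arg_pack ());" and we are inlining
   a call "foo (x, 5, 6)" where foo declares one named parameter before
   its ellipsis, the call rebuilt below becomes "t = bar (1, 5, 6);",
   i.e. the explicit arguments of the inner call followed by the
   caller's anonymous arguments. */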
1920 tree p;
1921 gcall *new_call;
1922 vec<tree> argarray;
1923 size_t nargs = gimple_call_num_args (id->call_stmt);
1924 size_t n;
1925
1926 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1927 nargs--;
1928
1929 /* Create the new array of arguments. */
1930 n = nargs + gimple_call_num_args (call_stmt);
1931 argarray.create (n);
1932 argarray.safe_grow_cleared (n);
1933
1934 /* Copy all the arguments before '...' */
1935 memcpy (argarray.address (),
1936 gimple_call_arg_ptr (call_stmt, 0),
1937 gimple_call_num_args (call_stmt) * sizeof (tree));
1938
1939 /* Append the arguments passed in '...' */
1940 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1941 gimple_call_arg_ptr (id->call_stmt, 0)
1942 + (gimple_call_num_args (id->call_stmt) - nargs),
1943 nargs * sizeof (tree));
1944
1945 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1946 argarray);
1947
1948 argarray.release ();
1949
1950 /* Copy all GIMPLE_CALL flags, location and block, except
1951 GF_CALL_VA_ARG_PACK. */
1952 gimple_call_copy_flags (new_call, call_stmt);
1953 gimple_call_set_va_arg_pack (new_call, false);
1954 gimple_set_location (new_call, gimple_location (stmt));
1955 gimple_set_block (new_call, gimple_block (stmt));
1956 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1957
1958 gsi_replace (&copy_gsi, new_call, false);
1959 stmt = new_call;
1960 }
1961 else if (call_stmt
1962 && id->call_stmt
1963 && (decl = gimple_call_fndecl (stmt))
1964 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
1965 {
1966 /* __builtin_va_arg_pack_len () should be replaced by
1967 the number of anonymous arguments. */
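/* E.g. (hypothetical numbers) if the caller's call statement passes five
   arguments and the inlined function declares two named parameters, nargs
   ends up as three, so the __builtin_va_arg_pack_len () result is folded
   to the constant 3; when the caller itself uses __builtin_va_arg_pack (),
   the code below instead emits an addition of 3 to a new temporary and
   keeps the call on that temporary so it can be expanded again later. */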
1968 size_t nargs = gimple_call_num_args (id->call_stmt);
1969 tree count, p;
1970 gimple *new_stmt;
1971
1972 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1973 nargs--;
1974
1975 if (!gimple_call_lhs (stmt))
1976 {
1977 /* Drop unused calls. */
1978 gsi_remove (&copy_gsi, false);
1979 continue;
1980 }
1981 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1982 {
1983 count = build_int_cst (integer_type_node, nargs);
1984 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1985 gsi_replace (&copy_gsi, new_stmt, false);
1986 stmt = new_stmt;
1987 }
1988 else if (nargs != 0)
1989 {
1990 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1991 count = build_int_cst (integer_type_node, nargs);
1992 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1993 PLUS_EXPR, newlhs, count);
1994 gimple_call_set_lhs (stmt, newlhs);
1995 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1996 }
1997 }
1998 else if (call_stmt
1999 && id->call_stmt
2000 && gimple_call_internal_p (stmt)
2001 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2002 {
2003 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2004 gsi_remove (&copy_gsi, false);
2005 continue;
2006 }
2007
2008 /* Statements produced by inlining can be unfolded, especially
2009 when we constant propagated some operands. We can't fold
2010 them right now for two reasons:
2011 1) folding requires SSA_NAME_DEF_STMTs to be correct
2012 2) we can't change function calls into builtins.
2013 So we just mark the statement for later folding. We mark
2014 all new statements, instead of just the statements that changed
2015 by some nontrivial substitution, so even statements made
2016 foldable indirectly are updated. If this turns out to be
2017 expensive, copy_body can be told to watch for nontrivial
2018 changes. */
2019 if (id->statements_to_fold)
2020 id->statements_to_fold->add (stmt);
2021
2022 /* We're duplicating a CALL_EXPR. Find any corresponding
2023 callgraph edges and update or duplicate them. */
2024 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2025 {
2026 struct cgraph_edge *edge;
2027
2028 switch (id->transform_call_graph_edges)
2029 {
2030 case CB_CGE_DUPLICATE:
2031 edge = id->src_node->get_edge (orig_stmt);
2032 if (edge)
2033 {
2034 struct cgraph_edge *old_edge = edge;
2035 profile_count old_cnt = edge->count;
2036 edge = edge->clone (id->dst_node, call_stmt,
2037 gimple_uid (stmt),
2038 num, den,
2039 true);
2040
2041 /* Speculative calls consist of two edges - direct and
2042 indirect. Duplicate the whole thing and distribute
2043 frequencies accordingly. */
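/* Sketch of the redistribution below (made-up counts): if the original
   direct edge had count 90 and the indirect edge count 10, the cloned
   indirect edge gets 10/(90+10) of the copied block's count and the
   direct edge receives the remainder. */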
2044 if (edge->speculative)
2045 {
2046 struct cgraph_edge *direct, *indirect;
2047 struct ipa_ref *ref;
2048
2049 gcc_assert (!edge->indirect_unknown_callee);
2050 old_edge->speculative_call_info (direct, indirect, ref);
2051
2052 profile_count indir_cnt = indirect->count;
2053 indirect = indirect->clone (id->dst_node, call_stmt,
2054 gimple_uid (stmt),
2055 num, den,
2056 true);
2057
2058 profile_probability prob
2059 = indir_cnt.probability_in (old_cnt + indir_cnt);
2060 indirect->count
2061 = copy_basic_block->count.apply_probability (prob);
2062 edge->count = copy_basic_block->count - indirect->count;
2063 id->dst_node->clone_reference (ref, stmt);
2064 }
2065 else
2066 edge->count = copy_basic_block->count;
2067 }
2068 break;
2069
2070 case CB_CGE_MOVE_CLONES:
2071 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2072 call_stmt);
2073 edge = id->dst_node->get_edge (stmt);
2074 break;
2075
2076 case CB_CGE_MOVE:
2077 edge = id->dst_node->get_edge (orig_stmt);
2078 if (edge)
2079 edge->set_call_stmt (call_stmt);
2080 break;
2081
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 /* Constant propagation on arguments done during inlining
2087 may create a new direct call. Produce an edge for it. */
2088 if ((!edge
2089 || (edge->indirect_inlining_edge
2090 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2091 && id->dst_node->definition
2092 && (fn = gimple_call_fndecl (stmt)) != NULL)
2093 {
2094 struct cgraph_node *dest = cgraph_node::get_create (fn);
2095
2096 /* We have a missing edge in the callgraph. This can happen
2097 when previous inlining turned an indirect call into a
2098 direct call by constant propagating arguments, or when we are
2099 producing a dead clone (for further cloning). In all
2100 other cases we hit a bug (incorrect node sharing is the
2101 most common reason for missing edges). */
2102 gcc_assert (!dest->definition
2103 || dest->address_taken
2104 || !id->src_node->definition
2105 || !id->dst_node->definition);
2106 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2107 id->dst_node->create_edge_including_clones
2108 (dest, orig_stmt, call_stmt, bb->count,
2109 CIF_ORIGINALLY_INDIRECT_CALL);
2110 else
2111 id->dst_node->create_edge (dest, call_stmt,
2112 bb->count)->inline_failed
2113 = CIF_ORIGINALLY_INDIRECT_CALL;
2114 if (dump_file)
2115 {
2116 fprintf (dump_file, "Created new direct edge to %s\n",
2117 dest->name ());
2118 }
2119 }
2120
2121 notice_special_calls (as_a <gcall *> (stmt));
2122 }
2123
2124 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2125 id->eh_map, id->eh_lp_nr);
2126
2127 gsi_next (&copy_gsi);
2128 }
2129 while (!gsi_end_p (copy_gsi));
2130
2131 copy_gsi = gsi_last_bb (copy_basic_block);
2132 }
2133
2134 return copy_basic_block;
2135 }
2136
2137 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2138 form is quite easy, since the dominator relationship for old basic blocks does
2139 not change.
2140
2141 There is however an exception where inlining might change the dominator
2142 relation across EH edges from basic blocks within the inlined function
2143 to landing pads in the function we inline into.
2144
2145 The function fills in PHI_RESULTs of such PHI nodes if they refer
2146 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2147 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2148 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2149 set, and this means that there will be no overlapping live ranges
2150 for the underlying symbol.
2151
2152 This might change in the future if we allow redirecting of EH edges, and
2153 we might then want to change the way we build the CFG pre-inlining to
2154 include all the possible edges. */
2155 static void
2156 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2157 bool can_throw, bool nonlocal_goto)
2158 {
2159 edge e;
2160 edge_iterator ei;
2161
2162 FOR_EACH_EDGE (e, ei, bb->succs)
2163 if (!e->dest->aux
2164 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2165 {
2166 gphi *phi;
2167 gphi_iterator si;
2168
2169 if (!nonlocal_goto)
2170 gcc_assert (e->flags & EDGE_EH);
2171
2172 if (!can_throw)
2173 gcc_assert (!(e->flags & EDGE_EH));
2174
2175 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2176 {
2177 edge re;
2178
2179 phi = si.phi ();
2180
2181 /* For abnormal goto/call edges the receiver can be the
2182 ENTRY_BLOCK. Do not assert this cannot happen. */
2183
2184 gcc_assert ((e->flags & EDGE_EH)
2185 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2186
2187 re = find_edge (ret_bb, e->dest);
2188 gcc_checking_assert (re);
2189 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2190 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2191
2192 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2193 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2194 }
2195 }
2196 }
2197
2198 /* Insert clobbers for automatic variables of inlined ID->src_fn
2199 function at the start of basic block BB. */
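/* Each inserted statement has the form "new_var ={v} {CLOBBER};", marking
   the remapped automatic as dead on the exception path; presumably this
   lets later passes reuse or drop the corresponding stack slots. */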
2200
2201 static void
2202 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2203 {
2204 tree var;
2205 unsigned int i;
2206 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2207 if (VAR_P (var)
2208 && !DECL_HARD_REGISTER (var)
2209 && !TREE_THIS_VOLATILE (var)
2210 && !DECL_HAS_VALUE_EXPR_P (var)
2211 && !is_gimple_reg (var)
2212 && auto_var_in_fn_p (var, id->src_fn)
2213 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2214 {
2215 tree *t = id->decl_map->get (var);
2216 if (!t)
2217 continue;
2218 tree new_var = *t;
2219 if (VAR_P (new_var)
2220 && !DECL_HARD_REGISTER (new_var)
2221 && !TREE_THIS_VOLATILE (new_var)
2222 && !DECL_HAS_VALUE_EXPR_P (new_var)
2223 && !is_gimple_reg (new_var)
2224 && auto_var_in_fn_p (new_var, id->dst_fn))
2225 {
2226 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2227 tree clobber = build_clobber (TREE_TYPE (new_var));
2228 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2229 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2230 }
2231 }
2232 }
2233
2234 /* Copy edges from BB into its copy constructed earlier, scale the profile
2235 accordingly. Edges will be taken care of later. Assume the aux
2236 pointers point to the copies of each BB. Return true if any
2237 debug stmts are left after a statement that must end the basic block. */
2238
2239 static bool
2240 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2241 basic_block ret_bb, basic_block abnormal_goto_dest,
2242 copy_body_data *id)
2243 {
2244 basic_block new_bb = (basic_block) bb->aux;
2245 edge_iterator ei;
2246 edge old_edge;
2247 gimple_stmt_iterator si;
2248 bool need_debug_cleanup = false;
2249
2250 /* Use the indices from the original blocks to create edges for the
2251 new ones. */
2252 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2253 if (!(old_edge->flags & EDGE_EH))
2254 {
2255 edge new_edge;
2256 int flags = old_edge->flags;
2257 location_t locus = old_edge->goto_locus;
2258
2259 /* Return edges do get a FALLTHRU flag when they get inlined. */
2260 if (old_edge->dest->index == EXIT_BLOCK
2261 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2262 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2263 flags |= EDGE_FALLTHRU;
2264
2265 new_edge
2266 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2267 new_edge->probability = old_edge->probability;
2268 if (!id->reset_location)
2269 new_edge->goto_locus = remap_location (locus, id);
2270 }
2271
2272 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2273 return false;
2274
2275 /* When doing function splitting, we must decrease the count of the return
2276 block, which was previously reachable from blocks we did not copy. */
2277 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2278 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2279 if (old_edge->src->index != ENTRY_BLOCK
2280 && !old_edge->src->aux)
2281 new_bb->count -= old_edge->count ().apply_scale (num, den);
2282
2283 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2284 {
2285 gimple *copy_stmt;
2286 bool can_throw, nonlocal_goto;
2287
2288 copy_stmt = gsi_stmt (si);
2289 if (!is_gimple_debug (copy_stmt))
2290 update_stmt (copy_stmt);
2291
2292 /* Do this before the possible split_block. */
2293 gsi_next (&si);
2294
2295 /* If this tree could throw an exception, there are two
2296 cases where we need to add abnormal edge(s): the
2297 tree wasn't in a region and there is a "current
2298 region" in the caller; or the original tree had
2299 EH edges. In both cases split the block after the tree,
2300 and add abnormal edge(s) as needed; we need both
2301 those from the callee and the caller.
2302 We check whether the copy can throw, because constant
2303 propagation can change an INDIRECT_REF which throws
2304 into a COMPONENT_REF which doesn't. If the copy
2305 can throw, the original could also throw. */
2306 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2307 nonlocal_goto
2308 = (stmt_can_make_abnormal_goto (copy_stmt)
2309 && !computed_goto_p (copy_stmt));
2310
2311 if (can_throw || nonlocal_goto)
2312 {
2313 if (!gsi_end_p (si))
2314 {
2315 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2316 gsi_next (&si);
2317 if (gsi_end_p (si))
2318 need_debug_cleanup = true;
2319 }
2320 if (!gsi_end_p (si))
2321 /* Note that bb's predecessor edges aren't necessarily
2322 right at this point; split_block doesn't care. */
2323 {
2324 edge e = split_block (new_bb, copy_stmt);
2325
2326 new_bb = e->dest;
2327 new_bb->aux = e->src->aux;
2328 si = gsi_start_bb (new_bb);
2329 }
2330 }
2331
2332 bool update_probs = false;
2333
2334 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2335 {
2336 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2337 update_probs = true;
2338 }
2339 else if (can_throw)
2340 {
2341 make_eh_edges (copy_stmt);
2342 update_probs = true;
2343 }
2344
2345 /* EH edges may not match old edges. Copy as much as possible. */
2346 if (update_probs)
2347 {
2348 edge e;
2349 edge_iterator ei;
2350 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2351
2352 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2353 if ((old_edge->flags & EDGE_EH)
2354 && (e = find_edge (copy_stmt_bb,
2355 (basic_block) old_edge->dest->aux))
2356 && (e->flags & EDGE_EH))
2357 e->probability = old_edge->probability;
2358
2359 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2360 if (e->flags & EDGE_EH)
2361 {
2362 if (!e->probability.initialized_p ())
2363 e->probability = profile_probability::never ();
2364 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2365 {
2366 add_clobbers_to_eh_landing_pad (e->dest, id);
2367 id->add_clobbers_to_eh_landing_pads = 0;
2368 }
2369 }
2370 }
2371
2372
2373 /* If the call we inline cannot make abnormal goto do not add
2374 additional abnormal edges but only retain those already present
2375 in the original function body. */
2376 if (abnormal_goto_dest == NULL)
2377 nonlocal_goto = false;
2378 if (nonlocal_goto)
2379 {
2380 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2381
2382 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2383 nonlocal_goto = false;
2384 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2385 in OpenMP regions which aren't allowed to be left abnormally.
2386 So, no need to add abnormal edge in that case. */
2387 else if (is_gimple_call (copy_stmt)
2388 && gimple_call_internal_p (copy_stmt)
2389 && (gimple_call_internal_fn (copy_stmt)
2390 == IFN_ABNORMAL_DISPATCHER)
2391 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2392 nonlocal_goto = false;
2393 else
2394 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2395 EDGE_ABNORMAL);
2396 }
2397
2398 if ((can_throw || nonlocal_goto)
2399 && gimple_in_ssa_p (cfun))
2400 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2401 can_throw, nonlocal_goto);
2402 }
2403 return need_debug_cleanup;
2404 }
2405
2406 /* Copy the PHIs. All blocks and edges are copied; some blocks
2407 were possibly split and new outgoing EH edges inserted.
2408 BB points to the block of the original function and AUX pointers link
2409 the original and newly copied blocks. */
2410
2411 static void
2412 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2413 {
2414 basic_block const new_bb = (basic_block) bb->aux;
2415 edge_iterator ei;
2416 gphi *phi;
2417 gphi_iterator si;
2418 edge new_edge;
2419 bool inserted = false;
2420
2421 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2422 {
2423 tree res, new_res;
2424 gphi *new_phi;
2425
2426 phi = si.phi ();
2427 res = PHI_RESULT (phi);
2428 new_res = res;
2429 if (!virtual_operand_p (res))
2430 {
2431 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2432 if (EDGE_COUNT (new_bb->preds) == 0)
2433 {
2434 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2435 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2436 }
2437 else
2438 {
2439 new_phi = create_phi_node (new_res, new_bb);
2440 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2441 {
2442 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2443 bb);
2444 tree arg;
2445 tree new_arg;
2446 edge_iterator ei2;
2447 location_t locus;
2448
2449 /* When doing partial cloning, we allow PHIs on the entry
2450 block as long as all the arguments are the same.
2451 Find any input edge to see which argument to copy. */
2452 if (!old_edge)
2453 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2454 if (!old_edge->src->aux)
2455 break;
2456
2457 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2458 new_arg = arg;
2459 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2460 gcc_assert (new_arg);
2461 /* With return slot optimization we can end up with
2462 non-gimple (foo *)&this->m, fix that here. */
2463 if (TREE_CODE (new_arg) != SSA_NAME
2464 && TREE_CODE (new_arg) != FUNCTION_DECL
2465 && !is_gimple_val (new_arg))
2466 {
2467 gimple_seq stmts = NULL;
2468 new_arg = force_gimple_operand (new_arg, &stmts, true,
2469 NULL);
2470 gsi_insert_seq_on_edge (new_edge, stmts);
2471 inserted = true;
2472 }
2473 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2474 if (id->reset_location)
2475 locus = input_location;
2476 else
2477 locus = remap_location (locus, id);
2478 add_phi_arg (new_phi, new_arg, new_edge, locus);
2479 }
2480 }
2481 }
2482 }
2483
2484 /* Commit the delayed edge insertions. */
2485 if (inserted)
2486 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2487 gsi_commit_one_edge_insert (new_edge, NULL);
2488 }
2489
2490
2491 /* Wrapper for remap_decl so it can be used as a callback. */
2492
2493 static tree
2494 remap_decl_1 (tree decl, void *data)
2495 {
2496 return remap_decl (decl, (copy_body_data *) data);
2497 }
2498
2499 /* Build struct function and associated datastructures for the new clone
2500 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2501 cfun to the function of new_fndecl (and current_function_decl too). */
2502
2503 static void
2504 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2505 {
2506 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2507
2508 if (!DECL_ARGUMENTS (new_fndecl))
2509 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2510 if (!DECL_RESULT (new_fndecl))
2511 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2512
2513 /* Register specific tree functions. */
2514 gimple_register_cfg_hooks ();
2515
2516 /* Get clean struct function. */
2517 push_struct_function (new_fndecl);
2518
2519 /* We will rebuild these, so just sanity check that they are empty. */
2520 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2521 gcc_assert (cfun->local_decls == NULL);
2522 gcc_assert (cfun->cfg == NULL);
2523 gcc_assert (cfun->decl == new_fndecl);
2524
2525 /* Copy items we preserve during cloning. */
2526 cfun->static_chain_decl = src_cfun->static_chain_decl;
2527 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2528 cfun->function_end_locus = src_cfun->function_end_locus;
2529 cfun->curr_properties = src_cfun->curr_properties;
2530 cfun->last_verified = src_cfun->last_verified;
2531 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2532 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2533 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2534 cfun->stdarg = src_cfun->stdarg;
2535 cfun->after_inlining = src_cfun->after_inlining;
2536 cfun->can_throw_non_call_exceptions
2537 = src_cfun->can_throw_non_call_exceptions;
2538 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2539 cfun->returns_struct = src_cfun->returns_struct;
2540 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2541
2542 init_empty_tree_cfg ();
2543
2544 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2545
2546 profile_count num = count;
2547 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2548 profile_count::adjust_for_ipa_scaling (&num, &den);
2549
2550 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2551 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2552 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2553 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2554 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2555 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2556 if (src_cfun->eh)
2557 init_eh_for_function ();
2558
2559 if (src_cfun->gimple_df)
2560 {
2561 init_tree_ssa (cfun);
2562 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2563 if (cfun->gimple_df->in_ssa_p)
2564 init_ssa_operands (cfun);
2565 }
2566 }
2567
2568 /* Helper function for copy_cfg_body. Move debug stmts from the end
2569 of NEW_BB to the beginning of successor basic blocks when needed. If the
2570 successor has multiple predecessors, reset the debug stmts' values;
2571 otherwise keep them. */
2572
2573 static void
2574 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2575 {
2576 edge e;
2577 edge_iterator ei;
2578 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2579
2580 if (gsi_end_p (si)
2581 || gsi_one_before_end_p (si)
2582 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2583 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2584 return;
2585
2586 FOR_EACH_EDGE (e, ei, new_bb->succs)
2587 {
2588 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2589 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2590 while (is_gimple_debug (gsi_stmt (ssi)))
2591 {
2592 gimple *stmt = gsi_stmt (ssi);
2593 gdebug *new_stmt;
2594 tree var;
2595 tree value;
2596
2597 /* For the last edge move the debug stmts instead of copying
2598 them. */
2599 if (ei_one_before_end_p (ei))
2600 {
2601 si = ssi;
2602 gsi_prev (&ssi);
2603 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2604 {
2605 gimple_debug_bind_reset_value (stmt);
2606 gimple_set_location (stmt, UNKNOWN_LOCATION);
2607 }
2608 gsi_remove (&si, false);
2609 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2610 continue;
2611 }
2612
2613 if (gimple_debug_bind_p (stmt))
2614 {
2615 var = gimple_debug_bind_get_var (stmt);
2616 if (single_pred_p (e->dest))
2617 {
2618 value = gimple_debug_bind_get_value (stmt);
2619 value = unshare_expr (value);
2620 new_stmt = gimple_build_debug_bind (var, value, stmt);
2621 }
2622 else
2623 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2624 }
2625 else if (gimple_debug_source_bind_p (stmt))
2626 {
2627 var = gimple_debug_source_bind_get_var (stmt);
2628 value = gimple_debug_source_bind_get_value (stmt);
2629 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2630 }
2631 else if (gimple_debug_nonbind_marker_p (stmt))
2632 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2633 else
2634 gcc_unreachable ();
2635 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2636 id->debug_stmts.safe_push (new_stmt);
2637 gsi_prev (&ssi);
2638 }
2639 }
2640 }
2641
2642 /* Make a copy of the sub-loops of SRC_PARENT and place them
2643 as sub-loops of DEST_PARENT. */
2644
2645 static void
2646 copy_loops (copy_body_data *id,
2647 struct loop *dest_parent, struct loop *src_parent)
2648 {
2649 struct loop *src_loop = src_parent->inner;
2650 while (src_loop)
2651 {
2652 if (!id->blocks_to_copy
2653 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2654 {
2655 struct loop *dest_loop = alloc_loop ();
2656
2657 /* Assign the new loop its header and latch and associate
2658 those with the new loop. */
2659 dest_loop->header = (basic_block)src_loop->header->aux;
2660 dest_loop->header->loop_father = dest_loop;
2661 if (src_loop->latch != NULL)
2662 {
2663 dest_loop->latch = (basic_block)src_loop->latch->aux;
2664 dest_loop->latch->loop_father = dest_loop;
2665 }
2666
2667 /* Copy loop meta-data. */
2668 copy_loop_info (src_loop, dest_loop);
2669 if (dest_loop->unroll)
2670 cfun->has_unroll = true;
2671 if (dest_loop->force_vectorize)
2672 cfun->has_force_vectorize_loops = true;
2673
2674 /* Finally place it into the loop array and the loop tree. */
2675 place_new_loop (cfun, dest_loop);
2676 flow_loop_tree_node_add (dest_parent, dest_loop);
2677
2678 if (src_loop->simduid)
2679 {
2680 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2681 cfun->has_simduid_loops = true;
2682 }
2683
2684 /* Recurse. */
2685 copy_loops (id, dest_loop, src_loop);
2686 }
2687 src_loop = src_loop->next;
2688 }
2689 }
2690
2691 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2692
2693 void
2694 redirect_all_calls (copy_body_data * id, basic_block bb)
2695 {
2696 gimple_stmt_iterator si;
2697 gimple *last = last_stmt (bb);
2698 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2699 {
2700 gimple *stmt = gsi_stmt (si);
2701 if (is_gimple_call (stmt))
2702 {
2703 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2704 if (edge)
2705 {
2706 edge->redirect_call_stmt_to_callee ();
2707 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2708 gimple_purge_dead_eh_edges (bb);
2709 }
2710 }
2711 }
2712 }
2713
2714 /* Make a copy of the body of FN so that it can be inserted inline in
2715 another function. Walks FN via CFG, returns new fndecl. */
2716
2717 static tree
2718 copy_cfg_body (copy_body_data * id,
2719 basic_block entry_block_map, basic_block exit_block_map,
2720 basic_block new_entry)
2721 {
2722 tree callee_fndecl = id->src_fn;
2723 /* Original cfun for the callee, doesn't change. */
2724 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2725 struct function *cfun_to_copy;
2726 basic_block bb;
2727 tree new_fndecl = NULL;
2728 bool need_debug_cleanup = false;
2729 int last;
2730 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2731 profile_count num = entry_block_map->count;
2732
2733 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2734
2735 /* Register specific tree functions. */
2736 gimple_register_cfg_hooks ();
2737
2738 /* If we are inlining just a region of the function, make sure to connect
2739 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2740 part of a loop, we must compute the frequency and probability of
2741 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2742 probabilities of the edges incoming from the nonduplicated region. */
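/* Roughly: DEN becomes the sum of the counts of edges entering NEW_ENTRY
   from blocks that are not being copied, that sum is used as the count of
   the new ENTRY block, and the NUM/DEN ratio then scales the counts of all
   copied blocks. */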
2743 if (new_entry)
2744 {
2745 edge e;
2746 edge_iterator ei;
2747 den = profile_count::zero ();
2748
2749 FOR_EACH_EDGE (e, ei, new_entry->preds)
2750 if (!e->src->aux)
2751 den += e->count ();
2752 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2753 }
2754
2755 profile_count::adjust_for_ipa_scaling (&num, &den);
2756
2757 /* Must have a CFG here at this point. */
2758 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2759 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2760
2761
2762 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2763 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2764 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2765 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2766
2767 /* Duplicate any exception-handling regions. */
2768 if (cfun->eh)
2769 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2770 remap_decl_1, id);
2771
2772 /* Use aux pointers to map the original blocks to copy. */
2773 FOR_EACH_BB_FN (bb, cfun_to_copy)
2774 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2775 {
2776 basic_block new_bb = copy_bb (id, bb, num, den);
2777 bb->aux = new_bb;
2778 new_bb->aux = bb;
2779 new_bb->loop_father = entry_block_map->loop_father;
2780 }
2781
2782 last = last_basic_block_for_fn (cfun);
2783
2784 /* Now that we've duplicated the blocks, duplicate their edges. */
2785 basic_block abnormal_goto_dest = NULL;
2786 if (id->call_stmt
2787 && stmt_can_make_abnormal_goto (id->call_stmt))
2788 {
2789 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2790
2791 bb = gimple_bb (id->call_stmt);
2792 gsi_next (&gsi);
2793 if (gsi_end_p (gsi))
2794 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2795 }
2796 FOR_ALL_BB_FN (bb, cfun_to_copy)
2797 if (!id->blocks_to_copy
2798 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2799 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2800 abnormal_goto_dest, id);
2801
2802 if (new_entry)
2803 {
2804 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2805 EDGE_FALLTHRU);
2806 e->probability = profile_probability::always ();
2807 }
2808
2809 /* Duplicate the loop tree, if available and wanted. */
2810 if (loops_for_fn (src_cfun) != NULL
2811 && current_loops != NULL)
2812 {
2813 copy_loops (id, entry_block_map->loop_father,
2814 get_loop (src_cfun, 0));
2815 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2816 loops_state_set (LOOPS_NEED_FIXUP);
2817 }
2818
2819 /* If the loop tree in the source function needed fixup, mark the
2820 destination loop tree for fixup, too. */
2821 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2822 loops_state_set (LOOPS_NEED_FIXUP);
2823
2824 if (gimple_in_ssa_p (cfun))
2825 FOR_ALL_BB_FN (bb, cfun_to_copy)
2826 if (!id->blocks_to_copy
2827 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2828 copy_phis_for_bb (bb, id);
2829
2830 FOR_ALL_BB_FN (bb, cfun_to_copy)
2831 if (bb->aux)
2832 {
2833 if (need_debug_cleanup
2834 && bb->index != ENTRY_BLOCK
2835 && bb->index != EXIT_BLOCK)
2836 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2837 /* Update call edge destinations. This cannot be done before loop
2838 info is updated, because we may split basic blocks. */
2839 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2840 && bb->index != ENTRY_BLOCK
2841 && bb->index != EXIT_BLOCK)
2842 redirect_all_calls (id, (basic_block)bb->aux);
2843 ((basic_block)bb->aux)->aux = NULL;
2844 bb->aux = NULL;
2845 }
2846
2847 /* Zero out AUX fields of newly created block during EH edge
2848 insertion. */
2849 for (; last < last_basic_block_for_fn (cfun); last++)
2850 {
2851 if (need_debug_cleanup)
2852 maybe_move_debug_stmts_to_successors (id,
2853 BASIC_BLOCK_FOR_FN (cfun, last));
2854 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2855 /* Update call edge destinations. This cannot be done before loop
2856 info is updated, because we may split basic blocks. */
2857 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2858 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2859 }
2860 entry_block_map->aux = NULL;
2861 exit_block_map->aux = NULL;
2862
2863 if (id->eh_map)
2864 {
2865 delete id->eh_map;
2866 id->eh_map = NULL;
2867 }
2868 if (id->dependence_map)
2869 {
2870 delete id->dependence_map;
2871 id->dependence_map = NULL;
2872 }
2873
2874 return new_fndecl;
2875 }
2876
2877 /* Copy the debug STMT using ID. We deal with these statements in a
2878 special way: if any variable in their VALUE expression wasn't
2879 remapped yet, we won't remap it, because that would get decl uids
2880 out of sync, causing codegen differences between -g and -g0. If
2881 this arises, we drop the VALUE expression altogether. */
2882
2883 static void
2884 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2885 {
2886 tree t, *n;
2887 struct walk_stmt_info wi;
2888
2889 if (gimple_block (stmt))
2890 {
2891 n = id->decl_map->get (gimple_block (stmt));
2892 gimple_set_block (stmt, n ? *n : id->block);
2893 }
2894
2895 if (gimple_debug_nonbind_marker_p (stmt))
2896 return;
2897
2898 /* Remap all the operands in COPY. */
2899 memset (&wi, 0, sizeof (wi));
2900 wi.info = id;
2901
2902 processing_debug_stmt = 1;
2903
2904 if (gimple_debug_source_bind_p (stmt))
2905 t = gimple_debug_source_bind_get_var (stmt);
2906 else if (gimple_debug_bind_p (stmt))
2907 t = gimple_debug_bind_get_var (stmt);
2908 else
2909 gcc_unreachable ();
2910
2911 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2912 && (n = id->debug_map->get (t)))
2913 {
2914 gcc_assert (VAR_P (*n));
2915 t = *n;
2916 }
2917 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2918 /* T is a non-localized variable. */;
2919 else
2920 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2921
2922 if (gimple_debug_bind_p (stmt))
2923 {
2924 gimple_debug_bind_set_var (stmt, t);
2925
2926 if (gimple_debug_bind_has_value_p (stmt))
2927 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2928 remap_gimple_op_r, &wi, NULL);
2929
2930 /* Punt if any decl couldn't be remapped. */
2931 if (processing_debug_stmt < 0)
2932 gimple_debug_bind_reset_value (stmt);
2933 }
2934 else if (gimple_debug_source_bind_p (stmt))
2935 {
2936 gimple_debug_source_bind_set_var (stmt, t);
2937 /* When inlining, if the source bind refers to one of the optimized
2938 away parameters, change the source bind into a normal debug bind
2939 referring to the corresponding DEBUG_EXPR_DECL that should have
2940 been bound before the call stmt. */
2941 t = gimple_debug_source_bind_get_value (stmt);
2942 if (t != NULL_TREE
2943 && TREE_CODE (t) == PARM_DECL
2944 && id->call_stmt)
2945 {
2946 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2947 unsigned int i;
2948 if (debug_args != NULL)
2949 {
2950 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2951 if ((**debug_args)[i] == DECL_ORIGIN (t)
2952 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2953 {
2954 t = (**debug_args)[i + 1];
2955 stmt->subcode = GIMPLE_DEBUG_BIND;
2956 gimple_debug_bind_set_value (stmt, t);
2957 break;
2958 }
2959 }
2960 }
2961 if (gimple_debug_source_bind_p (stmt))
2962 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2963 remap_gimple_op_r, &wi, NULL);
2964 }
2965
2966 processing_debug_stmt = 0;
2967
2968 update_stmt (stmt);
2969 }
2970
2971 /* Process deferred debug stmts. In order to give values better odds
2972 of being successfully remapped, we delay the processing of debug
2973 stmts until all other stmts that might require remapping are
2974 processed. */
2975
2976 static void
2977 copy_debug_stmts (copy_body_data *id)
2978 {
2979 size_t i;
2980 gdebug *stmt;
2981
2982 if (!id->debug_stmts.exists ())
2983 return;
2984
2985 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2986 copy_debug_stmt (stmt, id);
2987
2988 id->debug_stmts.release ();
2989 }
2990
2991 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2992 another function. */
2993
2994 static tree
2995 copy_tree_body (copy_body_data *id)
2996 {
2997 tree fndecl = id->src_fn;
2998 tree body = DECL_SAVED_TREE (fndecl);
2999
3000 walk_tree (&body, copy_tree_body_r, id, NULL);
3001
3002 return body;
3003 }
3004
3005 /* Make a copy of the body of FN so that it can be inserted inline in
3006 another function. */
3007
3008 static tree
3009 copy_body (copy_body_data *id,
3010 basic_block entry_block_map, basic_block exit_block_map,
3011 basic_block new_entry)
3012 {
3013 tree fndecl = id->src_fn;
3014 tree body;
3015
3016 /* If this body has a CFG, walk CFG and copy. */
3017 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3018 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3019 new_entry);
3020 copy_debug_stmts (id);
3021
3022 return body;
3023 }
3024
3025 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3026 defined in function FN, or of a data member thereof. */
3027
3028 static bool
3029 self_inlining_addr_expr (tree value, tree fn)
3030 {
3031 tree var;
3032
3033 if (TREE_CODE (value) != ADDR_EXPR)
3034 return false;
3035
3036 var = get_base_address (TREE_OPERAND (value, 0));
3037
3038 return var && auto_var_in_fn_p (var, fn);
3039 }
3040
3041 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3042 lexical block and line number information from base_stmt, if given,
3043 or from the last stmt of the block otherwise. */
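/* In GIMPLE dumps the inserted annotation looks roughly like
   "# DEBUG var => value" (illustrative form); nothing is emitted unless
   -fvar-tracking-assignments is in effect for the destination function
   and VAR can be tracked at all. */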
3044
3045 static gimple *
3046 insert_init_debug_bind (copy_body_data *id,
3047 basic_block bb, tree var, tree value,
3048 gimple *base_stmt)
3049 {
3050 gimple *note;
3051 gimple_stmt_iterator gsi;
3052 tree tracked_var;
3053
3054 if (!gimple_in_ssa_p (id->src_cfun))
3055 return NULL;
3056
3057 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3058 return NULL;
3059
3060 tracked_var = target_for_debug_bind (var);
3061 if (!tracked_var)
3062 return NULL;
3063
3064 if (bb)
3065 {
3066 gsi = gsi_last_bb (bb);
3067 if (!base_stmt && !gsi_end_p (gsi))
3068 base_stmt = gsi_stmt (gsi);
3069 }
3070
3071 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3072
3073 if (bb)
3074 {
3075 if (!gsi_end_p (gsi))
3076 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3077 else
3078 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3079 }
3080
3081 return note;
3082 }
3083
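/* Insert INIT_STMT, which initializes a copied parameter, at the end of
   basic block BB, regimplifying its operands and adding a matching debug
   bind when one is appropriate. */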
3084 static void
3085 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3086 {
3087 /* If VAR represents a zero-sized variable, the assignment statement
3088 may result in no gimple statements. */
3089 if (init_stmt)
3090 {
3091 gimple_stmt_iterator si = gsi_last_bb (bb);
3092
3093 /* We can end up with init statements that store to a non-register
3094 from a rhs with a conversion. Handle that here by forcing the
3095 rhs into a temporary. gimple_regimplify_operands is not
3096 prepared to do this for us. */
3097 if (!is_gimple_debug (init_stmt)
3098 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3099 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3100 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3101 {
3102 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3103 gimple_expr_type (init_stmt),
3104 gimple_assign_rhs1 (init_stmt));
3105 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3106 GSI_NEW_STMT);
3107 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3108 gimple_assign_set_rhs1 (init_stmt, rhs);
3109 }
3110 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3111 gimple_regimplify_operands (init_stmt, &si);
3112
3113 if (!is_gimple_debug (init_stmt))
3114 {
3115 tree def = gimple_assign_lhs (init_stmt);
3116 insert_init_debug_bind (id, bb, def, def, init_stmt);
3117 }
3118 }
3119 }
3120
3121 /* Initialize parameter P with VALUE. If needed, produce an init statement
3122 at the end of BB. When BB is NULL, we return the init statement to be
3123 output later. */
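/* A rough illustration with hypothetical names: when inlining a call
   "foo (x_3)" whose callee declares "int p", P is remapped to a fresh
   local (say p.1) and, if the SSA default definition of P cannot simply
   be mapped to x_3, an initialization of the remapped parameter from
   x_3 is emitted at the end of BB (or returned when BB is NULL). */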
3124 static gimple *
3125 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3126 basic_block bb, tree *vars)
3127 {
3128 gimple *init_stmt = NULL;
3129 tree var;
3130 tree rhs = value;
3131 tree def = (gimple_in_ssa_p (cfun)
3132 ? ssa_default_def (id->src_cfun, p) : NULL);
3133
3134 if (value
3135 && value != error_mark_node
3136 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3137 {
3138 /* If we can match up types by promotion/demotion do so. */
3139 if (fold_convertible_p (TREE_TYPE (p), value))
3140 rhs = fold_convert (TREE_TYPE (p), value);
3141 else
3142 {
3143 /* ??? For valid programs we should not end up here.
3144 Still if we end up with truly mismatched types here, fall back
3145 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3146 GIMPLE to the following passes. */
3147 if (!is_gimple_reg_type (TREE_TYPE (value))
3148 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3149 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3150 else
3151 rhs = build_zero_cst (TREE_TYPE (p));
3152 }
3153 }
3154
3155 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3156 here since the type of this decl must be visible to the calling
3157 function. */
3158 var = copy_decl_to_var (p, id);
3159
3160 /* Declare this new variable. */
3161 DECL_CHAIN (var) = *vars;
3162 *vars = var;
3163
3164 /* Make gimplifier happy about this variable. */
3165 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3166
3167 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3168 we would not need to create a new variable here at all, if it
3169 weren't for debug info. Still, we can just use the argument
3170 value. */
3171 if (TREE_READONLY (p)
3172 && !TREE_ADDRESSABLE (p)
3173 && value && !TREE_SIDE_EFFECTS (value)
3174 && !def)
3175 {
3176 /* We may produce non-gimple trees by adding NOPs or introduce
3177 invalid sharing when the operand is not really constant.
3178 It is not a big deal to prohibit constant propagation here as
3179 we will constant propagate in the DOM1 pass anyway. */
3180 if (is_gimple_min_invariant (value)
3181 && useless_type_conversion_p (TREE_TYPE (p),
3182 TREE_TYPE (value))
3183 /* We have to be very careful about ADDR_EXPR. Make sure
3184 the base variable isn't a local variable of the inlined
3185 function, e.g., when doing recursive inlining, direct or
3186 mutually-recursive or whatever, which is why we don't
3187 just test whether fn == current_function_decl. */
3188 && ! self_inlining_addr_expr (value, fn))
3189 {
3190 insert_decl_map (id, p, value);
3191 insert_debug_decl_map (id, p, var);
3192 return insert_init_debug_bind (id, bb, var, value, NULL);
3193 }
3194 }
3195
3196 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3197 that way, when the PARM_DECL is encountered, it will be
3198 automatically replaced by the VAR_DECL. */
3199 insert_decl_map (id, p, var);
3200
3201 /* Even if P was TREE_READONLY, the new VAR should not be.
3202 In the original code, we would have constructed a
3203 temporary, and then the function body would have never
3204 changed the value of P. However, now, we will be
3205 constructing VAR directly. The constructor body may
3206 change its value multiple times as it is being
3207 constructed. Therefore, it must not be TREE_READONLY;
3208 the back-end assumes that TREE_READONLY variable is
3209 assigned to only once. */
3210 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3211 TREE_READONLY (var) = 0;
3212
3213 /* If there is no setup required and we are in SSA, take the easy route
3214 replacing all SSA names representing the function parameter by the
3215 SSA name passed to the function.
3216
3217 We need to construct a map for the variable anyway as it might be used
3218 in different SSA names when the parameter is set in the function.
3219
3220 Do the replacement at -O0 for const arguments replaced by a constant.
3221 This is important for builtin_constant_p and other constructs requiring
3222 a constant argument to be visible in the inlined function body. */
3223 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3224 && (optimize
3225 || (TREE_READONLY (p)
3226 && is_gimple_min_invariant (rhs)))
3227 && (TREE_CODE (rhs) == SSA_NAME
3228 || is_gimple_min_invariant (rhs))
3229 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3230 {
3231 insert_decl_map (id, def, rhs);
3232 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3233 }
3234
3235 /* If the value of the argument is never used, don't bother initializing
3236 it. */
3237 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3238 {
3239 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3240 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3241 }
3242
3243 /* Initialize this VAR_DECL from the equivalent argument. Convert
3244 the argument to the proper type in case it was promoted. */
3245 if (value)
3246 {
3247 if (rhs == error_mark_node)
3248 {
3249 insert_decl_map (id, p, var);
3250 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3251 }
3252
3253 STRIP_USELESS_TYPE_CONVERSION (rhs);
3254
3255 /* If we are in SSA form properly remap the default definition
3256 or assign to a dummy SSA name if the parameter is unused and
3257 we are not optimizing. */
3258 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3259 {
3260 if (def)
3261 {
3262 def = remap_ssa_name (def, id);
3263 init_stmt = gimple_build_assign (def, rhs);
3264 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3265 set_ssa_default_def (cfun, var, NULL);
3266 }
3267 else if (!optimize)
3268 {
3269 def = make_ssa_name (var);
3270 init_stmt = gimple_build_assign (def, rhs);
3271 }
3272 }
3273 else
3274 init_stmt = gimple_build_assign (var, rhs);
3275
3276 if (bb && init_stmt)
3277 insert_init_stmt (id, bb, init_stmt);
3278 }
3279 return init_stmt;
3280 }
3281
3282 /* Generate code to initialize the parameters of the function at the
3283 top of the stack in ID from the GIMPLE_CALL STMT. */
3284
3285 static void
3286 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3287 tree fn, basic_block bb)
3288 {
3289 tree parms;
3290 size_t i;
3291 tree p;
3292 tree vars = NULL_TREE;
3293 tree static_chain = gimple_call_chain (stmt);
3294
3295 /* Figure out what the parameters are. */
3296 parms = DECL_ARGUMENTS (fn);
3297
3298 /* Loop through the parameter declarations, replacing each with an
3299 equivalent VAR_DECL, appropriately initialized. */
3300 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3301 {
3302 tree val;
3303 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3304 setup_one_parameter (id, p, val, fn, bb, &vars);
3305 }
3306 /* After remapping parameters remap their types. This has to be done
3307 in a second loop over all parameters to appropriately remap
3308 variable sized arrays when the size is specified in a
3309 parameter following the array. */
3310 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3311 {
3312 tree *varp = id->decl_map->get (p);
3313 if (varp && VAR_P (*varp))
3314 {
3315 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3316 ? ssa_default_def (id->src_cfun, p) : NULL);
3317 tree var = *varp;
3318 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3319 /* Also remap the default definition if it was remapped
3320 to the default definition of the parameter replacement
3321 by the parameter setup. */
3322 if (def)
3323 {
3324 tree *defp = id->decl_map->get (def);
3325 if (defp
3326 && TREE_CODE (*defp) == SSA_NAME
3327 && SSA_NAME_VAR (*defp) == var)
3328 TREE_TYPE (*defp) = TREE_TYPE (var);
3329 }
3330 }
3331 }
3332
3333 /* Initialize the static chain. */
3334 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3335 gcc_assert (fn != current_function_decl);
3336 if (p)
3337 {
3338 /* No static chain? Seems like a bug in tree-nested.c. */
3339 gcc_assert (static_chain);
3340
3341 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3342 }
3343
3344 declare_inline_vars (id->block, vars);
3345 }
3346
3347
3348 /* Declare a return variable to replace the RESULT_DECL for the
3349 function we are calling. An appropriate DECL_STMT is returned.
3350 The USE_STMT is filled to contain a use of the declaration to
3351 indicate the return value of the function.
3352
3353 RETURN_SLOT, if non-null, is the place where to store the result. It
3354 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3355 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3356
3357 The return value is a (possibly null) value that holds the result
3358 as seen by the caller. */
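/* A rough illustration: when inlining "y = foo ();", MODIFY_DEST is "y"
   and, when it is safe to do so, the callee's RESULT_DECL is mapped
   straight to "y"; with the return slot optimization on an aggregate,
   RETURN_SLOT is the caller's object and the RESULT_DECL maps to it (or
   to its address for DECL_BY_REFERENCE results). */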
3359
3360 static tree
3361 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3362 basic_block entry_bb)
3363 {
3364 tree callee = id->src_fn;
3365 tree result = DECL_RESULT (callee);
3366 tree callee_type = TREE_TYPE (result);
3367 tree caller_type;
3368 tree var, use;
3369
3370 /* Handle type-mismatches in the function declaration return type
3371 vs. the call expression. */
3372 if (modify_dest)
3373 caller_type = TREE_TYPE (modify_dest);
3374 else
3375 caller_type = TREE_TYPE (TREE_TYPE (callee));
3376
3377 /* We don't need to do anything for functions that don't return anything. */
3378 if (VOID_TYPE_P (callee_type))
3379 return NULL_TREE;
3380
3381 /* If there was a return slot, then the return value is the
3382 dereferenced address of that object. */
3383 if (return_slot)
3384 {
3385 /* The front end shouldn't have used both return_slot and
3386 a modify expression. */
3387 gcc_assert (!modify_dest);
3388 if (DECL_BY_REFERENCE (result))
3389 {
3390 tree return_slot_addr = build_fold_addr_expr (return_slot);
3391 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3392
3393 /* We are going to construct *&return_slot and we can't do that
3394 for variables believed to be not addressable.
3395
3396 FIXME: This check can possibly match, because values returned
3397 via return slot optimization are not believed to have their address
3398 taken by alias analysis. */
3399 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3400 var = return_slot_addr;
3401 }
3402 else
3403 {
3404 var = return_slot;
3405 gcc_assert (TREE_CODE (var) != SSA_NAME);
3406 if (TREE_ADDRESSABLE (result))
3407 mark_addressable (var);
3408 }
3409 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3410 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3411 && !DECL_GIMPLE_REG_P (result)
3412 && DECL_P (var))
3413 DECL_GIMPLE_REG_P (var) = 0;
3414 use = NULL;
3415 goto done;
3416 }
3417
3418 /* All types requiring non-trivial constructors should have been handled. */
3419 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3420
3421 /* Attempt to avoid creating a new temporary variable. */
3422 if (modify_dest
3423 && TREE_CODE (modify_dest) != SSA_NAME)
3424 {
3425 bool use_it = false;
3426
3427 /* We can't use MODIFY_DEST if there's type promotion involved. */
3428 if (!useless_type_conversion_p (callee_type, caller_type))
3429 use_it = false;
3430
3431 /* ??? If we're assigning to a variable sized type, then we must
3432 reuse the destination variable, because we've no good way to
3433 create variable sized temporaries at this point. */
3434 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3435 use_it = true;
3436
3437 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3438 reuse it as the result of the call directly. Don't do this if
3439 it would promote MODIFY_DEST to addressable. */
3440 else if (TREE_ADDRESSABLE (result))
3441 use_it = false;
3442 else
3443 {
3444 tree base_m = get_base_address (modify_dest);
3445
3446 /* If the base isn't a decl, then it's a pointer, and we don't
3447 know where that's going to go. */
3448 if (!DECL_P (base_m))
3449 use_it = false;
3450 else if (is_global_var (base_m))
3451 use_it = false;
3452 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3453 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3454 && !DECL_GIMPLE_REG_P (result)
3455 && DECL_GIMPLE_REG_P (base_m))
3456 use_it = false;
3457 else if (!TREE_ADDRESSABLE (base_m))
3458 use_it = true;
3459 }
3460
3461 if (use_it)
3462 {
3463 var = modify_dest;
3464 use = NULL;
3465 goto done;
3466 }
3467 }
3468
3469 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3470
3471 var = copy_result_decl_to_var (result, id);
3472 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3473
3474 /* Do not have the rest of GCC warn about this variable as it should
3475 not be visible to the user. */
3476 TREE_NO_WARNING (var) = 1;
3477
3478 declare_inline_vars (id->block, var);
3479
3480 /* Build the use expr. If the return type of the function was
3481 promoted, convert it back to the expected type. */
3482 use = var;
3483 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3484 {
3485 /* If we can match up types by promotion/demotion, do so. */
3486 if (fold_convertible_p (caller_type, var))
3487 use = fold_convert (caller_type, var);
3488 else
3489 {
3490 /* ??? For valid programs we should not end up here.
3491 Still, if we end up with truly mismatched types here, fall back
3492 to using a MEM_REF to not leak invalid GIMPLE to the following
3493 passes. */
3494 /* Prevent var from being written into SSA form. */
3495 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3496 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3497 DECL_GIMPLE_REG_P (var) = false;
3498 else if (is_gimple_reg_type (TREE_TYPE (var)))
3499 TREE_ADDRESSABLE (var) = true;
3500 use = fold_build2 (MEM_REF, caller_type,
3501 build_fold_addr_expr (var),
3502 build_int_cst (ptr_type_node, 0));
3503 }
3504 }
3505
3506 STRIP_USELESS_TYPE_CONVERSION (use);
3507
3508 if (DECL_BY_REFERENCE (result))
3509 {
3510 TREE_ADDRESSABLE (var) = 1;
3511 var = build_fold_addr_expr (var);
3512 }
3513
3514 done:
3515 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3516 way, when the RESULT_DECL is encountered, it will be
3517 automatically replaced by the VAR_DECL.
3518
3519 When returning by reference, ensure that RESULT_DECL remaps to
3520 a gimple_val. */
3521 if (DECL_BY_REFERENCE (result)
3522 && !is_gimple_val (var))
3523 {
3524 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3525 insert_decl_map (id, result, temp);
3526 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3527 its default_def SSA_NAME. */
3528 if (gimple_in_ssa_p (id->src_cfun)
3529 && is_gimple_reg (result))
3530 {
3531 temp = make_ssa_name (temp);
3532 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3533 }
3534 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3535 }
3536 else
3537 insert_decl_map (id, result, var);
3538
3539 /* Remember this so we can ignore it in remap_decls. */
3540 id->retvar = var;
3541 return use;
3542 }
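
/* Illustrative sketch of the effect of the function above: for a call
   "x = foo ()" where the result is not returned by reference and "x"
   cannot be reused directly, a fresh local VAR_DECL (call it "retval")
   is created, the callee's RESULT_DECL is mapped to it, and "retval"
   is returned as the use expression; expand_call_inline below then
   replaces the call statement with "x = retval".  */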
3543
3544 /* Determine if the function can be copied. If so, return NULL. If
3545 not, return a string describing the reason for failure. */
3546
3547 const char *
3548 copy_forbidden (struct function *fun)
3549 {
3550 const char *reason = fun->cannot_be_copied_reason;
3551
3552 /* Only examine the function once. */
3553 if (fun->cannot_be_copied_set)
3554 return reason;
3555
3556 /* We cannot copy a function that receives a non-local goto
3557 because we cannot remap the destination label used in the
3558 function that is performing the non-local goto. */
3559 /* ??? Actually, this should be possible, if we work at it.
3560 No doubt there's just a handful of places that simply
3561 assume it doesn't happen and don't substitute properly. */
3562 if (fun->has_nonlocal_label)
3563 {
3564 reason = G_("function %q+F can never be copied "
3565 "because it receives a non-local goto");
3566 goto fail;
3567 }
3568
3569 if (fun->has_forced_label_in_static)
3570 {
3571 reason = G_("function %q+F can never be copied because it saves "
3572 "address of local label in a static variable");
3573 goto fail;
3574 }
3575
3576 fail:
3577 fun->cannot_be_copied_reason = reason;
3578 fun->cannot_be_copied_set = true;
3579 return reason;
3580 }
3581
3582
3583 static const char *inline_forbidden_reason;
3584
3585 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3586 iff a function cannot be inlined. Also sets the reason why. */
3587
3588 static tree
3589 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3590 struct walk_stmt_info *wip)
3591 {
3592 tree fn = (tree) wip->info;
3593 tree t;
3594 gimple *stmt = gsi_stmt (*gsi);
3595
3596 switch (gimple_code (stmt))
3597 {
3598 case GIMPLE_CALL:
3599 /* Refuse to inline an alloca call unless the user explicitly forced
3600 it, as this may change the program's memory overhead drastically
3601 when the function using alloca is called in a loop. In the GCC
3602 present in SPEC2000, inlining into schedule_block caused it to
3603 require 2GB of RAM instead of 256MB. Don't do so for alloca calls
3604 emitted for VLA objects, as those can't cause unbounded growth
3605 (they're always wrapped inside stack_save/stack_restore regions). */
3606 if (gimple_maybe_alloca_call_p (stmt)
3607 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3608 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3609 {
3610 inline_forbidden_reason
3611 = G_("function %q+F can never be inlined because it uses "
3612 "alloca (override using the always_inline attribute)");
3613 *handled_ops_p = true;
3614 return fn;
3615 }
3616
3617 t = gimple_call_fndecl (stmt);
3618 if (t == NULL_TREE)
3619 break;
3620
3621 /* We cannot inline functions that call setjmp. */
3622 if (setjmp_call_p (t))
3623 {
3624 inline_forbidden_reason
3625 = G_("function %q+F can never be inlined because it uses setjmp");
3626 *handled_ops_p = true;
3627 return t;
3628 }
3629
3630 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3631 switch (DECL_FUNCTION_CODE (t))
3632 {
3633 /* We cannot inline functions that take a variable number of
3634 arguments. */
3635 case BUILT_IN_VA_START:
3636 case BUILT_IN_NEXT_ARG:
3637 case BUILT_IN_VA_END:
3638 inline_forbidden_reason
3639 = G_("function %q+F can never be inlined because it "
3640 "uses variable argument lists");
3641 *handled_ops_p = true;
3642 return t;
3643
3644 case BUILT_IN_LONGJMP:
3645 /* We can't inline functions that call __builtin_longjmp at
3646 all. The non-local goto machinery really requires the
3647 destination be in a different function. If we allow the
3648 function calling __builtin_longjmp to be inlined into the
3649 function calling __builtin_setjmp, Things will Go Awry. */
3650 inline_forbidden_reason
3651 = G_("function %q+F can never be inlined because "
3652 "it uses setjmp-longjmp exception handling");
3653 *handled_ops_p = true;
3654 return t;
3655
3656 case BUILT_IN_NONLOCAL_GOTO:
3657 /* Similarly. */
3658 inline_forbidden_reason
3659 = G_("function %q+F can never be inlined because "
3660 "it uses non-local goto");
3661 *handled_ops_p = true;
3662 return t;
3663
3664 case BUILT_IN_RETURN:
3665 case BUILT_IN_APPLY_ARGS:
3666 /* If a __builtin_apply_args caller would be inlined,
3667 it would be saving arguments of the function it has
3668 been inlined into. Similarly, __builtin_return would
3669 return from the function into which the call has been inlined. */
3670 inline_forbidden_reason
3671 = G_("function %q+F can never be inlined because "
3672 "it uses __builtin_return or __builtin_apply_args");
3673 *handled_ops_p = true;
3674 return t;
3675
3676 default:
3677 break;
3678 }
3679 break;
3680
3681 case GIMPLE_GOTO:
3682 t = gimple_goto_dest (stmt);
3683
3684 /* We will not inline a function which uses computed goto. The
3685 addresses of its local labels, which may be tucked into
3686 global storage, are of course not constant across
3687 instantiations, which causes unexpected behavior. */
3688 if (TREE_CODE (t) != LABEL_DECL)
3689 {
3690 inline_forbidden_reason
3691 = G_("function %q+F can never be inlined "
3692 "because it contains a computed goto");
3693 *handled_ops_p = true;
3694 return t;
3695 }
3696 break;
3697
3698 default:
3699 break;
3700 }
3701
3702 *handled_ops_p = false;
3703 return NULL_TREE;
3704 }
3705
3706 /* Return true if FNDECL is a function that cannot be inlined into
3707 another one. */
3708
3709 static bool
3710 inline_forbidden_p (tree fndecl)
3711 {
3712 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3713 struct walk_stmt_info wi;
3714 basic_block bb;
3715 bool forbidden_p = false;
3716
3717 /* First check for shared reasons not to copy the code. */
3718 inline_forbidden_reason = copy_forbidden (fun);
3719 if (inline_forbidden_reason != NULL)
3720 return true;
3721
3722 /* Next, walk the statements of the function looking for
3723 constructs we can't handle, or that are non-optimal for inlining. */
3724 hash_set<tree> visited_nodes;
3725 memset (&wi, 0, sizeof (wi));
3726 wi.info = (void *) fndecl;
3727 wi.pset = &visited_nodes;
3728
3729 FOR_EACH_BB_FN (bb, fun)
3730 {
3731 gimple *ret;
3732 gimple_seq seq = bb_seq (bb);
3733 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3734 forbidden_p = (ret != NULL);
3735 if (forbidden_p)
3736 break;
3737 }
3738
3739 return forbidden_p;
3740 }
3741 \f
3742 /* Return false if the function FNDECL cannot be inlined on account of its
3743 attributes, true otherwise. */
3744 static bool
3745 function_attribute_inlinable_p (const_tree fndecl)
3746 {
3747 if (targetm.attribute_table)
3748 {
3749 const_tree a;
3750
3751 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3752 {
3753 const_tree name = TREE_PURPOSE (a);
3754 int i;
3755
3756 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3757 if (is_attribute_p (targetm.attribute_table[i].name, name))
3758 return targetm.function_attribute_inlinable_p (fndecl);
3759 }
3760 }
3761
3762 return true;
3763 }
3764
3765 /* Returns true if FN is a function that does not have any
3766 fundamental inline-blocking properties. */
3767
3768 bool
3769 tree_inlinable_function_p (tree fn)
3770 {
3771 bool inlinable = true;
3772 bool do_warning;
3773 tree always_inline;
3774
3775 /* If we've already decided this function shouldn't be inlined,
3776 there's no need to check again. */
3777 if (DECL_UNINLINABLE (fn))
3778 return false;
3779
3780 /* We only warn for functions declared `inline' by the user. */
3781 do_warning = (warn_inline
3782 && DECL_DECLARED_INLINE_P (fn)
3783 && !DECL_NO_INLINE_WARNING_P (fn)
3784 && !DECL_IN_SYSTEM_HEADER (fn));
3785
3786 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3787
3788 if (flag_no_inline
3789 && always_inline == NULL)
3790 {
3791 if (do_warning)
3792 warning (OPT_Winline, "function %q+F can never be inlined because it "
3793 "is suppressed using -fno-inline", fn);
3794 inlinable = false;
3795 }
3796
3797 else if (!function_attribute_inlinable_p (fn))
3798 {
3799 if (do_warning)
3800 warning (OPT_Winline, "function %q+F can never be inlined because it "
3801 "uses attributes conflicting with inlining", fn);
3802 inlinable = false;
3803 }
3804
3805 else if (inline_forbidden_p (fn))
3806 {
3807 /* See if we should warn about uninlinable functions. Previously,
3808 some of these warnings would be issued while trying to expand
3809 the function inline, but that would cause multiple warnings
3810 about functions that would, for example, call alloca. But since
3811 this is a property of the function, just one warning is enough.
3812 As a bonus we can now give more details about the reason why a
3813 function is not inlinable. */
3814 if (always_inline)
3815 error (inline_forbidden_reason, fn);
3816 else if (do_warning)
3817 warning (OPT_Winline, inline_forbidden_reason, fn);
3818
3819 inlinable = false;
3820 }
3821
3822 /* Squirrel away the result so that we don't have to check again. */
3823 DECL_UNINLINABLE (fn) = !inlinable;
3824
3825 return inlinable;
3826 }
3827
3828 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3829 word size, take a possible memcpy call into account, and return the
3830 cost based on whether we optimize for size or speed, according to SPEED_P. */
3831
3832 int
3833 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3834 {
3835 HOST_WIDE_INT size;
3836
3837 gcc_assert (!VOID_TYPE_P (type));
3838
3839 if (TREE_CODE (type) == VECTOR_TYPE)
3840 {
3841 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3842 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3843 int orig_mode_size
3844 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3845 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3846 return ((orig_mode_size + simd_mode_size - 1)
3847 / simd_mode_size);
3848 }
3849
3850 size = int_size_in_bytes (type);
3851
3852 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3853 /* Cost of a memcpy call, 3 arguments and the call. */
3854 return 4;
3855 else
3856 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3857 }
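
/* Worked example (purely illustrative; MOVE_MAX_PIECES and MOVE_RATIO
   are target-dependent): with MOVE_MAX_PIECES == 8 and a speed
   MOVE_RATIO of 4, copying a 24-byte aggregate costs
   (24 + 8 - 1) / 8 == 3, while a 64-byte copy exceeds the 32-byte
   piecewise limit and is costed as a memcpy call, i.e. 4.  */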
3858
3859 /* Returns the cost of operation CODE, according to WEIGHTS. */
3860
3861 static int
3862 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3863 tree op1 ATTRIBUTE_UNUSED, tree op2)
3864 {
3865 switch (code)
3866 {
3867 /* These are "free" conversions, or their presumed cost
3868 is folded into other operations. */
3869 case RANGE_EXPR:
3870 CASE_CONVERT:
3871 case COMPLEX_EXPR:
3872 case PAREN_EXPR:
3873 case VIEW_CONVERT_EXPR:
3874 return 0;
3875
3876 /* Assign cost of 1 to usual operations.
3877 ??? We may consider mapping RTL costs to this. */
3878 case COND_EXPR:
3879 case VEC_COND_EXPR:
3880 case VEC_PERM_EXPR:
3881
3882 case PLUS_EXPR:
3883 case POINTER_PLUS_EXPR:
3884 case POINTER_DIFF_EXPR:
3885 case MINUS_EXPR:
3886 case MULT_EXPR:
3887 case MULT_HIGHPART_EXPR:
3888
3889 case ADDR_SPACE_CONVERT_EXPR:
3890 case FIXED_CONVERT_EXPR:
3891 case FIX_TRUNC_EXPR:
3892
3893 case NEGATE_EXPR:
3894 case FLOAT_EXPR:
3895 case MIN_EXPR:
3896 case MAX_EXPR:
3897 case ABS_EXPR:
3898 case ABSU_EXPR:
3899
3900 case LSHIFT_EXPR:
3901 case RSHIFT_EXPR:
3902 case LROTATE_EXPR:
3903 case RROTATE_EXPR:
3904
3905 case BIT_IOR_EXPR:
3906 case BIT_XOR_EXPR:
3907 case BIT_AND_EXPR:
3908 case BIT_NOT_EXPR:
3909
3910 case TRUTH_ANDIF_EXPR:
3911 case TRUTH_ORIF_EXPR:
3912 case TRUTH_AND_EXPR:
3913 case TRUTH_OR_EXPR:
3914 case TRUTH_XOR_EXPR:
3915 case TRUTH_NOT_EXPR:
3916
3917 case LT_EXPR:
3918 case LE_EXPR:
3919 case GT_EXPR:
3920 case GE_EXPR:
3921 case EQ_EXPR:
3922 case NE_EXPR:
3923 case ORDERED_EXPR:
3924 case UNORDERED_EXPR:
3925
3926 case UNLT_EXPR:
3927 case UNLE_EXPR:
3928 case UNGT_EXPR:
3929 case UNGE_EXPR:
3930 case UNEQ_EXPR:
3931 case LTGT_EXPR:
3932
3933 case CONJ_EXPR:
3934
3935 case PREDECREMENT_EXPR:
3936 case PREINCREMENT_EXPR:
3937 case POSTDECREMENT_EXPR:
3938 case POSTINCREMENT_EXPR:
3939
3940 case REALIGN_LOAD_EXPR:
3941
3942 case WIDEN_SUM_EXPR:
3943 case WIDEN_MULT_EXPR:
3944 case DOT_PROD_EXPR:
3945 case SAD_EXPR:
3946 case WIDEN_MULT_PLUS_EXPR:
3947 case WIDEN_MULT_MINUS_EXPR:
3948 case WIDEN_LSHIFT_EXPR:
3949
3950 case VEC_WIDEN_MULT_HI_EXPR:
3951 case VEC_WIDEN_MULT_LO_EXPR:
3952 case VEC_WIDEN_MULT_EVEN_EXPR:
3953 case VEC_WIDEN_MULT_ODD_EXPR:
3954 case VEC_UNPACK_HI_EXPR:
3955 case VEC_UNPACK_LO_EXPR:
3956 case VEC_UNPACK_FLOAT_HI_EXPR:
3957 case VEC_UNPACK_FLOAT_LO_EXPR:
3958 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3959 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3960 case VEC_PACK_TRUNC_EXPR:
3961 case VEC_PACK_SAT_EXPR:
3962 case VEC_PACK_FIX_TRUNC_EXPR:
3963 case VEC_PACK_FLOAT_EXPR:
3964 case VEC_WIDEN_LSHIFT_HI_EXPR:
3965 case VEC_WIDEN_LSHIFT_LO_EXPR:
3966 case VEC_DUPLICATE_EXPR:
3967 case VEC_SERIES_EXPR:
3968
3969 return 1;
3970
3971 /* A few special cases of expensive operations. This is useful
3972 to avoid inlining functions that have too many of these. */
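/* For illustration, using the time-based weights set up in
   init_inline_once below: "a / b" with a non-constant divisor is
   costed at div_mod_cost (10), while "a / 8" falls through to the
   generic cost of 1.  */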
3973 case TRUNC_DIV_EXPR:
3974 case CEIL_DIV_EXPR:
3975 case FLOOR_DIV_EXPR:
3976 case ROUND_DIV_EXPR:
3977 case EXACT_DIV_EXPR:
3978 case TRUNC_MOD_EXPR:
3979 case CEIL_MOD_EXPR:
3980 case FLOOR_MOD_EXPR:
3981 case ROUND_MOD_EXPR:
3982 case RDIV_EXPR:
3983 if (TREE_CODE (op2) != INTEGER_CST)
3984 return weights->div_mod_cost;
3985 return 1;
3986
3987 /* Bit-field insertion needs several shift and mask operations. */
3988 case BIT_INSERT_EXPR:
3989 return 3;
3990
3991 default:
3992 /* We expect a copy assignment with no operator. */
3993 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3994 return 0;
3995 }
3996 }
3997
3998
3999 /* Estimate number of instructions that will be created by expanding
4000 the statements in the statement sequence STMTS.
4001 WEIGHTS contains weights attributed to various constructs. */
4002
4003 int
4004 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4005 {
4006 int cost;
4007 gimple_stmt_iterator gsi;
4008
4009 cost = 0;
4010 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4011 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4012
4013 return cost;
4014 }
4015
4016
4017 /* Estimate number of instructions that will be created by expanding STMT.
4018 WEIGHTS contains weights attributed to various constructs. */
4019
4020 int
4021 estimate_num_insns (gimple *stmt, eni_weights *weights)
4022 {
4023 unsigned cost, i;
4024 enum gimple_code code = gimple_code (stmt);
4025 tree lhs;
4026 tree rhs;
4027
4028 switch (code)
4029 {
4030 case GIMPLE_ASSIGN:
4031 /* Try to estimate the cost of assignments. We have two cases to
4032 deal with:
4033 1) Simple assignments to registers;
4034 2) Stores to things that must live in memory. This includes
4035 "normal" stores to scalars, but also assignments of large
4036 structures, or constructors of big arrays;
4037
4038 Let us look at these two cases, assuming we have "a = b + C":
4039 <GIMPLE_ASSIGN <var_decl "a">
4040 <plus_expr <var_decl "b"> <constant C>>
4041 If "a" is a GIMPLE register, the assignment to it is free on almost
4042 any target, because "a" usually ends up in a real register. Hence
4043 the only cost of this expression comes from the PLUS_EXPR, and we
4044 can ignore the GIMPLE_ASSIGN.
4045 If "a" is not a GIMPLE register, the assignment to "a" will most
4046 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4047 of moving something into "a", which we compute using the function
4048 estimate_move_cost. */
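/* For example (illustrative), if "a" lives in memory, "a = b + 1" is
   costed below as estimate_move_cost (TREE_TYPE (a)) for the store
   plus 1 for the PLUS_EXPR; if "a" is a GIMPLE register, only the 1
   for the PLUS_EXPR remains.  */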
4049 if (gimple_clobber_p (stmt))
4050 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4051
4052 lhs = gimple_assign_lhs (stmt);
4053 rhs = gimple_assign_rhs1 (stmt);
4054
4055 cost = 0;
4056
4057 /* Account for the cost of moving to / from memory. */
4058 if (gimple_store_p (stmt))
4059 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4060 if (gimple_assign_load_p (stmt))
4061 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4062
4063 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4064 gimple_assign_rhs1 (stmt),
4065 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4066 == GIMPLE_BINARY_RHS
4067 ? gimple_assign_rhs2 (stmt) : NULL);
4068 break;
4069
4070 case GIMPLE_COND:
4071 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4072 gimple_op (stmt, 0),
4073 gimple_op (stmt, 1));
4074 break;
4075
4076 case GIMPLE_SWITCH:
4077 {
4078 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4079 /* Take into account cost of the switch + guess 2 conditional jumps for
4080 each case label.
4081
4082 TODO: once the switch expansion logic is sufficiently separated, we can
4083 do a better job of estimating the cost of the switch. */
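/* For illustration, a GIMPLE_SWITCH with 16 labels is costed as
   floor_log2 (16) * 2 == 8 when estimating time (a balanced decision
   tree), but as 16 * 2 == 32 when estimating size.  */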
4084 if (weights->time_based)
4085 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4086 else
4087 cost = gimple_switch_num_labels (switch_stmt) * 2;
4088 }
4089 break;
4090
4091 case GIMPLE_CALL:
4092 {
4093 tree decl;
4094
4095 if (gimple_call_internal_p (stmt))
4096 return 0;
4097 else if ((decl = gimple_call_fndecl (stmt))
4098 && fndecl_built_in_p (decl))
4099 {
4100 /* Do not special-case builtins where we see the body.
4101 This just confuses the inliner. */
4102 struct cgraph_node *node;
4103 if (!(node = cgraph_node::get (decl))
4104 || node->definition)
4105 ;
4106 /* For builtins that are likely expanded to nothing or
4107 inlined, do not account operand costs. */
4108 else if (is_simple_builtin (decl))
4109 return 0;
4110 else if (is_inexpensive_builtin (decl))
4111 return weights->target_builtin_call_cost;
4112 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4113 {
4114 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4115 specialize the cheap expansion we do here.
4116 ??? This asks for a more general solution. */
4117 switch (DECL_FUNCTION_CODE (decl))
4118 {
4119 case BUILT_IN_POW:
4120 case BUILT_IN_POWF:
4121 case BUILT_IN_POWL:
4122 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4123 && (real_equal
4124 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4125 &dconst2)))
4126 return estimate_operator_cost
4127 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4128 gimple_call_arg (stmt, 0));
4129 break;
4130
4131 default:
4132 break;
4133 }
4134 }
4135 }
4136
4137 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4138 if (gimple_call_lhs (stmt))
4139 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4140 weights->time_based);
4141 for (i = 0; i < gimple_call_num_args (stmt); i++)
4142 {
4143 tree arg = gimple_call_arg (stmt, i);
4144 cost += estimate_move_cost (TREE_TYPE (arg),
4145 weights->time_based);
4146 }
4147 break;
4148 }
4149
4150 case GIMPLE_RETURN:
4151 return weights->return_cost;
4152
4153 case GIMPLE_GOTO:
4154 case GIMPLE_LABEL:
4155 case GIMPLE_NOP:
4156 case GIMPLE_PHI:
4157 case GIMPLE_PREDICT:
4158 case GIMPLE_DEBUG:
4159 return 0;
4160
4161 case GIMPLE_ASM:
4162 {
4163 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4164 /* 1000 means infinity. This avoids overflows later
4165 with very long asm statements. */
4166 if (count > 1000)
4167 count = 1000;
4168 /* If this asm is asm inline, count anything as minimum size. */
4169 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4170 count = MIN (1, count);
4171 return MAX (1, count);
4172 }
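/* E.g. (illustrative): an asm whose string contains three instructions
   counts as 3 above; with the "asm inline" qualifier it counts as just
   1, the minimum.  */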
4173
4174 case GIMPLE_RESX:
4175 /* This is either going to be an external function call with one
4176 argument, or two register copy statements plus a goto. */
4177 return 2;
4178
4179 case GIMPLE_EH_DISPATCH:
4180 /* ??? This is going to turn into a switch statement. Ideally
4181 we'd have a look at the eh region and estimate the number of
4182 edges involved. */
4183 return 10;
4184
4185 case GIMPLE_BIND:
4186 return estimate_num_insns_seq (
4187 gimple_bind_body (as_a <gbind *> (stmt)),
4188 weights);
4189
4190 case GIMPLE_EH_FILTER:
4191 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4192
4193 case GIMPLE_CATCH:
4194 return estimate_num_insns_seq (gimple_catch_handler (
4195 as_a <gcatch *> (stmt)),
4196 weights);
4197
4198 case GIMPLE_TRY:
4199 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4200 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4201
4202 /* OMP directives are generally very expensive. */
4203
4204 case GIMPLE_OMP_RETURN:
4205 case GIMPLE_OMP_SECTIONS_SWITCH:
4206 case GIMPLE_OMP_ATOMIC_STORE:
4207 case GIMPLE_OMP_CONTINUE:
4208 /* ...except these, which are cheap. */
4209 return 0;
4210
4211 case GIMPLE_OMP_ATOMIC_LOAD:
4212 return weights->omp_cost;
4213
4214 case GIMPLE_OMP_FOR:
4215 return (weights->omp_cost
4216 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4217 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4218
4219 case GIMPLE_OMP_PARALLEL:
4220 case GIMPLE_OMP_TASK:
4221 case GIMPLE_OMP_CRITICAL:
4222 case GIMPLE_OMP_MASTER:
4223 case GIMPLE_OMP_TASKGROUP:
4224 case GIMPLE_OMP_ORDERED:
4225 case GIMPLE_OMP_SECTION:
4226 case GIMPLE_OMP_SECTIONS:
4227 case GIMPLE_OMP_SINGLE:
4228 case GIMPLE_OMP_TARGET:
4229 case GIMPLE_OMP_TEAMS:
4230 return (weights->omp_cost
4231 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4232
4233 case GIMPLE_TRANSACTION:
4234 return (weights->tm_cost
4235 + estimate_num_insns_seq (gimple_transaction_body (
4236 as_a <gtransaction *> (stmt)),
4237 weights));
4238
4239 default:
4240 gcc_unreachable ();
4241 }
4242
4243 return cost;
4244 }
4245
4246 /* Estimate number of instructions that will be created by expanding
4247 function FNDECL. WEIGHTS contains weights attributed to various
4248 constructs. */
4249
4250 int
4251 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4252 {
4253 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4254 gimple_stmt_iterator bsi;
4255 basic_block bb;
4256 int n = 0;
4257
4258 gcc_assert (my_function && my_function->cfg);
4259 FOR_EACH_BB_FN (bb, my_function)
4260 {
4261 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4262 n += estimate_num_insns (gsi_stmt (bsi), weights);
4263 }
4264
4265 return n;
4266 }
4267
4268
4269 /* Initializes weights used by estimate_num_insns. */
4270
4271 void
4272 init_inline_once (void)
4273 {
4274 eni_size_weights.call_cost = 1;
4275 eni_size_weights.indirect_call_cost = 3;
4276 eni_size_weights.target_builtin_call_cost = 1;
4277 eni_size_weights.div_mod_cost = 1;
4278 eni_size_weights.omp_cost = 40;
4279 eni_size_weights.tm_cost = 10;
4280 eni_size_weights.time_based = false;
4281 eni_size_weights.return_cost = 1;
4282
4283 /* Estimating time for call is difficult, since we have no idea what the
4284 called function does. In the current uses of eni_time_weights,
4285 underestimating the cost does less harm than overestimating it, so
4286 we choose a rather small value here. */
4287 eni_time_weights.call_cost = 10;
4288 eni_time_weights.indirect_call_cost = 15;
4289 eni_time_weights.target_builtin_call_cost = 1;
4290 eni_time_weights.div_mod_cost = 10;
4291 eni_time_weights.omp_cost = 40;
4292 eni_time_weights.tm_cost = 40;
4293 eni_time_weights.time_based = true;
4294 eni_time_weights.return_cost = 2;
4295 }
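
/* A minimal usage sketch (assuming a function whose CFG has been built):
   estimate_num_insns_fn (fndecl, &eni_size_weights) yields a size
   estimate for FNDECL, while passing &eni_time_weights instead yields a
   time estimate using the per-construct weights initialized above.  */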
4296
4297
4298 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4299
4300 static void
4301 prepend_lexical_block (tree current_block, tree new_block)
4302 {
4303 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4304 BLOCK_SUBBLOCKS (current_block) = new_block;
4305 BLOCK_SUPERCONTEXT (new_block) = current_block;
4306 }
4307
4308 /* Add local variables from CALLEE to CALLER. */
4309
4310 static inline void
4311 add_local_variables (struct function *callee, struct function *caller,
4312 copy_body_data *id)
4313 {
4314 tree var;
4315 unsigned ix;
4316
4317 FOR_EACH_LOCAL_DECL (callee, ix, var)
4318 if (!can_be_nonlocal (var, id))
4319 {
4320 tree new_var = remap_decl (var, id);
4321
4322 /* Remap debug-expressions. */
4323 if (VAR_P (new_var)
4324 && DECL_HAS_DEBUG_EXPR_P (var)
4325 && new_var != var)
4326 {
4327 tree tem = DECL_DEBUG_EXPR (var);
4328 bool old_regimplify = id->regimplify;
4329 id->remapping_type_depth++;
4330 walk_tree (&tem, copy_tree_body_r, id, NULL);
4331 id->remapping_type_depth--;
4332 id->regimplify = old_regimplify;
4333 SET_DECL_DEBUG_EXPR (new_var, tem);
4334 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4335 }
4336 add_local_decl (caller, new_var);
4337 }
4338 }
4339
4340 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4341 have brought in or introduced any debug stmts for SRCVAR. */
4342
4343 static inline void
4344 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4345 {
4346 tree *remappedvarp = id->decl_map->get (srcvar);
4347
4348 if (!remappedvarp)
4349 return;
4350
4351 if (!VAR_P (*remappedvarp))
4352 return;
4353
4354 if (*remappedvarp == id->retvar)
4355 return;
4356
4357 tree tvar = target_for_debug_bind (*remappedvarp);
4358 if (!tvar)
4359 return;
4360
4361 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4362 id->call_stmt);
4363 gimple_seq_add_stmt (bindings, stmt);
4364 }
4365
4366 /* For each inlined variable for which we may have debug bind stmts,
4367 add before GSI a final debug stmt resetting it, marking the end of
4368 its life, so that var-tracking knows it doesn't have to compute
4369 further locations for it. */
4370
4371 static inline void
4372 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4373 {
4374 tree var;
4375 unsigned ix;
4376 gimple_seq bindings = NULL;
4377
4378 if (!gimple_in_ssa_p (id->src_cfun))
4379 return;
4380
4381 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4382 return;
4383
4384 for (var = DECL_ARGUMENTS (id->src_fn);
4385 var; var = DECL_CHAIN (var))
4386 reset_debug_binding (id, var, &bindings);
4387
4388 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4389 reset_debug_binding (id, var, &bindings);
4390
4391 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4392 }
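
/* Illustrative effect: for an inlined local "x" that may have debug bind
   stmts, a final "# DEBUG x => NULL" bind is inserted before GSI, marking
   the end of x's life so var-tracking stops computing locations for the
   inlined copy of "x".  */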
4393
4394 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4395
4396 static bool
4397 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4398 {
4399 tree use_retvar;
4400 tree fn;
4401 hash_map<tree, tree> *dst;
4402 hash_map<tree, tree> *st = NULL;
4403 tree return_slot;
4404 tree modify_dest;
4405 struct cgraph_edge *cg_edge;
4406 cgraph_inline_failed_t reason;
4407 basic_block return_block;
4408 edge e;
4409 gimple_stmt_iterator gsi, stmt_gsi;
4410 bool successfully_inlined = false;
4411 bool purge_dead_abnormal_edges;
4412 gcall *call_stmt;
4413 unsigned int prop_mask, src_properties;
4414 struct function *dst_cfun;
4415 tree simduid;
4416 use_operand_p use;
4417 gimple *simtenter_stmt = NULL;
4418 vec<tree> *simtvars_save;
4419
4420 /* The gimplifier uses input_location in too many places, such as
4421 internal_get_tmp_var (). */
4422 location_t saved_location = input_location;
4423 input_location = gimple_location (stmt);
4424
4425 /* From here on, we're only interested in CALL_EXPRs. */
4426 call_stmt = dyn_cast <gcall *> (stmt);
4427 if (!call_stmt)
4428 goto egress;
4429
4430 cg_edge = id->dst_node->get_edge (stmt);
4431 gcc_checking_assert (cg_edge);
4432 /* First, see if we can figure out what function is being called.
4433 If we cannot, then there is no hope of inlining the function. */
4434 if (cg_edge->indirect_unknown_callee)
4435 goto egress;
4436 fn = cg_edge->callee->decl;
4437 gcc_checking_assert (fn);
4438
4439 /* If FN is a declaration of a function in a nested scope that was
4440 globally declared inline, we don't set its DECL_INITIAL.
4441 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4442 C++ front-end uses it for cdtors to refer to their internal
4443 declarations, that are not real functions. Fortunately those
4444 don't have trees to be saved, so we can tell by checking their
4445 gimple_body. */
4446 if (!DECL_INITIAL (fn)
4447 && DECL_ABSTRACT_ORIGIN (fn)
4448 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4449 fn = DECL_ABSTRACT_ORIGIN (fn);
4450
4451 /* Don't try to inline functions that are not well-suited to inlining. */
4452 if (cg_edge->inline_failed)
4453 {
4454 reason = cg_edge->inline_failed;
4455 /* If this call was originally indirect, we do not want to emit any
4456 inlining related warnings or sorry messages because there are no
4457 guarantees regarding those. */
4458 if (cg_edge->indirect_inlining_edge)
4459 goto egress;
4460
4461 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4462 /* For extern inline functions that get redefined we have always
4463 silently ignored the always_inline flag. Better behavior would
4464 be to be able to keep both bodies and use the extern inline body
4465 for inlining, but we can't do that because frontends overwrite
4466 the body. */
4467 && !cg_edge->callee->local.redefined_extern_inline
4468 /* During early inline pass, report only when optimization is
4469 not turned on. */
4470 && (symtab->global_info_ready
4471 || !optimize
4472 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4473 /* PR 20090218-1_0.c. Body can be provided by another module. */
4474 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4475 {
4476 error ("inlining failed in call to always_inline %q+F: %s", fn,
4477 cgraph_inline_failed_string (reason));
4478 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4479 inform (gimple_location (stmt), "called from here");
4480 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4481 inform (DECL_SOURCE_LOCATION (cfun->decl),
4482 "called from this function");
4483 }
4484 else if (warn_inline
4485 && DECL_DECLARED_INLINE_P (fn)
4486 && !DECL_NO_INLINE_WARNING_P (fn)
4487 && !DECL_IN_SYSTEM_HEADER (fn)
4488 && reason != CIF_UNSPECIFIED
4489 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4490 /* Do not warn about not inlined recursive calls. */
4491 && !cg_edge->recursive_p ()
4492 /* Avoid warnings during early inline pass. */
4493 && symtab->global_info_ready)
4494 {
4495 auto_diagnostic_group d;
4496 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4497 fn, _(cgraph_inline_failed_string (reason))))
4498 {
4499 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4500 inform (gimple_location (stmt), "called from here");
4501 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4502 inform (DECL_SOURCE_LOCATION (cfun->decl),
4503 "called from this function");
4504 }
4505 }
4506 goto egress;
4507 }
4508 id->src_node = cg_edge->callee;
4509
4510 /* If the callee is a thunk, all we need to do is adjust the THIS
4511 pointer and redirect to the function being thunked. */
4512 if (id->src_node->thunk.thunk_p)
4513 {
4514 cgraph_edge *edge;
4515 tree virtual_offset = NULL;
4516 profile_count count = cg_edge->count;
4517 tree op;
4518 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4519
4520 cg_edge->remove ();
4521 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4522 gimple_uid (stmt),
4523 profile_count::one (),
4524 profile_count::one (),
4525 true);
4526 edge->count = count;
4527 if (id->src_node->thunk.virtual_offset_p)
4528 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4529 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4530 NULL);
4531 gsi_insert_before (&iter, gimple_build_assign (op,
4532 gimple_call_arg (stmt, 0)),
4533 GSI_NEW_STMT);
4534 gcc_assert (id->src_node->thunk.this_adjusting);
4535 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4536 virtual_offset, id->src_node->thunk.indirect_offset);
4537
4538 gimple_call_set_arg (stmt, 0, op);
4539 gimple_call_set_fndecl (stmt, edge->callee->decl);
4540 update_stmt (stmt);
4541 id->src_node->remove ();
4542 expand_call_inline (bb, stmt, id);
4543 maybe_remove_unused_call_args (cfun, stmt);
4544 return true;
4545 }
4546 fn = cg_edge->callee->decl;
4547 cg_edge->callee->get_untransformed_body ();
4548
4549 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4550 cg_edge->callee->verify ();
4551
4552 /* We will be inlining this callee. */
4553 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4554
4555 /* Update the caller's EH personality. */
4556 if (DECL_FUNCTION_PERSONALITY (fn))
4557 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4558 = DECL_FUNCTION_PERSONALITY (fn);
4559
4560 /* Split the block before the GIMPLE_CALL. */
4561 stmt_gsi = gsi_for_stmt (stmt);
4562 gsi_prev (&stmt_gsi);
4563 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4564 bb = e->src;
4565 return_block = e->dest;
4566 remove_edge (e);
4567
4568 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4569 been the source of abnormal edges. In this case, schedule
4570 the removal of dead abnormal edges. */
4571 gsi = gsi_start_bb (return_block);
4572 gsi_next (&gsi);
4573 purge_dead_abnormal_edges = gsi_end_p (gsi);
4574
4575 stmt_gsi = gsi_start_bb (return_block);
4576
4577 /* Build a block containing code to initialize the arguments, the
4578 actual inline expansion of the body, and a label for the return
4579 statements within the function to jump to. The type of the
4580 statement expression is the return type of the function call.
4581 ??? If the call does not have an associated block then we will
4582 remap all callee blocks to NULL, effectively dropping most of
4583 its debug information. This should only happen for calls to
4584 artificial decls inserted by the compiler itself. We need to
4585 either link the inlined blocks into the caller block tree or
4586 not refer to them in any way to not break GC for locations. */
4587 if (gimple_block (stmt))
4588 {
4589 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4590 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4591 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4592 if (loc == UNKNOWN_LOCATION)
4593 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4594 if (loc == UNKNOWN_LOCATION)
4595 loc = BUILTINS_LOCATION;
4596 id->block = make_node (BLOCK);
4597 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4598 BLOCK_SOURCE_LOCATION (id->block) = loc;
4599 prepend_lexical_block (gimple_block (stmt), id->block);
4600 }
4601
4602 /* Local declarations will be replaced by their equivalents in this map. */
4603 st = id->decl_map;
4604 id->decl_map = new hash_map<tree, tree>;
4605 dst = id->debug_map;
4606 id->debug_map = NULL;
4607 if (flag_stack_reuse != SR_NONE)
4608 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4609
4610 /* Record the function we are about to inline. */
4611 id->src_fn = fn;
4612 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4613 id->reset_location = DECL_IGNORED_P (fn);
4614 id->call_stmt = call_stmt;
4615
4616 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4617 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4618 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4619 simtvars_save = id->dst_simt_vars;
4620 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4621 && (simduid = bb->loop_father->simduid) != NULL_TREE
4622 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4623 && single_imm_use (simduid, &use, &simtenter_stmt)
4624 && is_gimple_call (simtenter_stmt)
4625 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4626 vec_alloc (id->dst_simt_vars, 0);
4627 else
4628 id->dst_simt_vars = NULL;
4629
4630 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4631 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4632
4633 /* If the src function contains an IFN_VA_ARG, then so will the dst
4634 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4635 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4636 src_properties = id->src_cfun->curr_properties & prop_mask;
4637 if (src_properties != prop_mask)
4638 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4639
4640 gcc_assert (!id->src_cfun->after_inlining);
4641
4642 id->entry_bb = bb;
4643 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4644 {
4645 gimple_stmt_iterator si = gsi_last_bb (bb);
4646 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4647 NOT_TAKEN),
4648 GSI_NEW_STMT);
4649 }
4650 initialize_inlined_parameters (id, stmt, fn, bb);
4651 if (debug_nonbind_markers_p && debug_inline_points && id->block
4652 && inlined_function_outer_scope_p (id->block))
4653 {
4654 gimple_stmt_iterator si = gsi_last_bb (bb);
4655 gsi_insert_after (&si, gimple_build_debug_inline_entry
4656 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4657 GSI_NEW_STMT);
4658 }
4659
4660 if (DECL_INITIAL (fn))
4661 {
4662 if (gimple_block (stmt))
4663 {
4664 tree *var;
4665
4666 prepend_lexical_block (id->block,
4667 remap_blocks (DECL_INITIAL (fn), id));
4668 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4669 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4670 == NULL_TREE));
4671 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4672 otherwise, for DWARF, the DW_TAG_formal_parameter DIEs will not be
4673 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4674 under it. The parameters can then be evaluated in the debugger,
4675 but don't show up in backtraces. */
4676 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4677 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4678 {
4679 tree v = *var;
4680 *var = TREE_CHAIN (v);
4681 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4682 BLOCK_VARS (id->block) = v;
4683 }
4684 else
4685 var = &TREE_CHAIN (*var);
4686 }
4687 else
4688 remap_blocks_to_null (DECL_INITIAL (fn), id);
4689 }
4690
4691 /* Return statements in the function body will be replaced by jumps
4692 to the RET_LABEL. */
4693 gcc_assert (DECL_INITIAL (fn));
4694 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4695
4696 /* Find the LHS to which the result of this call is assigned. */
4697 return_slot = NULL;
4698 if (gimple_call_lhs (stmt))
4699 {
4700 modify_dest = gimple_call_lhs (stmt);
4701
4702 /* The function which we are inlining might not return a value,
4703 in which case we should issue a warning that the function
4704 does not return a value. In that case the optimizers will
4705 see that the variable to which the value is assigned was not
4706 initialized. We do not want to issue a warning about that
4707 uninitialized variable. */
4708 if (DECL_P (modify_dest))
4709 TREE_NO_WARNING (modify_dest) = 1;
4710
4711 if (gimple_call_return_slot_opt_p (call_stmt))
4712 {
4713 return_slot = modify_dest;
4714 modify_dest = NULL;
4715 }
4716 }
4717 else
4718 modify_dest = NULL;
4719
4720 /* If we are inlining a call to the C++ operator new, we don't want
4721 to use type based alias analysis on the return value. Otherwise
4722 we may get confused if the compiler sees that the inlined new
4723 function returns a pointer which was just deleted. See bug
4724 33407. */
4725 if (DECL_IS_OPERATOR_NEW (fn))
4726 {
4727 return_slot = NULL;
4728 modify_dest = NULL;
4729 }
4730
4731 /* Declare the return variable for the function. */
4732 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4733
4734 /* Add local vars in this inlined callee to caller. */
4735 add_local_variables (id->src_cfun, cfun, id);
4736
4737 if (dump_enabled_p ())
4738 {
4739 char buf[128];
4740 snprintf (buf, sizeof(buf), "%4.2f",
4741 cg_edge->sreal_frequency ().to_double ());
4742 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4743 call_stmt,
4744 "Inlining %C to %C with frequency %s\n",
4745 id->src_node, id->dst_node, buf);
4746 if (dump_file && (dump_flags & TDF_DETAILS))
4747 {
4748 id->src_node->dump (dump_file);
4749 id->dst_node->dump (dump_file);
4750 }
4751 }
4752
4753 /* This is it. Duplicate the callee body. Assume callee is
4754 pre-gimplified. Note that we must not alter the caller
4755 function in any way before this point, as this CALL_EXPR may be
4756 a self-referential call; if we're calling ourselves, we need to
4757 duplicate our body before altering anything. */
4758 copy_body (id, bb, return_block, NULL);
4759
4760 reset_debug_bindings (id, stmt_gsi);
4761
4762 if (flag_stack_reuse != SR_NONE)
4763 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4764 if (!TREE_THIS_VOLATILE (p))
4765 {
4766 tree *varp = id->decl_map->get (p);
4767 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4768 {
4769 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4770 gimple *clobber_stmt;
4771 TREE_THIS_VOLATILE (clobber) = 1;
4772 clobber_stmt = gimple_build_assign (*varp, clobber);
4773 gimple_set_location (clobber_stmt, gimple_location (stmt));
4774 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4775 }
4776 }
4777
4778 /* Reset the escaped solution. */
4779 if (cfun->gimple_df)
4780 pt_solution_reset (&cfun->gimple_df->escaped);
4781
4782 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4783 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4784 {
4785 size_t nargs = gimple_call_num_args (simtenter_stmt);
4786 vec<tree> *vars = id->dst_simt_vars;
4787 auto_vec<tree> newargs (nargs + vars->length ());
4788 for (size_t i = 0; i < nargs; i++)
4789 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4790 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4791 {
4792 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4793 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4794 }
4795 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4796 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4797 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4798 gsi_replace (&gsi, g, false);
4799 }
4800 vec_free (id->dst_simt_vars);
4801 id->dst_simt_vars = simtvars_save;
4802
4803 /* Clean up. */
4804 if (id->debug_map)
4805 {
4806 delete id->debug_map;
4807 id->debug_map = dst;
4808 }
4809 delete id->decl_map;
4810 id->decl_map = st;
4811
4812 /* Unlink the call's virtual operands before replacing it. */
4813 unlink_stmt_vdef (stmt);
4814 if (gimple_vdef (stmt)
4815 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4816 release_ssa_name (gimple_vdef (stmt));
4817
4818 /* If the inlined function returns a result that we care about,
4819 substitute the GIMPLE_CALL with an assignment of the return
4820 variable to the LHS of the call. That is, if STMT was
4821 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4822 if (use_retvar && gimple_call_lhs (stmt))
4823 {
4824 gimple *old_stmt = stmt;
4825 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4826 gimple_set_location (stmt, gimple_location (old_stmt));
4827 gsi_replace (&stmt_gsi, stmt, false);
4828 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4829 /* Append a clobber for id->retvar if easily possible. */
4830 if (flag_stack_reuse != SR_NONE
4831 && id->retvar
4832 && VAR_P (id->retvar)
4833 && id->retvar != return_slot
4834 && id->retvar != modify_dest
4835 && !TREE_THIS_VOLATILE (id->retvar)
4836 && !is_gimple_reg (id->retvar)
4837 && !stmt_ends_bb_p (stmt))
4838 {
4839 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4840 gimple *clobber_stmt;
4841 TREE_THIS_VOLATILE (clobber) = 1;
4842 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4843 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4844 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4845 }
4846 }
4847 else
4848 {
4849 /* Handle the case of inlining a function with no return
4850 statement, which causes the return value to become undefined. */
4851 if (gimple_call_lhs (stmt)
4852 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4853 {
4854 tree name = gimple_call_lhs (stmt);
4855 tree var = SSA_NAME_VAR (name);
4856 tree def = var ? ssa_default_def (cfun, var) : NULL;
4857
4858 if (def)
4859 {
4860 /* If the variable is used undefined, make this name
4861 undefined via a move. */
4862 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4863 gsi_replace (&stmt_gsi, stmt, true);
4864 }
4865 else
4866 {
4867 if (!var)
4868 {
4869 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4870 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4871 }
4872 /* Otherwise make this variable undefined. */
4873 gsi_remove (&stmt_gsi, true);
4874 set_ssa_default_def (cfun, var, name);
4875 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4876 }
4877 }
4878 /* Replace with a clobber for id->retvar. */
4879 else if (flag_stack_reuse != SR_NONE
4880 && id->retvar
4881 && VAR_P (id->retvar)
4882 && id->retvar != return_slot
4883 && id->retvar != modify_dest
4884 && !TREE_THIS_VOLATILE (id->retvar)
4885 && !is_gimple_reg (id->retvar))
4886 {
4887 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4888 gimple *clobber_stmt;
4889 TREE_THIS_VOLATILE (clobber) = 1;
4890 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4891 gimple_set_location (clobber_stmt, gimple_location (stmt));
4892 gsi_replace (&stmt_gsi, clobber_stmt, false);
4893 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4894 }
4895 else
4896 gsi_remove (&stmt_gsi, true);
4897 }
4898
4899 if (purge_dead_abnormal_edges)
4900 {
4901 gimple_purge_dead_eh_edges (return_block);
4902 gimple_purge_dead_abnormal_call_edges (return_block);
4903 }
4904
4905 /* If the value of the new expression is ignored, that's OK. We
4906 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4907 the equivalent inlined version either. */
4908 if (is_gimple_assign (stmt))
4909 {
4910 gcc_assert (gimple_assign_single_p (stmt)
4911 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4912 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4913 }
4914
4915 id->add_clobbers_to_eh_landing_pads = 0;
4916
4917 /* Output the inlining info for this abstract function, since it has been
4918 inlined. If we don't do this now, we can lose the information about the
4919 variables in the function when the blocks get blown away as soon as we
4920 remove the cgraph node. */
4921 if (gimple_block (stmt))
4922 (*debug_hooks->outlining_inline_function) (fn);
4923
4924 /* Update callgraph if needed. */
4925 cg_edge->callee->remove ();
4926
4927 id->block = NULL_TREE;
4928 id->retvar = NULL_TREE;
4929 successfully_inlined = true;
4930
4931 egress:
4932 input_location = saved_location;
4933 return successfully_inlined;
4934 }
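
/* Overall shape of the transformation performed above (illustrative):

     a = foo (b);

   becomes, roughly,

     <initialize foo's remapped parameters from b>
     <copy of foo's body, with returns redirected to the return block>
     a = retval;   <- only if foo returns a value that is used

   where "retval" stands for the variable chosen by
   declare_return_variable.  */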
4935
4936 /* Expand call statements reachable from STMT_P.
4937 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4938 in a MODIFY_EXPR. */
4939
4940 static bool
4941 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4942 {
4943 gimple_stmt_iterator gsi;
4944 bool inlined = false;
4945
4946 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4947 {
4948 gimple *stmt = gsi_stmt (gsi);
4949 gsi_prev (&gsi);
4950
4951 if (is_gimple_call (stmt)
4952 && !gimple_call_internal_p (stmt))
4953 inlined |= expand_call_inline (bb, stmt, id);
4954 }
4955
4956 return inlined;
4957 }
4958
4959
4960 /* Walk all basic blocks created after FIRST and try to fold every statement
4961 in the STATEMENTS pointer set. */
4962
4963 static void
4964 fold_marked_statements (int first, hash_set<gimple *> *statements)
4965 {
4966 for (; first < last_basic_block_for_fn (cfun); first++)
4967 if (BASIC_BLOCK_FOR_FN (cfun, first))
4968 {
4969 gimple_stmt_iterator gsi;
4970
4971 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4972 !gsi_end_p (gsi);
4973 gsi_next (&gsi))
4974 if (statements->contains (gsi_stmt (gsi)))
4975 {
4976 gimple *old_stmt = gsi_stmt (gsi);
4977 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4978
4979 if (old_decl && fndecl_built_in_p (old_decl))
4980 {
4981 /* Folding builtins can create multiple instructions;
4982 we need to look at all of them. */
4983 gimple_stmt_iterator i2 = gsi;
4984 gsi_prev (&i2);
4985 if (fold_stmt (&gsi))
4986 {
4987 gimple *new_stmt;
4988 /* If a builtin at the end of a bb folded into nothing,
4989 the following loop won't work. */
4990 if (gsi_end_p (gsi))
4991 {
4992 cgraph_update_edges_for_call_stmt (old_stmt,
4993 old_decl, NULL);
4994 break;
4995 }
4996 if (gsi_end_p (i2))
4997 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4998 else
4999 gsi_next (&i2);
5000 while (1)
5001 {
5002 new_stmt = gsi_stmt (i2);
5003 update_stmt (new_stmt);
5004 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5005 new_stmt);
5006
5007 if (new_stmt == gsi_stmt (gsi))
5008 {
5009 /* It is okay to check only for the very last
5010 of these statements. If it is a throwing
5011 statement, nothing will change. If it isn't,
5012 this can remove EH edges. The only way this
5013 could be wrong is if some intermediate stmts
5014 threw, but not the last one. That would mean
5015 we'd have to split the block, which we can't
5016 do here and we'd lose anyway. And as builtins
5017 probably never throw, this is all
5018 moot anyway. */
5019 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5020 new_stmt))
5021 gimple_purge_dead_eh_edges (
5022 BASIC_BLOCK_FOR_FN (cfun, first));
5023 break;
5024 }
5025 gsi_next (&i2);
5026 }
5027 }
5028 }
5029 else if (fold_stmt (&gsi))
5030 {
5031 /* Re-read the statement from GSI as fold_stmt() may
5032 have changed it. */
5033 gimple *new_stmt = gsi_stmt (gsi);
5034 update_stmt (new_stmt);
5035
5036 if (is_gimple_call (old_stmt)
5037 || is_gimple_call (new_stmt))
5038 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5039 new_stmt);
5040
5041 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5042 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5043 first));
5044 }
5045 }
5046 }
5047 }
5048
5049 /* Expand calls to inline functions in the body of FN. */
5050
5051 unsigned int
5052 optimize_inline_calls (tree fn)
5053 {
5054 copy_body_data id;
5055 basic_block bb;
5056 int last = n_basic_blocks_for_fn (cfun);
5057 bool inlined_p = false;
5058
5059 /* Clear out ID. */
5060 memset (&id, 0, sizeof (id));
5061
5062 id.src_node = id.dst_node = cgraph_node::get (fn);
5063 gcc_assert (id.dst_node->definition);
5064 id.dst_fn = fn;
5065 /* Or any functions that aren't finished yet. */
5066 if (current_function_decl)
5067 id.dst_fn = current_function_decl;
5068
5069 id.copy_decl = copy_decl_maybe_to_var;
5070 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5071 id.transform_new_cfg = false;
5072 id.transform_return_to_modify = true;
5073 id.transform_parameter = true;
5074 id.transform_lang_insert_block = NULL;
5075 id.statements_to_fold = new hash_set<gimple *>;
5076
5077 push_gimplify_context ();
5078
5079 /* We make no attempts to keep dominance info up-to-date. */
5080 free_dominance_info (CDI_DOMINATORS);
5081 free_dominance_info (CDI_POST_DOMINATORS);
5082
5083 /* Register specific gimple functions. */
5084 gimple_register_cfg_hooks ();
5085
5086 /* Reach the trees by walking over the CFG, and note the
5087 enclosing basic-blocks in the call edges. */
5088 /* We walk the blocks going forward, because inlined function bodies
5089 will split id->current_basic_block, and the new blocks will
5090 follow it; we'll trudge through them, processing their CALL_EXPRs
5091 along the way. */
5092 FOR_EACH_BB_FN (bb, cfun)
5093 inlined_p |= gimple_expand_calls_inline (bb, &id);
5094
5095 pop_gimplify_context (NULL);
5096
5097 if (flag_checking)
5098 {
5099 struct cgraph_edge *e;
5100
5101 id.dst_node->verify ();
5102
5103 /* Double check that we inlined everything we are supposed to inline. */
5104 for (e = id.dst_node->callees; e; e = e->next_callee)
5105 gcc_assert (e->inline_failed);
5106 }
5107
5108 /* Fold queued statements. */
5109 update_max_bb_count ();
5110 fold_marked_statements (last, id.statements_to_fold);
5111 delete id.statements_to_fold;
5112
5113 gcc_assert (!id.debug_stmts.exists ());
5114
5115 /* If we didn't inline into the function there is nothing to do. */
5116 if (!inlined_p)
5117 return 0;
5118
5119 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5120 number_blocks (fn);
5121
5122 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5123
5124 if (flag_checking)
5125 id.dst_node->verify ();
5126
5127 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5128 not possible yet - the IPA passes might make various functions not
5129 throw and they don't care to proactively update local EH info. This is
5130 done later in the fixup_cfg pass, which also executes the verification. */
5131 return (TODO_update_ssa
5132 | TODO_cleanup_cfg
5133 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5134 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5135 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5136 ? TODO_rebuild_frequencies : 0));
5137 }
5138
5139 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5140
5141 tree
5142 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5143 {
5144 enum tree_code code = TREE_CODE (*tp);
5145 enum tree_code_class cl = TREE_CODE_CLASS (code);
5146
5147 /* We make copies of most nodes. */
5148 if (IS_EXPR_CODE_CLASS (cl)
5149 || code == TREE_LIST
5150 || code == TREE_VEC
5151 || code == TYPE_DECL
5152 || code == OMP_CLAUSE)
5153 {
5154 /* Because the chain gets clobbered when we make a copy, we save it
5155 here. */
5156 tree chain = NULL_TREE, new_tree;
5157
5158 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5159 chain = TREE_CHAIN (*tp);
5160
5161 /* Copy the node. */
5162 new_tree = copy_node (*tp);
5163
5164 *tp = new_tree;
5165
5166 /* Now, restore the chain, if appropriate. That will cause
5167 walk_tree to walk into the chain as well. */
5168 if (code == PARM_DECL
5169 || code == TREE_LIST
5170 || code == OMP_CLAUSE)
5171 TREE_CHAIN (*tp) = chain;
5172
5173 /* For now, we don't update BLOCKs when we make copies. So, we
5174 have to nullify all BIND_EXPRs. */
5175 if (TREE_CODE (*tp) == BIND_EXPR)
5176 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5177 }
5178 else if (code == CONSTRUCTOR)
5179 {
5180 /* CONSTRUCTOR nodes need special handling because
5181 we need to duplicate the vector of elements. */
5182 tree new_tree;
5183
5184 new_tree = copy_node (*tp);
5185 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5186 *tp = new_tree;
5187 }
5188 else if (code == STATEMENT_LIST)
5189 /* We used to just abort on STATEMENT_LIST, but we can run into them
5190 with statement-expressions (c++/40975). */
5191 copy_statement_list (tp);
5192 else if (TREE_CODE_CLASS (code) == tcc_type)
5193 *walk_subtrees = 0;
5194 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5195 *walk_subtrees = 0;
5196 else if (TREE_CODE_CLASS (code) == tcc_constant)
5197 *walk_subtrees = 0;
5198 return NULL_TREE;
5199 }
5200
5201 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5202 information indicating to what new SAVE_EXPR this one should be mapped,
5203 use that one. Otherwise, create a new node and enter it in ST. If the
5204 SAVE_EXPR has already been remapped, clear *WALK_SUBTREES. */
5205
5206 static void
5207 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5208 {
5209 tree *n;
5210 tree t;
5211
5212 /* See if we already encountered this SAVE_EXPR. */
5213 n = st->get (*tp);
5214
5215 /* If we didn't already remap this SAVE_EXPR, do so now. */
5216 if (!n)
5217 {
5218 t = copy_node (*tp);
5219
5220 /* Remember this SAVE_EXPR. */
5221 st->put (*tp, t);
5222 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5223 st->put (t, t);
5224 }
5225 else
5226 {
5227 /* We've already walked into this SAVE_EXPR; don't do it again. */
5228 *walk_subtrees = 0;
5229 t = *n;
5230 }
5231
5232 /* Replace this SAVE_EXPR with the copy. */
5233 *tp = t;
5234 }
5235
5236 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5237 label, copies the declaration and enters it in the decl map of the
5238 copy_body_data carried by WI->info. */
5239
5240 static tree
5241 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5242 bool *handled_ops_p ATTRIBUTE_UNUSED,
5243 struct walk_stmt_info *wi)
5244 {
5245 copy_body_data *id = (copy_body_data *) wi->info;
5246 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5247
5248 if (stmt)
5249 {
5250 tree decl = gimple_label_label (stmt);
5251
5252 /* Copy the decl and remember the copy. */
5253 insert_decl_map (id, decl, id->copy_decl (decl, id));
5254 }
5255
5256 return NULL_TREE;
5257 }
5258
5259 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5260 struct walk_stmt_info *wi);
5261
5262 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5263 Using the decl map in the copy_body_data carried by DATA (really a
5264 'struct walk_stmt_info *'), remaps all local declarations to appropriate
5265 replacements in gimple operands. */
5266
5267 static tree
5268 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5269 {
5270 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5271 copy_body_data *id = (copy_body_data *) wi->info;
5272 hash_map<tree, tree> *st = id->decl_map;
5273 tree *n;
5274 tree expr = *tp;
5275
5276 /* For recursive invocations this is no longer the LHS itself. */
5277 bool is_lhs = wi->is_lhs;
5278 wi->is_lhs = false;
5279
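      /* Remap SSA names directly; if this operand is the LHS of the current
	 statement, also make the remapped name's defining statement point to it.  */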
5280 if (TREE_CODE (expr) == SSA_NAME)
5281 {
5282 *tp = remap_ssa_name (*tp, id);
5283 *walk_subtrees = 0;
5284 if (is_lhs)
5285 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5286 }
5287 /* Only a local declaration (variable or label). */
5288 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5289 || TREE_CODE (expr) == LABEL_DECL)
5290 {
5291 /* Lookup the declaration. */
5292 n = st->get (expr);
5293
5294 /* If it's there, remap it. */
5295 if (n)
5296 *tp = *n;
5297 *walk_subtrees = 0;
5298 }
5299 else if (TREE_CODE (expr) == STATEMENT_LIST
5300 || TREE_CODE (expr) == BIND_EXPR
5301 || TREE_CODE (expr) == SAVE_EXPR)
5302 gcc_unreachable ();
5303 else if (TREE_CODE (expr) == TARGET_EXPR)
5304 {
5305 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5306 It's OK for this to happen if it was part of a subtree that
5307 isn't immediately expanded, such as operand 2 of another
5308 TARGET_EXPR. */
5309 if (!TREE_OPERAND (expr, 1))
5310 {
5311 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5312 TREE_OPERAND (expr, 3) = NULL_TREE;
5313 }
5314 }
5315 else if (TREE_CODE (expr) == OMP_CLAUSE)
5316 {
5317 /* Before the omplower pass completes, some OMP clauses can contain
5318 sequences that are neither copied by gimple_seq_copy nor walked by
5319 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5320 in those situations, we have to copy and process them explicitly. */
5321
5322 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5323 {
5324 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5325 seq = duplicate_remap_omp_clause_seq (seq, wi);
5326 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5327 }
5328 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5329 {
5330 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5331 seq = duplicate_remap_omp_clause_seq (seq, wi);
5332 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5333 }
5334 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5335 {
5336 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5337 seq = duplicate_remap_omp_clause_seq (seq, wi);
5338 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5339 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5340 seq = duplicate_remap_omp_clause_seq (seq, wi);
5341 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5342 }
5343 }
5344
5345 /* Keep iterating. */
5346 return NULL_TREE;
5347 }
5348
5349
5350 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5351 Using the decl map in the copy_body_data carried by WI->info, remaps
5352 all local declarations to appropriate replacements in gimple
5353 statements. */
5354
5355 static tree
5356 replace_locals_stmt (gimple_stmt_iterator *gsip,
5357 bool *handled_ops_p ATTRIBUTE_UNUSED,
5358 struct walk_stmt_info *wi)
5359 {
5360 copy_body_data *id = (copy_body_data *) wi->info;
5361 gimple *gs = gsi_stmt (*gsip);
5362
5363 if (gbind *stmt = dyn_cast <gbind *> (gs))
5364 {
5365 tree block = gimple_bind_block (stmt);
5366
5367 if (block)
5368 {
5369 remap_block (&block, id);
5370 gimple_bind_set_block (stmt, block);
5371 }
5372
5373 /* This will remap a lot of the same decls again, but this should be
5374 harmless. */
5375 if (gimple_bind_vars (stmt))
5376 {
5377 tree old_var, decls = gimple_bind_vars (stmt);
5378
5379 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5380 if (!can_be_nonlocal (old_var, id)
5381 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5382 remap_decl (old_var, id);
5383
5384 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5385 id->prevent_decl_creation_for_types = true;
5386 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5387 id->prevent_decl_creation_for_types = false;
5388 }
5389 }
5390
5391 /* Keep iterating. */
5392 return NULL_TREE;
5393 }
5394
5395 /* Create a copy of SEQ and remap all decls in it. */
5396
5397 static gimple_seq
5398 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5399 {
5400 if (!seq)
5401 return NULL;
5402
5403 /* Any labels in an OMP sequence can only be referred to from within the
5404 sequence itself, so both the label marking and the remapping can be done here. */
5405 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5406 gimple_seq copy = gimple_seq_copy (seq);
5407 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5408 return copy;
5409 }
5410
5411 /* Copies everything in SEQ and replaces variables and labels local to
5412 current_function_decl. */
5413
5414 gimple_seq
5415 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5416 {
5417 copy_body_data id;
5418 struct walk_stmt_info wi;
5419 gimple_seq copy;
5420
5421 /* There's nothing to do for NULL_TREE. */
5422 if (seq == NULL)
5423 return seq;
5424
5425 /* Set up ID. */
5426 memset (&id, 0, sizeof (id));
5427 id.src_fn = current_function_decl;
5428 id.dst_fn = current_function_decl;
5429 id.src_cfun = cfun;
5430 id.decl_map = new hash_map<tree, tree>;
5431 id.debug_map = NULL;
5432
5433 id.copy_decl = copy_decl_no_change;
5434 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5435 id.transform_new_cfg = false;
5436 id.transform_return_to_modify = false;
5437 id.transform_parameter = false;
5438 id.transform_lang_insert_block = NULL;
5439
5440 /* Walk the tree once to find local labels. */
5441 memset (&wi, 0, sizeof (wi));
5442 hash_set<tree> visited;
5443 wi.info = &id;
5444 wi.pset = &visited;
5445 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5446
5447 copy = gimple_seq_copy (seq);
5448
5449 /* Walk the copy, remapping decls. */
5450 memset (&wi, 0, sizeof (wi));
5451 wi.info = &id;
5452 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5453
5454 /* Clean up. */
5455 delete id.decl_map;
5456 if (id.debug_map)
5457 delete id.debug_map;
5458 if (id.dependence_map)
5459 {
5460 delete id.dependence_map;
5461 id.dependence_map = NULL;
5462 }
5463
5464 return copy;
5465 }
5466
5467
5468 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5469
5470 static tree
5471 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5472 {
5473 if (*tp == data)
5474 return (tree) data;
5475 else
5476 return NULL;
5477 }
5478
5479 DEBUG_FUNCTION bool
5480 debug_find_tree (tree top, tree search)
5481 {
5482 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5483 }
5484
5485
5486 /* Declare the variables created by the inliner. Add all the variables in
5487 VARS to BLOCK. */
5488
5489 static void
5490 declare_inline_vars (tree block, tree vars)
5491 {
5492 tree t;
5493 for (t = vars; t; t = DECL_CHAIN (t))
5494 {
5495 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5496 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5497 add_local_decl (cfun, t);
5498 }
5499
5500 if (block)
5501 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5502 }
5503
5504 /* Finish up the copy COPY of DECL. DECL originally lived in ID->src_fn;
5505 COPY will live in ID->dst_fn. Fix up COPY's debug info, RTL, mode and
5506 context accordingly and return it. */
5507
5508 tree
5509 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5510 {
5511 /* Don't generate debug information for the copy if we wouldn't have
5512 generated it for the original either. */
5513 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5514 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5515
5516 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5517 declaration inspired this copy. */
5518 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5519
5520 /* The new variable/label has no RTL, yet. */
5521 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5522 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5523 SET_DECL_RTL (copy, 0);
5524 /* For vector typed decls make sure to update DECL_MODE according
5525 to the new function context. */
5526 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5527 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5528
5529 /* These args would always appear unused, if not for this. */
5530 TREE_USED (copy) = 1;
5531
5532 /* Set the context for the new declaration. */
5533 if (!DECL_CONTEXT (decl))
5534 /* Globals stay global. */
5535 ;
5536 else if (DECL_CONTEXT (decl) != id->src_fn)
5537 /* Things that weren't in the scope of the function we're inlining
5538 from aren't in the scope we're inlining to, either. */
5539 ;
5540 else if (TREE_STATIC (decl))
5541 /* Function-scoped static variables should stay in the original
5542 function. */
5543 ;
5544 else
5545 {
5546 /* Ordinary automatic local variables are now in the scope of the
5547 new function. */
5548 DECL_CONTEXT (copy) = id->dst_fn;
5549 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5550 {
5551 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5552 DECL_ATTRIBUTES (copy)
5553 = tree_cons (get_identifier ("omp simt private"), NULL,
5554 DECL_ATTRIBUTES (copy));
5555 id->dst_simt_vars->safe_push (copy);
5556 }
5557 }
5558
5559 return copy;
5560 }
5561
5562 static tree
5563 copy_decl_to_var (tree decl, copy_body_data *id)
5564 {
5565 tree copy, type;
5566
5567 gcc_assert (TREE_CODE (decl) == PARM_DECL
5568 || TREE_CODE (decl) == RESULT_DECL);
5569
5570 type = TREE_TYPE (decl);
5571
5572 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5573 VAR_DECL, DECL_NAME (decl), type);
5574 if (DECL_PT_UID_SET_P (decl))
5575 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5576 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5577 TREE_READONLY (copy) = TREE_READONLY (decl);
5578 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5579 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5580
5581 return copy_decl_for_dup_finish (id, decl, copy);
5582 }
5583
5584 /* Like copy_decl_to_var, but create a return slot object instead of a
5585 pointer variable for return by invisible reference. */
5586
5587 static tree
5588 copy_result_decl_to_var (tree decl, copy_body_data *id)
5589 {
5590 tree copy, type;
5591
5592 gcc_assert (TREE_CODE (decl) == PARM_DECL
5593 || TREE_CODE (decl) == RESULT_DECL);
5594
5595 type = TREE_TYPE (decl);
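     /* For a result returned by invisible reference, create the slot with the
	pointed-to type rather than the pointer type.  */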
5596 if (DECL_BY_REFERENCE (decl))
5597 type = TREE_TYPE (type);
5598
5599 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5600 VAR_DECL, DECL_NAME (decl), type);
5601 if (DECL_PT_UID_SET_P (decl))
5602 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5603 TREE_READONLY (copy) = TREE_READONLY (decl);
5604 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5605 if (!DECL_BY_REFERENCE (decl))
5606 {
5607 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5608 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5609 }
5610
5611 return copy_decl_for_dup_finish (id, decl, copy);
5612 }
5613
5614 tree
5615 copy_decl_no_change (tree decl, copy_body_data *id)
5616 {
5617 tree copy;
5618
5619 copy = copy_node (decl);
5620
5621 /* The COPY is not abstract; it will be generated in DST_FN. */
5622 DECL_ABSTRACT_P (copy) = false;
5623 lang_hooks.dup_lang_specific_decl (copy);
5624
5625 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5626 been taken; it's for internal bookkeeping in expand_goto_internal. */
5627 if (TREE_CODE (copy) == LABEL_DECL)
5628 {
5629 TREE_ADDRESSABLE (copy) = 0;
5630 LABEL_DECL_UID (copy) = -1;
5631 }
5632
5633 return copy_decl_for_dup_finish (id, decl, copy);
5634 }
5635
5636 static tree
5637 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5638 {
5639 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5640 return copy_decl_to_var (decl, id);
5641 else
5642 return copy_decl_no_change (decl, id);
5643 }
5644
5645 /* Return a copy of the function's argument tree. */
5646 static tree
5647 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5648 bitmap args_to_skip, tree *vars)
5649 {
5650 tree arg, *parg;
5651 tree new_parm = NULL;
5652 int i = 0;
5653
5654 parg = &new_parm;
5655
5656 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5657 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5658 {
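	/* This argument is kept: remap it and chain the result onto the new
	   parameter list.  */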
5659 tree new_tree = remap_decl (arg, id);
5660 if (TREE_CODE (new_tree) != PARM_DECL)
5661 new_tree = id->copy_decl (arg, id);
5662 lang_hooks.dup_lang_specific_decl (new_tree);
5663 *parg = new_tree;
5664 parg = &DECL_CHAIN (new_tree);
5665 }
5666 else if (!id->decl_map->get (arg))
5667 {
5668 /* Make an equivalent VAR_DECL. If the argument was used
5669 as a temporary variable later in the function, its uses will be
5670 replaced by the local variable. */
5671 tree var = copy_decl_to_var (arg, id);
5672 insert_decl_map (id, arg, var);
5673 /* Declare this new variable. */
5674 DECL_CHAIN (var) = *vars;
5675 *vars = var;
5676 }
5677 return new_parm;
5678 }
5679
5680 /* Return a copy of the function's static chain. */
5681 static tree
5682 copy_static_chain (tree static_chain, copy_body_data * id)
5683 {
5684 tree *chain_copy, *pvar;
5685
5686 chain_copy = &static_chain;
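   /* Remap each decl on the chain into the new function, preserving the
      chain links.  */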
5687 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5688 {
5689 tree new_tree = remap_decl (*pvar, id);
5690 lang_hooks.dup_lang_specific_decl (new_tree);
5691 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5692 *pvar = new_tree;
5693 }
5694 return static_chain;
5695 }
5696
5697 /* Return true if the function is allowed to be versioned.
5698 This is a guard for the versioning functionality. */
5699
5700 bool
5701 tree_versionable_function_p (tree fndecl)
5702 {
5703 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5704 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5705 }
5706
5707 /* Update clone info after duplication. */
5708
5709 static void
5710 update_clone_info (copy_body_data * id)
5711 {
5712 struct cgraph_node *node;
5713 if (!id->dst_node->clones)
5714 return;
5715 for (node = id->dst_node->clones; node != id->dst_node;)
5716 {
5717 /* First update replace maps to match the new body. */
5718 if (node->clone.tree_map)
5719 {
5720 unsigned int i;
5721 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5722 {
5723 struct ipa_replace_map *replace_info;
5724 replace_info = (*node->clone.tree_map)[i];
5725 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5726 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5727 }
5728 }
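      /* Advance to the next clone in preorder: descend into clones first,
	 then move to the next sibling, backtracking through clone_of links.  */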
5729 if (node->clones)
5730 node = node->clones;
5731 else if (node->next_sibling_clone)
5732 node = node->next_sibling_clone;
5733 else
5734 {
5735 while (node != id->dst_node && !node->next_sibling_clone)
5736 node = node->clone_of;
5737 if (node != id->dst_node)
5738 node = node->next_sibling_clone;
5739 }
5740 }
5741 }
5742
5743 /* Create a copy of a function's tree.
5744 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5745 of the original function and the new copied function
5746 respectively. In case we want to replace a DECL
5747 tree with another tree while duplicating the function's
5748 body, TREE_MAP represents the mapping between these
5749 trees. If UPDATE_CLONES is set, the call_stmt fields
5750 of edges of clones of the function will be updated.
5751
5752 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5753 from the new version.
5754 If SKIP_RETURN is true, the new version will return void.
5755 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5756 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5757 */
5758 void
5759 tree_function_versioning (tree old_decl, tree new_decl,
5760 vec<ipa_replace_map *, va_gc> *tree_map,
5761 bool update_clones, bitmap args_to_skip,
5762 bool skip_return, bitmap blocks_to_copy,
5763 basic_block new_entry)
5764 {
5765 struct cgraph_node *old_version_node;
5766 struct cgraph_node *new_version_node;
5767 copy_body_data id;
5768 tree p;
5769 unsigned i;
5770 struct ipa_replace_map *replace_info;
5771 basic_block old_entry_block, bb;
5772 auto_vec<gimple *, 10> init_stmts;
5773 tree vars = NULL_TREE;
5774 bitmap debug_args_to_skip = args_to_skip;
5775
5776 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5777 && TREE_CODE (new_decl) == FUNCTION_DECL);
5778 DECL_POSSIBLY_INLINED (old_decl) = 1;
5779
5780 old_version_node = cgraph_node::get (old_decl);
5781 gcc_checking_assert (old_version_node);
5782 new_version_node = cgraph_node::get (new_decl);
5783 gcc_checking_assert (new_version_node);
5784
5785 /* Copy over debug args. */
5786 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5787 {
5788 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5789 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5790 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5791 old_debug_args = decl_debug_args_lookup (old_decl);
5792 if (old_debug_args)
5793 {
5794 new_debug_args = decl_debug_args_insert (new_decl);
5795 *new_debug_args = vec_safe_copy (*old_debug_args);
5796 }
5797 }
5798
5799 /* Output the inlining info for this abstract function, since it has been
5800 inlined. If we don't do this now, we can lose the information about the
5801 variables in the function when the blocks get blown away as soon as we
5802 remove the cgraph node. */
5803 (*debug_hooks->outlining_inline_function) (old_decl);
5804
5805 DECL_ARTIFICIAL (new_decl) = 1;
5806 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5807 if (DECL_ORIGIN (old_decl) == old_decl)
5808 old_version_node->used_as_abstract_origin = true;
5809 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5810
5811 /* Prepare the data structures for the tree copy. */
5812 memset (&id, 0, sizeof (id));
5813
5814 /* Record statements that will need folding after the copy. */
5815 id.statements_to_fold = new hash_set<gimple *>;
5816
5817 id.decl_map = new hash_map<tree, tree>;
5818 id.debug_map = NULL;
5819 id.src_fn = old_decl;
5820 id.dst_fn = new_decl;
5821 id.src_node = old_version_node;
5822 id.dst_node = new_version_node;
5823 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5824 id.blocks_to_copy = blocks_to_copy;
5825
5826 id.copy_decl = copy_decl_no_change;
5827 id.transform_call_graph_edges
5828 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5829 id.transform_new_cfg = true;
5830 id.transform_return_to_modify = false;
5831 id.transform_parameter = false;
5832 id.transform_lang_insert_block = NULL;
5833
5834 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5835 (DECL_STRUCT_FUNCTION (old_decl));
5836 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5837 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5838 initialize_cfun (new_decl, old_decl,
5839 new_entry ? new_entry->count : old_entry_block->count);
5840 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5841 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5842 = id.src_cfun->gimple_df->ipa_pta;
5843
5844 /* Copy the function's static chain. */
5845 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5846 if (p)
5847 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5848 = copy_static_chain (p, &id);
5849
5850 /* If there's a tree_map, prepare for substitution. */
5851 if (tree_map)
5852 for (i = 0; i < tree_map->length (); i++)
5853 {
5854 gimple *init;
5855 replace_info = (*tree_map)[i];
5856 if (replace_info->replace_p)
5857 {
5858 int parm_num = -1;
5859 if (!replace_info->old_tree)
5860 {
5861 int p = replace_info->parm_num;
5862 tree parm;
5863 tree req_type, new_type;
5864
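	      /* Walk to the PARM_DECL that REPLACE_INFO->parm_num refers to.  */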
5865 for (parm = DECL_ARGUMENTS (old_decl); p;
5866 parm = DECL_CHAIN (parm))
5867 p--;
5868 replace_info->old_tree = parm;
5869 parm_num = replace_info->parm_num;
5870 req_type = TREE_TYPE (parm);
5871 new_type = TREE_TYPE (replace_info->new_tree);
5872 if (!useless_type_conversion_p (req_type, new_type))
5873 {
5874 if (fold_convertible_p (req_type, replace_info->new_tree))
5875 replace_info->new_tree
5876 = fold_build1 (NOP_EXPR, req_type,
5877 replace_info->new_tree);
5878 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5879 replace_info->new_tree
5880 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5881 replace_info->new_tree);
5882 else
5883 {
5884 if (dump_file)
5885 {
5886 fprintf (dump_file, " const ");
5887 print_generic_expr (dump_file,
5888 replace_info->new_tree);
5889 fprintf (dump_file,
5890 " can't be converted to param ");
5891 print_generic_expr (dump_file, parm);
5892 fprintf (dump_file, "\n");
5893 }
5894 replace_info->old_tree = NULL;
5895 }
5896 }
5897 }
5898 else
5899 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5900 if (replace_info->old_tree)
5901 {
5902 init = setup_one_parameter (&id, replace_info->old_tree,
5903 replace_info->new_tree, id.src_fn,
5904 NULL,
5905 &vars);
5906 if (init)
5907 init_stmts.safe_push (init);
5908 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5909 {
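	      /* The old parameter is getting an explicit replacement value,
		 so do not also mark it as an optimized-away argument below;
		 clear its bit in (a private copy of) DEBUG_ARGS_TO_SKIP.  */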
5910 if (parm_num == -1)
5911 {
5912 tree parm;
5913 int p;
5914 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5915 parm = DECL_CHAIN (parm), p++)
5916 if (parm == replace_info->old_tree)
5917 {
5918 parm_num = p;
5919 break;
5920 }
5921 }
5922 if (parm_num != -1)
5923 {
5924 if (debug_args_to_skip == args_to_skip)
5925 {
5926 debug_args_to_skip = BITMAP_ALLOC (NULL);
5927 bitmap_copy (debug_args_to_skip, args_to_skip);
5928 }
5929 bitmap_clear_bit (debug_args_to_skip, parm_num);
5930 }
5931 }
5932 }
5933 }
5934 }
5935 /* Copy the function's arguments. */
5936 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5937 DECL_ARGUMENTS (new_decl)
5938 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5939 args_to_skip, &vars);
5940
5941 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5942 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5943
5944 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5945
5946 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5947 /* Add local vars. */
5948 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5949
5950 if (DECL_RESULT (old_decl) == NULL_TREE)
5951 ;
5952 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5953 {
5954 DECL_RESULT (new_decl)
5955 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5956 RESULT_DECL, NULL_TREE, void_type_node);
5957 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5958 cfun->returns_struct = 0;
5959 cfun->returns_pcc_struct = 0;
5960 }
5961 else
5962 {
5963 tree old_name;
5964 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5965 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5966 if (gimple_in_ssa_p (id.src_cfun)
5967 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5968 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5969 {
5970 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5971 insert_decl_map (&id, old_name, new_name);
5972 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5973 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5974 }
5975 }
5976
5977 /* Set up the destination function's loop tree. */
5978 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5979 {
5980 cfun->curr_properties &= ~PROP_loops;
5981 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5982 cfun->curr_properties |= PROP_loops;
5983 }
5984
5985 /* Copy the function's body. */
5986 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5987 new_entry);
5988
5989 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5990 number_blocks (new_decl);
5991
5992 /* We want to create the BB unconditionally, so that the addition of
5993 debug stmts doesn't affect BB count, which may in the end cause
5994 codegen differences. */
5995 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5996 while (init_stmts.length ())
5997 insert_init_stmt (&id, bb, init_stmts.pop ());
5998 update_clone_info (&id);
5999
6000 /* Remap the nonlocal_goto_save_area, if any. */
6001 if (cfun->nonlocal_goto_save_area)
6002 {
6003 struct walk_stmt_info wi;
6004
6005 memset (&wi, 0, sizeof (wi));
6006 wi.info = &id;
6007 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6008 }
6009
6010 /* Clean up. */
6011 delete id.decl_map;
6012 if (id.debug_map)
6013 delete id.debug_map;
6014 free_dominance_info (CDI_DOMINATORS);
6015 free_dominance_info (CDI_POST_DOMINATORS);
6016
6017 update_max_bb_count ();
6018 fold_marked_statements (0, id.statements_to_fold);
6019 delete id.statements_to_fold;
6020 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6021 if (id.dst_node->definition)
6022 cgraph_edge::rebuild_references ();
6023 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6024 {
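      /* The body copy may have invalidated the loop structures; recompute
	 dominators and fix the loop tree.  */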
6025 calculate_dominance_info (CDI_DOMINATORS);
6026 fix_loop_structure (NULL);
6027 }
6028 update_ssa (TODO_update_ssa);
6029
6030 /* After partial cloning we need to rescale frequencies, so that they are
6031 within the proper range in the cloned function. */
6032 if (new_entry)
6033 {
6034 struct cgraph_edge *e;
6035 rebuild_frequencies ();
6036
6037 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6038 for (e = new_version_node->callees; e; e = e->next_callee)
6039 {
6040 basic_block bb = gimple_bb (e->call_stmt);
6041 e->count = bb->count;
6042 }
6043 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6044 {
6045 basic_block bb = gimple_bb (e->call_stmt);
6046 e->count = bb->count;
6047 }
6048 }
6049
6050 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6051 {
6052 tree parm;
6053 vec<tree, va_gc> **debug_args = NULL;
6054 unsigned int len = 0;
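      /* For each optimized-away register parameter, create a DEBUG_EXPR_DECL
	 placeholder and record the (origin parm, placeholder) pair in the new
	 decl's debug args vector.  */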
6055 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6056 parm; parm = DECL_CHAIN (parm), i++)
6057 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6058 {
6059 tree ddecl;
6060
6061 if (debug_args == NULL)
6062 {
6063 debug_args = decl_debug_args_insert (new_decl);
6064 len = vec_safe_length (*debug_args);
6065 }
6066 ddecl = make_node (DEBUG_EXPR_DECL);
6067 DECL_ARTIFICIAL (ddecl) = 1;
6068 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6069 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6070 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6071 vec_safe_push (*debug_args, ddecl);
6072 }
6073 if (debug_args != NULL)
6074 {
6075 /* On the callee side, add
6076 DEBUG D#Y s=> parm
6077 DEBUG var => D#Y
6078 stmts to the first bb where var is a VAR_DECL created for the
6079 optimized-away parameter in the DECL_INITIAL block. This hints
6080 in the debug info that var (whose DECL_ORIGIN is the parm
6081 PARM_DECL) is optimized away, but could be looked up at the
6082 call site as value of D#X there. */
6083 tree var = vars, vexpr;
6084 gimple_stmt_iterator cgsi
6085 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6086 gimple *def_temp;
6087 var = vars;
6088 i = vec_safe_length (*debug_args);
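	  /* Walk the newly added pairs from the end of *DEBUG_ARGS down to LEN,
	     matching each origin parm with the VAR_DECL in VARS whose abstract
	     origin it is.  */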
6089 do
6090 {
6091 i -= 2;
6092 while (var != NULL_TREE
6093 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6094 var = TREE_CHAIN (var);
6095 if (var == NULL_TREE)
6096 break;
6097 vexpr = make_node (DEBUG_EXPR_DECL);
6098 parm = (**debug_args)[i];
6099 DECL_ARTIFICIAL (vexpr) = 1;
6100 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6101 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6102 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6103 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6104 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6105 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6106 }
6107 while (i > len);
6108 }
6109 }
6110
6111 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6112 BITMAP_FREE (debug_args_to_skip);
6113 free_dominance_info (CDI_DOMINATORS);
6114 free_dominance_info (CDI_POST_DOMINATORS);
6115
6116 gcc_assert (!id.debug_stmts.exists ());
6117 pop_cfun ();
6118 return;
6119 }
6120
6121 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6122 the callee and return the inlined body on success. */
6123
6124 tree
6125 maybe_inline_call_in_expr (tree exp)
6126 {
6127 tree fn = get_callee_fndecl (exp);
6128
6129 /* We can only try to inline "const" functions. */
6130 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6131 {
6132 call_expr_arg_iterator iter;
6133 copy_body_data id;
6134 tree param, arg, t;
6135 hash_map<tree, tree> decl_map;
6136
6137 /* Remap the parameters. */
6138 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6139 param;
6140 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6141 decl_map.put (param, arg);
6142
6143 memset (&id, 0, sizeof (id));
6144 id.src_fn = fn;
6145 id.dst_fn = current_function_decl;
6146 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6147 id.decl_map = &decl_map;
6148
6149 id.copy_decl = copy_decl_no_change;
6150 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6151 id.transform_new_cfg = false;
6152 id.transform_return_to_modify = true;
6153 id.transform_parameter = true;
6154 id.transform_lang_insert_block = NULL;
6155
6156 /* Make sure not to unshare trees behind the front-end's back
6157 since front-end specific mechanisms may rely on sharing. */
6158 id.regimplify = false;
6159 id.do_not_unshare = true;
6160
6161 /* We're not inside any EH region. */
6162 id.eh_lp_nr = 0;
6163
6164 t = copy_tree_body (&id);
6165
6166 /* We can only return something suitable for use in a GENERIC
6167 expression tree. */
6168 if (TREE_CODE (t) == MODIFY_EXPR)
6169 return TREE_OPERAND (t, 1);
6170 }
6171
6172 return NULL_TREE;
6173 }
6174
6175 /* Duplicate a type, fields and all. */
6176
6177 tree
6178 build_duplicate_type (tree type)
6179 {
6180 struct copy_body_data id;
6181
6182 memset (&id, 0, sizeof (id));
6183 id.src_fn = current_function_decl;
6184 id.dst_fn = current_function_decl;
6185 id.src_cfun = cfun;
6186 id.decl_map = new hash_map<tree, tree>;
6187 id.debug_map = NULL;
6188 id.copy_decl = copy_decl_no_change;
6189
6190 type = remap_type_1 (type, &id);
6191
6192 delete id.decl_map;
6193 if (id.debug_map)
6194 delete id.debug_map;
6195
6196 TYPE_CANONICAL (type) = type;
6197
6198 return type;
6199 }
6200
6201 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6202 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6203 evaluation. */
6204
6205 tree
6206 copy_fn (tree fn, tree& parms, tree& result)
6207 {
6208 copy_body_data id;
6209 tree param;
6210 hash_map<tree, tree> decl_map;
6211
6212 tree *p = &parms;
6213 *p = NULL_TREE;
6214
6215 memset (&id, 0, sizeof (id));
6216 id.src_fn = fn;
6217 id.dst_fn = current_function_decl;
6218 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6219 id.decl_map = &decl_map;
6220
6221 id.copy_decl = copy_decl_no_change;
6222 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6223 id.transform_new_cfg = false;
6224 id.transform_return_to_modify = false;
6225 id.transform_parameter = true;
6226 id.transform_lang_insert_block = NULL;
6227
6228 /* Make sure not to unshare trees behind the front-end's back
6229 since front-end specific mechanisms may rely on sharing. */
6230 id.regimplify = false;
6231 id.do_not_unshare = true;
6232
6233 /* We're not inside any EH region. */
6234 id.eh_lp_nr = 0;
6235
6236 /* Remap the parameters and result and return them to the caller. */
6237 for (param = DECL_ARGUMENTS (fn);
6238 param;
6239 param = DECL_CHAIN (param))
6240 {
6241 *p = remap_decl (param, &id);
6242 p = &DECL_CHAIN (*p);
6243 }
6244
6245 if (DECL_RESULT (fn))
6246 result = remap_decl (DECL_RESULT (fn), &id);
6247 else
6248 result = NULL_TREE;
6249
6250 return copy_tree_body (&id);
6251 }