tree-inline.c (remap_gimple_stmt): Cache gimple_block.
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64
65 /* I'm not really happy about this, but we need to handle both gimple
66 and non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75 statements and RESX statements are adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
81
82 Versioning: a function body is duplicated, and the result is a new
83 function rather than being copied into blocks of an existing function
84 as with inlining. Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90 All of these will simultaneously look up any callgraph edges. If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined) those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
99
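/* An illustrative sketch (hypothetical source, not a particular testcase):
   inlining

       int add (int a, int b) { return a + b; }

   at a call "r = add (x, 3);" duplicates the body into the caller, turns
   the PARM_DECLs A and B into local VAR_DECLs initialized from the
   arguments, and rewrites the RETURN_EXPR into an assignment to the
   returned-value variable, roughly

       a.1 = x; b.2 = 3;
       retval.3 = a.1 + b.2;
       r = retval.3;

   with the branch-to-exit semantics of the return handled by the CFG
   rather than by a statement.  */
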
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_decl_to_var (tree, copy_body_data *);
133 static tree copy_result_decl_to_var (tree, copy_body_data *);
134 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
135 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, it is used for more than that. */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
151
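/* For instance (hypothetical names): after insert_decl_map (id, parm, var),
   a later lookup of VAR itself yields VAR again, so walking statements of
   the copy that already refer to VAR will not remap it a second time.  */
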
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 n = id->decl_map->get (val);
218 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
219 return *n;
220 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
221 DECL_ARTIFICIAL (vexpr) = 1;
222 TREE_TYPE (vexpr) = TREE_TYPE (name);
223 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
224 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
225 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
226 insert_decl_map (id, val, vexpr);
227 return vexpr;
228 }
229
230 processing_debug_stmt = -1;
231 return name;
232 }
233
234 /* Remap anonymous SSA names or SSA names of anonymous decls. */
235 var = SSA_NAME_VAR (name);
236 if (!var
237 || (!SSA_NAME_IS_DEFAULT_DEF (name)
238 && VAR_P (var)
239 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
240 && DECL_ARTIFICIAL (var)
241 && DECL_IGNORED_P (var)
242 && !DECL_NAME (var)))
243 {
244 struct ptr_info_def *pi;
245 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
246 if (!var && SSA_NAME_IDENTIFIER (name))
247 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
248 insert_decl_map (id, name, new_tree);
249 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
250 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
251 /* At least IPA points-to info can be directly transferred. */
252 if (id->src_cfun->gimple_df
253 && id->src_cfun->gimple_df->ipa_pta
254 && POINTER_TYPE_P (TREE_TYPE (name))
255 && (pi = SSA_NAME_PTR_INFO (name))
256 && !pi->pt.anything)
257 {
258 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
259 new_pi->pt = pi->pt;
260 }
261 return new_tree;
262 }
263
264 /* Do not set DEF_STMT yet as the statement is not copied yet. We do
265 that in copy_bb. */
266 new_tree = remap_decl (var, id);
267
268 /* We might've substituted a constant or another SSA_NAME for
269 the variable.
270
271 Replace the SSA name representing RESULT_DECL by the variable during
272 inlining: this saves us from needing to introduce a PHI node in case
273 the return value is only partly initialized. */
274 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
275 && (!SSA_NAME_VAR (name)
276 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
277 || !id->transform_return_to_modify))
278 {
279 struct ptr_info_def *pi;
280 new_tree = make_ssa_name (new_tree);
281 insert_decl_map (id, name, new_tree);
282 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
283 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
284 /* At least IPA points-to info can be directly transferred. */
285 if (id->src_cfun->gimple_df
286 && id->src_cfun->gimple_df->ipa_pta
287 && POINTER_TYPE_P (TREE_TYPE (name))
288 && (pi = SSA_NAME_PTR_INFO (name))
289 && !pi->pt.anything)
290 {
291 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
292 new_pi->pt = pi->pt;
293 }
294 if (SSA_NAME_IS_DEFAULT_DEF (name))
295 {
296 /* By inlining a function having an uninitialized variable, we might
297 extend its lifetime (the variable might get reused). This causes an
298 ICE in the case we end up extending the lifetime of an SSA name
299 across an abnormal edge, but it also increases register pressure.
300
301 We simply initialize all uninitialized vars by 0, except for the
302 case we are inlining into the very first BB. We can avoid this for
303 all BBs that are not inside strongly connected regions of the CFG,
304 but this is expensive to test. */
305 if (id->entry_bb
306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
307 && (!SSA_NAME_VAR (name)
308 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
309 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
310 0)->dest
311 || EDGE_COUNT (id->entry_bb->preds) != 1))
312 {
313 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
314 gimple *init_stmt;
315 tree zero = build_zero_cst (TREE_TYPE (new_tree));
316
317 init_stmt = gimple_build_assign (new_tree, zero);
318 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
319 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
320 }
321 else
322 {
323 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
324 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
325 }
326 }
327 }
328 else
329 insert_decl_map (id, name, new_tree);
330 return new_tree;
331 }
332
333 /* Remap DECL during the copying of the BLOCK tree for the function. */
334
335 tree
336 remap_decl (tree decl, copy_body_data *id)
337 {
338 tree *n;
339
340 /* We only remap local variables in the current function. */
341
342 /* See if we have remapped this declaration. */
343
344 n = id->decl_map->get (decl);
345
346 if (!n && processing_debug_stmt)
347 {
348 processing_debug_stmt = -1;
349 return decl;
350 }
351
352 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
353 necessary DECLs have already been remapped and we do not want to duplicate
354 a decl coming from outside of the sequence we are copying. */
355 if (!n
356 && id->prevent_decl_creation_for_types
357 && id->remapping_type_depth > 0
358 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
359 return decl;
360
361 /* If we didn't already have an equivalent for this declaration, create one
362 now. */
363 if (!n)
364 {
365 /* Make a copy of the variable or label. */
366 tree t = id->copy_decl (decl, id);
367
368 /* Remember it, so that if we encounter this local entity again
369 we can reuse this copy. Do this early because remap_type may
370 need this decl for TYPE_STUB_DECL. */
371 insert_decl_map (id, decl, t);
372
373 if (!DECL_P (t))
374 return t;
375
376 /* Remap types, if necessary. */
377 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
378 if (TREE_CODE (t) == TYPE_DECL)
379 {
380 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
381
382 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
383 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
384 is not set on the TYPE_DECL, for example in LTO mode. */
385 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
386 {
387 tree x = build_variant_type_copy (TREE_TYPE (t));
388 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
389 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
390 DECL_ORIGINAL_TYPE (t) = x;
391 }
392 }
393
394 /* Remap sizes as necessary. */
395 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
396 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
397
398 /* If this is a field, do likewise for its offset and qualifier. */
399 if (TREE_CODE (t) == FIELD_DECL)
400 {
401 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
402 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
403 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
404 }
405
406 return t;
407 }
408
409 if (id->do_not_unshare)
410 return *n;
411 else
412 return unshare_expr (*n);
413 }
414
415 static tree
416 remap_type_1 (tree type, copy_body_data *id)
417 {
418 tree new_tree, t;
419
420 /* We do need a copy. Build and register it now. If this is a pointer or
421 reference type, remap the designated type and make a new pointer or
422 reference type. */
423 if (TREE_CODE (type) == POINTER_TYPE)
424 {
425 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
426 TYPE_MODE (type),
427 TYPE_REF_CAN_ALIAS_ALL (type));
428 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
429 new_tree = build_type_attribute_qual_variant (new_tree,
430 TYPE_ATTRIBUTES (type),
431 TYPE_QUALS (type));
432 insert_decl_map (id, type, new_tree);
433 return new_tree;
434 }
435 else if (TREE_CODE (type) == REFERENCE_TYPE)
436 {
437 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
438 TYPE_MODE (type),
439 TYPE_REF_CAN_ALIAS_ALL (type));
440 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
441 new_tree = build_type_attribute_qual_variant (new_tree,
442 TYPE_ATTRIBUTES (type),
443 TYPE_QUALS (type));
444 insert_decl_map (id, type, new_tree);
445 return new_tree;
446 }
447 else
448 new_tree = copy_node (type);
449
450 insert_decl_map (id, type, new_tree);
451
452 /* This is a new type, not a copy of an old type. Need to reassociate
453 variants. We can handle everything except the main variant lazily. */
454 t = TYPE_MAIN_VARIANT (type);
455 if (type != t)
456 {
457 t = remap_type (t, id);
458 TYPE_MAIN_VARIANT (new_tree) = t;
459 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
460 TYPE_NEXT_VARIANT (t) = new_tree;
461 }
462 else
463 {
464 TYPE_MAIN_VARIANT (new_tree) = new_tree;
465 TYPE_NEXT_VARIANT (new_tree) = NULL;
466 }
467
468 if (TYPE_STUB_DECL (type))
469 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
470
471 /* Lazily create pointer and reference types. */
472 TYPE_POINTER_TO (new_tree) = NULL;
473 TYPE_REFERENCE_TO (new_tree) = NULL;
474
475 /* Copy all types that may contain references to local variables; be sure to
476 preserve sharing between a type and its main variant when possible. */
477 switch (TREE_CODE (new_tree))
478 {
479 case INTEGER_TYPE:
480 case REAL_TYPE:
481 case FIXED_POINT_TYPE:
482 case ENUMERAL_TYPE:
483 case BOOLEAN_TYPE:
484 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
485 {
486 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
487 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
488
489 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
490 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 }
492 else
493 {
494 t = TYPE_MIN_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
497
498 t = TYPE_MAX_VALUE (new_tree);
499 if (t && TREE_CODE (t) != INTEGER_CST)
500 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
501 }
502 return new_tree;
503
504 case FUNCTION_TYPE:
505 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
506 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
507 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
508 else
509 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
510 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
511 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
512 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
513 else
514 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
515 return new_tree;
516
517 case ARRAY_TYPE:
518 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
519 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
520 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
521 else
522 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
523
524 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
525 {
526 gcc_checking_assert (TYPE_DOMAIN (type)
527 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
528 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
529 }
530 else
531 {
532 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
533 /* For array bounds where we have decided not to copy over the bounds
534 variable which isn't used in the OpenMP/OpenACC region, change them to
535 an uninitialized VAR_DECL temporary. */
536 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
537 && id->adjust_array_error_bounds
538 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
539 {
540 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
541 DECL_ATTRIBUTES (v)
542 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
543 DECL_ATTRIBUTES (v));
544 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
545 }
546 }
547 break;
548
549 case RECORD_TYPE:
550 case UNION_TYPE:
551 case QUAL_UNION_TYPE:
552 if (TYPE_MAIN_VARIANT (type) != type
553 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
554 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
555 else
556 {
557 tree f, nf = NULL;
558
559 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
560 {
561 t = remap_decl (f, id);
562 DECL_CONTEXT (t) = new_tree;
563 DECL_CHAIN (t) = nf;
564 nf = t;
565 }
566 TYPE_FIELDS (new_tree) = nreverse (nf);
567 }
568 break;
569
570 case OFFSET_TYPE:
571 default:
572 /* Shouldn't have been considered variably sized. */
573 gcc_unreachable ();
574 }
575
576 /* All variants of the type share the same size, so use the already remapped data. */
577 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
578 {
579 tree s = TYPE_SIZE (type);
580 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
581 tree su = TYPE_SIZE_UNIT (type);
582 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
583 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
584 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
585 || s == mvs);
586 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
587 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
588 || su == mvsu);
589 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
590 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
591 }
592 else
593 {
594 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
595 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
596 }
597
598 return new_tree;
599 }
600
601 tree
602 remap_type (tree type, copy_body_data *id)
603 {
604 tree *node;
605 tree tmp;
606
607 if (type == NULL)
608 return type;
609
610 /* See if we have remapped this type. */
611 node = id->decl_map->get (type);
612 if (node)
613 return *node;
614
615 /* The type only needs remapping if it's variably modified. */
616 if (! variably_modified_type_p (type, id->src_fn))
617 {
618 insert_decl_map (id, type, type);
619 return type;
620 }
621
622 id->remapping_type_depth++;
623 tmp = remap_type_1 (type, id);
624 id->remapping_type_depth--;
625
626 return tmp;
627 }
628
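/* For example (illustrative only): in

       void f (int n) { int a[n]; ... }

   the type int[n] is variably modified -- its size depends on the local N --
   so copying the body must remap it via remap_type_1; a type such as plain
   "int" is not, and remap_type just enters it into the map unchanged.  */
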
629 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
630
631 static bool
632 can_be_nonlocal (tree decl, copy_body_data *id)
633 {
634 /* We cannot duplicate function decls. */
635 if (TREE_CODE (decl) == FUNCTION_DECL)
636 return true;
637
638 /* Local static vars must be non-local or we get multiple declaration
639 problems. */
640 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
641 return true;
642
643 return false;
644 }
645
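/* E.g. (illustrative): a function-local "static int counter;" is not an
   auto var of the source function, so it is treated as nonlocal here; every
   inlined or cloned copy keeps referring to the single original object
   instead of getting a fresh duplicate.  */
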
646 static tree
647 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
648 copy_body_data *id)
649 {
650 tree old_var;
651 tree new_decls = NULL_TREE;
652
653 /* Remap its variables. */
654 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
655 {
656 tree new_var;
657
658 if (can_be_nonlocal (old_var, id))
659 {
660 /* We need to add this variable to the local decls as otherwise
661 nothing else will do so. */
662 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
663 add_local_decl (cfun, old_var);
664 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
665 && !DECL_IGNORED_P (old_var)
666 && nonlocalized_list)
667 vec_safe_push (*nonlocalized_list, old_var);
668 continue;
669 }
670
671 /* Remap the variable. */
672 new_var = remap_decl (old_var, id);
673
674 /* If we didn't remap this variable, we can't mess with its
675 TREE_CHAIN. If we remapped this variable to the return slot, it's
676 already declared somewhere else, so don't declare it here. */
677
678 if (new_var == id->retvar)
679 ;
680 else if (!new_var)
681 {
682 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
683 && !DECL_IGNORED_P (old_var)
684 && nonlocalized_list)
685 vec_safe_push (*nonlocalized_list, old_var);
686 }
687 else
688 {
689 gcc_assert (DECL_P (new_var));
690 DECL_CHAIN (new_var) = new_decls;
691 new_decls = new_var;
692
693 /* Also copy value-expressions. */
694 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
695 {
696 tree tem = DECL_VALUE_EXPR (new_var);
697 bool old_regimplify = id->regimplify;
698 id->remapping_type_depth++;
699 walk_tree (&tem, copy_tree_body_r, id, NULL);
700 id->remapping_type_depth--;
701 id->regimplify = old_regimplify;
702 SET_DECL_VALUE_EXPR (new_var, tem);
703 }
704 }
705 }
706
707 return nreverse (new_decls);
708 }
709
710 /* Copy the BLOCK to contain remapped versions of the variables
711 therein. And hook the new block into the block-tree. */
712
713 static void
714 remap_block (tree *block, copy_body_data *id)
715 {
716 tree old_block;
717 tree new_block;
718
719 /* Make the new block. */
720 old_block = *block;
721 new_block = make_node (BLOCK);
722 TREE_USED (new_block) = TREE_USED (old_block);
723 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
724 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
725 BLOCK_NONLOCALIZED_VARS (new_block)
726 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
727 *block = new_block;
728
729 /* Remap its variables. */
730 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
731 &BLOCK_NONLOCALIZED_VARS (new_block),
732 id);
733
734 if (id->transform_lang_insert_block)
735 id->transform_lang_insert_block (new_block);
736
737 /* Remember the remapped block. */
738 insert_decl_map (id, old_block, new_block);
739 }
740
741 /* Copy the whole block tree and root it in id->block. */
742
743 static tree
744 remap_blocks (tree block, copy_body_data *id)
745 {
746 tree t;
747 tree new_tree = block;
748
749 if (!block)
750 return NULL;
751
752 remap_block (&new_tree, id);
753 gcc_assert (new_tree != block);
754 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
755 prepend_lexical_block (new_tree, remap_blocks (t, id));
756 /* Blocks are in arbitrary order, but make things slightly prettier and do
757 not swap order when producing a copy. */
758 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
759 return new_tree;
760 }
761
762 /* Remap the block tree rooted at BLOCK to nothing. */
763
764 static void
765 remap_blocks_to_null (tree block, copy_body_data *id)
766 {
767 tree t;
768 insert_decl_map (id, block, NULL_TREE);
769 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
770 remap_blocks_to_null (t, id);
771 }
772
773 /* Remap the location info pointed to by LOCUS. */
774
775 static location_t
776 remap_location (location_t locus, copy_body_data *id)
777 {
778 if (LOCATION_BLOCK (locus))
779 {
780 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
781 gcc_assert (n);
782 if (*n)
783 return set_block (locus, *n);
784 }
785
786 locus = LOCATION_LOCUS (locus);
787
788 if (locus != UNKNOWN_LOCATION && id->block)
789 return set_block (locus, id->block);
790
791 return locus;
792 }
793
794 static void
795 copy_statement_list (tree *tp)
796 {
797 tree_stmt_iterator oi, ni;
798 tree new_tree;
799
800 new_tree = alloc_stmt_list ();
801 ni = tsi_start (new_tree);
802 oi = tsi_start (*tp);
803 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
804 *tp = new_tree;
805
806 for (; !tsi_end_p (oi); tsi_next (&oi))
807 {
808 tree stmt = tsi_stmt (oi);
809 if (TREE_CODE (stmt) == STATEMENT_LIST)
810 /* This copy is not redundant; tsi_link_after will smash this
811 STATEMENT_LIST into the end of the one we're building, and we
812 don't want to do that with the original. */
813 copy_statement_list (&stmt);
814 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
815 }
816 }
817
818 static void
819 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
820 {
821 tree block = BIND_EXPR_BLOCK (*tp);
822 /* Copy (and replace) the statement. */
823 copy_tree_r (tp, walk_subtrees, NULL);
824 if (block)
825 {
826 remap_block (&block, id);
827 BIND_EXPR_BLOCK (*tp) = block;
828 }
829
830 if (BIND_EXPR_VARS (*tp))
831 /* This will remap a lot of the same decls again, but this should be
832 harmless. */
833 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
834 }
835
836
837 /* Create a new gimple_seq by remapping all the statements in BODY
838 using the inlining information in ID. */
839
840 static gimple_seq
841 remap_gimple_seq (gimple_seq body, copy_body_data *id)
842 {
843 gimple_stmt_iterator si;
844 gimple_seq new_body = NULL;
845
846 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
847 {
848 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
849 gimple_seq_add_seq (&new_body, new_stmts);
850 }
851
852 return new_body;
853 }
854
855
856 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
857 block using the mapping information in ID. */
858
859 static gimple *
860 copy_gimple_bind (gbind *stmt, copy_body_data *id)
861 {
862 gimple *new_bind;
863 tree new_block, new_vars;
864 gimple_seq body, new_body;
865
866 /* Copy the statement. Note that we purposely don't use copy_stmt
867 here because we need to remap statements as we copy. */
868 body = gimple_bind_body (stmt);
869 new_body = remap_gimple_seq (body, id);
870
871 new_block = gimple_bind_block (stmt);
872 if (new_block)
873 remap_block (&new_block, id);
874
875 /* This will remap a lot of the same decls again, but this should be
876 harmless. */
877 new_vars = gimple_bind_vars (stmt);
878 if (new_vars)
879 new_vars = remap_decls (new_vars, NULL, id);
880
881 new_bind = gimple_build_bind (new_vars, new_body, new_block);
882
883 return new_bind;
884 }
885
886 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
887
888 static bool
889 is_parm (tree decl)
890 {
891 if (TREE_CODE (decl) == SSA_NAME)
892 {
893 decl = SSA_NAME_VAR (decl);
894 if (!decl)
895 return false;
896 }
897
898 return (TREE_CODE (decl) == PARM_DECL);
899 }
900
901 /* Remap the dependence CLIQUE from the source to the destination function
902 as specified in ID. */
903
904 static unsigned short
905 remap_dependence_clique (copy_body_data *id, unsigned short clique)
906 {
907 if (clique == 0 || processing_debug_stmt)
908 return 0;
909 if (!id->dependence_map)
910 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
911 bool existed;
912 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
913 if (!existed)
914 {
915 /* Clique 1 is reserved for local ones set by PTA. */
916 if (cfun->last_clique == 0)
917 cfun->last_clique = 1;
918 newc = ++cfun->last_clique;
919 }
920 return newc;
921 }
922
923 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
924 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
925 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
926 recursing into the children nodes of *TP. */
927
928 static tree
929 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
930 {
931 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
932 copy_body_data *id = (copy_body_data *) wi_p->info;
933 tree fn = id->src_fn;
934
935 /* For recursive invocations this is no longer the LHS itself. */
936 bool is_lhs = wi_p->is_lhs;
937 wi_p->is_lhs = false;
938
939 if (TREE_CODE (*tp) == SSA_NAME)
940 {
941 *tp = remap_ssa_name (*tp, id);
942 *walk_subtrees = 0;
943 if (is_lhs)
944 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
945 return NULL;
946 }
947 else if (auto_var_in_fn_p (*tp, fn))
948 {
949 /* Local variables and labels need to be replaced by equivalent
950 variables. We don't want to copy static variables; there's
951 only one of those, no matter how many times we inline the
952 containing function. Similarly for globals from an outer
953 function. */
954 tree new_decl;
955
956 /* Remap the declaration. */
957 new_decl = remap_decl (*tp, id);
958 gcc_assert (new_decl);
959 /* Replace this variable with the copy. */
960 STRIP_TYPE_NOPS (new_decl);
961 /* ??? The C++ frontend uses void * pointer zero to initialize
962 any other type. This confuses the middle-end type verification.
963 As cloned bodies do not go through gimplification again the fixup
964 there doesn't trigger. */
965 if (TREE_CODE (new_decl) == INTEGER_CST
966 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
967 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
968 *tp = new_decl;
969 *walk_subtrees = 0;
970 }
971 else if (TREE_CODE (*tp) == STATEMENT_LIST)
972 gcc_unreachable ();
973 else if (TREE_CODE (*tp) == SAVE_EXPR)
974 gcc_unreachable ();
975 else if (TREE_CODE (*tp) == LABEL_DECL
976 && (!DECL_CONTEXT (*tp)
977 || decl_function_context (*tp) == id->src_fn))
978 /* These may need to be remapped for EH handling. */
979 *tp = remap_decl (*tp, id);
980 else if (TREE_CODE (*tp) == FIELD_DECL)
981 {
982 /* If the enclosing record type is variably_modified_type_p, the field
983 has already been remapped. Otherwise, it need not be. */
984 tree *n = id->decl_map->get (*tp);
985 if (n)
986 *tp = *n;
987 *walk_subtrees = 0;
988 }
989 else if (TYPE_P (*tp))
990 /* Types may need remapping as well. */
991 *tp = remap_type (*tp, id);
992 else if (CONSTANT_CLASS_P (*tp))
993 {
994 /* If this is a constant, we have to copy the node iff the type
995 will be remapped. copy_tree_r will not copy a constant. */
996 tree new_type = remap_type (TREE_TYPE (*tp), id);
997
998 if (new_type == TREE_TYPE (*tp))
999 *walk_subtrees = 0;
1000
1001 else if (TREE_CODE (*tp) == INTEGER_CST)
1002 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1003 else
1004 {
1005 *tp = copy_node (*tp);
1006 TREE_TYPE (*tp) = new_type;
1007 }
1008 }
1009 else
1010 {
1011 /* Otherwise, just copy the node. Note that copy_tree_r already
1012 knows not to copy VAR_DECLs, etc., so this is safe. */
1013
1014 if (TREE_CODE (*tp) == MEM_REF)
1015 {
1016 /* We need to re-canonicalize MEM_REFs from inline substitutions
1017 that can happen when a pointer argument is an ADDR_EXPR.
1018 Recurse here manually to allow that. */
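/* For instance (illustrative): if the parameter P was bound to &x at the
   call, a reference MEM[(int *)p_1 + 4] from the callee is rebuilt here as
   MEM[&x + 4], which fold_build2 can turn back into a direct reference
   into X.  */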
1019 tree ptr = TREE_OPERAND (*tp, 0);
1020 tree type = remap_type (TREE_TYPE (*tp), id);
1021 tree old = *tp;
1022 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1023 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1024 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1025 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1026 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1027 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1028 {
1029 MR_DEPENDENCE_CLIQUE (*tp)
1030 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1031 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1032 }
1033 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1034 remapped a parameter as the property might be valid only
1035 for the parameter itself. */
1036 if (TREE_THIS_NOTRAP (old)
1037 && (!is_parm (TREE_OPERAND (old, 0))
1038 || (!id->transform_parameter && is_parm (ptr))))
1039 TREE_THIS_NOTRAP (*tp) = 1;
1040 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1041 *walk_subtrees = 0;
1042 return NULL;
1043 }
1044
1045 /* Here is the "usual case". Copy this tree node, and then
1046 tweak some special cases. */
1047 copy_tree_r (tp, walk_subtrees, NULL);
1048
1049 if (TREE_CODE (*tp) != OMP_CLAUSE)
1050 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1051
1052 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1053 {
1054 /* The copied TARGET_EXPR has never been expanded, even if the
1055 original node was expanded already. */
1056 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1057 TREE_OPERAND (*tp, 3) = NULL_TREE;
1058 }
1059 else if (TREE_CODE (*tp) == ADDR_EXPR)
1060 {
1061 /* Variable substitution need not be simple. In particular,
1062 the MEM_REF substitution above. Make sure that
1063 TREE_CONSTANT and friends are up-to-date. */
1064 int invariant = is_gimple_min_invariant (*tp);
1065 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1066 recompute_tree_invariant_for_addr_expr (*tp);
1067
1068 /* If this used to be invariant, but is not any longer,
1069 then regimplification is probably needed. */
1070 if (invariant && !is_gimple_min_invariant (*tp))
1071 id->regimplify = true;
1072
1073 *walk_subtrees = 0;
1074 }
1075 }
1076
1077 /* Update the TREE_BLOCK for the cloned expr. */
1078 if (EXPR_P (*tp))
1079 {
1080 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1081 tree old_block = TREE_BLOCK (*tp);
1082 if (old_block)
1083 {
1084 tree *n;
1085 n = id->decl_map->get (TREE_BLOCK (*tp));
1086 if (n)
1087 new_block = *n;
1088 }
1089 TREE_SET_BLOCK (*tp, new_block);
1090 }
1091
1092 /* Keep iterating. */
1093 return NULL_TREE;
1094 }
1095
1096
1097 /* Called from copy_body via walk_tree. DATA is really a
1098 `copy_body_data *'. */
1099
1100 tree
1101 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1102 {
1103 copy_body_data *id = (copy_body_data *) data;
1104 tree fn = id->src_fn;
1105 tree new_block;
1106
1107 /* Begin by recognizing trees that we'll completely rewrite for the
1108 inlining context. Our output for these trees is completely
1109 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1110 into an edge). Further down, we'll handle trees that get
1111 duplicated and/or tweaked. */
1112
1113 /* When requested, RETURN_EXPRs should be transformed to just the
1114 contained MODIFY_EXPR. The branch semantics of the return will
1115 be handled elsewhere by manipulating the CFG rather than a statement. */
1116 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1117 {
1118 tree assignment = TREE_OPERAND (*tp, 0);
1119
1120 /* If we're returning something, just turn that into an
1121 assignment into the equivalent of the original RESULT_DECL.
1122 If the "assignment" is just the result decl, the result
1123 decl has already been set (e.g. a recent "foo (&result_decl,
1124 ...)"); just toss the entire RETURN_EXPR. */
1125 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1126 {
1127 /* Replace the RETURN_EXPR with (a copy of) the
1128 MODIFY_EXPR hanging underneath. */
1129 *tp = copy_node (assignment);
1130 }
1131 else /* Else the RETURN_EXPR returns no value. */
1132 {
1133 *tp = NULL;
1134 return (tree) (void *)1;
1135 }
1136 }
1137 else if (TREE_CODE (*tp) == SSA_NAME)
1138 {
1139 *tp = remap_ssa_name (*tp, id);
1140 *walk_subtrees = 0;
1141 return NULL;
1142 }
1143
1144 /* Local variables and labels need to be replaced by equivalent
1145 variables. We don't want to copy static variables; there's only
1146 one of those, no matter how many times we inline the containing
1147 function. Similarly for globals from an outer function. */
1148 else if (auto_var_in_fn_p (*tp, fn))
1149 {
1150 tree new_decl;
1151
1152 /* Remap the declaration. */
1153 new_decl = remap_decl (*tp, id);
1154 gcc_assert (new_decl);
1155 /* Replace this variable with the copy. */
1156 STRIP_TYPE_NOPS (new_decl);
1157 *tp = new_decl;
1158 *walk_subtrees = 0;
1159 }
1160 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1161 copy_statement_list (tp);
1162 else if (TREE_CODE (*tp) == SAVE_EXPR
1163 || TREE_CODE (*tp) == TARGET_EXPR)
1164 remap_save_expr (tp, id->decl_map, walk_subtrees);
1165 else if (TREE_CODE (*tp) == LABEL_DECL
1166 && (! DECL_CONTEXT (*tp)
1167 || decl_function_context (*tp) == id->src_fn))
1168 /* These may need to be remapped for EH handling. */
1169 *tp = remap_decl (*tp, id);
1170 else if (TREE_CODE (*tp) == BIND_EXPR)
1171 copy_bind_expr (tp, walk_subtrees, id);
1172 /* Types may need remapping as well. */
1173 else if (TYPE_P (*tp))
1174 *tp = remap_type (*tp, id);
1175
1176 /* If this is a constant, we have to copy the node iff the type will be
1177 remapped. copy_tree_r will not copy a constant. */
1178 else if (CONSTANT_CLASS_P (*tp))
1179 {
1180 tree new_type = remap_type (TREE_TYPE (*tp), id);
1181
1182 if (new_type == TREE_TYPE (*tp))
1183 *walk_subtrees = 0;
1184
1185 else if (TREE_CODE (*tp) == INTEGER_CST)
1186 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1187 else
1188 {
1189 *tp = copy_node (*tp);
1190 TREE_TYPE (*tp) = new_type;
1191 }
1192 }
1193
1194 /* Otherwise, just copy the node. Note that copy_tree_r already
1195 knows not to copy VAR_DECLs, etc., so this is safe. */
1196 else
1197 {
1198 /* Here we handle trees that are not completely rewritten.
1199 First we detect some inlining-induced bogosities for
1200 discarding. */
1201 if (TREE_CODE (*tp) == MODIFY_EXPR
1202 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1203 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1204 {
1205 /* Some assignments VAR = VAR; don't generate any rtl code
1206 and thus don't count as variable modification. Avoid
1207 keeping bogosities like 0 = 0. */
1208 tree decl = TREE_OPERAND (*tp, 0), value;
1209 tree *n;
1210
1211 n = id->decl_map->get (decl);
1212 if (n)
1213 {
1214 value = *n;
1215 STRIP_TYPE_NOPS (value);
1216 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1217 {
1218 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1219 return copy_tree_body_r (tp, walk_subtrees, data);
1220 }
1221 }
1222 }
1223 else if (TREE_CODE (*tp) == INDIRECT_REF)
1224 {
1225 /* Get rid of *& from inline substitutions that can happen when a
1226 pointer argument is an ADDR_EXPR. */
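/* Illustrative example: if the argument bound to the parameter P was &x,
   then *p from the callee becomes *&x here, which the folding below
   reduces to a plain use of X.  */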
1227 tree decl = TREE_OPERAND (*tp, 0);
1228 tree *n = id->decl_map->get (decl);
1229 if (n)
1230 {
1231 /* If we happen to get an ADDR_EXPR in n->value, strip
1232 it manually here as we'll eventually get ADDR_EXPRs
1233 which lie about their types pointed to. In this case
1234 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1235 but we absolutely rely on that. As fold_indirect_ref
1236 does other useful transformations, try that first, though. */
1237 tree type = TREE_TYPE (*tp);
1238 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1239 tree old = *tp;
1240 *tp = gimple_fold_indirect_ref (ptr);
1241 if (! *tp)
1242 {
1243 type = remap_type (type, id);
1244 if (TREE_CODE (ptr) == ADDR_EXPR)
1245 {
1246 *tp
1247 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1248 /* ??? We should either assert here or build
1249 a VIEW_CONVERT_EXPR instead of blindly leaking
1250 incompatible types to our IL. */
1251 if (! *tp)
1252 *tp = TREE_OPERAND (ptr, 0);
1253 }
1254 else
1255 {
1256 *tp = build1 (INDIRECT_REF, type, ptr);
1257 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1258 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1259 TREE_READONLY (*tp) = TREE_READONLY (old);
1260 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1261 have remapped a parameter as the property might be
1262 valid only for the parameter itself. */
1263 if (TREE_THIS_NOTRAP (old)
1264 && (!is_parm (TREE_OPERAND (old, 0))
1265 || (!id->transform_parameter && is_parm (ptr))))
1266 TREE_THIS_NOTRAP (*tp) = 1;
1267 }
1268 }
1269 *walk_subtrees = 0;
1270 return NULL;
1271 }
1272 }
1273 else if (TREE_CODE (*tp) == MEM_REF)
1274 {
1275 /* We need to re-canonicalize MEM_REFs from inline substitutions
1276 that can happen when a pointer argument is an ADDR_EXPR.
1277 Recurse here manually to allow that. */
1278 tree ptr = TREE_OPERAND (*tp, 0);
1279 tree type = remap_type (TREE_TYPE (*tp), id);
1280 tree old = *tp;
1281 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1282 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1283 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1284 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1285 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1286 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1287 {
1288 MR_DEPENDENCE_CLIQUE (*tp)
1289 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1290 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1291 }
1292 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1293 remapped a parameter as the property might be valid only
1294 for the parameter itself. */
1295 if (TREE_THIS_NOTRAP (old)
1296 && (!is_parm (TREE_OPERAND (old, 0))
1297 || (!id->transform_parameter && is_parm (ptr))))
1298 TREE_THIS_NOTRAP (*tp) = 1;
1299 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1300 *walk_subtrees = 0;
1301 return NULL;
1302 }
1303
1304 /* Here is the "usual case". Copy this tree node, and then
1305 tweak some special cases. */
1306 copy_tree_r (tp, walk_subtrees, NULL);
1307
1308 /* If EXPR has a block defined, map it to the newly constructed block.
1309 When inlining we want EXPRs without a block to appear in the block
1310 of the function call if we are not remapping a type. */
1311 if (EXPR_P (*tp))
1312 {
1313 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1314 if (TREE_BLOCK (*tp))
1315 {
1316 tree *n;
1317 n = id->decl_map->get (TREE_BLOCK (*tp));
1318 if (n)
1319 new_block = *n;
1320 }
1321 TREE_SET_BLOCK (*tp, new_block);
1322 }
1323
1324 if (TREE_CODE (*tp) != OMP_CLAUSE)
1325 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1326
1327 /* The copied TARGET_EXPR has never been expanded, even if the
1328 original node was expanded already. */
1329 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1330 {
1331 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1332 TREE_OPERAND (*tp, 3) = NULL_TREE;
1333 }
1334
1335 /* Variable substitution need not be simple. In particular, the
1336 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1337 and friends are up-to-date. */
1338 else if (TREE_CODE (*tp) == ADDR_EXPR)
1339 {
1340 int invariant = is_gimple_min_invariant (*tp);
1341 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1342
1343 /* Handle the case where we substituted an INDIRECT_REF
1344 into the operand of the ADDR_EXPR. */
1345 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1346 {
1347 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1348 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1349 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1350 *tp = t;
1351 }
1352 else
1353 recompute_tree_invariant_for_addr_expr (*tp);
1354
1355 /* If this used to be invariant, but is not any longer,
1356 then regimplification is probably needed. */
1357 if (invariant && !is_gimple_min_invariant (*tp))
1358 id->regimplify = true;
1359
1360 *walk_subtrees = 0;
1361 }
1362 }
1363
1364 /* Keep iterating. */
1365 return NULL_TREE;
1366 }
1367
1368 /* Helper for remap_gimple_stmt. Given an EH region number for the
1369 source function, map that to the duplicate EH region number in
1370 the destination function. */
1371
1372 static int
1373 remap_eh_region_nr (int old_nr, copy_body_data *id)
1374 {
1375 eh_region old_r, new_r;
1376
1377 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1378 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1379
1380 return new_r->index;
1381 }
1382
1383 /* Similar, but operate on INTEGER_CSTs. */
1384
1385 static tree
1386 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1387 {
1388 int old_nr, new_nr;
1389
1390 old_nr = tree_to_shwi (old_t_nr);
1391 new_nr = remap_eh_region_nr (old_nr, id);
1392
1393 return build_int_cst (integer_type_node, new_nr);
1394 }
1395
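/* For instance (hypothetical region numbers): if EH region 2 of the source
   function was duplicated into the destination as region 5, a copied call
   __builtin_eh_pointer (2) must be rewritten as __builtin_eh_pointer (5);
   the two helpers above provide that mapping for plain ints and for
   INTEGER_CST operands respectively.  */
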
1396 /* Helper for copy_bb. Remap statement STMT using the inlining
1397 information in ID. Return the new statement copy. */
1398
1399 static gimple_seq
1400 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1401 {
1402 gimple *copy = NULL;
1403 struct walk_stmt_info wi;
1404 bool skip_first = false;
1405 gimple_seq stmts = NULL;
1406
1407 if (is_gimple_debug (stmt)
1408 && (gimple_debug_nonbind_marker_p (stmt)
1409 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1410 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1411 return NULL;
1412
1413 /* Begin by recognizing trees that we'll completely rewrite for the
1414 inlining context. Our output for these trees is completely
1415 different from our input (e.g. RETURN_EXPR is deleted and morphs
1416 into an edge). Further down, we'll handle trees that get
1417 duplicated and/or tweaked. */
1418
1419 /* When requested, GIMPLE_RETURN should be transformed to just the
1420 contained GIMPLE_ASSIGN. The branch semantics of the return will
1421 be handled elsewhere by manipulating the CFG rather than the
1422 statement. */
1423 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1424 {
1425 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1426
1427 /* If we're returning something, just turn that into an
1428 assignment to the equivalent of the original RESULT_DECL.
1429 If RETVAL is just the result decl, the result decl has
1430 already been set (e.g. a recent "foo (&result_decl, ...)");
1431 just toss the entire GIMPLE_RETURN. */
1432 if (retval
1433 && (TREE_CODE (retval) != RESULT_DECL
1434 && (TREE_CODE (retval) != SSA_NAME
1435 || ! SSA_NAME_VAR (retval)
1436 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1437 {
1438 copy = gimple_build_assign (id->do_not_unshare
1439 ? id->retvar : unshare_expr (id->retvar),
1440 retval);
1441 /* id->retvar is already substituted. Skip it on later remapping. */
1442 skip_first = true;
1443 }
1444 else
1445 return NULL;
1446 }
1447 else if (gimple_has_substatements (stmt))
1448 {
1449 gimple_seq s1, s2;
1450
1451 /* When cloning bodies from the C++ front end, we will be handed bodies
1452 in High GIMPLE form. Handle here all the High GIMPLE statements that
1453 have embedded statements. */
1454 switch (gimple_code (stmt))
1455 {
1456 case GIMPLE_BIND:
1457 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1458 break;
1459
1460 case GIMPLE_CATCH:
1461 {
1462 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1463 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1464 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1465 }
1466 break;
1467
1468 case GIMPLE_EH_FILTER:
1469 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1470 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1471 break;
1472
1473 case GIMPLE_TRY:
1474 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1475 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1476 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1477 break;
1478
1479 case GIMPLE_WITH_CLEANUP_EXPR:
1480 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1481 copy = gimple_build_wce (s1);
1482 break;
1483
1484 case GIMPLE_OMP_PARALLEL:
1485 {
1486 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1487 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1488 copy = gimple_build_omp_parallel
1489 (s1,
1490 gimple_omp_parallel_clauses (omp_par_stmt),
1491 gimple_omp_parallel_child_fn (omp_par_stmt),
1492 gimple_omp_parallel_data_arg (omp_par_stmt));
1493 }
1494 break;
1495
1496 case GIMPLE_OMP_TASK:
1497 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1498 copy = gimple_build_omp_task
1499 (s1,
1500 gimple_omp_task_clauses (stmt),
1501 gimple_omp_task_child_fn (stmt),
1502 gimple_omp_task_data_arg (stmt),
1503 gimple_omp_task_copy_fn (stmt),
1504 gimple_omp_task_arg_size (stmt),
1505 gimple_omp_task_arg_align (stmt));
1506 break;
1507
1508 case GIMPLE_OMP_FOR:
1509 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1510 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1511 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1512 gimple_omp_for_clauses (stmt),
1513 gimple_omp_for_collapse (stmt), s2);
1514 {
1515 size_t i;
1516 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1517 {
1518 gimple_omp_for_set_index (copy, i,
1519 gimple_omp_for_index (stmt, i));
1520 gimple_omp_for_set_initial (copy, i,
1521 gimple_omp_for_initial (stmt, i));
1522 gimple_omp_for_set_final (copy, i,
1523 gimple_omp_for_final (stmt, i));
1524 gimple_omp_for_set_incr (copy, i,
1525 gimple_omp_for_incr (stmt, i));
1526 gimple_omp_for_set_cond (copy, i,
1527 gimple_omp_for_cond (stmt, i));
1528 }
1529 }
1530 break;
1531
1532 case GIMPLE_OMP_MASTER:
1533 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1534 copy = gimple_build_omp_master (s1);
1535 break;
1536
1537 case GIMPLE_OMP_TASKGROUP:
1538 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1539 copy = gimple_build_omp_taskgroup
1540 (s1, gimple_omp_taskgroup_clauses (stmt));
1541 break;
1542
1543 case GIMPLE_OMP_ORDERED:
1544 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1545 copy = gimple_build_omp_ordered
1546 (s1,
1547 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1548 break;
1549
1550 case GIMPLE_OMP_SECTION:
1551 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1552 copy = gimple_build_omp_section (s1);
1553 break;
1554
1555 case GIMPLE_OMP_SECTIONS:
1556 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1557 copy = gimple_build_omp_sections
1558 (s1, gimple_omp_sections_clauses (stmt));
1559 break;
1560
1561 case GIMPLE_OMP_SINGLE:
1562 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1563 copy = gimple_build_omp_single
1564 (s1, gimple_omp_single_clauses (stmt));
1565 break;
1566
1567 case GIMPLE_OMP_TARGET:
1568 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1569 copy = gimple_build_omp_target
1570 (s1, gimple_omp_target_kind (stmt),
1571 gimple_omp_target_clauses (stmt));
1572 break;
1573
1574 case GIMPLE_OMP_TEAMS:
1575 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1576 copy = gimple_build_omp_teams
1577 (s1, gimple_omp_teams_clauses (stmt));
1578 break;
1579
1580 case GIMPLE_OMP_CRITICAL:
1581 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1582 copy = gimple_build_omp_critical (s1,
1583 gimple_omp_critical_name
1584 (as_a <gomp_critical *> (stmt)),
1585 gimple_omp_critical_clauses
1586 (as_a <gomp_critical *> (stmt)));
1587 break;
1588
1589 case GIMPLE_TRANSACTION:
1590 {
1591 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1592 gtransaction *new_trans_stmt;
1593 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1594 id);
1595 copy = new_trans_stmt = gimple_build_transaction (s1);
1596 gimple_transaction_set_subcode (new_trans_stmt,
1597 gimple_transaction_subcode (old_trans_stmt));
1598 gimple_transaction_set_label_norm (new_trans_stmt,
1599 gimple_transaction_label_norm (old_trans_stmt));
1600 gimple_transaction_set_label_uninst (new_trans_stmt,
1601 gimple_transaction_label_uninst (old_trans_stmt));
1602 gimple_transaction_set_label_over (new_trans_stmt,
1603 gimple_transaction_label_over (old_trans_stmt));
1604 }
1605 break;
1606
1607 default:
1608 gcc_unreachable ();
1609 }
1610 }
1611 else
1612 {
1613 if (gimple_assign_copy_p (stmt)
1614 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1615 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1616 {
1617 /* Here we handle statements that are not completely rewritten.
1618 First we detect some inlining-induced bogosities for
1619 discarding. */
1620
1621 /* Some assignments VAR = VAR; don't generate any rtl code
1622 and thus don't count as variable modification. Avoid
1623 keeping bogosities like 0 = 0. */
1624 tree decl = gimple_assign_lhs (stmt), value;
1625 tree *n;
1626
1627 n = id->decl_map->get (decl);
1628 if (n)
1629 {
1630 value = *n;
1631 STRIP_TYPE_NOPS (value);
1632 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1633 return NULL;
1634 }
1635 }
1636
1637 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1638 in a block that we aren't copying during tree_function_versioning,
1639 just drop the clobber stmt. */
1640 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1641 {
1642 tree lhs = gimple_assign_lhs (stmt);
1643 if (TREE_CODE (lhs) == MEM_REF
1644 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1645 {
1646 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1647 if (gimple_bb (def_stmt)
1648 && !bitmap_bit_p (id->blocks_to_copy,
1649 gimple_bb (def_stmt)->index))
1650 return NULL;
1651 }
1652 }
1653
1654 if (gimple_debug_bind_p (stmt))
1655 {
1656 gdebug *copy
1657 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1658 gimple_debug_bind_get_value (stmt),
1659 stmt);
1660 if (id->reset_location)
1661 gimple_set_location (copy, input_location);
1662 id->debug_stmts.safe_push (copy);
1663 gimple_seq_add_stmt (&stmts, copy);
1664 return stmts;
1665 }
1666 if (gimple_debug_source_bind_p (stmt))
1667 {
1668 gdebug *copy = gimple_build_debug_source_bind
1669 (gimple_debug_source_bind_get_var (stmt),
1670 gimple_debug_source_bind_get_value (stmt),
1671 stmt);
1672 if (id->reset_location)
1673 gimple_set_location (copy, input_location);
1674 id->debug_stmts.safe_push (copy);
1675 gimple_seq_add_stmt (&stmts, copy);
1676 return stmts;
1677 }
1678 if (gimple_debug_nonbind_marker_p (stmt))
1679 {
1680 /* If the inlined function has too many debug markers,
1681 don't copy them. */
1682 if (id->src_cfun->debug_marker_count
1683 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1684 return stmts;
1685
1686 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1687 if (id->reset_location)
1688 gimple_set_location (copy, input_location);
1689 id->debug_stmts.safe_push (copy);
1690 gimple_seq_add_stmt (&stmts, copy);
1691 return stmts;
1692 }
1693
1694 /* Create a new deep copy of the statement. */
1695 copy = gimple_copy (stmt);
1696
1697 /* Clear flags that need revisiting. */
1698 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1699 {
1700 if (gimple_call_tail_p (call_stmt))
1701 gimple_call_set_tail (call_stmt, false);
1702 if (gimple_call_from_thunk_p (call_stmt))
1703 gimple_call_set_from_thunk (call_stmt, false);
1704 if (gimple_call_internal_p (call_stmt))
1705 switch (gimple_call_internal_fn (call_stmt))
1706 {
1707 case IFN_GOMP_SIMD_LANE:
1708 case IFN_GOMP_SIMD_VF:
1709 case IFN_GOMP_SIMD_LAST_LANE:
1710 case IFN_GOMP_SIMD_ORDERED_START:
1711 case IFN_GOMP_SIMD_ORDERED_END:
1712 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1713 break;
1714 default:
1715 break;
1716 }
1717 }
1718
1719 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1720 RESX and EH_DISPATCH. */
1721 if (id->eh_map)
1722 switch (gimple_code (copy))
1723 {
1724 case GIMPLE_CALL:
1725 {
1726 tree r, fndecl = gimple_call_fndecl (copy);
1727 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1728 switch (DECL_FUNCTION_CODE (fndecl))
1729 {
1730 case BUILT_IN_EH_COPY_VALUES:
1731 r = gimple_call_arg (copy, 1);
1732 r = remap_eh_region_tree_nr (r, id);
1733 gimple_call_set_arg (copy, 1, r);
1734 /* FALLTHRU */
1735
1736 case BUILT_IN_EH_POINTER:
1737 case BUILT_IN_EH_FILTER:
1738 r = gimple_call_arg (copy, 0);
1739 r = remap_eh_region_tree_nr (r, id);
1740 gimple_call_set_arg (copy, 0, r);
1741 break;
1742
1743 default:
1744 break;
1745 }
1746
1747 /* Reset alias info if we didn't apply measures to
1748 keep it valid over inlining by setting DECL_PT_UID. */
1749 if (!id->src_cfun->gimple_df
1750 || !id->src_cfun->gimple_df->ipa_pta)
1751 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1752 }
1753 break;
1754
1755 case GIMPLE_RESX:
1756 {
1757 gresx *resx_stmt = as_a <gresx *> (copy);
1758 int r = gimple_resx_region (resx_stmt);
1759 r = remap_eh_region_nr (r, id);
1760 gimple_resx_set_region (resx_stmt, r);
1761 }
1762 break;
1763
1764 case GIMPLE_EH_DISPATCH:
1765 {
1766 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1767 int r = gimple_eh_dispatch_region (eh_dispatch);
1768 r = remap_eh_region_nr (r, id);
1769 gimple_eh_dispatch_set_region (eh_dispatch, r);
1770 }
1771 break;
1772
1773 default:
1774 break;
1775 }
1776 }
1777
1778 /* If STMT has a block defined, map it to the newly constructed block. */
1779 if (tree block = gimple_block (copy))
1780 {
1781 tree *n;
1782 n = id->decl_map->get (block);
1783 gcc_assert (n);
1784 gimple_set_block (copy, *n);
1785 }
1786
1787 if (id->reset_location)
1788 gimple_set_location (copy, input_location);
1789
1790 /* Debug statements ought to be rebuilt and not copied. */
1791 gcc_checking_assert (!is_gimple_debug (copy));
1792
1793 /* Remap all the operands in COPY. */
1794 memset (&wi, 0, sizeof (wi));
1795 wi.info = id;
1796 if (skip_first)
1797 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1798 else
1799 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1800
1801 /* Clear the copied virtual operands. We are not remapping them here
1802 but are going to recreate them from scratch. */
1803 if (gimple_has_mem_ops (copy))
1804 {
1805 gimple_set_vdef (copy, NULL_TREE);
1806 gimple_set_vuse (copy, NULL_TREE);
1807 }
1808
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1811 }
1812
1813
1814 /* Copy a basic block and scale its profile accordingly.  Edges will be
1815    taken care of later.  */
1816
1817 static basic_block
1818 copy_bb (copy_body_data *id, basic_block bb,
1819 profile_count num, profile_count den)
1820 {
1821 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1822 basic_block copy_basic_block;
1823 tree decl;
1824 basic_block prev;
1825
1826 profile_count::adjust_for_ipa_scaling (&num, &den);
1827
1828 /* Search for previous copied basic block. */
1829 prev = bb->prev_bb;
1830 while (!prev->aux)
1831 prev = prev->prev_bb;
1832
1833 /* create_basic_block() will append every new block to
1834 basic_block_info automatically. */
1835 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1836 copy_basic_block->count = bb->count.apply_scale (num, den);
1837
1838 copy_gsi = gsi_start_bb (copy_basic_block);
1839
1840 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1841 {
1842 gimple_seq stmts;
1843 gimple *stmt = gsi_stmt (gsi);
1844 gimple *orig_stmt = stmt;
1845 gimple_stmt_iterator stmts_gsi;
1846 bool stmt_added = false;
1847
1848 id->regimplify = false;
1849 stmts = remap_gimple_stmt (stmt, id);
1850
1851 if (gimple_seq_empty_p (stmts))
1852 continue;
1853
1854 seq_gsi = copy_gsi;
1855
1856 for (stmts_gsi = gsi_start (stmts);
1857 !gsi_end_p (stmts_gsi); )
1858 {
1859 stmt = gsi_stmt (stmts_gsi);
1860
1861 /* Advance iterator now before stmt is moved to seq_gsi. */
1862 gsi_next (&stmts_gsi);
1863
1864 if (gimple_nop_p (stmt))
1865 continue;
1866
1867 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1868 orig_stmt);
1869
1870 /* With return slot optimization we can end up with
1871 non-gimple (foo *)&this->m, fix that here. */
1872 if (is_gimple_assign (stmt)
1873 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1874 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1875 {
1876 tree new_rhs;
1877 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1878 gimple_assign_rhs1 (stmt),
1879 true, NULL, false,
1880 GSI_CONTINUE_LINKING);
1881 gimple_assign_set_rhs1 (stmt, new_rhs);
1882 id->regimplify = false;
1883 }
1884
1885 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1886
1887 if (id->regimplify)
1888 gimple_regimplify_operands (stmt, &seq_gsi);
1889
1890 stmt_added = true;
1891 }
1892
1893 if (!stmt_added)
1894 continue;
1895
1896       /* If copy_basic_block was empty at the start of this iteration,
1897 call gsi_start_bb again to get at the newly added statements. */
1898 if (gsi_end_p (copy_gsi))
1899 copy_gsi = gsi_start_bb (copy_basic_block);
1900 else
1901 gsi_next (&copy_gsi);
1902
1903 /* Process the new statement. The call to gimple_regimplify_operands
1904 	 possibly turned the statement into multiple statements; we
1905 need to process all of them. */
1906 do
1907 {
1908 tree fn;
1909 gcall *call_stmt;
1910
1911 stmt = gsi_stmt (copy_gsi);
1912 call_stmt = dyn_cast <gcall *> (stmt);
1913 if (call_stmt
1914 && gimple_call_va_arg_pack_p (call_stmt)
1915 && id->call_stmt
1916 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1917 {
1918 /* __builtin_va_arg_pack () should be replaced by
1919 all arguments corresponding to ... in the caller. */
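	      /* For instance, if the call being inlined was foo (x, 5, 6)
		 and foo has one named parameter before the ellipsis, a copied
		 call bar (__builtin_va_arg_pack ()) inside foo becomes
		 bar (5, 6).  */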
1920 tree p;
1921 gcall *new_call;
1922 vec<tree> argarray;
1923 size_t nargs = gimple_call_num_args (id->call_stmt);
1924 size_t n;
1925
1926 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1927 nargs--;
1928
1929 /* Create the new array of arguments. */
1930 n = nargs + gimple_call_num_args (call_stmt);
1931 argarray.create (n);
1932 argarray.safe_grow_cleared (n);
1933
1934 /* Copy all the arguments before '...' */
1935 memcpy (argarray.address (),
1936 gimple_call_arg_ptr (call_stmt, 0),
1937 gimple_call_num_args (call_stmt) * sizeof (tree));
1938
1939 /* Append the arguments passed in '...' */
1940 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1941 gimple_call_arg_ptr (id->call_stmt, 0)
1942 + (gimple_call_num_args (id->call_stmt) - nargs),
1943 nargs * sizeof (tree));
1944
1945 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1946 argarray);
1947
1948 argarray.release ();
1949
1950 /* Copy all GIMPLE_CALL flags, location and block, except
1951 GF_CALL_VA_ARG_PACK. */
1952 gimple_call_copy_flags (new_call, call_stmt);
1953 gimple_call_set_va_arg_pack (new_call, false);
1954 /* location includes block. */
1955 gimple_set_location (new_call, gimple_location (stmt));
1956 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1957
1958 gsi_replace (&copy_gsi, new_call, false);
1959 stmt = new_call;
1960 }
1961 else if (call_stmt
1962 && id->call_stmt
1963 && (decl = gimple_call_fndecl (stmt))
1964 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
1965 {
1966 /* __builtin_va_arg_pack_len () should be replaced by
1967 the number of anonymous arguments. */
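	      /* E.g. for a call foo (x, 5, 6) to a callee with one named
		 parameter before the ellipsis, __builtin_va_arg_pack_len ()
		 evaluates to 2.  */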
1968 size_t nargs = gimple_call_num_args (id->call_stmt);
1969 tree count, p;
1970 gimple *new_stmt;
1971
1972 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1973 nargs--;
1974
1975 if (!gimple_call_lhs (stmt))
1976 {
1977 /* Drop unused calls. */
1978 gsi_remove (&copy_gsi, false);
1979 continue;
1980 }
1981 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
1982 {
1983 count = build_int_cst (integer_type_node, nargs);
1984 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1985 gsi_replace (&copy_gsi, new_stmt, false);
1986 stmt = new_stmt;
1987 }
1988 else if (nargs != 0)
1989 {
1990 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
1991 count = build_int_cst (integer_type_node, nargs);
1992 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1993 PLUS_EXPR, newlhs, count);
1994 gimple_call_set_lhs (stmt, newlhs);
1995 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
1996 }
1997 }
1998 else if (call_stmt
1999 && id->call_stmt
2000 && gimple_call_internal_p (stmt)
2001 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2002 {
2003 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2004 gsi_remove (&copy_gsi, false);
2005 continue;
2006 }
2007
2008 /* Statements produced by inlining can be unfolded, especially
2009 when we constant propagated some operands. We can't fold
2010 them right now for two reasons:
2011 	 1) folding requires SSA_NAME_DEF_STMTs to be correct
2012 	 2) we can't change function calls to builtins.
2013 	 So we just mark the statement for later folding.  We mark
2014 	 all new statements, instead of just those that have changed
2015 	 by some nontrivial substitution, so even statements made
2016 foldable indirectly are updated. If this turns out to be
2017 expensive, copy_body can be told to watch for nontrivial
2018 changes. */
2019 if (id->statements_to_fold)
2020 id->statements_to_fold->add (stmt);
2021
2022 /* We're duplicating a CALL_EXPR. Find any corresponding
2023 callgraph edges and update or duplicate them. */
2024 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2025 {
2026 struct cgraph_edge *edge;
2027
2028 switch (id->transform_call_graph_edges)
2029 {
2030 case CB_CGE_DUPLICATE:
2031 edge = id->src_node->get_edge (orig_stmt);
2032 if (edge)
2033 {
2034 struct cgraph_edge *old_edge = edge;
2035 profile_count old_cnt = edge->count;
2036 edge = edge->clone (id->dst_node, call_stmt,
2037 gimple_uid (stmt),
2038 num, den,
2039 true);
2040
2041 /* Speculative calls consist of two edges - direct and
2042 indirect. Duplicate the whole thing and distribute
2043 frequencies accordingly. */
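		      /* For instance, if the original direct edge had
			 count 90 and the indirect one count 10, the cloned
			 indirect edge receives 10% of the copied block's
			 count and the direct edge the remainder.  */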
2044 if (edge->speculative)
2045 {
2046 struct cgraph_edge *direct, *indirect;
2047 struct ipa_ref *ref;
2048
2049 gcc_assert (!edge->indirect_unknown_callee);
2050 old_edge->speculative_call_info (direct, indirect, ref);
2051
2052 profile_count indir_cnt = indirect->count;
2053 indirect = indirect->clone (id->dst_node, call_stmt,
2054 gimple_uid (stmt),
2055 num, den,
2056 true);
2057
2058 profile_probability prob
2059 = indir_cnt.probability_in (old_cnt + indir_cnt);
2060 indirect->count
2061 = copy_basic_block->count.apply_probability (prob);
2062 edge->count = copy_basic_block->count - indirect->count;
2063 id->dst_node->clone_reference (ref, stmt);
2064 }
2065 else
2066 edge->count = copy_basic_block->count;
2067 }
2068 break;
2069
2070 case CB_CGE_MOVE_CLONES:
2071 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2072 call_stmt);
2073 edge = id->dst_node->get_edge (stmt);
2074 break;
2075
2076 case CB_CGE_MOVE:
2077 edge = id->dst_node->get_edge (orig_stmt);
2078 if (edge)
2079 edge->set_call_stmt (call_stmt);
2080 break;
2081
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 	  /* Constant propagation on arguments done during inlining
2087 	     may create a new direct call.  Produce an edge for it.  */
2088 if ((!edge
2089 || (edge->indirect_inlining_edge
2090 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2091 && id->dst_node->definition
2092 && (fn = gimple_call_fndecl (stmt)) != NULL)
2093 {
2094 struct cgraph_node *dest = cgraph_node::get_create (fn);
2095
2096 	      /* We have a missing edge in the callgraph.  This can happen
2097 		 when previous inlining turned an indirect call into a
2098 		 direct call by constant propagating arguments, or we are
2099 		 producing a dead clone (for further cloning).  In all
2100 		 other cases we hit a bug (incorrect node sharing is the
2101 		 most common reason for missing edges).  */
2102 gcc_assert (!dest->definition
2103 || dest->address_taken
2104 || !id->src_node->definition
2105 || !id->dst_node->definition);
2106 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2107 id->dst_node->create_edge_including_clones
2108 (dest, orig_stmt, call_stmt, bb->count,
2109 CIF_ORIGINALLY_INDIRECT_CALL);
2110 else
2111 id->dst_node->create_edge (dest, call_stmt,
2112 bb->count)->inline_failed
2113 = CIF_ORIGINALLY_INDIRECT_CALL;
2114 if (dump_file)
2115 {
2116 fprintf (dump_file, "Created new direct edge to %s\n",
2117 dest->name ());
2118 }
2119 }
2120
2121 notice_special_calls (as_a <gcall *> (stmt));
2122 }
2123
2124 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2125 id->eh_map, id->eh_lp_nr);
2126
2127 gsi_next (&copy_gsi);
2128 }
2129 while (!gsi_end_p (copy_gsi));
2130
2131 copy_gsi = gsi_last_bb (copy_basic_block);
2132 }
2133
2134 return copy_basic_block;
2135 }
2136
2137 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2138    form is quite easy, since the dominator relationship for the old basic
2139    blocks does not change.
2140
2141    There is however an exception where inlining might change the dominator
2142    relation across EH edges from basic blocks within the inlined function
2143    to landing pads in the function we inline into.
2144
2145    The function fills in PHI_RESULTs of such PHI nodes if they refer
2146    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2147    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2148    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2149    set, and this means that there will be no overlapping live ranges
2150    for the underlying symbol.
2151
2152    This might change in the future if we allow redirecting of EH edges and
2153    we might then want to change the way we build the CFG pre-inlining to
2154    include all the possible edges.  */
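/* Roughly, for each PHI in such a destination block the argument incoming
   along the new edge E is seeded from the argument the PHI already carries
   on the corresponding edge from RET_BB.  */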
2155 static void
2156 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2157 bool can_throw, bool nonlocal_goto)
2158 {
2159 edge e;
2160 edge_iterator ei;
2161
2162 FOR_EACH_EDGE (e, ei, bb->succs)
2163 if (!e->dest->aux
2164 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2165 {
2166 gphi *phi;
2167 gphi_iterator si;
2168
2169 if (!nonlocal_goto)
2170 gcc_assert (e->flags & EDGE_EH);
2171
2172 if (!can_throw)
2173 gcc_assert (!(e->flags & EDGE_EH));
2174
2175 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2176 {
2177 edge re;
2178
2179 phi = si.phi ();
2180
2181 /* For abnormal goto/call edges the receiver can be the
2182 ENTRY_BLOCK. Do not assert this cannot happen. */
2183
2184 gcc_assert ((e->flags & EDGE_EH)
2185 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2186
2187 re = find_edge (ret_bb, e->dest);
2188 gcc_checking_assert (re);
2189 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2190 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2191
2192 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2193 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2194 }
2195 }
2196 }
2197
2198 /* Insert clobbers for the automatic variables of the inlined ID->src_fn
2199    function at the start of basic block BB.  */
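/* E.g. for each eligible local variable VAR of the inlined function this
   emits "VAR ={v} {CLOBBER};" right after the labels of BB, marking the
   variable as dead on the EH path so its stack slot can be reused.  */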
2200
2201 static void
2202 add_clobbers_to_eh_landing_pad (basic_block bb, copy_body_data *id)
2203 {
2204 tree var;
2205 unsigned int i;
2206 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2207 if (VAR_P (var)
2208 && !DECL_HARD_REGISTER (var)
2209 && !TREE_THIS_VOLATILE (var)
2210 && !DECL_HAS_VALUE_EXPR_P (var)
2211 && !is_gimple_reg (var)
2212 && auto_var_in_fn_p (var, id->src_fn)
2213 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2214 {
2215 tree *t = id->decl_map->get (var);
2216 if (!t)
2217 continue;
2218 tree new_var = *t;
2219 if (VAR_P (new_var)
2220 && !DECL_HARD_REGISTER (new_var)
2221 && !TREE_THIS_VOLATILE (new_var)
2222 && !DECL_HAS_VALUE_EXPR_P (new_var)
2223 && !is_gimple_reg (new_var)
2224 && auto_var_in_fn_p (new_var, id->dst_fn))
2225 {
2226 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2227 tree clobber = build_clobber (TREE_TYPE (new_var));
2228 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2229 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2230 }
2231 }
2232 }
2233
2234 /* Copy edges from BB into its copy constructed earlier, scale profile
2235    accordingly.  Edges will be taken care of later.  Assume the aux
2236    pointers point to the copies of each BB.  Return true if any
2237    debug stmts are left after a statement that must end the basic block.  */
2238
2239 static bool
2240 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2241 basic_block ret_bb, basic_block abnormal_goto_dest,
2242 copy_body_data *id)
2243 {
2244 basic_block new_bb = (basic_block) bb->aux;
2245 edge_iterator ei;
2246 edge old_edge;
2247 gimple_stmt_iterator si;
2248 bool need_debug_cleanup = false;
2249
2250 /* Use the indices from the original blocks to create edges for the
2251 new ones. */
2252 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2253 if (!(old_edge->flags & EDGE_EH))
2254 {
2255 edge new_edge;
2256 int flags = old_edge->flags;
2257 location_t locus = old_edge->goto_locus;
2258
2259 /* Return edges do get a FALLTHRU flag when they get inlined. */
2260 if (old_edge->dest->index == EXIT_BLOCK
2261 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2262 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2263 flags |= EDGE_FALLTHRU;
2264
2265 new_edge
2266 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2267 new_edge->probability = old_edge->probability;
2268 if (!id->reset_location)
2269 new_edge->goto_locus = remap_location (locus, id);
2270 }
2271
2272 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2273 return false;
2274
2275   /* When doing function splitting, we must decrease the count of the return
2276      block which was previously reachable by a block we did not copy.  */
2277 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2278 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2279 if (old_edge->src->index != ENTRY_BLOCK
2280 && !old_edge->src->aux)
2281 new_bb->count -= old_edge->count ().apply_scale (num, den);
2282
2283 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2284 {
2285 gimple *copy_stmt;
2286 bool can_throw, nonlocal_goto;
2287
2288 copy_stmt = gsi_stmt (si);
2289 if (!is_gimple_debug (copy_stmt))
2290 update_stmt (copy_stmt);
2291
2292 /* Do this before the possible split_block. */
2293 gsi_next (&si);
2294
2295 /* If this tree could throw an exception, there are two
2296 cases where we need to add abnormal edge(s): the
2297 tree wasn't in a region and there is a "current
2298 region" in the caller; or the original tree had
2299 EH edges. In both cases split the block after the tree,
2300 and add abnormal edge(s) as needed; we need both
2301 those from the callee and the caller.
2302 	 We check whether the copy can throw, because constant
2303 propagation can change an INDIRECT_REF which throws
2304 into a COMPONENT_REF which doesn't. If the copy
2305 can throw, the original could also throw. */
2306 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2307 nonlocal_goto
2308 = (stmt_can_make_abnormal_goto (copy_stmt)
2309 && !computed_goto_p (copy_stmt));
2310
2311 if (can_throw || nonlocal_goto)
2312 {
2313 if (!gsi_end_p (si))
2314 {
2315 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2316 gsi_next (&si);
2317 if (gsi_end_p (si))
2318 need_debug_cleanup = true;
2319 }
2320 if (!gsi_end_p (si))
2321 /* Note that bb's predecessor edges aren't necessarily
2322 right at this point; split_block doesn't care. */
2323 {
2324 edge e = split_block (new_bb, copy_stmt);
2325
2326 new_bb = e->dest;
2327 new_bb->aux = e->src->aux;
2328 si = gsi_start_bb (new_bb);
2329 }
2330 }
2331
2332 bool update_probs = false;
2333
2334 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2335 {
2336 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2337 update_probs = true;
2338 }
2339 else if (can_throw)
2340 {
2341 make_eh_edges (copy_stmt);
2342 update_probs = true;
2343 }
2344
2345 /* EH edges may not match old edges. Copy as much as possible. */
2346 if (update_probs)
2347 {
2348 edge e;
2349 edge_iterator ei;
2350 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2351
2352 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2353 if ((old_edge->flags & EDGE_EH)
2354 && (e = find_edge (copy_stmt_bb,
2355 (basic_block) old_edge->dest->aux))
2356 && (e->flags & EDGE_EH))
2357 e->probability = old_edge->probability;
2358
2359 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2360 if (e->flags & EDGE_EH)
2361 {
2362 if (!e->probability.initialized_p ())
2363 e->probability = profile_probability::never ();
2364 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2365 {
2366 add_clobbers_to_eh_landing_pad (e->dest, id);
2367 id->add_clobbers_to_eh_landing_pads = 0;
2368 }
2369 }
2370 }
2371
2372
2373 /* If the call we inline cannot make abnormal goto do not add
2374 additional abnormal edges but only retain those already present
2375 in the original function body. */
2376 if (abnormal_goto_dest == NULL)
2377 nonlocal_goto = false;
2378 if (nonlocal_goto)
2379 {
2380 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2381
2382 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2383 nonlocal_goto = false;
2384 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2385 in OpenMP regions which aren't allowed to be left abnormally.
2386 	     So, no need to add an abnormal edge in that case.  */
2387 else if (is_gimple_call (copy_stmt)
2388 && gimple_call_internal_p (copy_stmt)
2389 && (gimple_call_internal_fn (copy_stmt)
2390 == IFN_ABNORMAL_DISPATCHER)
2391 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2392 nonlocal_goto = false;
2393 else
2394 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2395 EDGE_ABNORMAL);
2396 }
2397
2398 if ((can_throw || nonlocal_goto)
2399 && gimple_in_ssa_p (cfun))
2400 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2401 can_throw, nonlocal_goto);
2402 }
2403 return need_debug_cleanup;
2404 }
2405
2406 /* Copy the PHIs. All blocks and edges are copied, some blocks
2407    were possibly split and new outgoing EH edges inserted.
2408    BB points to the block of the original function and AUX pointers link
2409 the original and newly copied blocks. */
2410
2411 static void
2412 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2413 {
2414 basic_block const new_bb = (basic_block) bb->aux;
2415 edge_iterator ei;
2416 gphi *phi;
2417 gphi_iterator si;
2418 edge new_edge;
2419 bool inserted = false;
2420
2421 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2422 {
2423 tree res, new_res;
2424 gphi *new_phi;
2425
2426 phi = si.phi ();
2427 res = PHI_RESULT (phi);
2428 new_res = res;
2429 if (!virtual_operand_p (res))
2430 {
2431 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2432 if (EDGE_COUNT (new_bb->preds) == 0)
2433 {
2434 	      /* Technically we'd want an SSA_DEFAULT_DEF here... */
2435 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2436 }
2437 else
2438 {
2439 new_phi = create_phi_node (new_res, new_bb);
2440 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2441 {
2442 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2443 bb);
2444 tree arg;
2445 tree new_arg;
2446 edge_iterator ei2;
2447 location_t locus;
2448
2449 /* When doing partial cloning, we allow PHIs on the entry
2450 block as long as all the arguments are the same.
2451 			     Find any input edge to see the argument to copy.  */
2452 if (!old_edge)
2453 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2454 if (!old_edge->src->aux)
2455 break;
2456
2457 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2458 new_arg = arg;
2459 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2460 gcc_assert (new_arg);
2461 /* With return slot optimization we can end up with
2462 non-gimple (foo *)&this->m, fix that here. */
2463 if (TREE_CODE (new_arg) != SSA_NAME
2464 && TREE_CODE (new_arg) != FUNCTION_DECL
2465 && !is_gimple_val (new_arg))
2466 {
2467 gimple_seq stmts = NULL;
2468 new_arg = force_gimple_operand (new_arg, &stmts, true,
2469 NULL);
2470 gsi_insert_seq_on_edge (new_edge, stmts);
2471 inserted = true;
2472 }
2473 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2474 if (id->reset_location)
2475 locus = input_location;
2476 else
2477 locus = remap_location (locus, id);
2478 add_phi_arg (new_phi, new_arg, new_edge, locus);
2479 }
2480 }
2481 }
2482 }
2483
2484 /* Commit the delayed edge insertions. */
2485 if (inserted)
2486 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2487 gsi_commit_one_edge_insert (new_edge, NULL);
2488 }
2489
2490
2491 /* Wrapper for remap_decl so it can be used as a callback. */
2492
2493 static tree
2494 remap_decl_1 (tree decl, void *data)
2495 {
2496 return remap_decl (decl, (copy_body_data *) data);
2497 }
2498
2499 /* Build the struct function and associated data structures for the new
2500    clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The function
2501    changes cfun to the function of NEW_FNDECL (and current_function_decl too).  */
2502
2503 static void
2504 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2505 {
2506 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2507
2508 if (!DECL_ARGUMENTS (new_fndecl))
2509 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2510 if (!DECL_RESULT (new_fndecl))
2511 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2512
2513 /* Register specific tree functions. */
2514 gimple_register_cfg_hooks ();
2515
2516 /* Get clean struct function. */
2517 push_struct_function (new_fndecl);
2518
2519 /* We will rebuild these, so just sanity check that they are empty. */
2520 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2521 gcc_assert (cfun->local_decls == NULL);
2522 gcc_assert (cfun->cfg == NULL);
2523 gcc_assert (cfun->decl == new_fndecl);
2524
2525 /* Copy items we preserve during cloning. */
2526 cfun->static_chain_decl = src_cfun->static_chain_decl;
2527 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2528 cfun->function_end_locus = src_cfun->function_end_locus;
2529 cfun->curr_properties = src_cfun->curr_properties;
2530 cfun->last_verified = src_cfun->last_verified;
2531 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2532 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2533 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2534 cfun->stdarg = src_cfun->stdarg;
2535 cfun->after_inlining = src_cfun->after_inlining;
2536 cfun->can_throw_non_call_exceptions
2537 = src_cfun->can_throw_non_call_exceptions;
2538 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2539 cfun->returns_struct = src_cfun->returns_struct;
2540 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2541
2542 init_empty_tree_cfg ();
2543
2544 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2545
2546 profile_count num = count;
2547 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2548 profile_count::adjust_for_ipa_scaling (&num, &den);
2549
2550 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2551 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2552 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2553 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2554 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2555 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2556 if (src_cfun->eh)
2557 init_eh_for_function ();
2558
2559 if (src_cfun->gimple_df)
2560 {
2561 init_tree_ssa (cfun);
2562 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2563 if (cfun->gimple_df->in_ssa_p)
2564 init_ssa_operands (cfun);
2565 }
2566 }
2567
2568 /* Helper function for copy_cfg_body. Move debug stmts from the end
2569 of NEW_BB to the beginning of successor basic blocks when needed. If the
2570    successor has multiple predecessors, reset them; otherwise keep
2571 their value. */
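/* For example, a "# DEBUG x => x_1" bind left after a throwing call at the
   end of NEW_BB is moved (or copied) to the start of each successor; when a
   successor has several predecessors the bound value is reset, since the
   binding need not hold on every incoming path.  */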
2572
2573 static void
2574 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2575 {
2576 edge e;
2577 edge_iterator ei;
2578 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2579
2580 if (gsi_end_p (si)
2581 || gsi_one_before_end_p (si)
2582 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2583 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2584 return;
2585
2586 FOR_EACH_EDGE (e, ei, new_bb->succs)
2587 {
2588 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2589 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2590 while (is_gimple_debug (gsi_stmt (ssi)))
2591 {
2592 gimple *stmt = gsi_stmt (ssi);
2593 gdebug *new_stmt;
2594 tree var;
2595 tree value;
2596
2597 /* For the last edge move the debug stmts instead of copying
2598 them. */
2599 if (ei_one_before_end_p (ei))
2600 {
2601 si = ssi;
2602 gsi_prev (&ssi);
2603 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2604 {
2605 gimple_debug_bind_reset_value (stmt);
2606 gimple_set_location (stmt, UNKNOWN_LOCATION);
2607 }
2608 gsi_remove (&si, false);
2609 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2610 continue;
2611 }
2612
2613 if (gimple_debug_bind_p (stmt))
2614 {
2615 var = gimple_debug_bind_get_var (stmt);
2616 if (single_pred_p (e->dest))
2617 {
2618 value = gimple_debug_bind_get_value (stmt);
2619 value = unshare_expr (value);
2620 new_stmt = gimple_build_debug_bind (var, value, stmt);
2621 }
2622 else
2623 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2624 }
2625 else if (gimple_debug_source_bind_p (stmt))
2626 {
2627 var = gimple_debug_source_bind_get_var (stmt);
2628 value = gimple_debug_source_bind_get_value (stmt);
2629 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2630 }
2631 else if (gimple_debug_nonbind_marker_p (stmt))
2632 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2633 else
2634 gcc_unreachable ();
2635 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2636 id->debug_stmts.safe_push (new_stmt);
2637 gsi_prev (&ssi);
2638 }
2639 }
2640 }
2641
2642 /* Make a copy of the sub-loops of SRC_PARENT and place them
2643    as sub-loops of DEST_PARENT.  */
2644
2645 static void
2646 copy_loops (copy_body_data *id,
2647 struct loop *dest_parent, struct loop *src_parent)
2648 {
2649 struct loop *src_loop = src_parent->inner;
2650 while (src_loop)
2651 {
2652 if (!id->blocks_to_copy
2653 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2654 {
2655 struct loop *dest_loop = alloc_loop ();
2656
2657 /* Assign the new loop its header and latch and associate
2658 those with the new loop. */
2659 dest_loop->header = (basic_block)src_loop->header->aux;
2660 dest_loop->header->loop_father = dest_loop;
2661 if (src_loop->latch != NULL)
2662 {
2663 dest_loop->latch = (basic_block)src_loop->latch->aux;
2664 dest_loop->latch->loop_father = dest_loop;
2665 }
2666
2667 /* Copy loop meta-data. */
2668 copy_loop_info (src_loop, dest_loop);
2669 if (dest_loop->unroll)
2670 cfun->has_unroll = true;
2671 if (dest_loop->force_vectorize)
2672 cfun->has_force_vectorize_loops = true;
2673 if (id->src_cfun->last_clique != 0)
2674 dest_loop->owned_clique
2675 = remap_dependence_clique (id,
2676 src_loop->owned_clique
2677 ? src_loop->owned_clique : 1);
2678
2679 /* Finally place it into the loop array and the loop tree. */
2680 place_new_loop (cfun, dest_loop);
2681 flow_loop_tree_node_add (dest_parent, dest_loop);
2682
2683 if (src_loop->simduid)
2684 {
2685 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2686 cfun->has_simduid_loops = true;
2687 }
2688
2689 /* Recurse. */
2690 copy_loops (id, dest_loop, src_loop);
2691 }
2692 src_loop = src_loop->next;
2693 }
2694 }
2695
2696 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2697
2698 void
2699 redirect_all_calls (copy_body_data * id, basic_block bb)
2700 {
2701 gimple_stmt_iterator si;
2702 gimple *last = last_stmt (bb);
2703 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2704 {
2705 gimple *stmt = gsi_stmt (si);
2706 if (is_gimple_call (stmt))
2707 {
2708 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2709 if (edge)
2710 {
2711 edge->redirect_call_stmt_to_callee ();
2712 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2713 gimple_purge_dead_eh_edges (bb);
2714 }
2715 }
2716 }
2717 }
2718
2719 /* Make a copy of the body of FN so that it can be inserted inline in
2720    another function.  Walks FN via its CFG and returns the new fndecl.  */
2721
2722 static tree
2723 copy_cfg_body (copy_body_data * id,
2724 basic_block entry_block_map, basic_block exit_block_map,
2725 basic_block new_entry)
2726 {
2727 tree callee_fndecl = id->src_fn;
2728 /* Original cfun for the callee, doesn't change. */
2729 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2730 struct function *cfun_to_copy;
2731 basic_block bb;
2732 tree new_fndecl = NULL;
2733 bool need_debug_cleanup = false;
2734 int last;
2735 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2736 profile_count num = entry_block_map->count;
2737
2738 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2739
2740 /* Register specific tree functions. */
2741 gimple_register_cfg_hooks ();
2742
2743   /* If we are inlining just a region of the function, make sure to connect
2744      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2745      be part of a loop, we must compute the frequency and probability of
2746      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2747      probabilities of the edges incoming from the nonduplicated region.  */
2748 if (new_entry)
2749 {
2750 edge e;
2751 edge_iterator ei;
2752 den = profile_count::zero ();
2753
2754 FOR_EACH_EDGE (e, ei, new_entry->preds)
2755 if (!e->src->aux)
2756 den += e->count ();
2757 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2758 }
2759
2760 profile_count::adjust_for_ipa_scaling (&num, &den);
2761
2762 /* Must have a CFG here at this point. */
2763 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2764 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2765
2766
2767 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2768 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2769 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2770 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2771
2772 /* Duplicate any exception-handling regions. */
2773 if (cfun->eh)
2774 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2775 remap_decl_1, id);
2776
2777 /* Use aux pointers to map the original blocks to copy. */
2778 FOR_EACH_BB_FN (bb, cfun_to_copy)
2779 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2780 {
2781 basic_block new_bb = copy_bb (id, bb, num, den);
2782 bb->aux = new_bb;
2783 new_bb->aux = bb;
2784 new_bb->loop_father = entry_block_map->loop_father;
2785 }
2786
2787 last = last_basic_block_for_fn (cfun);
2788
2789 /* Now that we've duplicated the blocks, duplicate their edges. */
2790 basic_block abnormal_goto_dest = NULL;
2791 if (id->call_stmt
2792 && stmt_can_make_abnormal_goto (id->call_stmt))
2793 {
2794 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2795
2796 bb = gimple_bb (id->call_stmt);
2797 gsi_next (&gsi);
2798 if (gsi_end_p (gsi))
2799 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2800 }
2801 FOR_ALL_BB_FN (bb, cfun_to_copy)
2802 if (!id->blocks_to_copy
2803 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2804 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2805 abnormal_goto_dest, id);
2806
2807 if (new_entry)
2808 {
2809 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2810 EDGE_FALLTHRU);
2811 e->probability = profile_probability::always ();
2812 }
2813
2814 /* Duplicate the loop tree, if available and wanted. */
2815 if (loops_for_fn (src_cfun) != NULL
2816 && current_loops != NULL)
2817 {
2818 copy_loops (id, entry_block_map->loop_father,
2819 get_loop (src_cfun, 0));
2820 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2821 loops_state_set (LOOPS_NEED_FIXUP);
2822 }
2823
2824 /* If the loop tree in the source function needed fixup, mark the
2825 destination loop tree for fixup, too. */
2826 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2827 loops_state_set (LOOPS_NEED_FIXUP);
2828
2829 if (gimple_in_ssa_p (cfun))
2830 FOR_ALL_BB_FN (bb, cfun_to_copy)
2831 if (!id->blocks_to_copy
2832 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2833 copy_phis_for_bb (bb, id);
2834
2835 FOR_ALL_BB_FN (bb, cfun_to_copy)
2836 if (bb->aux)
2837 {
2838 if (need_debug_cleanup
2839 && bb->index != ENTRY_BLOCK
2840 && bb->index != EXIT_BLOCK)
2841 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2842 /* Update call edge destinations. This cannot be done before loop
2843 info is updated, because we may split basic blocks. */
2844 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2845 && bb->index != ENTRY_BLOCK
2846 && bb->index != EXIT_BLOCK)
2847 redirect_all_calls (id, (basic_block)bb->aux);
2848 ((basic_block)bb->aux)->aux = NULL;
2849 bb->aux = NULL;
2850 }
2851
2852   /* Zero out the AUX fields of blocks newly created during EH edge
2853      insertion.  */
2854 for (; last < last_basic_block_for_fn (cfun); last++)
2855 {
2856 if (need_debug_cleanup)
2857 maybe_move_debug_stmts_to_successors (id,
2858 BASIC_BLOCK_FOR_FN (cfun, last));
2859 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2860 /* Update call edge destinations. This cannot be done before loop
2861 info is updated, because we may split basic blocks. */
2862 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2863 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2864 }
2865 entry_block_map->aux = NULL;
2866 exit_block_map->aux = NULL;
2867
2868 if (id->eh_map)
2869 {
2870 delete id->eh_map;
2871 id->eh_map = NULL;
2872 }
2873 if (id->dependence_map)
2874 {
2875 delete id->dependence_map;
2876 id->dependence_map = NULL;
2877 }
2878
2879 return new_fndecl;
2880 }
2881
2882 /* Copy the debug STMT using ID. We deal with these statements in a
2883 special way: if any variable in their VALUE expression wasn't
2884 remapped yet, we won't remap it, because that would get decl uids
2885 out of sync, causing codegen differences between -g and -g0. If
2886 this arises, we drop the VALUE expression altogether. */
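/* E.g. a bind "# DEBUG x => y" whose y was never remapped becomes
   "# DEBUG x => NULL" instead of dragging the unremapped decl into the
   copy.  */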
2887
2888 static void
2889 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2890 {
2891 tree t, *n;
2892 struct walk_stmt_info wi;
2893
2894 if (tree block = gimple_block (stmt))
2895 {
2896 n = id->decl_map->get (block);
2897 gimple_set_block (stmt, n ? *n : id->block);
2898 }
2899
2900 if (gimple_debug_nonbind_marker_p (stmt))
2901 return;
2902
2903 /* Remap all the operands in COPY. */
2904 memset (&wi, 0, sizeof (wi));
2905 wi.info = id;
2906
2907 processing_debug_stmt = 1;
2908
2909 if (gimple_debug_source_bind_p (stmt))
2910 t = gimple_debug_source_bind_get_var (stmt);
2911 else if (gimple_debug_bind_p (stmt))
2912 t = gimple_debug_bind_get_var (stmt);
2913 else
2914 gcc_unreachable ();
2915
2916 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2917 && (n = id->debug_map->get (t)))
2918 {
2919 gcc_assert (VAR_P (*n));
2920 t = *n;
2921 }
2922 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2923 /* T is a non-localized variable. */;
2924 else
2925 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2926
2927 if (gimple_debug_bind_p (stmt))
2928 {
2929 gimple_debug_bind_set_var (stmt, t);
2930
2931 if (gimple_debug_bind_has_value_p (stmt))
2932 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2933 remap_gimple_op_r, &wi, NULL);
2934
2935 /* Punt if any decl couldn't be remapped. */
2936 if (processing_debug_stmt < 0)
2937 gimple_debug_bind_reset_value (stmt);
2938 }
2939 else if (gimple_debug_source_bind_p (stmt))
2940 {
2941 gimple_debug_source_bind_set_var (stmt, t);
2942       /* When inlining, if the source bind refers to one of the optimized-away
2943 	 parameters, change the source bind into a normal debug bind
2944 referring to the corresponding DEBUG_EXPR_DECL that should have
2945 been bound before the call stmt. */
2946 t = gimple_debug_source_bind_get_value (stmt);
2947 if (t != NULL_TREE
2948 && TREE_CODE (t) == PARM_DECL
2949 && id->call_stmt)
2950 {
2951 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2952 unsigned int i;
2953 if (debug_args != NULL)
2954 {
2955 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2956 if ((**debug_args)[i] == DECL_ORIGIN (t)
2957 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2958 {
2959 t = (**debug_args)[i + 1];
2960 stmt->subcode = GIMPLE_DEBUG_BIND;
2961 gimple_debug_bind_set_value (stmt, t);
2962 break;
2963 }
2964 }
2965 }
2966 if (gimple_debug_source_bind_p (stmt))
2967 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2968 remap_gimple_op_r, &wi, NULL);
2969 }
2970
2971 processing_debug_stmt = 0;
2972
2973 update_stmt (stmt);
2974 }
2975
2976 /* Process deferred debug stmts. In order to give values better odds
2977 of being successfully remapped, we delay the processing of debug
2978 stmts until all other stmts that might require remapping are
2979 processed. */
2980
2981 static void
2982 copy_debug_stmts (copy_body_data *id)
2983 {
2984 size_t i;
2985 gdebug *stmt;
2986
2987 if (!id->debug_stmts.exists ())
2988 return;
2989
2990 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2991 copy_debug_stmt (stmt, id);
2992
2993 id->debug_stmts.release ();
2994 }
2995
2996 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2997 another function. */
2998
2999 static tree
3000 copy_tree_body (copy_body_data *id)
3001 {
3002 tree fndecl = id->src_fn;
3003 tree body = DECL_SAVED_TREE (fndecl);
3004
3005 walk_tree (&body, copy_tree_body_r, id, NULL);
3006
3007 return body;
3008 }
3009
3010 /* Make a copy of the body of FN so that it can be inserted inline in
3011 another function. */
3012
3013 static tree
3014 copy_body (copy_body_data *id,
3015 basic_block entry_block_map, basic_block exit_block_map,
3016 basic_block new_entry)
3017 {
3018 tree fndecl = id->src_fn;
3019 tree body;
3020
3021 /* If this body has a CFG, walk CFG and copy. */
3022 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3023 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3024 new_entry);
3025 copy_debug_stmts (id);
3026
3027 return body;
3028 }
3029
3030 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3031 defined in function FN, or of a data member thereof. */
3032
3033 static bool
3034 self_inlining_addr_expr (tree value, tree fn)
3035 {
3036 tree var;
3037
3038 if (TREE_CODE (value) != ADDR_EXPR)
3039 return false;
3040
3041 var = get_base_address (TREE_OPERAND (value, 0));
3042
3043 return var && auto_var_in_fn_p (var, fn);
3044 }
3045
3046 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3047 lexical block and line number information from base_stmt, if given,
3048 or from the last stmt of the block otherwise. */
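/* Roughly, when variable tracking assignments are enabled this emits a
   "# DEBUG VAR => VALUE" bind at the end of BB, so the debugger can still
   show VAR even when no real initialization statement is needed.  */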
3049
3050 static gimple *
3051 insert_init_debug_bind (copy_body_data *id,
3052 basic_block bb, tree var, tree value,
3053 gimple *base_stmt)
3054 {
3055 gimple *note;
3056 gimple_stmt_iterator gsi;
3057 tree tracked_var;
3058
3059 if (!gimple_in_ssa_p (id->src_cfun))
3060 return NULL;
3061
3062 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3063 return NULL;
3064
3065 tracked_var = target_for_debug_bind (var);
3066 if (!tracked_var)
3067 return NULL;
3068
3069 if (bb)
3070 {
3071 gsi = gsi_last_bb (bb);
3072 if (!base_stmt && !gsi_end_p (gsi))
3073 base_stmt = gsi_stmt (gsi);
3074 }
3075
3076 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3077
3078 if (bb)
3079 {
3080 if (!gsi_end_p (gsi))
3081 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3082 else
3083 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3084 }
3085
3086 return note;
3087 }
3088
3089 static void
3090 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3091 {
3092 /* If VAR represents a zero-sized variable, it's possible that the
3093 assignment statement may result in no gimple statements. */
3094 if (init_stmt)
3095 {
3096 gimple_stmt_iterator si = gsi_last_bb (bb);
3097
3098 /* We can end up with init statements that store to a non-register
3099 from a rhs with a conversion. Handle that here by forcing the
3100 rhs into a temporary. gimple_regimplify_operands is not
3101 prepared to do this for us. */
3102 if (!is_gimple_debug (init_stmt)
3103 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3104 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3105 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3106 {
3107 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3108 gimple_expr_type (init_stmt),
3109 gimple_assign_rhs1 (init_stmt));
3110 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3111 GSI_NEW_STMT);
3112 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3113 gimple_assign_set_rhs1 (init_stmt, rhs);
3114 }
3115 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3116 gimple_regimplify_operands (init_stmt, &si);
3117
3118 if (!is_gimple_debug (init_stmt))
3119 {
3120 tree def = gimple_assign_lhs (init_stmt);
3121 insert_init_debug_bind (id, bb, def, def, init_stmt);
3122 }
3123 }
3124 }
3125
3126 /* Initialize parameter P with VALUE.  If needed, produce the init statement
3127    at the end of BB.  When BB is NULL, we return the init statement to be
3128 output later. */
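/* A sketch of the common cases: for a call foo (a_1) to foo (int p), the
   SSA fast path below simply maps the default definition of P to a_1;
   otherwise a local copy of P is created via copy_decl_to_var and an init
   statement of the form "p_copy = a_1;" is emitted at the end of BB.  */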
3129 static gimple *
3130 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3131 basic_block bb, tree *vars)
3132 {
3133 gimple *init_stmt = NULL;
3134 tree var;
3135 tree rhs = value;
3136 tree def = (gimple_in_ssa_p (cfun)
3137 ? ssa_default_def (id->src_cfun, p) : NULL);
3138
3139 if (value
3140 && value != error_mark_node
3141 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3142 {
3143 /* If we can match up types by promotion/demotion do so. */
3144 if (fold_convertible_p (TREE_TYPE (p), value))
3145 rhs = fold_convert (TREE_TYPE (p), value);
3146 else
3147 {
3148 /* ??? For valid programs we should not end up here.
3149 Still if we end up with truly mismatched types here, fall back
3150 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3151 GIMPLE to the following passes. */
3152 if (!is_gimple_reg_type (TREE_TYPE (value))
3153 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3154 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3155 else
3156 rhs = build_zero_cst (TREE_TYPE (p));
3157 }
3158 }
3159
3160 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3161 here since the type of this decl must be visible to the calling
3162 function. */
3163 var = copy_decl_to_var (p, id);
3164
3165 /* Declare this new variable. */
3166 DECL_CHAIN (var) = *vars;
3167 *vars = var;
3168
3169 /* Make gimplifier happy about this variable. */
3170 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3171
3172   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3173 we would not need to create a new variable here at all, if it
3174 weren't for debug info. Still, we can just use the argument
3175 value. */
3176 if (TREE_READONLY (p)
3177 && !TREE_ADDRESSABLE (p)
3178 && value && !TREE_SIDE_EFFECTS (value)
3179 && !def)
3180 {
3181 /* We may produce non-gimple trees by adding NOPs or introduce
3182 	 invalid sharing when the operand is not really constant.
3183 	 It is not a big deal to prohibit constant propagation here as
3184 	 we will constant propagate in the DOM1 pass anyway.  */
3185 if (is_gimple_min_invariant (value)
3186 && useless_type_conversion_p (TREE_TYPE (p),
3187 TREE_TYPE (value))
3188 /* We have to be very careful about ADDR_EXPR. Make sure
3189 the base variable isn't a local variable of the inlined
3190 function, e.g., when doing recursive inlining, direct or
3191 mutually-recursive or whatever, which is why we don't
3192 just test whether fn == current_function_decl. */
3193 && ! self_inlining_addr_expr (value, fn))
3194 {
3195 insert_decl_map (id, p, value);
3196 insert_debug_decl_map (id, p, var);
3197 return insert_init_debug_bind (id, bb, var, value, NULL);
3198 }
3199 }
3200
3201 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3202 that way, when the PARM_DECL is encountered, it will be
3203 automatically replaced by the VAR_DECL. */
3204 insert_decl_map (id, p, var);
3205
3206 /* Even if P was TREE_READONLY, the new VAR should not be.
3207 In the original code, we would have constructed a
3208 temporary, and then the function body would have never
3209 changed the value of P. However, now, we will be
3210 constructing VAR directly. The constructor body may
3211 change its value multiple times as it is being
3212 constructed. Therefore, it must not be TREE_READONLY;
3213 the back-end assumes that TREE_READONLY variable is
3214 assigned to only once. */
3215 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3216 TREE_READONLY (var) = 0;
3217
3218 /* If there is no setup required and we are in SSA, take the easy route
3219 replacing all SSA names representing the function parameter by the
3220      SSA name passed to the function.
3221
3222      We need to construct a map for the variable anyway as it might be used
3223      in different SSA names when the parameter is set in the function.
3224
3225      Do the replacement at -O0 for const arguments replaced by a constant.
3226      This is important for builtin_constant_p and other constructs requiring
3227      a constant argument to be visible in the inlined function body.  */
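  /* E.g. at -O0, for a call foo (5) to foo (const int p), mapping the
     default definition of P directly to 5 lets __builtin_constant_p (p)
     in the inlined body still fold to 1.  */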
3228 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3229 && (optimize
3230 || (TREE_READONLY (p)
3231 && is_gimple_min_invariant (rhs)))
3232 && (TREE_CODE (rhs) == SSA_NAME
3233 || is_gimple_min_invariant (rhs))
3234 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3235 {
3236 insert_decl_map (id, def, rhs);
3237 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3238 }
3239
3240   /* If the value of the argument is never used, do not bother initializing
3241 it. */
3242 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3243 {
3244 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3245 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3246 }
3247
3248 /* Initialize this VAR_DECL from the equivalent argument. Convert
3249 the argument to the proper type in case it was promoted. */
3250 if (value)
3251 {
3252 if (rhs == error_mark_node)
3253 {
3254 insert_decl_map (id, p, var);
3255 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3256 }
3257
3258 STRIP_USELESS_TYPE_CONVERSION (rhs);
3259
3260       /* If we are in SSA form, properly remap the default definition
3261 or assign to a dummy SSA name if the parameter is unused and
3262 we are not optimizing. */
3263 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3264 {
3265 if (def)
3266 {
3267 def = remap_ssa_name (def, id);
3268 init_stmt = gimple_build_assign (def, rhs);
3269 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3270 set_ssa_default_def (cfun, var, NULL);
3271 }
3272 else if (!optimize)
3273 {
3274 def = make_ssa_name (var);
3275 init_stmt = gimple_build_assign (def, rhs);
3276 }
3277 }
3278 else
3279 init_stmt = gimple_build_assign (var, rhs);
3280
3281 if (bb && init_stmt)
3282 insert_init_stmt (id, bb, init_stmt);
3283 }
3284 return init_stmt;
3285 }
3286
3287 /* Generate code to initialize the parameters of the function at the
3288 top of the stack in ID from the GIMPLE_CALL STMT. */
3289
3290 static void
3291 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3292 tree fn, basic_block bb)
3293 {
3294 tree parms;
3295 size_t i;
3296 tree p;
3297 tree vars = NULL_TREE;
3298 tree static_chain = gimple_call_chain (stmt);
3299
3300 /* Figure out what the parameters are. */
3301 parms = DECL_ARGUMENTS (fn);
3302
3303 /* Loop through the parameter declarations, replacing each with an
3304 equivalent VAR_DECL, appropriately initialized. */
3305 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3306 {
3307 tree val;
3308 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3309 setup_one_parameter (id, p, val, fn, bb, &vars);
3310 }
3311   /* After remapping the parameters, remap their types.  This has to be done
3312 in a second loop over all parameters to appropriately remap
3313 variable sized arrays when the size is specified in a
3314 parameter following the array. */
3315 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3316 {
3317 tree *varp = id->decl_map->get (p);
3318 if (varp && VAR_P (*varp))
3319 {
3320 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3321 ? ssa_default_def (id->src_cfun, p) : NULL);
3322 tree var = *varp;
3323 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3324 /* Also remap the default definition if it was remapped
3325 to the default definition of the parameter replacement
3326 by the parameter setup. */
3327 if (def)
3328 {
3329 tree *defp = id->decl_map->get (def);
3330 if (defp
3331 && TREE_CODE (*defp) == SSA_NAME
3332 && SSA_NAME_VAR (*defp) == var)
3333 TREE_TYPE (*defp) = TREE_TYPE (var);
3334 }
3335 }
3336 }
3337
3338 /* Initialize the static chain. */
3339 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3340 gcc_assert (fn != current_function_decl);
3341 if (p)
3342 {
3343 /* No static chain? Seems like a bug in tree-nested.c. */
3344 gcc_assert (static_chain);
3345
3346 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3347 }
3348
3349 declare_inline_vars (id->block, vars);
3350 }
3351
3352
3353 /* Declare a return variable to replace the RESULT_DECL for the
3354 function we are calling. An appropriate DECL_STMT is returned.
3355 The USE_STMT is filled to contain a use of the declaration to
3356 indicate the return value of the function.
3357
3358    RETURN_SLOT, if non-null, is the place where the result is to be stored.  It
3359 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3360 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3361
3362 The return value is a (possibly null) value that holds the result
3363 as seen by the caller. */
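/* For instance, when inlining "x = foo ();", the RESULT_DECL of foo is
   remapped either to x itself (when MODIFY_DEST can safely be reused) or
   to a fresh temporary, and the use expression returned here is what the
   caller's reference to the call's value is replaced with.  */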
3364
3365 static tree
3366 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3367 basic_block entry_bb)
3368 {
3369 tree callee = id->src_fn;
3370 tree result = DECL_RESULT (callee);
3371 tree callee_type = TREE_TYPE (result);
3372 tree caller_type;
3373 tree var, use;
3374
3375 /* Handle type-mismatches in the function declaration return type
3376 vs. the call expression. */
3377 if (modify_dest)
3378 caller_type = TREE_TYPE (modify_dest);
3379 else
3380 caller_type = TREE_TYPE (TREE_TYPE (callee));
3381
3382 /* We don't need to do anything for functions that don't return anything. */
3383 if (VOID_TYPE_P (callee_type))
3384 return NULL_TREE;
3385
3386 /* If there was a return slot, then the return value is the
3387 dereferenced address of that object. */
3388 if (return_slot)
3389 {
3390 /* The front end shouldn't have used both return_slot and
3391 a modify expression. */
3392 gcc_assert (!modify_dest);
3393 if (DECL_BY_REFERENCE (result))
3394 {
3395 tree return_slot_addr = build_fold_addr_expr (return_slot);
3396 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3397
3398 /* We are going to construct *&return_slot and we can't do that
3399 	     for variables believed not to be addressable.
3400
3401 FIXME: This check possibly can match, because values returned
3402 	     via return slot optimization are not believed to have their address
3403 taken by alias analysis. */
3404 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3405 var = return_slot_addr;
3406 }
3407 else
3408 {
3409 var = return_slot;
3410 gcc_assert (TREE_CODE (var) != SSA_NAME);
3411 if (TREE_ADDRESSABLE (result))
3412 mark_addressable (var);
3413 }
3414 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3415 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3416 && !DECL_GIMPLE_REG_P (result)
3417 && DECL_P (var))
3418 DECL_GIMPLE_REG_P (var) = 0;
3419 use = NULL;
3420 goto done;
3421 }
3422
3423 /* All types requiring non-trivial constructors should have been handled. */
3424 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3425
3426 /* Attempt to avoid creating a new temporary variable. */
3427 if (modify_dest
3428 && TREE_CODE (modify_dest) != SSA_NAME)
3429 {
3430 bool use_it = false;
3431
3432 /* We can't use MODIFY_DEST if there's type promotion involved. */
3433 if (!useless_type_conversion_p (callee_type, caller_type))
3434 use_it = false;
3435
3436 /* ??? If we're assigning to a variable sized type, then we must
3437 reuse the destination variable, because we've no good way to
3438 create variable sized temporaries at this point. */
3439 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3440 use_it = true;
3441
3442 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3443 reuse it as the result of the call directly. Don't do this if
3444 it would promote MODIFY_DEST to addressable. */
3445 else if (TREE_ADDRESSABLE (result))
3446 use_it = false;
3447 else
3448 {
3449 tree base_m = get_base_address (modify_dest);
3450
3451 /* If the base isn't a decl, then it's a pointer, and we don't
3452 know where that's going to go. */
3453 if (!DECL_P (base_m))
3454 use_it = false;
3455 else if (is_global_var (base_m))
3456 use_it = false;
3457 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3458 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3459 && !DECL_GIMPLE_REG_P (result)
3460 && DECL_GIMPLE_REG_P (base_m))
3461 use_it = false;
3462 else if (!TREE_ADDRESSABLE (base_m))
3463 use_it = true;
3464 }
3465
3466 if (use_it)
3467 {
3468 var = modify_dest;
3469 use = NULL;
3470 goto done;
3471 }
3472 }
3473
3474 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3475
3476 var = copy_result_decl_to_var (result, id);
3477 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3478
3479 /* Do not have the rest of GCC warn about this variable as it should
3480 not be visible to the user. */
3481 TREE_NO_WARNING (var) = 1;
3482
3483 declare_inline_vars (id->block, var);
3484
3485 /* Build the use expr. If the return type of the function was
3486 promoted, convert it back to the expected type. */
3487 use = var;
3488 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3489 {
3490 /* If we can match up types by promotion/demotion do so. */
3491 if (fold_convertible_p (caller_type, var))
3492 use = fold_convert (caller_type, var);
3493 else
3494 {
3495 /* ??? For valid programs we should not end up here.
3496 Still, if we end up with truly mismatched types here, fall back
3497 to using a MEM_REF to not leak invalid GIMPLE to the following
3498 passes. */
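/* Roughly speaking, the use then becomes *(caller_type *) &var, i.e. a
   type-punning load from VAR (an informal description of the MEM_REF
   built below).  */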
3499 /* Prevent var from being written into SSA form. */
3500 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3501 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3502 DECL_GIMPLE_REG_P (var) = false;
3503 else if (is_gimple_reg_type (TREE_TYPE (var)))
3504 TREE_ADDRESSABLE (var) = true;
3505 use = fold_build2 (MEM_REF, caller_type,
3506 build_fold_addr_expr (var),
3507 build_int_cst (ptr_type_node, 0));
3508 }
3509 }
3510
3511 STRIP_USELESS_TYPE_CONVERSION (use);
3512
3513 if (DECL_BY_REFERENCE (result))
3514 {
3515 TREE_ADDRESSABLE (var) = 1;
3516 var = build_fold_addr_expr (var);
3517 }
3518
3519 done:
3520 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3521 way, when the RESULT_DECL is encountered, it will be
3522 automatically replaced by the VAR_DECL.
3523
3524 When returning by reference, ensure that RESULT_DECL remaps to
3525 gimple_val. */
3526 if (DECL_BY_REFERENCE (result)
3527 && !is_gimple_val (var))
3528 {
3529 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3530 insert_decl_map (id, result, temp);
3531 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3532 its default_def SSA_NAME. */
3533 if (gimple_in_ssa_p (id->src_cfun)
3534 && is_gimple_reg (result))
3535 {
3536 temp = make_ssa_name (temp);
3537 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3538 }
3539 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3540 }
3541 else
3542 insert_decl_map (id, result, var);
3543
3544 /* Remember this so we can ignore it in remap_decls. */
3545 id->retvar = var;
3546 return use;
3547 }
3548
3549 /* Determine if the function can be copied. If so, return NULL. If
3550 not, return a string describing the reason for failure. */
3551
3552 const char *
3553 copy_forbidden (struct function *fun)
3554 {
3555 const char *reason = fun->cannot_be_copied_reason;
3556
3557 /* Only examine the function once. */
3558 if (fun->cannot_be_copied_set)
3559 return reason;
3560
3561 /* We cannot copy a function that receives a non-local goto
3562 because we cannot remap the destination label used in the
3563 function that is performing the non-local goto. */
3564 /* ??? Actually, this should be possible, if we work at it.
3565 No doubt there's just a handful of places that simply
3566 assume it doesn't happen and don't substitute properly. */
3567 if (fun->has_nonlocal_label)
3568 {
3569 reason = G_("function %q+F can never be copied "
3570 "because it receives a non-local goto");
3571 goto fail;
3572 }
3573
3574 if (fun->has_forced_label_in_static)
3575 {
3576 reason = G_("function %q+F can never be copied because it saves "
3577 "address of local label in a static variable");
3578 goto fail;
3579 }
3580
3581 fail:
3582 fun->cannot_be_copied_reason = reason;
3583 fun->cannot_be_copied_set = true;
3584 return reason;
3585 }
3586
3587
3588 static const char *inline_forbidden_reason;
3589
3590 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3591 iff a function cannot be inlined. Also sets the reason why. */
3592
3593 static tree
3594 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3595 struct walk_stmt_info *wip)
3596 {
3597 tree fn = (tree) wip->info;
3598 tree t;
3599 gimple *stmt = gsi_stmt (*gsi);
3600
3601 switch (gimple_code (stmt))
3602 {
3603 case GIMPLE_CALL:
3604 /* Refuse to inline alloca calls unless the user explicitly forced it,
3605 as this may drastically change the program's memory overhead when the
3606 function using alloca is called in a loop. In the GCC present in
3607 SPEC2000, inlining into schedule_block caused it to require 2GB of
3608 RAM instead of 256MB. Don't do so for alloca calls emitted for
3609 VLA objects, as those can't cause unbounded growth (they're always
3610 wrapped inside stack_save/stack_restore regions). */
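/* Illustration: once inlined into a loop body, each iteration's alloca
   allocation stays live until the caller returns, so the caller's frame
   can grow without bound.  */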
3611 if (gimple_maybe_alloca_call_p (stmt)
3612 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3613 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3614 {
3615 inline_forbidden_reason
3616 = G_("function %q+F can never be inlined because it uses "
3617 "alloca (override using the always_inline attribute)");
3618 *handled_ops_p = true;
3619 return fn;
3620 }
3621
3622 t = gimple_call_fndecl (stmt);
3623 if (t == NULL_TREE)
3624 break;
3625
3626 /* We cannot inline functions that call setjmp. */
3627 if (setjmp_call_p (t))
3628 {
3629 inline_forbidden_reason
3630 = G_("function %q+F can never be inlined because it uses setjmp");
3631 *handled_ops_p = true;
3632 return t;
3633 }
3634
3635 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3636 switch (DECL_FUNCTION_CODE (t))
3637 {
3638 /* We cannot inline functions that take a variable number of
3639 arguments. */
3640 case BUILT_IN_VA_START:
3641 case BUILT_IN_NEXT_ARG:
3642 case BUILT_IN_VA_END:
3643 inline_forbidden_reason
3644 = G_("function %q+F can never be inlined because it "
3645 "uses variable argument lists");
3646 *handled_ops_p = true;
3647 return t;
3648
3649 case BUILT_IN_LONGJMP:
3650 /* We can't inline functions that call __builtin_longjmp at
3651 all. The non-local goto machinery really requires the
3652 destination be in a different function. If we allow the
3653 function calling __builtin_longjmp to be inlined into the
3654 function calling __builtin_setjmp, Things will Go Awry. */
3655 inline_forbidden_reason
3656 = G_("function %q+F can never be inlined because "
3657 "it uses setjmp-longjmp exception handling");
3658 *handled_ops_p = true;
3659 return t;
3660
3661 case BUILT_IN_NONLOCAL_GOTO:
3662 /* Similarly. */
3663 inline_forbidden_reason
3664 = G_("function %q+F can never be inlined because "
3665 "it uses non-local goto");
3666 *handled_ops_p = true;
3667 return t;
3668
3669 case BUILT_IN_RETURN:
3670 case BUILT_IN_APPLY_ARGS:
3671 /* If a __builtin_apply_args caller would be inlined,
3672 it would be saving arguments of the function it has
3673 been inlined into. Similarly __builtin_return would
3674 return from the function the inline has been inlined into. */
3675 inline_forbidden_reason
3676 = G_("function %q+F can never be inlined because "
3677 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3678 *handled_ops_p = true;
3679 return t;
3680
3681 default:
3682 break;
3683 }
3684 break;
3685
3686 case GIMPLE_GOTO:
3687 t = gimple_goto_dest (stmt);
3688
3689 /* We will not inline a function which uses computed goto. The
3690 addresses of its local labels, which may be tucked into
3691 global storage, are of course not constant across
3692 instantiations, which causes unexpected behavior. */
3693 if (TREE_CODE (t) != LABEL_DECL)
3694 {
3695 inline_forbidden_reason
3696 = G_("function %q+F can never be inlined "
3697 "because it contains a computed goto");
3698 *handled_ops_p = true;
3699 return t;
3700 }
3701 break;
3702
3703 default:
3704 break;
3705 }
3706
3707 *handled_ops_p = false;
3708 return NULL_TREE;
3709 }
3710
3711 /* Return true if FNDECL is a function that cannot be inlined into
3712 another one. */
3713
3714 static bool
3715 inline_forbidden_p (tree fndecl)
3716 {
3717 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3718 struct walk_stmt_info wi;
3719 basic_block bb;
3720 bool forbidden_p = false;
3721
3722 /* First check for shared reasons not to copy the code. */
3723 inline_forbidden_reason = copy_forbidden (fun);
3724 if (inline_forbidden_reason != NULL)
3725 return true;
3726
3727 /* Next, walk the statements of the function looking for
3728 constructs we can't handle, or that are non-optimal for inlining. */
3729 hash_set<tree> visited_nodes;
3730 memset (&wi, 0, sizeof (wi));
3731 wi.info = (void *) fndecl;
3732 wi.pset = &visited_nodes;
3733
3734 FOR_EACH_BB_FN (bb, fun)
3735 {
3736 gimple *ret;
3737 gimple_seq seq = bb_seq (bb);
3738 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3739 forbidden_p = (ret != NULL);
3740 if (forbidden_p)
3741 break;
3742 }
3743
3744 return forbidden_p;
3745 }
3746 \f
3747 /* Return false if the function FNDECL cannot be inlined on account of its
3748 attributes, true otherwise. */
3749 static bool
3750 function_attribute_inlinable_p (const_tree fndecl)
3751 {
3752 if (targetm.attribute_table)
3753 {
3754 const_tree a;
3755
3756 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3757 {
3758 const_tree name = TREE_PURPOSE (a);
3759 int i;
3760
3761 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3762 if (is_attribute_p (targetm.attribute_table[i].name, name))
3763 return targetm.function_attribute_inlinable_p (fndecl);
3764 }
3765 }
3766
3767 return true;
3768 }
3769
3770 /* Returns nonzero if FN is a function that does not have any
3771 fundamental inline blocking properties. */
3772
3773 bool
3774 tree_inlinable_function_p (tree fn)
3775 {
3776 bool inlinable = true;
3777 bool do_warning;
3778 tree always_inline;
3779
3780 /* If we've already decided this function shouldn't be inlined,
3781 there's no need to check again. */
3782 if (DECL_UNINLINABLE (fn))
3783 return false;
3784
3785 /* We only warn for functions declared `inline' by the user. */
3786 do_warning = (warn_inline
3787 && DECL_DECLARED_INLINE_P (fn)
3788 && !DECL_NO_INLINE_WARNING_P (fn)
3789 && !DECL_IN_SYSTEM_HEADER (fn));
3790
3791 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3792
3793 if (flag_no_inline
3794 && always_inline == NULL)
3795 {
3796 if (do_warning)
3797 warning (OPT_Winline, "function %q+F can never be inlined because it "
3798 "is suppressed using %<-fno-inline%>", fn);
3799 inlinable = false;
3800 }
3801
3802 else if (!function_attribute_inlinable_p (fn))
3803 {
3804 if (do_warning)
3805 warning (OPT_Winline, "function %q+F can never be inlined because it "
3806 "uses attributes conflicting with inlining", fn);
3807 inlinable = false;
3808 }
3809
3810 else if (inline_forbidden_p (fn))
3811 {
3812 /* See if we should warn about uninlinable functions. Previously,
3813 some of these warnings would be issued while trying to expand
3814 the function inline, but that would cause multiple warnings
3815 about functions that would for example call alloca. But since
3816 this is a property of the function, just one warning is enough.
3817 As a bonus we can now give more details about the reason why a
3818 function is not inlinable. */
3819 if (always_inline)
3820 error (inline_forbidden_reason, fn);
3821 else if (do_warning)
3822 warning (OPT_Winline, inline_forbidden_reason, fn);
3823
3824 inlinable = false;
3825 }
3826
3827 /* Squirrel away the result so that we don't have to check again. */
3828 DECL_UNINLINABLE (fn) = !inlinable;
3829
3830 return inlinable;
3831 }
3832
3833 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3834 word size, take a possible memcpy call into account, and return a
3835 cost based on whether we optimize for size or speed according to SPEED_P. */
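/* Illustrative numbers only: on a target where MOVE_MAX_PIECES is 8 and
   MOVE_RATIO is 4, a 24-byte struct costs (24 + 8 - 1) / 8 == 3, while a
   64-byte struct exceeds 8 * 4 bytes and is costed as a memcpy call,
   i.e. 4.  Vector types are instead costed by how many
   preferred-SIMD-mode-sized chunks they occupy.  */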
3836
3837 int
3838 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3839 {
3840 HOST_WIDE_INT size;
3841
3842 gcc_assert (!VOID_TYPE_P (type));
3843
3844 if (TREE_CODE (type) == VECTOR_TYPE)
3845 {
3846 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3847 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3848 int orig_mode_size
3849 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3850 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3851 return ((orig_mode_size + simd_mode_size - 1)
3852 / simd_mode_size);
3853 }
3854
3855 size = int_size_in_bytes (type);
3856
3857 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3858 /* Cost of a memcpy call, 3 arguments and the call. */
3859 return 4;
3860 else
3861 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3862 }
3863
3864 /* Returns the cost of operation CODE, according to WEIGHTS. */
3865
3866 static int
3867 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3868 tree op1 ATTRIBUTE_UNUSED, tree op2)
3869 {
3870 switch (code)
3871 {
3872 /* These are "free" conversions, or their presumed cost
3873 is folded into other operations. */
3874 case RANGE_EXPR:
3875 CASE_CONVERT:
3876 case COMPLEX_EXPR:
3877 case PAREN_EXPR:
3878 case VIEW_CONVERT_EXPR:
3879 return 0;
3880
3881 /* Assign cost of 1 to usual operations.
3882 ??? We may consider mapping RTL costs to this. */
3883 case COND_EXPR:
3884 case VEC_COND_EXPR:
3885 case VEC_PERM_EXPR:
3886
3887 case PLUS_EXPR:
3888 case POINTER_PLUS_EXPR:
3889 case POINTER_DIFF_EXPR:
3890 case MINUS_EXPR:
3891 case MULT_EXPR:
3892 case MULT_HIGHPART_EXPR:
3893
3894 case ADDR_SPACE_CONVERT_EXPR:
3895 case FIXED_CONVERT_EXPR:
3896 case FIX_TRUNC_EXPR:
3897
3898 case NEGATE_EXPR:
3899 case FLOAT_EXPR:
3900 case MIN_EXPR:
3901 case MAX_EXPR:
3902 case ABS_EXPR:
3903 case ABSU_EXPR:
3904
3905 case LSHIFT_EXPR:
3906 case RSHIFT_EXPR:
3907 case LROTATE_EXPR:
3908 case RROTATE_EXPR:
3909
3910 case BIT_IOR_EXPR:
3911 case BIT_XOR_EXPR:
3912 case BIT_AND_EXPR:
3913 case BIT_NOT_EXPR:
3914
3915 case TRUTH_ANDIF_EXPR:
3916 case TRUTH_ORIF_EXPR:
3917 case TRUTH_AND_EXPR:
3918 case TRUTH_OR_EXPR:
3919 case TRUTH_XOR_EXPR:
3920 case TRUTH_NOT_EXPR:
3921
3922 case LT_EXPR:
3923 case LE_EXPR:
3924 case GT_EXPR:
3925 case GE_EXPR:
3926 case EQ_EXPR:
3927 case NE_EXPR:
3928 case ORDERED_EXPR:
3929 case UNORDERED_EXPR:
3930
3931 case UNLT_EXPR:
3932 case UNLE_EXPR:
3933 case UNGT_EXPR:
3934 case UNGE_EXPR:
3935 case UNEQ_EXPR:
3936 case LTGT_EXPR:
3937
3938 case CONJ_EXPR:
3939
3940 case PREDECREMENT_EXPR:
3941 case PREINCREMENT_EXPR:
3942 case POSTDECREMENT_EXPR:
3943 case POSTINCREMENT_EXPR:
3944
3945 case REALIGN_LOAD_EXPR:
3946
3947 case WIDEN_SUM_EXPR:
3948 case WIDEN_MULT_EXPR:
3949 case DOT_PROD_EXPR:
3950 case SAD_EXPR:
3951 case WIDEN_MULT_PLUS_EXPR:
3952 case WIDEN_MULT_MINUS_EXPR:
3953 case WIDEN_LSHIFT_EXPR:
3954
3955 case VEC_WIDEN_MULT_HI_EXPR:
3956 case VEC_WIDEN_MULT_LO_EXPR:
3957 case VEC_WIDEN_MULT_EVEN_EXPR:
3958 case VEC_WIDEN_MULT_ODD_EXPR:
3959 case VEC_UNPACK_HI_EXPR:
3960 case VEC_UNPACK_LO_EXPR:
3961 case VEC_UNPACK_FLOAT_HI_EXPR:
3962 case VEC_UNPACK_FLOAT_LO_EXPR:
3963 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3964 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3965 case VEC_PACK_TRUNC_EXPR:
3966 case VEC_PACK_SAT_EXPR:
3967 case VEC_PACK_FIX_TRUNC_EXPR:
3968 case VEC_PACK_FLOAT_EXPR:
3969 case VEC_WIDEN_LSHIFT_HI_EXPR:
3970 case VEC_WIDEN_LSHIFT_LO_EXPR:
3971 case VEC_DUPLICATE_EXPR:
3972 case VEC_SERIES_EXPR:
3973
3974 return 1;
3975
3976 /* A few special cases of expensive operations. This is useful
3977 to avoid inlining functions having too many of these. */
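/* For example, x / y with a non-constant divisor is costed at
   weights->div_mod_cost (e.g. 10 in the time weights set by
   init_inline_once below), whereas x / 4 counts as an ordinary
   operation of cost 1.  */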
3978 case TRUNC_DIV_EXPR:
3979 case CEIL_DIV_EXPR:
3980 case FLOOR_DIV_EXPR:
3981 case ROUND_DIV_EXPR:
3982 case EXACT_DIV_EXPR:
3983 case TRUNC_MOD_EXPR:
3984 case CEIL_MOD_EXPR:
3985 case FLOOR_MOD_EXPR:
3986 case ROUND_MOD_EXPR:
3987 case RDIV_EXPR:
3988 if (TREE_CODE (op2) != INTEGER_CST)
3989 return weights->div_mod_cost;
3990 return 1;
3991
3992 /* Bit-field insertion needs several shift and mask operations. */
3993 case BIT_INSERT_EXPR:
3994 return 3;
3995
3996 default:
3997 /* We expect a copy assignment with no operator. */
3998 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3999 return 0;
4000 }
4001 }
4002
4003
4004 /* Estimate number of instructions that will be created by expanding
4005 the statements in the statement sequence STMTS.
4006 WEIGHTS contains weights attributed to various constructs. */
4007
4008 int
4009 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4010 {
4011 int cost;
4012 gimple_stmt_iterator gsi;
4013
4014 cost = 0;
4015 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4016 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4017
4018 return cost;
4019 }
4020
4021
4022 /* Estimate number of instructions that will be created by expanding STMT.
4023 WEIGHTS contains weights attributed to various constructs. */
4024
4025 int
4026 estimate_num_insns (gimple *stmt, eni_weights *weights)
4027 {
4028 unsigned cost, i;
4029 enum gimple_code code = gimple_code (stmt);
4030 tree lhs;
4031 tree rhs;
4032
4033 switch (code)
4034 {
4035 case GIMPLE_ASSIGN:
4036 /* Try to estimate the cost of assignments. We have two cases to
4037 deal with:
4038 1) Simple assignments to registers;
4039 2) Stores to things that must live in memory. This includes
4040 "normal" stores to scalars, but also assignments of large
4041 structures, or constructors of big arrays;
4042
4043 Let us look at these two cases, assuming we have "a = b + C":
4044 <GIMPLE_ASSIGN <var_decl "a">
4045 <plus_expr <var_decl "b"> <constant C>>
4046 If "a" is a GIMPLE register, the assignment to it is free on almost
4047 any target, because "a" usually ends up in a real register. Hence
4048 the only cost of this expression comes from the PLUS_EXPR, and we
4049 can ignore the GIMPLE_ASSIGN.
4050 If "a" is not a GIMPLE register, the assignment to "a" will most
4051 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4052 of moving something into "a", which we compute using the function
4053 estimate_move_cost. */
4054 if (gimple_clobber_p (stmt))
4055 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4056
4057 lhs = gimple_assign_lhs (stmt);
4058 rhs = gimple_assign_rhs1 (stmt);
4059
4060 cost = 0;
4061
4062 /* Account for the cost of moving to / from memory. */
4063 if (gimple_store_p (stmt))
4064 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4065 if (gimple_assign_load_p (stmt))
4066 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4067
4068 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4069 gimple_assign_rhs1 (stmt),
4070 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4071 == GIMPLE_BINARY_RHS
4072 ? gimple_assign_rhs2 (stmt) : NULL);
4073 break;
4074
4075 case GIMPLE_COND:
4076 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4077 gimple_op (stmt, 0),
4078 gimple_op (stmt, 1));
4079 break;
4080
4081 case GIMPLE_SWITCH:
4082 {
4083 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4084 /* Take into account the cost of the switch + guess 2 conditional jumps for
4085 each case label.
4086
4087 TODO: once the switch expansion logic is sufficiently separated, we can
4088 do a better job of estimating the cost of the switch. */
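/* Illustrative: a switch with 16 labels is costed as floor_log2 (16) * 2
   == 8 when estimating time, but as 16 * 2 == 32 when estimating size.  */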
4089 if (weights->time_based)
4090 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4091 else
4092 cost = gimple_switch_num_labels (switch_stmt) * 2;
4093 }
4094 break;
4095
4096 case GIMPLE_CALL:
4097 {
4098 tree decl;
4099
4100 if (gimple_call_internal_p (stmt))
4101 return 0;
4102 else if ((decl = gimple_call_fndecl (stmt))
4103 && fndecl_built_in_p (decl))
4104 {
4105 /* Do not special-case builtins where we see the body.
4106 This just confuses the inliner. */
4107 struct cgraph_node *node;
4108 if (!(node = cgraph_node::get (decl))
4109 || node->definition)
4110 ;
4111 /* For builtins that are likely expanded to nothing or
4112 inlined, do not account for operand costs. */
4113 else if (is_simple_builtin (decl))
4114 return 0;
4115 else if (is_inexpensive_builtin (decl))
4116 return weights->target_builtin_call_cost;
4117 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4118 {
4119 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4120 specialize the cheap expansion we do here.
4121 ??? This asks for a more general solution. */
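/* Illustrative: this keeps pow (x, 2.0) costed like the x * x it stands
   for (a single multiplication) rather than like a full call.  */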
4122 switch (DECL_FUNCTION_CODE (decl))
4123 {
4124 case BUILT_IN_POW:
4125 case BUILT_IN_POWF:
4126 case BUILT_IN_POWL:
4127 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4128 && (real_equal
4129 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4130 &dconst2)))
4131 return estimate_operator_cost
4132 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4133 gimple_call_arg (stmt, 0));
4134 break;
4135
4136 default:
4137 break;
4138 }
4139 }
4140 }
4141
4142 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4143 if (gimple_call_lhs (stmt))
4144 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4145 weights->time_based);
4146 for (i = 0; i < gimple_call_num_args (stmt); i++)
4147 {
4148 tree arg = gimple_call_arg (stmt, i);
4149 cost += estimate_move_cost (TREE_TYPE (arg),
4150 weights->time_based);
4151 }
4152 break;
4153 }
4154
4155 case GIMPLE_RETURN:
4156 return weights->return_cost;
4157
4158 case GIMPLE_GOTO:
4159 case GIMPLE_LABEL:
4160 case GIMPLE_NOP:
4161 case GIMPLE_PHI:
4162 case GIMPLE_PREDICT:
4163 case GIMPLE_DEBUG:
4164 return 0;
4165
4166 case GIMPLE_ASM:
4167 {
4168 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4169 /* 1000 means infinity. This avoids overflows later
4170 with very long asm statements. */
4171 if (count > 1000)
4172 count = 1000;
4173 /* If this asm is asm inline, count anything as minimum size. */
4174 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4175 count = MIN (1, count);
4176 return MAX (1, count);
4177 }
4178
4179 case GIMPLE_RESX:
4180 /* This is either going to be an external function call with one
4181 argument, or two register copy statements plus a goto. */
4182 return 2;
4183
4184 case GIMPLE_EH_DISPATCH:
4185 /* ??? This is going to turn into a switch statement. Ideally
4186 we'd have a look at the eh region and estimate the number of
4187 edges involved. */
4188 return 10;
4189
4190 case GIMPLE_BIND:
4191 return estimate_num_insns_seq (
4192 gimple_bind_body (as_a <gbind *> (stmt)),
4193 weights);
4194
4195 case GIMPLE_EH_FILTER:
4196 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4197
4198 case GIMPLE_CATCH:
4199 return estimate_num_insns_seq (gimple_catch_handler (
4200 as_a <gcatch *> (stmt)),
4201 weights);
4202
4203 case GIMPLE_TRY:
4204 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4205 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4206
4207 /* OMP directives are generally very expensive. */
4208
4209 case GIMPLE_OMP_RETURN:
4210 case GIMPLE_OMP_SECTIONS_SWITCH:
4211 case GIMPLE_OMP_ATOMIC_STORE:
4212 case GIMPLE_OMP_CONTINUE:
4213 /* ...except these, which are cheap. */
4214 return 0;
4215
4216 case GIMPLE_OMP_ATOMIC_LOAD:
4217 return weights->omp_cost;
4218
4219 case GIMPLE_OMP_FOR:
4220 return (weights->omp_cost
4221 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4222 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4223
4224 case GIMPLE_OMP_PARALLEL:
4225 case GIMPLE_OMP_TASK:
4226 case GIMPLE_OMP_CRITICAL:
4227 case GIMPLE_OMP_MASTER:
4228 case GIMPLE_OMP_TASKGROUP:
4229 case GIMPLE_OMP_ORDERED:
4230 case GIMPLE_OMP_SECTION:
4231 case GIMPLE_OMP_SECTIONS:
4232 case GIMPLE_OMP_SINGLE:
4233 case GIMPLE_OMP_TARGET:
4234 case GIMPLE_OMP_TEAMS:
4235 return (weights->omp_cost
4236 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4237
4238 case GIMPLE_TRANSACTION:
4239 return (weights->tm_cost
4240 + estimate_num_insns_seq (gimple_transaction_body (
4241 as_a <gtransaction *> (stmt)),
4242 weights));
4243
4244 default:
4245 gcc_unreachable ();
4246 }
4247
4248 return cost;
4249 }
4250
4251 /* Estimate number of instructions that will be created by expanding
4252 function FNDECL. WEIGHTS contains weights attributed to various
4253 constructs. */
4254
4255 int
4256 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4257 {
4258 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4259 gimple_stmt_iterator bsi;
4260 basic_block bb;
4261 int n = 0;
4262
4263 gcc_assert (my_function && my_function->cfg);
4264 FOR_EACH_BB_FN (bb, my_function)
4265 {
4266 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4267 n += estimate_num_insns (gsi_stmt (bsi), weights);
4268 }
4269
4270 return n;
4271 }
4272
4273
4274 /* Initializes weights used by estimate_num_insns. */
4275
4276 void
4277 init_inline_once (void)
4278 {
4279 eni_size_weights.call_cost = 1;
4280 eni_size_weights.indirect_call_cost = 3;
4281 eni_size_weights.target_builtin_call_cost = 1;
4282 eni_size_weights.div_mod_cost = 1;
4283 eni_size_weights.omp_cost = 40;
4284 eni_size_weights.tm_cost = 10;
4285 eni_size_weights.time_based = false;
4286 eni_size_weights.return_cost = 1;
4287
4288 /* Estimating time for call is difficult, since we have no idea what the
4289 called function does. In the current uses of eni_time_weights,
4290 underestimating the cost does less harm than overestimating it, so
4291 we choose a rather small value here. */
4292 eni_time_weights.call_cost = 10;
4293 eni_time_weights.indirect_call_cost = 15;
4294 eni_time_weights.target_builtin_call_cost = 1;
4295 eni_time_weights.div_mod_cost = 10;
4296 eni_time_weights.omp_cost = 40;
4297 eni_time_weights.tm_cost = 40;
4298 eni_time_weights.time_based = true;
4299 eni_time_weights.return_cost = 2;
4300 }
4301
4302
4303 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4304
4305 static void
4306 prepend_lexical_block (tree current_block, tree new_block)
4307 {
4308 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4309 BLOCK_SUBBLOCKS (current_block) = new_block;
4310 BLOCK_SUPERCONTEXT (new_block) = current_block;
4311 }
4312
4313 /* Add local variables from CALLEE to CALLER. */
4314
4315 static inline void
4316 add_local_variables (struct function *callee, struct function *caller,
4317 copy_body_data *id)
4318 {
4319 tree var;
4320 unsigned ix;
4321
4322 FOR_EACH_LOCAL_DECL (callee, ix, var)
4323 if (!can_be_nonlocal (var, id))
4324 {
4325 tree new_var = remap_decl (var, id);
4326
4327 /* Remap debug-expressions. */
4328 if (VAR_P (new_var)
4329 && DECL_HAS_DEBUG_EXPR_P (var)
4330 && new_var != var)
4331 {
4332 tree tem = DECL_DEBUG_EXPR (var);
4333 bool old_regimplify = id->regimplify;
4334 id->remapping_type_depth++;
4335 walk_tree (&tem, copy_tree_body_r, id, NULL);
4336 id->remapping_type_depth--;
4337 id->regimplify = old_regimplify;
4338 SET_DECL_DEBUG_EXPR (new_var, tem);
4339 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4340 }
4341 add_local_decl (caller, new_var);
4342 }
4343 }
4344
4345 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4346 have brought in or introduced any debug stmts for SRCVAR. */
4347
4348 static inline void
4349 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4350 {
4351 tree *remappedvarp = id->decl_map->get (srcvar);
4352
4353 if (!remappedvarp)
4354 return;
4355
4356 if (!VAR_P (*remappedvarp))
4357 return;
4358
4359 if (*remappedvarp == id->retvar)
4360 return;
4361
4362 tree tvar = target_for_debug_bind (*remappedvarp);
4363 if (!tvar)
4364 return;
4365
4366 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4367 id->call_stmt);
4368 gimple_seq_add_stmt (bindings, stmt);
4369 }
4370
4371 /* For each inlined variable for which we may have debug bind stmts,
4372 add before GSI a final debug stmt resetting it, marking the end of
4373 its life, so that var-tracking knows it doesn't have to compute
4374 further locations for it. */
4375
4376 static inline void
4377 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4378 {
4379 tree var;
4380 unsigned ix;
4381 gimple_seq bindings = NULL;
4382
4383 if (!gimple_in_ssa_p (id->src_cfun))
4384 return;
4385
4386 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4387 return;
4388
4389 for (var = DECL_ARGUMENTS (id->src_fn);
4390 var; var = DECL_CHAIN (var))
4391 reset_debug_binding (id, var, &bindings);
4392
4393 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4394 reset_debug_binding (id, var, &bindings);
4395
4396 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4397 }
4398
4399 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4400
4401 static bool
4402 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4403 {
4404 tree use_retvar;
4405 tree fn;
4406 hash_map<tree, tree> *dst;
4407 hash_map<tree, tree> *st = NULL;
4408 tree return_slot;
4409 tree modify_dest;
4410 struct cgraph_edge *cg_edge;
4411 cgraph_inline_failed_t reason;
4412 basic_block return_block;
4413 edge e;
4414 gimple_stmt_iterator gsi, stmt_gsi;
4415 bool successfully_inlined = false;
4416 bool purge_dead_abnormal_edges;
4417 gcall *call_stmt;
4418 unsigned int prop_mask, src_properties;
4419 struct function *dst_cfun;
4420 tree simduid;
4421 use_operand_p use;
4422 gimple *simtenter_stmt = NULL;
4423 vec<tree> *simtvars_save;
4424
4425 /* The gimplifier uses input_location in too many places, such as
4426 internal_get_tmp_var (). */
4427 location_t saved_location = input_location;
4428 input_location = gimple_location (stmt);
4429
4430 /* From here on, we're only interested in CALL_EXPRs. */
4431 call_stmt = dyn_cast <gcall *> (stmt);
4432 if (!call_stmt)
4433 goto egress;
4434
4435 cg_edge = id->dst_node->get_edge (stmt);
4436 gcc_checking_assert (cg_edge);
4437 /* First, see if we can figure out what function is being called.
4438 If we cannot, then there is no hope of inlining the function. */
4439 if (cg_edge->indirect_unknown_callee)
4440 goto egress;
4441 fn = cg_edge->callee->decl;
4442 gcc_checking_assert (fn);
4443
4444 /* If FN is a declaration of a function in a nested scope that was
4445 globally declared inline, we don't set its DECL_INITIAL.
4446 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4447 C++ front-end uses it for cdtors to refer to their internal
4448 declarations, which are not real functions. Fortunately those
4449 don't have trees to be saved, so we can tell by checking their
4450 gimple_body. */
4451 if (!DECL_INITIAL (fn)
4452 && DECL_ABSTRACT_ORIGIN (fn)
4453 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4454 fn = DECL_ABSTRACT_ORIGIN (fn);
4455
4456 /* Don't try to inline functions that are not well-suited to inlining. */
4457 if (cg_edge->inline_failed)
4458 {
4459 reason = cg_edge->inline_failed;
4460 /* If this call was originally indirect, we do not want to emit any
4461 inlining related warnings or sorry messages because there are no
4462 guarantees regarding those. */
4463 if (cg_edge->indirect_inlining_edge)
4464 goto egress;
4465
4466 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4467 /* For extern inline functions that get redefined we have always
4468 silently ignored the always_inline flag. Better behavior would
4469 be to keep both bodies and use the extern inline body
4470 for inlining, but we can't do that because frontends overwrite
4471 the body. */
4472 && !cg_edge->callee->local.redefined_extern_inline
4473 /* During early inline pass, report only when optimization is
4474 not turned on. */
4475 && (symtab->global_info_ready
4476 || !optimize
4477 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4478 /* PR 20090218-1_0.c. Body can be provided by another module. */
4479 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4480 {
4481 error ("inlining failed in call to always_inline %q+F: %s", fn,
4482 cgraph_inline_failed_string (reason));
4483 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4484 inform (gimple_location (stmt), "called from here");
4485 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4486 inform (DECL_SOURCE_LOCATION (cfun->decl),
4487 "called from this function");
4488 }
4489 else if (warn_inline
4490 && DECL_DECLARED_INLINE_P (fn)
4491 && !DECL_NO_INLINE_WARNING_P (fn)
4492 && !DECL_IN_SYSTEM_HEADER (fn)
4493 && reason != CIF_UNSPECIFIED
4494 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4495 /* Do not warn about not inlined recursive calls. */
4496 && !cg_edge->recursive_p ()
4497 /* Avoid warnings during early inline pass. */
4498 && symtab->global_info_ready)
4499 {
4500 auto_diagnostic_group d;
4501 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4502 fn, _(cgraph_inline_failed_string (reason))))
4503 {
4504 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4505 inform (gimple_location (stmt), "called from here");
4506 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4507 inform (DECL_SOURCE_LOCATION (cfun->decl),
4508 "called from this function");
4509 }
4510 }
4511 goto egress;
4512 }
4513 id->src_node = cg_edge->callee;
4514
4515 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4516 and redirect to the function being thunked. */
4517 if (id->src_node->thunk.thunk_p)
4518 {
4519 cgraph_edge *edge;
4520 tree virtual_offset = NULL;
4521 profile_count count = cg_edge->count;
4522 tree op;
4523 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4524
4525 cg_edge->remove ();
4526 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4527 gimple_uid (stmt),
4528 profile_count::one (),
4529 profile_count::one (),
4530 true);
4531 edge->count = count;
4532 if (id->src_node->thunk.virtual_offset_p)
4533 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4534 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4535 NULL);
4536 gsi_insert_before (&iter, gimple_build_assign (op,
4537 gimple_call_arg (stmt, 0)),
4538 GSI_NEW_STMT);
4539 gcc_assert (id->src_node->thunk.this_adjusting);
4540 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4541 virtual_offset, id->src_node->thunk.indirect_offset);
4542
4543 gimple_call_set_arg (stmt, 0, op);
4544 gimple_call_set_fndecl (stmt, edge->callee->decl);
4545 update_stmt (stmt);
4546 id->src_node->remove ();
4547 expand_call_inline (bb, stmt, id);
4548 maybe_remove_unused_call_args (cfun, stmt);
4549 return true;
4550 }
4551 fn = cg_edge->callee->decl;
4552 cg_edge->callee->get_untransformed_body ();
4553
4554 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4555 cg_edge->callee->verify ();
4556
4557 /* We will be inlining this callee. */
4558 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4559
4560 /* Update the caller's EH personality. */
4561 if (DECL_FUNCTION_PERSONALITY (fn))
4562 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4563 = DECL_FUNCTION_PERSONALITY (fn);
4564
4565 /* Split the block before the GIMPLE_CALL. */
4566 stmt_gsi = gsi_for_stmt (stmt);
4567 gsi_prev (&stmt_gsi);
4568 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4569 bb = e->src;
4570 return_block = e->dest;
4571 remove_edge (e);
4572
4573 /* If the GIMPLE_CALL was the last statement of BB, it may have
4574 been the source of abnormal edges. In this case, schedule
4575 the removal of dead abnormal edges. */
4576 gsi = gsi_start_bb (return_block);
4577 gsi_next (&gsi);
4578 purge_dead_abnormal_edges = gsi_end_p (gsi);
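/* I.e. purge only when the call is the sole statement left in
   RETURN_BLOCK, which means it was the last statement of the original
   BB.  */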
4579
4580 stmt_gsi = gsi_start_bb (return_block);
4581
4582 /* Build a block containing code to initialize the arguments, the
4583 actual inline expansion of the body, and a label for the return
4584 statements within the function to jump to. The type of the
4585 statement expression is the return type of the function call.
4586 ??? If the call does not have an associated block then we will
4587 remap all callee blocks to NULL, effectively dropping most of
4588 its debug information. This should only happen for calls to
4589 artificial decls inserted by the compiler itself. We need to
4590 either link the inlined blocks into the caller block tree or
4591 not refer to them in any way to not break GC for locations. */
4592 if (tree block = gimple_block (stmt))
4593 {
4594 /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4595 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4596 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4597 if (loc == UNKNOWN_LOCATION)
4598 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4599 if (loc == UNKNOWN_LOCATION)
4600 loc = BUILTINS_LOCATION;
4601 id->block = make_node (BLOCK);
4602 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4603 BLOCK_SOURCE_LOCATION (id->block) = loc;
4604 prepend_lexical_block (block, id->block);
4605 }
4606
4607 /* Local declarations will be replaced by their equivalents in this map. */
4608 st = id->decl_map;
4609 id->decl_map = new hash_map<tree, tree>;
4610 dst = id->debug_map;
4611 id->debug_map = NULL;
4612 if (flag_stack_reuse != SR_NONE)
4613 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4614
4615 /* Record the function we are about to inline. */
4616 id->src_fn = fn;
4617 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4618 id->reset_location = DECL_IGNORED_P (fn);
4619 id->call_stmt = call_stmt;
4620
4621 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4622 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4623 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4624 simtvars_save = id->dst_simt_vars;
4625 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4626 && (simduid = bb->loop_father->simduid) != NULL_TREE
4627 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4628 && single_imm_use (simduid, &use, &simtenter_stmt)
4629 && is_gimple_call (simtenter_stmt)
4630 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4631 vec_alloc (id->dst_simt_vars, 0);
4632 else
4633 id->dst_simt_vars = NULL;
4634
4635 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4636 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4637
4638 /* If the src function contains an IFN_VA_ARG, then so will the dst
4639 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4640 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4641 src_properties = id->src_cfun->curr_properties & prop_mask;
4642 if (src_properties != prop_mask)
4643 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4644
4645 gcc_assert (!id->src_cfun->after_inlining);
4646
4647 id->entry_bb = bb;
4648 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4649 {
4650 gimple_stmt_iterator si = gsi_last_bb (bb);
4651 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4652 NOT_TAKEN),
4653 GSI_NEW_STMT);
4654 }
4655 initialize_inlined_parameters (id, stmt, fn, bb);
4656 if (debug_nonbind_markers_p && debug_inline_points && id->block
4657 && inlined_function_outer_scope_p (id->block))
4658 {
4659 gimple_stmt_iterator si = gsi_last_bb (bb);
4660 gsi_insert_after (&si, gimple_build_debug_inline_entry
4661 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4662 GSI_NEW_STMT);
4663 }
4664
4665 if (DECL_INITIAL (fn))
4666 {
4667 if (gimple_block (stmt))
4668 {
4669 tree *var;
4670
4671 prepend_lexical_block (id->block,
4672 remap_blocks (DECL_INITIAL (fn), id));
4673 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4674 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4675 == NULL_TREE));
4676 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block;
4677 otherwise, for DWARF, the DW_TAG_formal_parameter entries will not be
4678 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4679 under it. The parameters can then be evaluated in the debugger,
4680 but don't show up in backtraces. */
4681 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4682 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4683 {
4684 tree v = *var;
4685 *var = TREE_CHAIN (v);
4686 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4687 BLOCK_VARS (id->block) = v;
4688 }
4689 else
4690 var = &TREE_CHAIN (*var);
4691 }
4692 else
4693 remap_blocks_to_null (DECL_INITIAL (fn), id);
4694 }
4695
4696 /* Return statements in the function body will be replaced by jumps
4697 to the RET_LABEL. */
4698 gcc_assert (DECL_INITIAL (fn));
4699 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4700
4701 /* Find the LHS to which the result of this call is assigned. */
4702 return_slot = NULL;
4703 if (gimple_call_lhs (stmt))
4704 {
4705 modify_dest = gimple_call_lhs (stmt);
4706
4707 /* The function which we are inlining might not return a value,
4708 in which case we should issue a warning that the function
4709 does not return a value. In that case the optimizers will
4710 see that the variable to which the value is assigned was not
4711 initialized. We do not want to issue a warning about that
4712 uninitialized variable. */
4713 if (DECL_P (modify_dest))
4714 TREE_NO_WARNING (modify_dest) = 1;
4715
4716 if (gimple_call_return_slot_opt_p (call_stmt))
4717 {
4718 return_slot = modify_dest;
4719 modify_dest = NULL;
4720 }
4721 }
4722 else
4723 modify_dest = NULL;
4724
4725 /* If we are inlining a call to the C++ operator new, we don't want
4726 to use type based alias analysis on the return value. Otherwise
4727 we may get confused if the compiler sees that the inlined new
4728 function returns a pointer which was just deleted. See bug
4729 33407. */
4730 if (DECL_IS_OPERATOR_NEW (fn))
4731 {
4732 return_slot = NULL;
4733 modify_dest = NULL;
4734 }
4735
4736 /* Declare the return variable for the function. */
4737 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4738
4739 /* Add local vars in this inlined callee to caller. */
4740 add_local_variables (id->src_cfun, cfun, id);
4741
4742 if (dump_enabled_p ())
4743 {
4744 char buf[128];
4745 snprintf (buf, sizeof(buf), "%4.2f",
4746 cg_edge->sreal_frequency ().to_double ());
4747 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4748 call_stmt,
4749 "Inlining %C to %C with frequency %s\n",
4750 id->src_node, id->dst_node, buf);
4751 if (dump_file && (dump_flags & TDF_DETAILS))
4752 {
4753 id->src_node->dump (dump_file);
4754 id->dst_node->dump (dump_file);
4755 }
4756 }
4757
4758 /* This is it. Duplicate the callee body. Assume callee is
4759 pre-gimplified. Note that we must not alter the caller
4760 function in any way before this point, as this CALL_EXPR may be
4761 a self-referential call; if we're calling ourselves, we need to
4762 duplicate our body before altering anything. */
4763 copy_body (id, bb, return_block, NULL);
4764
4765 reset_debug_bindings (id, stmt_gsi);
4766
4767 if (flag_stack_reuse != SR_NONE)
4768 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4769 if (!TREE_THIS_VOLATILE (p))
4770 {
4771 tree *varp = id->decl_map->get (p);
4772 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4773 {
4774 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4775 gimple *clobber_stmt;
4776 TREE_THIS_VOLATILE (clobber) = 1;
4777 clobber_stmt = gimple_build_assign (*varp, clobber);
4778 gimple_set_location (clobber_stmt, gimple_location (stmt));
4779 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4780 }
4781 }
4782
4783 /* Reset the escaped solution. */
4784 if (cfun->gimple_df)
4785 pt_solution_reset (&cfun->gimple_df->escaped);
4786
4787 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4788 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4789 {
4790 size_t nargs = gimple_call_num_args (simtenter_stmt);
4791 vec<tree> *vars = id->dst_simt_vars;
4792 auto_vec<tree> newargs (nargs + vars->length ());
4793 for (size_t i = 0; i < nargs; i++)
4794 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4795 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4796 {
4797 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4798 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4799 }
4800 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4801 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4802 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4803 gsi_replace (&gsi, g, false);
4804 }
4805 vec_free (id->dst_simt_vars);
4806 id->dst_simt_vars = simtvars_save;
4807
4808 /* Clean up. */
4809 if (id->debug_map)
4810 {
4811 delete id->debug_map;
4812 id->debug_map = dst;
4813 }
4814 delete id->decl_map;
4815 id->decl_map = st;
4816
4817 /* Unlink the call's virtual operands before replacing it. */
4818 unlink_stmt_vdef (stmt);
4819 if (gimple_vdef (stmt)
4820 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4821 release_ssa_name (gimple_vdef (stmt));
4822
4823 /* If the inlined function returns a result that we care about,
4824 substitute the GIMPLE_CALL with an assignment of the return
4825 variable to the LHS of the call. That is, if STMT was
4826 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4827 if (use_retvar && gimple_call_lhs (stmt))
4828 {
4829 gimple *old_stmt = stmt;
4830 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4831 gimple_set_location (stmt, gimple_location (old_stmt));
4832 gsi_replace (&stmt_gsi, stmt, false);
4833 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4834 /* Append a clobber for id->retvar if easily possible. */
4835 if (flag_stack_reuse != SR_NONE
4836 && id->retvar
4837 && VAR_P (id->retvar)
4838 && id->retvar != return_slot
4839 && id->retvar != modify_dest
4840 && !TREE_THIS_VOLATILE (id->retvar)
4841 && !is_gimple_reg (id->retvar)
4842 && !stmt_ends_bb_p (stmt))
4843 {
4844 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4845 gimple *clobber_stmt;
4846 TREE_THIS_VOLATILE (clobber) = 1;
4847 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4848 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4849 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4850 }
4851 }
4852 else
4853 {
4854 /* Handle the case of inlining a function with no return
4855 statement, which causes the return value to become undefined. */
4856 if (gimple_call_lhs (stmt)
4857 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4858 {
4859 tree name = gimple_call_lhs (stmt);
4860 tree var = SSA_NAME_VAR (name);
4861 tree def = var ? ssa_default_def (cfun, var) : NULL;
4862
4863 if (def)
4864 {
4865 /* If the variable is used undefined, make this name
4866 undefined via a move. */
4867 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4868 gsi_replace (&stmt_gsi, stmt, true);
4869 }
4870 else
4871 {
4872 if (!var)
4873 {
4874 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4875 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4876 }
4877 /* Otherwise make this variable undefined. */
4878 gsi_remove (&stmt_gsi, true);
4879 set_ssa_default_def (cfun, var, name);
4880 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4881 }
4882 }
4883 /* Replace with a clobber for id->retvar. */
4884 else if (flag_stack_reuse != SR_NONE
4885 && id->retvar
4886 && VAR_P (id->retvar)
4887 && id->retvar != return_slot
4888 && id->retvar != modify_dest
4889 && !TREE_THIS_VOLATILE (id->retvar)
4890 && !is_gimple_reg (id->retvar))
4891 {
4892 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4893 gimple *clobber_stmt;
4894 TREE_THIS_VOLATILE (clobber) = 1;
4895 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4896 gimple_set_location (clobber_stmt, gimple_location (stmt));
4897 gsi_replace (&stmt_gsi, clobber_stmt, false);
4898 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4899 }
4900 else
4901 gsi_remove (&stmt_gsi, true);
4902 }
4903
4904 if (purge_dead_abnormal_edges)
4905 {
4906 gimple_purge_dead_eh_edges (return_block);
4907 gimple_purge_dead_abnormal_call_edges (return_block);
4908 }
4909
4910 /* If the value of the new expression is ignored, that's OK. We
4911 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4912 the equivalent inlined version either. */
4913 if (is_gimple_assign (stmt))
4914 {
4915 gcc_assert (gimple_assign_single_p (stmt)
4916 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4917 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4918 }
4919
4920 id->add_clobbers_to_eh_landing_pads = 0;
4921
4922 /* Output the inlining info for this abstract function, since it has been
4923 inlined. If we don't do this now, we can lose the information about the
4924 variables in the function when the blocks get blown away as soon as we
4925 remove the cgraph node. */
4926 if (gimple_block (stmt))
4927 (*debug_hooks->outlining_inline_function) (fn);
4928
4929 /* Update callgraph if needed. */
4930 cg_edge->callee->remove ();
4931
4932 id->block = NULL_TREE;
4933 id->retvar = NULL_TREE;
4934 successfully_inlined = true;
4935
4936 egress:
4937 input_location = saved_location;
4938 return successfully_inlined;
4939 }
4940
4941 /* Expand call statements reachable from STMT_P.
4942 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4943 in a MODIFY_EXPR. */
4944
4945 static bool
4946 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4947 {
4948 gimple_stmt_iterator gsi;
4949 bool inlined = false;
4950
4951 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4952 {
4953 gimple *stmt = gsi_stmt (gsi);
4954 gsi_prev (&gsi);
4955
4956 if (is_gimple_call (stmt)
4957 && !gimple_call_internal_p (stmt))
4958 inlined |= expand_call_inline (bb, stmt, id);
4959 }
4960
4961 return inlined;
4962 }
4963
4964
4965 /* Walk all basic blocks created after FIRST and try to fold every statement
4966 in the STATEMENTS pointer set. */
4967
4968 static void
4969 fold_marked_statements (int first, hash_set<gimple *> *statements)
4970 {
4971 for (; first < last_basic_block_for_fn (cfun); first++)
4972 if (BASIC_BLOCK_FOR_FN (cfun, first))
4973 {
4974 gimple_stmt_iterator gsi;
4975
4976 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4977 !gsi_end_p (gsi);
4978 gsi_next (&gsi))
4979 if (statements->contains (gsi_stmt (gsi)))
4980 {
4981 gimple *old_stmt = gsi_stmt (gsi);
4982 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4983
4984 if (old_decl && fndecl_built_in_p (old_decl))
4985 {
4986 /* Folding builtins can create multiple instructions;
4987 we need to look at all of them. */
4988 gimple_stmt_iterator i2 = gsi;
4989 gsi_prev (&i2);
4990 if (fold_stmt (&gsi))
4991 {
4992 gimple *new_stmt;
4993 /* If a builtin at the end of a bb folded into nothing,
4994 the following loop won't work. */
4995 if (gsi_end_p (gsi))
4996 {
4997 cgraph_update_edges_for_call_stmt (old_stmt,
4998 old_decl, NULL);
4999 break;
5000 }
5001 if (gsi_end_p (i2))
5002 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5003 else
5004 gsi_next (&i2);
5005 while (1)
5006 {
5007 new_stmt = gsi_stmt (i2);
5008 update_stmt (new_stmt);
5009 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5010 new_stmt);
5011
5012 if (new_stmt == gsi_stmt (gsi))
5013 {
5014 /* It is okay to check only for the very last
5015 of these statements. If it is a throwing
5016 statement nothing will change. If it isn't,
5017 this can remove EH edges. The only way that
5018 could be wrong is if some intermediate stmts
5019 throw, but not the last one. That would mean
5020 we'd have to split the block, which we can't
5021 do here and we'd lose anyway. And as builtins
5022 probably never throw, this all
5023 is moot anyway. */
5024 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5025 new_stmt))
5026 gimple_purge_dead_eh_edges (
5027 BASIC_BLOCK_FOR_FN (cfun, first));
5028 break;
5029 }
5030 gsi_next (&i2);
5031 }
5032 }
5033 }
5034 else if (fold_stmt (&gsi))
5035 {
5036 /* Re-read the statement from GSI as fold_stmt() may
5037 have changed it. */
5038 gimple *new_stmt = gsi_stmt (gsi);
5039 update_stmt (new_stmt);
5040
5041 if (is_gimple_call (old_stmt)
5042 || is_gimple_call (new_stmt))
5043 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5044 new_stmt);
5045
5046 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5047 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5048 first));
5049 }
5050 }
5051 }
5052 }
5053
5054 /* Expand calls to inline functions in the body of FN. */
5055
5056 unsigned int
5057 optimize_inline_calls (tree fn)
5058 {
5059 copy_body_data id;
5060 basic_block bb;
5061 int last = n_basic_blocks_for_fn (cfun);
5062 bool inlined_p = false;
5063
5064 /* Clear out ID. */
5065 memset (&id, 0, sizeof (id));
5066
5067 id.src_node = id.dst_node = cgraph_node::get (fn);
5068 gcc_assert (id.dst_node->definition);
5069 id.dst_fn = fn;
5070 /* Or any functions that aren't finished yet. */
5071 if (current_function_decl)
5072 id.dst_fn = current_function_decl;
5073
5074 id.copy_decl = copy_decl_maybe_to_var;
5075 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5076 id.transform_new_cfg = false;
5077 id.transform_return_to_modify = true;
5078 id.transform_parameter = true;
5079 id.transform_lang_insert_block = NULL;
5080 id.statements_to_fold = new hash_set<gimple *>;
5081
5082 push_gimplify_context ();
5083
5084 /* We make no attempts to keep dominance info up-to-date. */
5085 free_dominance_info (CDI_DOMINATORS);
5086 free_dominance_info (CDI_POST_DOMINATORS);
5087
5088 /* Register specific gimple functions. */
5089 gimple_register_cfg_hooks ();
5090
5091 /* Reach the trees by walking over the CFG, and note the
5092 enclosing basic-blocks in the call edges. */
5093 /* We walk the blocks going forward, because inlined function bodies
5094 will split id->current_basic_block, and the new blocks will
5095 follow it; we'll trudge through them, processing their CALL_EXPRs
5096 along the way. */
5097 FOR_EACH_BB_FN (bb, cfun)
5098 inlined_p |= gimple_expand_calls_inline (bb, &id);
5099
5100 pop_gimplify_context (NULL);
5101
5102 if (flag_checking)
5103 {
5104 struct cgraph_edge *e;
5105
5106 id.dst_node->verify ();
5107
5108 /* Double check that we inlined everything we are supposed to inline. */
5109 for (e = id.dst_node->callees; e; e = e->next_callee)
5110 gcc_assert (e->inline_failed);
5111 }
5112
5113 /* Fold queued statements. */
5114 update_max_bb_count ();
5115 fold_marked_statements (last, id.statements_to_fold);
5116 delete id.statements_to_fold;
5117
5118 gcc_assert (!id.debug_stmts.exists ());
5119
5120 /* If we didn't inline into the function there is nothing to do. */
5121 if (!inlined_p)
5122 return 0;
5123
5124 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5125 number_blocks (fn);
5126
5127 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5128
5129 if (flag_checking)
5130 id.dst_node->verify ();
5131
5132 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5133 not possible yet - the IPA passes might make various functions not
5134 throw and they don't care to proactively update local EH info. This is
5135 done later in the fixup_cfg pass that also executes the verification. */
5136 return (TODO_update_ssa
5137 | TODO_cleanup_cfg
5138 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5139 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5140 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5141 ? TODO_rebuild_frequencies : 0));
5142 }
5143
5144 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5145
5146 tree
5147 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5148 {
5149 enum tree_code code = TREE_CODE (*tp);
5150 enum tree_code_class cl = TREE_CODE_CLASS (code);
5151
5152 /* We make copies of most nodes. */
5153 if (IS_EXPR_CODE_CLASS (cl)
5154 || code == TREE_LIST
5155 || code == TREE_VEC
5156 || code == TYPE_DECL
5157 || code == OMP_CLAUSE)
5158 {
5159 /* Because the chain gets clobbered when we make a copy, we save it
5160 here. */
5161 tree chain = NULL_TREE, new_tree;
5162
5163 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5164 chain = TREE_CHAIN (*tp);
5165
5166 /* Copy the node. */
5167 new_tree = copy_node (*tp);
5168
5169 *tp = new_tree;
5170
5171 /* Now, restore the chain, if appropriate. That will cause
5172 walk_tree to walk into the chain as well. */
5173 if (code == PARM_DECL
5174 || code == TREE_LIST
5175 || code == OMP_CLAUSE)
5176 TREE_CHAIN (*tp) = chain;
5177
5178 /* For now, we don't update BLOCKs when we make copies. So, we
5179 have to nullify all BIND_EXPRs. */
5180 if (TREE_CODE (*tp) == BIND_EXPR)
5181 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5182 }
5183 else if (code == CONSTRUCTOR)
5184 {
5185 /* CONSTRUCTOR nodes need special handling because
5186 we need to duplicate the vector of elements. */
5187 tree new_tree;
5188
5189 new_tree = copy_node (*tp);
5190 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5191 *tp = new_tree;
5192 }
5193 else if (code == STATEMENT_LIST)
5194 /* We used to just abort on STATEMENT_LIST, but we can run into them
5195 with statement-expressions (c++/40975). */
5196 copy_statement_list (tp);
5197 else if (TREE_CODE_CLASS (code) == tcc_type)
5198 *walk_subtrees = 0;
5199 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5200 *walk_subtrees = 0;
5201 else if (TREE_CODE_CLASS (code) == tcc_constant)
5202 *walk_subtrees = 0;
5203 return NULL_TREE;
5204 }
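/* A hedged illustration of the policy above (an added note, not part of
   the original sources), assuming A and B are VAR_DECLs:

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     walk_tree (&sum, copy_tree_r, NULL, NULL);

   The PLUS_EXPR node is duplicated, while the VAR_DECL operands A and B,
   their type and any constants are shared, because copy_tree_r clears
   *WALK_SUBTREES for tcc_type, tcc_declaration and tcc_constant nodes.  */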
5205
5206 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5207 information indicating to what new SAVE_EXPR this one should be mapped,
5208 use that one and clear *WALK_SUBTREES, since it has been walked already.
5209 Otherwise, create a new node and enter it in ST. */
5210
5211 static void
5212 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5213 {
5214 tree *n;
5215 tree t;
5216
5217 /* See if we already encountered this SAVE_EXPR. */
5218 n = st->get (*tp);
5219
5220 /* If we didn't already remap this SAVE_EXPR, do so now. */
5221 if (!n)
5222 {
5223 t = copy_node (*tp);
5224
5225 /* Remember this SAVE_EXPR. */
5226 st->put (*tp, t);
5227 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5228 st->put (t, t);
5229 }
5230 else
5231 {
5232 /* We've already walked into this SAVE_EXPR; don't do it again. */
5233 *walk_subtrees = 0;
5234 t = *n;
5235 }
5236
5237 /* Replace this SAVE_EXPR with the copy. */
5238 *tp = t;
5239 }
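/* An added note for remap_save_expr above (illustrative, not part of the
   original sources): after the first remap of a SAVE_EXPR S, ST holds both

     S  -> S'      and      S' -> S'

   so a later visit of either the original or the copy yields the same S'
   and, by clearing *WALK_SUBTREES, the body of S is not walked twice.  */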
5240
5241 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5242 label, copies the declaration and enters it in the decl map of the
5243 copy_body_data carried in WI->INFO. */
5244
5245 static tree
5246 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5247 bool *handled_ops_p ATTRIBUTE_UNUSED,
5248 struct walk_stmt_info *wi)
5249 {
5250 copy_body_data *id = (copy_body_data *) wi->info;
5251 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5252
5253 if (stmt)
5254 {
5255 tree decl = gimple_label_label (stmt);
5256
5257 /* Copy the decl and remember the copy. */
5258 insert_decl_map (id, decl, id->copy_decl (decl, id));
5259 }
5260
5261 return NULL_TREE;
5262 }
5263
5264 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5265 struct walk_stmt_info *wi);
5266
5267 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5268 Using the decl map of the copy_body_data carried in the walk_stmt_info
5269 passed as DATA, remaps all local declarations to appropriate
5270 replacements in gimple operands. */
5271
5272 static tree
5273 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5274 {
5275 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5276 copy_body_data *id = (copy_body_data *) wi->info;
5277 hash_map<tree, tree> *st = id->decl_map;
5278 tree *n;
5279 tree expr = *tp;
5280
5281 /* For recursive invocations this is no longer the LHS itself. */
5282 bool is_lhs = wi->is_lhs;
5283 wi->is_lhs = false;
5284
5285 if (TREE_CODE (expr) == SSA_NAME)
5286 {
5287 *tp = remap_ssa_name (*tp, id);
5288 *walk_subtrees = 0;
5289 if (is_lhs)
5290 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5291 }
5292 /* Only a local declaration (variable or label). */
5293 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5294 || TREE_CODE (expr) == LABEL_DECL)
5295 {
5296 /* Lookup the declaration. */
5297 n = st->get (expr);
5298
5299 /* If it's there, remap it. */
5300 if (n)
5301 *tp = *n;
5302 *walk_subtrees = 0;
5303 }
5304 else if (TREE_CODE (expr) == STATEMENT_LIST
5305 || TREE_CODE (expr) == BIND_EXPR
5306 || TREE_CODE (expr) == SAVE_EXPR)
5307 gcc_unreachable ();
5308 else if (TREE_CODE (expr) == TARGET_EXPR)
5309 {
5310 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5311 It's OK for this to happen if it was part of a subtree that
5312 isn't immediately expanded, such as operand 2 of another
5313 TARGET_EXPR. */
5314 if (!TREE_OPERAND (expr, 1))
5315 {
5316 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5317 TREE_OPERAND (expr, 3) = NULL_TREE;
5318 }
5319 }
5320 else if (TREE_CODE (expr) == OMP_CLAUSE)
5321 {
5322 /* Before the omplower pass completes, some OMP clauses can contain
5323 sequences that are neither copied by gimple_seq_copy nor walked by
5324 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5325 in those situations, we have to copy and process them explicitly. */
5326
5327 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5328 {
5329 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5330 seq = duplicate_remap_omp_clause_seq (seq, wi);
5331 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5332 }
5333 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5334 {
5335 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5336 seq = duplicate_remap_omp_clause_seq (seq, wi);
5337 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5338 }
5339 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5340 {
5341 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5342 seq = duplicate_remap_omp_clause_seq (seq, wi);
5343 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5344 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5345 seq = duplicate_remap_omp_clause_seq (seq, wi);
5346 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5347 }
5348 }
5349
5350 /* Keep iterating. */
5351 return NULL_TREE;
5352 }
5353
5354
5355 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5356 Using the decl map of the copy_body_data carried in WI->INFO, remaps
5357 all local declarations to appropriate replacements in gimple
5358 statements. */
5359
5360 static tree
5361 replace_locals_stmt (gimple_stmt_iterator *gsip,
5362 bool *handled_ops_p ATTRIBUTE_UNUSED,
5363 struct walk_stmt_info *wi)
5364 {
5365 copy_body_data *id = (copy_body_data *) wi->info;
5366 gimple *gs = gsi_stmt (*gsip);
5367
5368 if (gbind *stmt = dyn_cast <gbind *> (gs))
5369 {
5370 tree block = gimple_bind_block (stmt);
5371
5372 if (block)
5373 {
5374 remap_block (&block, id);
5375 gimple_bind_set_block (stmt, block);
5376 }
5377
5378 /* This will remap a lot of the same decls again, but this should be
5379 harmless. */
5380 if (gimple_bind_vars (stmt))
5381 {
5382 tree old_var, decls = gimple_bind_vars (stmt);
5383
5384 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5385 if (!can_be_nonlocal (old_var, id)
5386 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5387 remap_decl (old_var, id);
5388
5389 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5390 id->prevent_decl_creation_for_types = true;
5391 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5392 id->prevent_decl_creation_for_types = false;
5393 }
5394 }
5395
5396 /* Keep iterating. */
5397 return NULL_TREE;
5398 }
5399
5400 /* Create a copy of SEQ and remap all decls in it. */
5401
5402 static gimple_seq
5403 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5404 {
5405 if (!seq)
5406 return NULL;
5407
5408 /* Any labels in OMP sequences can only be referred to from within the
5409 sequence itself, so we can both mark them as local and remap them here. */
5410 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5411 gimple_seq copy = gimple_seq_copy (seq);
5412 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5413 return copy;
5414 }
5415
5416 /* Copies everything in SEQ and replaces variables and labels local to
5417 current_function_decl. */
5418
5419 gimple_seq
5420 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5421 {
5422 copy_body_data id;
5423 struct walk_stmt_info wi;
5424 gimple_seq copy;
5425
5426 /* There's nothing to do for an empty sequence. */
5427 if (seq == NULL)
5428 return seq;
5429
5430 /* Set up ID. */
5431 memset (&id, 0, sizeof (id));
5432 id.src_fn = current_function_decl;
5433 id.dst_fn = current_function_decl;
5434 id.src_cfun = cfun;
5435 id.decl_map = new hash_map<tree, tree>;
5436 id.debug_map = NULL;
5437
5438 id.copy_decl = copy_decl_no_change;
5439 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5440 id.transform_new_cfg = false;
5441 id.transform_return_to_modify = false;
5442 id.transform_parameter = false;
5443 id.transform_lang_insert_block = NULL;
5444
5445 /* Walk the tree once to find local labels. */
5446 memset (&wi, 0, sizeof (wi));
5447 hash_set<tree> visited;
5448 wi.info = &id;
5449 wi.pset = &visited;
5450 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5451
5452 copy = gimple_seq_copy (seq);
5453
5454 /* Walk the copy, remapping decls. */
5455 memset (&wi, 0, sizeof (wi));
5456 wi.info = &id;
5457 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5458
5459 /* Clean up. */
5460 delete id.decl_map;
5461 if (id.debug_map)
5462 delete id.debug_map;
5463 if (id.dependence_map)
5464 {
5465 delete id.dependence_map;
5466 id.dependence_map = NULL;
5467 }
5468
5469 return copy;
5470 }
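/* A hedged usage sketch for copy_gimple_seq_and_replace_locals above
   (illustrative only; SEQ stands for some statement sequence belonging to
   current_function_decl, e.g. an OMP clause body being duplicated):

     gimple_seq dup = copy_gimple_seq_and_replace_locals (seq);

   DUP is a deep copy of SEQ in which every automatic VAR_DECL and every
   LABEL_DECL local to current_function_decl has been replaced by a fresh
   copy, so the original and the duplicate can be edited independently.  */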
5471
5472
5473 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5474
5475 static tree
5476 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5477 {
5478 if (*tp == data)
5479 return (tree) data;
5480 else
5481 return NULL;
5482 }
5483
5484 DEBUG_FUNCTION bool
5485 debug_find_tree (tree top, tree search)
5486 {
5487 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5488 }
5489
5490
5491 /* Declare the variables created by the inliner. Add all the variables in
5492 VARS to BLOCK. */
5493
5494 static void
5495 declare_inline_vars (tree block, tree vars)
5496 {
5497 tree t;
5498 for (t = vars; t; t = DECL_CHAIN (t))
5499 {
5500 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5501 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5502 add_local_decl (cfun, t);
5503 }
5504
5505 if (block)
5506 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5507 }
5508
5509 /* Finish copying DECL as COPY. DECL originally lived in ID->src_fn,
5510 but COPY will be placed in ID->dst_fn. Sets up the debug-related flags,
5511 the abstract origin and the context of COPY, and returns it. */
5512
5513 tree
5514 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5515 {
5516 /* Don't generate debug information for the copy if we wouldn't have
5517 generated it for the original either. */
5518 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5519 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5520
5521 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5522 declaration inspired this copy. */
5523 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5524
5525 /* The new variable/label has no RTL, yet. */
5526 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5527 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5528 SET_DECL_RTL (copy, 0);
5529 /* For vector typed decls make sure to update DECL_MODE according
5530 to the new function context. */
5531 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5532 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5533
5534 /* These args would always appear unused, if not for this. */
5535 TREE_USED (copy) = 1;
5536
5537 /* Set the context for the new declaration. */
5538 if (!DECL_CONTEXT (decl))
5539 /* Globals stay global. */
5540 ;
5541 else if (DECL_CONTEXT (decl) != id->src_fn)
5542 /* Things that weren't in the scope of the function we're inlining
5543 from aren't in the scope we're inlining to, either. */
5544 ;
5545 else if (TREE_STATIC (decl))
5546 /* Function-scoped static variables should stay in the original
5547 function. */
5548 ;
5549 else
5550 {
5551 /* Ordinary automatic local variables are now in the scope of the
5552 new function. */
5553 DECL_CONTEXT (copy) = id->dst_fn;
5554 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5555 {
5556 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5557 DECL_ATTRIBUTES (copy)
5558 = tree_cons (get_identifier ("omp simt private"), NULL,
5559 DECL_ATTRIBUTES (copy));
5560 id->dst_simt_vars->safe_push (copy);
5561 }
5562 }
5563
5564 return copy;
5565 }
5566
5567 static tree
5568 copy_decl_to_var (tree decl, copy_body_data *id)
5569 {
5570 tree copy, type;
5571
5572 gcc_assert (TREE_CODE (decl) == PARM_DECL
5573 || TREE_CODE (decl) == RESULT_DECL);
5574
5575 type = TREE_TYPE (decl);
5576
5577 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5578 VAR_DECL, DECL_NAME (decl), type);
5579 if (DECL_PT_UID_SET_P (decl))
5580 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5581 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5582 TREE_READONLY (copy) = TREE_READONLY (decl);
5583 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5584 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5585
5586 return copy_decl_for_dup_finish (id, decl, copy);
5587 }
5588
5589 /* Like copy_decl_to_var, but create a return slot object instead of a
5590 pointer variable for return by invisible reference. */
5591
5592 static tree
5593 copy_result_decl_to_var (tree decl, copy_body_data *id)
5594 {
5595 tree copy, type;
5596
5597 gcc_assert (TREE_CODE (decl) == PARM_DECL
5598 || TREE_CODE (decl) == RESULT_DECL);
5599
5600 type = TREE_TYPE (decl);
5601 if (DECL_BY_REFERENCE (decl))
5602 type = TREE_TYPE (type);
5603
5604 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5605 VAR_DECL, DECL_NAME (decl), type);
5606 if (DECL_PT_UID_SET_P (decl))
5607 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5608 TREE_READONLY (copy) = TREE_READONLY (decl);
5609 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5610 if (!DECL_BY_REFERENCE (decl))
5611 {
5612 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5613 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5614 }
5615
5616 return copy_decl_for_dup_finish (id, decl, copy);
5617 }
5618
5619 tree
5620 copy_decl_no_change (tree decl, copy_body_data *id)
5621 {
5622 tree copy;
5623
5624 copy = copy_node (decl);
5625
5626 /* The COPY is not abstract; it will be generated in DST_FN. */
5627 DECL_ABSTRACT_P (copy) = false;
5628 lang_hooks.dup_lang_specific_decl (copy);
5629
5630 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5631 been taken; it's for internal bookkeeping in expand_goto_internal. */
5632 if (TREE_CODE (copy) == LABEL_DECL)
5633 {
5634 TREE_ADDRESSABLE (copy) = 0;
5635 LABEL_DECL_UID (copy) = -1;
5636 }
5637
5638 return copy_decl_for_dup_finish (id, decl, copy);
5639 }
5640
5641 static tree
5642 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5643 {
5644 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5645 return copy_decl_to_var (decl, id);
5646 else
5647 return copy_decl_no_change (decl, id);
5648 }
5649
5650 /* Return a copy of the function's argument tree. */
5651 static tree
5652 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5653 bitmap args_to_skip, tree *vars)
5654 {
5655 tree arg, *parg;
5656 tree new_parm = NULL;
5657 int i = 0;
5658
5659 parg = &new_parm;
5660
5661 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5662 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5663 {
5664 tree new_tree = remap_decl (arg, id);
5665 if (TREE_CODE (new_tree) != PARM_DECL)
5666 new_tree = id->copy_decl (arg, id);
5667 lang_hooks.dup_lang_specific_decl (new_tree);
5668 *parg = new_tree;
5669 parg = &DECL_CHAIN (new_tree);
5670 }
5671 else if (!id->decl_map->get (arg))
5672 {
5673 /* Make an equivalent VAR_DECL. If the argument was used
5674 as a temporary variable later in the function, its uses will be
5675 replaced by the local variable. */
5676 tree var = copy_decl_to_var (arg, id);
5677 insert_decl_map (id, arg, var);
5678 /* Declare this new variable. */
5679 DECL_CHAIN (var) = *vars;
5680 *vars = var;
5681 }
5682 return new_parm;
5683 }
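/* A worked example for copy_arguments_for_versioning above (illustrative
   assumption, not part of the original sources): with ORIG_PARM = (a, b, c)
   and ARGS_TO_SKIP = { 1 }, the returned chain is (a', c'), where a' and c'
   are the remapped PARM_DECLs, while b gets an equivalent VAR_DECL b' that
   is chained onto *VARS and recorded in ID->decl_map, so any remaining uses
   of b in the copied body remap to the local variable b'.  */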
5684
5685 /* Return a copy of the function's static chain. */
5686 static tree
5687 copy_static_chain (tree static_chain, copy_body_data * id)
5688 {
5689 tree *chain_copy, *pvar;
5690
5691 chain_copy = &static_chain;
5692 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5693 {
5694 tree new_tree = remap_decl (*pvar, id);
5695 lang_hooks.dup_lang_specific_decl (new_tree);
5696 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5697 *pvar = new_tree;
5698 }
5699 return static_chain;
5700 }
5701
5702 /* Return true if the function is allowed to be versioned.
5703 This is a guard for the versioning functionality. */
5704
5705 bool
5706 tree_versionable_function_p (tree fndecl)
5707 {
5708 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5709 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5710 }
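/* An added note (illustrative, not part of the original sources): a
   function declared e.g.

     void f (void) __attribute__ ((noclone));

   is not versionable, and neither is any function for which copy_forbidden
   reports a reason.  */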
5711
5712 /* Update clone info after duplication. */
5713
5714 static void
5715 update_clone_info (copy_body_data * id)
5716 {
5717 struct cgraph_node *node;
5718 if (!id->dst_node->clones)
5719 return;
5720 for (node = id->dst_node->clones; node != id->dst_node;)
5721 {
5722 /* First update replace maps to match the new body. */
5723 if (node->clone.tree_map)
5724 {
5725 unsigned int i;
5726 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5727 {
5728 struct ipa_replace_map *replace_info;
5729 replace_info = (*node->clone.tree_map)[i];
5730 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5731 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5732 }
5733 }
5734 if (node->clones)
5735 node = node->clones;
5736 else if (node->next_sibling_clone)
5737 node = node->next_sibling_clone;
5738 else
5739 {
5740 while (node != id->dst_node && !node->next_sibling_clone)
5741 node = node->clone_of;
5742 if (node != id->dst_node)
5743 node = node->next_sibling_clone;
5744 }
5745 }
5746 }
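/* An added note for update_clone_info above (illustrative, not part of the
   original sources): the loop is a preorder walk of the clone tree rooted
   at ID->dst_node - descend into node->clones first, then move to
   next_sibling_clone, and climb back through clone_of when a subtree is
   exhausted - so each transitive clone's replace map is rewritten once.  */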
5747
5748 /* Create a copy of a function's tree.
5749 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5750 of the original function and the new copied function
5751 respectively. In case we want to replace a DECL
5752 tree with another tree while duplicating the function's
5753 body, TREE_MAP represents the mapping between these
5754 trees. If UPDATE_CLONES is set, the call_stmt fields
5755 of edges of clones of the function will be updated.
5756
5757 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5758 from the new version.
5759 If SKIP_RETURN is true, the new version will return void.
5760 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5761 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5762 */
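/* A hedged sketch of how a caller might drive the function below
   (illustrative only; real callers live in the IPA cloning code, and the
   decls and parameter index here are made up):

     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;       // identify the parameter by number
     map->parm_num = 1;               // second parameter of OLD_DECL
     map->new_tree = build_int_cst (integer_type_node, 5);
     map->replace_p = true;
     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     vec_safe_push (tree_map, map);
     tree_function_versioning (old_decl, new_decl, tree_map,
                               false, NULL, false, NULL, NULL);

   The result is NEW_DECL holding a copy of OLD_DECL's body in which the
   second parameter is initialized from the constant 5 via
   setup_one_parameter.  */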
5763 void
5764 tree_function_versioning (tree old_decl, tree new_decl,
5765 vec<ipa_replace_map *, va_gc> *tree_map,
5766 bool update_clones, bitmap args_to_skip,
5767 bool skip_return, bitmap blocks_to_copy,
5768 basic_block new_entry)
5769 {
5770 struct cgraph_node *old_version_node;
5771 struct cgraph_node *new_version_node;
5772 copy_body_data id;
5773 tree p;
5774 unsigned i;
5775 struct ipa_replace_map *replace_info;
5776 basic_block old_entry_block, bb;
5777 auto_vec<gimple *, 10> init_stmts;
5778 tree vars = NULL_TREE;
5779 bitmap debug_args_to_skip = args_to_skip;
5780
5781 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5782 && TREE_CODE (new_decl) == FUNCTION_DECL);
5783 DECL_POSSIBLY_INLINED (old_decl) = 1;
5784
5785 old_version_node = cgraph_node::get (old_decl);
5786 gcc_checking_assert (old_version_node);
5787 new_version_node = cgraph_node::get (new_decl);
5788 gcc_checking_assert (new_version_node);
5789
5790 /* Copy over debug args. */
5791 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5792 {
5793 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5794 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5795 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5796 old_debug_args = decl_debug_args_lookup (old_decl);
5797 if (old_debug_args)
5798 {
5799 new_debug_args = decl_debug_args_insert (new_decl);
5800 *new_debug_args = vec_safe_copy (*old_debug_args);
5801 }
5802 }
5803
5804 /* Output the inlining info for this abstract function, since it has been
5805 inlined. If we don't do this now, we can lose the information about the
5806 variables in the function when the blocks get blown away as soon as we
5807 remove the cgraph node. */
5808 (*debug_hooks->outlining_inline_function) (old_decl);
5809
5810 DECL_ARTIFICIAL (new_decl) = 1;
5811 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5812 if (DECL_ORIGIN (old_decl) == old_decl)
5813 old_version_node->used_as_abstract_origin = true;
5814 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5815
5816 /* Prepare the data structures for the tree copy. */
5817 memset (&id, 0, sizeof (id));
5818
5819 /* Generate a new name for the new version. */
5820 id.statements_to_fold = new hash_set<gimple *>;
5821
5822 id.decl_map = new hash_map<tree, tree>;
5823 id.debug_map = NULL;
5824 id.src_fn = old_decl;
5825 id.dst_fn = new_decl;
5826 id.src_node = old_version_node;
5827 id.dst_node = new_version_node;
5828 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5829 id.blocks_to_copy = blocks_to_copy;
5830
5831 id.copy_decl = copy_decl_no_change;
5832 id.transform_call_graph_edges
5833 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5834 id.transform_new_cfg = true;
5835 id.transform_return_to_modify = false;
5836 id.transform_parameter = false;
5837 id.transform_lang_insert_block = NULL;
5838
5839 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5840 (DECL_STRUCT_FUNCTION (old_decl));
5841 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5842 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5843 initialize_cfun (new_decl, old_decl,
5844 new_entry ? new_entry->count : old_entry_block->count);
5845 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5846 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5847 = id.src_cfun->gimple_df->ipa_pta;
5848
5849 /* Copy the function's static chain. */
5850 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5851 if (p)
5852 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5853 = copy_static_chain (p, &id);
5854
5855 /* If there's a tree_map, prepare for substitution. */
5856 if (tree_map)
5857 for (i = 0; i < tree_map->length (); i++)
5858 {
5859 gimple *init;
5860 replace_info = (*tree_map)[i];
5861 if (replace_info->replace_p)
5862 {
5863 int parm_num = -1;
5864 if (!replace_info->old_tree)
5865 {
5866 int p = replace_info->parm_num;
5867 tree parm;
5868 tree req_type, new_type;
5869
5870 for (parm = DECL_ARGUMENTS (old_decl); p;
5871 parm = DECL_CHAIN (parm))
5872 p--;
5873 replace_info->old_tree = parm;
5874 parm_num = replace_info->parm_num;
5875 req_type = TREE_TYPE (parm);
5876 new_type = TREE_TYPE (replace_info->new_tree);
5877 if (!useless_type_conversion_p (req_type, new_type))
5878 {
5879 if (fold_convertible_p (req_type, replace_info->new_tree))
5880 replace_info->new_tree
5881 = fold_build1 (NOP_EXPR, req_type,
5882 replace_info->new_tree);
5883 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5884 replace_info->new_tree
5885 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5886 replace_info->new_tree);
5887 else
5888 {
5889 if (dump_file)
5890 {
5891 fprintf (dump_file, " const ");
5892 print_generic_expr (dump_file,
5893 replace_info->new_tree);
5894 fprintf (dump_file,
5895 " can't be converted to param ");
5896 print_generic_expr (dump_file, parm);
5897 fprintf (dump_file, "\n");
5898 }
5899 replace_info->old_tree = NULL;
5900 }
5901 }
5902 }
5903 else
5904 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5905 if (replace_info->old_tree)
5906 {
5907 init = setup_one_parameter (&id, replace_info->old_tree,
5908 replace_info->new_tree, id.src_fn,
5909 NULL,
5910 &vars);
5911 if (init)
5912 init_stmts.safe_push (init);
5913 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5914 {
5915 if (parm_num == -1)
5916 {
5917 tree parm;
5918 int p;
5919 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5920 parm = DECL_CHAIN (parm), p++)
5921 if (parm == replace_info->old_tree)
5922 {
5923 parm_num = p;
5924 break;
5925 }
5926 }
5927 if (parm_num != -1)
5928 {
5929 if (debug_args_to_skip == args_to_skip)
5930 {
5931 debug_args_to_skip = BITMAP_ALLOC (NULL);
5932 bitmap_copy (debug_args_to_skip, args_to_skip);
5933 }
5934 bitmap_clear_bit (debug_args_to_skip, parm_num);
5935 }
5936 }
5937 }
5938 }
5939 }
5940 /* Copy the function's arguments. */
5941 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5942 DECL_ARGUMENTS (new_decl)
5943 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5944 args_to_skip, &vars);
5945
5946 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5947 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5948
5949 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5950
5951 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5952 /* Add local vars. */
5953 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5954
5955 if (DECL_RESULT (old_decl) == NULL_TREE)
5956 ;
5957 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5958 {
5959 DECL_RESULT (new_decl)
5960 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5961 RESULT_DECL, NULL_TREE, void_type_node);
5962 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5963 cfun->returns_struct = 0;
5964 cfun->returns_pcc_struct = 0;
5965 }
5966 else
5967 {
5968 tree old_name;
5969 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5970 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5971 if (gimple_in_ssa_p (id.src_cfun)
5972 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5973 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5974 {
5975 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5976 insert_decl_map (&id, old_name, new_name);
5977 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5978 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5979 }
5980 }
5981
5982 /* Set up the destination function's loop tree. */
5983 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5984 {
5985 cfun->curr_properties &= ~PROP_loops;
5986 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5987 cfun->curr_properties |= PROP_loops;
5988 }
5989
5990 /* Copy the Function's body. */
5991 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5992 new_entry);
5993
5994 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5995 number_blocks (new_decl);
5996
5997 /* We want to create the BB unconditionally, so that the addition of
5998 debug stmts doesn't affect BB count, which may in the end cause
5999 codegen differences. */
6000 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6001 while (init_stmts.length ())
6002 insert_init_stmt (&id, bb, init_stmts.pop ());
6003 update_clone_info (&id);
6004
6005 /* Remap the nonlocal_goto_save_area, if any. */
6006 if (cfun->nonlocal_goto_save_area)
6007 {
6008 struct walk_stmt_info wi;
6009
6010 memset (&wi, 0, sizeof (wi));
6011 wi.info = &id;
6012 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6013 }
6014
6015 /* Clean up. */
6016 delete id.decl_map;
6017 if (id.debug_map)
6018 delete id.debug_map;
6019 free_dominance_info (CDI_DOMINATORS);
6020 free_dominance_info (CDI_POST_DOMINATORS);
6021
6022 update_max_bb_count ();
6023 fold_marked_statements (0, id.statements_to_fold);
6024 delete id.statements_to_fold;
6025 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6026 if (id.dst_node->definition)
6027 cgraph_edge::rebuild_references ();
6028 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6029 {
6030 calculate_dominance_info (CDI_DOMINATORS);
6031 fix_loop_structure (NULL);
6032 }
6033 update_ssa (TODO_update_ssa);
6034
6035 /* After partial cloning we need to rescale frequencies, so they are
6036 within proper range in the cloned function. */
6037 if (new_entry)
6038 {
6039 struct cgraph_edge *e;
6040 rebuild_frequencies ();
6041
6042 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6043 for (e = new_version_node->callees; e; e = e->next_callee)
6044 {
6045 basic_block bb = gimple_bb (e->call_stmt);
6046 e->count = bb->count;
6047 }
6048 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6049 {
6050 basic_block bb = gimple_bb (e->call_stmt);
6051 e->count = bb->count;
6052 }
6053 }
6054
6055 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6056 {
6057 tree parm;
6058 vec<tree, va_gc> **debug_args = NULL;
6059 unsigned int len = 0;
6060 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6061 parm; parm = DECL_CHAIN (parm), i++)
6062 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6063 {
6064 tree ddecl;
6065
6066 if (debug_args == NULL)
6067 {
6068 debug_args = decl_debug_args_insert (new_decl);
6069 len = vec_safe_length (*debug_args);
6070 }
6071 ddecl = make_node (DEBUG_EXPR_DECL);
6072 DECL_ARTIFICIAL (ddecl) = 1;
6073 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6074 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6075 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6076 vec_safe_push (*debug_args, ddecl);
6077 }
6078 if (debug_args != NULL)
6079 {
6080 /* On the callee side, add
6081 DEBUG D#Y s=> parm
6082 DEBUG var => D#Y
6083 stmts to the first bb where var is a VAR_DECL created for the
6084 optimized away parameter in DECL_INITIAL block. This hints
6085 in the debug info that var (whose DECL_ORIGIN is the parm
6086 PARM_DECL) is optimized away, but could be looked up at the
6087 call site as value of D#X there. */
6088 tree var = vars, vexpr;
6089 gimple_stmt_iterator cgsi
6090 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6091 gimple *def_temp;
6092 var = vars;
6093 i = vec_safe_length (*debug_args);
6094 do
6095 {
6096 i -= 2;
6097 while (var != NULL_TREE
6098 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6099 var = TREE_CHAIN (var);
6100 if (var == NULL_TREE)
6101 break;
6102 vexpr = make_node (DEBUG_EXPR_DECL);
6103 parm = (**debug_args)[i];
6104 DECL_ARTIFICIAL (vexpr) = 1;
6105 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6106 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6107 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6108 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6109 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6110 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6111 }
6112 while (i > len);
6113 }
6114 }
6115
6116 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6117 BITMAP_FREE (debug_args_to_skip);
6118 free_dominance_info (CDI_DOMINATORS);
6119 free_dominance_info (CDI_POST_DOMINATORS);
6120
6121 gcc_assert (!id.debug_stmts.exists ());
6122 pop_cfun ();
6123 return;
6124 }
6125
6126 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6127 the callee and return the inlined body on success. */
6128
6129 tree
6130 maybe_inline_call_in_expr (tree exp)
6131 {
6132 tree fn = get_callee_fndecl (exp);
6133
6134 /* We can only try to inline "const" functions. */
6135 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6136 {
6137 call_expr_arg_iterator iter;
6138 copy_body_data id;
6139 tree param, arg, t;
6140 hash_map<tree, tree> decl_map;
6141
6142 /* Remap the parameters. */
6143 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6144 param;
6145 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6146 decl_map.put (param, arg);
6147
6148 memset (&id, 0, sizeof (id));
6149 id.src_fn = fn;
6150 id.dst_fn = current_function_decl;
6151 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6152 id.decl_map = &decl_map;
6153
6154 id.copy_decl = copy_decl_no_change;
6155 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6156 id.transform_new_cfg = false;
6157 id.transform_return_to_modify = true;
6158 id.transform_parameter = true;
6159 id.transform_lang_insert_block = NULL;
6160
6161 /* Make sure not to unshare trees behind the front-end's back
6162 since front-end specific mechanisms may rely on sharing. */
6163 id.regimplify = false;
6164 id.do_not_unshare = true;
6165
6166 /* We're not inside any EH region. */
6167 id.eh_lp_nr = 0;
6168
6169 t = copy_tree_body (&id);
6170
6171 /* We can only return something suitable for use in a GENERIC
6172 expression tree. */
6173 if (TREE_CODE (t) == MODIFY_EXPR)
6174 return TREE_OPERAND (t, 1);
6175 }
6176
6177 return NULL_TREE;
6178 }
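/* A hedged illustration for maybe_inline_call_in_expr above (an added
   note, not part of the original sources): given a "const" callee whose
   GENERIC body is still available, e.g.

     int sq (int x) { return x * x; }

   a CALL_EXPR invoking sq can be replaced by the right-hand side of the
   copied body's MODIFY_EXPR, i.e. x * x with X remapped to the actual
   argument through the parameter-to-argument decl map built above.  */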
6179
6180 /* Duplicate a type, fields and all. */
6181
6182 tree
6183 build_duplicate_type (tree type)
6184 {
6185 struct copy_body_data id;
6186
6187 memset (&id, 0, sizeof (id));
6188 id.src_fn = current_function_decl;
6189 id.dst_fn = current_function_decl;
6190 id.src_cfun = cfun;
6191 id.decl_map = new hash_map<tree, tree>;
6192 id.debug_map = NULL;
6193 id.copy_decl = copy_decl_no_change;
6194
6195 type = remap_type_1 (type, &id);
6196
6197 delete id.decl_map;
6198 if (id.debug_map)
6199 delete id.debug_map;
6200
6201 TYPE_CANONICAL (type) = type;
6202
6203 return type;
6204 }
6205
6206 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6207 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6208 evaluation. */
6209
6210 tree
6211 copy_fn (tree fn, tree& parms, tree& result)
6212 {
6213 copy_body_data id;
6214 tree param;
6215 hash_map<tree, tree> decl_map;
6216
6217 tree *p = &parms;
6218 *p = NULL_TREE;
6219
6220 memset (&id, 0, sizeof (id));
6221 id.src_fn = fn;
6222 id.dst_fn = current_function_decl;
6223 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6224 id.decl_map = &decl_map;
6225
6226 id.copy_decl = copy_decl_no_change;
6227 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6228 id.transform_new_cfg = false;
6229 id.transform_return_to_modify = false;
6230 id.transform_parameter = true;
6231 id.transform_lang_insert_block = NULL;
6232
6233 /* Make sure not to unshare trees behind the front-end's back
6234 since front-end specific mechanisms may rely on sharing. */
6235 id.regimplify = false;
6236 id.do_not_unshare = true;
6237
6238 /* We're not inside any EH region. */
6239 id.eh_lp_nr = 0;
6240
6241 /* Remap the parameters and result and return them to the caller. */
6242 for (param = DECL_ARGUMENTS (fn);
6243 param;
6244 param = DECL_CHAIN (param))
6245 {
6246 *p = remap_decl (param, &id);
6247 p = &DECL_CHAIN (*p);
6248 }
6249
6250 if (DECL_RESULT (fn))
6251 result = remap_decl (DECL_RESULT (fn), &id);
6252 else
6253 result = NULL_TREE;
6254
6255 return copy_tree_body (&id);
6256 }
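/* A hedged usage sketch for copy_fn above (illustrative; the C++ constexpr
   evaluator is the intended client):

     tree parms, result;
     tree body = copy_fn (fndecl, parms, result);

   BODY is an unshared copy of DECL_SAVED_TREE (fndecl); PARMS receives the
   chain of remapped PARM_DECL copies and RESULT the remapped RESULT_DECL,
   so the caller can bind argument values to PARMS and read the computed
   value back out of RESULT without disturbing the original body.  */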